Dataset schema, reconstructed from the flattened viewer header (one row per column, with the viewer's type and value statistics):

| column | dtype | value stats |
|---|---|---|
| repo_name | string | lengths 5 to 92 |
| path | string | lengths 4 to 221 |
| copies | string | 19 classes |
| size | string | lengths 4 to 6 |
| content | string | lengths 766 to 896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51 to 99.9 |
| line_max | int64 | 32 to 997 |
| alpha_frac | float64 | 0.25 to 0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5 to 13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |

Each record below is shown as a header line (repo_name, path, copies, size), the file content, and a footer line with the remaining columns.
repo_name: moden-py/SWAPY | path: swapy-ob.py | copies: 1 | size: 1727
# GUI object/properties browser.
# Copyright (C) 2011 Matiychuk D.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
#Boa:App:BoaApp
import sys
import traceback
import wx
import _mainframe
import tools
def hook(exctype, value, tb):
"""
    Handle all unexpected exceptions by showing the traceback in a message box.
"""
traceback_text = ''.join(traceback.format_exception(exctype, value, tb, 5))
tools.show_error_message('ERROR', traceback_text)
if not __debug__:
# Catch all unhandled exceptions and show the details in a msgbox.
sys.excepthook = hook
modules = {'_mainframe': [0, '', '_mainframe.py'], 'proxy': [0, '', 'proxy.py']}
class BoaApp(wx.App):
def OnInit(self):
self.main = _mainframe.create(None)
self.main.Center()
self.main.Show()
self.SetTopWindow(self.main)
return True
def main():
application = BoaApp(0)
application.MainLoop()
if __name__ == '__main__':
main()
license: lgpl-2.1 | hash: -9,084,540,459,404,032,000 | line_mean: 25.412698 | line_max: 79 | alpha_frac: 0.665316 | autogenerated: false | ratio: 3.651163 | config_test: false | has_no_keywords: false | few_assignments: false
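The swapy-ob.py record above installs a global exception hook so that any exception escaping the application is reported to the user. A minimal standalone sketch of the same `sys.excepthook` pattern (the reporting function here just prints; SWAPY shows a message box via `tools.show_error_message`):

```python
import sys
import traceback

def hook(exctype, value, tb):
    # Format the exception with up to 5 stack frames, like the SWAPY hook.
    text = ''.join(traceback.format_exception(exctype, value, tb, 5))
    sys.stderr.write('Unhandled exception:\n' + text)

sys.excepthook = hook  # invoked for any exception that propagates out of main()

def main():
    raise RuntimeError('boom')  # demo: routed through hook() instead of a bare crash

if __name__ == '__main__':
    main()
```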
repo_name: blindtex/blindtex | path: blindtex/iotools/iotools.py | copies: 1 | size: 5776
#-*-:coding:utf-8-*-
import os
import copy
import json
import string
import subprocess
#from blindtex import mainBlindtex
import sys
from sys import argv
#HU1
#Method to open a file and return its content as a string.
def openFile(fileName):
    '''This function takes a file name and returns the file's content as a string.
    Args:
        fileName(str): The name of the file to be opened.
    Returns:
        str: The content of the file.'''
try:
myFile = open(fileName)
stringDocument = myFile.read()
myFile.close()
return stringDocument
except IOError:
print("File %s could not be openned."%(fileName))
return ""
#EndOfFunction
def read_json_file(fileName):
    '''This function reads a JSON file and returns its parsed content.
    Args:
        fileName(str): The name of the JSON file to be opened.
    Returns:
        The parsed JSON content (typically a dict or list).'''
try:
with open(fileName,'r') as myFile:
stringDocument = json.load(myFile)
except OSError as err:
print("OS error: {0}".format(err))
raise
except ValueError:
print("Could not parser",fileName,"file, please check json syntax.")
raise
except:
print("Unexpected error:", sys.exc_info()[0])
raise
return stringDocument
#Replace the document containing the LaTeX math with the output of the function seekAndReplace. Write the content in a new file.
def replaceAndWrite(contentList, replacedDocument, fileName):
'''Replace the document containing the LaTeX math with the output of the function seekAndReplace. Write the content in a new file.
Args:
contentList(list[str,str,str]): The list generated by extractContent.
replacedDocument(str): the LaTeX content without formulas, just markers.
fileName(str): The name of the .tex file where the result will be written. '''
newContentList = copy.deepcopy(contentList)
newContentList[1] = replacedDocument
try:
myFile = open(fileName, 'w')#TODO Check if the file already exits, warn about that and decide if the user wants to replace it.
        myFile.write(' '.join(newContentList))  # ' '.join replaces the Python 2-only string.join
myFile.close()
except IOError:
print("File could not be oppened.")
return
#EndOfFunction
def convertToHtml(fileName, biblioName=None):
'''This function uses LaTeXML to convert a .tex file in a html with accesible math formulas.
Args:
fileName(str): the name of the .tex file to be processed.
(opt)biblioName(str): the name o a .bib file. '''
noExtensionName = fileName.replace(".tex","")
if(biblioName):
        if(os.name == 'nt'): # i.e. running on Windows
noExtensionBiblio = biblioName.replace(".bib","")
subprocess.call(["latexml","--dest=%s.xml"%(noExtensionName),"--quiet",fileName], shell=True)
subprocess.call(["latexml", "--dest=%s.xml"%(noExtensionBiblio),"--bibtex", biblioName], shell= True)
subprocess.call(["latexmlpost","-dest=%s.xhtml"%(noExtensionName),"--bibliography=%s.xml"%(noExtensionBiblio),noExtensionName+".xml"], shell=True)
else: #TODO: Do not repeat
noExtensionBiblio = biblioName.replace(".bib","")
subprocess.call(["latexml","--dest=%s.xml"%(noExtensionName),"--quiet",fileName])
subprocess.call(["latexml", "--dest=%s.xml"%(noExtensionBiblio),"--bibtex", biblioName])
subprocess.call(["latexmlpost","-dest=%s.xhtml"%(noExtensionName),"--bibliography=%s.xml"%(noExtensionBiblio),noExtensionName+".xml"])
else:
if(os.name == 'nt'):
subprocess.call(["latexml","--dest=%s.xml"%(noExtensionName),"--quiet",fileName], shell = True)#Generates xml file.
subprocess.call(["latexmlpost","-dest=%s.xhtml"%(noExtensionName),noExtensionName+".xml"], shell = True)#Generates xhtml file.
else:
subprocess.call(["latexml","--dest=%s.xml"%(noExtensionName),"--quiet",fileName])#Generates xml file.
subprocess.call(["latexmlpost","-dest=%s.xhtml"%(noExtensionName),noExtensionName+".xml"])#Generates xhtml file.
#EndOfFunction
def convertToPdf(filePath,fileName):
if(os.name == 'nt'):
subprocess.call(['pdflatex','-output-directory',filePath, fileName], shell = True)
subprocess.call(['pdflatex','-output-directory',filePath, fileName], shell = True)
else:
subprocess.call(['pdflatex','-output-directory',filePath, fileName])
subprocess.call(['pdflatex','-output-directory',filePath, fileName])
#EndOfFunction
#TODO: should this take an extension, or is the extension supplied from outside?
def writeHtmlFile(htmlString, fileName):
'''Function to write the html result in a final file.
Args:
htmlString(str): The string with the html content of the final result.
fileName(str): The name of the file where the string will be written. '''
try:
htmlFile = open(fileName,'w')
htmlFile.write(htmlString)
htmlFile.close()
except IOError:
        print('File could not be opened.')
return
#EndOf Function
#This function works just when a .tex file is being converted.
def writeTroubles(strfileName, listtroubleFormulas):
(filePath, name) = os.path.split(strfileName)
try:
registerFile = open(os.path.join(filePath, 'TroubleFormulasOf'+name.replace('.tex','.txt')),'w')
for formula in listtroubleFormulas:
registerFile.write('I had troubles with:\n'+formula+'\n')
registerFile.close()
except IOError:
return
#EndOfFunction
license: gpl-3.0 | hash: -1,899,846,381,258,995,200 | line_mean: 39.65493 | line_max: 158 | alpha_frac: 0.639182 | autogenerated: false | ratio: 4.025802 | config_test: false | has_no_keywords: false | few_assignments: false
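`convertToHtml` and `convertToPdf` duplicate every `subprocess.call` just to toggle `shell=True` on Windows, which the `#TODO: Do not repeat` above flags. A hedged refactor sketch; the helper name `_run` and the function name `convert_to_html` are mine, not blindtex's:

```python
import os
import subprocess

def _run(cmd):
    # On Windows (os.name == 'nt') latexml is usually a wrapper script,
    # so shell=True is required; elsewhere the argv list runs directly.
    subprocess.call(cmd, shell=(os.name == 'nt'))

def convert_to_html(file_name, biblio_name=None):
    base = file_name.replace('.tex', '')
    _run(['latexml', '--dest=%s.xml' % base, '--quiet', file_name])
    post = ['latexmlpost', '--dest=%s.xhtml' % base]
    if biblio_name:
        bib_base = biblio_name.replace('.bib', '')
        _run(['latexml', '--dest=%s.xml' % bib_base, '--bibtex', biblio_name])
        post.append('--bibliography=%s.xml' % bib_base)
    _run(post + [base + '.xml'])
```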
repo_name: fanchao01/pythontools | path: Queue.py | copies: 1 | size: 4492
#!/bin/env python
#-*- encoding: utf-8 -*-
__author__ = "fanchao01"
__version__ = "0.0.1"
'''multi-thread queue like Queue.Queue'''
import threading as _threading
import time as _time
class Full(Exception):
"""Exception Full raised by Queue.put/put_nowait"""
class Empty(Exception):
"""Exception Empty raised by Queue.get/get_nowait"""
class Queue(object):
def __init__(self, maxsize=0):
self.maxsize = maxsize
self.queue = []
#one lock with three condition-waiting queue
self.mutex = _threading.Lock()
self.not_full = _threading.Condition(self.mutex)
self.not_empty = _threading.Condition(self.mutex)
self.all_tasks_done = _threading.Condition(self.mutex)
self.un_finished_tasks = 0
def clear(self):
with self.mutex as lock:
self.queue = []
self.not_full.notify_all()
def task_done(self):
with self.all_tasks_done as condition:
unfinished = self.un_finished_tasks - 1
if unfinished < 0:
raise ValueError("task_done() called too many times")
elif unfinished == 0:
self.all_tasks_done.notify_all()
self.un_finished_tasks = unfinished
def join(self):
with self.all_tasks_done as condition:
while self.un_finished_tasks > 0:
self.all_tasks_done.wait()
def qsize(self):
with self.mutex as lock:
return self._qsize()
def _qsize(self): #there must be a way to get the size of self.queue without lock
return len(self.queue)
    def full(self):
        with self.mutex as lock:
            # use _qsize(): qsize() would re-acquire the mutex and deadlock
            return self._qsize() >= self.maxsize if self.maxsize > 0 else False
    def empty(self):
        with self.mutex as lock:
            return self._qsize() <= 0
def _put(self, ele):
self.queue.append(ele)
self.un_finished_tasks += 1
def put(self, ele, block=True, timeout=None):
with self.not_full as condition:
if self.maxsize > 0:
if not block:
                    if self._qsize() >= self.maxsize: #cannot use self.qsize(), which would relock self.mutex, leading to deadlock
raise Full
elif timeout is None:
while self._qsize() >= self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("timeout must be >0, given(%d)" % timeout)
else:
end = _time.time() + timeout
while self._qsize() >= self.maxsize:
remaining = end - _time.time()
if remaining < 0.0:
raise Full
self.not_full.wait(remaining)
self._put(ele)
self.not_empty.notify()
def put_nowait(self, ele):
self.put(ele, False)
def _get(self):
return self.queue.pop(0)
def get(self, block=True, timeout=None):
with self.not_empty as condition:
if not block:
if self._qsize() == 0:
raise Empty
elif timeout is None:
while self._qsize() == 0:
self.not_empty.wait()
elif timeout < 0:
raise ValueError("timeout must be > 0, given(%d)" % timeout)
else:
end = _time.time() + timeout
while self._qsize() == 0:
remaining = end - _time.time()
if remaining < 0.0:
raise Empty
self.not_empty.wait(remaining)
ele = self._get()
self.not_full.notify()
return ele
    def get_nowait(self):
        return self.get(False)
if __name__ == "__main__":
import random
import time
class Worker(_threading.Thread):
def __init__(self, queue):
super(Worker, self).__init__()
self.queue = queue
def run(self):
time.sleep(random.randint(1, 5) / 10.0)
print self.queue.get()
q = Queue(10)
for i in range(10):
q.put(i)
try:
q.put(11, True, 1)
except Full:
pass
try:
q.put_nowait(11)
except Full:
pass
for i in range(10):
Worker(q).start()
q.task_done()
w = Worker(q)
w.start()
q.put(10)
license: gpl-2.0 | hash: 4,892,882,358,093,603,000 | line_mean: 27.43038 | line_max: 134 | alpha_frac: 0.511131 | autogenerated: false | ratio: 4.050496 | config_test: false | has_no_keywords: false | few_assignments: false
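Queue.py reimplements the blocking semantics of the stdlib `Queue.Queue`. A small producer/consumer exercise of `put`/`get`/`task_done`/`join` against the class above (Python 3 syntax; the sentinel-based shutdown is my convention, not part of the module):

```python
import threading

q = Queue(maxsize=4)  # the Queue class defined above

def worker():
    while True:
        item = q.get()           # blocks until an item is available
        if item is None:         # sentinel value: shut this worker down
            q.task_done()
            break
        # ... process item here ...
        q.task_done()            # exactly one task_done() per get()

threads = [threading.Thread(target=worker) for _ in range(2)]
for t in threads:
    t.start()
for item in range(8):
    q.put(item)                  # blocks whenever 4 items are already queued
for _ in threads:
    q.put(None)                  # one sentinel per worker
q.join()                         # returns once every put() has been task_done()'d
```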
repo_name: FilWisher/distributed-project | path: icarus/icarus/results/visualize.py | copies: 1 | size: 3641
"""Functions for visualizing results on graphs of topologies"""
from __future__ import division
import os
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import networkx as nx
__all__ = [
'draw_stack_deployment',
'draw_network_load',
]
# Colormap for node stacks
COLORMAP = {'source': 'blue',
'receiver': 'green',
'router': 'white',
'cache': 'red',
}
def stack_map(topology):
"""Return dict mapping node ID to stack type
Parameters
----------
topology : Topology
The topology
Returns
-------
stack_map : dict
Dict mapping node to stack. Options are:
source | receiver | router | cache
"""
stack = {}
for v, (name, props) in topology.stacks().items():
if name == 'router':
            cache = 'cache_size' in props and props['cache_size'] > 0
            if cache:
                name = 'cache'
            else:
                name = 'router'
stack[v] = name
return stack
def draw_stack_deployment(topology, filename, plotdir):
"""Draw a topology with different node colors according to stack
Parameters
----------
topology : Topology
The topology to draw
plotdir : string
        The directory in which to save the plot
filename : string
The name of the image file to save
"""
stack = stack_map(topology)
node_color = [COLORMAP[stack[v]] for v in topology.nodes_iter()]
plt.figure()
nx.draw_graphviz(topology, node_color=node_color, with_labels=False)
    plt.savefig(os.path.join(plotdir, filename), bbox_inches='tight')
def draw_network_load(topology, result, filename, plotdir):
"""Draw topology with node colors according to stack and node size and link
color according to server/cache hits and link loads.
    Nodes are colored according to COLORMAP. Edges are colored on a blue-red
scale where blue means min link load and red means max link load.
Sources and caches have variable size proportional to their hit ratios.
Parameters
----------
topology : Topology
The topology to draw
result : Tree
The tree representing the specific experiment result from which metric
are read
plotdir : string
        The directory in which to save the plot
filename : string
The name of the image file to save
"""
stack = stack_map(topology)
node_color = [COLORMAP[stack[v]] for v in topology.nodes_iter()]
node_min = 50
node_max = 600
hits = result['CACHE_HIT_RATIO']['PER_NODE_CACHE_HIT_RATIO'].copy()
hits.update(result['CACHE_HIT_RATIO']['PER_NODE_SERVER_HIT_RATIO'])
hits = np.array([hits[v] if v in hits else 0 for v in topology.nodes_iter()])
min_hits = np.min(hits)
max_hits = np.max(hits)
hits = node_min + (node_max - node_min)*(hits - min_hits)/(max_hits - min_hits)
link_load = result['LINK_LOAD']['PER_LINK_INTERNAL'].copy()
link_load.update(result['LINK_LOAD']['PER_LINK_EXTERNAL'])
link_load = [link_load[e] if e in link_load else 0 for e in topology.edges()]
plt.figure()
nx.draw_graphviz(topology, node_color=node_color, node_size=hits,
width=2.0,
edge_color=link_load,
edge_cmap=mpl.colors.LinearSegmentedColormap.from_list('bluered',['blue','red']),
with_labels=False)
    plt.savefig(os.path.join(plotdir, filename), bbox_inches='tight')
license: mit | hash: -1,813,796,021,012,025,600 | line_mean: 31.508929 | line_max: 102 | alpha_frac: 0.608075 | autogenerated: false | ratio: 3.983589 | config_test: false | has_no_keywords: false | few_assignments: false
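Two details of `draw_network_load` are worth isolating: hit ratios are min-max scaled into a node-size range, and link loads are mapped onto a two-color blue-to-red colormap. A self-contained sketch of both; the zero-range guard is my addition (the original would divide by zero when every node has the same hit ratio):

```python
import numpy as np
import matplotlib as mpl

def scale_node_sizes(hits, node_min=50, node_max=600):
    # Min-max scale raw hit ratios into [node_min, node_max], as
    # draw_network_load does for its node sizes.
    hits = np.asarray(hits, dtype=float)
    lo, hi = hits.min(), hits.max()
    if hi == lo:  # guard: flat data would otherwise divide by zero
        return np.full_like(hits, (node_min + node_max) / 2.0)
    return node_min + (node_max - node_min) * (hits - lo) / (hi - lo)

# The edge-load ramp used above: blue = min link load, red = max link load.
bluered = mpl.colors.LinearSegmentedColormap.from_list('bluered', ['blue', 'red'])

print(scale_node_sizes([0.0, 0.25, 1.0]))  # -> [ 50.  187.5 600. ]
```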
repo_name: jessada/pyCMM | path: setup.py | copies: 1 | size: 4017
import sys
import glob
import pkgutil
import os
import fnmatch
from setuptools import setup
from pycmm.settings import DNASEQ_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import DUMMY_TABLE_ANNOVAR_BIN
from pycmm.settings import MUTREP_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import MUTREP_FAMILY_REPORT_BIN
from pycmm.settings import MUTREP_SUMMARY_REPORT_BIN
from pycmm.settings import MUTREPDB_SEQ_REPORT_BIN
from pycmm.settings import PLINK_SLURM_MONITOR_PIPELINE_BIN
from pycmm.settings import PLINK_HAP_ASSOCS_REPORT_BIN
from pycmm.settings import PLINK_MERGE_HAP_ASSOCS_BIN
from pycmm.settings import DBMS_EXECUTE_DB_JOBS_BIN
def opj(*args):
path = os.path.join(*args)
return os.path.normpath(path)
def find_data_files(srcdir, *wildcards, **kw):
# get a list of all files under the srcdir matching wildcards,
# returned in a format to be used for install_data
def walk_helper(arg, dirname, files):
if '.svn' in dirname:
return
names = []
lst, wildcards = arg
for wc in wildcards:
wc_name = opj(dirname, wc)
for f in files:
filename = opj(dirname, f)
if fnmatch.fnmatch(filename, wc_name) and not os.path.isdir(filename):
names.append(filename)
if names:
lst.append( (dirname, names ) )
file_list = []
recursive = kw.get('recursive', True)
if recursive:
os.path.walk(srcdir, walk_helper, (file_list, wildcards))
else:
walk_helper((file_list, wildcards),
srcdir,
[os.path.basename(f) for f in glob.glob(opj(srcdir, '*'))])
return file_list
#csv_files = find_data_files('data/', '*.csv')
all_data_files = find_data_files('data/', '*.*')
#all_data_files = find_data_files('script/', '*.*')
setup(
name='pyCMM',
version='0.0.1',
author='Jessada Thutkawkorapin',
author_email='jessada.thutkawkorapin@gmail.com',
packages=['pycmm',
'pycmm.app',
'pycmm.utils',
'pycmm.cmmlib',
'pycmm.flow',
'pycmm.proc',
'pycmm.proc.db',
'pycmm.proc.mutrep',
],
scripts=['bin/'+DNASEQ_SLURM_MONITOR_PIPELINE_BIN,
'bin/pyCMM-dnaseq-pipeline',
'bin/pyCMM-dnaseq-create-job-setup-file',
'bin/pyCMM-cmmdb-cal-mut-stat',
'bin/pyCMM-cmmdb-vcf-AF-to-annovar',
'bin/pyCMM-cmmdb-table-annovar',
'bin/pyCMM-cmmdb-create-job-setup-file',
'bin/'+DUMMY_TABLE_ANNOVAR_BIN,
'bin/'+MUTREP_SLURM_MONITOR_PIPELINE_BIN,
'bin/pyCMM-mutrep-pipeline',
'bin/pyCMM-mutrep-mutation-reports',
'bin/'+MUTREP_FAMILY_REPORT_BIN,
'bin/'+MUTREP_SUMMARY_REPORT_BIN,
'bin/pyCMM-mutrep-create-job-setup-file',
'bin/pyCMM-mutrepdb-create-job-setup-file',
'bin/'+MUTREPDB_SEQ_REPORT_BIN,
'bin/pyCMM-mutrepdb-controller',
'bin/pyCMM-plink-create-job-setup-file',
'bin/pyCMM-plink-pipeline',
'bin/'+PLINK_SLURM_MONITOR_PIPELINE_BIN,
'bin/'+PLINK_HAP_ASSOCS_REPORT_BIN,
'bin/'+PLINK_MERGE_HAP_ASSOCS_BIN,
'bin/pyCMM-dbms-controller',
'bin/pyCMM-dbms-create-job-setup-file',
'bin/'+DBMS_EXECUTE_DB_JOBS_BIN,
],
    package=['pyCMM'],  # note: 'package' is not a setuptools keyword; 'packages' above is the effective one
# package_data={'': ['data/CBV/*.cbv']
# },
data_files=all_data_files,
url='http://pypi.python.org/pypi/pyCMM/',
license='LICENSE.txt',
description='Python packages for my sequencing data analysis at Center of Molecular Medicine, Karolinska Institute, Stockholm, Sweden',
long_description=open('README.md').read(),
install_requires=[
"pysam >= 0.7",
"pyvcf >= 0.6.0",
"pyaml >= 15.5.7",
"openpyxl >= 2.3.3",
"xlsxwriter >= 0.5.3",
],
)
license: gpl-2.0 | hash: -6,294,524,803,636,695,000 | line_mean: 35.853211 | line_max: 139 | alpha_frac: 0.591735 | autogenerated: false | ratio: 3.188095 | config_test: false | has_no_keywords: false | few_assignments: false
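`find_data_files` relies on `os.path.walk`, which only exists in Python 2. A hedged Python 3 port of the same helper using `os.walk`, preserving the `.svn` filter, the wildcard matching, and the `(dirname, names)` output format expected by `data_files`:

```python
import fnmatch
import glob
import os

def find_data_files(srcdir, *wildcards, **kw):
    # Python 3 port of the helper above: os.path.walk was removed in
    # Python 3, so iterate with os.walk instead.
    file_list = []
    if kw.get('recursive', True):
        walker = os.walk(srcdir)
    else:
        # Non-recursive: fake a single walk step over srcdir's entries.
        walker = [(srcdir, [], [os.path.basename(f)
                                for f in glob.glob(os.path.join(srcdir, '*'))])]
    for dirname, _, files in walker:
        if '.svn' in dirname:
            continue
        names = []
        for f in files:
            filename = os.path.join(dirname, f)
            if os.path.isdir(filename):
                continue
            if any(fnmatch.fnmatch(filename, os.path.join(dirname, wc))
                   for wc in wildcards):
                names.append(filename)
        if names:
            file_list.append((dirname, names))
    return file_list
```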
repo_name: bowen0701/algorithms_data_structures | path: lc0240_search_a_2d_matrix_ii.py | copies: 1 | size: 1900
"""Leetcode 240. Search a 2D Matrix II
URL: https://leetcode.com/problems/search-a-2d-matrix-ii/
Medium
Write an efficient algorithm that searches for a value in an m x n matrix.
This matrix has the following properties:
- Integers in each row are sorted in ascending from left to right.
- Integers in each column are sorted in ascending from top to bottom.
Example:
Consider the following matrix:
[
[1, 4, 7, 11, 15],
[2, 5, 8, 12, 19],
[3, 6, 9, 16, 22],
[10, 13, 14, 17, 24],
[18, 21, 23, 26, 30]
]
Given target = 5, return true.
Given target = 20, return false.
"""
class Solution(object):
def searchMatrix(self, matrix, target):
"""
:type matrix: List[List[int]]
:type target: int
:rtype: bool
Time complexity: O(m+n), where
- m is the row number, and
- n is the column number.
Space complexity: O(1).
"""
if not len(matrix) or not len(matrix[0]):
return False
# Search starting from the bottom-left, moving to top/right.
i, j = len(matrix) - 1, 0
while i >= 0 and j < len(matrix[0]):
if matrix[i][j] == target:
return True
elif matrix[i][j] > target:
                # Entry too big: it rules out this row, so move up one row.
i -= 1
elif matrix[i][j] < target:
                # Entry too small: it rules out this column, so move right one column.
j += 1
return False
def main():
matrix = [
[1, 4, 7, 11, 15],
[2, 5, 8, 12, 19],
[3, 6, 9, 16, 22],
[10, 13, 14, 17, 24],
[18, 21, 23, 26, 30]
]
target = 5 # Should be True.
print Solution().searchMatrix(matrix, target)
target = 20 # Should be False.
print Solution().searchMatrix(matrix, target)
if __name__ == '__main__':
main()
license: bsd-2-clause | hash: -1,983,297,366,930,088,000 | line_mean: 24.675676 | line_max: 75 | alpha_frac: 0.543158 | autogenerated: false | ratio: 3.442029 | config_test: false | has_no_keywords: false | few_assignments: false
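The search walks a "staircase" from the bottom-left corner: a value that is too large rules out its whole row, one that is too small rules out its whole column. A Python 3 sketch that traces the probes for the example matrix (the `search` helper is a condensed rewrite, not the class above):

```python
matrix = [
    [1, 4, 7, 11, 15],
    [2, 5, 8, 12, 19],
    [3, 6, 9, 16, 22],
    [10, 13, 14, 17, 24],
    [18, 21, 23, 26, 30],
]

def search(matrix, target):
    i, j = len(matrix) - 1, 0           # start at the bottom-left corner
    while i >= 0 and j < len(matrix[0]):
        v = matrix[i][j]
        print('probe (%d, %d) = %d' % (i, j, v))
        if v == target:
            return True
        i, j = (i - 1, j) if v > target else (i, j + 1)
    return False

# For target=5 the walk is 18 -> 10 -> 3 -> 6 -> 5: at most m+n-1 probes.
print(search(matrix, 5))   # True
```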
repo_name: angadpc/Alexa-Project- | path: twilio/rest/chat/v1/service/__init__.py | copies: 1 | size: 49759
# coding=utf-8
"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
from twilio.rest.chat.v1.service.channel import ChannelList
from twilio.rest.chat.v1.service.role import RoleList
from twilio.rest.chat.v1.service.user import UserList
class ServiceList(ListResource):
def __init__(self, version):
"""
Initialize the ServiceList
:param Version version: Version that contains the resource
:returns: twilio.rest.ip_messaging.v1.service.ServiceList
:rtype: twilio.rest.ip_messaging.v1.service.ServiceList
"""
super(ServiceList, self).__init__(version)
# Path Solution
self._solution = {}
self._uri = '/Services'.format(**self._solution)
def create(self, friendly_name):
"""
Create a new ServiceInstance
:param unicode friendly_name: The friendly_name
:returns: Newly created ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
data = values.of({
'FriendlyName': friendly_name,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return ServiceInstance(
self._version,
payload,
)
def stream(self, limit=None, page_size=None):
"""
Streams ServiceInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.ip_messaging.v1.service.ServiceInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, limit=None, page_size=None):
"""
Lists ServiceInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.ip_messaging.v1.service.ServiceInstance]
"""
return list(self.stream(
limit=limit,
page_size=page_size,
))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of ServiceInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServicePage
"""
params = values.of({
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return ServicePage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a ServiceContext
:param sid: The sid
:returns: twilio.rest.ip_messaging.v1.service.ServiceContext
:rtype: twilio.rest.ip_messaging.v1.service.ServiceContext
"""
return ServiceContext(
self._version,
sid=sid,
)
def __call__(self, sid):
"""
Constructs a ServiceContext
:param sid: The sid
:returns: twilio.rest.ip_messaging.v1.service.ServiceContext
:rtype: twilio.rest.ip_messaging.v1.service.ServiceContext
"""
return ServiceContext(
self._version,
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Chat.V1.ServiceList>'
class ServicePage(Page):
def __init__(self, version, response, solution):
"""
Initialize the ServicePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:returns: twilio.rest.ip_messaging.v1.service.ServicePage
:rtype: twilio.rest.ip_messaging.v1.service.ServicePage
"""
super(ServicePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of ServiceInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.ip_messaging.v1.service.ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
return ServiceInstance(
self._version,
payload,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Chat.V1.ServicePage>'
class ServiceContext(InstanceContext):
def __init__(self, version, sid):
"""
Initialize the ServiceContext
:param Version version: Version that contains the resource
:param sid: The sid
:returns: twilio.rest.ip_messaging.v1.service.ServiceContext
:rtype: twilio.rest.ip_messaging.v1.service.ServiceContext
"""
super(ServiceContext, self).__init__(version)
# Path Solution
self._solution = {
'sid': sid,
}
self._uri = '/Services/{sid}'.format(**self._solution)
# Dependents
self._channels = None
self._roles = None
self._users = None
def fetch(self):
"""
Fetch a ServiceInstance
:returns: Fetched ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return ServiceInstance(
self._version,
payload,
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the ServiceInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete('delete', self._uri)
def update(self, friendly_name=values.unset,
default_service_role_sid=values.unset,
default_channel_role_sid=values.unset,
default_channel_creator_role_sid=values.unset,
read_status_enabled=values.unset, reachability_enabled=values.unset,
typing_indicator_timeout=values.unset,
consumption_report_interval=values.unset,
notifications_new_message_enabled=values.unset,
notifications_new_message_template=values.unset,
notifications_added_to_channel_enabled=values.unset,
notifications_added_to_channel_template=values.unset,
notifications_removed_from_channel_enabled=values.unset,
notifications_removed_from_channel_template=values.unset,
notifications_invited_to_channel_enabled=values.unset,
notifications_invited_to_channel_template=values.unset,
pre_webhook_url=values.unset, post_webhook_url=values.unset,
webhook_method=values.unset, webhook_filters=values.unset,
webhooks_on_message_send_url=values.unset,
webhooks_on_message_send_method=values.unset,
webhooks_on_message_send_format=values.unset,
webhooks_on_message_update_url=values.unset,
webhooks_on_message_update_method=values.unset,
webhooks_on_message_update_format=values.unset,
webhooks_on_message_remove_url=values.unset,
webhooks_on_message_remove_method=values.unset,
webhooks_on_message_remove_format=values.unset,
webhooks_on_channel_add_url=values.unset,
webhooks_on_channel_add_method=values.unset,
webhooks_on_channel_add_format=values.unset,
webhooks_on_channel_destroy_url=values.unset,
webhooks_on_channel_destroy_method=values.unset,
webhooks_on_channel_destroy_format=values.unset,
webhooks_on_channel_update_url=values.unset,
webhooks_on_channel_update_method=values.unset,
webhooks_on_channel_update_format=values.unset,
webhooks_on_member_add_url=values.unset,
webhooks_on_member_add_method=values.unset,
webhooks_on_member_add_format=values.unset,
webhooks_on_member_remove_url=values.unset,
webhooks_on_member_remove_method=values.unset,
webhooks_on_member_remove_format=values.unset,
webhooks_on_message_sent_url=values.unset,
webhooks_on_message_sent_method=values.unset,
webhooks_on_message_sent_format=values.unset,
webhooks_on_message_updated_url=values.unset,
webhooks_on_message_updated_method=values.unset,
webhooks_on_message_updated_format=values.unset,
webhooks_on_message_removed_url=values.unset,
webhooks_on_message_removed_method=values.unset,
webhooks_on_message_removed_format=values.unset,
webhooks_on_channel_added_url=values.unset,
webhooks_on_channel_added_method=values.unset,
webhooks_on_channel_added_format=values.unset,
webhooks_on_channel_destroyed_url=values.unset,
webhooks_on_channel_destroyed_method=values.unset,
webhooks_on_channel_destroyed_format=values.unset,
webhooks_on_channel_updated_url=values.unset,
webhooks_on_channel_updated_method=values.unset,
webhooks_on_channel_updated_format=values.unset,
webhooks_on_member_added_url=values.unset,
webhooks_on_member_added_method=values.unset,
webhooks_on_member_added_format=values.unset,
webhooks_on_member_removed_url=values.unset,
webhooks_on_member_removed_method=values.unset,
webhooks_on_member_removed_format=values.unset,
limits_channel_members=values.unset,
limits_user_channels=values.unset):
"""
Update the ServiceInstance
:param unicode friendly_name: The friendly_name
:param unicode default_service_role_sid: The default_service_role_sid
:param unicode default_channel_role_sid: The default_channel_role_sid
:param unicode default_channel_creator_role_sid: The default_channel_creator_role_sid
:param bool read_status_enabled: The read_status_enabled
:param bool reachability_enabled: The reachability_enabled
:param unicode typing_indicator_timeout: The typing_indicator_timeout
:param unicode consumption_report_interval: The consumption_report_interval
:param bool notifications_new_message_enabled: The notifications.new_message.enabled
:param unicode notifications_new_message_template: The notifications.new_message.template
:param bool notifications_added_to_channel_enabled: The notifications.added_to_channel.enabled
:param unicode notifications_added_to_channel_template: The notifications.added_to_channel.template
:param bool notifications_removed_from_channel_enabled: The notifications.removed_from_channel.enabled
:param unicode notifications_removed_from_channel_template: The notifications.removed_from_channel.template
:param bool notifications_invited_to_channel_enabled: The notifications.invited_to_channel.enabled
:param unicode notifications_invited_to_channel_template: The notifications.invited_to_channel.template
:param unicode pre_webhook_url: The pre_webhook_url
:param unicode post_webhook_url: The post_webhook_url
:param unicode webhook_method: The webhook_method
:param unicode webhook_filters: The webhook_filters
:param unicode webhooks_on_message_send_url: The webhooks.on_message_send.url
:param unicode webhooks_on_message_send_method: The webhooks.on_message_send.method
:param unicode webhooks_on_message_send_format: The webhooks.on_message_send.format
:param unicode webhooks_on_message_update_url: The webhooks.on_message_update.url
:param unicode webhooks_on_message_update_method: The webhooks.on_message_update.method
:param unicode webhooks_on_message_update_format: The webhooks.on_message_update.format
:param unicode webhooks_on_message_remove_url: The webhooks.on_message_remove.url
:param unicode webhooks_on_message_remove_method: The webhooks.on_message_remove.method
:param unicode webhooks_on_message_remove_format: The webhooks.on_message_remove.format
:param unicode webhooks_on_channel_add_url: The webhooks.on_channel_add.url
:param unicode webhooks_on_channel_add_method: The webhooks.on_channel_add.method
:param unicode webhooks_on_channel_add_format: The webhooks.on_channel_add.format
:param unicode webhooks_on_channel_destroy_url: The webhooks.on_channel_destroy.url
:param unicode webhooks_on_channel_destroy_method: The webhooks.on_channel_destroy.method
:param unicode webhooks_on_channel_destroy_format: The webhooks.on_channel_destroy.format
:param unicode webhooks_on_channel_update_url: The webhooks.on_channel_update.url
:param unicode webhooks_on_channel_update_method: The webhooks.on_channel_update.method
:param unicode webhooks_on_channel_update_format: The webhooks.on_channel_update.format
:param unicode webhooks_on_member_add_url: The webhooks.on_member_add.url
:param unicode webhooks_on_member_add_method: The webhooks.on_member_add.method
:param unicode webhooks_on_member_add_format: The webhooks.on_member_add.format
:param unicode webhooks_on_member_remove_url: The webhooks.on_member_remove.url
:param unicode webhooks_on_member_remove_method: The webhooks.on_member_remove.method
:param unicode webhooks_on_member_remove_format: The webhooks.on_member_remove.format
:param unicode webhooks_on_message_sent_url: The webhooks.on_message_sent.url
:param unicode webhooks_on_message_sent_method: The webhooks.on_message_sent.method
:param unicode webhooks_on_message_sent_format: The webhooks.on_message_sent.format
:param unicode webhooks_on_message_updated_url: The webhooks.on_message_updated.url
:param unicode webhooks_on_message_updated_method: The webhooks.on_message_updated.method
:param unicode webhooks_on_message_updated_format: The webhooks.on_message_updated.format
:param unicode webhooks_on_message_removed_url: The webhooks.on_message_removed.url
:param unicode webhooks_on_message_removed_method: The webhooks.on_message_removed.method
:param unicode webhooks_on_message_removed_format: The webhooks.on_message_removed.format
:param unicode webhooks_on_channel_added_url: The webhooks.on_channel_added.url
:param unicode webhooks_on_channel_added_method: The webhooks.on_channel_added.method
:param unicode webhooks_on_channel_added_format: The webhooks.on_channel_added.format
:param unicode webhooks_on_channel_destroyed_url: The webhooks.on_channel_destroyed.url
:param unicode webhooks_on_channel_destroyed_method: The webhooks.on_channel_destroyed.method
:param unicode webhooks_on_channel_destroyed_format: The webhooks.on_channel_destroyed.format
:param unicode webhooks_on_channel_updated_url: The webhooks.on_channel_updated.url
:param unicode webhooks_on_channel_updated_method: The webhooks.on_channel_updated.method
:param unicode webhooks_on_channel_updated_format: The webhooks.on_channel_updated.format
:param unicode webhooks_on_member_added_url: The webhooks.on_member_added.url
:param unicode webhooks_on_member_added_method: The webhooks.on_member_added.method
:param unicode webhooks_on_member_added_format: The webhooks.on_member_added.format
:param unicode webhooks_on_member_removed_url: The webhooks.on_member_removed.url
:param unicode webhooks_on_member_removed_method: The webhooks.on_member_removed.method
:param unicode webhooks_on_member_removed_format: The webhooks.on_member_removed.format
:param unicode limits_channel_members: The limits.channel_members
:param unicode limits_user_channels: The limits.user_channels
:returns: Updated ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
data = values.of({
'FriendlyName': friendly_name,
'DefaultServiceRoleSid': default_service_role_sid,
'DefaultChannelRoleSid': default_channel_role_sid,
'DefaultChannelCreatorRoleSid': default_channel_creator_role_sid,
'ReadStatusEnabled': read_status_enabled,
'ReachabilityEnabled': reachability_enabled,
'TypingIndicatorTimeout': typing_indicator_timeout,
'ConsumptionReportInterval': consumption_report_interval,
'Notifications.NewMessage.Enabled': notifications_new_message_enabled,
'Notifications.NewMessage.Template': notifications_new_message_template,
'Notifications.AddedToChannel.Enabled': notifications_added_to_channel_enabled,
'Notifications.AddedToChannel.Template': notifications_added_to_channel_template,
'Notifications.RemovedFromChannel.Enabled': notifications_removed_from_channel_enabled,
'Notifications.RemovedFromChannel.Template': notifications_removed_from_channel_template,
'Notifications.InvitedToChannel.Enabled': notifications_invited_to_channel_enabled,
'Notifications.InvitedToChannel.Template': notifications_invited_to_channel_template,
'PreWebhookUrl': pre_webhook_url,
'PostWebhookUrl': post_webhook_url,
'WebhookMethod': webhook_method,
'WebhookFilters': webhook_filters,
'Webhooks.OnMessageSend.Url': webhooks_on_message_send_url,
'Webhooks.OnMessageSend.Method': webhooks_on_message_send_method,
'Webhooks.OnMessageSend.Format': webhooks_on_message_send_format,
'Webhooks.OnMessageUpdate.Url': webhooks_on_message_update_url,
'Webhooks.OnMessageUpdate.Method': webhooks_on_message_update_method,
'Webhooks.OnMessageUpdate.Format': webhooks_on_message_update_format,
'Webhooks.OnMessageRemove.Url': webhooks_on_message_remove_url,
'Webhooks.OnMessageRemove.Method': webhooks_on_message_remove_method,
'Webhooks.OnMessageRemove.Format': webhooks_on_message_remove_format,
'Webhooks.OnChannelAdd.Url': webhooks_on_channel_add_url,
'Webhooks.OnChannelAdd.Method': webhooks_on_channel_add_method,
'Webhooks.OnChannelAdd.Format': webhooks_on_channel_add_format,
'Webhooks.OnChannelDestroy.Url': webhooks_on_channel_destroy_url,
'Webhooks.OnChannelDestroy.Method': webhooks_on_channel_destroy_method,
'Webhooks.OnChannelDestroy.Format': webhooks_on_channel_destroy_format,
'Webhooks.OnChannelUpdate.Url': webhooks_on_channel_update_url,
'Webhooks.OnChannelUpdate.Method': webhooks_on_channel_update_method,
'Webhooks.OnChannelUpdate.Format': webhooks_on_channel_update_format,
'Webhooks.OnMemberAdd.Url': webhooks_on_member_add_url,
'Webhooks.OnMemberAdd.Method': webhooks_on_member_add_method,
'Webhooks.OnMemberAdd.Format': webhooks_on_member_add_format,
'Webhooks.OnMemberRemove.Url': webhooks_on_member_remove_url,
'Webhooks.OnMemberRemove.Method': webhooks_on_member_remove_method,
'Webhooks.OnMemberRemove.Format': webhooks_on_member_remove_format,
'Webhooks.OnMessageSent.Url': webhooks_on_message_sent_url,
'Webhooks.OnMessageSent.Method': webhooks_on_message_sent_method,
'Webhooks.OnMessageSent.Format': webhooks_on_message_sent_format,
'Webhooks.OnMessageUpdated.Url': webhooks_on_message_updated_url,
'Webhooks.OnMessageUpdated.Method': webhooks_on_message_updated_method,
'Webhooks.OnMessageUpdated.Format': webhooks_on_message_updated_format,
'Webhooks.OnMessageRemoved.Url': webhooks_on_message_removed_url,
'Webhooks.OnMessageRemoved.Method': webhooks_on_message_removed_method,
'Webhooks.OnMessageRemoved.Format': webhooks_on_message_removed_format,
'Webhooks.OnChannelAdded.Url': webhooks_on_channel_added_url,
'Webhooks.OnChannelAdded.Method': webhooks_on_channel_added_method,
'Webhooks.OnChannelAdded.Format': webhooks_on_channel_added_format,
'Webhooks.OnChannelDestroyed.Url': webhooks_on_channel_destroyed_url,
'Webhooks.OnChannelDestroyed.Method': webhooks_on_channel_destroyed_method,
'Webhooks.OnChannelDestroyed.Format': webhooks_on_channel_destroyed_format,
'Webhooks.OnChannelUpdated.Url': webhooks_on_channel_updated_url,
'Webhooks.OnChannelUpdated.Method': webhooks_on_channel_updated_method,
'Webhooks.OnChannelUpdated.Format': webhooks_on_channel_updated_format,
'Webhooks.OnMemberAdded.Url': webhooks_on_member_added_url,
'Webhooks.OnMemberAdded.Method': webhooks_on_member_added_method,
'Webhooks.OnMemberAdded.Format': webhooks_on_member_added_format,
'Webhooks.OnMemberRemoved.Url': webhooks_on_member_removed_url,
'Webhooks.OnMemberRemoved.Method': webhooks_on_member_removed_method,
'Webhooks.OnMemberRemoved.Format': webhooks_on_member_removed_format,
'Limits.ChannelMembers': limits_channel_members,
'Limits.UserChannels': limits_user_channels,
})
payload = self._version.update(
'POST',
self._uri,
data=data,
)
return ServiceInstance(
self._version,
payload,
sid=self._solution['sid'],
)
@property
def channels(self):
"""
Access the channels
:returns: twilio.rest.ip_messaging.v1.service.channel.ChannelList
:rtype: twilio.rest.ip_messaging.v1.service.channel.ChannelList
"""
if self._channels is None:
self._channels = ChannelList(
self._version,
service_sid=self._solution['sid'],
)
return self._channels
@property
def roles(self):
"""
Access the roles
:returns: twilio.rest.ip_messaging.v1.service.role.RoleList
:rtype: twilio.rest.ip_messaging.v1.service.role.RoleList
"""
if self._roles is None:
self._roles = RoleList(
self._version,
service_sid=self._solution['sid'],
)
return self._roles
@property
def users(self):
"""
Access the users
:returns: twilio.rest.ip_messaging.v1.service.user.UserList
:rtype: twilio.rest.ip_messaging.v1.service.user.UserList
"""
if self._users is None:
self._users = UserList(
self._version,
service_sid=self._solution['sid'],
)
return self._users
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Chat.V1.ServiceContext {}>'.format(context)
class ServiceInstance(InstanceResource):
def __init__(self, version, payload, sid=None):
"""
Initialize the ServiceInstance
:returns: twilio.rest.ip_messaging.v1.service.ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
super(ServiceInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'sid': payload['sid'],
'account_sid': payload['account_sid'],
'friendly_name': payload['friendly_name'],
'date_created': deserialize.iso8601_datetime(payload['date_created']),
'date_updated': deserialize.iso8601_datetime(payload['date_updated']),
'default_service_role_sid': payload['default_service_role_sid'],
'default_channel_role_sid': payload['default_channel_role_sid'],
'default_channel_creator_role_sid': payload['default_channel_creator_role_sid'],
'read_status_enabled': payload['read_status_enabled'],
'reachability_enabled': payload['reachability_enabled'],
'typing_indicator_timeout': deserialize.integer(payload['typing_indicator_timeout']),
'consumption_report_interval': deserialize.integer(payload['consumption_report_interval']),
'limits': payload['limits'],
'webhooks': payload['webhooks'],
'pre_webhook_url': payload['pre_webhook_url'],
'post_webhook_url': payload['post_webhook_url'],
'webhook_method': payload['webhook_method'],
'webhook_filters': payload['webhook_filters'],
'notifications': payload['notifications'],
'url': payload['url'],
'links': payload['links'],
}
# Context
self._context = None
self._solution = {
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: ServiceContext for this ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceContext
"""
if self._context is None:
self._context = ServiceContext(
self._version,
sid=self._solution['sid'],
)
return self._context
@property
def sid(self):
"""
:returns: The sid
:rtype: unicode
"""
return self._properties['sid']
@property
def account_sid(self):
"""
:returns: The account_sid
:rtype: unicode
"""
return self._properties['account_sid']
@property
def friendly_name(self):
"""
:returns: The friendly_name
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def date_created(self):
"""
:returns: The date_created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The date_updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def default_service_role_sid(self):
"""
:returns: The default_service_role_sid
:rtype: unicode
"""
return self._properties['default_service_role_sid']
@property
def default_channel_role_sid(self):
"""
:returns: The default_channel_role_sid
:rtype: unicode
"""
return self._properties['default_channel_role_sid']
@property
def default_channel_creator_role_sid(self):
"""
:returns: The default_channel_creator_role_sid
:rtype: unicode
"""
return self._properties['default_channel_creator_role_sid']
@property
def read_status_enabled(self):
"""
:returns: The read_status_enabled
:rtype: bool
"""
return self._properties['read_status_enabled']
@property
def reachability_enabled(self):
"""
:returns: The reachability_enabled
:rtype: bool
"""
return self._properties['reachability_enabled']
@property
def typing_indicator_timeout(self):
"""
:returns: The typing_indicator_timeout
:rtype: unicode
"""
return self._properties['typing_indicator_timeout']
@property
def consumption_report_interval(self):
"""
:returns: The consumption_report_interval
:rtype: unicode
"""
return self._properties['consumption_report_interval']
@property
def limits(self):
"""
:returns: The limits
:rtype: dict
"""
return self._properties['limits']
@property
def webhooks(self):
"""
:returns: The webhooks
:rtype: dict
"""
return self._properties['webhooks']
@property
def pre_webhook_url(self):
"""
:returns: The pre_webhook_url
:rtype: unicode
"""
return self._properties['pre_webhook_url']
@property
def post_webhook_url(self):
"""
:returns: The post_webhook_url
:rtype: unicode
"""
return self._properties['post_webhook_url']
@property
def webhook_method(self):
"""
:returns: The webhook_method
:rtype: unicode
"""
return self._properties['webhook_method']
@property
def webhook_filters(self):
"""
:returns: The webhook_filters
:rtype: unicode
"""
return self._properties['webhook_filters']
@property
def notifications(self):
"""
:returns: The notifications
:rtype: dict
"""
return self._properties['notifications']
@property
def url(self):
"""
:returns: The url
:rtype: unicode
"""
return self._properties['url']
@property
def links(self):
"""
:returns: The links
:rtype: unicode
"""
return self._properties['links']
def fetch(self):
"""
Fetch a ServiceInstance
:returns: Fetched ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the ServiceInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def update(self, friendly_name=values.unset,
default_service_role_sid=values.unset,
default_channel_role_sid=values.unset,
default_channel_creator_role_sid=values.unset,
read_status_enabled=values.unset, reachability_enabled=values.unset,
typing_indicator_timeout=values.unset,
consumption_report_interval=values.unset,
notifications_new_message_enabled=values.unset,
notifications_new_message_template=values.unset,
notifications_added_to_channel_enabled=values.unset,
notifications_added_to_channel_template=values.unset,
notifications_removed_from_channel_enabled=values.unset,
notifications_removed_from_channel_template=values.unset,
notifications_invited_to_channel_enabled=values.unset,
notifications_invited_to_channel_template=values.unset,
pre_webhook_url=values.unset, post_webhook_url=values.unset,
webhook_method=values.unset, webhook_filters=values.unset,
webhooks_on_message_send_url=values.unset,
webhooks_on_message_send_method=values.unset,
webhooks_on_message_send_format=values.unset,
webhooks_on_message_update_url=values.unset,
webhooks_on_message_update_method=values.unset,
webhooks_on_message_update_format=values.unset,
webhooks_on_message_remove_url=values.unset,
webhooks_on_message_remove_method=values.unset,
webhooks_on_message_remove_format=values.unset,
webhooks_on_channel_add_url=values.unset,
webhooks_on_channel_add_method=values.unset,
webhooks_on_channel_add_format=values.unset,
webhooks_on_channel_destroy_url=values.unset,
webhooks_on_channel_destroy_method=values.unset,
webhooks_on_channel_destroy_format=values.unset,
webhooks_on_channel_update_url=values.unset,
webhooks_on_channel_update_method=values.unset,
webhooks_on_channel_update_format=values.unset,
webhooks_on_member_add_url=values.unset,
webhooks_on_member_add_method=values.unset,
webhooks_on_member_add_format=values.unset,
webhooks_on_member_remove_url=values.unset,
webhooks_on_member_remove_method=values.unset,
webhooks_on_member_remove_format=values.unset,
webhooks_on_message_sent_url=values.unset,
webhooks_on_message_sent_method=values.unset,
webhooks_on_message_sent_format=values.unset,
webhooks_on_message_updated_url=values.unset,
webhooks_on_message_updated_method=values.unset,
webhooks_on_message_updated_format=values.unset,
webhooks_on_message_removed_url=values.unset,
webhooks_on_message_removed_method=values.unset,
webhooks_on_message_removed_format=values.unset,
webhooks_on_channel_added_url=values.unset,
webhooks_on_channel_added_method=values.unset,
webhooks_on_channel_added_format=values.unset,
webhooks_on_channel_destroyed_url=values.unset,
webhooks_on_channel_destroyed_method=values.unset,
webhooks_on_channel_destroyed_format=values.unset,
webhooks_on_channel_updated_url=values.unset,
webhooks_on_channel_updated_method=values.unset,
webhooks_on_channel_updated_format=values.unset,
webhooks_on_member_added_url=values.unset,
webhooks_on_member_added_method=values.unset,
webhooks_on_member_added_format=values.unset,
webhooks_on_member_removed_url=values.unset,
webhooks_on_member_removed_method=values.unset,
webhooks_on_member_removed_format=values.unset,
limits_channel_members=values.unset,
limits_user_channels=values.unset):
"""
Update the ServiceInstance
:param unicode friendly_name: The friendly_name
:param unicode default_service_role_sid: The default_service_role_sid
:param unicode default_channel_role_sid: The default_channel_role_sid
:param unicode default_channel_creator_role_sid: The default_channel_creator_role_sid
:param bool read_status_enabled: The read_status_enabled
:param bool reachability_enabled: The reachability_enabled
:param unicode typing_indicator_timeout: The typing_indicator_timeout
:param unicode consumption_report_interval: The consumption_report_interval
:param bool notifications_new_message_enabled: The notifications.new_message.enabled
:param unicode notifications_new_message_template: The notifications.new_message.template
:param bool notifications_added_to_channel_enabled: The notifications.added_to_channel.enabled
:param unicode notifications_added_to_channel_template: The notifications.added_to_channel.template
:param bool notifications_removed_from_channel_enabled: The notifications.removed_from_channel.enabled
:param unicode notifications_removed_from_channel_template: The notifications.removed_from_channel.template
:param bool notifications_invited_to_channel_enabled: The notifications.invited_to_channel.enabled
:param unicode notifications_invited_to_channel_template: The notifications.invited_to_channel.template
:param unicode pre_webhook_url: The pre_webhook_url
:param unicode post_webhook_url: The post_webhook_url
:param unicode webhook_method: The webhook_method
:param unicode webhook_filters: The webhook_filters
:param unicode webhooks_on_message_send_url: The webhooks.on_message_send.url
:param unicode webhooks_on_message_send_method: The webhooks.on_message_send.method
:param unicode webhooks_on_message_send_format: The webhooks.on_message_send.format
:param unicode webhooks_on_message_update_url: The webhooks.on_message_update.url
:param unicode webhooks_on_message_update_method: The webhooks.on_message_update.method
:param unicode webhooks_on_message_update_format: The webhooks.on_message_update.format
:param unicode webhooks_on_message_remove_url: The webhooks.on_message_remove.url
:param unicode webhooks_on_message_remove_method: The webhooks.on_message_remove.method
:param unicode webhooks_on_message_remove_format: The webhooks.on_message_remove.format
:param unicode webhooks_on_channel_add_url: The webhooks.on_channel_add.url
:param unicode webhooks_on_channel_add_method: The webhooks.on_channel_add.method
:param unicode webhooks_on_channel_add_format: The webhooks.on_channel_add.format
:param unicode webhooks_on_channel_destroy_url: The webhooks.on_channel_destroy.url
:param unicode webhooks_on_channel_destroy_method: The webhooks.on_channel_destroy.method
:param unicode webhooks_on_channel_destroy_format: The webhooks.on_channel_destroy.format
:param unicode webhooks_on_channel_update_url: The webhooks.on_channel_update.url
:param unicode webhooks_on_channel_update_method: The webhooks.on_channel_update.method
:param unicode webhooks_on_channel_update_format: The webhooks.on_channel_update.format
:param unicode webhooks_on_member_add_url: The webhooks.on_member_add.url
:param unicode webhooks_on_member_add_method: The webhooks.on_member_add.method
:param unicode webhooks_on_member_add_format: The webhooks.on_member_add.format
:param unicode webhooks_on_member_remove_url: The webhooks.on_member_remove.url
:param unicode webhooks_on_member_remove_method: The webhooks.on_member_remove.method
:param unicode webhooks_on_member_remove_format: The webhooks.on_member_remove.format
:param unicode webhooks_on_message_sent_url: The webhooks.on_message_sent.url
:param unicode webhooks_on_message_sent_method: The webhooks.on_message_sent.method
:param unicode webhooks_on_message_sent_format: The webhooks.on_message_sent.format
:param unicode webhooks_on_message_updated_url: The webhooks.on_message_updated.url
:param unicode webhooks_on_message_updated_method: The webhooks.on_message_updated.method
:param unicode webhooks_on_message_updated_format: The webhooks.on_message_updated.format
:param unicode webhooks_on_message_removed_url: The webhooks.on_message_removed.url
:param unicode webhooks_on_message_removed_method: The webhooks.on_message_removed.method
:param unicode webhooks_on_message_removed_format: The webhooks.on_message_removed.format
:param unicode webhooks_on_channel_added_url: The webhooks.on_channel_added.url
:param unicode webhooks_on_channel_added_method: The webhooks.on_channel_added.method
:param unicode webhooks_on_channel_added_format: The webhooks.on_channel_added.format
:param unicode webhooks_on_channel_destroyed_url: The webhooks.on_channel_destroyed.url
:param unicode webhooks_on_channel_destroyed_method: The webhooks.on_channel_destroyed.method
:param unicode webhooks_on_channel_destroyed_format: The webhooks.on_channel_destroyed.format
:param unicode webhooks_on_channel_updated_url: The webhooks.on_channel_updated.url
:param unicode webhooks_on_channel_updated_method: The webhooks.on_channel_updated.method
:param unicode webhooks_on_channel_updated_format: The webhooks.on_channel_updated.format
:param unicode webhooks_on_member_added_url: The webhooks.on_member_added.url
:param unicode webhooks_on_member_added_method: The webhooks.on_member_added.method
:param unicode webhooks_on_member_added_format: The webhooks.on_member_added.format
:param unicode webhooks_on_member_removed_url: The webhooks.on_member_removed.url
:param unicode webhooks_on_member_removed_method: The webhooks.on_member_removed.method
:param unicode webhooks_on_member_removed_format: The webhooks.on_member_removed.format
:param unicode limits_channel_members: The limits.channel_members
:param unicode limits_user_channels: The limits.user_channels
:returns: Updated ServiceInstance
:rtype: twilio.rest.ip_messaging.v1.service.ServiceInstance
"""
return self._proxy.update(
friendly_name=friendly_name,
default_service_role_sid=default_service_role_sid,
default_channel_role_sid=default_channel_role_sid,
default_channel_creator_role_sid=default_channel_creator_role_sid,
read_status_enabled=read_status_enabled,
reachability_enabled=reachability_enabled,
typing_indicator_timeout=typing_indicator_timeout,
consumption_report_interval=consumption_report_interval,
notifications_new_message_enabled=notifications_new_message_enabled,
notifications_new_message_template=notifications_new_message_template,
notifications_added_to_channel_enabled=notifications_added_to_channel_enabled,
notifications_added_to_channel_template=notifications_added_to_channel_template,
notifications_removed_from_channel_enabled=notifications_removed_from_channel_enabled,
notifications_removed_from_channel_template=notifications_removed_from_channel_template,
notifications_invited_to_channel_enabled=notifications_invited_to_channel_enabled,
notifications_invited_to_channel_template=notifications_invited_to_channel_template,
pre_webhook_url=pre_webhook_url,
post_webhook_url=post_webhook_url,
webhook_method=webhook_method,
webhook_filters=webhook_filters,
webhooks_on_message_send_url=webhooks_on_message_send_url,
webhooks_on_message_send_method=webhooks_on_message_send_method,
webhooks_on_message_send_format=webhooks_on_message_send_format,
webhooks_on_message_update_url=webhooks_on_message_update_url,
webhooks_on_message_update_method=webhooks_on_message_update_method,
webhooks_on_message_update_format=webhooks_on_message_update_format,
webhooks_on_message_remove_url=webhooks_on_message_remove_url,
webhooks_on_message_remove_method=webhooks_on_message_remove_method,
webhooks_on_message_remove_format=webhooks_on_message_remove_format,
webhooks_on_channel_add_url=webhooks_on_channel_add_url,
webhooks_on_channel_add_method=webhooks_on_channel_add_method,
webhooks_on_channel_add_format=webhooks_on_channel_add_format,
webhooks_on_channel_destroy_url=webhooks_on_channel_destroy_url,
webhooks_on_channel_destroy_method=webhooks_on_channel_destroy_method,
webhooks_on_channel_destroy_format=webhooks_on_channel_destroy_format,
webhooks_on_channel_update_url=webhooks_on_channel_update_url,
webhooks_on_channel_update_method=webhooks_on_channel_update_method,
webhooks_on_channel_update_format=webhooks_on_channel_update_format,
webhooks_on_member_add_url=webhooks_on_member_add_url,
webhooks_on_member_add_method=webhooks_on_member_add_method,
webhooks_on_member_add_format=webhooks_on_member_add_format,
webhooks_on_member_remove_url=webhooks_on_member_remove_url,
webhooks_on_member_remove_method=webhooks_on_member_remove_method,
webhooks_on_member_remove_format=webhooks_on_member_remove_format,
webhooks_on_message_sent_url=webhooks_on_message_sent_url,
webhooks_on_message_sent_method=webhooks_on_message_sent_method,
webhooks_on_message_sent_format=webhooks_on_message_sent_format,
webhooks_on_message_updated_url=webhooks_on_message_updated_url,
webhooks_on_message_updated_method=webhooks_on_message_updated_method,
webhooks_on_message_updated_format=webhooks_on_message_updated_format,
webhooks_on_message_removed_url=webhooks_on_message_removed_url,
webhooks_on_message_removed_method=webhooks_on_message_removed_method,
webhooks_on_message_removed_format=webhooks_on_message_removed_format,
webhooks_on_channel_added_url=webhooks_on_channel_added_url,
webhooks_on_channel_added_method=webhooks_on_channel_added_method,
webhooks_on_channel_added_format=webhooks_on_channel_added_format,
webhooks_on_channel_destroyed_url=webhooks_on_channel_destroyed_url,
webhooks_on_channel_destroyed_method=webhooks_on_channel_destroyed_method,
webhooks_on_channel_destroyed_format=webhooks_on_channel_destroyed_format,
webhooks_on_channel_updated_url=webhooks_on_channel_updated_url,
webhooks_on_channel_updated_method=webhooks_on_channel_updated_method,
webhooks_on_channel_updated_format=webhooks_on_channel_updated_format,
webhooks_on_member_added_url=webhooks_on_member_added_url,
webhooks_on_member_added_method=webhooks_on_member_added_method,
webhooks_on_member_added_format=webhooks_on_member_added_format,
webhooks_on_member_removed_url=webhooks_on_member_removed_url,
webhooks_on_member_removed_method=webhooks_on_member_removed_method,
webhooks_on_member_removed_format=webhooks_on_member_removed_format,
limits_channel_members=limits_channel_members,
limits_user_channels=limits_user_channels,
)
@property
def channels(self):
"""
Access the channels
:returns: twilio.rest.ip_messaging.v1.service.channel.ChannelList
:rtype: twilio.rest.ip_messaging.v1.service.channel.ChannelList
"""
return self._proxy.channels
@property
def roles(self):
"""
Access the roles
:returns: twilio.rest.ip_messaging.v1.service.role.RoleList
:rtype: twilio.rest.ip_messaging.v1.service.role.RoleList
"""
return self._proxy.roles
@property
def users(self):
"""
Access the users
:returns: twilio.rest.ip_messaging.v1.service.user.UserList
:rtype: twilio.rest.ip_messaging.v1.service.user.UserList
"""
return self._proxy.users
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Chat.V1.ServiceInstance {}>'.format(context)
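# Illustrative usage sketch (not part of the original module): updating a
# Chat (IP Messaging) service through the Twilio REST client. The account
# credentials and service SID below are placeholders, and the client entry
# point may differ between library versions.
#
#     from twilio.rest import Client
#
#     client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'auth_token')
#     service = client.chat.v1.services(
#         'ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update(
#         friendly_name='support-chat',
#         typing_indicator_timeout=5,
#     )
#     print(service.friendly_name)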
|
mit
| -8,875,363,262,685,539,000
| 46.29943
| 115
| 0.655721
| false
| 4.004748
| false
| false
| false
|
berndca/xmodels
|
xmodels/constraints.py
|
1
|
12866
|
from __future__ import unicode_literals
from collections import namedtuple
import logging
from six import string_types
from .fields import RegexField, ValidationException, NCName, Name
logger = logging.getLogger(__name__)
KeyRef = namedtuple('KeyRef', 'key_name key_value ref_path')
class KeyStore(object):
"""
Base class for all key and unique stores. It contains two dictionaries:
* index: {key_name: list_of_target_paths}
    * keys: {'%s:%s' % (key_name, target_path): {key_value: key_path}}
"""
def __init__(self):
self.index = {}
self.keys = {}
def add_key(self, key_names, target_path):
if isinstance(key_names, list):
key_names_list = key_names
else:
key_names_list = [key_names]
for key_name in key_names_list:
key = '%s:%s' % (key_name, target_path)
if key in self.keys:
raise ValidationException('Key %s does already exist.' % key,
target_path)
if key_name not in self.index:
self.index[key_name] = [target_path]
else:
self.index[key_name].append(target_path)
self.keys[key] = {}
def in_keys(self, key_name, target_path):
return '%s:%s' % (key_name, target_path) in self.keys
def add_value(self, key_names, target_path, key_value, key_path):
if isinstance(key_names, string_types):
key_names_list = [key_names]
else:
key_names_list = key_names
for key_name in key_names_list:
key = '%s:%s' % (key_name, target_path)
if self.in_keys(key_name, target_path):
if key_value in self.keys[key]:
msg = 'Duplicate key value %s for %s at %s' % (key_value,
key_name,
key_path)
raise ValidationException(msg, key_value)
self.keys[key][key_value] = key_path
return True
msg = 'Could not find target path %s for key name(s) %s' % \
(target_path, ', '.join(key_names_list))
raise ValidationException(msg, key_value)
def match_ref(self, key_name, ref_key_value):
if key_name not in self.index:
raise ValidationException('No key for %s exists' % key_name,
key_name)
for key_path in self.index[key_name]:
key = '%s:%s' % (key_name, key_path)
for key_value, instance_path in self.keys[key].items():
if key_value == ref_key_value:
return instance_path
raise ValidationException('Could not match ref %s for %s' % (
ref_key_value, key_name), ref_key_value)
def key_value_count(self, key_name, target_path):
key = '%s:%s' % (key_name, target_path)
if key in self.keys:
return len(self.keys[key])
return 0
class IDStore(KeyStore):
"""
ID's are a special case of key since all of them share the same path '/'
and of course they all share the same name 'ID'.
"""
key_name = 'ID'
path = '/'
def __init__(self):
super(IDStore, self).__init__()
super(IDStore, self).add_key(self.key_name, self.path)
def add_id(self, key_value, key_path):
super(IDStore, self).add_value(self.key_name, self.path,
key_value, key_path)
def match_id(self, ref_key_value):
return super(IDStore, self).match_ref(self.key_name, ref_key_value)
def id_count(self):
return super(IDStore, self).key_value_count(self.key_name, self.path)
class RefStore(object):
"""
Store for keyref identity constraints.
* refs: list of namedtuple KeyRef(key_name, key_value, ref_path)
* targets: dict {ref_path: target_path}
"""
def __init__(self):
self.refs = []
self.targets = {}
def add_key_ref(self, key_name, key_value, ref_path):
if not key_value:
raise ValidationException('key value is required', key_value)
self.refs.append(KeyRef(key_name, key_value, ref_path))
def set_target(self, ref_path, target_path):
if ref_path in self.targets:
raise ValidationException('Target for ref_path already exists.',
ref_path)
self.targets[ref_path] = target_path
class IDREFStore(RefStore):
"""
Store for IDREF. All IDREF refer to the same key: 'ID'.
"""
def add_idref(self, key_value, ref_path):
super(IDREFStore, self).add_key_ref('ID', key_value, ref_path)
class Stores(object):
"""
Combination of all identity constraint related stores in a single object.
"""
def __init__(self):
self.keyStore = KeyStore()
self.uniquesStore = KeyStore()
self.idStore = IDStore()
self.refStore = RefStore()
self.idrefStore = IDREFStore()
def get_value_path_stores(**kwargs):
messages = dict(
path='No path supplied.',
store='Parameter store of type Stores expected.',
)
stores = kwargs.get('stores')
path = kwargs.get('path')
if stores is not None:
if not isinstance(stores, Stores):
raise TypeError(messages['store'])
return path, stores
class InitStores(object):
"""
Initializes stores.keyStore uf key_names or stores.uniquesStore
if unique_names by adding keys/path.
"""
key_names = None
unique_names = None
messages = dict(
name='key names (string or list of strings) is required and can not '
'be empty.',
store='Parameter store of type Stores expected.',
)
def add_keys(self, path='', stores=None):
if self.key_names:
stores.keyStore.add_key(self.key_names, path)
if self.unique_names:
stores.uniquesStore.add_key(self.unique_names, path)
def check_key_name(self, key_name):
if not key_name or not isinstance(key_name, string_types):
raise ValueError(self.messages['name'])
class InitKeyStore(InitStores):
"""
Creates an empty dict under
stores.keyStore[keyName:keyTargetInstancePath]
"""
messages = dict(
name='keyName (string) is required and can not be empty.',
store='Parameter store of type Stores expected.',
)
def __init__(self, key_name):
self.check_key_name(key_name)
self.key_names = [key_name]
class InitUniqueStore(InitStores):
"""
Creates an empty dict under
stores.uniquesStore[keyName:keyTargetInstancePath]
"""
def __init__(self, key_name):
self.check_key_name(key_name)
self.unique_names = [key_name]
class SetupKeyRefsStore(object):
"""
"""
string_validator_instance = None
refer_key_name = None
messages = dict(
        names='keyNames (type list of strings or string) is required.',
emptyValue='Value may not be empty.',
)
def __init__(self, refer_key_name, **kwargs):
self.string_validator_instance = kwargs.get(
'string_validator_instance', self.string_validator_instance)
self.refer_key_name = refer_key_name
def validate(self, key_value, **kwargs):
path, stores = get_value_path_stores(**kwargs)
if self.string_validator_instance:
string_value = self.string_validator_instance.validate(key_value)
else:
string_value = key_value
if stores:
stores.refStore.add_key_ref(self.refer_key_name,
string_value, path)
return string_value
class CheckKeys(object):
"""
    Determines the target path by removing `level` trailing segments from
    the instance path.
Looks up store[keyName:keyTargetInstancePath] for all
keyNames and checks the dict if keyValue (element.value) is already
present (duplicate error). If not it adds the element.value as key
and element.path as value.
"""
not_empty = True
string_validator_instance = None
key_names = None
refer_key_name = None
level = None
messages = dict(
        names='keyNames (type list of strings or string) is required.',
        stores='stores (type dict) is required.',
missing='%(param)s is required for CheckKeys.',
type='%(param)s should be of type %(type)s.',
duplicate='%(value)s is a duplicate entry for key %(key)s.',
noMatch='Could not find match for path %(path)s.',
stateMissing='Parameter state is required.',
emptyValue='Value may not be empty.',
)
def __init__(self, **kwargs):
self.key_names = kwargs.get('key_names', self.key_names)
self.level = kwargs.get('level', self.level)
assert self.key_names, self.messages['names']
if isinstance(self.key_names, list):
assert self.key_names
for name in self.key_names:
assert isinstance(name, string_types)
else:
assert isinstance(self.key_names, string_types)
self.key_names = [self.key_names]
assert isinstance(self.level, int)
def validate(self, key_value, **kwargs):
path, stores = get_value_path_stores(**kwargs)
if not key_value:
if not self.not_empty:
return key_value
            # not_empty is set, so an empty value is a validation error
raise ValidationException(self.messages['emptyValue'], key_value)
if self.string_validator_instance:
string_value = self.string_validator_instance.validate(key_value)
else:
string_value = key_value
if stores is None:
return string_value
target_path = '.'.join(path.split('.')[:-self.level])
if self.refer_key_name:
stores.refStore.add_key_ref(self.refer_key_name, key_value, path)
self.add_value(stores, target_path, string_value, path)
return string_value
def add_value(self, stores, target_path, value, path):
if self.key_names:
stores.keyStore.add_value(self.key_names, target_path, value, path)
class CheckUniques(CheckKeys):
not_empty = False
key_names = None
def add_value(self, stores, target_path, value, path):
if self.key_names:
stores.uniquesStore.add_value(self.key_names, target_path,
value, path)
class KeyName(CheckKeys):
"""
"""
not_empty = True
store_name = 'keyStore'
string_validator_instance = Name()
class UniqueName(CheckUniques):
"""
A UniqueName is of type Name and may be empty.
"""
not_empty = False
string_validator_instance = Name()
class ID(NCName):
"""
The type ID is used for an attribute that uniquely identifies an element
in an XML document. An ID value must conform to the rules for an NCName.
This means that it must start with a letter or underscore, and can only
contain letters, digits, underscores, hyphens, and periods. ID values
must be unique within an XML instance, regardless of the attribute's name
or its element name.
"""
not_empty = True
def validate(self, key_value, **kwargs):
path, stores = get_value_path_stores(**kwargs)
string_value = super(ID, self).validate(key_value, **kwargs)
if stores:
stores.idStore.add_id(string_value, path)
return key_value
class IDREF(NCName):
"""
    The type IDREF is used for an attribute that refers to an ID declared
    elsewhere in the XML document. An IDREF value must conform to the rules
    for an NCName, and it must match the value of some ID attribute in the
    same XML instance.
"""
default_build_value = 'testId0'
not_empty = True
def validate(self, key_value, **kwargs):
path, stores = get_value_path_stores(**kwargs)
string_value = super(IDREF, self).validate(key_value, **kwargs)
if stores:
stores.idrefStore.add_idref(string_value, path)
return key_value
def match_refs(stores):
def match_store_refs(key_store, ref_store):
for ref in ref_store.refs:
instance_path = key_store.match_ref(ref.key_name, ref.key_value)
ref_store.set_target(ref.ref_path, instance_path)
logger.debug('Successfully matched "%s/%s", got: %r'
% (ref.key_name, ref.key_value, instance_path))
match_store_refs(stores.keyStore, stores.refStore)
match_store_refs(stores.idStore, stores.idrefStore)
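# Illustrative usage sketch (not part of the original module), using
# hypothetical paths: register a key, record a keyref, then resolve it.
def _example_match_refs():
    stores = Stores()
    # Collect 'busName' keys under the component at target path ''.
    stores.keyStore.add_key('busName', '')
    # A key value 'clk' was found at instance path 'busInterfaces.0'.
    stores.keyStore.add_value('busName', '', 'clk', 'busInterfaces.0')
    # Somewhere else, a keyref refers to the key value 'clk'.
    stores.refStore.add_key_ref('busName', 'clk', 'ports.3')
    # Resolve every recorded ref against the collected keys.
    match_refs(stores)
    assert stores.refStore.targets['ports.3'] == 'busInterfaces.0'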
|
bsd-3-clause
| 3,555,487,826,444,224,000
| 33.218085
| 79
| 0.596844
| false
| 3.840597
| false
| false
| false
|
agripo/website
|
core/models/shop.py
|
1
|
12773
|
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models, IntegrityError
from django.db.models import Q, Sum
from django.utils import timezone
from django.db.models.signals import pre_save, post_save
from django.contrib.auth.models import User
from core.exceptions import CantSetCartQuantityOnUnsavedProduct, AddedMoreToCartThanAvailable
from core.models.users import AgripoUser
class ProductCategory(models.Model):
on_change_delete_cache = True
name = models.CharField(verbose_name='Nom de la catégorie', max_length=60, blank=False, null=False, unique=True)
def clean(self):
if self.name == '':
raise ValidationError('Empty category name')
def __str__(self):
return "{} : {}".format(self.id, self.name)
class Meta:
verbose_name = "Catégorie de produits"
verbose_name_plural = "Catégories de produits"
class Product(models.Model):
on_change_delete_cache = True
name = models.CharField(
max_length=60, blank=False, null=False, unique=True, verbose_name="Nom",
help_text="Nom affiché dans les fiches produits")
scientific_name = models.CharField(
default="", max_length=60, blank=True, null=False, verbose_name="Nom scientifique",
help_text="Nom affiché entre parenthèses dans les fiches produits")
category = models.ForeignKey(
ProductCategory, blank=False, null=False, verbose_name="Catégorie",
help_text="Catégorie sous laquelle apparaît ce produit.")
price = models.PositiveIntegerField(verbose_name="Prix unitaire", default=0, blank=False, null=False)
QUANTITY_TYPE_KILO = "k"
QUANTITY_TYPE_UNIT = "U"
QUANTITY_TYPE_LITER = "L"
    QUANTITY_TYPE_CHOICES = (
(QUANTITY_TYPE_KILO, 'le kg'),
(QUANTITY_TYPE_LITER, 'le litre'),
(QUANTITY_TYPE_UNIT, 'l\'unité'),
)
quantity_type = models.CharField(
verbose_name="Unité", max_length=1, choices=PROGRAMMED_STATUS, default=QUANTITY_TYPE_KILO)
image = models.ImageField(
upload_to='products', blank=True, null=True, default="default/not_found.jpg", verbose_name="Image",
help_text="Cette image représente le produit.<br />"
"Elle doit faire 150x150px. "
"Si la largeur est différente de la hauteur, l'image apparaitra déformée."
)
description = models.TextField(verbose_name="Description du produit", default="", blank=True, null=False)
farmers = models.ManyToManyField(AgripoUser, verbose_name='Agriculteurs', through="Stock")
stock = models.PositiveIntegerField(
verbose_name='Stock',
default=0,
help_text="Champ alimenté automatiquement en fonction des déclarations des agriculteurs.")
bought = models.PositiveIntegerField(
verbose_name='Acheté',
default=0,
help_text="Champ alimenté automatiquement en fonction des commandes passées")
def __str__(self):
return "{} : {}".format(self.id, self.name)
def clean(self):
if self.name == '':
raise ValidationError('Empty product name')
if self.price <= 0:
raise ValidationError('Price should be bigger than zero')
def image_tag(self):
return u'<img src="{}" style="width:150px;height:140px;"/>'.format(settings.MEDIA_URL + str(self.image))
image_tag.short_description = 'Miniature'
image_tag.allow_tags = True
def update_stock(self):
        # Stock is the sum of the farmers' declared stocks; purchases are
        # tracked separately in self.bought
farmers_stock = Stock.objects.filter(product_id=self.id).aggregate(Sum('stock'))
stock = farmers_stock['stock__sum']
self.stock = stock
self.save()
def set_cart_quantity(self, user, quantity):
if not self.id:
raise CantSetCartQuantityOnUnsavedProduct
if quantity > self.available_stock():
raise AddedMoreToCartThanAvailable
if quantity == 0:
CartProduct.objects.filter(user=user, product_id=self.pk).delete()
else:
CartProduct.objects.update_or_create(user=user, product=self, defaults={'quantity': quantity})
return self
def get_cart_quantity(self, request):
cart_product = CartProduct.objects.filter(user=request.user, product=self)
if cart_product:
return cart_product[0].quantity
return 0
def buy(self, quantity):
if self.available_stock() < quantity:
raise AddedMoreToCartThanAvailable()
self.bought += quantity
self.save()
return self
def available_stock(self):
return self.stock - self.bought
def is_available(self):
return self.available_stock() > 0
is_available.__name__ = "Disponible"
is_available.boolean = True
@staticmethod
def static_get_cart_products(user):
cart_products = CartProduct.objects.filter(user=user)
ret = []
for cart_product in cart_products:
ret.append(dict(
id=cart_product.product_id, quantity=cart_product.quantity))
return ret
@staticmethod
def static_clear_cart(user):
CartProduct.objects.filter(user=user).delete()
class Meta:
verbose_name = "Produit"
verbose_name_plural = "Produits"
class CartProduct(models.Model):
user = models.ForeignKey(User)
product = models.ForeignKey(Product)
quantity = models.IntegerField()
class Meta:
unique_together = ("user", "product")
class Stock(models.Model):
on_change_delete_cache = True
product = models.ForeignKey(Product, verbose_name='Produit', related_name="one_farmers_stock")
farmer = models.ForeignKey(AgripoUser, verbose_name='Agriculteur', limit_choices_to=Q(groups__name='farmers'))
stock = models.PositiveIntegerField(default=0, verbose_name="Stock")
class Meta:
unique_together = ("product", "farmer", )
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def save(self, **kwargs):
if not self.farmer.is_farmer():
raise IntegrityError("Only farmers have stocks")
ret = super().save(**kwargs)
self.product.update_stock()
return ret
def set(self, stock):
"""
Updating the stock for this product in this farmer's account and on the product's general data
:param stock: The new stock for this product and for this farmer
:return: the Stock object
"""
self.stock = stock
self.save()
self.product.update_stock()
return self
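# Illustrative usage sketch (not part of the original module; assumes a
# configured database, a hypothetical user in the 'farmers' group and a
# hypothetical product):
#
#     farmer = AgripoUser.objects.get(username='jean')
#     tomato = Product.objects.get(name='Tomate')
#     Stock.objects.create(product=tomato, farmer=farmer).set(25)
#     tomato.refresh_from_db()
#     assert tomato.available_stock() == tomato.stock - tomato.bought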
class DeliveryPoint(models.Model):
name = models.CharField(verbose_name='Nom', max_length=64, unique=True)
description = models.TextField(verbose_name='Description', max_length=512)
def __str__(self):
return self.name
class Meta:
verbose_name = "Lieu de livraison"
verbose_name_plural = "Lieux de livraison"
class DeliveryQueryset(models.query.QuerySet):
def available(self):
return self.filter(done=False, date__gte=timezone.now()).order_by("date")
def done(self):
return self.filter(Q(done=True) | Q(date__lt=timezone.now()))
class DeliveryManager(models.Manager):
def get_queryset(self):
return DeliveryQueryset(self.model, using=self._db)
def available(self):
return self.get_queryset().available()
def done(self):
return self.get_queryset().done()
class Delivery(models.Model):
on_change_delete_cache = True
date = models.DateTimeField(verbose_name='Date de la livraison', default=timezone.now)
delivery_point = models.ForeignKey(DeliveryPoint, verbose_name="Lieu de livraison")
done = models.BooleanField(default=False, verbose_name="Livraison effectuée")
objects = DeliveryManager()
def __str__(self):
return "{} à {}".format(self.date.strftime("Le %d/%m à %Hh%M"), self.delivery_point.name)
def details_link(self):
count = self.commands.count()
if not count:
return "", 0
return reverse("delivery_details", kwargs=dict(id=self.pk)), count
def details(self):
total = {}
total_price = 0
commands = self.commands.all()
for command in commands:
total_price += command.total
commandproducts = command.commandproduct_set.all()
for commandproduct in commandproducts:
if commandproduct.product.pk not in total:
total[commandproduct.product.pk] = dict(quantity=0, product=commandproduct, total=0)
total[commandproduct.product.pk]['quantity'] += commandproduct.quantity
return {
'total': total,
'total_price': total_price,
'commands': commands
}
def write_done(self, done=True):
self.done = done
self.save()
return self
class Meta:
verbose_name = "Livraison"
verbose_name_plural = "Livraisons"
previous_delivery_done = False
def delivery_pre_saved(sender, **kwargs):
global previous_delivery_done
instance = kwargs.get('instance')
if isinstance(instance, Delivery):
try:
previous_delivery_done = Delivery.objects.get(pk=instance.pk).done
except instance.DoesNotExist:
            # Gives a false result, but should only be used during tests
            # (the delivery only exists in memory at this point).
previous_delivery_done = instance.done
def delivery_saved(sender, **kwargs):
global previous_delivery_done
instance = kwargs.get('instance')
if isinstance(instance, Delivery):
if instance.done != previous_delivery_done:
# Listing the total quantities bought for all the commands in this delivery
bought_stocks = {}
for command in instance.commands.all():
for cp in command.commandproduct_set.all():
if cp.product.pk not in bought_stocks:
bought_stocks[cp.product.pk] = 0
bought_stocks[cp.product.pk] += cp.quantity
for product_id, stock in bought_stocks.items():
product = Product.objects.get(pk=product_id)
# We update the stocks for the commanded products
if instance.done:
product.bought -= stock
else:
product.bought += stock
product.update_stock()
pre_save.connect(delivery_pre_saved)
post_save.connect(delivery_saved)
class Command(models.Model):
"""
A command is the listing of the products for one customer in one delivery
"""
customer = models.ForeignKey(AgripoUser, verbose_name='Client', null=True)
delivery = models.ForeignKey(
Delivery, verbose_name="Lieu de livraison", related_name="commands",
help_text="Sélectionnez le lieu de livraison")
date = models.DateTimeField(verbose_name='Date', auto_now_add=True)
products = models.ManyToManyField(Product, verbose_name='Produits', through="CommandProduct")
sent = models.BooleanField(verbose_name='Envoyée ?', default=False)
message = models.TextField(
max_length=256, null=True, default="", verbose_name="Message",
help_text="Informations supplémentaires en rapport avec votre commande")
total = models.PositiveIntegerField(verbose_name='Total', default=0)
def __str__(self):
return "{} : {}".format(self.date.strftime("Le %d/%m à %Hh%M"), self.customer)
def validate(self):
# We get the products from the cart
products = Product.static_get_cart_products(self.customer)
for product in products:
the_product = Product.objects.get(id=product['id'])
cp = CommandProduct(command=self, product=the_product, quantity=product['quantity'])
cp.save()
the_product.buy(product['quantity'])
self.total += product['quantity'] * the_product.price
Product.static_clear_cart(self.customer)
self.save()
def is_sent(self):
return self.sent
def send(self):
self.sent = True
self.save()
return self
class CommandProduct(models.Model):
command = models.ForeignKey(Command)
product = models.ForeignKey(Product)
quantity = models.PositiveSmallIntegerField()
def __str__(self):
return "{} / {}".format(self.command, self.product)
def clean(self):
if self.quantity <= 0:
raise ValidationError('Quantity must be bigger than 0')
return super().clean()
class Meta:
unique_together = ('command', 'product', )
|
gpl-2.0
| -3,663,379,892,135,324,000
| 33.448649
| 116
| 0.642162
| false
| 3.853083
| false
| false
| false
|
zhuyue1314/simuvex
|
simuvex/s_slicer.py
|
1
|
3706
|
import pyvex
from .s_errors import SimSlicerError
class SimSlicer(object):
"""
A super lightweight single-IRSB slicing class.
"""
def __init__(self, statements, target_tmps=None, target_regs=None, inslice_callback=None, inslice_callback_infodict=None):
self._statements = statements
self._target_tmps = target_tmps if target_tmps else set()
self._target_regs = target_regs if target_regs else set()
self._inslice_callback = inslice_callback
# It could be accessed publicly
self.inslice_callback_infodict = inslice_callback_infodict
self.stmts = [ ]
self.stmt_indices = [ ]
self.final_regs = set()
if not self._target_tmps and not self._target_regs:
raise SimSlicerError('Target temps and/or registers must be specified.')
self._slice()
def _slice(self):
"""
Slice it!
"""
regs = set(self._target_regs)
tmps = set(self._target_tmps)
for stmt_idx, stmt in reversed(list(enumerate(self._statements))):
if self._backward_handler_stmt(stmt, tmps, regs):
self.stmts.insert(0, stmt)
self.stmt_indices.insert(0, stmt_idx)
if self._inslice_callback:
self._inslice_callback(stmt_idx, stmt, self.inslice_callback_infodict)
if not regs and not tmps:
break
self.final_regs = regs
#
# Backward slice IRStmt handlers
#
def _backward_handler_stmt(self, stmt, temps, regs):
funcname = "_backward_handler_stmt_%s" % type(stmt).__name__
in_slice = False
if hasattr(self, funcname):
in_slice = getattr(self, funcname)(stmt, temps, regs)
return in_slice
def _backward_handler_stmt_WrTmp(self, stmt, temps, regs):
tmp = stmt.tmp
if tmp not in temps:
return False
temps.remove(tmp)
self._backward_handler_expr(stmt.data, temps, regs)
return True
def _backward_handler_stmt_Put(self, stmt, temps, regs):
reg = stmt.offset
if reg in regs:
regs.remove(reg)
self._backward_handler_expr(stmt.data, temps, regs)
return True
else:
return False
#
# Backward slice IRExpr handlers
#
def _backward_handler_expr(self, expr, temps, regs):
funcname = "_backward_handler_expr_%s" % type(expr).__name__
in_slice = False
if hasattr(self, funcname):
in_slice = getattr(self, funcname)(expr, temps, regs)
return in_slice
def _backward_handler_expr_RdTmp(self, expr, temps, regs):
tmp = expr.tmp
temps.add(tmp)
def _backward_handler_expr_Get(self, expr, temps, regs):
reg = expr.offset
regs.add(reg)
def _backward_handler_expr_Load(self, expr, temps, regs):
addr = expr.addr
if type(addr) is pyvex.IRExpr.RdTmp:
# FIXME: Process other types
self._backward_handler_expr(addr, temps, regs)
def _backward_handler_expr_Unop(self, expr, temps, regs):
arg = expr.args[0]
if type(arg) is pyvex.IRExpr.RdTmp:
self._backward_handler_expr(arg, temps, regs)
def _backward_handler_expr_CCall(self, expr, temps, regs):
for arg in expr.args:
if type(arg) is pyvex.IRExpr.RdTmp:
self._backward_handler_expr(arg, temps, regs)
def _backward_handler_expr_Binop(self, expr, temps, regs):
for arg in expr.args:
if type(arg) is pyvex.IRExpr.RdTmp:
self._backward_handler_expr(arg, temps, regs)
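# Illustrative usage sketch (not part of the original module): slice a lifted
# IRSB down to the statements a target temp depends on. The bytes and address
# are placeholders, and the IRSB constructor signature may differ between
# pyvex versions.
#
#     import archinfo
#
#     irsb = pyvex.IRSB(b'\x48\x89\xd8', 0x400000, archinfo.ArchAMD64())
#     slicer = SimSlicer(irsb.statements, target_tmps={0})
#     for idx, stmt in zip(slicer.stmt_indices, slicer.stmts):
#         print(idx, stmt)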
|
bsd-2-clause
| 7,626,896,577,661,538,000
| 26.864662
| 126
| 0.588505
| false
| 3.566891
| false
| false
| false
|
ConflictGK/Codecatch-RSSE
|
properties.py
|
1
|
1114
|
import os
class Properties:
def __init__(self, query, example_query_index = -1, thepath = None):
self.query = query
main_dir = os.getcwd()
self.SCRAPY_EXEC = "C:/WinPython36/python-3.6.3.amd64/Scripts/scrapy.exe"
self.PARENT_DIR = main_dir + os.path.sep
if example_query_index >= 0:
self.DATA_DIR = self.PARENT_DIR + "experiments" + os.path.sep + "query" + str(example_query_index) + os.path.sep
else:
if thepath and thepath != "None":
self.DATA_DIR = self.PARENT_DIR + "data" + os.path.sep + thepath + os.path.sep
else:
self.DATA_DIR = self.PARENT_DIR + "data" + os.path.sep
self.SRESULTS_A = self.DATA_DIR[:-1].split(os.path.sep)[-2] + os.path.sep + self.DATA_DIR[:-1].split(os.path.sep)[-1] + os.path.sep + 'resultsA.json'
self.RESULTS_A = self.DATA_DIR + 'resultsA.json'
self.RESULTS_B = self.DATA_DIR + 'resultsB.json'
self.RESULTS_C = self.DATA_DIR + 'resultsC.json'
self.RESULTS_D = self.DATA_DIR + 'resultsD.json'
self.RESULTS_S = self.DATA_DIR + 'resultsS.json'
self.QUERY_DATA_FILE = self.RESULTS_A
self.RESULTS_FILE = self.RESULTS_D
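# Illustrative usage sketch (not part of the original module): resolve the
# result-file paths for the third canned experiment query. The query string
# is hypothetical.
def _example_properties():
    props = Properties("arraylist sort", example_query_index=3)
    print(props.DATA_DIR)       # ...experiments/query3/
    print(props.RESULTS_FILE)   # ...experiments/query3/resultsD.json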
|
mit
| 5,942,278,755,511,353,000
| 46.434783
| 151
| 0.654399
| false
| 2.63357
| false
| false
| false
|
npinto/pytest
|
_pytest/skipping.py
|
1
|
9336
|
""" support for skip/xfail functions and markers. """
import py, pytest
import sys
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption('--runxfail',
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
def pytest_configure(config):
config.addinivalue_line("markers",
"skipif(*conditions): skip the given test function if evaluation "
"of all conditions has a True value. Evaluation happens within the "
"module global context. Example: skipif('sys.platform == \"win32\"') "
"skips the test if we are on the win32 platform. "
)
config.addinivalue_line("markers",
"xfail(*conditions, reason=None, run=True): mark the the test function "
"as an expected failure. Optionally specify a reason and run=False "
"if you don't even want to execute the test function. Any positional "
"condition strings will be evaluated (like with skipif) and if one is "
"False the marker will not be applied."
)
def pytest_namespace():
return dict(xfail=xfail)
class XFailed(pytest.fail.Exception):
""" raised from an explicit call to py.test.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup functions with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
class MarkEvaluator:
def __init__(self, item, name):
self.item = item
self.name = name
@property
def holder(self):
return self.item.keywords.get(self.name, None)
def __bool__(self):
return bool(self.holder)
__nonzero__ = __bool__
def wasvalid(self):
return not hasattr(self, 'exc')
def istrue(self):
try:
return self._istrue()
except KeyboardInterrupt:
raise
except:
self.exc = sys.exc_info()
if isinstance(self.exc[1], SyntaxError):
msg = [" " * (self.exc[1].offset + 4) + "^",]
msg.append("SyntaxError: invalid syntax")
else:
msg = py.std.traceback.format_exception_only(*self.exc[:2])
pytest.fail("Error evaluating %r expression\n"
" %s\n"
"%s"
%(self.name, self.expr, "\n".join(msg)),
pytrace=False)
def _getglobals(self):
d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config}
func = self.item.obj
try:
d.update(func.__globals__)
except AttributeError:
d.update(func.func_globals)
return d
def _istrue(self):
if self.holder:
d = self._getglobals()
if self.holder.args:
self.result = False
for expr in self.holder.args:
self.expr = expr
if isinstance(expr, str):
result = cached_eval(self.item.config, expr, d)
else:
pytest.fail("expression is not a string")
if result:
self.result = True
self.expr = expr
break
else:
self.result = True
return getattr(self, 'result', False)
def get(self, attr, default=None):
return self.holder.kwargs.get(attr, default)
def getexplanation(self):
expl = self.get('reason', None)
if not expl:
if not hasattr(self, 'expr'):
return ""
else:
return "condition: " + str(self.expr)
return expl
def pytest_runtest_setup(item):
if not isinstance(item, pytest.Function):
return
evalskip = MarkEvaluator(item, 'skipif')
if evalskip.istrue():
py.test.skip(evalskip.getexplanation())
item._evalxfail = MarkEvaluator(item, 'xfail')
check_xfail_no_run(item)
def pytest_pyfunc_call(pyfuncitem):
check_xfail_no_run(pyfuncitem)
def check_xfail_no_run(item):
if not item.config.option.runxfail:
evalxfail = item._evalxfail
if evalxfail.istrue():
if not evalxfail.get('run', True):
py.test.xfail("[NOTRUN] " + evalxfail.getexplanation())
def pytest_runtest_makereport(__multicall__, item, call):
if not isinstance(item, pytest.Function):
return
    # unittest special case, see setting of _unexpectedsuccess
if hasattr(item, '_unexpectedsuccess'):
rep = __multicall__.execute()
if rep.when == "call":
# we need to translate into how py.test encodes xpass
rep.keywords['xfail'] = "reason: " + repr(item._unexpectedsuccess)
rep.outcome = "failed"
return rep
if not (call.excinfo and
call.excinfo.errisinstance(py.test.xfail.Exception)):
evalxfail = getattr(item, '_evalxfail', None)
if not evalxfail:
return
if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception):
if not item.config.getvalue("runxfail"):
rep = __multicall__.execute()
rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg
rep.outcome = "skipped"
return rep
rep = __multicall__.execute()
evalxfail = item._evalxfail
if not item.config.option.runxfail:
if evalxfail.wasvalid() and evalxfail.istrue():
if call.excinfo:
rep.outcome = "skipped"
rep.keywords['xfail'] = evalxfail.getexplanation()
elif call.when == "call":
rep.outcome = "failed"
rep.keywords['xfail'] = evalxfail.getexplanation()
return rep
if 'xfail' in rep.keywords:
del rep.keywords['xfail']
return rep
# called by terminalreporter progress reporting
def pytest_report_teststatus(report):
if 'xfail' in report.keywords:
if report.skipped:
return "xfailed", "x", "xfail"
elif report.failed:
return "xpassed", "X", "XPASS"
# called by the terminalreporter instance/plugin
def pytest_terminal_summary(terminalreporter):
tr = terminalreporter
if not tr.reportchars:
#for name in "xfailed skipped failed xpassed":
# if not tr.stats.get(name, 0):
# tr.write_line("HINT: use '-r' option to see extra "
# "summary info about tests")
# break
return
lines = []
for char in tr.reportchars:
if char == "x":
show_xfailed(terminalreporter, lines)
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
elif char == "E":
show_simple(terminalreporter, lines, 'error', "ERROR %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
def show_simple(terminalreporter, lines, stat, format):
tw = terminalreporter._tw
failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = rep.nodeid
lines.append(format %(pos, ))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
if xfailed:
for rep in xfailed:
pos = rep.nodeid
reason = rep.keywords['xfail']
lines.append("XFAIL %s" % (pos,))
if reason:
lines.append(" " + str(reason))
def show_xpassed(terminalreporter, lines):
xpassed = terminalreporter.stats.get("xpassed")
if xpassed:
for rep in xpassed:
pos = rep.nodeid
reason = rep.keywords['xfail']
lines.append("XPASS %s %s" %(pos, reason))
def cached_eval(config, expr, d):
if not hasattr(config, '_evalcache'):
config._evalcache = {}
try:
return config._evalcache[expr]
except KeyError:
#import sys
#print >>sys.stderr, ("cache-miss: %r" % expr)
exprcode = py.code.compile(expr, mode="eval")
config._evalcache[expr] = x = eval(exprcode, d)
return x
def folded_skips(skipped):
d = {}
for event in skipped:
key = event.longrepr
assert len(key) == 3, (event, key)
d.setdefault(key, []).append(event)
l = []
for key, events in d.items():
l.append((len(events),) + key)
return l
def show_skipped(terminalreporter, lines):
tr = terminalreporter
skipped = tr.stats.get('skipped', [])
if skipped:
#if not tr.hasopt('skipped'):
# tr.write_line(
# "%d skipped tests, specify -rs for more info" %
# len(skipped))
# return
fskips = folded_skips(skipped)
if fskips:
#tr.write_sep("_", "skipped test summary")
for num, fspath, lineno, reason in fskips:
if reason.startswith("Skipped: "):
reason = reason[9:]
lines.append("SKIP [%d] %s:%d: %s" %
(num, fspath, lineno, reason))
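# Illustrative usage sketch (not part of this plugin module): the markers it
# implements, as they would appear in a test file of the same pytest era.
#
#     import pytest
#
#     @pytest.mark.skipif('sys.platform == "win32"')
#     def test_posix_only():
#         assert True
#
#     @pytest.mark.xfail('sys.version_info < (3, 0)', reason='py3 only')
#     def test_new_syntax():
#         assert True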
|
mit
| -6,791,166,302,753,156,000
| 33.450185
| 80
| 0.564053
| false
| 3.991449
| true
| false
| false
|
clembou/PCWG
|
pcwg/gui/grid_box.py
|
1
|
7539
|
import Tkinter as tk
import tkFont as tkFont
import ttk as ttk
from ..exceptions.handling import ExceptionHandler
class GridBox(object):
def __init__(self, master, headers, row, column):
self.master = master
self.headers = headers
self.items_dict = {}
self.tree = None
self.container = ttk.Frame(self.master)
self.container.grid(row=row, column=column, sticky=tk.W+tk.E+tk.N+tk.S)
self._set_up_tree_widget()
self._build_tree()
# create a popup menu
self.pop_menu = tk.Menu(self.tree, tearoff=0)
self.pop_menu.add_command(label="New", command=self.new)
self.pop_menu.add_command(label="Remove", command=self.remove)
self.pop_menu.add_command(label="Remove All", command=self.remove_all)
self.pop_menu.add_command(label="Edit", command=self.edit)
self.pop_menu_add = tk.Menu(self.tree, tearoff=0)
self.pop_menu_add.add_command(label="New", command=self.new)
self.pop_menu_add.add_command(label="Remove All", command=self.remove_all)
self.tree.bind("<Button-2>", self.pop_up)
self.tree.bind("<Button-3>", self.pop_up)
self.tip = None
def clearTip(self):
self.setTip("")
def setTipNotRequired(self):
self.setTip("Not Required")
def setTip(self, text):
if self.tip != None:
self.tip['text'] = text
def item_count(self):
return len(self.items_dict)
def pop_up(self, event):
item = self.tree.identify_row(event.y)
if item:
# mouse pointer over item
self.tree.selection_set(item)
self.tree.update()
self.pop_menu.post(event.x_root, event.y_root)
else:
self.pop_menu_add.post(event.x_root, event.y_root)
def get_selected_key(self):
selection = self.tree.selection()
if len(selection) > 0:
return selection[0]
else:
return None
def get_selected(self):
key = self.get_selected_key()
if key != None:
return self.items_dict[key]
else:
return None
def new(self):
pass
def get_item_values(self, item):
return {}
def edit_item(self, item):
pass
def remove_all(self):
keys = self.items_dict.keys()
for key in keys:
self.remove_item(key)
def remove_item(self, key):
del self.items_dict[key]
self.tree.delete(key)
def remove(self):
selection = self.get_selected_key()
if selection != None:
self.remove_item(selection)
def edit(self):
try:
item = self.get_selected()
if item != None:
self.edit_item(item)
except ExceptionHandler.ExceptionType as e:
ExceptionHandler.add(e, "Cannot edit item")
def add_item(self, item):
values = self.get_tree_values(item)
key = self.tree.insert('', 'end', values = values)
self.items_dict[key] = item
self.adjust_width(values)
def redraw_item(self, key):
item = self.items_dict[key]
values = self.get_tree_values(item)
self.tree.item(key, text='', values=values)
self.adjust_width(values)
def adjust_width(self, values):
# adjust column's width if necessary to fit each value
for ix, val in enumerate(values):
col_w = tkFont.Font().measure(val)
if self.tree.column(self.headers[ix],width=None)<col_w:
self.tree.column(self.headers[ix], width=col_w)
def get_tree_values(self, item):
values = []
values_dict = self.get_item_values(item)
for header in self.headers:
values.append(values_dict[header])
return values
def add_items(self, items):
for item in items:
self.add_item(item)
def get_items(self):
return self.items_dict.values()
def double_click(self, event):
key = self.tree.identify('item', event.x, event.y)
if key in self.items_dict:
item = self.items_dict[key]
self.edit_item(item)
def _set_up_tree_widget(self):
tree_container = ttk.Frame(self.container)
tree_container.grid(row=0, column=0, sticky=tk.W+tk.E+tk.N+tk.S)
#tree_container.pack(fill='both', expand=True)
# create a treeview with dual scrollbars
self.tree = ttk.Treeview(tree_container, columns=self.headers, show="headings")
vsb = ttk.Scrollbar(tree_container, orient="vertical", command=self.tree.yview)
hsb = ttk.Scrollbar(tree_container, orient="horizontal", command=self.tree.xview)
self.tree.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
self.tree.grid(column=0, row=0, sticky='nsew')
vsb.grid(column=1, row=0, sticky='ns')
hsb.grid(column=0, row=1, sticky='ew')
tree_container.grid_columnconfigure(0, weight=1)
tree_container.grid_rowconfigure(0, weight=1)
self.tree.bind("<Double-1>", self.double_click)
def get_header_width(self, header):
return tkFont.Font().measure(header.title()) * self.get_header_scale()
def get_header_scale(self):
return 1
def _build_tree(self):
for col in self.headers:
self.tree.heading(col, text=col.title(),
command=lambda c=col: self.sortby(self.tree, c, 0))
# adjust the column's width to the header string
self.tree.column(col, width=self.get_header_width(col))
def sortby(self, tree, col, descending):
"""sort tree contents when a column header is clicked on"""
# grab values to sort
data = [(tree.set(child, col), child) \
for child in tree.get_children('')]
# if the data to be sorted is numeric change to float
#data = change_numeric(data)
# now sort the data in place
data.sort(reverse=descending)
for ix, item in enumerate(data):
tree.move(item[1], '', ix)
# switch the heading so it will sort in the opposite direction
tree.heading(col, command=lambda col=col: self.sortby(tree, col, \
int(not descending)))
class DialogGridBox(GridBox):
def __init__(self, master, parent_dialog, row, column):
self.parent_dialog = parent_dialog
headers = self.get_headers()
GridBox.__init__(self, master, headers, row, column)
def get_headers(self):
pass
def get_item_values(self, item):
pass
def new_dialog(self, master, parent_dialog, item):
pass
def new(self):
dialog = self.new_dialog(self.master, self.parent_dialog, None)
self.add_item(dialog.item)
def edit_item(self, item):
try:
key = self.get_selected_key()
item = self.items_dict[key]
self.new_dialog(self.master, self.parent_dialog, item)
self.redraw_item(key)
except ExceptionHandler.ExceptionType as e:
ExceptionHandler.add(e, "ERROR editing item")
def remove(self):
GridBox.remove(self)
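# Illustrative usage sketch (not part of the original module): a minimal
# concrete GridBox displaying hypothetical (name, value) pairs. It needs a
# display to run, so it is shown here as a comment.
#
#     class PairGridBox(GridBox):
#         def get_item_values(self, item):
#             return {'Name': item[0], 'Value': item[1]}
#
#     root = tk.Tk()
#     box = PairGridBox(root, headers=['Name', 'Value'], row=0, column=0)
#     box.add_items([('alpha', 1), ('beta', 2)])
#     root.mainloop()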
|
mit
| 2,690,398,291,229,796,400
| 27.026022
| 89
| 0.569041
| false
| 3.752613
| false
| false
| false
|
digitalocean/netbox
|
netbox/extras/admin.py
|
1
|
6231
|
from django import forms
from django.contrib import admin
from utilities.forms import LaxURLField
from .models import CustomField, CustomLink, ExportTemplate, JobResult, Webhook
def order_content_types(field):
"""
Order the list of available ContentTypes by application
"""
queryset = field.queryset.order_by('app_label', 'model')
field.choices = [(ct.pk, '{} > {}'.format(ct.app_label, ct.name)) for ct in queryset]
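# Illustrative sketch (not part of the original module): for a hypothetical
# queryset, the resulting choices take the form
# [(12, 'dcim > device'), (34, 'dcim > site'), (56, 'extras > tag')].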
#
# Webhooks
#
class WebhookForm(forms.ModelForm):
payload_url = LaxURLField(
label='URL'
)
class Meta:
model = Webhook
exclude = ()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if 'content_types' in self.fields:
order_content_types(self.fields['content_types'])
@admin.register(Webhook)
class WebhookAdmin(admin.ModelAdmin):
list_display = [
'name', 'models', 'payload_url', 'http_content_type', 'enabled', 'type_create', 'type_update', 'type_delete',
'ssl_verification',
]
list_filter = [
'enabled', 'type_create', 'type_update', 'type_delete', 'content_types',
]
form = WebhookForm
fieldsets = (
(None, {
'fields': ('name', 'content_types', 'enabled')
}),
('Events', {
'fields': ('type_create', 'type_update', 'type_delete')
}),
('HTTP Request', {
'fields': (
'payload_url', 'http_method', 'http_content_type', 'additional_headers', 'body_template', 'secret',
),
'classes': ('monospace',)
}),
('SSL', {
'fields': ('ssl_verification', 'ca_file_path')
})
)
def models(self, obj):
return ', '.join([ct.name for ct in obj.content_types.all()])
#
# Custom fields
#
class CustomFieldForm(forms.ModelForm):
class Meta:
model = CustomField
exclude = []
widgets = {
'default': forms.TextInput(),
'validation_regex': forms.Textarea(
attrs={
'cols': 80,
'rows': 3,
}
)
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
order_content_types(self.fields['content_types'])
@admin.register(CustomField)
class CustomFieldAdmin(admin.ModelAdmin):
actions = None
form = CustomFieldForm
list_display = [
'name', 'models', 'type', 'required', 'filter_logic', 'default', 'weight', 'description',
]
list_filter = [
'type', 'required', 'content_types',
]
fieldsets = (
('Custom Field', {
'fields': ('type', 'name', 'weight', 'label', 'description', 'required', 'default', 'filter_logic')
}),
('Assignment', {
'description': 'A custom field must be assigned to one or more object types.',
'fields': ('content_types',)
}),
('Validation Rules', {
'fields': ('validation_minimum', 'validation_maximum', 'validation_regex'),
'classes': ('monospace',)
}),
('Choices', {
'description': 'A selection field must have two or more choices assigned to it.',
'fields': ('choices',)
})
)
def models(self, obj):
return ', '.join([ct.name for ct in obj.content_types.all()])
#
# Custom links
#
class CustomLinkForm(forms.ModelForm):
class Meta:
model = CustomLink
exclude = []
widgets = {
'text': forms.Textarea,
'url': forms.Textarea,
}
help_texts = {
'weight': 'A numeric weight to influence the ordering of this link among its peers. Lower weights appear '
'first in a list.',
'text': 'Jinja2 template code for the link text. Reference the object as <code>{{ obj }}</code>. Links '
'which render as empty text will not be displayed.',
'url': 'Jinja2 template code for the link URL. Reference the object as <code>{{ obj }}</code>.',
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Format ContentType choices
order_content_types(self.fields['content_type'])
self.fields['content_type'].choices.insert(0, ('', '---------'))
@admin.register(CustomLink)
class CustomLinkAdmin(admin.ModelAdmin):
fieldsets = (
('Custom Link', {
'fields': ('content_type', 'name', 'group_name', 'weight', 'button_class', 'new_window')
}),
('Templates', {
'fields': ('text', 'url'),
'classes': ('monospace',)
})
)
list_display = [
'name', 'content_type', 'group_name', 'weight',
]
list_filter = [
'content_type',
]
form = CustomLinkForm
#
# Export templates
#
class ExportTemplateForm(forms.ModelForm):
class Meta:
model = ExportTemplate
exclude = []
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Format ContentType choices
order_content_types(self.fields['content_type'])
self.fields['content_type'].choices.insert(0, ('', '---------'))
@admin.register(ExportTemplate)
class ExportTemplateAdmin(admin.ModelAdmin):
fieldsets = (
('Export Template', {
'fields': ('content_type', 'name', 'description', 'mime_type', 'file_extension')
}),
('Content', {
'fields': ('template_code',),
'classes': ('monospace',)
})
)
list_display = [
'name', 'content_type', 'description', 'mime_type', 'file_extension',
]
list_filter = [
'content_type',
]
form = ExportTemplateForm
#
# Reports
#
@admin.register(JobResult)
class JobResultAdmin(admin.ModelAdmin):
list_display = [
'obj_type', 'name', 'created', 'completed', 'user', 'status',
]
fields = [
'obj_type', 'name', 'created', 'completed', 'user', 'status', 'data', 'job_id'
]
list_filter = [
'status',
]
readonly_fields = fields
def has_add_permission(self, request):
return False
|
apache-2.0
| -5,184,452,310,745,300,000
| 26.091304
| 118
| 0.540363
| false
| 3.973852
| false
| false
| false
|
QISKit/qiskit-sdk-py
|
qiskit/extensions/standard/s.py
|
1
|
2513
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name
"""
S=diag(1,i) Clifford phase gate or its inverse.
"""
import numpy
from qiskit.circuit import Gate
from qiskit.circuit import QuantumCircuit
from qiskit.circuit import QuantumRegister
from qiskit.qasm import pi
from qiskit.extensions.standard.u1 import U1Gate
class SGate(Gate):
"""S=diag(1,i) Clifford phase gate."""
def __init__(self, label=None):
"""Create new S gate."""
super().__init__("s", 1, [], label=label)
def _define(self):
"""
gate s a { u1(pi/2) a; }
"""
definition = []
q = QuantumRegister(1, "q")
rule = [
(U1Gate(pi/2), [q[0]], [])
]
for inst in rule:
definition.append(inst)
self.definition = definition
def inverse(self):
"""Invert this gate."""
return SdgGate()
def to_matrix(self):
"""Return a Numpy.array for the S gate."""
return numpy.array([[1, 0],
[0, 1j]], dtype=complex)
class SdgGate(Gate):
"""Sdg=diag(1,-i) Clifford adjoint phase gate."""
def __init__(self, label=None):
"""Create new Sdg gate."""
super().__init__("sdg", 1, [], label=label)
def _define(self):
"""
gate sdg a { u1(-pi/2) a; }
"""
definition = []
q = QuantumRegister(1, "q")
rule = [
(U1Gate(-pi/2), [q[0]], [])
]
for inst in rule:
definition.append(inst)
self.definition = definition
def inverse(self):
"""Invert this gate."""
return SGate()
def to_matrix(self):
"""Return a Numpy.array for the Sdg gate."""
return numpy.array([[1, 0],
[0, -1j]], dtype=complex)
def s(self, q):
"""Apply S to q."""
return self.append(SGate(), [q], [])
def sdg(self, q):
"""Apply Sdg to q."""
return self.append(SdgGate(), [q], [])
QuantumCircuit.s = s
QuantumCircuit.sdg = sdg
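# Illustrative usage sketch (not part of the original module): S followed by
# Sdg is the identity, and S applied twice equals Z, checked directly on the
# gate matrices.
def _example_s_gates():
    qc = QuantumCircuit(1)
    qc.s(0)
    qc.sdg(0)
    assert numpy.allclose(SGate().to_matrix().dot(SdgGate().to_matrix()),
                          numpy.eye(2))
    assert numpy.allclose(SGate().to_matrix().dot(SGate().to_matrix()),
                          numpy.diag([1, -1]))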
|
apache-2.0
| -7,865,258,483,430,124,000
| 24.383838
| 77
| 0.564266
| false
| 3.485437
| false
| false
| false
|
quantumlib/Cirq
|
cirq-core/cirq/sim/simulator_base.py
|
1
|
12429
|
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Batteries-included class for Cirq's built-in simulators."""
import abc
import collections
from typing import (
Any,
Dict,
Iterator,
List,
Tuple,
TYPE_CHECKING,
cast,
Generic,
Type,
Sequence,
Optional,
)
import numpy as np
from cirq import circuits, ops, protocols, study, value, devices
from cirq.sim import ActOnArgsContainer
from cirq.sim.operation_target import OperationTarget
from cirq.sim.simulator import (
TStepResult,
TSimulationTrialResult,
TSimulatorState,
TActOnArgs,
SimulatesIntermediateState,
SimulatesSamples,
check_all_resolved,
split_into_matching_protocol_then_general,
)
if TYPE_CHECKING:
import cirq
class SimulatorBase(
Generic[TStepResult, TSimulationTrialResult, TSimulatorState, TActOnArgs],
SimulatesIntermediateState[TStepResult, TSimulationTrialResult, TSimulatorState, TActOnArgs],
SimulatesSamples,
metaclass=abc.ABCMeta,
):
"""A base class for the built-in simulators.
Most implementors of this interface should implement the
`_create_partial_act_on_args` and `_create_step_result` methods. The first
one creates the simulator's quantum state representation at the beginning
of the simulation. The second creates the step result emitted after each
`Moment` in the simulation.
Iteration in the subclass is handled by the `_core_iterator` implementation
here, which handles moment stepping, application of operations, measurement
collection, and creation of noise. Simulators with more advanced needs can
override the implementation if necessary.
Sampling is handled by the implementation of `_run`. This implementation
iterates the circuit to create a final step result, and samples that
result when possible. If not possible, due to noise or classical
probabilities on a state vector, the implementation attempts to fully
iterate the unitary prefix once, then only repeat the non-unitary
suffix from copies of the state obtained by the prefix. If more advanced
functionality is required, then the `_run` method can be overridden.
Note that state here refers to simulator state, which is not necessarily
a state vector. The included simulators and corresponding states are state
vector, density matrix, Clifford, and MPS. Each of these use the default
`_core_iterator` and `_run` methods.
"""
def __init__(
self,
*,
dtype: Type[np.number] = np.complex64,
noise: 'cirq.NOISE_MODEL_LIKE' = None,
seed: 'cirq.RANDOM_STATE_OR_SEED_LIKE' = None,
ignore_measurement_results: bool = False,
split_untangled_states: bool = False,
):
"""Initializes the simulator.
Args:
dtype: The `numpy.dtype` used by the simulation.
noise: A noise model to apply while simulating.
seed: The random seed to use for this simulator.
ignore_measurement_results: If True, then the simulation
will treat measurement as dephasing instead of collapsing
process. This is only applicable to simulators that can
model dephasing.
split_untangled_states: If True, optimizes simulation by running
unentangled qubit sets independently and merging those states
at the end.
"""
self._dtype = dtype
self._prng = value.parse_random_state(seed)
self.noise = devices.NoiseModel.from_noise_model_like(noise)
self._ignore_measurement_results = ignore_measurement_results
self._split_untangled_states = split_untangled_states
@abc.abstractmethod
def _create_partial_act_on_args(
self,
initial_state: Any,
qubits: Sequence['cirq.Qid'],
logs: Dict[str, Any],
) -> TActOnArgs:
"""Creates an instance of the TActOnArgs class for the simulator.
It represents the supplied qubits initialized to the provided state.
Args:
initial_state: The initial state to represent. An integer state is
understood to be a pure state. Other state representations are
simulator-dependent.
qubits: The sequence of qubits to represent.
logs: The structure to hold measurement logs. A single instance
should be shared among all ActOnArgs within the simulation.
"""
@abc.abstractmethod
def _create_step_result(
self,
sim_state: TActOnArgs,
qubit_map: Dict['cirq.Qid', int],
) -> TStepResult:
"""This method should be implemented to create a step result.
Args:
sim_state: The TActOnArgs for this trial.
qubit_map: Determines the canonical ordering of the qubits. This
is often used in specifying the initial state, i.e. the
ordering of the computational basis states.
Returns:
The StepResult.
"""
def _can_be_in_run_prefix(self, val: Any):
"""Determines what should be put in the prefix in `_run`
The `_run` method has an optimization that reduces repetition by
splitting the circuit into a prefix that is pure with respect to the
state representation, and only executing that once per sample set. For
state vectors, any unitary operation is pure, and we make this the
default here. For density matrices, any non-measurement operation can
        be represented wholly in the matrix, and thus this method is
        overridden there to enable greater optimization.
Custom simulators can override this method appropriately.
Args:
val: An operation or noise model to test for purity within the
state representation.
Returns:
A boolean representing whether the value can be added to the
`_run` prefix."""
return protocols.has_unitary(val)
def _core_iterator(
self,
circuit: circuits.Circuit,
sim_state: OperationTarget[TActOnArgs],
all_measurements_are_terminal: bool = False,
) -> Iterator[TStepResult]:
"""Standard iterator over StepResult from Moments of a Circuit.
Args:
circuit: The circuit to simulate.
sim_state: The initial args for the simulation. The form of
this state depends on the simulation implementation. See
documentation of the implementing class for details.
Yields:
StepResults from simulating a Moment of the Circuit.
"""
if len(circuit) == 0:
step_state = sim_state.create_merged_state()
yield self._create_step_result(step_state, step_state.qubit_map)
return
noisy_moments = self.noise.noisy_moments(circuit, sorted(circuit.all_qubits()))
measured: Dict[Tuple['cirq.Qid', ...], bool] = collections.defaultdict(bool)
for moment in noisy_moments:
for op in ops.flatten_to_ops(moment):
try:
# TODO: support more general measurements.
# Github issue: https://github.com/quantumlib/Cirq/issues/3566
# Preprocess measurements
if all_measurements_are_terminal and measured[op.qubits]:
continue
if isinstance(op.gate, ops.MeasurementGate):
measured[op.qubits] = True
if all_measurements_are_terminal:
continue
if self._ignore_measurement_results:
op = ops.phase_damp(1).on(*op.qubits)
# Simulate the operation
sim_state.apply_operation(op)
except TypeError:
raise TypeError(f"{self.__class__.__name__} doesn't support {op!r}")
step_state = sim_state.create_merged_state()
yield self._create_step_result(step_state, step_state.qubit_map)
step_state.log_of_measurement_results.clear()
def _run(
self, circuit: circuits.Circuit, param_resolver: study.ParamResolver, repetitions: int
) -> Dict[str, np.ndarray]:
"""See definition in `cirq.SimulatesSamples`."""
if self._ignore_measurement_results:
raise ValueError("run() is not supported when ignore_measurement_results = True")
param_resolver = param_resolver or study.ParamResolver({})
resolved_circuit = protocols.resolve_parameters(circuit, param_resolver)
check_all_resolved(resolved_circuit)
qubits = tuple(sorted(resolved_circuit.all_qubits()))
act_on_args = self._create_act_on_args(0, qubits)
prefix, general_suffix = (
split_into_matching_protocol_then_general(resolved_circuit, self._can_be_in_run_prefix)
if self._can_be_in_run_prefix(self.noise)
else (resolved_circuit[0:0], resolved_circuit)
)
step_result = None
for step_result in self._core_iterator(
circuit=prefix,
sim_state=act_on_args,
):
pass
general_ops = list(general_suffix.all_operations())
if all(isinstance(op.gate, ops.MeasurementGate) for op in general_ops):
for step_result in self._core_iterator(
circuit=general_suffix,
sim_state=act_on_args,
all_measurements_are_terminal=True,
):
pass
assert step_result is not None
measurement_ops = [cast(ops.GateOperation, op) for op in general_ops]
return step_result.sample_measurement_ops(measurement_ops, repetitions, seed=self._prng)
measurements: Dict[str, List[np.ndarray]] = {}
for i in range(repetitions):
all_step_results = self._core_iterator(
general_suffix,
sim_state=act_on_args.copy() if i < repetitions - 1 else act_on_args,
)
for step_result in all_step_results:
for k, v in step_result.measurements.items():
if k not in measurements:
measurements[k] = []
measurements[k].append(np.array(v, dtype=np.uint8))
return {k: np.array(v) for k, v in measurements.items()}
def _create_act_on_args(
self,
initial_state: Any,
qubits: Sequence['cirq.Qid'],
) -> OperationTarget[TActOnArgs]:
if isinstance(initial_state, OperationTarget):
return initial_state
log: Dict[str, Any] = {}
if self._split_untangled_states:
args_map: Dict[Optional['cirq.Qid'], TActOnArgs] = {}
if isinstance(initial_state, int):
for q in reversed(qubits):
args_map[q] = self._create_partial_act_on_args(
initial_state=initial_state % q.dimension,
qubits=[q],
logs=log,
)
initial_state = int(initial_state / q.dimension)
else:
args = self._create_partial_act_on_args(
initial_state=initial_state,
qubits=qubits,
logs=log,
)
for q in qubits:
args_map[q] = args
args_map[None] = self._create_partial_act_on_args(0, (), log)
return ActOnArgsContainer(args_map, qubits, self._split_untangled_states, log)
else:
return self._create_partial_act_on_args(
initial_state=initial_state,
qubits=qubits,
logs=log,
)
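# A minimal standalone sketch (not part of cirq) of the digit decomposition
# used in _create_act_on_args above: an integer initial state is split into
# one computational-basis value per qubit by repeated mod/div over each
# qubit's dimension, with the last qubit taking the least significant digit.
def _split_initial_state(initial_state: int, dimensions: Sequence[int]) -> Dict[int, int]:
    per_qubit: Dict[int, int] = {}
    for i, dim in reversed(list(enumerate(dimensions))):
        per_qubit[i] = initial_state % dim
        initial_state //= dim
    return per_qubit
# e.g. _split_initial_state(5, [2, 2, 2]) == {2: 1, 1: 0, 0: 1}  (binary 101)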
|
apache-2.0
| 7,946,348,196,334,659,000
| 39.223301
| 100
| 0.620323
| false
| 4.364115
| false
| false
| false
|
lelandbatey/defuse_division
|
defusedivision/game.py
|
1
|
9622
|
import logging
import random
import curses
import queue
from .minesweeper.minefield import MineField
from .minesweeper.contents import Contents
class Conveyor(object):
"""
Abstract class Conveyor describes the basic contract for communicating about games of Minesweeper.
"""
def get_state(self):
raise NotImplementedError
def send_input(self, inpt):
raise NotImplementedError
class Keys:
UP = 'UP'
DOWN = 'DOWN'
LEFT = 'LEFT'
RIGHT = 'RIGHT'
PROBE = 'PROBE'
FLAG = 'FLAG'
DIRECTIONKEYS = [Keys.UP, Keys.DOWN, Keys.LEFT, Keys.RIGHT]
def _move_select(direction, field):
"""
Function _move_select changes the 'selected' field of a MineField depending
    on the direction provided. 'direction' must be one of the Keys constants:
    UP, DOWN, LEFT, or RIGHT. If moving the selected cell would put it at an
    out-of-bounds position, we do nothing.
"""
startloc = field.selected
delta = [0, 0]
if direction == Keys.UP:
delta = [0, -1]
elif direction == Keys.DOWN:
delta = [0, 1]
elif direction == Keys.RIGHT:
delta = [1, 0]
elif direction == Keys.LEFT:
delta = [-1, 0]
# Filter out-of-bounds deltas
x, y = startloc
nx, ny = [x + delta[0], y + delta[1]]
if nx < 0 or nx >= field.width:
nx = x
if ny < 0 or ny >= field.height:
ny = y
field.selected = [nx, ny]
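# A sketch (not part of the original module) showing _move_select's clamping
# at the board edges, using a hypothetical stub in place of a real MineField.
def _demo_move_select():
    class _StubField(object):
        width, height = 3, 3
        selected = [0, 0]
    field = _StubField()
    _move_select(Keys.LEFT, field)  # would go out of bounds; stays at [0, 0]
    _move_select(Keys.DOWN, field)  # in bounds; moves to [0, 1]
    return field.selected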
def create_foothold(field):
"""
Function create_foothold will remove mines from around the currently
selected cell, ensuring that the current cell cannot have a mine, and that
probing that cell will open up some amount of space.
"""
x, y = field.selected
cell = field.board[x][y]
moved_count = 0
safe_cells = [v for _, v in cell.neighbors.items() if v]
safe_cells += [cell]
for neighbor in safe_cells:
if neighbor.contents == Contents.mine:
neighbor.contents = Contents.empty
moved_count += 1
# Place a new mine for each of the mines we had to move out of the way
while moved_count > 0:
rx, ry = random.randint(0, field.width - 1), random.randint(
0, field.height - 1)
possible_mine = field.board[rx][ry]
# Ensure any new location won't be in the desired foothold
        if possible_mine not in safe_cells:
# Only place mines where there aren't existing mines
if not possible_mine.contents == Contents.mine:
possible_mine.contents = Contents.mine
moved_count -= 1
def _first_probe(field):
"""
Function _first_probe checks if this is the first probe of any cell in this
minefield, returning True if it is the first probe, and False if it's not.
"""
cells = [c for row in field.board for c in row]
for cell in cells:
if cell.probed:
return False
return True
def _probe_selected(field):
"""
Function _probe_selected probes the currently selected cell. If the
    cell is flagged, the probe is ignored and True is returned immediately.
    If the probed cell contains a mine, return False; otherwise, return True.
"""
x, y = field.selected
cell = field.board[x][y]
if cell.flagged:
return True
# Create a foothold for the first probe
if _first_probe(field):
create_foothold(field)
cell.probe()
if cell.contents == Contents.mine:
return False
return True
def _flag_selected(field):
x, y = field.selected
cell = field.board[x][y]
cell.flagged = not cell.flagged
def check_win(mfield):
flags = 0
correct_flags = 0
for h in range(mfield.height):
for w in range(mfield.width):
c = mfield.board[w][h]
if c.contents == Contents.mine and c.flagged:
correct_flags += 1
if c.flagged:
flags += 1
if correct_flags == mfield.mine_count and flags == correct_flags:
return True
return False
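# A sketch (not part of the original module): check_win on a stubbed 2x1
# board whose single mine is correctly flagged.
def _demo_check_win():
    class _Cell(object):
        def __init__(self, contents, flagged):
            self.contents, self.flagged = contents, flagged
    class _StubField(object):
        width, height, mine_count = 2, 1, 1
        board = [[_Cell(Contents.mine, True)], [_Cell(Contents.empty, False)]]
    return check_win(_StubField())  # -> True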
class Player(Conveyor):
"""
Class Player contains the minefield that a particular player is playing
against, as well as passthrough-methods to send input to a parent Bout.
"""
def __init__(self, name, bout, mine_count=None, height=None, width=None):
# self._args = args
self.name = name
self.bout = bout
self.stateq = queue.Queue()
self.mfield = MineField(
height=height, width=width, mine_count=mine_count)
self.living = True
self.victory = False
def send_input(self, inpt):
# Just pass the input to the parent bout, but with info saying that
# this input comes from this player
self.bout.send_input({'player': self.name, 'input': inpt})
def get_state(self):
return self.stateq.get()
def json(self):
return {
'name': self.name,
'living': self.living,
'minefield': self.mfield.json(),
'victory': self.victory,
}
class Bout(object):
"""
Class Bout holds information on the state of the game (won/lost) as well as
all the players playing currently.
`player_constructor` is a callable which accepts the same arguments as
class `Player`, and returns a `Player`-like object. Allows a Bout to use a
    Player which gets its input from anywhere.
"""
def __init__(self,
max_players=2,
minefield_size=(12, 12),
mine_count=None,
player_constructor=None):
self.max_players = max_players
self.minefield_size = minefield_size
self.mine_count = mine_count
self.players = dict()
self.ready = False
if player_constructor is None:
player_constructor = Player
self.player_constructor = player_constructor
def send_input(self, inpt_event):
'''
Method send_input is the final stop for an inpt_event, as those events
are used here by the Bout to modify the state of the game.
'''
player = self.players[inpt_event['player']]
field = player.mfield
inpt = inpt_event['input']
if isinstance(inpt, dict):
# Change the name of a player
if 'change-name' in inpt:
newname = inpt['change-name']
while newname in self.players:
newname = newname + str(random.randint(0, 100))
oldname = player.name
logging.info('Changing player name from: "{}" to "{}"'.format(
oldname, newname))
player.name = newname
self.players[newname] = player
del self.players[oldname]
if 'new-minefield' in inpt:
info = inpt['new-minefield']
height = info['height']
width = info['width']
mine_count = info['mine_count']
new_mfield = MineField(
height=height, width=width, mine_count=mine_count)
player.mfield = new_mfield
if inpt in DIRECTIONKEYS:
_move_select(inpt, field)
self._push_selected(player.name, field.selected)
return
if inpt == Keys.PROBE:
if not _probe_selected(field):
player.living = False
if inpt == Keys.FLAG:
_flag_selected(field)
if check_win(field):
player.victory = True
self._push_state()
def _push_state(self):
'''
        Method _push_state puts the state of this bout into every Player's
stateq.
'''
for _, v in self.players.items():
v.stateq.put(('new-state', self.json()))
def _push_selected(self, playername, selected):
'''
        Method _push_selected pushes a state to all Players, updating one
        player's selected position.
'''
for _, v in self.players.items():
v.stateq.put(('update-selected', (playername, selected)))
def add_player(self):
'''
Method add_player creates a new player object for this Bout, and
returns a reference to that player. If there are already
self.max_players players set to play in this bout, then returns None.
'''
if self.max_players <= len(self.players):
return None
pname = "Player{}-{}".format(
len(self.players) + 1, random.randint(0, 10000))
width, height = self.minefield_size
player = self.player_constructor(
pname,
self,
mine_count=self.mine_count,
height=height,
width=width)
self.players[pname] = player
logging.info('Adding player: "{}" {}'.format(pname, player))
if len(self.players) >= self.max_players:
self.ready = True
self._push_state()
return player
def remove_player(self, playername):
'''
Method remove_player removes a player with the given name from this
Bout's collection of players. If no player exists with the given name,
does nothing.
'''
logging.info('Removing player: "{}"'.format(playername))
if playername in self.players:
del self.players[playername]
if len(self.players) < self.max_players:
self.ready = False
self._push_state()
def json(self):
jplayers = {k: v.json() for k, v in self.players.items()}
return {"players": jplayers, 'ready': self.ready}
|
gpl-3.0
| -569,778,188,004,578,700
| 30.34202
| 102
| 0.585014
| false
| 3.985915
| false
| false
| false
|
beni55/rinohtype
|
rinohlib/templates/article.py
|
1
|
1907
|
from rinoh.document import DocumentSection
from rinoh.paragraph import Paragraph
from rinoh.structure import GroupedFlowables
from .base import (ContentsPart, DocumentBase, DocumentOptions,
TableOfContentsSection)
class ArticleFrontMatter(GroupedFlowables):
def __init__(self):
self.toc_section = TableOfContentsSection()
super().__init__()
def prepare(self, document):
self.toc_section.prepare(document)
def flowables(self, document):
meta = document.metadata
yield Paragraph(meta['title'], style='title')
if 'subtitle' in meta:
yield Paragraph(meta['subtitle'], style='subtitle')
if 'date' in meta:
date = meta['date']
try:
yield Paragraph(date.strftime('%B %d, %Y'), style='author')
except AttributeError:
yield Paragraph(date, style='author')
if 'author' in meta:
yield Paragraph(meta['author'], style='author')
if document.options['table_of_contents']:
yield self.toc_section
# document parts
# ----------------------------------------------------------------------------
class ArticlePart(ContentsPart):
def __init__(self, document_section):
self.front_matter = ArticleFrontMatter()
super().__init__(document_section)
def prepare(self):
self.front_matter.prepare(self.document)
def flowables(self):
yield self.front_matter
for flowable in super().flowables():
yield flowable
class ArticleSection(DocumentSection):
parts = [ArticlePart]
# main document
# ----------------------------------------------------------------------------
class ArticleOptions(DocumentOptions):
options = {'table_of_contents': True}
class Article(DocumentBase):
sections = [ArticleSection]
options_class = ArticleOptions
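# A sketch of how the template pattern above extends (hypothetical names;
# whether option dicts merge across subclasses depends on DocumentOptions):
class ReportOptions(ArticleOptions):
    options = {'table_of_contents': False}
class Report(Article):
    options_class = ReportOptions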
|
agpl-3.0
| -5,709,940,673,110,627,000
| 28.338462
| 78
| 0.588359
| false
| 4.551313
| false
| false
| false
|
junion-org/junion
|
junion/twitter/util.py
|
1
|
6239
|
#!/usr/bin/env python
# coding: utf-8
"""
Utility module for Twitter-related processing.
"""
import re
import time
import calendar
import HTMLParser
# Parser used to unescape HTML special characters
parser = HTMLParser.HTMLParser()
def get_text_and_entities(tw):
"""
    Given a tweet, return the text with entities removed and the list of entities.
"""
if 'entities' in tw:
return _get_text_and_entities_ent(tw)
else:
return _get_text_and_entities_reg(tw)
def _get_text_and_entities_ent(tw):
"""
    Use the tweet's entities field to return the entity-stripped text and
    the list of entities.
    URLs have been seen in the text without appearing in the entities field,
    so regular expressions are used for extraction as well.
"""
raw_text = tw['text']
entities = tw['entities']
indices = []
urls = []
mentions = []
hashtags = []
    # Collect the entities
if 'urls' in entities:
for url in entities['urls']:
urls.append(url['url'])
indices.append(url['indices'])
if 'user_mentions' in entities:
for mention in entities['user_mentions']:
mentions.append(mention['screen_name'])
indices.append(mention['indices'])
if 'hashtags' in entities:
for hashtag in entities['hashtags']:
hashtags.append(hashtag['text'])
indices.append(hashtag['indices'])
    # Remove the entities from the text
cur = 0
text = ''
for i, indice in enumerate(sorted(indices, key=lambda x:x[0])):
text += raw_text[cur:indice[0]]
cur = indice[1]
text += raw_text[cur:]
    # Extract remaining entities with regular expressions
text, ent_reg = _get_text_and_entities_reg({'text': text})
if 'urls' in ent_reg:
urls += ent_reg['urls']
if 'mentions' in ent_reg:
mentions += ent_reg['mentions']
if 'hashtags' in ent_reg:
hashtags += ent_reg['hashtags']
    # Store the entities
entities = {}
if urls:
entities['urls'] = urls
if mentions:
entities['mentions'] = mentions
if hashtags:
entities['hashtags'] = hashtags
return text, entities
def _get_text_and_entities_reg(tw):
"""
    Use regular expressions to return the entity-stripped text and the list of entities.
"""
text = tw['text'] if tw['text'] else ''
urls = get_urls(text)
mentions = get_mentions(text)
hashtags = get_hashtags(text)
entities = urls + mentions + hashtags
for entity in entities:
text = text.replace(entity, '')
entities = {}
if urls:
entities['urls'] = urls
if mentions:
entities['mentions'] = mentions
if hashtags:
entities['hashtags'] = hashtags
return unescape(text), entities
def get_urls(s):
"""
    Extract and return the URLs found in a string.
    See the following for the characters allowed in URLs:
    http://tools.ietf.org/html/rfc2396
    http://jbpe.tripod.com/rfcj/rfc2396.ej.sjis.txt (Japanese translation)
    See the following for regular-expression special characters:
http://www.python.jp/doc/release/library/re.html#module-re
"""
r = re.compile(r"https?://[\w;/?:@&=+$,\-.!~*'()%]+")
return r.findall(s)
def get_mentions(s):
"""
    Extract and return the @screen_name mentions found in a string.
    A screen name may use only alphanumerics and underscores, at most 15
    characters; names containing 'admin' or 'twitter' are not allowed.
    See:
https://support.twitter.com/groups/31-twitter-basics/topics/104-welcome-to-twitter-support/articles/230266-#
"""
r = re.compile(r'@\w+')
return r.findall(s)
def get_hashtags(s):
"""
    Extract and return the #hashtags found in a string.
    If a hashtag contains punctuation (, . ; ' ? ! etc.), only the characters
    before it are treated as the hashtag.
    The # sign must be immediately preceded by a space.
    See:
https://support.twitter.com/articles/450254-#
http://d.hatena.ne.jp/sutara_lumpur/20101012/1286860552
"""
r = re.compile(
u'(^|[^ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w&/]+)(' +
u'[##]' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w]*' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9a-zA-Z]+' +
u'[ヲ-゚ー゛゜々ヾヽぁ-ヶ一-龠a-zA-Z0-9\w]*)'
)
return [x[1] for x in r.findall(s)]
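# An illustrative check of the three extractors on a sample tweet (a sketch,
# not part of the original module).
def _demo_entity_extraction():
    s = u'@alice check https://example.com #python'
    return get_urls(s), get_mentions(s), get_hashtags(s)
    # -> ([u'https://example.com'], [u'@alice'], [u'#python'])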
def twittertime2unixtime(twitter_time):
"""
    Convert Twitter time to UNIX time.
"""
unix_time = calendar.timegm(time.strptime(twitter_time, '%a %b %d %H:%M:%S +0000 %Y'))
return unix_time
def twittertime2localtime(twitter_time):
"""
    Convert Twitter time to local time (Japan time).
"""
unix_time = calendar.timegm(time.strptime(twitter_time, '%a %b %d %H:%M:%S +0000 %Y'))
return time.localtime(unix_time)
def unixtime2localtime(unix_time):
"""
    Convert UNIX time to local time (Japan time).
"""
return time.localtime(unix_time)
def unixtime2twittertime(unix_time):
"""
    Convert UNIX time to Twitter time.
"""
return time.strftime('%a %b %d %H:%M:%S +0000 %Y', time.gmtime(unix_time))
def localtime2unixtime(local_time):
"""
    Convert local time (Japan time) to UNIX time.
"""
return time.mktime(local_time)
def localtime2twittertime(local_time):
"""
    Convert local time (Japan time) to Twitter time.
"""
unix_time = time.mktime(local_time)
return time.strftime('%a %b %d %H:%M:%S +0000 %Y', time.gmtime(unix_time))
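# An illustrative round trip (a sketch, not part of the original module):
# a Twitter timestamp survives conversion to UNIX time and back.
def _demo_time_roundtrip():
    tw = 'Sun Oct 09 06:01:41 +0000 2011'
    assert unixtime2twittertime(twittertime2unixtime(tw)) == tw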
def unescape(text):
"""
    Unescape HTML special characters back into the original string.
"""
return parser.unescape(text)
def unescape_dquote(text):
"""
    If a MeCab dictionary string is wrapped in double quotes, convert it
    back to a normal string and return it.
"""
if text[0] == '"' and text[-1] == '"':
text = text[1:-1].replace('""', '"')
return text
|
mit
| 7,831,926,185,606,672,000
| 25.989305
| 116
| 0.602536
| false
| 2.131334
| false
| false
| false
|
deepmind/lamb
|
lamb/dyneval.py
|
1
|
4538
|
# Copyright 2018 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Dynamic evaluation."""
# pylint: disable=missing-docstring
# pylint: disable=g-complex-comprehension
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
class Dyneval(object):
def __init__(self, grads_and_vars, learning_rate, decay_rate, epsilon):
with tf.variable_scope('dyneval'):
# convert_to_tensor densifies IndexedSlices
self._grads = [tf.convert_to_tensor(grad) for grad, _ in grads_and_vars]
self._vars = [var for _, var in grads_and_vars]
self._learning_rate = learning_rate
self._decay_rate = decay_rate
def shadow_vars():
return [
tf.get_variable(
var.name.replace('/', '-').replace(':', '-'),
var.get_shape(), initializer=tf.zeros_initializer(),
trainable=False)
for var in self._vars]
with tf.variable_scope('save'):
self._saves = shadow_vars()
with tf.variable_scope('sum_squared_grads'):
self._sum_squared_grads = shadow_vars()
self._save = self._make_save()
self._restore = self._make_restore()
      # These are for computing an RMSProp-like estimate of the variance of
# minibatch gradients. Here, this quantity is estimated on the training
# set once, while gradient descent happens on validation/test.
self._num_squared_grads = tf.get_variable(
'num_squared_grads', [], initializer=tf.zeros_initializer(),
trainable=False)
self._zero_sum_squared_grads = self._make_zero_sum_squared_grads()
self._add_squared_grads = self._make_add_squared_grads()
self._epsilon = epsilon
self._update = self._make_update()
def _make_save(self):
assignments = []
for save, var in zip(self._saves, self._vars):
assignments.append(save.assign(var))
return tf.group(assignments)
def _make_restore(self):
assignments = []
for save, var in zip(self._saves, self._vars):
assignments.append(var.assign(save))
return tf.group(assignments)
def _make_update(self):
mss = []
gsum = 0.0
count = 0
for sum_squared_grads in self._sum_squared_grads:
ms = tf.sqrt(sum_squared_grads / self._num_squared_grads)
gsum += tf.reduce_sum(ms)
count += tf.reduce_sum(tf.ones_like(ms))
mss.append(ms)
gsum = gsum / count
assignments = []
for grad, var, save, sum_squared_grads, ms in zip(
self._grads, self._vars, self._saves, self._sum_squared_grads, mss):
decay_rate = tf.minimum(1.0, self._decay_rate*(ms/gsum))
delta = (-self._learning_rate*grad / (ms + self._epsilon) +
decay_rate*(save-var))
assignments.append(var.assign_add(delta))
return tf.group(assignments)
def _make_add_squared_grads(self):
assignments = []
for sum_squared_grads, grads in zip(self._sum_squared_grads, self._grads):
assignments.append(sum_squared_grads.assign_add(tf.square(grads)))
return tf.group(assignments + [self._num_squared_grads.assign_add(1)])
def _make_zero_sum_squared_grads(self):
assignments = []
for sum_squared_grads in self._sum_squared_grads:
assignments.append(sum_squared_grads.assign(
tf.zeros_like(sum_squared_grads)))
return tf.group(assignments + [self._num_squared_grads.assign(0)])
def save(self):
tf.get_default_session().run(self._save)
def restore(self):
tf.get_default_session().run(self._restore)
def update_op(self):
return self._update
def zero_sum_squared_grads(self):
tf.get_default_session().run(self._zero_sum_squared_grads)
def add_squared_grads_op(self):
return self._add_squared_grads
def __enter__(self):
self.save()
def __exit__(self, type_, value, traceback):
self.restore()
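# A sketch of the intended wiring (run_batch is an assumed helper that feeds
# a batch and runs the given op in a session; adapt to your own graph):
# squared gradients are accumulated on training data once, then weights are
# temporarily adapted by gradient descent during evaluation and restored.
def _example_dyneval(loss, train_batches, eval_batches, run_batch):
  grads_and_vars = tf.train.GradientDescentOptimizer(1.0).compute_gradients(loss)
  dyneval = Dyneval(grads_and_vars, learning_rate=0.001,
                    decay_rate=0.02, epsilon=1e-5)
  dyneval.zero_sum_squared_grads()
  for batch in train_batches:
    run_batch(dyneval.add_squared_grads_op(), batch)
  with dyneval:  # saves weights on entry, restores them on exit
    for batch in eval_batches:
      run_batch(dyneval.update_op(), batch)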
|
apache-2.0
| -5,901,291,351,476,678,000
| 34.732283
| 78
| 0.647422
| false
| 3.674494
| false
| false
| false
|
SUSE-Cloud/glance
|
glance/tests/integration/legacy_functional/test_v1_api.py
|
1
|
62578
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import hashlib
import json
import os
import tempfile
import testtools
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils
from glance.tests.integration.legacy_functional import base
from glance.tests.utils import minimal_headers
FIVE_KB = 5 * 1024
FIVE_GB = 5 * 1024 * 1024 * 1024
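# For orientation: judging from the assertions in the tests below (an
# assumption, not taken from glance.tests.utils itself), minimal_headers(name)
# is expected to produce roughly the following header dict.
def _assumed_minimal_headers(name):
    return {'Content-Type': 'application/octet-stream',
            'X-Image-Meta-Name': name,
            'X-Image-Meta-disk_format': 'raw',
            'X-Image-Meta-container_format': 'ovf',
            'X-Image-Meta-Is-Public': 'True'}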
class TestApi(base.ApiTest):
def test_get_head_simple_post(self):
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. GET /images/detail
# Verify no public images
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
        # 2. POST /images with a public image named Image1 and no custom
        # properties. Verify a 201 Created is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 3. HEAD image
# Verify image found now
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
# 4. GET image
# Verify all information on image we just added is correct
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image_headers = {
'x-image-meta-id': image_id,
'x-image-meta-name': 'Image1',
'x-image-meta-is_public': 'True',
'x-image-meta-status': 'active',
'x-image-meta-disk_format': 'raw',
'x-image-meta-container_format': 'ovf',
'x-image-meta-size': str(FIVE_KB)}
expected_std_headers = {
'content-length': str(FIVE_KB),
'content-type': 'application/octet-stream'}
for expected_key, expected_value in expected_image_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
for expected_key, expected_value in expected_std_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
self.assertEqual(content, "*" * FIVE_KB)
self.assertEqual(hashlib.md5(content).hexdigest(),
hashlib.md5("*" * FIVE_KB).hexdigest())
# 5. GET /images
        # Verify one public image
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_result = {"images": [
{"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"name": "Image1",
"checksum": "c2e5db72bd7fd153f53ede5da5a06de3",
"size": 5120}]}
self.assertEqual(json.loads(content), expected_result)
# 6. GET /images/detail
# Verify image and all its metadata
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 7. PUT image with custom properties of "distro" and "arch"
# Verify 200 returned
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['properties']['arch'], "x86_64")
self.assertEqual(data['image']['properties']['distro'], "Ubuntu")
# 8. GET /images/detail
# Verify image and all its metadata
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {'distro': 'Ubuntu', 'arch': 'x86_64'},
"size": 5120}
image = json.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 9. PUT image and remove a previously existing property.
headers = {'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 1)
self.assertEqual(data['properties']['arch'], "x86_64")
# 10. PUT image and add a previously deleted property.
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = json.loads(content)
path = "/v1/images/detail"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 2)
self.assertEqual(data['properties']['arch'], "x86_64")
self.assertEqual(data['properties']['distro'], "Ubuntu")
self.assertNotEqual(data['created_at'], data['updated_at'])
# DELETE image
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_queued_process_flow(self):
"""
We test the process flow where a user registers an image
with Glance but does not immediately upload an image file.
Later, the user uploads an image file using a PUT operation.
We track the changing of image status throughout this process.
0. GET /images
- Verify no public images
1. POST /images with public image named Image1 with no location
attribute and no image data.
- Verify 201 returned
2. GET /images
- Verify one public image
3. HEAD image
- Verify image now in queued status
4. PUT image with image data
- Verify 200 returned
5. HEAD images
- Verify image now in active status
6. GET /images
- Verify one public image
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
# with no location or image data
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['checksum'], None)
self.assertEqual(data['image']['size'], 0)
self.assertEqual(data['image']['container_format'], 'ovf')
self.assertEqual(data['image']['disk_format'], 'raw')
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
image_id = data['image']['id']
# 2. GET /images
# Verify 1 public image
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['checksum'], None)
self.assertEqual(data['images'][0]['size'], 0)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
        # 3. HEAD image
# Verify status is in queued
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "queued")
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-id'], image_id)
# 4. PUT image with image data, verify 200 returned
image_data = "*" * FIVE_KB
headers = {'Content-Type': 'application/octet-stream'}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT', headers=headers,
body=image_data)
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
        # 5. HEAD image
# Verify status is in active
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
self.assertEqual(response['x-image-meta-status'], "active")
# 6. GET /images
# Verify 1 public image still...
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(data['images'][0]['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['images'][0]['id'], image_id)
self.assertEqual(data['images'][0]['size'], FIVE_KB)
self.assertEqual(data['images'][0]['container_format'], 'ovf')
self.assertEqual(data['images'][0]['disk_format'], 'raw')
self.assertEqual(data['images'][0]['name'], "Image1")
# DELETE image
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_size_greater_2G_mysql(self):
"""
A test against the actual datastore backend for the registry
to ensure that the image size property is not truncated.
:see https://bugs.launchpad.net/glance/+bug/739433
"""
        # 1. POST /images with a public image named Image1 and a size of 5G.
        # Use the HTTP engine with an X-Image-Meta-Location attribute to
        # make Glance forego "adding" the image data.
        # Verify a 201 Created is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Location': 'http://example.com/fakeimage',
'X-Image-Meta-Size': str(FIVE_GB),
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
        # 2. HEAD image-location
# Verify image size is what was passed in, and not truncated
path = response.get('location')
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], str(FIVE_GB))
self.assertEqual(response['x-image-meta-name'], 'Image1')
self.assertEqual(response['x-image-meta-is_public'], 'True')
def test_v1_not_enabled(self):
self.config(enable_v1_api=False)
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 300)
def test_v1_enabled(self):
self.config(enable_v1_api=True)
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
def test_zero_initial_size(self):
"""
A test to ensure that an image with size explicitly set to zero
has status that immediately transitions to active.
"""
        # 1. POST /images with a public image named Image1 and a size of
        # zero. Verify a 201 Created is returned
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Size': '0',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-disk_format': 'raw',
'X-image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
# 2. HEAD image-location
# Verify image size is zero and the status is active
path = response.get('location')
response, content = self.http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-size'], '0')
self.assertEqual(response['x-image-meta-status'], 'active')
# 3. GET image-location
# Verify image content is empty
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(len(content), 0)
def test_traceback_not_consumed(self):
"""
A test that errors coming from the POST API do not
get consumed and print the actual error message, and
not something like <traceback object at 0x1918d40>
:see https://bugs.launchpad.net/glance/+bug/755912
"""
# POST /images with binary data, but not setting
# Content-Type to application/octet-stream, verify a
# 400 returned and that the error is readable.
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
path = "/v1/images"
headers = minimal_headers('Image1')
headers['Content-Type'] = 'not octet-stream'
response, content = self.http.request(path, 'POST',
body=test_data_file.name,
headers=headers)
self.assertEqual(response.status, 400)
expected = "Content-Type must be application/octet-stream"
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
def test_filtered_images(self):
"""
Set up four test images and ensure each query param filter works
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images, and one private image
# with various attributes
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'True',
'X-Image-Meta-Property-pants': 'are on'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vhd',
'X-Image-Meta-Size': '20',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are on'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are on")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Image!',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '21',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Protected': 'False',
'X-Image-Meta-Property-pants': 'are off'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['properties']['pants'], "are off")
self.assertEqual(data['image']['is_public'], True)
image_ids.append(data['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'My Private Image',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '22',
'X-Image-Meta-Is-Public': 'False',
'X-Image-Meta-Protected': 'False'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
self.assertEqual(data['image']['is_public'], False)
image_ids.append(data['image']['id'])
# 2. GET /images
# Verify three public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 3. GET /images with name filter
# Verify correct images returned with name
params = "name=My%20Image!"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['name'], "My Image!")
# 4. GET /images with status filter
# Verify correct images returned with status
params = "status=queued"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertEqual(image['status'], "queued")
params = "status=active"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# 5. GET /images with container_format filter
# Verify correct images returned with container_format
params = "container_format=ovf"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['container_format'], "ovf")
# 6. GET /images with disk_format filter
# Verify correct images returned with disk_format
params = "disk_format=vdi"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['disk_format'], "vdi")
# 7. GET /images with size_max filter
# Verify correct images returned with size <= expected
params = "size_max=20"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] <= 20)
# 8. GET /images with size_min filter
# Verify correct images returned with size >= expected
params = "size_min=20"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertTrue(image['size'] >= 20)
# 9. Get /images with is_public=None filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=None"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 4)
# 10. Get /images with is_public=False filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=False"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "My Private Image")
# 11. Get /images with is_public=True filter
# Verify correct images returned with property
# Bug lp:803656 Support is_public in filtering
params = "is_public=True"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
for image in data['images']:
self.assertNotEqual(image['name'], "My Private Image")
# 12. Get /images with protected=False filter
# Verify correct images returned with property
params = "protected=False"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertNotEqual(image['name'], "Image1")
# 13. Get /images with protected=True filter
# Verify correct images returned with property
params = "protected=True"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['name'], "Image1")
# 14. GET /images with property filter
# Verify correct images returned with property
params = "property-pants=are%20on"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
# 15. GET /images with property filter and name filter
# Verify correct images returned with property and name
# Make sure you quote the url when using more than one param!
params = "name=My%20Image!&property-pants=are%20on"
path = "/v1/images/detail?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 1)
for image in data['images']:
self.assertEqual(image['properties']['pants'], "are on")
self.assertEqual(image['name'], "My Image!")
# 16. GET /images with past changes-since filter
yesterday = timeutils.isotime(timeutils.utcnow() -
datetime.timedelta(1))
params = "changes-since=%s" % yesterday
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# one timezone west of Greenwich equates to an hour ago
# taking care to pre-urlencode '+' as '%2B', otherwise the timezone
# '+' is wrongly decoded as a space
# TODO(eglynn): investigate '+' --> <SPACE> decoding, an artifact
# of WSGI/webob dispatch?
now = timeutils.utcnow()
hour_ago = now.strftime('%Y-%m-%dT%H:%M:%S%%2B01:00')
params = "changes-since=%s" % hour_ago
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
# 17. GET /images with future changes-since filter
tomorrow = timeutils.isotime(timeutils.utcnow() +
datetime.timedelta(1))
params = "changes-since=%s" % tomorrow
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# one timezone east of Greenwich equates to an hour from now
now = timeutils.utcnow()
hour_hence = now.strftime('%Y-%m-%dT%H:%M:%S-01:00')
params = "changes-since=%s" % hour_hence
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
        # 18. GET /images with an invalid (negative) size_min filter
        # Verify a 400 Bad Request is returned
params = "size_min=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_min got -1" in content)
        # 19. GET /images with an invalid (negative) size_max filter
        # Verify a 400 Bad Request is returned
params = "size_max=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("filter size_max got -1" in content)
        # 20. GET /images with an invalid (negative) min_ram filter
        # Verify a 400 Bad Request is returned
params = "min_ram=-1"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("Bad value passed to filter min_ram got -1" in content)
        # 21. GET /images with an invalid protected filter value
        # Verify a 400 Bad Request is returned
params = "protected=imalittleteapot"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("protected got imalittleteapot" in content)
        # 22. GET /images with an invalid is_public filter value
        # Verify a 400 Bad Request is returned
params = "is_public=imalittleteapot"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 400)
self.assertTrue("is_public got imalittleteapot" in content)
def test_limited_images(self):
"""
Ensure marker and limit query params work
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
image_ids = []
# 1. POST /images with three public images with various attributes
headers = minimal_headers('Image1')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image2')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = minimal_headers('Image3')
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with all images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 3)
# 3. GET /images with limit of 2
# Verify only two images were returned
params = "limit=2"
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[0]['id'])
self.assertEqual(data[1]['id'], images[1]['id'])
# 4. GET /images with marker
# Verify only two images were returned
params = "marker=%s" % images[0]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 2)
self.assertEqual(data[0]['id'], images[1]['id'])
self.assertEqual(data[1]['id'], images[2]['id'])
# 5. GET /images with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
        # 6. GET /images with marker and limit
# Verify only one image was returned with the correct id
params = "limit=1&marker=%s" % images[1]['id']
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)['images']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['id'], images[2]['id'])
# DELETE images
for image_id in image_ids:
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_ordered_images(self):
"""
        Set up three test images and ensure sort_key/sort_dir ordering and
        markers work
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with three public images with various attributes
image_ids = []
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'ASDF',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'bare',
'X-Image-Meta-Disk-Format': 'iso',
'X-Image-Meta-Size': '2',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'XYZ',
'X-Image-Meta-Status': 'saving',
'X-Image-Meta-Container-Format': 'ami',
'X-Image-Meta-Disk-Format': 'ami',
'X-Image-Meta-Size': '5',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image_ids.append(json.loads(content)['image']['id'])
# 2. GET /images with no query params
# Verify three public images sorted by created_at desc
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
self.assertEqual(data['images'][2]['id'], image_ids[0])
# 3. GET /images sorted by name asc
params = 'sort_key=name&sort_dir=asc'
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[1])
self.assertEqual(data['images'][1]['id'], image_ids[0])
self.assertEqual(data['images'][2]['id'], image_ids[2])
# 4. GET /images sorted by size desc
params = 'sort_key=size&sort_dir=desc'
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 3)
self.assertEqual(data['images'][0]['id'], image_ids[0])
self.assertEqual(data['images'][1]['id'], image_ids[2])
self.assertEqual(data['images'][2]['id'], image_ids[1])
# 5. GET /images sorted by size desc with a marker
params = 'sort_key=size&sort_dir=desc&marker=%s' % image_ids[0]
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 2)
self.assertEqual(data['images'][0]['id'], image_ids[2])
self.assertEqual(data['images'][1]['id'], image_ids[1])
# 6. GET /images sorted by name asc with a marker
params = 'sort_key=name&sort_dir=asc&marker=%s' % image_ids[2]
path = "/v1/images?%s" % (params)
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = json.loads(content)
self.assertEqual(len(data['images']), 0)
# DELETE images
for image_id in image_ids:
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
def test_duplicate_image_upload(self):
"""
Upload initial image, then attempt to upload duplicate image
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. POST /images with public image named Image1
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
image = json.loads(content)['image']
        # 2. POST /images with a public image reusing the first image's ID
headers = {'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': 'Image1 Update',
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Container-Format': 'ovf',
'X-Image-Meta-Disk-Format': 'vdi',
'X-Image-Meta-Size': '19',
'X-Image-Meta-Id': image['id'],
'X-Image-Meta-Is-Public': 'True'}
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 409)
def test_delete_not_existing(self):
"""
We test the following:
0. GET /images/1
- Verify 404
1. DELETE /images/1
- Verify 404
"""
# 0. GET /images
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. DELETE /images/1
# Verify 404 returned
path = "/v1/images/1"
response, content = self.http.request(path, 'DELETE')
self.assertEqual(response.status, 404)
def _do_test_post_image_content_bad_format(self, format):
"""
        We test that a bad container/disk format value fails with 400
        "Bad Request"
:see https://bugs.launchpad.net/glance/+bug/933702
"""
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
path = "/v1/images"
        # POST /images with a bad value for the given format
headers = minimal_headers('Image1')
headers['X-Image-Meta-' + format] = 'bad_value'
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
response, content = self.http.request(path, 'POST',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'bad_value' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
# make sure the image was not created
# Verify no public images
path = "/v1/images"
response, content = self.http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = json.loads(content)['images']
self.assertEqual(len(images), 0)
def test_post_image_content_bad_container_format(self):
self._do_test_post_image_content_bad_format('container_format')
def test_post_image_content_bad_disk_format(self):
self._do_test_post_image_content_bad_format('disk_format')
def _do_test_put_image_content_missing_format(self, format):
"""
We test that missing container/disk format only fails with
400 "Bad Request" when the image content is PUT (i.e. not
on the original POST of a queued image).
:see https://bugs.launchpad.net/glance/+bug/937216
"""
# POST queued image
path = "/v1/images"
headers = {
'X-Image-Meta-Name': 'Image1',
'X-Image-Meta-Is-Public': 'True',
}
response, content = self.http.request(path, 'POST', headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
self.addDetail('image_data', testtools.content.json_content(data))
        # PUT image content without the given format being specified
path = "/v1/images/%s" % (image_id)
headers = minimal_headers('Image1')
del headers['X-Image-Meta-' + format]
with tempfile.NamedTemporaryFile() as test_data_file:
test_data_file.write("XXX")
test_data_file.flush()
response, content = self.http.request(path, 'PUT',
headers=headers,
body=test_data_file.name)
self.assertEqual(response.status, 400)
type = format.replace('_format', '')
expected = "Invalid %s format 'None' for image" % type
self.assertTrue(expected in content,
"Could not find '%s' in '%s'" % (expected, content))
def test_put_image_content_bad_container_format(self):
self._do_test_put_image_content_missing_format('container_format')
def test_put_image_content_bad_disk_format(self):
self._do_test_put_image_content_missing_format('disk_format')
def _do_test_mismatched_attribute(self, attribute, value):
"""
Test mismatched attribute.
"""
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
headers[attribute] = value
path = "/v1/images"
response, content = self.http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 400)
images_dir = os.path.join(self.test_dir, 'images')
image_count = len([name for name in os.listdir(images_dir)
if os.path.isfile(os.path.join(images_dir, name))])
        self.assertEqual(image_count, 0)
def test_mismatched_size(self):
"""
Test mismatched size.
"""
self._do_test_mismatched_attribute('x-image-meta-size',
str(FIVE_KB + 1))
def test_mismatched_checksum(self):
"""
Test mismatched checksum.
"""
self._do_test_mismatched_attribute('x-image-meta-checksum',
'foobar')
class TestApiWithFakeAuth(base.ApiTest):
def __init__(self, *args, **kwargs):
super(TestApiWithFakeAuth, self).__init__(*args, **kwargs)
self.api_flavor = 'fakeauth'
self.registry_flavor = 'fakeauth'
def test_ownership(self):
# Add an image with admin privileges and ensure the owner
# can be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
create_headers.update(auth_headers)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
# Now add an image without admin privileges and ensure the owner
# cannot be set to something other than what was used to authenticate
auth_headers = {
'X-Auth-Token': 'user1:tenant1:role1',
}
create_headers.update(auth_headers)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=create_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
create_headers.update(auth_headers)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# Make sure the non-privileged user can't update their owner either
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
'X-Auth-Token': 'user1:tenant1:role1',
}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT',
headers=update_headers)
self.assertEqual(response.status, 200)
# We have to be admin to see the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant1:admin',
}
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant1', response['x-image-meta-owner'])
# An admin user should be able to update the owner
auth_headers = {
'X-Auth-Token': 'user1:tenant3:admin',
}
update_headers = {
'X-Image-Meta-Name': 'MyImage2',
'X-Image-Meta-Owner': 'tenant2',
}
update_headers.update(auth_headers)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'PUT',
headers=update_headers)
self.assertEqual(response.status, 200)
path = "/v1/images/%s" % (image_id)
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 200)
self.assertEqual('tenant2', response['x-image-meta-owner'])
def test_image_visibility_to_different_users(self):
owners = ['admin', 'tenant1', 'tenant2', 'none']
visibilities = {'public': 'True', 'private': 'False'}
image_ids = {}
for owner in owners:
for visibility, is_public in visibilities.items():
name = '%s-%s' % (owner, visibility)
headers = {
'Content-Type': 'application/octet-stream',
'X-Image-Meta-Name': name,
'X-Image-Meta-Status': 'active',
'X-Image-Meta-Is-Public': is_public,
'X-Image-Meta-Owner': owner,
'X-Auth-Token': 'createuser:createtenant:admin',
}
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_ids[name] = data['image']['id']
def list_images(tenant, role='', is_public=None):
auth_token = 'user:%s:%s' % (tenant, role)
headers = {'X-Auth-Token': auth_token}
path = "/v1/images/detail"
if is_public is not None:
path += '?is_public=%s' % is_public
response, content = self.http.request(path, 'GET', headers=headers)
self.assertEqual(response.status, 200)
return json.loads(content)['images']
# 1. Known user sees public and their own images
images = list_images('tenant1')
        self.assertEqual(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'tenant1')
# 2. Unknown user sees only public images
images = list_images('none')
        self.assertEqual(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 3. Unknown admin sees only public images
images = list_images('none', role='admin')
        self.assertEqual(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 4. Unknown admin, is_public=none, shows all images
images = list_images('none', role='admin', is_public='none')
        self.assertEqual(len(images), 8)
# 5. Unknown admin, is_public=true, shows only public images
images = list_images('none', role='admin', is_public='true')
        self.assertEqual(len(images), 4)
for image in images:
self.assertTrue(image['is_public'])
# 6. Unknown admin, is_public=false, sees only private images
images = list_images('none', role='admin', is_public='false')
        self.assertEqual(len(images), 4)
for image in images:
self.assertFalse(image['is_public'])
# 7. Known admin sees public and their own images
images = list_images('admin', role='admin')
        self.assertEqual(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'admin')
# 8. Known admin, is_public=none, shows all images
images = list_images('admin', role='admin', is_public='none')
        self.assertEqual(len(images), 8)
# 9. Known admin, is_public=true, sees all public and their images
images = list_images('admin', role='admin', is_public='true')
        self.assertEqual(len(images), 5)
for image in images:
self.assertTrue(image['is_public'] or image['owner'] == 'admin')
# 10. Known admin, is_public=false, sees all private images
images = list_images('admin', role='admin', is_public='false')
        self.assertEqual(len(images), 4)
for image in images:
self.assertFalse(image['is_public'])
def test_property_protections(self):
# Enable property protection
self.config(property_protection_file=self.property_file)
self.init()
CREATE_HEADERS = {
'X-Image-Meta-Name': 'MyImage',
'X-Image-Meta-disk_format': 'raw',
'X-Image-Meta-container_format': 'ovf',
'X-Image-Meta-Is-Public': 'True',
'X-Image-Meta-Owner': 'tenant2',
}
# Create an image for role member with extra properties
# Raises 403 since user is not allowed to create 'foo'
auth_headers = {
'X-Auth-Token': 'user1:tenant1:member',
}
custom_props = {
'x-image-meta-property-foo': 'bar'
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Create an image for role member without 'foo'
auth_headers = {
'X-Auth-Token': 'user1:tenant1:member',
}
custom_props = {
'x-image-meta-property-x_owner_foo': 'o_s_bar',
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 201)
# Returned image entity should have 'x_owner_foo'
data = json.loads(content)
self.assertEqual(data['image']['properties']['x_owner_foo'],
'o_s_bar')
# Create an image for role spl_role with extra properties
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_create_prop': 'create_bar',
'X-Image-Meta-Property-spl_read_prop': 'read_bar',
'X-Image-Meta-Property-spl_update_prop': 'update_bar',
'X-Image-Meta-Property-spl_delete_prop': 'delete_bar'
}
auth_headers.update(custom_props)
auth_headers.update(CREATE_HEADERS)
path = "/v1/images"
response, content = self.http.request(path, 'POST',
headers=auth_headers)
self.assertEqual(response.status, 201)
data = json.loads(content)
image_id = data['image']['id']
# Attempt to update two properties, one protected(spl_read_prop), the
# other not(spl_update_prop). Request should be forbidden.
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_read_prop': 'r',
'X-Image-Meta-Property-spl_update_prop': 'u',
'X-Glance-Registry-Purge-Props': 'False'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Attempt to create properties which are forbidden
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_new_prop': 'new',
'X-Glance-Registry-Purge-Props': 'True'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 403)
# Attempt to update, create and delete properties
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
custom_props = {
'X-Image-Meta-Property-spl_create_prop': 'create_bar',
'X-Image-Meta-Property-spl_read_prop': 'read_bar',
'X-Image-Meta-Property-spl_update_prop': 'u',
'X-Glance-Registry-Purge-Props': 'True'
}
auth_headers.update(custom_props)
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'PUT',
headers=auth_headers)
self.assertEqual(response.status, 200)
# Returned image entity should reflect the changes
image = json.loads(content)
# 'spl_update_prop' has update permission for spl_role
# hence the value has changed
self.assertEqual('u', image['image']['properties']['spl_update_prop'])
# 'spl_delete_prop' has delete permission for spl_role
# hence the property has been deleted
self.assertTrue('spl_delete_prop' not in image['image']['properties'])
# 'spl_create_prop' has create permission for spl_role
# hence the property has been created
self.assertEqual('create_bar',
image['image']['properties']['spl_create_prop'])
# Image Deletion should work
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'DELETE',
headers=auth_headers)
self.assertEqual(response.status, 200)
# This image should be no longer be directly accessible
auth_headers = {
'X-Auth-Token': 'user1:tenant1:spl_role',
}
path = "/v1/images/%s" % image_id
response, content = self.http.request(path, 'HEAD',
headers=auth_headers)
self.assertEqual(response.status, 404)
|
apache-2.0
| -2,538,296,298,288,473,000
| 41.339648
| 79
| 0.566205
| false
| 3.977752
| true
| false
| false
|
Vagab0nd/SiCKRAGE
|
lib3/twilio/rest/events/v1/subscription/subscribed_event.py
|
1
|
14449
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class SubscribedEventList(ListResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, subscription_sid):
"""
Initialize the SubscribedEventList
:param Version version: Version that contains the resource
:param subscription_sid: Subscription SID.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventList
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventList
"""
super(SubscribedEventList, self).__init__(version)
# Path Solution
self._solution = {'subscription_sid': subscription_sid, }
self._uri = '/Subscriptions/{subscription_sid}/SubscribedEvents'.format(**self._solution)
def stream(self, limit=None, page_size=None):
"""
Streams SubscribedEventInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists SubscribedEventInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
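    # Illustrative usage sketch, not part of the generated file: `client` is
    # assumed to be a configured twilio.rest.Client and `subscription_sid` a
    # valid Subscription SID.
    #
    #     events = client.events.v1.subscriptions(subscription_sid) \
    #                             .subscribed_events.list(limit=20)
    #     for event in events:
    #         print(event.type, event.version)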
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of SubscribedEventInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return SubscribedEventPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of SubscribedEventInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return SubscribedEventPage(self._version, response, self._solution)
def create(self, type, version=values.unset):
"""
Create the SubscribedEventInstance
:param unicode type: Type of event being subscribed to.
:param unicode version: The schema version that the subscription should use.
:returns: The created SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
data = values.of({'Type': type, 'Version': version, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
)
def get(self, type):
"""
Constructs a SubscribedEventContext
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
return SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=type,
)
def __call__(self, type):
"""
Constructs a SubscribedEventContext
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
return SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=type,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Events.V1.SubscribedEventList>'
class SubscribedEventPage(Page):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, response, solution):
"""
Initialize the SubscribedEventPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param subscription_sid: Subscription SID.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventPage
"""
super(SubscribedEventPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of SubscribedEventInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Events.V1.SubscribedEventPage>'
class SubscribedEventContext(InstanceContext):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, subscription_sid, type):
"""
Initialize the SubscribedEventContext
:param Version version: Version that contains the resource
:param subscription_sid: Subscription SID.
:param type: Type of event being subscribed to.
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
super(SubscribedEventContext, self).__init__(version)
# Path Solution
self._solution = {'subscription_sid': subscription_sid, 'type': type, }
self._uri = '/Subscriptions/{subscription_sid}/SubscribedEvents/{type}'.format(**self._solution)
def update(self, version):
"""
Update the SubscribedEventInstance
:param unicode version: The schema version that the subscription should use.
:returns: The updated SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
data = values.of({'Version': version, })
payload = self._version.update(method='POST', uri=self._uri, data=data, )
return SubscribedEventInstance(
self._version,
payload,
subscription_sid=self._solution['subscription_sid'],
type=self._solution['type'],
)
def delete(self):
"""
Deletes the SubscribedEventInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Events.V1.SubscribedEventContext {}>'.format(context)
class SubscribedEventInstance(InstanceResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact help@twilio.com. """
def __init__(self, version, payload, subscription_sid, type=None):
"""
Initialize the SubscribedEventInstance
:returns: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
super(SubscribedEventInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'type': payload.get('type'),
'version': deserialize.integer(payload.get('version')),
'subscription_sid': payload.get('subscription_sid'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {'subscription_sid': subscription_sid, 'type': type or self._properties['type'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: SubscribedEventContext for this SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventContext
"""
if self._context is None:
self._context = SubscribedEventContext(
self._version,
subscription_sid=self._solution['subscription_sid'],
type=self._solution['type'],
)
return self._context
@property
def account_sid(self):
"""
:returns: Account SID.
:rtype: unicode
"""
return self._properties['account_sid']
@property
def type(self):
"""
:returns: Type of event being subscribed to.
:rtype: unicode
"""
return self._properties['type']
@property
def version(self):
"""
:returns: The schema version that the subscription should use.
:rtype: unicode
"""
return self._properties['version']
@property
def subscription_sid(self):
"""
:returns: Subscription SID.
:rtype: unicode
"""
return self._properties['subscription_sid']
@property
def url(self):
"""
:returns: The URL of this resource.
:rtype: unicode
"""
return self._properties['url']
def update(self, version):
"""
Update the SubscribedEventInstance
:param unicode version: The schema version that the subscription should use.
:returns: The updated SubscribedEventInstance
:rtype: twilio.rest.events.v1.subscription.subscribed_event.SubscribedEventInstance
"""
return self._proxy.update(version, )
def delete(self):
"""
Deletes the SubscribedEventInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Events.V1.SubscribedEventInstance {}>'.format(context)
|
gpl-3.0
| 4,776,578,688,993,812,000
| 35.579747
| 107
| 0.639006
| false
| 4.533731
| false
| false
| false
|
mosaic-cloud/mosaic-components-httpg
|
applications/mosaic-httpg/sources/mosaic_httpg_tester.py
|
1
|
20676
|
import json
import os
import pprint
import random
import string
import struct
import sys
import time
import pika
_verbose = True
_broker_host = "127.0.0.1"
_broker_port = 21688
_broker_user = "guest"
_broker_password = "guest"
_broker_virtual_host = "/"
_handlers_exchange_identifier = "mosaic-http-requests"
_handlers_queue_identifier = "mosaic-http-requests"
_handlers_queue_routing_key = "#"
_reconnect_sleep = 1
_consume_sleep = 1
_glitch_probability_ = 0.0
def _loop () :
while True :
_connection = None
_channel = None
try :
if _verbose : print >> sys.stderr, "[ ] connecting..."
_connection = pika.BlockingConnection (pika.ConnectionParameters (
_broker_host, port = _broker_port, virtual_host = _broker_virtual_host,
credentials = pika.PlainCredentials (_broker_user, _broker_password)))
_channel = _connection.channel ()
except Exception as _error :
if _connection is not None :
try :
_connection.close ()
except :
pass
del _connection
del _channel
if _verbose : print >> sys.stderr, "[ee] failed while connecting: %r; sleeping and then reconnecting..." % (_error,)
time.sleep (_reconnect_sleep)
continue
try :
if _verbose : print >> sys.stderr, "[ ] declaring..."
_channel.exchange_declare (
exchange = _handlers_exchange_identifier, type = "topic",
durable = False, auto_delete = False)
_channel.queue_declare (
queue = _handlers_queue_identifier,
exclusive = False, durable = False, auto_delete = False)
_channel.queue_bind (
queue = _handlers_queue_identifier, exchange = _handlers_exchange_identifier,
routing_key = _handlers_queue_routing_key)
except Exception as _error :
print >> sys.stderr, "[ee] failed while declaring: %r; aborting!" % (_error,)
exit (1)
def _handle (_channel, _method, _properties, _body) :
if _verbose : print >> sys.stderr, "[ ] handling..."
_request_data = _body
_request_content_type = _properties.content_type
_request_content_encoding = _properties.content_encoding
_response_data, _response_content_type, _response_content_encoding, _callback_exchange, _callback_routing_key \
= _handle_message (_request_data, _request_content_type, _request_content_encoding)
if _verbose : print >> sys.stderr, "[ ] publishing: `%s` <- `%s`..." % (_callback_exchange, _callback_routing_key)
_channel.basic_publish (
_callback_exchange, _callback_routing_key, _response_data,
properties = pika.BasicProperties (content_type = _response_content_type, content_encoding = _response_content_encoding),
mandatory = False, immediate = False)
_channel.basic_ack (delivery_tag = _method.delivery_tag, multiple = False)
return
# _channel.basic_qos (prefetch_size = 0, prefetch_count = 16, global_ = False)
if False :
# while _connection.is_alive () :
while True :
_outcome = None
try :
if _verbose : print >> sys.stderr, "[ ] polling..."
_outcome = _channel.basic_get (queue = _handlers_queue_identifier)
except Exception as _error :
del _outcome
if _verbose : print >> sys.stderr, "[ee] failed while polling: %r; exiting loop..." % (_error,)
break
if isinstance (_outcome, pika.spec.Basic.GetOk) :
_handle (_channel, _outcome, _outcome.get_properties (), _outcome.get_body ())
elif isinstance (_outcome, pika.spec.Basic.GetEmpty) :
if _verbose : print >> sys.stderr, "[ ] nothing; sleeping..."
time.sleep (_consume_sleep)
else :
print >> sys.stderr, "[ee] unexpected polling outcome: %r; ignoring" % (_outcome,)
del _outcome
else :
_channel.basic_consume (_handle, queue = _handlers_queue_identifier, exclusive = False, no_ack = False)
_channel.start_consuming ()
try :
_channel.close ()
except :
pass
try :
_connection.close ()
except :
pass
del _connection
del _channel
return
def _handle_message (_request_data, _request_content_type, _request_content_encoding) :
_request, _callback_identifier, _callback_exchange, _callback_routing_key \
= _decode_request_message_body (_request_data, _request_content_type, _request_content_encoding)
_response = _process (_request)
_response_data, _response_content_type, _response_content_encoding \
= _encode_response_message_body (_response, _callback_identifier)
_glitch = _maybe_glitch (_response, _callback_identifier, _response_data, _response_content_type, _response_content_encoding)
if _glitch is not None :
_response_data, _response_content_type, _response_content_encoding = _glitch
return (_response_data, _response_content_type, _response_content_encoding, _callback_exchange, _callback_routing_key)
def _encode_response_message_body (_response, _callback_identifier) :
if _verbose : print >> sys.stderr, "[ ] encoding message:"
_decoded_headers = {
"version" : 1,
"callback-identifier" : _callback_identifier,
"http-version" : _response.http_version,
"http-code" : _response.http_code,
"http-status" : _response.http_status,
"http-headers" : _response.http_headers,
"http-body" : "following"
}
if _verbose : print >> sys.stderr, "[ ] -> decoded headers:"
if _verbose : pprint.pprint (_decoded_headers, sys.stderr)
_decoded_body = _response.http_body
if _verbose : print >> sys.stderr, "[ ] -> decoded body:"
    if _verbose : print >> sys.stderr, _decoded_body
_encoded_headers = json.dumps (_decoded_headers, False, True, False, True, None, None, None, 'utf-8')
_encoded_headers_size = len (_encoded_headers)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers size: %d" % (_encoded_headers_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers: %r" % (_encoded_headers,)
_encoded_body = _response.http_body
_encoded_body_size = len (_encoded_body)
if _verbose : print >> sys.stderr, "[ ] -> encoded body size: %d" % (_encoded_body_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded body: %r" % (_encoded_body,)
_data = ''.join ([
struct.pack (">L", _encoded_headers_size),
_encoded_headers,
struct.pack (">L", _encoded_body_size),
_encoded_body])
_data_size = len (_data)
if _verbose : print >> sys.stderr, "[ ] -> data size: %d" % (_data_size)
if _verbose : print >> sys.stderr, "[ ] -> data: %r" % (_data,)
_content_type = 'application/octet-stream'
_content_encoding = 'binary'
if _verbose : print >> sys.stderr, "[ ] -> content type: %r;" % (_content_type,)
if _verbose : print >> sys.stderr, "[ ] -> content encoding: %r;" % (_content_encoding,)
return (_data, _content_type, _content_encoding)
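# Illustrative sketch of the wire format built above and parsed below in
# _decode_request_message_body: a 4-byte big-endian headers length, the
# JSON-encoded headers, a 4-byte big-endian body length, then the raw body.
#
#     _headers = json.dumps ({"version" : 1, "http-body" : "following"})
#     _body = "Ok"
#     _message = (struct.pack (">L", len (_headers)) + _headers
#             + struct.pack (">L", len (_body)) + _body)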
def _decode_request_message_body (_data, _content_type, _content_encoding) :
if _verbose : print >> sys.stderr, "[ ] decoding message:"
if _verbose : print >> sys.stderr, "[ ] -> content type: %r;" % (_content_type,)
if _verbose : print >> sys.stderr, "[ ] -> content encoding: %r;" % (_content_encoding,)
_data_size = len (_data)
if _verbose : print >> sys.stderr, "[ ] -> data size: %d;" % (_data_size,)
if _verbose : print >> sys.stderr, "[ ] -> data: %r;" % (_data,)
assert _content_type == 'application/octet-stream'
assert _content_encoding == 'binary'
assert _data_size >= 4
_encoded_headers_size = struct.unpack (">L", _data[0:4]) [0]
_encoded_headers_offset = 4
_encoded_headers_limit = _encoded_headers_offset + _encoded_headers_size
assert _data_size >= _encoded_headers_limit
_encoded_headers = _data[_encoded_headers_offset : _encoded_headers_limit]
if _verbose : print >> sys.stderr, "[ ] -> encoded headers size: %d;" % (_encoded_headers_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded headers: %r;" % (_encoded_headers,)
_decoded_headers = json.loads (_encoded_headers, 'utf-8')
if _verbose : print >> sys.stderr, "[ ] -> decoded headers: %r;" % (_decoded_headers,)
if _verbose : print >> sys.stderr, "[ ] -> decoded headers:"
if _verbose : pprint.pprint (_decoded_headers, sys.stderr)
assert _decoded_headers.get ('version') == 1
_http_body_type = _decoded_headers.get ('http-body')
if _http_body_type == 'empty' :
assert _data_size == _encoded_headers_limit
_encoded_body = ''
_encoded_body_size = len (_encoded_body)
elif _http_body_type == 'embedded' :
assert _data_size == _encoded_headers_limit
_encoded_body = _decoded_headers.get ('http-body-content')
_encoded_body_size = len (_encoded_body)
elif _http_body_type == 'following' :
assert _data_size >= _encoded_headers_limit + 4
_encoded_body_size = struct.unpack (">L", _data[_encoded_headers_limit : _encoded_headers_limit + 4]) [0]
_encoded_body_offset = _encoded_headers_limit + 4
_encoded_body_limit = _encoded_body_offset + _encoded_body_size
assert _data_size == _encoded_body_limit
        _encoded_body = _data[_encoded_body_offset : _encoded_body_limit]
else :
assert False
if _verbose : print >> sys.stderr, "[ ] -> encoded body size: %d;" % (_encoded_body_size,)
if _verbose : print >> sys.stderr, "[ ] -> encoded body: %r;" % (_encoded_body,)
_decoded_body = _encoded_body
if _verbose : print >> sys.stderr, "[ ] -> decoded body:"
if _verbose : print >> sys.stderr, _decoded_body
_request = _Request (
socket_remote_ip = _decoded_headers.get ('socket-remote-ip'),
socket_remote_port = _decoded_headers.get ('socket-remote-port'),
socket_remote_fqdn = _decoded_headers.get ('socket-remote-fqdn'),
socket_local_ip = _decoded_headers.get ('socket-local-ip'),
socket_local_port = _decoded_headers.get ('socket-local-port'),
socket_local_fqdn = _decoded_headers.get ('socket-local-fqdn'),
http_version = _decoded_headers.get ('http-version'),
http_method = _decoded_headers.get ('http-method'),
http_uri = _decoded_headers.get ('http-uri'),
http_headers = _decoded_headers.get ('http-headers'),
http_body = _decoded_body)
_callback_identifier = str (_decoded_headers.get ('callback-identifier'))
_callback_exchange = str (_decoded_headers.get ('callback-exchange'))
_callback_routing_key = str (_decoded_headers.get ('callback-routing-key'))
if _verbose : print >> sys.stderr, "[ ] -> callback identifier: %r;" % (_callback_identifier,)
if _verbose : print >> sys.stderr, "[ ] -> callback exchange: %r;" % (_callback_exchange,)
if _verbose : print >> sys.stderr, "[ ] -> callback routing key: %r;" % (_callback_routing_key,)
return (_request, _callback_identifier, _callback_exchange, _callback_routing_key)
class _Request (object) :
def __init__ (self,
socket_remote_ip = None, socket_remote_port = None, socket_remote_fqdn = None,
socket_local_ip = None, socket_local_port = None, socket_local_fqdn = None,
http_version = None, http_method = None, http_uri = None,
http_headers = None, http_body = None) :
self.socket_remote_ip = socket_remote_ip
self.socket_remote_port = socket_remote_port
self.socket_remote_fqdn = socket_remote_fqdn
self.socket_local_ip = socket_local_ip
self.socket_local_port = socket_local_port
self.socket_local_fqdn = socket_local_fqdn
self.http_version = http_version
self.http_method = http_method
self.http_uri = http_uri
self.http_headers = http_headers
self.http_body = http_body
return
class _Response (object) :
def __init__ (self,
http_version = None, http_code = None, http_status = None,
http_headers = None, http_body = None) :
self.http_version = http_version
self.http_code = http_code
self.http_status = http_status
self.http_headers = http_headers
self.http_body = http_body
return
def _process (_request) :
if _verbose : print >> sys.stderr, "[ ] processing:"
if _verbose : print >> sys.stderr, "[ ] -> method: %s" % (_request.http_method,)
if _verbose : print >> sys.stderr, "[ ] -> uri: %s" % (_request.http_uri,)
_body = "Ok: pid = %d, time = %f" % (os.getpid (), time.time ())
_response = _Response (
http_version = _request.http_version,
http_code = 200,
http_status = "Ok",
http_headers = {
"Content-Length" : str (len (_body)),
"Content-Type" : "text/plain",
},
http_body = _body)
return _response
def _maybe_glitch (_response_, _callback_identifier_, _response_data_, _response_content_type_, _response_content_encoding_) :
global _glitch_probability_
if random.random () > _glitch_probability_ :
sys.stderr.write ('.')
return None
sys.stderr.write ('!')
_response_data = None
_response_content_type = None
_response_content_encoding = None
_response_headers_data = None
_response_headers_size = None
_response_body_data = None
_response_body_size = None
_response_headers = {
"version" : 1,
"callback-identifier" : _callback_identifier_,
"http-version" : _response_.http_version,
"http-code" : _response_.http_code,
"http-status" : _response_.http_status,
"http-headers" : _response_.http_headers,
"http-body" : "following"}
_response_body = None
if not hasattr (_maybe_glitch, "_glitches") :
_glitches = [
('content-type/none', 0.1), ('content-type/random', 0.1), ('content-type/garbage', 0.1),
('content-encoding/none', 0.1), ('content-encoding/random', 0.1), ('content-encoding/garbage', 0.1),
('response-headers/version', 0.1), ('response-headers/callback-identifier', 0.1),
('response-headers/http-version', 0.1), ('response-headers/http-code', 0.0), ('response-headers/http-status', 0.1),
('response-headers/http-headers', 0.1), ('response-headers/http-body', 0.1), ('response-headers/http-body-content', 0.1),
('response-body/none', 0.01), ('response-body/random', 0.01), ('response-body/garbage', 0.01),
('response-data/none', 0.01), ('response-data/random', 0.01), ('response-data/garbage', 0.01),
('response-headers-data/none', 0.01), ('response-headers-data/random', 0.01), ('response-headers-data/garbage', 0.01),
('response-headers-data/size', 0.01), ('response-body-data/size', 0.01)]
_sum = 0.0
for _glitch_identifier, _glitch_probability in _glitches :
_sum += _glitch_probability
for _glitch_index in xrange (len (_glitches)) :
_glitches[_glitch_index] = (_glitches[_glitch_index][0], _glitches[_glitch_index][1] / _sum)
_maybe_glitch._glitches = _glitches
else :
_glitches = _maybe_glitch._glitches
while True :
_glitch = None
_glitch_remaining_probability = 1.0
for _glitch_identifier, _glitch_probability in _glitches :
if random.random () <= (_glitch_probability / _glitch_remaining_probability) :
_glitch = _glitch_identifier
break
_glitch_remaining_probability -= _glitch_probability
assert _glitch is not None
if _glitch == 'content-type/none' :
if _response_content_type is not None :
continue
_response_content_type = ''
elif _glitch == 'content-type/random' :
if _response_content_type is not None :
continue
_response_content_type = _generate_printable_string (1, 64)
elif _glitch == 'content-type/garbage' :
if _response_content_type is not None :
continue
_response_content_type = _generate_garbage_string (1, 64)
elif _glitch == 'content-encoding/none' :
if _response_content_encoding is not None :
continue
_response_content_encoding = ''
elif _glitch == 'content-encoding/random' :
if _response_content_encoding is not None :
continue
_response_content_encoding = _generate_printable_string (1, 64)
elif _glitch == 'content-encoding/garbage' :
if _response_content_encoding is not None :
continue
_response_content_encoding = _generate_garbage_string (1, 64)
elif _glitch == 'response-data/none' :
if _response_data is not None :
continue
_response_data = ''
elif _glitch == 'response-data/random' :
if _response_data is not None :
continue
_response_data = _generate_printable_string (1, 128)
elif _glitch == 'response-data/garbage' :
if _response_data is not None :
continue
_response_data = _generate_garbage_string (1, 128)
elif _glitch == 'response-headers-data/none' :
if _response_headers_data is not None :
continue
_response_headers_data = ''
elif _glitch == 'response-headers-data/random' :
if _response_headers_data is not None :
continue
_response_headers_data = _generate_printable_string (1, 128)
elif _glitch == 'response-headers-data/garbage' :
if _response_headers_data is not None :
continue
_response_headers_data = _generate_garbage_string (1, 128)
elif _glitch == 'response-headers-data/size' :
if _response_headers_size is not None :
continue
            _response_headers_size = random.randint (0, (1 << 32) - 1)
        elif _glitch == 'response-body-data/size' :
            if _response_body_size is not None :
                continue
            _response_body_size = random.randint (0, (1 << 32) - 1)
elif _glitch == 'response-headers/version' :
_response_headers['version'] = _generate_random_json ()
elif _glitch == 'response-headers/callback-identifier' :
_response_headers['callback-identifier'] = _generate_random_json ()
elif _glitch == 'response-headers/http-version' :
_response_headers['http-version'] = _generate_random_json ()
elif _glitch == 'response-headers/http-code' :
_response_headers['http-code'] = _generate_random_json ()
elif _glitch == 'response-headers/http-status' :
_response_headers['http-status'] = _generate_random_json ()
elif _glitch == 'response-headers/http-headers' :
_response_headers['http-headers'] = _generate_random_json ()
elif _glitch == 'response-headers/http-body' :
_response_headers['http-body'] = _generate_random_json ()
elif _glitch == 'response-headers/http-body-content' :
_response_headers['http-body-content'] = _generate_random_json ()
elif _glitch == 'response-body/none' :
if _response_body is not None :
continue
_response_body = ''
elif _glitch == 'response-body/random' :
if _response_body is not None :
continue
_response_body = _generate_printable_string (1, 128)
elif _glitch == 'response-body/garbage' :
if _response_body is not None :
continue
_response_body = _generate_garbage_string (1, 128)
else :
print >> sys.stderr, '[ee] unknown glitch: ' + _glitch
if _response_data is not None :
break
        if random.random () > 0.2 :
break
if _response_data is None :
if _response_headers_data is None :
_response_headers_data = json.dumps (_response_headers, False, True, False, True, None, None, None, 'utf-8')
if _response_headers_size is None :
_response_headers_size = len (_response_headers_data)
_response_headers_data = struct.pack (">L", _response_headers_size) + _response_headers_data
if _response_body_data is None :
if _response_body is None :
_response_body = _response_.http_body
_response_body_data = _response_body
if _response_body_size is None :
_response_body_size = len (_response_body_data)
_response_body_data = struct.pack (">L", _response_body_size) + _response_body_data
_response_data = _response_headers_data + _response_body_data
if _response_content_type is None :
_response_content_type = _response_content_type_
if _response_content_encoding is None :
_response_content_encoding = _response_content_encoding_
return _response_data, _response_content_type, _response_content_encoding
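# _maybe_glitch is a no-op while _glitch_probability_ is 0.0; to exercise the
# glitch paths, raise the module-level probability before calling _loop, e.g.:
#
#     _glitch_probability_ = 0.25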
def _generate_printable_string (_min_length, _max_length) :
    return ''.join ([chr (random.randint (32, 126)) for i in xrange (random.randint (_min_length, _max_length))])
def _generate_garbage_string (_min_length, _max_length) :
return ''.join ([chr (random.randint (0, 255)) for i in xrange (random.randint (_min_length, _max_length))])
def _generate_random_json (_depth_probability = 1.0) :
if random.random () < _depth_probability :
_choice = random.randint (0, 5)
else :
_choice = random.randint (0, 3)
if _choice == 0 :
return _generate_printable_string (1, 32)
elif _choice == 1 :
        return random.randint (-1 << 31, (1 << 31) - 1)
    elif _choice == 2 :
        return random.random () * random.randint (-1 << 31, (1 << 31) - 1)
elif _choice == 3 :
return random.choice ([True, False, None])
elif _choice == 4 :
return [_generate_random_json (_depth_probability * 0.01) for i in xrange (0, 128)]
elif _choice == 5 :
_dict = {}
for i in xrange (0, 128) :
_dict[_generate_printable_string (1, 32)] = _generate_random_json (_depth_probability * 0.01)
return _dict
else :
assert False
return None
if __name__ == '__main__' :
assert len (sys.argv) == 1
_loop ()
|
apache-2.0
| -3,355,641,044,008,359,000
| 34.895833
| 126
| 0.653415
| false
| 3.116672
| false
| false
| false
|
pferreir/indico-backup
|
indico/MaKaC/plugins/Collaboration/ravem.py
|
1
|
3751
|
# -*- coding: utf-8 -*-
##
## $id$
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from MaKaC.plugins.Collaboration.collaborationTools import CollaborationTools
from indico.core.logger import Logger
from requests.auth import HTTPDigestAuth
import requests
from urllib import urlencode
class RavemClient(object):
""" Singleton for the client for RAVEM API
"""
_instance = None
def __init__(self, username, password, url):
self._username = username
self._password = password
self._url = url
def performOperation(self, operation):
data = requests.get(self._url + operation, auth=HTTPDigestAuth(self._username, self._password), verify=False)
return data
@classmethod
def getInstance(cls, ravem_api_url=None, username=None, password=None):
if cls._instance is None or (ravem_api_url is not None or username is not None or password is not None):
if ravem_api_url is None:
ravem_api_url = CollaborationTools.getCollaborationOptionValue('ravemAPIURL')
if username is None:
username = CollaborationTools.getCollaborationOptionValue('ravemUsername')
if password is None:
password = CollaborationTools.getCollaborationOptionValue('ravemPassword')
try:
cls._instance = RavemClient(username, password, ravem_api_url)
except Exception:
Logger.get("Ravem").exception("Problem building RavemClient")
raise
return cls._instance
class RavemApi(object):
""" This class performs low-level operations by getting the corresponding
client and calling a service.
"""
@classmethod
def _api_operation(cls, service, *args, **kwargs):
try:
url = "/%s?%s" % (service, urlencode(kwargs))
ravemClient = RavemClient.getInstance()
return ravemClient.performOperation(url)
except Exception, e:
            Logger.get('Ravem').exception("""Ravem API's '%s' operation not successful: %s""" % (service, e.message))
raise
@classmethod
def isLegacyEndpointConnected(cls, room_ip):
return cls._api_operation("getstatus", where="vc_endpoint_legacy_ip", value=room_ip)
@classmethod
def isVidyoPanoramaConnected(cls, vidyo_panorama_id):
return cls._api_operation("getstatus", where="vc_endpoint_vidyo_username", value=vidyo_panorama_id)
@classmethod
def disconnectLegacyEndpoint(cls, room_ip, service_type, room_name):
return cls._api_operation("videoconference/disconnect", type=service_type, where="vc_endpoint_legacy_ip",
value=room_ip, vidyo_room_name=room_name)
@classmethod
def disconnectVidyoPanorama(cls, vidyo_panorama_id, service_type, room_name):
return cls._api_operation("videoconference/disconnect", type=service_type, where="vc_endpoint_vidyo_username",
value=vidyo_panorama_id, vidyo_room_name=room_name)
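# Illustrative usage sketch (hypothetical room IP): performOperation returns a
# `requests` Response object, so callers inspect it directly.
#
#     response = RavemApi.isLegacyEndpointConnected("137.138.0.1")
#     if response.ok:
#         status = response.json()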
|
gpl-3.0
| 205,378,133,880,764,830
| 39.333333
| 118
| 0.672354
| false
| 3.948421
| false
| false
| false
|
helixyte/TheLMA
|
thelma/repositories/rdb/schema/tables/samplemolecule.py
|
1
|
1342
|
"""
This file is part of the TheLMA (THe Laboratory Management Application) project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Sample molecule table.
"""
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Table
__docformat__ = 'reStructuredText en'
__all__ = ['create_table']
def create_table(metadata, sample_tbl, molecule_tbl):
"Table factory."
tbl = Table('sample_molecule', metadata,
Column('sample_id', Integer,
ForeignKey(sample_tbl.c.sample_id,
onupdate='CASCADE', ondelete='CASCADE'),
primary_key=True, index=True),
Column('molecule_id', Integer,
ForeignKey(molecule_tbl.c.molecule_id,
onupdate='CASCADE', ondelete='RESTRICT'),
primary_key=True, index=True),
Column('concentration', Float, CheckConstraint('concentration>=0.0')),
Column('freeze_thaw_cycles', Integer,
CheckConstraint('freeze_thaw_cycles IS NULL OR '
'freeze_thaw_cycles >= 0')),
Column('checkout_date', DateTime(timezone=True)),
)
return tbl
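# Illustrative usage sketch (assumes `sample_tbl` and `molecule_tbl` come from
# their own table factories on the same metadata, and `engine` is a configured
# SQLAlchemy engine):
#
#     from sqlalchemy import MetaData
#     metadata = MetaData()
#     sample_molecule_tbl = create_table(metadata, sample_tbl, molecule_tbl)
#     metadata.create_all(engine)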
|
mit
| -2,361,379,783,342,005,000
| 35.27027
| 80
| 0.646796
| false
| 4.549153
| false
| false
| false
|
bdang2012/taiga-back-casting
|
taiga/timeline/apps.py
|
1
|
1772
|
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.db.models import signals
from . import signals as handlers
from taiga.projects.history.models import HistoryEntry
class TimelineAppConfig(AppConfig):
name = "taiga.timeline"
verbose_name = "Timeline"
def ready(self):
signals.post_save.connect(handlers.on_new_history_entry, sender=HistoryEntry, dispatch_uid="timeline")
signals.pre_save.connect(handlers.create_membership_push_to_timeline,
sender=apps.get_model("projects", "Membership"))
signals.post_delete.connect(handlers.delete_membership_push_to_timeline,
sender=apps.get_model("projects", "Membership"))
signals.post_save.connect(handlers.create_user_push_to_timeline,
sender=apps.get_model("users", "User"))
|
agpl-3.0
| -1,320,559,556,682,931,500
| 48.166667
| 110
| 0.697175
| false
| 3.995485
| false
| false
| false
|
andrewthetechie/slack_rtmbot_core_plugins
|
status/status.py
|
1
|
1633
|
#! env/bin/python
from datetime import timedelta
import psutil
import re
import yaml
outputs = []
# load default configs
config = yaml.load(file('conf/rtmbot.conf', 'r'))
def status_main():
"""
Does the work of checking the server's status
Returns the message to output
:return: message
"""
message = "Unable to check server status"
cpu_usage = psutil.cpu_percent()
disk_io = psutil.disk_io_counters(perdisk=False)
with open('/proc/uptime', 'r') as f:
uptime_seconds = float(f.readline().split()[0])
uptime_string = str(timedelta(seconds=uptime_seconds))
if cpu_usage and disk_io and uptime_string:
message = "Load: {}\nDisk IO: {}\nUptime: {}".format(cpu_usage, disk_io, uptime_string)
return message
def process_directmessage(data):
match = re.match(r"status", data['text'], flags=re.IGNORECASE)
if match:
message = status_main()
outputs.append([data['channel'], "{}".format(message)])
return
def process_message(data):
match = re.match(r"{} status".format(config['BOT_NAME']), data['text'], flags=re.IGNORECASE)
if match:
message = status_main()
outputs.append([data['channel'], "{}".format(message)])
return
def process_help():
dm_help = []
channel_help = []
plugin_help = []
# setup help
dm_help.append("status - Responds with some basic status on the server running the bot")
channel_help.append("status - Responds with some basic status on the server running the bot")
plugin_help.append(dm_help)
plugin_help.append(channel_help)
return plugin_help
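# Illustrative sketch of the plugin contract assumed here (hypothetical event
# dict; the rtmbot core is assumed to drain the module-level `outputs` list
# after each callback):
#
#     process_directmessage({'text': 'status', 'channel': 'D024BE91L'})
#     # outputs now ends with ['D024BE91L', 'Load: ...\nDisk IO: ...\nUptime: ...']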
|
lgpl-3.0
| -2,207,216,231,104,388,900
| 25.33871
| 97
| 0.6485
| false
| 3.645089
| false
| false
| false
|
openprocurement/openprocurement.tender.esco
|
openprocurement/tender/esco/tests/bid.py
|
1
|
8048
|
# -*- coding: utf-8 -*-
import unittest
from esculator import npv, escp
from openprocurement.api.utils import get_now
from openprocurement.tender.esco.tests.base import (
test_bids, test_features_tender_data,
BaseESCOContentWebTest, NBU_DISCOUNT_RATE
)
from openprocurement.tender.belowthreshold.tests.base import (
test_organization,
)
from openprocurement.api.tests.base import snitch
from openprocurement.tender.belowthreshold.tests.bid_blanks import (
# TenderBidBatchDocumentWithDSResourceTest
create_tender_bid_with_documents,
create_tender_bid_with_document_invalid,
create_tender_bid_with_document,
)
from openprocurement.tender.openeu.tests.bid import TenderBidDocumentResourceTestMixin
from openprocurement.tender.openeu.tests.bid_blanks import (
# TenderBidDocumentWithDSResourceTest
patch_tender_bidder_document_private_json,
put_tender_bidder_document_private_json,
get_tender_bidder_document_ds,
# TenderBidDocumentResourceTest
create_tender_bidder_document_nopending,
# TenderBidBatchDocumentWithDSResourceTest
create_tender_bid_with_all_documents,
create_tender_bid_with_eligibility_document_invalid,
create_tender_bid_with_financial_document_invalid,
create_tender_bid_with_qualification_document_invalid,
create_tender_bid_with_eligibility_document,
create_tender_bid_with_qualification_document,
create_tender_bid_with_financial_document,
create_tender_bid_with_financial_documents,
create_tender_bid_with_eligibility_documents,
create_tender_bid_with_qualification_documents,
get_tender_bidder,
get_tender_tenderers,
)
from openprocurement.tender.esco.tests.bid_blanks import (
create_tender_bid_invalid,
create_tender_bid,
patch_tender_bid,
deleted_bid_is_not_restorable,
bid_Administrator_change,
bids_activation_on_tender_documents,
features_bid_invalid,
features_bid,
patch_and_put_document_into_invalid_bid,
# TenderBidResourceTest
delete_tender_bidder,
bids_invalidation_on_tender_change,
deleted_bid_do_not_locks_tender_in_state,
create_tender_bid_invalid_funding_kind_budget,
create_tender_bid_31_12,
)
from openprocurement.tender.esco.utils import to_decimal
bid_amountPerformance = round(to_decimal(npv(
test_bids[0]['value']['contractDuration']['years'],
test_bids[0]['value']['contractDuration']['days'],
test_bids[0]['value']['yearlyPaymentsPercentage'],
test_bids[0]['value']['annualCostsReduction'],
get_now(),
NBU_DISCOUNT_RATE)), 2)
bid_amount = round(to_decimal(escp(
test_bids[0]['value']['contractDuration']['years'],
test_bids[0]['value']['contractDuration']['days'],
test_bids[0]['value']['yearlyPaymentsPercentage'],
test_bids[0]['value']['annualCostsReduction'],
get_now())), 2)
class TenderBidResourceTest(BaseESCOContentWebTest):
initial_status = 'active.tendering'
test_bids_data = test_bids
author_data = test_bids_data[0]['tenderers'][0]
expected_bid_amountPerformance = bid_amountPerformance
expected_bid_amount = bid_amount
test_create_tender_bid_invalid = snitch(create_tender_bid_invalid)
test_create_tender_bid = snitch(create_tender_bid)
test_patch_tender_bid = snitch(patch_tender_bid)
test_get_tender_bidder = snitch(get_tender_bidder)
test_deleted_bid_do_not_locks_tender_in_state = snitch(deleted_bid_do_not_locks_tender_in_state)
test_get_tender_tenderers = snitch(get_tender_tenderers)
test_deleted_bid_is_not_restorable = snitch(deleted_bid_is_not_restorable)
test_bid_Administrator_change = snitch(bid_Administrator_change)
test_bids_activation_on_tender_documents = snitch(bids_activation_on_tender_documents)
test_delete_tender_bidder = snitch(delete_tender_bidder)
test_bids_invalidation_on_tender_change = snitch(bids_invalidation_on_tender_change)
test_create_tender_bid_invalid_funding_kind_budget = snitch(create_tender_bid_invalid_funding_kind_budget)
test_create_tender_bid_31_12 = snitch(create_tender_bid_31_12)
class TenderBidFeaturesResourceTest(BaseESCOContentWebTest):
initial_status = 'active.tendering'
initial_data = test_features_tender_data
test_bids_data = test_bids
test_features_bid = snitch(features_bid)
test_features_bid_invalid = snitch(features_bid_invalid)
class TenderBidDocumentResourceTest(BaseESCOContentWebTest, TenderBidDocumentResourceTestMixin):
initial_auth = ('Basic', ('broker', ''))
initial_status = 'active.tendering'
test_bids_data = test_bids
def setUp(self):
super(TenderBidDocumentResourceTest, self).setUp()
# Create bid
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), {'data': test_bids[0]})
bid = response.json['data']
self.bid_id = bid['id']
self.bid_token = response.json['access']['token']
# create second bid
response = self.app.post_json('/tenders/{}/bids'.format(
self.tender_id), {'data': test_bids[1]})
bid2 = response.json['data']
self.bid2_id = bid2['id']
self.bid2_token = response.json['access']['token']
test_patch_and_put_document_into_invalid_bid = snitch(patch_and_put_document_into_invalid_bid)
test_create_tender_bidder_document_nopending = snitch(create_tender_bidder_document_nopending)
class TenderBidDocumentWithDSResourceTest(TenderBidDocumentResourceTest):
docservice = True
test_patch_tender_bidder_document_private_json = snitch(patch_tender_bidder_document_private_json)
test_put_tender_bidder_document_private_json = snitch(put_tender_bidder_document_private_json)
test_get_tender_bidder_document_ds = snitch(get_tender_bidder_document_ds)
class TenderBidDocumentWithoutDSResourceTest(TenderBidDocumentResourceTest):
docservice = False
class TenderBidBatchDocumentsWithDSResourceTest(BaseESCOContentWebTest):
docservice = True
initial_status = 'active.tendering'
bid_data_wo_docs = {
'tenderers': [test_organization],
'value': test_bids[0]['value'],
'selfEligible': True,
'selfQualified': True,
'documents': []
}
test_create_tender_bid_with_document_invalid = snitch(create_tender_bid_with_document_invalid)
test_create_tender_bid_with_document = snitch(create_tender_bid_with_document)
test_create_tender_bid_with_documents = snitch(create_tender_bid_with_documents)
test_create_tender_bid_with_eligibility_document_invalid = snitch(create_tender_bid_with_eligibility_document_invalid)
test_create_tender_bid_with_eligibility_document = snitch(create_tender_bid_with_eligibility_document)
test_create_tender_bid_with_eligibility_documents = snitch(create_tender_bid_with_eligibility_documents)
test_create_tender_bid_with_qualification_document_invalid = snitch(create_tender_bid_with_qualification_document_invalid)
test_create_tender_bid_with_qualification_document = snitch(create_tender_bid_with_qualification_document)
test_create_tender_bid_with_qualification_documents = snitch(create_tender_bid_with_qualification_documents)
test_create_tender_bid_with_financial_document_invalid = snitch(create_tender_bid_with_financial_document_invalid)
test_create_tender_bid_with_financial_document = snitch(create_tender_bid_with_financial_document)
test_create_tender_bid_with_financial_documents = snitch(create_tender_bid_with_financial_documents)
test_create_tender_bid_with_all_documents = snitch(create_tender_bid_with_all_documents)
def suite():
suite = unittest.TestSuite()
suite.addTest(TenderBidResourceTest)
suite.addTest(TenderBidFeaturesResourceTest)
suite.addTest(TenderBidDocumentResourceTest)
suite.addTest(TenderBidDocumentWithDSResourceTest)
suite.addTest(TenderBidDocumentWithoutDSResourceTest)
suite.addTest(TenderBidBatchDocumentsWithDSResourceTest)
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
apache-2.0
| -7,882,602,119,402,993
| 41.136126
| 126
| 0.742917
| false
| 3.232129
| true
| false
| false
|
disulfiram/AntFarm
|
Game.py
|
1
|
3954
|
import pygame
import sys
from Menu.MenuItem import *
white = (255, 255, 255)
black = (0, 0, 0)
grey = (195, 195, 195)
pink = (242, 217, 229)
green = (210, 255, 191)
# TODO: More robust menu system. Different menus with different menu items. Maybe have button action here.
main_menu = [MenuItem("New Game", True), MenuItem("Settings"), MenuItem("About"), MenuItem("Exit")]
settings_menu = [MenuItem("Resolution", True), MenuItem("Controls"), MenuItem("Full screen")]
_window_border = 100
# TODO: These need to be parsed from ini file and set in the menu.
window_width = 800
window_height = 600
fps = 30
def main():
menu_loop()
pygame.quit()
def draw_menu(game_display, menu):
    menu_items_count = len(menu)
button_index = 0
for menu_item in menu:
# Calculating button size
b_width = window_width * 0.3
b_height = window_height * 0.1
b_top = _window_border + (button_index * (window_height - b_height - _window_border * 2) / (menu_items_count - 1))
b_left = (window_width / 2) - (b_width / 2)
# Checking button state
button_state = MenuItemStates.inactive
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if b_left + b_width > mouse[0] > b_left and b_top + b_height > mouse[1] > b_top:
if click[0] == 1: # and action != None:
button_state = MenuItemStates.active
else:
change_active_item(menu, menu_item)
button_state = MenuItemStates.highlight
elif menu_item.highlight:
button_state = MenuItemStates.highlight
# Drawing the button
button(game_display, menu_item, button_state, b_left, b_top, b_width, b_height, pink, white)
button_index += 1
def change_active_item(menu, menu_item):
if menu_item.highlight:
return
for item in menu:
if item == menu_item:
item.highlight = True
else:
item.highlight = False
def text_objects(text, font, color=black):
text_surface = font.render(text, True, color)
return text_surface, text_surface.get_rect()
def button(game_display, m_item, state, left, top, width, height, inactive_colour, active_colour, action=None):
if state == MenuItemStates.active:
pygame.draw.rect(game_display, active_colour, (left, top, width, height))
# TODO: Still not sure how this will work, but it needs to.
# action()
elif state == MenuItemStates.highlight:
        # TODO: This will crash if one or more of the values of inactive_colour is greater than 245. Fix later! Experiment with increasing only the values that are below 245, or increasing these values to a maximum of 255.
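        # A minimal sketch of the clamping fix described in the TODO above
        # (hypothetical helper, not wired in yet):
        #   def brighten(colour, amount=10):
        #       return tuple(min(c + amount, 255) for c in colour)
        #   highlight_colour = brighten(inactive_colour)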
highlight_colour = (inactive_colour[0] + 10, inactive_colour[1] + 10, inactive_colour[2] + 10)
pygame.draw.rect(game_display, highlight_colour, (left, top, width, height))
elif state == MenuItemStates.inactive:
pygame.draw.rect(game_display, inactive_colour, (left, top, width, height))
# TODO: change font size based on button size and/or window size
small_text = pygame.font.SysFont("Berlin Sans FB", 20)
text_surf, text_rect = text_objects(m_item.text, small_text, white)
text_rect.center = ((left + (width / 2)), (top + (height / 2)))
game_display.blit(text_surf, text_rect)
def menu_loop(current_menu=main_menu):
pygame.init()
game_exit = False
pygame.display.set_caption('Ant Farm')
game_display = pygame.display.set_mode((window_width, window_height))
game_display.fill(grey)
clock = pygame.time.Clock()
while not game_exit:
for event in pygame.event.get():
if event.type == pygame.QUIT:
game_exit = True
draw_menu(game_display, current_menu)
pygame.display.update()
clock.tick(fps)
def game_loop():
return 0
if __name__ == "__main__":
sys.exit(main())
|
gpl-3.0
| -1,382,443,975,517,226,800
| 33.382609
| 219
| 0.634294
| false
| 3.505319
| false
| false
| false
|
KimTaehee/eucalyptus
|
admin-tools/eucalyptus_admin/commands/bootstrap/__init__.py
|
1
|
2345
|
# Copyright 2015 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
import requestbuilder.auth.aws
import requestbuilder.request
import requestbuilder.service
from eucalyptus_admin.commands import EucalyptusAdmin
from eucalyptus_admin.exceptions import AWSError
from eucalyptus_admin.util import add_fake_region_name
class Bootstrap(requestbuilder.service.BaseService):
NAME = 'bootstrap'
DESCRIPTION = 'Bootstrap service'
REGION_ENVVAR = 'AWS_DEFAULT_REGION'
URL_ENVVAR = 'EUCA_BOOTSTRAP_URL'
ARGS = [Arg('-U', '--url', metavar='URL',
help='bootstrap service endpoint URL')]
def configure(self):
requestbuilder.service.BaseService.configure(self)
add_fake_region_name(self)
def handle_http_error(self, response):
raise AWSError(response)
class BootstrapRequest(requestbuilder.request.AWSQueryRequest):
SUITE = EucalyptusAdmin
SERVICE_CLASS = Bootstrap
AUTH_CLASS = requestbuilder.auth.aws.HmacV4Auth
API_VERSION = 'eucalyptus'
METHOD = 'POST'
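# A minimal sketch of a concrete request built on BootstrapRequest
# (hypothetical action name; the real commands live in sibling modules):
#
#   class DescribeServices(BootstrapRequest):
#       DESCRIPTION = 'Describe the services registered with the cloud'
#       ARGS = []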
|
gpl-3.0
| -4,270,303,309,189,916,700
| 39.431034
| 74
| 0.764606
| false
| 4.10683
| false
| false
| false
|
weiyuanke/mykeystone
|
keystone/openstack/common/cfg.py
|
1
|
52287
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
r"""
Configuration options which may be set on the command line or in config files.
The schema for each option is defined using the Opt sub-classes, e.g.:
::
common_opts = [
cfg.StrOpt('bind_host',
default='0.0.0.0',
help='IP address to listen on'),
cfg.IntOpt('bind_port',
default=9292,
help='Port number to listen on')
]
Options can be strings, integers, floats, booleans, lists or 'multi strings'::
enabled_apis_opt = cfg.ListOpt('enabled_apis',
default=['ec2', 'osapi_compute'],
help='List of APIs to enable by default')
DEFAULT_EXTENSIONS = [
'nova.api.openstack.compute.contrib.standard_extensions'
]
osapi_compute_extension_opt = cfg.MultiStrOpt('osapi_compute_extension',
default=DEFAULT_EXTENSIONS)
Option schemas are registered with the config manager at runtime, but
before the option is referenced::
class ExtensionManager(object):
enabled_apis_opt = cfg.ListOpt(...)
def __init__(self, conf):
self.conf = conf
self.conf.register_opt(enabled_apis_opt)
...
def _load_extensions(self):
for ext_factory in self.conf.osapi_compute_extension:
....
A common usage pattern is for each option schema to be defined in the module or
class which uses the option::
opts = ...
def add_common_opts(conf):
conf.register_opts(opts)
def get_bind_host(conf):
return conf.bind_host
def get_bind_port(conf):
return conf.bind_port
An option may optionally be made available via the command line. Such options
must be registered with the config manager before the command line is parsed (for
the purposes of --help and CLI arg validation)::
cli_opts = [
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output'),
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output'),
]
def add_common_opts(conf):
conf.register_cli_opts(cli_opts)
The config manager has two CLI options defined by default, --config-file
and --config-dir::
class ConfigOpts(object):
def __call__(self, ...):
opts = [
MultiStrOpt('config-file',
...),
StrOpt('config-dir',
...),
]
self.register_cli_opts(opts)
Option values are parsed from any supplied config files using
openstack.common.iniparser. If none are specified, a default set is used
e.g. glance-api.conf and glance-common.conf::
glance-api.conf:
[DEFAULT]
bind_port = 9292
glance-common.conf:
[DEFAULT]
bind_host = 0.0.0.0
Option values in config files override those on the command line. Config files
are parsed in order, with values in later files overriding those in earlier
files.
The parsing of CLI args and config files is initiated by invoking the config
manager e.g.::
conf = ConfigOpts()
conf.register_opt(BoolOpt('verbose', ...))
conf(sys.argv[1:])
if conf.verbose:
...
Options can be registered as belonging to a group::
rabbit_group = cfg.OptGroup(name='rabbit',
title='RabbitMQ options')
rabbit_host_opt = cfg.StrOpt('host',
default='localhost',
help='IP/hostname to listen on'),
rabbit_port_opt = cfg.IntOpt('port',
default=5672,
help='Port number to listen on')
def register_rabbit_opts(conf):
conf.register_group(rabbit_group)
# options can be registered under a group in either of these ways:
conf.register_opt(rabbit_host_opt, group=rabbit_group)
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group attributes are required other than the group name, the group
need not be explicitly registered, e.g.::
def register_rabbit_opts(conf):
        # The group will automatically be created, equivalent to calling::
# conf.register_group(OptGroup(name='rabbit'))
conf.register_opt(rabbit_port_opt, group='rabbit')
If no group is specified, options belong to the 'DEFAULT' section of config
files::
glance-api.conf:
[DEFAULT]
bind_port = 9292
...
[rabbit]
host = localhost
port = 5672
use_ssl = False
userid = guest
password = guest
virtual_host = /
Command-line options in a group are automatically prefixed with the
group name::
--rabbit-host localhost --rabbit-port 9999
Option values in the default group are referenced as attributes/properties on
the config manager; groups are also attributes on the config manager, with
attributes for each of the options associated with the group::
server.start(app, conf.bind_port, conf.bind_host, conf)
self.connection = kombu.connection.BrokerConnection(
hostname=conf.rabbit.host,
port=conf.rabbit.port,
...)
Option values may reference other values using PEP 292 string substitution::
opts = [
cfg.StrOpt('state_path',
default=os.path.join(os.path.dirname(__file__), '../'),
help='Top-level directory for maintaining nova state'),
cfg.StrOpt('sqlite_db',
default='nova.sqlite',
help='file name for sqlite'),
cfg.StrOpt('sql_connection',
default='sqlite:///$state_path/$sqlite_db',
help='connection string for sql database'),
]
Note that interpolation can be avoided by using '$$'.
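For example, a literal dollar sign can be kept in a value by doubling it
(a minimal sketch with a hypothetical option name)::

    cfg.StrOpt('literal_example',
               default='pass$$word',
               help='resolves to the literal value "pass$word"')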
For command line utilities that dispatch to other command line utilities, the
disable_interspersed_args() method is available. If this method is called,
then parsing e.g.::
script --verbose cmd --debug /tmp/mything
will no longer return::
['cmd', '/tmp/mything']
as the leftover arguments, but will instead return::
['cmd', '--debug', '/tmp/mything']
i.e. argument parsing is stopped at the first non-option argument.
Options may be declared as required so that an error is raised if the user
does not supply a value for the option.
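e.g. (a minimal sketch)::

    opts = [
        cfg.StrOpt('sql_connection', required=True),
    ]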
Options may be declared as secret so that their values are not leaked into
log files::
opts = [
cfg.StrOpt('s3_store_access_key', secret=True),
cfg.StrOpt('s3_store_secret_key', secret=True),
...
]
This module also contains a global instance of the CommonConfigOpts class
in order to support a common usage pattern in OpenStack::
from openstack.common import cfg
opts = [
        cfg.StrOpt('bind_host', default='0.0.0.0'),
cfg.IntOpt('bind_port', default=9292),
]
CONF = cfg.CONF
CONF.register_opts(opts)
def start(server, app):
server.start(app, CONF.bind_port, CONF.bind_host)
"""
import collections
import copy
import functools
import glob
import optparse
import os
import string
import sys
from keystone.openstack.common import iniparser
class Error(Exception):
"""Base class for cfg exceptions."""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
return self.msg
class ArgsAlreadyParsedError(Error):
"""Raised if a CLI opt is registered after parsing."""
def __str__(self):
ret = "arguments already parsed"
if self.msg:
ret += ": " + self.msg
return ret
class NoSuchOptError(Error, AttributeError):
"""Raised if an opt which doesn't exist is referenced."""
def __init__(self, opt_name, group=None):
self.opt_name = opt_name
self.group = group
def __str__(self):
if self.group is None:
return "no such option: %s" % self.opt_name
else:
return "no such option in group %s: %s" % (self.group.name,
self.opt_name)
class NoSuchGroupError(Error):
"""Raised if a group which doesn't exist is referenced."""
def __init__(self, group_name):
self.group_name = group_name
def __str__(self):
return "no such group: %s" % self.group_name
class DuplicateOptError(Error):
"""Raised if multiple opts with the same name are registered."""
def __init__(self, opt_name):
self.opt_name = opt_name
def __str__(self):
return "duplicate option: %s" % self.opt_name
class RequiredOptError(Error):
"""Raised if an option is required but no value is supplied by the user."""
def __init__(self, opt_name, group=None):
self.opt_name = opt_name
self.group = group
def __str__(self):
if self.group is None:
return "value required for option: %s" % self.opt_name
else:
return "value required for option: %s.%s" % (self.group.name,
self.opt_name)
class TemplateSubstitutionError(Error):
"""Raised if an error occurs substituting a variable in an opt value."""
def __str__(self):
return "template substitution error: %s" % self.msg
class ConfigFilesNotFoundError(Error):
"""Raised if one or more config files are not found."""
def __init__(self, config_files):
self.config_files = config_files
def __str__(self):
return ('Failed to read some config files: %s' %
                ','.join(self.config_files))
class ConfigFileParseError(Error):
"""Raised if there is an error parsing a config file."""
def __init__(self, config_file, msg):
self.config_file = config_file
self.msg = msg
def __str__(self):
return 'Failed to parse %s: %s' % (self.config_file, self.msg)
class ConfigFileValueError(Error):
"""Raised if a config file value does not match its opt type."""
pass
def _get_config_dirs(project=None):
"""Return a list of directors where config files may be located.
:param project: an optional project name
If a project is specified, following directories are returned::
~/.${project}/
~/
/etc/${project}/
/etc/
Otherwise, these directories::
~/
/etc/
"""
fix_path = lambda p: os.path.abspath(os.path.expanduser(p))
cfg_dirs = [
fix_path(os.path.join('~', '.' + project)) if project else None,
fix_path('~'),
os.path.join('/etc', project) if project else None,
'/etc'
]
return filter(bool, cfg_dirs)
def _search_dirs(dirs, basename, extension=""):
"""Search a list of directories for a given filename.
Iterator over the supplied directories, returning the first file
found with the supplied name and extension.
:param dirs: a list of directories
:param basename: the filename, e.g. 'glance-api'
:param extension: the file extension, e.g. '.conf'
:returns: the path to a matching file, or None
"""
for d in dirs:
path = os.path.join(d, '%s%s' % (basename, extension))
if os.path.exists(path):
return path
def find_config_files(project=None, prog=None, extension='.conf'):
"""Return a list of default configuration files.
:param project: an optional project name
:param prog: the program name, defaulting to the basename of sys.argv[0]
:param extension: the type of the config file
We default to two config files: [${project}.conf, ${prog}.conf]
And we look for those config files in the following directories::
~/.${project}/
~/
/etc/${project}/
/etc/
    We return an absolute path for (at most) one of each of the default config
files, for the topmost directory it exists in.
For example, if project=foo, prog=bar and /etc/foo/foo.conf, /etc/bar.conf
and ~/.foo/bar.conf all exist, then we return ['/etc/foo/foo.conf',
'~/.foo/bar.conf']
    If no project name is supplied, we only look for ${prog}.conf.
"""
if prog is None:
prog = os.path.basename(sys.argv[0])
cfg_dirs = _get_config_dirs(project)
config_files = []
if project:
config_files.append(_search_dirs(cfg_dirs, project, extension))
config_files.append(_search_dirs(cfg_dirs, prog, extension))
return filter(bool, config_files)
def _is_opt_registered(opts, opt):
"""Check whether an opt with the same name is already registered.
The same opt may be registered multiple times, with only the first
registration having any effect. However, it is an error to attempt
to register a different opt with the same name.
:param opts: the set of opts already registered
:param opt: the opt to be registered
:returns: True if the opt was previously registered, False otherwise
:raises: DuplicateOptError if a naming conflict is detected
"""
if opt.dest in opts:
if opts[opt.dest]['opt'] is not opt:
raise DuplicateOptError(opt.name)
return True
else:
return False
class Opt(object):
"""Base class for all configuration options.
An Opt object has no public methods, but has a number of public string
properties:
name:
the name of the option, which may include hyphens
dest:
the (hyphen-less) ConfigOpts property which contains the option value
short:
a single character CLI option name
default:
the default value of the option
metavar:
the name shown as the argument to a CLI option in --help output
help:
        a string explaining how the option's value is used
"""
multi = False
def __init__(self, name, dest=None, short=None, default=None,
metavar=None, help=None, secret=False, required=False):
"""Construct an Opt object.
The only required parameter is the option's name. However, it is
common to also supply a default and help string for all options.
:param name: the option's name
:param dest: the name of the corresponding ConfigOpts property
:param short: a single character CLI option name
:param default: the default value of the option
:param metavar: the option argument to show in --help
:param help: an explanation of how the option is used
:param secret: true iff the value should be obfuscated in log output
:param required: true iff a value must be supplied for this option
"""
self.name = name
if dest is None:
self.dest = self.name.replace('-', '_')
else:
self.dest = dest
self.short = short
self.default = default
self.metavar = metavar
self.help = help
self.secret = secret
self.required = required
def _get_from_config_parser(self, cparser, section):
"""Retrieves the option value from a MultiConfigParser object.
This is the method ConfigOpts uses to look up the option value from
config files. Most opt types override this method in order to perform
type appropriate conversion of the returned value.
:param cparser: a ConfigParser object
:param section: a section name
"""
return cparser.get(section, self.dest)
def _add_to_cli(self, parser, group=None):
"""Makes the option available in the command line interface.
This is the method ConfigOpts uses to add the opt to the CLI interface
as appropriate for the opt type. Some opt types may extend this method,
others may just extend the helper methods it uses.
:param parser: the CLI option parser
:param group: an optional OptGroup object
"""
container = self._get_optparse_container(parser, group)
kwargs = self._get_optparse_kwargs(group)
prefix = self._get_optparse_prefix('', group)
self._add_to_optparse(container, self.name, self.short, kwargs, prefix)
def _add_to_optparse(self, container, name, short, kwargs, prefix=''):
"""Add an option to an optparse parser or group.
:param container: an optparse.OptionContainer object
:param name: the opt name
:param short: the short opt name
:param kwargs: the keyword arguments for add_option()
:param prefix: an optional prefix to prepend to the opt name
        :raises: DuplicateOptError if a naming conflict is detected
"""
args = ['--' + prefix + name]
if short:
args += ['-' + short]
for a in args:
if container.has_option(a):
raise DuplicateOptError(a)
container.add_option(*args, **kwargs)
def _get_optparse_container(self, parser, group):
"""Returns an optparse.OptionContainer.
:param parser: an optparse.OptionParser
:param group: an (optional) OptGroup object
:returns: an optparse.OptionGroup if a group is given, else the parser
"""
if group is not None:
return group._get_optparse_group(parser)
else:
return parser
def _get_optparse_kwargs(self, group, **kwargs):
"""Build a dict of keyword arguments for optparse's add_option().
Most opt types extend this method to customize the behaviour of the
options added to optparse.
:param group: an optional group
        :param kwargs: optional keyword arguments to add to the returned dict
:returns: a dict of keyword arguments
"""
dest = self.dest
if group is not None:
dest = group.name + '_' + dest
kwargs.update({
'dest': dest,
'metavar': self.metavar,
'help': self.help,
})
return kwargs
def _get_optparse_prefix(self, prefix, group):
"""Build a prefix for the CLI option name, if required.
CLI options in a group are prefixed with the group's name in order
to avoid conflicts between similarly named options in different
groups.
:param prefix: an existing prefix to append to (e.g. 'no' or '')
:param group: an optional OptGroup object
:returns: a CLI option prefix including the group name, if appropriate
"""
if group is not None:
return group.name + '-' + prefix
else:
return prefix
class StrOpt(Opt):
"""
String opts do not have their values transformed and are returned as
str objects.
"""
pass
class BoolOpt(Opt):
"""
Bool opts are set to True or False on the command line using --optname or
    --nooptname respectively.
In config files, boolean values are case insensitive and can be set using
1/0, yes/no, true/false or on/off.
"""
_boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False}
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a boolean from ConfigParser."""
def convert_bool(v):
value = self._boolean_states.get(v.lower())
if value is None:
raise ValueError('Unexpected boolean value %r' % v)
return value
return [convert_bool(v) for v in cparser.get(section, self.dest)]
def _add_to_cli(self, parser, group=None):
"""Extends the base class method to add the --nooptname option."""
super(BoolOpt, self)._add_to_cli(parser, group)
self._add_inverse_to_optparse(parser, group)
def _add_inverse_to_optparse(self, parser, group):
"""Add the --nooptname option to the option parser."""
container = self._get_optparse_container(parser, group)
kwargs = self._get_optparse_kwargs(group, action='store_false')
prefix = self._get_optparse_prefix('no', group)
kwargs["help"] = "The inverse of --" + self.name
self._add_to_optparse(container, self.name, None, kwargs, prefix)
def _get_optparse_kwargs(self, group, action='store_true', **kwargs):
"""Extends the base optparse keyword dict for boolean options."""
return super(BoolOpt,
self)._get_optparse_kwargs(group, action=action, **kwargs)
class IntOpt(Opt):
"""Int opt values are converted to integers using the int() builtin."""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a integer from ConfigParser."""
return [int(v) for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for integer options."""
return super(IntOpt,
self)._get_optparse_kwargs(group, type='int', **kwargs)
class FloatOpt(Opt):
"""Float opt values are converted to floats using the float() builtin."""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a float from ConfigParser."""
return [float(v) for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for float options."""
return super(FloatOpt,
self)._get_optparse_kwargs(group, type='float', **kwargs)
class ListOpt(Opt):
"""
List opt values are simple string values separated by commas. The opt value
is a list containing these strings.
"""
def _get_from_config_parser(self, cparser, section):
"""Retrieve the opt value as a list from ConfigParser."""
return [v.split(',') for v in cparser.get(section, self.dest)]
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for list options."""
return super(ListOpt,
self)._get_optparse_kwargs(group,
type='string',
action='callback',
callback=self._parse_list,
**kwargs)
def _parse_list(self, option, opt, value, parser):
"""An optparse callback for parsing an option value into a list."""
setattr(parser.values, self.dest, value.split(','))
class MultiStrOpt(Opt):
"""
Multistr opt values are string opts which may be specified multiple times.
The opt value is a list containing all the string values specified.
"""
multi = True
def _get_optparse_kwargs(self, group, **kwargs):
"""Extends the base optparse keyword dict for multi str options."""
return super(MultiStrOpt,
self)._get_optparse_kwargs(group, action='append')
class OptGroup(object):
"""
Represents a group of opts.
CLI opts in the group are automatically prefixed with the group name.
Each group corresponds to a section in config files.
An OptGroup object has no public methods, but has a number of public string
properties:
name:
the name of the group
title:
the group title as displayed in --help
help:
the group description as displayed in --help
"""
def __init__(self, name, title=None, help=None):
"""Constructs an OptGroup object.
:param name: the group name
:param title: the group title for --help
:param help: the group description for --help
"""
self.name = name
if title is None:
self.title = "%s options" % title
else:
self.title = title
self.help = help
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._optparse_group = None
def _register_opt(self, opt):
"""Add an opt to this group.
:param opt: an Opt object
:returns: False if previously registered, True otherwise
:raises: DuplicateOptError if a naming conflict is detected
"""
if _is_opt_registered(self._opts, opt):
return False
self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
return True
def _unregister_opt(self, opt):
"""Remove an opt from this group.
:param opt: an Opt object
"""
if opt.dest in self._opts:
del self._opts[opt.dest]
def _get_optparse_group(self, parser):
"""Build an optparse.OptionGroup for this group."""
if self._optparse_group is None:
self._optparse_group = optparse.OptionGroup(parser, self.title,
self.help)
return self._optparse_group
def _clear(self):
"""Clear this group's option parsing state."""
self._optparse_group = None
class ParseError(iniparser.ParseError):
def __init__(self, msg, lineno, line, filename):
super(ParseError, self).__init__(msg, lineno, line)
self.filename = filename
def __str__(self):
return 'at %s:%d, %s: %r' % (self.filename, self.lineno,
self.msg, self.line)
class ConfigParser(iniparser.BaseParser):
def __init__(self, filename, sections):
super(ConfigParser, self).__init__()
self.filename = filename
self.sections = sections
self.section = None
def parse(self):
with open(self.filename) as f:
return super(ConfigParser, self).parse(f)
def new_section(self, section):
self.section = section
self.sections.setdefault(self.section, {})
def assignment(self, key, value):
if not self.section:
raise self.error_no_section()
self.sections[self.section].setdefault(key, [])
self.sections[self.section][key].append('\n'.join(value))
def parse_exc(self, msg, lineno, line=None):
return ParseError(msg, lineno, line, self.filename)
def error_no_section(self):
return self.parse_exc('Section must be started before assignment',
self.lineno)
class MultiConfigParser(object):
def __init__(self):
self.sections = {}
def read(self, config_files):
read_ok = []
for filename in config_files:
parser = ConfigParser(filename, self.sections)
try:
parser.parse()
except IOError:
continue
read_ok.append(filename)
return read_ok
def get(self, section, name):
return self.sections[section][name]
class ConfigOpts(collections.Mapping):
"""
Config options which may be set on the command line or in config files.
ConfigOpts is a configuration option manager with APIs for registering
option schemas, grouping options, parsing option values and retrieving
the values of options.
"""
def __init__(self):
"""Construct a ConfigOpts object."""
self._opts = {} # dict of dicts of (opt:, override:, default:)
self._groups = {}
self._args = None
self._oparser = None
self._cparser = None
self._cli_values = {}
self.__cache = {}
self._config_opts = []
self._disable_interspersed_args = False
def _setup(self, project, prog, version, usage, default_config_files):
"""Initialize a ConfigOpts object for option parsing."""
if prog is None:
prog = os.path.basename(sys.argv[0])
if default_config_files is None:
default_config_files = find_config_files(project, prog)
self._oparser = optparse.OptionParser(prog=prog,
version=version,
usage=usage)
if self._disable_interspersed_args:
self._oparser.disable_interspersed_args()
self._config_opts = [
MultiStrOpt('config-file',
default=default_config_files,
metavar='PATH',
help='Path to a config file to use. Multiple config '
'files can be specified, with values in later '
'files taking precedence. The default files '
                             'used are: %s' % (default_config_files, )),
StrOpt('config-dir',
metavar='DIR',
help='Path to a config directory to pull *.conf '
'files from. This file set is sorted, so as to '
'provide a predictable parse order if individual '
'options are over-ridden. The set is parsed after '
'the file(s), if any, specified via --config-file, '
'hence over-ridden options in the directory take '
'precedence.'),
]
self.register_cli_opts(self._config_opts)
self.project = project
self.prog = prog
self.version = version
self.usage = usage
self.default_config_files = default_config_files
def __clear_cache(f):
@functools.wraps(f)
def __inner(self, *args, **kwargs):
if kwargs.pop('clear_cache', True):
self.__cache.clear()
return f(self, *args, **kwargs)
return __inner
def __call__(self,
args=None,
project=None,
prog=None,
version=None,
usage=None,
default_config_files=None):
"""Parse command line arguments and config files.
Calling a ConfigOpts object causes the supplied command line arguments
and config files to be parsed, causing opt values to be made available
as attributes of the object.
The object may be called multiple times, each time causing the previous
set of values to be overwritten.
Automatically registers the --config-file option with either a supplied
list of default config files, or a list from find_config_files().
If the --config-dir option is set, any *.conf files from this
directory are pulled in, after all the file(s) specified by the
--config-file option.
:param args: command line arguments (defaults to sys.argv[1:])
:param project: the toplevel project name, used to locate config files
:param prog: the name of the program (defaults to sys.argv[0] basename)
:param version: the program version (for --version)
:param usage: a usage string (%prog will be expanded)
:param default_config_files: config files to use by default
:returns: the list of arguments left over after parsing options
:raises: SystemExit, ConfigFilesNotFoundError, ConfigFileParseError,
RequiredOptError, DuplicateOptError
"""
self.clear()
self._setup(project, prog, version, usage, default_config_files)
self._cli_values, leftovers = self._parse_cli_opts(args)
self._parse_config_files()
self._check_required_opts()
return leftovers
def __getattr__(self, name):
"""Look up an option value and perform string substitution.
:param name: the opt name (or 'dest', more precisely)
        :returns: the option value (after string substitution) or a GroupAttr
        :raises: NoSuchOptError, ConfigFileValueError, TemplateSubstitutionError
"""
return self._get(name)
def __getitem__(self, key):
"""Look up an option value and perform string substitution."""
return self.__getattr__(key)
def __contains__(self, key):
"""Return True if key is the name of a registered opt or group."""
return key in self._opts or key in self._groups
def __iter__(self):
"""Iterate over all registered opt and group names."""
for key in self._opts.keys() + self._groups.keys():
yield key
def __len__(self):
"""Return the number of options and option groups."""
return len(self._opts) + len(self._groups)
def reset(self):
"""Clear the object state and unset overrides and defaults."""
self._unset_defaults_and_overrides()
self.clear()
@__clear_cache
def clear(self):
"""Clear the state of the object to before it was called."""
self._args = None
self._cli_values.clear()
self._oparser = None
self._cparser = None
self.unregister_opts(self._config_opts)
for group in self._groups.values():
group._clear()
@__clear_cache
def register_opt(self, opt, group=None):
"""Register an option schema.
Registering an option schema makes any option value which is previously
or subsequently parsed from the command line or config files available
as an attribute of this object.
:param opt: an instance of an Opt sub-class
:param group: an optional OptGroup object or group name
        :return: False if the opt was already registered, True otherwise
:raises: DuplicateOptError
"""
if group is not None:
return self._get_group(group, autocreate=True)._register_opt(opt)
if _is_opt_registered(self._opts, opt):
return False
self._opts[opt.dest] = {'opt': opt, 'override': None, 'default': None}
return True
@__clear_cache
def register_opts(self, opts, group=None):
"""Register multiple option schemas at once."""
for opt in opts:
self.register_opt(opt, group, clear_cache=False)
@__clear_cache
def register_cli_opt(self, opt, group=None):
"""Register a CLI option schema.
CLI option schemas must be registered before the command line and
config files are parsed. This is to ensure that all CLI options are
        shown in --help and option validation works as expected.
:param opt: an instance of an Opt sub-class
:param group: an optional OptGroup object or group name
        :return: False if the opt was already registered, True otherwise
:raises: DuplicateOptError, ArgsAlreadyParsedError
"""
if self._args is not None:
raise ArgsAlreadyParsedError("cannot register CLI option")
return self.register_opt(opt, group, clear_cache=False)
@__clear_cache
def register_cli_opts(self, opts, group=None):
"""Register multiple CLI option schemas at once."""
for opt in opts:
self.register_cli_opt(opt, group, clear_cache=False)
def register_group(self, group):
"""Register an option group.
An option group must be registered before options can be registered
with the group.
:param group: an OptGroup object
"""
if group.name in self._groups:
return
self._groups[group.name] = copy.copy(group)
@__clear_cache
def unregister_opt(self, opt, group=None):
"""Unregister an option.
:param opt: an Opt object
:param group: an optional OptGroup object or group name
:raises: ArgsAlreadyParsedError, NoSuchGroupError
"""
if self._args is not None:
raise ArgsAlreadyParsedError("reset before unregistering options")
if group is not None:
self._get_group(group)._unregister_opt(opt)
elif opt.dest in self._opts:
del self._opts[opt.dest]
@__clear_cache
def unregister_opts(self, opts, group=None):
"""Unregister multiple CLI option schemas at once."""
for opt in opts:
self.unregister_opt(opt, group, clear_cache=False)
@__clear_cache
def set_override(self, name, override, group=None):
"""Override an opt value.
Override the command line, config file and default values of a
given option.
:param name: the name/dest of the opt
:param override: the override value
        :param group: an optional OptGroup object or group name
:raises: NoSuchOptError, NoSuchGroupError
"""
opt_info = self._get_opt_info(name, group)
opt_info['override'] = override
@__clear_cache
def set_default(self, name, default, group=None):
"""Override an opt's default value.
Override the default value of given option. A command line or
config file value will still take precedence over this default.
:param name: the name/dest of the opt
:param default: the default value
        :param group: an optional OptGroup object or group name
:raises: NoSuchOptError, NoSuchGroupError
"""
opt_info = self._get_opt_info(name, group)
opt_info['default'] = default
def _all_opt_infos(self):
"""A generator function for iteration opt infos."""
for info in self._opts.values():
yield info, None
for group in self._groups.values():
for info in group._opts.values():
yield info, group
def _all_opts(self):
"""A generator function for iteration opts."""
for info, group in self._all_opt_infos():
yield info['opt'], group
def _unset_defaults_and_overrides(self):
"""Unset any default or override on all options."""
for info, group in self._all_opt_infos():
info['default'] = None
info['override'] = None
def disable_interspersed_args(self):
"""Set parsing to stop on the first non-option.
        If this method is called, then parsing e.g.
script --verbose cmd --debug /tmp/mything
will no longer return:
['cmd', '/tmp/mything']
as the leftover arguments, but will instead return:
['cmd', '--debug', '/tmp/mything']
i.e. argument parsing is stopped at the first non-option argument.
"""
self._disable_interspersed_args = True
def enable_interspersed_args(self):
"""Set parsing to not stop on the first non-option.
        This is the default behaviour."""
self._disable_interspersed_args = False
def find_file(self, name):
"""Locate a file located alongside the config files.
Search for a file with the supplied basename in the directories
which we have already loaded config files from and other known
configuration directories.
The directory, if any, supplied by the config_dir option is
searched first. Then the config_file option is iterated over
and each of the base directories of the config_files values
are searched. Failing both of these, the standard directories
searched by the module level find_config_files() function is
used. The first matching file is returned.
        :param name: the filename, e.g. 'policy.json'
:returns: the path to a matching file, or None
"""
dirs = []
if self.config_dir:
dirs.append(self.config_dir)
for cf in reversed(self.config_file):
dirs.append(os.path.dirname(cf))
dirs.extend(_get_config_dirs(self.project))
return _search_dirs(dirs, name)
def log_opt_values(self, logger, lvl):
"""Log the value of all registered opts.
It's often useful for an app to log its configuration to a log file at
        startup for debugging. This method dumps the entire config state to
the supplied logger at a given log level.
:param logger: a logging.Logger object
:param lvl: the log level (e.g. logging.DEBUG) arg to logger.log()
"""
logger.log(lvl, "*" * 80)
logger.log(lvl, "Configuration options gathered from:")
logger.log(lvl, "command line args: %s", self._args)
logger.log(lvl, "config files: %s", self.config_file)
logger.log(lvl, "=" * 80)
def _sanitize(opt, value):
"""Obfuscate values of options declared secret"""
return value if not opt.secret else '*' * len(str(value))
for opt_name in sorted(self._opts):
opt = self._get_opt_info(opt_name)['opt']
logger.log(lvl, "%-30s = %s", opt_name,
_sanitize(opt, getattr(self, opt_name)))
for group_name in self._groups:
group_attr = self.GroupAttr(self, self._get_group(group_name))
for opt_name in sorted(self._groups[group_name]._opts):
opt = self._get_opt_info(opt_name, group_name)['opt']
logger.log(lvl, "%-30s = %s",
"%s.%s" % (group_name, opt_name),
_sanitize(opt, getattr(group_attr, opt_name)))
logger.log(lvl, "*" * 80)
def print_usage(self, file=None):
"""Print the usage message for the current program."""
self._oparser.print_usage(file)
def print_help(self, file=None):
"""Print the help message for the current program."""
self._oparser.print_help(file)
def _get(self, name, group=None):
if isinstance(group, OptGroup):
key = (group.name, name)
else:
key = (group, name)
try:
return self.__cache[key]
except KeyError:
value = self._substitute(self._do_get(name, group))
self.__cache[key] = value
return value
def _do_get(self, name, group=None):
"""Look up an option value.
:param name: the opt name (or 'dest', more precisely)
:param group: an OptGroup
:returns: the option value, or a GroupAttr object
:raises: NoSuchOptError, NoSuchGroupError, ConfigFileValueError,
TemplateSubstitutionError
"""
if group is None and name in self._groups:
return self.GroupAttr(self, self._get_group(name))
info = self._get_opt_info(name, group)
default, opt, override = [info[k] for k in sorted(info.keys())]
if override is not None:
return override
values = []
if self._cparser is not None:
section = group.name if group is not None else 'DEFAULT'
try:
value = opt._get_from_config_parser(self._cparser, section)
except KeyError:
pass
except ValueError as ve:
raise ConfigFileValueError(str(ve))
else:
if not opt.multi:
# No need to continue since the last value wins
return value[-1]
values.extend(value)
name = name if group is None else group.name + '_' + name
value = self._cli_values.get(name)
if value is not None:
if not opt.multi:
return value
return value + values
if values:
return values
if default is not None:
return default
return opt.default
def _substitute(self, value):
"""Perform string template substitution.
        Substitute any template variables (e.g. $foo, ${bar}) in the supplied
string value(s) with opt values.
:param value: the string value, or list of string values
:returns: the substituted string(s)
"""
if isinstance(value, list):
return [self._substitute(i) for i in value]
elif isinstance(value, str):
tmpl = string.Template(value)
return tmpl.safe_substitute(self.StrSubWrapper(self))
else:
return value
def _get_group(self, group_or_name, autocreate=False):
"""Looks up a OptGroup object.
Helper function to return an OptGroup given a parameter which can
either be the group's name or an OptGroup object.
The OptGroup object returned is from the internal dict of OptGroup
objects, which will be a copy of any OptGroup object that users of
the API have access to.
:param group_or_name: the group's name or the OptGroup object itself
:param autocreate: whether to auto-create the group if it's not found
:raises: NoSuchGroupError
"""
group = group_or_name if isinstance(group_or_name, OptGroup) else None
group_name = group.name if group else group_or_name
        if group_name not in self._groups:
            if group is not None or not autocreate:
raise NoSuchGroupError(group_name)
self.register_group(OptGroup(name=group_name))
return self._groups[group_name]
def _get_opt_info(self, opt_name, group=None):
"""Return the (opt, override, default) dict for an opt.
:param opt_name: an opt name/dest
:param group: an optional group name or OptGroup object
:raises: NoSuchOptError, NoSuchGroupError
"""
if group is None:
opts = self._opts
else:
group = self._get_group(group)
opts = group._opts
        if opt_name not in opts:
raise NoSuchOptError(opt_name, group)
return opts[opt_name]
def _parse_config_files(self):
"""Parse the config files from --config-file and --config-dir.
:raises: ConfigFilesNotFoundError, ConfigFileParseError
"""
config_files = list(self.config_file)
if self.config_dir:
config_dir_glob = os.path.join(self.config_dir, '*.conf')
config_files += sorted(glob.glob(config_dir_glob))
self._cparser = MultiConfigParser()
try:
read_ok = self._cparser.read(config_files)
except iniparser.ParseError as pe:
raise ConfigFileParseError(pe.filename, str(pe))
if read_ok != config_files:
not_read_ok = filter(lambda f: f not in read_ok, config_files)
raise ConfigFilesNotFoundError(not_read_ok)
def _check_required_opts(self):
"""Check that all opts marked as required have values specified.
:raises: RequiredOptError
"""
for info, group in self._all_opt_infos():
default, opt, override = [info[k] for k in sorted(info.keys())]
if opt.required:
if (default is not None or override is not None):
continue
if self._get(opt.name, group) is None:
raise RequiredOptError(opt.name, group)
def _parse_cli_opts(self, args):
"""Parse command line options.
Initializes the command line option parser and parses the supplied
command line arguments.
:param args: the command line arguments
:returns: a dict of parsed option values
:raises: SystemExit, DuplicateOptError
"""
self._args = args
for opt, group in self._all_opts():
opt._add_to_cli(self._oparser, group)
values, leftovers = self._oparser.parse_args(args)
return vars(values), leftovers
class GroupAttr(collections.Mapping):
"""
A helper class representing the option values of a group as a mapping
and attributes.
"""
def __init__(self, conf, group):
"""Construct a GroupAttr object.
:param conf: a ConfigOpts object
:param group: an OptGroup object
"""
self.conf = conf
self.group = group
def __getattr__(self, name):
"""Look up an option value and perform template substitution."""
return self.conf._get(name, self.group)
def __getitem__(self, key):
"""Look up an option value and perform string substitution."""
return self.__getattr__(key)
def __contains__(self, key):
"""Return True if key is the name of a registered opt or group."""
return key in self.group._opts
def __iter__(self):
"""Iterate over all registered opt and group names."""
for key in self.group._opts.keys():
yield key
def __len__(self):
"""Return the number of options and option groups."""
return len(self.group._opts)
class StrSubWrapper(object):
"""
A helper class exposing opt values as a dict for string substitution.
"""
def __init__(self, conf):
"""Construct a StrSubWrapper object.
:param conf: a ConfigOpts object
"""
self.conf = conf
def __getitem__(self, key):
"""Look up an opt value from the ConfigOpts object.
:param key: an opt name
:returns: an opt value
:raises: TemplateSubstitutionError if attribute is a group
"""
value = getattr(self.conf, key)
if isinstance(value, self.conf.GroupAttr):
raise TemplateSubstitutionError(
'substituting group %s not supported' % key)
return value
class CommonConfigOpts(ConfigOpts):
DEFAULT_LOG_FORMAT = "%(asctime)s %(levelname)8s [%(name)s] %(message)s"
DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
BoolOpt('debug',
short='d',
default=False,
help='Print debugging output'),
BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output'),
]
logging_cli_opts = [
StrOpt('log-config',
metavar='PATH',
help='If this option is specified, the logging configuration '
'file specified is used and overrides any other logging '
'options specified. Please see the Python logging module '
'documentation for details on logging configuration '
'files.'),
StrOpt('log-format',
default=DEFAULT_LOG_FORMAT,
metavar='FORMAT',
help='A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'Default: %default'),
StrOpt('log-date-format',
default=DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %(asctime)s in log records. '
'Default: %default'),
StrOpt('log-file',
metavar='PATH',
help='(Optional) Name of log file to output to. '
'If not set, logging will go to stdout.'),
StrOpt('log-dir',
help='(Optional) The directory to keep log files in '
'(will be prepended to --logfile)'),
BoolOpt('use-syslog',
default=False,
help='Use syslog for logging.'),
StrOpt('syslog-log-facility',
default='LOG_USER',
help='syslog facility to receive log lines')
]
def __init__(self):
super(CommonConfigOpts, self).__init__()
self.register_cli_opts(self.common_cli_opts)
self.register_cli_opts(self.logging_cli_opts)
CONF = CommonConfigOpts()
|
apache-2.0
| 1,294,673,558,742,574,300
| 32.517308
| 79
| 0.597969
| false
| 4.321597
| true
| false
| false
|
malmiron/incubator-airflow
|
airflow/operators/bash_operator.py
|
1
|
5597
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import signal
from subprocess import Popen, STDOUT, PIPE
from tempfile import gettempdir, NamedTemporaryFile
from builtins import bytes
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import TemporaryDirectory
from airflow.utils.operator_helpers import context_to_airflow_vars
class BashOperator(BaseOperator):
"""
Execute a Bash script, command or set of commands.
:param bash_command: The command, set of commands or reference to a
bash script (must be '.sh') to be executed. (templated)
:type bash_command: str
:param xcom_push: If xcom_push is True, the last line written to stdout
will also be pushed to an XCom when the bash command completes.
:type xcom_push: bool
:param env: If env is not None, it must be a mapping that defines the
environment variables for the new process; these are used instead
of inheriting the current process environment, which is the default
behavior. (templated)
:type env: dict
:param output_encoding: Output encoding of bash command
:type output_encoding: str
On execution of this operator the task will be up for retry
    when an exception is raised. However, if a sub-command exits with a non-zero
    value, Airflow will not recognize it as a failure unless the whole shell exits
with a failure. The easiest way of achieving this is to prefix the command
with ``set -e;``
Example:
.. code-block:: python
bash_command = "set -e; python3 script.py '{{ next_execution_date }}'"
"""
template_fields = ('bash_command', 'env')
template_ext = ('.sh', '.bash',)
ui_color = '#f0ede4'
@apply_defaults
def __init__(
self,
bash_command,
xcom_push=False,
env=None,
output_encoding='utf-8',
*args, **kwargs):
super(BashOperator, self).__init__(*args, **kwargs)
self.bash_command = bash_command
self.env = env
self.xcom_push_flag = xcom_push
self.output_encoding = output_encoding
def execute(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
self.log.info("Tmp dir root location: \n %s", gettempdir())
# Prepare env for child process.
if self.env is None:
self.env = os.environ.copy()
airflow_context_vars = context_to_airflow_vars(context,
in_env_var_format=True)
self.log.info("Exporting the following env vars:\n" +
'\n'.join(["{}={}".format(k, v)
for k, v in
airflow_context_vars.items()]))
self.env.update(airflow_context_vars)
self.lineage_data = self.bash_command
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
f.write(bytes(self.bash_command, 'utf_8'))
f.flush()
fname = f.name
script_location = os.path.abspath(fname)
self.log.info(
"Temporary script location: %s",
script_location
)
def pre_exec():
# Restore default signal disposition and invoke setsid
for sig in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
if hasattr(signal, sig):
signal.signal(getattr(signal, sig), signal.SIG_DFL)
os.setsid()
self.log.info("Running command: %s", self.bash_command)
sp = Popen(
['bash', fname],
stdout=PIPE, stderr=STDOUT,
cwd=tmp_dir, env=self.env,
preexec_fn=pre_exec)
self.sp = sp
self.log.info("Output:")
line = ''
for line in iter(sp.stdout.readline, b''):
line = line.decode(self.output_encoding).rstrip()
self.log.info(line)
sp.wait()
self.log.info(
"Command exited with return code %s",
sp.returncode
)
if sp.returncode:
raise AirflowException("Bash command failed")
if self.xcom_push_flag:
return line
def on_kill(self):
self.log.info('Sending SIGTERM signal to bash process group')
os.killpg(os.getpgid(self.sp.pid), signal.SIGTERM)
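# A minimal usage sketch (assumes a surrounding DAG object named `dag`):
#
#   run_this = BashOperator(
#       task_id='print_date',
#       bash_command='date',
#       xcom_push=True,
#       dag=dag,
#   )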
|
apache-2.0
| 6,163,552,212,366,461,000
| 36.313333
| 79
| 0.595676
| false
| 4.365835
| false
| false
| false
|
sjpet/epysteme
|
epysteme/sets/sql.py
|
1
|
19387
|
# -*- coding: utf-8 -*-
import os
import sqlite3
import weakref
from collections import OrderedDict
from functools import reduce
from ..helpers import (repeat_to_match,
key_depth,
expand_key,
extend_functions,
tuple_,
maybe_list,
sql_friendly)
class _SqliteLocIndexer(object):
"""Label-location based indexer for selection by label in Sqlite data
frames.
"""
def __init__(self, parent):
self._parent = weakref.ref(parent)
def __getitem__(self, item):
for observation in self._parent().query(index=item):
yield observation
class _SqliteIlocIndexer(object):
"""Indexer for selection by integer index in Sqlite data
frames.
"""
def __init__(self, parent):
self._parent = weakref.ref(parent)
def __getitem__(self, item):
for observation in self._parent().query(int_index=item):
yield observation
class SqliteSeries(object):
"""One-dimensional array analogous to a pandas series. Depends on a parent
SqliteDataFrame which holds the database connection and index.
"""
def __init__(self,
parent,
table_name,
column_name,
sql_index,
func=lambda x: x):
"""SqliteSeries init method.
Parameters
----------
parent : SqliteDataFrame
Parent data frame
table_name : str
Database table name
column_name : str
Column name in the given table holding data values
sql_index : str, optional
Column name in the given table holding index values
func : function, optional
Function to map over the values
"""
self.parent = weakref.ref(parent)
self.table_name = table_name
self.column_name = column_name
self.sql_index = sql_index
self.func = func
self._loc = _SqliteLocIndexer(self)
self._iloc = _SqliteIlocIndexer(self)
@property
def loc(self):
return self._loc
@property
def iloc(self):
return self._iloc
@property
def n_columns(self):
if isinstance(self.column_name, str):
return 1
else:
return len(self.column_name)
@property
def index(self):
# return [k for k in range(len(self))]
return self.parent().index
def __iter__(self):
if isinstance(self.column_name, list):
observations = zip(*[self._iter_single(table_name,
column_name,
index)
for table_name, column_name, index in
zip(self.table_name,
self.column_name,
self.sql_index)])
for observation in observations:
yield self.func(*observation)
else:
for observation in self._iter_single(self.table_name,
self.column_name,
self.sql_index):
yield self.func(observation)
# def __iter__(self):
# for observation in self.query():
# yield observation
def __len__(self):
n_observations = 0
for _ in iter(self):
n_observations += 1
return n_observations
def _iter_single(self, table_name, column_name, index):
if table_name is None:
for x in self.parent()[column_name]:
yield x
elif self.parent().connection is not None:
crs = self.parent().connection.cursor()
crs.execute("SELECT {column} FROM {table} ORDER BY {index}".format(
column=column_name,
table=table_name,
index=index))
for x in crs:
yield x[0]
else:
while False:
yield None
def query(self, index=None, int_index=None):
"""Query the database for values.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
Yields
------
list
The next observation
"""
this_column = self.parent().column_name(self)
for observation in self.parent().query(index=index,
int_index=int_index,
columns=this_column):
yield observation
class SqliteDataFrame(object):
"""Two-dimensional data structure providing a read-only connection to an
SQLite database and an interface similar to that of a pandas data frame.
"""
def __init__(self, path=None, columns=None, index_col=None):
"""SqliteDataFrame init method.
Parameters
----------
path : str, optional
Path to an SQLite database
columns : dict, optional
Dictionary of columns to add, given as
{key: (table_name, column_name, index_name)}
index_col : dict key, optional
Key of the column to use as index
"""
self._columns = OrderedDict()
self._index = None
self._connection = None
self._loc = _SqliteLocIndexer(self)
self._iloc = _SqliteIlocIndexer(self)
if path is not None:
self.connect(path)
if columns is not None:
for column_name, column_details in columns.items():
self[column_name] = column_details
if index_col is not None:
self.set_index(index_col)
@property
def database(self):
"""Path to the connected database, if any"""
if self._connection is not None:
crs = self._connection.cursor()
crs.execute("PRAGMA database_list")
_, _, db_path = crs.fetchone()
return db_path
else:
return None
@property
def connection(self):
"""Database connection"""
return self._connection
@property
def columns(self):
return [column for column in self._columns.keys()]
@columns.setter
def columns(self, value):
if not len(value) == len(self._columns):
error_message = ("Length mismatch, data frame has {n_data} "
"columns but {n_names} names were given")
raise ValueError(error_message.format(n_data=len(self._columns),
n_names=len(value)))
if not len(value) == len(set(value)):
raise ValueError("Column names must be unique")
max_depth = max(map(key_depth, value))
expanded_names = [expand_key(name, max_depth) for name in value]
self._columns = OrderedDict(
[(key, val) for key, val in zip(expanded_names,
list(self._columns.values()))])
@property
def index(self):
if self._index is None:
try:
return list(range(len(next(iter(self._columns.values())))))
except StopIteration:
return []
else:
return list(self._index)
@property
def loc(self):
return self._loc
@property
def iloc(self):
return self._iloc
def _expand_item(self, items):
"""Expand a list of items to present multi-indexing keys."""
depth = key_depth(next(iter(self._columns.keys())))
if depth == 0:
return items
expanded_items = []
for item in items:
if key_depth(item) == depth:
expanded_items.append(item)
else:
expanded_items.extend(
[key for key in self._columns
if all(a == b for a, b in zip(tuple_(item), key))])
return expanded_items
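    # Sketch of the expansion behaviour (hypothetical keys, for illustration):
    # with columns [('a', 'x'), ('a', 'y'), ('b', 'z')], _expand_item(['a'])
    # yields [('a', 'x'), ('a', 'y')], while a fully-qualified key such as
    # ('b', 'z') passes through unchanged.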
def __setitem__(self, item, value):
if isinstance(value, SqliteSeries):
self._columns[item] = value
else:
table_name, column_name, index = repeat_to_match(*value[:3])
if len(value) > 3:
func = value[3]
else:
def func(x): return x
series = SqliteSeries(self, table_name, column_name, index, func)
self._columns[item] = series
def __getitem__(self, item):
if isinstance(item, list):
items = self._expand_item(item)
return SqliteDataFrame(self.database,
columns={column: self._columns[column]
for column in items})
else:
items = self._expand_item([item])
if len(items) == 1:
return self._columns[items[0]]
else:
return SqliteDataFrame(
self.database,
columns={column[1:]: self._columns[column]
for column in items})
def __iter__(self):
for observation in self.query():
yield observation
def connect(self, path):
"""Connect to a database.
Parameters
----------
path : str
Path to the database
"""
current_connection = self._connection
if os.path.isfile(path):
connection = sqlite3.connect("file:{}?mode=ro".format(path),
uri=True)
try:
connection.execute("PRAGMA schema_version")
self._connection = connection
if current_connection is not None:
current_connection.close()
except sqlite3.DatabaseError:
raise ValueError(
"{} is not a valid SQLite database".format(path))
else:
raise ValueError("{} is not a file".format(path))
def drop(self, label, axis=0):
"""Remove a label from the requested axis.
Parameters
----------
label : str
Label to be removed
axis : int
Axis from which to remove the label.
"""
if axis == 1:
if label in self._columns:
self._columns.pop(label)
else:
raise KeyError("No column labeled '{}'".format(label))
else:
raise ValueError("Dropping of indices is not yet implemented")
def rename(self, columns=None):
"""Rename a label.
Parameters
----------
columns : dict, optional
Dictionary of column label substitutions
"""
if columns is not None:
self.columns = [key if key not in columns else columns[key]
for key in self.columns]
def set_index(self, index):
"""Set a column as index.
Parameters
----------
index : column label
"""
if index in self._columns:
self._index = self._columns.pop(index)
else:
raise ValueError("No such column: {}".format(index))
def column_name(self, target):
"""Find the column label of a series if it is part of the data frame.
Parameters
----------
target : SqliteSeries
Returns
-------
column label or None
"""
for column_name, column in self._columns.items():
if column == target:
return column_name
def query(self, index=None, int_index=None, columns=None):
"""Query the database for values.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
columns : list of column labels or a single column label
Yields
------
list
The next observation
"""
if columns is None:
columns_ = [column for column in self._columns.values()]
elif isinstance(columns, list):
columns_ = [self._columns[column] for column in columns]
else:
columns_ = [self._columns[columns]]
if any(column.func is not None for column in columns_):
def f_0():
return []
f, n = reduce(extend_functions,
[(column.func, column.n_columns)
for column in columns_],
(f_0, 0))
else:
f = None
crs = self._connection.cursor()
query_ = self._build_query(index=index,
int_index=int_index,
columns=columns)
crs.execute(query_)
if f is None:
for observation in crs:
yield maybe_list(observation)
else:
for observation in crs:
yield(maybe_list(f(*observation)))
def _build_query(self, index=None, int_index=None, columns=None):
"""Build a suitable SQL query.
Parameters
----------
index : list or slice of index labels or single index label, optional
int_index : list or slice of integer indices or single index, optional
            index takes precedence if not None
Returns
-------
str
An SQL query
"""
if columns is None:
columns_ = [column for column in self._columns.values()]
elif isinstance(columns, list):
columns_ = [self._columns[column] for column in columns]
else:
columns_ = [self._columns[columns]]
join_string = "INNER JOIN {table} ON {table}.{index} " \
"== {master}.{master_index}"
column_list = []
table_list = []
for column in columns_:
if isinstance(column.column_name, str):
column_list.append(".".join((column.table_name,
column.column_name)))
table_list.append((column.table_name, column.sql_index))
else:
column_list.extend(
".".join((table_, column_))
for table_, column_ in zip(column.table_name,
column.column_name))
table_list.extend((table_, index_)
for table_, index_ in zip(column.table_name,
column.sql_index))
columns = ", ".join(column_list)
first_column = columns_[0]
if isinstance(first_column.table_name, list):
table = first_column.table_name[0]
master_index = first_column.sql_index[0]
else:
table = first_column.table_name
master_index = first_column.sql_index
joins_set = set(join_string.format(table=table_,
index=index_,
master=table,
master_index=master_index)
for table_, index_ in table_list
if not table_ == table)
if len(joins_set) > 0:
joins = " " + "".join(joins_set)
else:
joins = ""
indices = ""
limit_and_offset = ""
if index is not None and self._index is not None:
inner_query = \
"SELECT {index_index} FROM {index_table}{where_clause}"
if isinstance(index, slice):
slice_parts = []
if index.start is not None:
slice_parts.append(
"{index_column}>={slice_start}".format(
index_column=self._index.column_name,
slice_start=sql_friendly(index.start)))
if index.stop is not None:
slice_parts.append("{index_column}<={slice_stop}".format(
index_column=self._index.column_name,
slice_stop=sql_friendly(index.stop)))
if index.step is not None:
raise NotImplementedError("Slices with steps are not yet "
"supported")
if len(slice_parts) > 0:
where_clause = " WHERE " + " AND ".join(slice_parts)
else:
where_clause = ""
elif isinstance(index, list):
where_clause = \
" WHERE {index_column} IN ({index_values})".format(
index_column=self._index.column_name,
index_values=", ".join(sql_friendly(value)
for value in index))
else:
where_clause = " WHERE {index_column}={index_value}".format(
index_column=self._index.column_name,
index_value=sql_friendly(index))
indices = " WHERE {index} IN ({inner_query})".format(
index=".".join([first_column.table_name,
first_column.sql_index]),
inner_query=inner_query.format(
index_index=self._index.sql_index,
index_table=self._index.table_name,
where_clause=where_clause))
elif index is not None or int_index is not None:
if index is None:
index = int_index
elif isinstance(index, slice):
if index.stop is not None:
# mimic pandas by including the stop
index = slice(index.start, index.stop + 1, index.step)
if isinstance(index, slice):
if index.start is None and index.stop is None:
pass
elif index.stop is None:
limit_and_offset = \
" LIMIT -1 OFFSET {}".format(index.start)
elif index.start is None:
limit_and_offset = " LIMIT {}".format(index.stop)
else:
limit_and_offset = " LIMIT {limit} OFFSET {offset}".format(
limit=index.stop - index.start, offset=index.start)
elif isinstance(index, list):
indices = " WHERE {table}.ROWID IN ({index_values})".format(
table=first_column.table_name,
index_values=", ".join(str(value + 1) for value in index))
else:
limit_and_offset = " LIMIT 1 OFFSET {}".format(index)
else:
pass
query_template = \
"SELECT {columns} FROM {table}{joins}{indices}{limit_and_offset}"
return query_template.format(**locals())
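# Illustrative usage (a minimal sketch; the database path, table and column
# names below are hypothetical, following the {key: (table, column, index)}
# column format documented above):
#
#   df = SqliteDataFrame('people.db',
#                        columns={'height': ('measurements', 'height_cm',
#                                            'person_id')})
#   for value in df['height']:
#       print(value)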
|
mit
| 4,250,631,575,813,475,000
| 32.834206
| 79
| 0.497911
| false
| 4.84675
| false
| false
| false
|
blackpan2/HealthNet
|
src/healthnet/patient/forms.py
|
1
|
5185
|
"""
Application: HealthNet
File: /patient/forms.py
Authors:
- Nathan Stevens
- Philip Bedward
- Daniel Herzig
- George Herde
- Samuel Launt
Description:
- This file contains all view controller information
"""
from base.models import ExtendedStay
from django.apps import apps
from django import forms
from django.contrib.auth.models import User
from django.forms.extras.widgets import SelectDateWidget
from django.forms.widgets import NumberInput
address = apps.get_model('base', 'Address')
person = apps.get_model('base', 'Person')
insurance = apps.get_model('base', 'Insurance')
doctor = apps.get_model('base', 'Doctor')
nurse = apps.get_model('base', 'Nurse')
admin = apps.get_model('base', 'Admin')
# Custom forms for the PatientRegistration
class UserForm(forms.ModelForm):
"""
@class: UserForm
@description: This form is where the User information is updated
"""
first_name = forms.CharField(required=True, label='First Name:')
last_name = forms.CharField(required=True, label='Last Name:')
email = forms.EmailField(required=True, label='Email:')
username = forms.CharField(required=True, label='Username:',
help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.')
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class PersonRegistrationForm(forms.ModelForm):
"""
@class: PersonRegistrationForm
@description: This form is where the Person specific information is entered
"""
    birthday = forms.DateField(widget=SelectDateWidget(years=range(1950, 2016)),
                               label='Birthday:')
# ssn = forms.IntegerField(widget=NumberInput, label='SSN:')
# phoneNumber = USPhoneNumberField()
class Meta:
model = apps.get_model('base', 'Person')
fields = ('birthday', 'phoneNumber')
exclude = ('ssn',)
class InsuranceForm(forms.ModelForm):
"""
@class: InsuranceForm
@description: This form is where the Insurance information is supplied
"""
name = forms.CharField(label='Name:')
policyNumber = forms.IntegerField(label='Policy Number:')
class Meta:
model = apps.get_model('base', 'Insurance')
fields = ('name', 'policyNumber')
exclude = ('addressID',)
class AddressForm(forms.ModelForm):
"""
@class: AddressForm
@description: This form is where the Address information is provided
"""
# zip = USZipCodeField()
# state = USStateField()
#
class Meta:
model = apps.get_model('base', 'Address')
fields = ('street', 'zip', 'city', 'state')
class EmergencyContactForm(forms.ModelForm):
"""
@class: EmergencyContactForm
@description: This form is where the Emergency Contact information is entered
"""
firstName = forms.CharField(required=True, label='First Name:')
lastName = forms.CharField(required=True, label='Last Name:')
# emergencyNumber = USPhoneNumberField()
class Meta:
model = apps.get_model('base', 'EmergencyContact')
fields = ('firstName', 'lastName', 'emergencyNumber')
exclude = ('personID',)
class AdminForm(forms.ModelForm):
hospital = forms.ModelChoiceField(queryset=admin.objects.all(), empty_label='Choose A Hospital')
class Meta:
model = admin
fields = ('hospital',)
class DeleteDoctor(forms.ModelForm):
class Meta:
model = doctor
fields = []
class DeleteNurse(forms.ModelForm):
class Meta:
model = nurse
fields = []
class DeleteAdmin(forms.ModelForm):
class Meta:
model = admin
fields = []
class AdmitPatient(forms.ModelForm):
endDate = forms.DateField(label='Choose A date to discharge this patient')
endTime = forms.TimeField(label='Choose A time to discharge this patient')
class Meta:
model = ExtendedStay
fields = ('endDate','endTime')
class DischargePatient(forms.ModelForm):
class Meta:
model = ExtendedStay
fields = []
class TransferPatientForm(forms.ModelForm):
class Meta:
model = ExtendedStay
fields = []
|
mit
| -8,182,136,124,556,290,000
| 32.668831
| 113
| 0.580714
| false
| 4.144684
| false
| false
| false
|
toirl/ringo
|
ringo/lib/helpers/appinfo.py
|
1
|
4117
|
"""Modul to get information about the application"""
import os
import pkg_resources
from pyramid.threadlocal import get_current_registry
from ringo.lib.sitetree import build_breadcrumbs, site_tree_branches
def get_ringo_version():
return pkg_resources.get_distribution('ringo').version
def get_app_inheritance_path():
"""Returns a list of application names. The names describe the path
to the root of the application inheritance. e.g if the current
application is 'foo' which is based and 'bar' which is based on
'ringo' the function will return the follwing result: ['foo', 'bar',
'ringo'].
The default path is [<nameofcurrentapp>, "ringo"]. The path can be
extended by setting the app.base config variable.
:returns: List of application name which build the inheritance path.
"""
path = ['ringo']
registry = get_current_registry()
settings = registry.settings
base = settings.get("app.base")
if base:
path.append(base)
path.append(get_app_name())
return reversed(path)
def get_app_name():
registry = get_current_registry()
return registry.__name__
def get_app_version():
return pkg_resources.get_distribution(get_app_name()).version
def get_app_location(name=None):
if not name:
name = get_app_name()
return pkg_resources.get_distribution(name).location
def get_app_url(request):
"""Returns the path of the application under which the application
is hosted on the server.
.. note::
This function is a helper function. It is only used to build
correct URLs for client sided AJAX requests in case the
application is hosted in a subpath.
Example:
If the application is hosted on "http://localhost:6543/foo" the
function will return "foo". If it is hosted under the root
directory '' is returned."""
return request.environ.get("SCRIPT_NAME", "")
def get_app_mode(request):
"""Will return a tuple of the mode configuration (if configured)
Tuple: (mode, desc, color)
If no mode is configured return None.
:request: Current request
:return: Tuple of mode configruation
"""
settings = request.registry.settings
mode = settings.get("app.mode")
desc = settings.get("app.mode_desc", "").decode('utf-8')
color_primary = settings.get("app.mode_color_primary", "#F2DEDE")
color_secondary = settings.get("app.mode_color_secondary", "red")
if mode:
return (mode, desc, color_primary, color_secondary)
return None
def get_app_title():
"""Will return the title of the application
:return: The title of the application"""
registry = get_current_registry()
settings = registry.settings
return settings['app.title']
def get_path_to(location, app=None):
"""Will return the full pathname the given file name with in the
path. path is relativ to the application package (pkg_ressource
location + ressource name). You can define a alternative
application."""
if app:
app_name = app
else:
app_name = get_app_name()
base_path = os.path.join(get_app_location(app_name), app_name)
return os.path.join(base_path, location)
def get_breadcrumbs(request, strategy=None):
"""Will return a list of elements which are used to build the
breadcrumbs in the UI.
    The function takes a strategy argument which is called to build this
    list instead of the default mechanism of ringo. The strategy
    function takes the current request as its argument.
    The returned list currently must have the following format::
    [(label of element, url of element), (), ...]
    The last element in the list should be the current element and has no
    link. (URL is None)
    :request: Current request
    :strategy: Optional function which is called to build the site tree.
    :returns: List of elements used for building the breadcrumbs.
if strategy is None:
strategy = build_breadcrumbs
tree = {}
for branch in site_tree_branches:
tree.update(branch)
return strategy(request, tree)
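# Sketch of a custom breadcrumb strategy (hypothetical labels and URLs),
# following the format described in the get_breadcrumbs docstring above:
#
#   def my_strategy(request, tree):
#       return [("Home", "/"), ("Current page", None)]
#
#   crumbs = get_breadcrumbs(request, strategy=my_strategy)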
|
gpl-2.0
| 7,238,456,900,135,183,000
| 29.954887
| 72
| 0.686422
| false
| 4.020508
| false
| false
| false
|
disqus/django-old
|
django/contrib/formtools/wizard/views.py
|
1
|
26572
|
import re
from django import forms
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.forms import formsets, ValidationError
from django.views.generic import TemplateView
from django.utils.datastructures import SortedDict
from django.utils.decorators import classonlymethod
from django.contrib.formtools.wizard.storage import get_storage
from django.contrib.formtools.wizard.storage.exceptions import NoFileStorageConfigured
from django.contrib.formtools.wizard.forms import ManagementForm
def normalize_name(name):
new = re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', '_\\1', name)
return new.lower().strip('_')
class StepsHelper(object):
def __init__(self, wizard):
self._wizard = wizard
def __dir__(self):
return self.all
def __len__(self):
return self.count
def __repr__(self):
return '<StepsHelper for %s (steps: %s)>' % (self._wizard, self.all)
@property
def all(self):
"Returns the names of all steps/forms."
return self._wizard.get_form_list().keys()
@property
def count(self):
"Returns the total number of steps/forms in this the wizard."
return len(self.all)
@property
def current(self):
"""
Returns the current step. If no current step is stored in the
storage backend, the first step will be returned.
"""
return self._wizard.storage.current_step or self.first
@property
def first(self):
"Returns the name of the first step."
return self.all[0]
@property
def last(self):
"Returns the name of the last step."
return self.all[-1]
@property
def next(self):
"Returns the next step."
return self._wizard.get_next_step()
@property
def prev(self):
"Returns the previous step."
return self._wizard.get_prev_step()
@property
def index(self):
"Returns the index for the current step."
return self._wizard.get_step_index()
@property
def step0(self):
return int(self.index)
@property
def step1(self):
return int(self.index) + 1
class WizardView(TemplateView):
"""
The WizardView is used to create multi-page forms and handles all the
storage and validation stuff. The wizard is based on Django's generic
class based views.
"""
storage_name = None
form_list = None
initial_dict = None
instance_dict = None
condition_dict = None
template_name = 'formtools/wizard/wizard_form.html'
def __repr__(self):
return '<%s: forms: %s>' % (self.__class__.__name__, self.form_list)
@classonlymethod
def as_view(cls, *args, **kwargs):
"""
This method is used within urls.py to create unique formwizard
instances for every request. We need to override this method because
we add some kwargs which are needed to make the formwizard usable.
"""
initkwargs = cls.get_initkwargs(*args, **kwargs)
return super(WizardView, cls).as_view(**initkwargs)
@classmethod
def get_initkwargs(cls, form_list, initial_dict=None,
instance_dict=None, condition_dict=None, *args, **kwargs):
"""
Creates a dict with all needed parameters for the form wizard instances.
* `form_list` - is a list of forms. The list entries can be single form
classes or tuples of (`step_name`, `form_class`). If you pass a list
of forms, the formwizard will convert the class list to
(`zero_based_counter`, `form_class`). This is needed to access the
form for a specific step.
* `initial_dict` - contains a dictionary of initial data dictionaries.
The key should be equal to the `step_name` in the `form_list` (or
the str of the zero based counter - if no step_names added in the
`form_list`)
        * `instance_dict` - contains a dictionary of instance objects. This dictionary
is only used when `ModelForm`s are used. The key should be equal to
the `step_name` in the `form_list`. Same rules as for `initial_dict`
apply.
* `condition_dict` - contains a dictionary of boolean values or
          callables. If the value for a specific `step_name` is callable it
will be called with the formwizard instance as the only argument.
If the return value is true, the step's form will be used.
"""
kwargs.update({
'initial_dict': initial_dict or {},
'instance_dict': instance_dict or {},
'condition_dict': condition_dict or {},
})
init_form_list = SortedDict()
assert len(form_list) > 0, 'at least one form is needed'
# walk through the passed form list
for i, form in enumerate(form_list):
if isinstance(form, (list, tuple)):
# if the element is a tuple, add the tuple to the new created
# sorted dictionary.
init_form_list[unicode(form[0])] = form[1]
else:
# if not, add the form with a zero based counter as unicode
init_form_list[unicode(i)] = form
# walk through the new created list of forms
for form in init_form_list.itervalues():
if issubclass(form, formsets.BaseFormSet):
# if the element is based on BaseFormSet (FormSet/ModelFormSet)
# we need to override the form variable.
form = form.form
# check if any form contains a FileField, if yes, we need a
# file_storage added to the formwizard (by subclassing).
for field in form.base_fields.itervalues():
if (isinstance(field, forms.FileField) and
not hasattr(cls, 'file_storage')):
raise NoFileStorageConfigured
# build the kwargs for the formwizard instances
kwargs['form_list'] = init_form_list
return kwargs
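    # For illustration (hypothetical form classes): both form_list shapes
    # described above are accepted, e.g.
    #
    #   ContactWizard.as_view([ContactForm1, ContactForm2])
    #   ContactWizard.as_view([('contact', ContactForm1),
    #                          ('message', ContactForm2)])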
def get_wizard_name(self):
return normalize_name(self.__class__.__name__)
def get_prefix(self):
# TODO: Add some kind of unique id to prefix
return self.wizard_name
def get_form_list(self):
"""
This method returns a form_list based on the initial form list but
checks if there is a condition method/value in the condition_list.
If an entry exists in the condition list, it will call/read the value
and respect the result. (True means add the form, False means ignore
the form)
The form_list is always generated on the fly because condition methods
could use data from other (maybe previous forms).
"""
form_list = SortedDict()
for form_key, form_class in self.form_list.iteritems():
# try to fetch the value from condition list, by default, the form
# gets passed to the new list.
condition = self.condition_dict.get(form_key, True)
if callable(condition):
# call the value if needed, passes the current instance.
condition = condition(self)
if condition:
form_list[form_key] = form_class
return form_list
def dispatch(self, request, *args, **kwargs):
"""
This method gets called by the routing engine. The first argument is
`request` which contains a `HttpRequest` instance.
The request is stored in `self.request` for later use. The storage
instance is stored in `self.storage`.
After processing the request using the `dispatch` method, the
response gets updated by the storage engine (for example add cookies).
"""
# add the storage engine to the current formwizard instance
self.wizard_name = self.get_wizard_name()
self.prefix = self.get_prefix()
self.storage = get_storage(self.storage_name, self.prefix, request,
getattr(self, 'file_storage', None))
self.steps = StepsHelper(self)
response = super(WizardView, self).dispatch(request, *args, **kwargs)
# update the response (e.g. adding cookies)
self.storage.update_response(response)
return response
def get(self, request, *args, **kwargs):
"""
This method handles GET requests.
If a GET request reaches this point, the wizard assumes that the user
just starts at the first step or wants to restart the process.
        The data of the wizard will be reset before rendering the first step.
"""
self.storage.reset()
# reset the current step to the first step.
self.storage.current_step = self.steps.first
return self.render(self.get_form())
def post(self, *args, **kwargs):
"""
This method handles POST requests.
The wizard will render either the current step (if form validation
        wasn't successful), the next step (if the current step was stored
        successfully) or the done view (if no more steps are available)
"""
# Look for a wizard_prev_step element in the posted data which
# contains a valid step name. If one was found, render the requested
# form. (This makes stepping back a lot easier).
wizard_prev_step = self.request.POST.get('wizard_prev_step', None)
if wizard_prev_step and wizard_prev_step in self.get_form_list():
self.storage.current_step = wizard_prev_step
form = self.get_form(
data=self.storage.get_step_data(self.steps.current),
files=self.storage.get_step_files(self.steps.current))
return self.render(form)
# Check if form was refreshed
management_form = ManagementForm(self.request.POST, prefix=self.prefix)
if not management_form.is_valid():
raise ValidationError(
'ManagementForm data is missing or has been tampered.')
form_current_step = management_form.cleaned_data['current_step']
if (form_current_step != self.steps.current and
self.storage.current_step is not None):
# form refreshed, change current step
self.storage.current_step = form_current_step
# get the form for the current step
form = self.get_form(data=self.request.POST, files=self.request.FILES)
# and try to validate
if form.is_valid():
# if the form is valid, store the cleaned data and files.
self.storage.set_step_data(self.steps.current, self.process_step(form))
self.storage.set_step_files(self.steps.current, self.process_step_files(form))
# check if the current step is the last step
if self.steps.current == self.steps.last:
# no more steps, render done view
return self.render_done(form, **kwargs)
else:
# proceed to the next step
return self.render_next_step(form)
return self.render(form)
def render_next_step(self, form, **kwargs):
"""
        This method gets called when the next step/form should be rendered.
`form` contains the last/current form.
"""
# get the form instance based on the data from the storage backend
# (if available).
next_step = self.steps.next
new_form = self.get_form(next_step,
data=self.storage.get_step_data(next_step),
files=self.storage.get_step_files(next_step))
# change the stored current step
self.storage.current_step = next_step
return self.render(new_form, **kwargs)
def render_done(self, form, **kwargs):
"""
This method gets called when all forms passed. The method should also
        re-validate all steps to prevent manipulation. If any form doesn't
validate, `render_revalidation_failure` should get called.
If everything is fine call `done`.
"""
final_form_list = []
# walk through the form list and try to validate the data again.
for form_key in self.get_form_list():
form_obj = self.get_form(step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key))
if not form_obj.is_valid():
return self.render_revalidation_failure(form_key, form_obj, **kwargs)
final_form_list.append(form_obj)
# render the done view and reset the wizard before returning the
# response. This is needed to prevent from rendering done with the
# same data twice.
done_response = self.done(final_form_list, **kwargs)
self.storage.reset()
return done_response
def get_form_prefix(self, step=None, form=None):
"""
Returns the prefix which will be used when calling the actual form for
the given step. `step` contains the step-name, `form` the form which
will be called with the returned prefix.
If no step is given, the form_prefix will determine the current step
automatically.
"""
if step is None:
step = self.steps.current
return str(step)
def get_form_initial(self, step):
"""
Returns a dictionary which will be passed to the form for `step`
        as `initial`. If no initial data was provided while initializing the
        form wizard, an empty dictionary will be returned.
"""
return self.initial_dict.get(step, {})
def get_form_instance(self, step):
"""
Returns a object which will be passed to the form for `step`
        as `instance`. If no instance object was provided while initializing
        the form wizard, None will be returned.
"""
return self.instance_dict.get(step, None)
def get_form_kwargs(self, step=None):
"""
Returns the keyword arguments for instantiating the form
(or formset) on given step.
"""
return {}
def get_form(self, step=None, data=None, files=None):
"""
Constructs the form for a given `step`. If no `step` is defined, the
current step will be determined automatically.
The form will be initialized using the `data` argument to prefill the
new form. If needed, instance or queryset (for `ModelForm` or
`ModelFormSet`) will be added too.
"""
if step is None:
step = self.steps.current
# prepare the kwargs for the form instance.
kwargs = self.get_form_kwargs(step)
kwargs.update({
'data': data,
'files': files,
'prefix': self.get_form_prefix(step, self.form_list[step]),
'initial': self.get_form_initial(step),
})
if issubclass(self.form_list[step], forms.ModelForm):
# If the form is based on ModelForm, add instance if available.
kwargs.update({'instance': self.get_form_instance(step)})
elif issubclass(self.form_list[step], forms.models.BaseModelFormSet):
# If the form is based on ModelFormSet, add queryset if available.
kwargs.update({'queryset': self.get_form_instance(step)})
return self.form_list[step](**kwargs)
def process_step(self, form):
"""
This method is used to postprocess the form data. By default, it
returns the raw `form.data` dictionary.
"""
return self.get_form_step_data(form)
def process_step_files(self, form):
"""
This method is used to postprocess the form files. By default, it
returns the raw `form.files` dictionary.
"""
return self.get_form_step_files(form)
def render_revalidation_failure(self, step, form, **kwargs):
"""
Gets called when a form doesn't validate when rendering the done
        view. By default, it changes the current step to the failing form's step
and renders the form.
"""
self.storage.current_step = step
return self.render(form, **kwargs)
def get_form_step_data(self, form):
"""
Is used to return the raw form data. You may use this method to
manipulate the data.
"""
return form.data
def get_form_step_files(self, form):
"""
Is used to return the raw form files. You may use this method to
manipulate the data.
"""
return form.files
def get_all_cleaned_data(self):
"""
Returns a merged dictionary of all step cleaned_data dictionaries.
        If a step contains a `FormSet`, the key will be prefixed with
        'formset-' and contain a list of the formset's cleaned_data dictionaries.
"""
cleaned_data = {}
for form_key in self.get_form_list():
form_obj = self.get_form(
step=form_key,
data=self.storage.get_step_data(form_key),
files=self.storage.get_step_files(form_key)
)
if form_obj.is_valid():
if isinstance(form_obj.cleaned_data, (tuple, list)):
cleaned_data.update({
'formset-%s' % form_key: form_obj.cleaned_data
})
else:
cleaned_data.update(form_obj.cleaned_data)
return cleaned_data
def get_cleaned_data_for_step(self, step):
"""
Returns the cleaned data for a given `step`. Before returning the
cleaned data, the stored values are being revalidated through the
form. If the data doesn't validate, None will be returned.
"""
if step in self.form_list:
form_obj = self.get_form(step=step,
data=self.storage.get_step_data(step),
files=self.storage.get_step_files(step))
if form_obj.is_valid():
return form_obj.cleaned_data
return None
def get_next_step(self, step=None):
"""
Returns the next step after the given `step`. If no more steps are
available, None will be returned. If the `step` argument is None, the
current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
key = form_list.keyOrder.index(step) + 1
if len(form_list.keyOrder) > key:
return form_list.keyOrder[key]
return None
def get_prev_step(self, step=None):
"""
Returns the previous step before the given `step`. If there are no
steps available, None will be returned. If the `step` argument is
None, the current step will be determined automatically.
"""
if step is None:
step = self.steps.current
form_list = self.get_form_list()
key = form_list.keyOrder.index(step) - 1
if key >= 0:
return form_list.keyOrder[key]
return None
def get_step_index(self, step=None):
"""
Returns the index for the given `step` name. If no step is given,
the current step will be used to get the index.
"""
if step is None:
step = self.steps.current
return self.get_form_list().keyOrder.index(step)
def get_context_data(self, form, *args, **kwargs):
"""
Returns the template context for a step. You can overwrite this method
to add more data for all or some steps. This method returns a
dictionary containing the rendered form step. Available template
context variables are:
* all extra data stored in the storage backend
* `form` - form instance of the current step
* `wizard` - the wizard instance itself
Example:
.. code-block:: python
class MyWizard(FormWizard):
def get_context_data(self, form, **kwargs):
context = super(MyWizard, self).get_context_data(form, **kwargs)
if self.steps.current == 'my_step_name':
context.update({'another_var': True})
return context
"""
context = super(WizardView, self).get_context_data(*args, **kwargs)
context.update(self.storage.extra_data)
context['wizard'] = {
'form': form,
'steps': self.steps,
'management_form': ManagementForm(prefix=self.prefix, initial={
'current_step': self.steps.current,
}),
}
return context
def render(self, form=None, **kwargs):
"""
        Returns a ``HttpResponse`` containing all needed context data.
"""
form = form or self.get_form()
context = self.get_context_data(form, **kwargs)
return self.render_to_response(context)
def done(self, form_list, **kwargs):
"""
        This method must be overridden by a subclass to process the form data
        after processing all steps.
"""
raise NotImplementedError("Your %s class has not defined a done() "
"method, which is required." % self.__class__.__name__)
class SessionWizardView(WizardView):
"""
A WizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieWizardView(WizardView):
"""
A WizardView with pre-configured CookieStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
class NamedUrlWizardView(WizardView):
"""
A WizardView with URL named steps support.
"""
url_name = None
done_step_name = None
@classmethod
def get_initkwargs(cls, *args, **kwargs):
"""
We require a url_name to reverse URLs later. Additionally users can
pass a done_step_name to change the URL name of the "done" view.
"""
assert 'url_name' in kwargs, 'URL name is needed to resolve correct wizard URLs'
extra_kwargs = {
'done_step_name': kwargs.pop('done_step_name', 'done'),
'url_name': kwargs.pop('url_name'),
}
initkwargs = super(NamedUrlWizardView, cls).get_initkwargs(*args, **kwargs)
initkwargs.update(extra_kwargs)
assert initkwargs['done_step_name'] not in initkwargs['form_list'], \
'step name "%s" is reserved for "done" view' % initkwargs['done_step_name']
return initkwargs
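    # For illustration (hypothetical URLconf, era-appropriate syntax): the
    # named-URL wizard is typically wired up with a `step` kwarg in the URL
    # pattern, matching the `url_name` passed above:
    #
    #   wizard = MyWizard.as_view(form_list, url_name='wizard_step')
    #   url(r'^wizard/(?P<step>.+)/$', wizard, name='wizard_step')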
def get(self, *args, **kwargs):
"""
        This renders the form or, if needed, performs the HTTP redirects.
"""
step_url = kwargs.get('step', None)
if step_url is None:
if 'reset' in self.request.GET:
self.storage.reset()
self.storage.current_step = self.steps.first
if self.request.GET:
query_string = "?%s" % self.request.GET.urlencode()
else:
query_string = ""
next_step_url = reverse(self.url_name, kwargs={
'step': self.steps.current,
}) + query_string
return redirect(next_step_url)
# is the current step the "done" name/view?
elif step_url == self.done_step_name:
last_step = self.steps.last
return self.render_done(self.get_form(step=last_step,
data=self.storage.get_step_data(last_step),
files=self.storage.get_step_files(last_step)
), **kwargs)
        elif step_url == self.steps.current:
            # URL step name and storage step name are equal, render!
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.get_step_files(self.steps.current),
), **kwargs)
        # the URL step name is not the stored step, but it is a valid step
        # name: change the step in the storage, then render
        elif step_url in self.get_form_list():
            self.storage.current_step = step_url
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.get_step_files(self.steps.current),
), **kwargs)
# invalid step name, reset to first and redirect.
else:
self.storage.current_step = self.steps.first
return redirect(self.url_name, step=self.steps.first)
def post(self, *args, **kwargs):
"""
        Do a redirect if the user presses the prev. step button. The rest of
        this is super'd from FormWizard.
"""
prev_step = self.request.POST.get('wizard_prev_step', None)
if prev_step and prev_step in self.get_form_list():
self.storage.current_step = prev_step
return redirect(self.url_name, step=prev_step)
return super(NamedUrlWizardView, self).post(*args, **kwargs)
def render_next_step(self, form, **kwargs):
"""
When using the NamedUrlFormWizard, we have to redirect to update the
browser's URL to match the shown step.
"""
next_step = self.get_next_step()
self.storage.current_step = next_step
return redirect(self.url_name, step=next_step)
def render_revalidation_failure(self, failed_step, form, **kwargs):
"""
When a step fails, we have to redirect the user to the first failing
step.
"""
self.storage.current_step = failed_step
return redirect(self.url_name, step=failed_step)
def render_done(self, form, **kwargs):
"""
When rendering the done view, we have to redirect first (if the URL
name doesn't fit).
"""
if kwargs.get('step', None) != self.done_step_name:
return redirect(self.url_name, step=self.done_step_name)
return super(NamedUrlWizardView, self).render_done(form, **kwargs)
class NamedUrlSessionWizardView(NamedUrlWizardView):
"""
A NamedUrlWizardView with pre-configured SessionStorage backend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class NamedUrlCookieWizardView(NamedUrlWizardView):
"""
A NamedUrlFormWizard with pre-configured CookieStorageBackend.
"""
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
|
bsd-3-clause
| -6,143,436,178,708,382,000
| 37.734694
| 90
| 0.607594
| false
| 4.231884
| false
| false
| false
|
Tommekster/kotelnik
|
pokus.py
|
1
|
5511
|
#!/usr/bin/python3
#import time # time.sleep(5.5)
import http.client
from socket import error as socket_error
import time
logCtrlFile = '/tmp/kotelnik.log'
logTempFile = '/tmp/kotelnikTemps.log'
class connectionError(RuntimeError):
def __init__(self, arg):
self.args = arg
class sensorError(RuntimeError):
def __init__(self, arg):
self.args = arg
def logCtrl(str):
file = open(logCtrlFile,'a')
file.write(str)
file.write("\n")
file.close()
def logTemp(str):
file = open(logTempFile,'a')
file.write(str)
file.write("\n")
file.close()
def switchKotelOnOff(on=False):
    conn = http.client.HTTPConnection('192.168.11.99') # set up the connection to the little boiler controller
if on:
cmd = '/on'
else:
cmd = '/off'
try:
        conn.request('GET',cmd) # have the boiler switched on
except (sensorError,connectionError,socket_error) as e:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+'switchKotel('+str(on)+') Exception: '+str(e))
return
else:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+' '+cmd)
def kotelOn():
switchKotelOnOff(True)
def kotelOff():
switchKotelOnOff(False)
def readSens(loc=0):
    if loc:
        data1 = b'<html><head><title>Kotelnik Senzory</title></head><body><h2>Senzory</h2><pre>\n609\n665\n674\n653\n697\n666\n174\n747\n</pre><hr></body></html>'
    else:
        conn = http.client.HTTPConnection('192.168.11.99') # set up the connection to the little boiler controller
        conn.request('GET','/sens') # request GET /sens
        r1 = conn.getresponse() # fetch the response
        if r1.status != 200: # check the response status
            raise connectionError('/sens is not 200 OK')
        data1 = r1.read() # take the data
    sens_str = data1.decode('utf8') # convert it to a string
    sens = sens_str.split('\n') # split it on line breaks
    if len(sens) < 10: # if there are fewer lines, the Atmel is probably just resetting
        raise sensorError('Received too little data.',sens)
    del(sens[-1]) # remove the HTML footer
    del(sens[0]) # remove the HTML header
return [int(s) for s in sens]
class mTime:
def __init__(self,_h,_m):
self.h=_h
self.m=_m
    def isLess(self,h,m): # this time is earlier than the given one
return self.h < h or (self.h == h and self.m < m)
    def isGreater(self,h,m): # this time is later than the given one
return self.h > h or (self.h == h and self.m > m)
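# Example: mTime(8, 30).isLess(9, 0) is True (8:30 comes before 9:00), and
# mTime(8, 30).isGreater(8, 0) is True (8:30 comes after 8:00).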
class mDay:
    def __init__(self):
        self.filledStart = False
        self.filledStop = False
def setStartTime(self,h,m):
setattr(self,'start',mTime(h,m))
self.filledStart = True
def setStopTime(self,h,m):
setattr(self,'stop',mTime(h,m))
self.filledStop = True
def setStartStop(self,h,m,hh,mm):
setattr(self,'start',mTime(h,m))
setattr(self,'stop',mTime(hh,mm))
        self.filledStart = True
        self.filledStop = True
def isTimeForHeating(self):
        if not (self.filledStart and self.filledStop):
return False
h = time.localtime().tm_hour
m = time.localtime().tm_min
return self.start.isLess(h,m) and self.stop.isGreater(h,m)
class mWeek:
def __init__(self):
self.days=[mDay() for i in range(0,7)]
#def getDay(self,index):
# return self.days[index]
def isTimeForHeating(self):
day = self.days[time.localtime().tm_wday]
return day.isTimeForHeating()
class Kotelnik:
def __init__(self):
        self.out_temperature = 15.0 # if the outdoor temperature is higher, do not heat
        self.pipes_temperature = 30.0 # if the pipe is warm, the boiler is heating
        self.week = mWeek()
        self.week.days[0].setStartStop(5,0,22,30) # heating times during the week
        self.week.days[1].setStartStop(5,0,22,30)
        self.week.days[2].setStartStop(5,0,22,30)
        self.week.days[3].setStartStop(5,0,22,30)
        self.week.days[4].setStartStop(5,0,23,59)
        self.week.days[5].setStartStop(8,0,23,59)
        self.week.days[6].setStartStop(8,0,23,0)
        self.timeout_interval = 3600 # when the pipes say the house is heated, how long the boiler should rest
        self.filterWeight = 1/32 # low-pass filter coefficient
        self.referenceVoltage=1.1 # reference voltage used to measure the nominal "5V" rail
        self.temperatures = [15.0 for i in range(0,6)] # default temperatures to avoid failures
def refreshTemperature(self):
try:
            sens = readSens() # read the sensor values
except (sensorError,connectionError,socket_error) as e:
logCtrl(time.strftime('%d.%m.%Y %H:%M')+' refreshTemperature() Exception: '+str(e))
return
        pom = sens[-2] # VCC meter divider ratio
        vcc = sens[-1] # reading of the VCC meter at VREF
        rawTemps = [s/10.24*vcc/pom*1.1-273 for s in sens[:-2]] # convert the sensor readings to degrees Celsius
newTemps = [self.temperatures[i] + (rawTemps[i] - self.temperatures[i])*self.filterWeight for i in range(0,6)]
self.temperatures = newTemps
tempstr='%d' % int(time.time())
for t in self.temperatures:
tempstr+=" %.5f" % t
logTemp(tempstr)
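    # Worked example of the low-pass filter above: with filterWeight = 1/32,
    # an old estimate of 15.0 and a raw reading of 47.0 give
    # 15.0 + (47.0 - 15.0)/32 = 16.0, so a single outlier moves the
    # filtered temperature only slightly.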
def isTemperatureForHeating(self):
        return self.out_temperature > self.temperatures[0] # the outdoor temperature is low
def boilerHeats(self):
return max(self.temperatures[1:]) > self.pipes_temperature
def mayBoilerHeat(self):
return self.isTemperatureForHeating() and self.week.isTimeForHeating()
def controlBoiler(self):
if self.mayBoilerHeat():
#if not self.boilerHeats():
kotelOn()
elif self.boilerHeats():
kotelOff()
def doYourWork(self):
self.work = True
cycles = 0
while(self.work):
self.refreshTemperature()
if cycles % 10 == 0:
self.controlBoiler()
cycles += 1
time.sleep(60)
def cancelWork(self):
self.work = False
if __name__ == '__main__':
    #print('Pokus: let us see what we can do with the kotelnik.')
k=Kotelnik()
k.doYourWork()
    print('Kotelnik finished.')
|
gpl-3.0
| 2,927,955,303,797,610,500
| 29.269231
| 156
| 0.685424
| false
| 2.444099
| false
| false
| false
|
Southpaw-TACTIC/Team
|
src/python/Lib/site-packages/PySide/examples/declarative/extending/chapter3-bindings/bindings.py
|
1
|
3388
|
#!/usr/bin/python
# Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
# Contact: PySide Team (pyside@openbossa.org)
#
# This file is part of the examples of PySide: Python for Qt.
#
# You may use this file under the terms of the BSD license as follows:
#
# "Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
import sys
from PySide.QtCore import *
from PySide.QtGui import *
from PySide.QtDeclarative import *
class PieChart (QDeclarativeItem):
def __init__(self, parent = None):
QDeclarativeItem.__init__(self, parent)
# need to disable this flag to draw inside a QDeclarativeItem
self.setFlag(QGraphicsItem.ItemHasNoContents, False)
self._name = u''
self._color = QColor()
def paint(self, painter, options, widget):
        pen = QPen(self._color, 2)
        painter.setPen(pen)
        painter.setRenderHints(QPainter.Antialiasing, True)
        painter.drawPie(self.boundingRect(), 90 * 16, 290 * 16)
def getColor(self):
return self._color
def setColor(self, value):
if value != self._color:
self._color = value
self.update()
self.colorChanged.emit()
def getName(self):
return self._name
def setName(self, value):
self._name = value
colorChanged = Signal()
color = Property(QColor, getColor, setColor, notify=colorChanged)
name = Property(unicode, getName, setName)
chartCleared = Signal()
@Slot() # This should be something like @Invokable
def clearChart(self):
self.setColor(Qt.transparent)
self.update()
self.chartCleared.emit()
if __name__ == '__main__':
app = QApplication(sys.argv)
    qmlRegisterType(PieChart, 'Charts', 1, 0, 'PieChart')
view = QDeclarativeView()
view.setSource(QUrl.fromLocalFile('app.qml'))
view.show()
sys.exit(app.exec_())
|
epl-1.0
| -8,127,580,666,819,666,000
| 35.826087
| 72
| 0.702184
| false
| 4
| false
| false
| false
|
dreadrel/UWF_2014_spring_COP3990C-2507
|
notebooks/scripts/book_code/code/registry-deco.py
|
1
|
1048
|
# Registering decorated objects to an API
from __future__ import print_function # 2.X
registry = {}
def register(obj): # Both class and func decorator
registry[obj.__name__] = obj # Add to registry
return obj # Return obj itself, not a wrapper
@register
def spam(x):
return(x ** 2) # spam = register(spam)
@register
def ham(x):
return(x ** 3)
@register
class Eggs: # Eggs = register(Eggs)
def __init__(self, x):
self.data = x ** 4
def __str__(self):
return str(self.data)
print('Registry:')
for name in registry:
print(name, '=>', registry[name], type(registry[name]))
print('\nManual calls:')
print(spam(2)) # Invoke objects manually
print(ham(2)) # Later calls not intercepted
X = Eggs(2)
print(X)
print('\nRegistry calls:')
for name in registry:
print(name, '=>', registry[name](2)) # Invoke from registry
|
apache-2.0
| 154,931,190,101,978,340
| 28.111111
| 78
| 0.533397
| false
| 3.867159
| false
| false
| false
|
sbenthall/chantbot
|
parse.py
|
1
|
1431
|
import ConfigParser
import re
import math
config= ConfigParser.ConfigParser()
config.read('config.cfg')
def hash_word(match):
return '#' + match.group()
def hash_line(line,kw_re):
for kr in kw_re:
line = re.sub(kr, hash_word, line)
return line
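# Example: with keywords ['occupy'], hash_line('we occupy everything', kw_re)
# returns 'we #occupy everything' (case-insensitive, whole words only).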
def prepare_chants(source,num_bursts,keywords):
"""
prepare_chants(source) -> list of Chants
    Read in the text from the source file and return a list of Chant
    objects, one per blank-line-separated block, with keyword hashtags applied.
"""
chants = []
f = open(source)
text = ""
kw_re = [re.compile(r'\b%s\b' % kw,flags=re.I) for kw in keywords]
for line in f:
if re.match(r'^\s*$',line) is not None:
if text is not "":
chants.append(Chant(text,num_bursts))
text = ""
else:
# add hashtags where necessary
text += hash_line(line,kw_re)
f.close()
return chants
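# Illustrative usage (a minimal sketch; the file name and keywords are
# hypothetical):
#
#   chants = prepare_chants('chants.txt', 3, ['occupy', 'strike'])
#   for chant in chants:
#       print(chant.bursts)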
class Chant:
lines = []
bursts = []
# lines per burst
lpb = 0
def __init__(self,text,num_bursts):
self.lines = text.split("\n")
if self.lines[-1] is "":
self.lines = self.lines[0:-1]
# lines per burst
self.lpb = int(math.ceil(float(len(self.lines)) / num_bursts))
self.bursts = [self.lines[i:i+self.lpb] for i
in xrange(0,len(self.lines),self.lpb)]
if len(self.bursts) < num_bursts:
self.bursts.append([])
|
mit
| -8,786,346,433,938,706,000
| 20.358209
| 70
| 0.55276
| false
| 3.3125
| true
| false
| false
|
acapet/GHER-POSTPROC
|
Examples/Conservation.py
|
1
|
2831
|
import numpy as np
import numpy.ma as ma
from netCDF4 import Dataset
#from mpl_toolkits.basemap import Basemap
#from multiprocessing import Pool
#import gsw
import matplotlib
matplotlib.use('pdf')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime as dt
import sys
import os
import G3D_class
ztab = -1*np.concatenate([np.arange(0,10,2), np.arange(10,40,5),np.arange(50,120,10),np.arange(120,320,50)])
firstyear=1
lastyear =4
for yy in range(firstyear,lastyear+1):
G1 = G3D_class.G3D('../Out_CARTseq/r'+str(yy)+'.nc')
maskDS = (G1.bat<120) | (G1.bat.mask)
G1.gload('T1age')
G1.gstore('T1age')
G1.testz()
NAgeClasses = 100
AgeClasses = np.linspace(0,1000,NAgeClasses )
AgeVolumes = np.zeros([len(G1.dates),NAgeClasses])
Vol = G1.dx*G1.dy*G1.dz*1e-9
daysincebeg=np.zeros(len(G1.dates))
if yy==firstyear:
datebeg=G1.dates[0]
for t in range(len(G1.dates)):
        # build a vector with the volume of water for each age class
localagevector = G1.T1age[t]
for ageClassindex in range(len(AgeClasses)-1):
bi = ma.masked_where( (localagevector<AgeClasses[ageClassindex]) | (localagevector>=AgeClasses[ageClassindex+1]), Vol)
AgeVolumes[t,ageClassindex]=bi.sum()
daysincebeg[t]=(G1.dates[t]-datebeg).days
if yy==firstyear:
AVa=AgeVolumes
datesa=daysincebeg
else:
AVa=np.append(AVa,AgeVolumes,0)
datesa=np.append(datesa,daysincebeg,0)
locator = mdates.AutoDateLocator()
formator = mdates.AutoDateFormatter(locator)
AVa=AVa/Vol.sum()*100
####################
# 1st figure :
####################
fig=plt.figure(figsize=(15, 15))
ax=plt.subplot(1, 1, 1)
#ax.xaxis_date()
#ax.xaxis.set_major_locator(locator)
#ax.xaxis.set_major_formatter(formator)
#plt.contourf(datesa, AgeClasses, AVa.T,levels=np.linspace(0,10,100),cmap='GnBu')
plt.contourf(datesa, AgeClasses, AVa.T,levels=np.linspace(0,1.5,100),cmap='gist_ncar_r')
plt.colorbar()
plt.plot([0.0, datesa.max()], [0.0, datesa.max()], 'r-', lw=2)
plt.title('Volumes for age of Waters - [% of volume]')
plt.ylabel('Age - [d]')
plt.xlabel('Time- [d]')
plt.grid(True)
fig.savefig(G1.figoutputdir+'AgeVolumes.png')
|
gpl-3.0
| 3,543,788,336,527,825,000
| 33.950617
| 212
| 0.530908
| false
| 3.390419
| false
| false
| false
|
kaffeebrauer/Lean
|
Algorithm.Framework/Execution/StandardDeviationExecutionModel.py
|
1
|
7152
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Indicators")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Algorithm.Framework")
from System import *
from QuantConnect import *
from QuantConnect.Indicators import *
from QuantConnect.Data import *
from QuantConnect.Data.Market import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Execution import *
from QuantConnect.Algorithm.Framework.Portfolio import *
import numpy as np
class StandardDeviationExecutionModel(ExecutionModel):
    '''Execution model that submits orders while the current market price is at least the configured number of standard
    deviations away from the mean in the favorable direction (below/above for buy/sell respectively)'''
def __init__(self,
period = 60,
deviations = 2,
resolution = Resolution.Minute):
'''Initializes a new instance of the StandardDeviationExecutionModel class
Args:
period: Period of the standard deviation indicator
deviations: The number of deviations away from the mean before submitting an order
resolution: The resolution of the STD and SMA indicators'''
self.period = period
self.deviations = deviations
self.resolution = resolution
self.targetsCollection = PortfolioTargetCollection()
self.symbolData = {}
# Gets or sets the maximum order value in units of the account currency.
# This defaults to $20,000. For example, if purchasing a stock with a price
# of $100, then the maximum order size would be 200 shares.
self.MaximumOrderValue = 20000
def Execute(self, algorithm, targets):
'''Executes market orders if the standard deviation of price is more
than the configured number of deviations in the favorable direction.
Args:
algorithm: The algorithm instance
targets: The portfolio targets'''
self.targetsCollection.AddRange(targets)
for target in self.targetsCollection.OrderByMarginImpact(algorithm):
symbol = target.Symbol
# calculate remaining quantity to be ordered
unorderedQuantity = OrderSizing.GetUnorderedQuantity(algorithm, target)
# fetch our symbol data containing our STD/SMA indicators
data = self.symbolData.get(symbol, None)
if data is None: return
# check order entry conditions
if data.STD.IsReady and self.PriceIsFavorable(data, unorderedQuantity):
# get the maximum order size based on total order value
maxOrderSize = OrderSizing.Value(data.Security, self.MaximumOrderValue)
orderSize = np.min([maxOrderSize, np.abs(unorderedQuantity)])
# round down to even lot size
orderSize -= orderSize % data.Security.SymbolProperties.LotSize
if orderSize != 0:
algorithm.MarketOrder(symbol, np.sign(unorderedQuantity) * orderSize)
self.targetsCollection.ClearFulfilled(algorithm)
def OnSecuritiesChanged(self, algorithm, changes):
        '''Event fired each time we add/remove securities from the data feed
Args:
algorithm: The algorithm instance that experienced the change in securities
changes: The security additions and removals from the algorithm'''
for removed in changes.RemovedSecurities:
# clean up data from removed securities
if removed.Symbol in self.symbolData:
if self.IsSafeToRemove(algorithm, removed.Symbol):
data = self.symbolData.pop(removed.Symbol)
algorithm.SubscriptionManager.RemoveConsolidator(removed.Symbol, data.Consolidator)
addedSymbols = []
for added in changes.AddedSecurities:
if added.Symbol not in self.symbolData:
self.symbolData[added.Symbol] = SymbolData(algorithm, added, self.period, self.resolution)
addedSymbols.append(added.Symbol)
if len(addedSymbols) > 0:
# warmup our indicators by pushing history through the consolidators
history = algorithm.History(addedSymbols, self.period, self.resolution)
if history.empty: return
tickers = history.index.levels[0]
for ticker in tickers:
symbol = SymbolCache.GetSymbol(ticker)
symbolData = self.symbolData[symbol]
for tuple in history.loc[ticker].itertuples():
bar = TradeBar(tuple.Index, symbol, tuple.open, tuple.high, tuple.low, tuple.close, tuple.volume)
symbolData.Consolidator.Update(bar)
def PriceIsFavorable(self, data, unorderedQuantity):
'''Determines if the current price is more than the configured
number of standard deviations away from the mean in the favorable direction.'''
deviations = self.deviations * data.STD.Current.Value
if unorderedQuantity > 0:
if data.Security.BidPrice < data.SMA.Current.Value - deviations:
return True
else:
if data.Security.AskPrice > data.SMA.Current.Value + deviations:
return True
return False
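
    # Worked example with illustrative numbers: if SMA = 100 and STD = 2 with
    # deviations = 2, the favorable band is mean +/- 4. A buy target is only
    # filled while the bid is below 96; a sell target only while the ask is
    # above 104.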
def IsSafeToRemove(self, algorithm, symbol):
'''Determines if it's safe to remove the associated symbol data'''
# confirm the security isn't currently a member of any universe
return not any([kvp.Value.ContainsMember(symbol) for kvp in algorithm.UniverseManager])
class SymbolData:
def __init__(self, algorithm, security, period, resolution):
self.Security = security
self.Consolidator = algorithm.ResolveConsolidator(security.Symbol, resolution)
smaName = algorithm.CreateIndicatorName(security.Symbol, "SMA{}".format(period), resolution)
self.SMA = SimpleMovingAverage(smaName, period)
algorithm.RegisterIndicator(security.Symbol, self.SMA, self.Consolidator)
stdName = algorithm.CreateIndicatorName(security.Symbol, "STD{}".format(period), resolution)
self.STD = StandardDeviation(stdName, period)
algorithm.RegisterIndicator(security.Symbol, self.STD, self.Consolidator)
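
# Illustrative wiring inside an algorithm's Initialize() (assumes the standard
# Lean algorithm-framework setup; the parameter values are examples only):
#
#     self.SetExecution(StandardDeviationExecutionModel(60, 2, Resolution.Minute))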
|
apache-2.0
| -3,448,392,155,997,403,000
| 46.673333
| 120
| 0.68993
| false
| 4.508197
| false
| false
| false
|
ShakedY/ai-project
|
py2.5/bin/smtpd.py
|
1
|
18102
|
#!/home/shaked/Desktop/prob-plan-recognition/seq-sat-lama/py2.5/bin/python
"""An RFC 2821 smtp proxy.
Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]]
Options:
--nosetuid
-n
This program generally tries to setuid `nobody', unless this flag is
set. The setuid call will fail if this program is not run as root (in
which case, use this flag).
--version
-V
Print the version number and exit.
--class classname
-c classname
Use `classname' as the concrete SMTP proxy class. Uses `PureProxy' by
default.
--debug
-d
Turn on debugging prints.
--help
-h
Print this message and exit.
Version: %(__version__)s
If localhost is not given then `localhost' is used, and if localport is not
given then 8025 is used. If remotehost is not given then `localhost' is used,
and if remoteport is not given, then 25 is used.
"""
# Overview:
#
# This file implements the minimal SMTP protocol as defined in RFC 821. It
# has a hierarchy of classes which implement the backend functionality for the
# smtpd. A number of classes are provided:
#
# SMTPServer - the base class for the backend. Raises NotImplementedError
# if you try to use it.
#
# DebuggingServer - simply prints each message it receives on stdout.
#
# PureProxy - Proxies all messages to a real smtpd which does final
# delivery. One known problem with this class is that it doesn't handle
# SMTP errors from the backend server at all. This should be fixed
# (contributions are welcome!).
#
# MailmanProxy - An experimental hack to work with GNU Mailman
# <www.list.org>. Using this server as your real incoming smtpd, your
# mailhost will automatically recognize and accept mail destined to Mailman
# lists when those lists are created. Every message not destined for a list
# gets forwarded to a real backend smtpd, as with PureProxy. Again, errors
# are not handled correctly yet.
#
# Please note that this script requires Python 2.0
#
# Author: Barry Warsaw <barry@python.org>
#
# TODO:
#
# - support mailbox delivery
# - alias files
# - ESMTP
# - handle error codes from the backend smtpd
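#
# A minimal sketch of extending the hierarchy described above: subclass
# SMTPServer and override process_message() (documented on the class below).
#
#     class DiscardServer(SMTPServer):
#         def process_message(self, peer, mailfrom, rcpttos, data):
#             pass    # returning None answers '250 Ok' and drops the message
#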
import sys
import os
import errno
import getopt
import time
import socket
import asyncore
import asynchat
__all__ = ["SMTPServer","DebuggingServer","PureProxy","MailmanProxy"]
program = sys.argv[0]
__version__ = 'Python SMTP proxy version 0.2'
class Devnull:
def write(self, msg): pass
def flush(self): pass
DEBUGSTREAM = Devnull()
NEWLINE = '\n'
EMPTYSTRING = ''
COMMASPACE = ', '
def usage(code, msg=''):
print >> sys.stderr, __doc__ % globals()
if msg:
print >> sys.stderr, msg
sys.exit(code)
class SMTPChannel(asynchat.async_chat):
COMMAND = 0
DATA = 1
def __init__(self, server, conn, addr):
asynchat.async_chat.__init__(self, conn)
self.__server = server
self.__conn = conn
self.__addr = addr
self.__line = []
self.__state = self.COMMAND
self.__greeting = 0
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__fqdn = socket.getfqdn()
self.__peer = conn.getpeername()
print >> DEBUGSTREAM, 'Peer:', repr(self.__peer)
self.push('220 %s %s' % (self.__fqdn, __version__))
self.set_terminator('\r\n')
# Overrides base class for convenience
def push(self, msg):
asynchat.async_chat.push(self, msg + '\r\n')
# Implementation of base class abstract method
def collect_incoming_data(self, data):
self.__line.append(data)
# Implementation of base class abstract method
def found_terminator(self):
line = EMPTYSTRING.join(self.__line)
print >> DEBUGSTREAM, 'Data:', repr(line)
self.__line = []
if self.__state == self.COMMAND:
if not line:
self.push('500 Error: bad syntax')
return
method = None
i = line.find(' ')
if i < 0:
command = line.upper()
arg = None
else:
command = line[:i].upper()
arg = line[i+1:].strip()
method = getattr(self, 'smtp_' + command, None)
if not method:
self.push('502 Error: command "%s" not implemented' % command)
return
method(arg)
return
else:
if self.__state != self.DATA:
self.push('451 Internal confusion')
return
            # Remove extraneous carriage returns and undo SMTP dot-transparency
            # per RFC 821, Section 4.5.2: a data line the client sent as
            # '..foo' is restored to '.foo'.
data = []
for text in line.split('\r\n'):
if text and text[0] == '.':
data.append(text[1:])
else:
data.append(text)
self.__data = NEWLINE.join(data)
status = self.__server.process_message(self.__peer,
self.__mailfrom,
self.__rcpttos,
self.__data)
self.__rcpttos = []
self.__mailfrom = None
self.__state = self.COMMAND
self.set_terminator('\r\n')
if not status:
self.push('250 Ok')
else:
self.push(status)
# SMTP and ESMTP commands
def smtp_HELO(self, arg):
if not arg:
self.push('501 Syntax: HELO hostname')
return
if self.__greeting:
self.push('503 Duplicate HELO/EHLO')
else:
self.__greeting = arg
self.push('250 %s' % self.__fqdn)
def smtp_NOOP(self, arg):
if arg:
self.push('501 Syntax: NOOP')
else:
self.push('250 Ok')
def smtp_QUIT(self, arg):
# args is ignored
self.push('221 Bye')
self.close_when_done()
# factored
def __getaddr(self, keyword, arg):
address = None
keylen = len(keyword)
if arg[:keylen].upper() == keyword:
address = arg[keylen:].strip()
if not address:
pass
elif address[0] == '<' and address[-1] == '>' and address != '<>':
# Addresses can be in the form <person@dom.com> but watch out
# for null address, e.g. <>
address = address[1:-1]
return address
def smtp_MAIL(self, arg):
print >> DEBUGSTREAM, '===> MAIL', arg
address = self.__getaddr('FROM:', arg) if arg else None
if not address:
self.push('501 Syntax: MAIL FROM:<address>')
return
if self.__mailfrom:
self.push('503 Error: nested MAIL command')
return
self.__mailfrom = address
print >> DEBUGSTREAM, 'sender:', self.__mailfrom
self.push('250 Ok')
def smtp_RCPT(self, arg):
print >> DEBUGSTREAM, '===> RCPT', arg
if not self.__mailfrom:
self.push('503 Error: need MAIL command')
return
address = self.__getaddr('TO:', arg) if arg else None
if not address:
self.push('501 Syntax: RCPT TO: <address>')
return
self.__rcpttos.append(address)
print >> DEBUGSTREAM, 'recips:', self.__rcpttos
self.push('250 Ok')
def smtp_RSET(self, arg):
if arg:
self.push('501 Syntax: RSET')
return
# Resets the sender, recipients, and data, but not the greeting
self.__mailfrom = None
self.__rcpttos = []
self.__data = ''
self.__state = self.COMMAND
self.push('250 Ok')
def smtp_DATA(self, arg):
if not self.__rcpttos:
self.push('503 Error: need RCPT command')
return
if arg:
self.push('501 Syntax: DATA')
return
self.__state = self.DATA
self.set_terminator('\r\n.\r\n')
self.push('354 End data with <CR><LF>.<CR><LF>')
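
# Illustrative exchange handled by SMTPChannel above (C: client, S: server):
#
#     S: 220 mailhost Python SMTP proxy version 0.2
#     C: HELO client.example.org
#     S: 250 mailhost
#     C: MAIL FROM:<alice@example.org>
#     S: 250 Ok
#     C: RCPT TO:<bob@example.org>
#     S: 250 Ok
#     C: DATA
#     S: 354 End data with <CR><LF>.<CR><LF>
#     C: (message text, terminated by a line containing a single '.')
#     S: 250 Ok
#     C: QUIT
#     S: 221 Bye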
class SMTPServer(asyncore.dispatcher):
def __init__(self, localaddr, remoteaddr):
self._localaddr = localaddr
self._remoteaddr = remoteaddr
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
# try to re-use a server port if possible
self.set_reuse_addr()
self.bind(localaddr)
self.listen(5)
print >> DEBUGSTREAM, \
'%s started at %s\n\tLocal addr: %s\n\tRemote addr:%s' % (
self.__class__.__name__, time.ctime(time.time()),
localaddr, remoteaddr)
def handle_accept(self):
conn, addr = self.accept()
print >> DEBUGSTREAM, 'Incoming connection from %s' % repr(addr)
channel = SMTPChannel(self, conn, addr)
# API for "doing something useful with the message"
def process_message(self, peer, mailfrom, rcpttos, data):
"""Override this abstract method to handle messages from the client.
peer is a tuple containing (ipaddr, port) of the client that made the
socket connection to our smtp port.
mailfrom is the raw address the client claims the message is coming
from.
rcpttos is a list of raw addresses the client wishes to deliver the
message to.
data is a string containing the entire full text of the message,
headers (if supplied) and all. It has been `de-transparencied'
according to RFC 821, Section 4.5.2. In other words, a line
containing a `.' followed by other text has had the leading dot
removed.
This function should return None, for a normal `250 Ok' response;
otherwise it returns the desired response string in RFC 821 format.
"""
raise NotImplementedError
class DebuggingServer(SMTPServer):
# Do something with the gathered message
def process_message(self, peer, mailfrom, rcpttos, data):
inheaders = 1
lines = data.split('\n')
print '---------- MESSAGE FOLLOWS ----------'
for line in lines:
# headers first
if inheaders and not line:
print 'X-Peer:', peer[0]
inheaders = 0
print line
print '------------ END MESSAGE ------------'
class PureProxy(SMTPServer):
def process_message(self, peer, mailfrom, rcpttos, data):
lines = data.split('\n')
# Look for the last header
i = 0
for line in lines:
if not line:
break
i += 1
lines.insert(i, 'X-Peer: %s' % peer[0])
data = NEWLINE.join(lines)
refused = self._deliver(mailfrom, rcpttos, data)
# TBD: what to do with refused addresses?
print >> DEBUGSTREAM, 'we got some refusals:', refused
def _deliver(self, mailfrom, rcpttos, data):
import smtplib
refused = {}
try:
s = smtplib.SMTP()
s.connect(self._remoteaddr[0], self._remoteaddr[1])
try:
refused = s.sendmail(mailfrom, rcpttos, data)
finally:
s.quit()
except smtplib.SMTPRecipientsRefused, e:
print >> DEBUGSTREAM, 'got SMTPRecipientsRefused'
refused = e.recipients
except (socket.error, smtplib.SMTPException), e:
print >> DEBUGSTREAM, 'got', e.__class__
# All recipients were refused. If the exception had an associated
            # error code, use it. Otherwise, fake it with a non-triggering
# exception code.
errcode = getattr(e, 'smtp_code', -1)
errmsg = getattr(e, 'smtp_error', 'ignore')
for r in rcpttos:
refused[r] = (errcode, errmsg)
return refused
class MailmanProxy(PureProxy):
def process_message(self, peer, mailfrom, rcpttos, data):
from cStringIO import StringIO
from Mailman import Utils
from Mailman import Message
from Mailman import MailList
# If the message is to a Mailman mailing list, then we'll invoke the
# Mailman script directly, without going through the real smtpd.
# Otherwise we'll forward it to the local proxy for disposition.
listnames = []
for rcpt in rcpttos:
local = rcpt.lower().split('@')[0]
# We allow the following variations on the theme
# listname
# listname-admin
# listname-owner
# listname-request
# listname-join
# listname-leave
parts = local.split('-')
if len(parts) > 2:
continue
listname = parts[0]
if len(parts) == 2:
command = parts[1]
else:
command = ''
if not Utils.list_exists(listname) or command not in (
'', 'admin', 'owner', 'request', 'join', 'leave'):
continue
listnames.append((rcpt, listname, command))
# Remove all list recipients from rcpttos and forward what we're not
# going to take care of ourselves. Linear removal should be fine
# since we don't expect a large number of recipients.
for rcpt, listname, command in listnames:
rcpttos.remove(rcpt)
# If there's any non-list destined recipients left,
print >> DEBUGSTREAM, 'forwarding recips:', ' '.join(rcpttos)
if rcpttos:
refused = self._deliver(mailfrom, rcpttos, data)
# TBD: what to do with refused addresses?
print >> DEBUGSTREAM, 'we got refusals:', refused
# Now deliver directly to the list commands
mlists = {}
s = StringIO(data)
msg = Message.Message(s)
# These headers are required for the proper execution of Mailman. All
        # MTAs in existence seem to add these if the original message doesn't
# have them.
if not msg.getheader('from'):
msg['From'] = mailfrom
if not msg.getheader('date'):
msg['Date'] = time.ctime(time.time())
for rcpt, listname, command in listnames:
print >> DEBUGSTREAM, 'sending message to', rcpt
mlist = mlists.get(listname)
if not mlist:
mlist = MailList.MailList(listname, lock=0)
mlists[listname] = mlist
# dispatch on the type of command
if command == '':
# post
msg.Enqueue(mlist, tolist=1)
elif command == 'admin':
msg.Enqueue(mlist, toadmin=1)
elif command == 'owner':
msg.Enqueue(mlist, toowner=1)
elif command == 'request':
msg.Enqueue(mlist, torequest=1)
elif command in ('join', 'leave'):
# TBD: this is a hack!
if command == 'join':
msg['Subject'] = 'subscribe'
else:
msg['Subject'] = 'unsubscribe'
msg.Enqueue(mlist, torequest=1)
class Options:
setuid = 1
classname = 'PureProxy'
def parseargs():
global DEBUGSTREAM
try:
opts, args = getopt.getopt(
sys.argv[1:], 'nVhc:d',
['class=', 'nosetuid', 'version', 'help', 'debug'])
except getopt.error, e:
usage(1, e)
options = Options()
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-V', '--version'):
print >> sys.stderr, __version__
sys.exit(0)
elif opt in ('-n', '--nosetuid'):
options.setuid = 0
elif opt in ('-c', '--class'):
options.classname = arg
elif opt in ('-d', '--debug'):
DEBUGSTREAM = sys.stderr
# parse the rest of the arguments
if len(args) < 1:
localspec = 'localhost:8025'
remotespec = 'localhost:25'
elif len(args) < 2:
localspec = args[0]
remotespec = 'localhost:25'
elif len(args) < 3:
localspec = args[0]
remotespec = args[1]
else:
usage(1, 'Invalid arguments: %s' % COMMASPACE.join(args))
# split into host/port pairs
i = localspec.find(':')
if i < 0:
usage(1, 'Bad local spec: %s' % localspec)
options.localhost = localspec[:i]
try:
options.localport = int(localspec[i+1:])
except ValueError:
usage(1, 'Bad local port: %s' % localspec)
i = remotespec.find(':')
if i < 0:
usage(1, 'Bad remote spec: %s' % remotespec)
options.remotehost = remotespec[:i]
try:
options.remoteport = int(remotespec[i+1:])
except ValueError:
usage(1, 'Bad remote port: %s' % remotespec)
return options
if __name__ == '__main__':
options = parseargs()
# Become nobody
if options.setuid:
try:
import pwd
except ImportError:
print >> sys.stderr, \
'Cannot import module "pwd"; try running with -n option.'
sys.exit(1)
nobody = pwd.getpwnam('nobody')[2]
try:
os.setuid(nobody)
except OSError, e:
if e.errno != errno.EPERM: raise
print >> sys.stderr, \
'Cannot setuid "nobody"; try running with -n option.'
sys.exit(1)
classname = options.classname
if "." in classname:
lastdot = classname.rfind(".")
mod = __import__(classname[:lastdot], globals(), locals(), [""])
classname = classname[lastdot+1:]
else:
import __main__ as mod
class_ = getattr(mod, classname)
proxy = class_((options.localhost, options.localport),
(options.remotehost, options.remoteport))
try:
asyncore.loop()
except KeyboardInterrupt:
pass
|
gpl-3.0
| -7,248,076,950,587,915,000
| 31.382826
| 78
| 0.553861
| false
| 4.021773
| false
| false
| false
|
Grognak/Grognaks-Mod-Manager
|
lib/killable_threading.py
|
1
|
4056
|
import logging
import threading
class KillableThread(threading.Thread):
"""A base class for threads that die on command.
Subclasses' run() loops test if self.keep_alive is False.
Instead of sleeping, they should call nap().
And any subclass method, meant to be called by other
threads, that interrupts a nap() should include wake_up().
"""
def __init__(self):
threading.Thread.__init__(self)
self.snooze_cond = threading.Condition()
self.keep_alive = True
def nap(self, seconds):
"""Sleep but stay responsive.
This sleep is preempted by a call to wake_up().
According to this site, timeouts for Queues,
Conditions, etc., can waste CPU cycles polling
excessively often (20x/sec). But you'd need
hundreds of threads to have a problem.
http://blog.codedstructure.net/2011/02/concurrent-queueget-with-timeouts-eats.html
:param seconds: How long to wait. Or None for indefinite.
"""
with self.snooze_cond:
self.snooze_cond.wait(seconds)
def wake_up(self):
"""Interrupts a nap(). (thread-safe)"""
with self.snooze_cond:
self.snooze_cond.notify()
def stop_living(self):
"""Tells this thread to die. (thread-safe)
This method is preferred over setting keep_alive directly,
for the benefit of threads that need to sleep with interruption.
"""
self.keep_alive = False
self.wake_up()
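
# A minimal sketch of the run()/nap() contract described in the docstring
# above (example subclass; nothing else in this module uses it):
class _ExamplePoller(KillableThread):
    def run(self):
        while self.keep_alive:
            # ... do one unit of work here ...
            self.nap(5)  # returns early if stop_living()/wake_up() is called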
class WrapperThread(KillableThread):
"""A thread that runs a payload func and stays killable.
It manages this by letting the payload know how to
check keep_alive and how to sleep.
"""
def __init__(self):
KillableThread.__init__(self)
self._payload = None
self._payload_args = None
self._payload_kwargs = None
self._failure_func = None
self._success_func = None
def set_payload(self, payload, *args, **kwargs):
"""Sets the payload function.
All further args will be forwarded to the payload.
This thread will inject two extra keyword args:
"keep_alive_func": Callback to check keep_alive.
No args.
"sleep_func": Callback to sleep.
A number in seconds.
So the payload must be capable of accepting those.
"""
self._payload = payload
self._payload_args = args
self._payload_kwargs = kwargs
self._payload_kwargs["keep_alive_func"] = self.keeping_alive
self._payload_kwargs["sleep_func"] = self.nap
def set_failure_func(self, failure_func):
"""Sets a callback to run on failure.
It will be given 1 arg: an exception.
"""
self._failure_func = failure_func
    def set_success_func(self, success_func):
        """Sets a callback to run on success.
        It will be given 1 arg: whatever the payload returned.
        """
        self._success_func = success_func
def run(self):
result = None
if (self._payload is not None):
try:
result = self._payload(*self._payload_args, **self._payload_kwargs)
except (Exception) as err:
if (self.keeping_alive()):
if (self._failure_func is not None):
try:
self._failure_func(err)
except (Exception) as err:
logging.exception(err)
self.keep_alive = False
else:
if (self.keeping_alive()):
if (self._success_func is not None):
try:
self._success_func(result)
except (Exception) as err:
logging.exception(err)
self.keep_alive = False
def keeping_alive(self):
"""Returns True if this thread should continue, False otherwise."""
return self.keep_alive
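
# Illustrative use of WrapperThread (hypothetical payload): set_payload()
# injects the two callbacks named in its docstring, so the payload must
# accept them as keyword arguments.
#
#     def fetch(url, keep_alive_func, sleep_func):
#         while keep_alive_func():
#             ...             # poll url
#             sleep_func(2.0)
#
#     t = WrapperThread()
#     t.set_payload(fetch, "http://example.com/")
#     t.start()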
|
gpl-3.0
| -4,480,165,473,395,033,000
| 32.520661
| 90
| 0.571006
| false
| 4.337968
| false
| false
| false
|
flipjack/misrutas
|
project/app/views.py
|
1
|
22105
|
# -*- encoding: utf-8 -*-
from django.shortcuts import render, redirect
from django.contrib.auth import logout
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.views.decorators.csrf import csrf_exempt
import json
from forms import *
from models import *
from open_facebook import OpenFacebook
from allauth.socialaccount.models import SocialToken
import datetime
from django.db.models import Q
from django.contrib.sites.models import Site
from datetime import datetime
from gmaps import Geocoding
#from googleplaces import GooglePlaces, types, lang
from django.core.serializers.base import Serializer as BaseSerializer
from django.core.serializers.python import Serializer as PythonSerializer
from django.core.serializers.json import Serializer as JsonSerializer
from django.utils import six
class ExtBaseSerializer(BaseSerializer):
""" Abstract serializer class; everything is the same as Django's base except from the marked lines """
def serialize(self, queryset, **options):
self.options = options
self.stream = options.pop('stream', six.StringIO())
self.selected_fields = options.pop('fields', None)
self.selected_props = options.pop('props', None)
self.use_natural_keys = options.pop('use_natural_keys', False)
self.use_natural_foreign_keys = options.pop('use_natural_foreign_keys', False)
self.use_natural_primary_keys = options.pop('use_natural_primary_keys', False)
self.start_serialization()
self.first = True
for obj in queryset:
self.start_object(obj)
concrete_model = obj._meta.concrete_model
for field in concrete_model._meta.local_fields:
if field.serialize:
if field.rel is None:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_field(obj, field)
else:
if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
self.handle_fk_field(obj, field)
for field in concrete_model._meta.many_to_many:
if field.serialize:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_m2m_field(obj, field)
            for field in (self.selected_props or []):
self.handle_prop(obj, field)
self.end_object(obj)
if self.first:
self.first = False
self.end_serialization()
return self.getvalue()
def handle_prop(self, obj, field):
self._current[field] = getattr(obj, field)()
class ExtPythonSerializer(ExtBaseSerializer, PythonSerializer):
pass
class ExtJsonSerializer(ExtPythonSerializer, JsonSerializer):
pass
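
# Illustrative call (mirrors the real use in search_friends below): 'props'
# lists zero-argument model methods whose return values are serialized
# alongside the regular fields.
#
#     ExtJsonSerializer().serialize(User.objects.all(),
#                                   fields=['first_name', 'last_name'],
#                                   props=['picture'])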
def user_logout(request):
logout(request)
return HttpResponseRedirect(reverse('landing'))
def landing(request):
if not request.user.is_anonymous():
return HttpResponseRedirect(reverse('index'))
return render(request, 'app/login.html',locals())
def index(request):
return render(request, 'app/index.html',locals())
@csrf_exempt
def profile(request, user, ide):
user = User.objects.filter(username=user, ide=ide)[0]
form = UserForm(instance=request.user)
form_2 = FileForm()
form_3 = My_groupForm()
form_4 = My_vehiclesForm()
my_groups = My_groups.objects.filter(user=request.user)
if request.method == "POST":
if 'my_info' in request.POST:
form = UserForm(request.POST, instance=request.user)
if form.is_valid():
form.save()
messages.success(request, 'Se modificó la información de tu perfil')
else:
modal_my_info = 'open'
messages.error(request, 'Hay errores con tu formulario')
if 'my_files' in request.POST:
form_2 = FileForm(request.POST, request.FILES)
if form_2.is_valid():
archive = form_2.save(commit=False)
archive.user = request.user
archive.save()
form_2 = FileForm()
messages.success(request, 'Se guardo exitosamente tu archivo')
else:
modal_my_files = 'open'
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
if 'my_vehicles' in request.POST:
form_4 = My_vehiclesForm(request.POST, request.FILES)
if form_4.is_valid():
obj = form_4.save(commit=False)
obj.user = request.user
obj.save()
form_4 = My_vehiclesForm()
messages.success(request, 'Se guardo exitosamente tu vehículo')
else:
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
if 'transport' in request.POST:
transport = request.POST.get('transport')
interest = Interests.objects.get(pk=transport)
request.user.interest = interest
request.user.save()
messages.success(request, 'Se cambió tu transporte principal')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_file' in request.POST:
ide = request.POST.get('delete_file')
Documents.objects.get(ide=ide).delete()
messages.success(request, 'Se eliminó correctamente tu archivo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_vehicle' in request.POST:
ide = request.POST.get('delete_vehicle')
My_vehicles.objects.get(ide=ide).delete()
messages.success(request, 'Se eliminó correctamente tu vehículo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'button_action' in request.POST:
accion = request.POST.get('accion')
if accion == "follow_friend":
if user in request.user.friend_request.all():
request.user.friends.add(user)
user.friends.add(request.user)
messages.success(request, 'Se agregó amigo a tu lista')
request.user.friend_request.remove(user)
else:
user.friend_request.add(request.user)
messages.success(request, 'Se envió tu solicitud de amistad')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "delete_friend":
request.user.friends.remove(user)
user.friends.remove(request.user)
messages.success(request, 'Se eliminó este amigo de tu lista')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "cancel_request":
user.friend_request.remove(request.user)
messages.success(request, 'Se canceló tu solicitud')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if accion == "negate_request":
request.user.friend_request.remove(user)
messages.success(request, 'Se canceló la solicitud')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'new_group' in request.POST:
form3 = My_groupForm(request.POST)
if form3.is_valid():
model = form3.save(commit = False)
model.user = request.user
model.save()
messages.success(request, 'Se agregó un nuevo grupo')
else:
modal_my_groups = 'open'
messages.error(request, 'Hay errores con tu formulario')
interests = Interests.objects.all()
user = User.objects.filter(username=user, ide=ide)[0]
my_vehicles = My_vehicles.objects.filter(user=request.user)
return render(request, 'app/profile.html',locals())
def events(request):
return render(request, 'app/events.html',locals())
def my_routes(request):
events = Events.objects.all()
return render(request, 'app/my_routes.html',locals())
def interest(request, interest, id, ide):
waypoint = Waypoint_event.objects.filter(id=id, ide=ide)[0]
return render(request, 'app/interest.html',locals())
def my_events(request):
form = MassiveForm()
now = datetime.now()
if request.POST:
form = MassiveForm(request.POST)
if form.is_valid():
massive = form.save()
massive.administrators.add(request.user)
massive.guests.add(request.user)
massive.confirmeds.add(request.user)
massive_itinerary = Massive_itinerary()
massive_itinerary.name = massive.name
massive_itinerary.start_date = massive.start_date
massive_itinerary.start_time = massive.start_time
massive_itinerary.place = massive.place_point
massive_itinerary.place_lat = massive.place_point_lat
massive_itinerary.place_lng = massive.place_point_lng
massive_itinerary.event = massive
massive_itinerary.description = ''
massive_itinerary.user = request.user
massive_itinerary.principal = True
massive_itinerary.save()
return HttpResponseRedirect( reverse('event_details', args=(massive.slug, massive.ide)) )
guests = Massive.objects.filter(guests = request.user, start_date__gte = datetime.now())
confirmeds = Massive.objects.filter(confirmeds = request.user, start_date__gte = datetime.now())
all_events = Massive.objects.filter(confirmeds = request.user)
return render(request, 'app/my_events.html',locals())
@csrf_exempt
def event_details(request, slug, ide):
massive = Massive.objects.filter(slug=slug, ide=ide)[0]
form2 = Massive_itineraryForm()
form3 = Waypoint_eventForm()
if request.POST:
if 'assist' in request.POST:
event = Massive.objects.filter(id=request.POST.get('ide'))[0]
user = User.objects.filter(id=request.POST.get('user'))[0]
event.confirmeds.add(user)
event.save()
messages.success(request, 'Se actualizo la información de tu evento')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'update_description' in request.POST:
massive.description = request.POST.get('code')
massive.save()
messages.success(request, 'Se actualizo la información de tu evento')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'asign_route' in request.POST:
form3 = Waypoint_eventForm(request.POST)
if form3.is_valid():
point = form3.save(commit=False)
massive_itinerary = Massive_itinerary.objects.filter(event = massive).exclude(place='').order_by('start_date','start_time')
events = Events()
events.name = 'Ruta de evento, ' + massive.name
events.start_date = point.start_date
events.start_time = point.start_time
events.end_date = massive.end_date
events.event_type = "Secreto"
events.massive = massive
events.save()
events.administrators.add(request.user)
count = 1
waypoint_event = Waypoint_event()
waypoint_event.point = point.point
waypoint_event.point_lat = point.point_lat
waypoint_event.point_lng = point.point_lng
waypoint_event.user = request.user
waypoint_event.order = count
waypoint_event.number = count
waypoint_event.event = events
waypoint_event.save()
count += 1
for obj in massive_itinerary:
waypoint_event = Waypoint_event()
waypoint_event.point = obj.place
waypoint_event.point_lat = obj.place_lat
waypoint_event.point_lng = obj.place_lng
waypoint_event.user = request.user
waypoint_event.order = count
waypoint_event.number = count
waypoint_event.event = events
waypoint_event.save()
count += 1
messages.success(request, 'Se asocio una nueva ruta a tu evento')
return HttpResponseRedirect( reverse('route_details', args=(events.id, events.ide)) )
if 'update_event' in request.POST:
form = Massive2Form(request.POST, instance = massive)
if form.is_valid():
massive = form.save()
massive_itinerary = Massive_itinerary.objects.filter(principal=True, event=massive)[0]
massive_itinerary.name = massive.name
massive_itinerary.start_date = massive.start_date
massive_itinerary.start_time = massive.start_time
massive_itinerary.place = massive.place_point
massive_itinerary.place_lat = massive.place_point_lat
massive_itinerary.place_lng = massive.place_point_lng
massive_itinerary.event = massive
massive_itinerary.description = ''
massive_itinerary.user = request.user
massive_itinerary.principal = True
massive_itinerary.save()
messages.success(request, 'Se actualizo la información de tu evento')
if 'new_massive_itinerary' in request.POST:
form2 = Massive_itineraryForm(request.POST)
if form2.is_valid():
obj = form2.save(commit=False)
obj.event = massive
obj.user = request.user
obj.save()
messages.success(request, 'Se agregó un nuevo registro al itinerario')
now = datetime.now()
massive = Massive.objects.filter(slug=slug, ide=ide)[0]
form = Massive2Form(instance = massive)
massive_itinerary = Massive_itinerary.objects.filter(event = massive).order_by('start_date','start_time')
site = Site.objects.all()[0]
return render(request, 'app/event_details.html',locals())
def event_itinerary(request, slug, ide):
massive = Massive_itinerary.objects.filter( event__slug = slug, ide = ide)[0]
form2 = Massive_itineraryForm(instance = massive)
massive = Massive_itinerary.objects.filter( event__slug = slug, ide = ide)[0]
if request.POST:
form2 = Massive_itineraryForm(request.POST)
if form2.is_valid():
form2.save()
messages.success(request, 'Se modificó el itinerario')
site = Site.objects.all()[0]
return render(request, 'app/event_itinerary.html',locals())
def new_event(request):
form = EventsForm()
if request.POST:
form = EventsForm(request.POST)
if form.is_valid():
event = form.save(commit=False)
waypoint_event = Waypoint_event()
waypoint_event.name = event.meeting_point
waypoint_event.point = event.meeting_point
waypoint_event.point_lat = event.meeting_point_lat
waypoint_event.point_lng = event.meeting_point_lng
waypoint_event.user = request.user
waypoint_event.order = 1
waypoint_event.number = 1
waypoint_event_ = Waypoint_event()
waypoint_event_.name = event.destination_point
waypoint_event_.point = event.destination_point
waypoint_event_.point_lat = event.destination_point_lat
waypoint_event_.point_lng = event.destination_point_lng
waypoint_event_.user = request.user
waypoint_event_.order = 2
waypoint_event_.number = 2
event.meeting_point = None
event.meeting_point_lat = None
event.meeting_point_lng = None
event.destination_point = None
event.destination_point_lat = None
event.destination_point_lng = None
event.save()
event.administrators.add(request.user)
waypoint_event.event = event
waypoint_event.save()
waypoint_event_.event = event
waypoint_event_.save()
return HttpResponseRedirect( reverse('route_details', args=(event.id, event.ide)) )
else:
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
return render(request, 'app/new_event.html',locals())
@csrf_exempt
def route_planing(request, id, ide):
def new_last_point(event, point, lat, lng):
waypoint_event = Waypoint_event()
waypoint_event.point = point
waypoint_event.name = point
waypoint_event.point_lat = lat
waypoint_event.point_lng = lng
waypoint_event.user = request.user
event_end_point = event.end_point()
waypoint_event.order = event_end_point.order
waypoint_event.number = event_end_point.number
waypoint_event.event = event
waypoint_event.save()
event_end_point.order += 1
event_end_point.number += 1
event_end_point.save()
event = Events.objects.filter(id = id, ide = ide)[0]
form1a = Events3aForm(instance = event)
form2a = Events4aForm(instance = event)
form3a = Events5aForm(instance = event)
form1b = Events3bForm(instance = event)
form2b = Events4bForm(instance = event)
form3b = Events5bForm(instance = event)
if request.method == "POST":
if 'save_route' in request.POST:
form1a = Events3aForm(request.POST, instance = event)
form2a = Events4aForm(request.POST, instance = event)
form3a = Events5aForm(request.POST, instance = event)
form1b = Events3bForm(request.POST, instance = event)
form2b = Events4bForm(request.POST, instance = event)
form3b = Events5bForm(request.POST, instance = event)
if form1a.is_valid() and form2a.is_valid() and form3a.is_valid() and form1b.is_valid() and form2b.is_valid() and form3b.is_valid():
form1a.save()
form2a.save()
form3a.save()
form1b.save()
form2b.save()
form3b.save()
return HttpResponseRedirect( reverse('route_details', args=(event.id, event.ide)) )
if 'new_point_form' in request.POST:
new_last_point(Events.objects.filter(id = id, ide = ide)[0], request.POST.get('point'), request.POST.get('point_lat'), request.POST.get('point_lng'))
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'new_point_click' in request.POST:
event = Events.objects.filter(id = id, ide = ide)[0]
api = Geocoding()
            nombre = ''
            entra = False
            for direccion in api.reverse( float(request.POST.get('lat')), float(request.POST.get('lng')))[0]['address_components']:
                entra = True
                nombre += direccion['long_name']
                nombre += ', '
            if entra:
                # strip the trailing ', ' appended by the loop
                nombre = nombre[:-2]
new_last_point(Events.objects.filter(id = id, ide = ide)[0], nombre, request.POST.get('lat'), request.POST.get('lng'))
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'del_waypoint' in request.POST:
event = Events.objects.filter(id = id, ide = ide)[0]
Waypoint_event.objects.filter(id=request.POST.get('del_waypoint'))[0].delete()
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
order = 1
number = 1
for obj in waypoints:
obj.order = order
obj.number = number
obj.save()
order += 1
number += 1
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'order' in request.POST:
data = json.loads(request.POST.get('order'))
order = 1
number = 1
for obj in data:
waypoint = Waypoint_event.objects.filter(id=obj)[0]
waypoint.order = order
waypoint.number = number
waypoint.save()
order += 1
number += 1
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
event = Events.objects.filter(id = id, ide = ide)[0]
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
return render(request, 'app/route_planing.html',locals())
def route_details(request, id, ide):
site = Site.objects.all()[0].domain
event = Events.objects.filter(id = id, ide = ide)[0]
waypoints = Waypoint_event.objects.filter(event=event).order_by('order')
form = Events2Form(instance=event)
if request.method == "POST":
form = Events2Form(request.POST, instance=event)
if form.is_valid():
form.save()
messages.success(request, 'Se actualizaron los datos de la orden')
else:
print form.errors
messages.error(request, 'Hay datos incorrectos en la orden')
return render(request, 'app/route_details.html',locals())
@csrf_exempt
def invite_friends(request, slug, ide):
if request.method == "POST":
if 'selected_friends' in request.POST:
massive = Massive.objects.filter(slug = slug, ide = ide)[0]
ide = request.POST.get('ide')
typeobj = request.POST.get('typeobj')
if typeobj == 'grupo':
group = My_groups.objects.filter(id=ide)[0]
for user in group.friends.all():
massive.guests.add(user)
if typeobj == 'friend':
user = User.objects.filter(id=ide)[0]
massive.guests.add(user)
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
massive = Massive.objects.filter(slug = slug, ide = ide)[0]
my_groups = My_groups.objects.filter(user=request.user)
return render(request, 'app/invite_friends.html',locals())
@csrf_exempt
def friends(request):
if request.method == "POST":
if 'email' in request.POST:
email = request.POST.get('email')
user = User.objects.filter(email = email)
if user:
if user[0] == request.user:
data = {"ok":"false"}
messages.error(request, 'No puedes seguirte a ti mismo')
else:
data = {"ok": "true"}
request.user.friends.add(user[0])
messages.success(request, 'Ahora estas siguiendo a: '+ user[0].first_name+', '+user[0].last_name )
else:
data = {"ok":"false"}
messages.error(request, 'No hay ningún usuario con este email asociado')
return HttpResponse(json.dumps(data),content_type="application/json")
return render(request, 'app/friends.html',locals())
def details(request, slug, id):
return render(request, 'app/details.html',locals())
@csrf_exempt
def group(request, slug, ide):
group = My_groups.objects.filter(user=request.user, slug=slug, ide=ide)[0]
form = My_group_editForm(instance = group)
if request.method == "POST":
if 'add_friend' in request.POST:
print request.POST.get('add_friend')
friend = User.objects.filter(id =request.POST.get('add_friend'))[0]
group.friends.add(friend)
messages.success(request, 'Se agregó amigo al grupo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'delete_friend' in request.POST:
friend = User.objects.filter(id =request.POST.get('delete_friend'))[0]
group.friends.remove(friend)
messages.success(request, 'Se eliminó amigo del grupo')
return HttpResponse(json.dumps({"data": "true"}),content_type="application/json")
if 'info_group' in request.POST:
form = My_group_editForm(request.POST, instance = group)
if form.is_valid():
form.save()
messages.success(request, 'Grupo editado con éxito')
else:
modal_info = 'open'
messages.error(request, 'Algunos datos en tu formulario estan incorrectos')
return render(request, 'app/group.html',locals())
@csrf_exempt
def search_friends(request):
data = ExtJsonSerializer().serialize(User.objects.filter( Q(complete_name__icontains=request.POST.get('friend_value')) | Q(email=request.POST.get('friend_value')) ), fields=['first_name', 'last_name', 'date_joined', 'username', 'ide'], props=['picture'])
return HttpResponse(data, content_type="application/json")
def policy(request):
return render(request, 'privacy.html',locals())
|
mit
| -6,006,489,336,733,788,000
| 37.272097
| 256
| 0.700765
| false
| 3.132785
| false
| false
| false
|
flavour/ifrc_qa
|
modules/s3/s3translate.py
|
1
|
66270
|
# -*- coding: utf-8 -*-
""" Translation API
@copyright: 2012-2016 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import os
import parser
import token
from gluon import current
from gluon.languages import read_dict, write_dict
from gluon.storage import Storage
from s3fields import S3ReusableField
"""
    List of classes with descriptions:
TranslateAPI : API class to retrieve strings and files by module
TranslateGetFiles : Class to traverse the eden directory and
categorize files based on module
TranslateParseFiles : Class to extract strings to translate from code files
TranslateReadFiles : Class to open a file, read its contents and build
a parse tree (for .py files) or use regex
(for html/js files) to obtain a list of strings
by calling methods from TranslateParseFiles
Strings : Class to manipulate strings and their files
Pootle : Class to synchronise a Pootle server's translation
with the local one
TranslateReportStatus : Class to report the translated percentage of each
language file for each module. It also updates
these percentages as and when required
"""
# =============================================================================
class TranslateAPI:
"""
API class for the Translation module to get
files, modules and strings individually
"""
core_modules = ("auth", "default", "errors", "appadmin")
def __init__(self):
self.grp = TranslateGetFiles()
self.grp.group_files(current.request.folder)
# ---------------------------------------------------------------------
@staticmethod
def get_langcodes():
""" Return a list of language codes """
lang_list = []
langdir = os.path.join(current.request.folder, "languages")
files = os.listdir(langdir)
for f in files:
lang_list.append(f[:-3])
return lang_list
# ---------------------------------------------------------------------
def get_modules(self):
""" Return a list of modules """
return self.grp.modlist
# ---------------------------------------------------------------------
def get_strings_by_module(self, module):
""" Return a list of strings corresponding to a module """
grp = self.grp
d = grp.d
if module in d.keys():
fileList = d[module]
else:
current.log.warning("Module '%s' doesn't exist!" % module)
return []
modlist = grp.modlist
strings = []
sappend = strings.append
R = TranslateReadFiles()
findstr = R.findstr
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, "ALL", modlist)
elif f.endswith(".html") == True or \
f.endswith(".js") == True:
tmpstr = R.read_html_js(f)
else:
tmpstr = []
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
# Handle "special" files separately
fileList = d["special"]
for f in fileList:
if f.endswith(".py") == True:
tmpstr = findstr(f, module, modlist)
for s in tmpstr:
sappend(("%s:%s" % (f, str(s[0])), s[1]))
return strings
# ---------------------------------------------------------------------
def get_strings_by_file(self, filename):
""" Return a list of strings in a given file """
if os.path.isfile(filename):
filename = os.path.abspath(filename)
else:
print "'%s' is not a valid file path!" % filename
return []
R = TranslateReadFiles()
strings = []
sappend = strings.append
tmpstr = []
if filename.endswith(".py") == True:
tmpstr = R.findstr(filename, "ALL", self.grp.modlist)
elif filename.endswith(".html") == True or \
filename.endswith(".js") == True:
tmpstr = R.read_html_js(filename)
else:
print "Please enter a '.py', '.js' or '.html' file path"
return []
for s in tmpstr:
sappend(("%s:%s" % (filename, str(s[0])), s[1]))
return strings
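
# Illustrative use of TranslateAPI (module name and file path are examples
# only):
#
#     api = TranslateAPI()
#     api.get_langcodes()                           # e.g. ["es", "fr", ...]
#     api.get_strings_by_module("inv")              # [("path:lineno", "string"), ...]
#     api.get_strings_by_file("controllers/inv.py")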
# =============================================================================
class TranslateGetFiles:
""" Class to group files by modules """
def __init__(self):
"""
Set up a dictionary to hold files belonging to a particular
module with the module name as the key. Files which contain
strings belonging to more than one module are grouped under
the "special" key.
"""
# Initialize to an empty list for each module
d = {}
modlist = self.get_module_list(current.request.folder)
for m in modlist:
d[m] = []
# List of files belonging to 'core' module
d["core"] = []
# 'special' files which contain strings from more than one module
d["special"] = []
self.d = d
self.modlist = modlist
# ---------------------------------------------------------------------
@staticmethod
def get_module_list(dir):
"""
Returns a list of modules using files in /controllers/
            as a point of reference
"""
mod = []
mappend = mod.append
cont_dir = os.path.join(dir, "controllers")
mod_files = os.listdir(cont_dir)
for f in mod_files:
if f[0] != ".":
# Strip extension
mappend(f[:-3])
# Add Modules which aren't in controllers
mod += ["support",
"translate",
]
return mod
# ---------------------------------------------------------------------
def group_files(self, currentDir, curmod="", vflag=0):
"""
Recursive function to group Eden files into respective modules
"""
path = os.path
currentDir = path.abspath(currentDir)
base_dir = path.basename(currentDir)
if base_dir in (".git",
"docs",
"languages",
"private",
"templates", # Added separately
"tests",
"uploads",
):
# Skip
return
# If current directory is '/views', set vflag
if base_dir == "views":
vflag = 1
d = self.d
files = os.listdir(currentDir)
for f in files:
if f.startswith(".") or f.endswith(".pyc") or f in ("test.py", "tests.py"):
continue
curFile = path.join(currentDir, f)
if path.isdir(curFile):
# If the current directory is /views,
# categorize files based on the directory name
if vflag:
self.group_files(curFile, f, vflag)
else:
self.group_files(curFile, curmod, vflag)
else:
# If in /appname/views, categorize by parent directory name
if vflag:
base = curmod
# Categorize file as "special" as it contains strings
# belonging to various modules
elif f in ("s3menus.py",
"s3cfg.py",
"000_config.py",
"config.py",
"menus.py"):
base = "special"
else:
# Remove extension ('.py')
base = path.splitext(f)[0]
# If file has "s3" as prefix, remove "s3" to get module name
if "s3" in base:
base = base[2:]
# If file is inside /models and file name is
# of the form var_module.py, remove the "var_" prefix
#elif base_dir == "models" and "_" in base:
# base = base.split("_")[1]
# If base refers to a module, append to corresponding list
if base in d.keys():
d[base].append(curFile)
else:
# Append it to "core" files list
d["core"].append(curFile)
# =============================================================================
class TranslateParseFiles:
"""
Class to extract strings to translate from code files
"""
def __init__(self):
""" Initializes all object variables """
self.cflag = 0 # To indicate if next element is a class
self.fflag = 0 # To indicate if next element is a function
self.sflag = 0 # To indicate 'T' has just been found
self.tflag = 0 # To indicate we are currently inside T(...)
self.mflag = 0 # To indicate we are currently inside M(...)
self.bracket = 0 # Acts as a counter for parenthesis in T(...)
self.outstr = "" # Collects all the data inside T(...)
self.class_name = "" # Stores the current class name
self.func_name = "" # Stores the current function name
self.mod_name = "" # Stores module that the string may belong to
self.findent = -1 # Stores indentation level in menus.py
# ---------------------------------------------------------------------
def parseList(self, entry, tmpstr):
""" Recursive function to extract strings from a parse tree """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseList = self.parseList
for element in entry:
parseList(element, tmpstr)
else:
if token.tok_name[id] == "STRING":
tmpstr.append(value)
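
    # Illustrative shape of a parse-tree entry (from parser.st2list with
    # line_info=1): nested [token_id, value_or_children, ...] lists whose
    # terminal nodes carry a line number, e.g. [3, "'Save'", 42] for a
    # STRING token on line 42.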
# ---------------------------------------------------------------------
def parseConfig(self, spmod, strings, entry, modlist):
""" Function to extract strings from config.py / 000_config.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
# If the element is not a root node,
# go deeper into the tree using dfs
if isinstance(value, list):
parseConfig = self.parseConfig
for element in entry:
parseConfig(spmod, strings, element, modlist)
else:
if self.fflag == 1 and token.tok_name[id] == "NAME":
# Here, func_name stores the module_name of the form
# deployment.settings.module_name.variable
self.func_name = value
self.fflag = 0
# Set flag to store the module name from
# deployment_settings.module_name
elif token.tok_name[id] == "NAME" and \
(value == "deployment_settings" or \
value == "settings"):
self.fflag = 1
# Get module name from deployment_setting.modules list
elif self.tflag == 0 and self.func_name == "modules" and \
token.tok_name[id] == "STRING":
if value[1:-1] in modlist:
self.mod_name = value[1:-1]
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
# If sflag is set and '(' is found, set tflag
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# Check if inside 'T()'
elif self.tflag == 1:
# If '(' is encountered, append it to outstr
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
# If it's not the last ')' of 'T()',
# append to outstr
if self.bracket > 0:
self.outstr += ")"
# If it's the last ')', add string to list
else:
if spmod == "core":
if self.func_name != "modules" and \
self.func_name not in modlist:
strings.append((entry[2], self.outstr))
elif (self.func_name == "modules" and \
self.mod_name == spmod) or \
(self.func_name == spmod):
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
# If we are inside 'T()', append value to outstr
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseS3cfg(self, spmod, strings, entry, modlist):
""" Function to extract the strings from s3cfg.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseS3cfg = self.parseS3cfg
for element in entry:
parseS3cfg(spmod, strings, element, modlist)
else:
# If value is a function name, store it in func_name
if self.fflag == 1:
self.func_name = value
self.fflag = 0
# If value is 'def', set fflag to store func_name next
elif token.tok_name[id] == "NAME" and value == "def":
self.fflag = 1
# If 'T' is encountered, set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If core module is requested
if spmod == "core":
# If extracted data doesn't belong
# to any other module, append to list
if "_" not in self.func_name or \
self.func_name.split("_")[1] not in modlist:
strings.append((entry[2], self.outstr))
# If 'module' in 'get_module_variable()'
# is the requested module, append to list
elif "_" in self.func_name and \
self.func_name.split("_")[1] == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
# ---------------------------------------------------------------------
def parseMenu(self, spmod, strings, entry, level):
""" Function to extract the strings from menus.py """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseMenu = self.parseMenu
for element in entry:
parseMenu(spmod, strings, element, level + 1)
else:
# If value is a class name, store it in class_name
if self.cflag == 1:
self.class_name = value
self.cflag = 0
# If value is 'class', set cflag to store class name next
elif token.tok_name[id] == "NAME" and value == "class":
self.cflag = 1
elif self.fflag == 1:
# Here func_name is used to store the function names
# which are in 'S3OptionsMenu' class
self.func_name = value
self.fflag = 0
# If value is "def" and it's the first function in the
# S3OptionsMenu class or its indentation level is equal
# to the first function in 'S3OptionsMenu class', then
# set fflag and store the indentation level in findent
elif token.tok_name[id] == "NAME" and value == "def" and \
(self.findent == -1 or level == self.findent):
if self.class_name == "S3OptionsMenu":
self.findent = level
self.fflag = 1
else:
self.func_name = ""
# If current element is 'T', set sflag
elif token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T()', extract the data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
# If the requested module is 'core' and
# extracted data doesn't lie inside the
# S3OptionsMenu class, append it to list
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], self.outstr))
# If the function name (in S3OptionsMenu class)
# is equal to the module requested,
# then append it to list
elif self.func_name == spmod:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# Get strings inside 'M()'
# If value is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If mflag is set and argument inside is a string,
# append it to list
if token.tok_name[id] == "STRING":
if spmod == "core":
if self.func_name == "":
strings.append((entry[2], value))
elif self.func_name == spmod:
strings.append((entry[2], value))
# If current argument in 'M()' is of type arg = var
# or if ')' is found, unset mflag
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# ---------------------------------------------------------------------
def parseAll(self, strings, entry):
""" Function to extract all the strings from a file """
if isinstance(entry, list):
id = entry[0]
value = entry[1]
if isinstance(value, list):
parseAll = self.parseAll
for element in entry:
parseAll(strings, element)
else:
# If current element is 'T', set sflag
if token.tok_name[id] == "NAME" and value == "T":
self.sflag = 1
elif self.sflag == 1:
if token.tok_name[id] == "LPAR":
self.tflag = 1
self.bracket = 1
self.sflag = 0
# If inside 'T', extract data accordingly
elif self.tflag == 1:
if token.tok_name[id] == "LPAR":
self.bracket += 1
if self.bracket > 1:
self.outstr += "("
elif token.tok_name[id] == "RPAR":
self.bracket -= 1
if self.bracket > 0:
self.outstr += ")"
else:
strings.append((entry[2], self.outstr))
self.outstr = ""
self.tflag = 0
elif self.bracket > 0:
self.outstr += value
else:
# If current element is 'M', set mflag
if token.tok_name[id] == "NAME" and value == "M":
self.mflag = 1
elif self.mflag == 1:
# If inside 'M()', extract string accordingly
if token.tok_name[id] == "STRING":
strings.append((entry[2], value))
elif token.tok_name[id] == "EQUAL" or \
token.tok_name[id] == "RPAR":
self.mflag = 0
# =============================================================================
class TranslateReadFiles:
""" Class to read code files """
# ---------------------------------------------------------------------
@staticmethod
def findstr(fileName, spmod, modlist):
"""
Using the methods in TranslateParseFiles to extract the strings
fileName -> the file to be used for extraction
spmod -> the required module
modlist -> a list of all modules in Eden
"""
try:
f = open(fileName)
except:
path = os.path.split(__file__)[0]
fileName = os.path.join(path, fileName)
try:
f = open(fileName)
except:
return
# Read all contents of file
fileContent = f.read()
f.close()
# Remove CL-RF and NOEOL characters
fileContent = "%s\n" % fileContent.replace("\r", "")
try:
st = parser.suite(fileContent)
except:
return []
# Create a parse tree list for traversal
stList = parser.st2list(st, line_info=1)
P = TranslateParseFiles()
# List which holds the extracted strings
strings = []
if spmod == "ALL":
# If all strings are to be extracted, call ParseAll()
parseAll = P.parseAll
for element in stList:
parseAll(strings, element)
else:
# Handle cases for special files which contain
# strings belonging to different modules
appname = current.request.application
fileName = os.path.basename(fileName)
if fileName == "s3menus.py":
parseMenu = P.parseMenu
for element in stList:
parseMenu(spmod, strings, element, 0)
elif fileName == "s3cfg.py":
parseS3cfg = P.parseS3cfg
for element in stList:
parseS3cfg(spmod, strings, element, modlist)
elif fileName in ("000_config.py", "config.py"):
parseConfig = P.parseConfig
for element in stList:
parseConfig(spmod, strings, element, modlist)
# Extract strings from deployment_settings.variable() calls
final_strings = []
fsappend = final_strings.append
settings = current.deployment_settings
for (loc, s) in strings:
if s[0] != '"' and s[0] != "'":
# This is a variable
if "settings." in s:
# Convert the call to a standard form
s = s.replace("current.deployment_settings", "settings")
s = s.replace("()", "")
l = s.split(".")
obj = settings
# Get the actual value
for atr in l[1:]:
try:
obj = getattr(obj, atr)()
except:
current.log.warning("Can't find this deployment_setting, maybe a crud.settings", atr)
else:
s = obj
fsappend((loc, s))
else:
#@ToDo : Get the value of non-settings variables
pass
else:
fsappend((loc, s))
return final_strings
# ---------------------------------------------------------------------
@staticmethod
def read_html_js(filename):
"""
Function to read and extract strings from html/js files
using regular expressions
"""
import re
PY_STRING_LITERAL_RE = r'(?<=[^\w]T\()(?P<name>'\
+ r"[uU]?[rR]?(?:'''(?:[^']|'{1,2}(?!'))*''')|"\
+ r"(?:'(?:[^'\\]|\\.)*')|"\
+ r'(?:"""(?:[^"]|"{1,2}(?!"))*""")|'\
+ r'(?:"(?:[^"\\]|\\.)*"))'
regex_trans = re.compile(PY_STRING_LITERAL_RE, re.DOTALL)
findall = regex_trans.findall
html_js_file = open(filename)
linecount = 0
strings = []
sappend = strings.append
for line in html_js_file:
linecount += 1
occur = findall(line)
for s in occur:
sappend((linecount, s))
html_js_file.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def get_user_strings():
"""
Function to return the list of user-supplied strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
strings = []
COMMENT = "User supplied"
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
line = line.replace("\n", "").replace("\r", "")
strings.append((COMMENT, line))
f.close()
return strings
# ---------------------------------------------------------------------
@staticmethod
def merge_user_strings_file(newstrings):
"""
Function to merge the existing file of user-supplied strings
with newly uploaded strings
"""
user_file = os.path.join(current.request.folder, "uploads",
"user_strings.txt")
oldstrings = []
oappend = oldstrings.append
if os.path.exists(user_file):
f = open(user_file, "r")
for line in f:
oappend(line)
f.close()
# Append user strings if not already present
f = open(user_file, "a")
for s in newstrings:
if s not in oldstrings:
f.write(s)
f.close()
# ---------------------------------------------------------------------
@staticmethod
def get_database_strings(all_template_flag):
"""
Function to get database strings from csv files
which are to be considered for translation.
"""
from s3import import S3BulkImporter
# List of database strings
database_strings = []
dappend = database_strings.append
template_list = []
base_dir = current.request.folder
path = os.path
# If all templates flag is set we look in all templates' tasks.cfg file
if all_template_flag:
template_dir = path.join(base_dir, "modules", "templates")
files = os.listdir(template_dir)
# template_list will have the list of all templates
tappend = template_list.append
for f in files:
curFile = path.join(template_dir, f)
baseFile = path.basename(curFile)
if path.isdir(curFile):
tappend(baseFile)
else:
# Set current template.
template_list.append(current.deployment_settings.base.template)
# List of fields which don't have an S3ReusableFiled defined but we
# know we wish to translate
# @ToDo: Extend to dict if we need to support some which don't just translate the name
always_translate = ("project_beneficiary_type_id",
"stats_demographic_id",
)
# Use bulk importer class to parse tasks.cfg in template folder
bi = S3BulkImporter()
S = Strings()
read_csv = S.read_csv
for template in template_list:
pth = path.join(base_dir, "modules", "templates", template)
if path.exists(path.join(pth, "tasks.cfg")) == False:
continue
bi.load_descriptor(pth)
s3db = current.s3db
for csv in bi.tasks:
# Ignore special import files
if csv[0] != 1:
continue
# csv is in format: prefix, tablename, path of csv file
# assuming represent.translate is always on primary key id
translate = False
fieldname = "%s_%s_id" % (csv[1], csv[2])
if fieldname in always_translate:
translate = True
represent = Storage(fields = ["name"])
elif hasattr(s3db, fieldname) is False:
continue
else:
reusable_field = s3db.get(fieldname)
# Excludes lambdas which are in defaults()
# i.e. reusable fields in disabled modules
if reusable_field and isinstance(reusable_field, S3ReusableField):
represent = reusable_field.attr.represent
if hasattr(represent, "translate"):
translate = represent.translate
# If translate attribute is set to True
if translate:
if hasattr(represent, "fields") is False:
# Only name field is considered
fields = ["name"]
else:
# List of fields is retrieved from represent.fields
fields = represent.fields
# Consider it for translation (csv[3])
csv_path = csv[3]
try:
data = read_csv(csv_path)
except IOError:
# Phantom
continue
title_row = data[0]
idx = 0
idxlist = []
idxappend = idxlist.append
for e in title_row:
if e.lower() in fields:
idxappend(idx)
idx += 1
if idxlist:
# Line number of string retrieved.
line_number = 1
for row in data[1:]:
line_number += 1
# If string is not empty
                            for idx in idxlist:
                                try:
                                    s = row[idx]
                                except IndexError:
                                    current.log.error("CSV row incomplete", csv_path)
                                    continue
if s != "":
loc = "%s:%s" % (csv_path, line_number)
dappend((loc, s))
return database_strings
# =============================================================================
class Strings:
""" Class to manipulate strings and their files """
# ---------------------------------------------------------------------
@staticmethod
def remove_quotes(Strings):
"""
Function to remove single or double quotes around the strings
"""
l = []
lappend = l.append
for (d1, d2) in Strings:
if (d1[0] == '"' and d1[-1] == '"') or \
(d1[0] == "'" and d1[-1] == "'"):
d1 = d1[1:-1]
if (d2[0] == '"' and d2[-1] == '"') or \
(d2[0] == "'" and d2[-1] == "'"):
d2 = d2[1:-1]
lappend((d1, d2))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_duplicates(Strings):
"""
Function to club all the duplicate strings into one row
with ";" separated locations
"""
uniq = {}
appname = current.request.application
for (loc, data) in Strings:
uniq[data] = ""
for (loc, data) in Strings:
# Remove the prefix from the filename
loc = loc.split(appname, 1)[1]
if uniq[data] != "":
uniq[data] = uniq[data] + ";" + loc
else:
uniq[data] = loc
l = []
lappend = l.append
for data in uniq.keys():
lappend((uniq[data], data))
return l
# ---------------------------------------------------------------------
@staticmethod
def remove_untranslated(lang_code):
"""
Function to remove all untranslated strings from a lang_code.py
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
data = read_dict(w2pfilename)
#try:
# # Python 2.7
# # - won't even compile
# data = {k: v for k, v in data.iteritems() if k != v}
#except:
# Python 2.6
        newdata = {}
        for k, v in data.iteritems():
            if k != v:
                newdata[k] = v
        data = newdata
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
def export_file(self, langfile, modlist, filelist, filetype, all_template_flag):
"""
Function to get the strings by module(s)/file(s), merge with
those strings from existing w2p language file which are already
translated and call the "write_xls()" method if the
default filetype "xls" is chosen. If "po" is chosen, then the
write_po()" method is called.
"""
request = current.request
settings = current.deployment_settings
appname = request.application
folder = request.folder
join = os.path.join
langcode = langfile[:-3]
langfile = join(folder, "languages", langfile)
# If the language file doesn't exist, create it
if not os.path.exists(langfile):
f = open(langfile, "w")
f.write("")
f.close()
NewStrings = []
A = TranslateAPI()
if all_template_flag == 1:
# Select All Templates
A.grp.group_files(join(folder, "modules", "templates"))
else:
# Specific template(s) is selected
templates = settings.get_template()
if not isinstance(templates, (tuple, list)):
templates = (templates,)
group_files = A.grp.group_files
for template in templates:
template_folder = join(folder, "modules", "templates", template)
group_files(template_folder)
R = TranslateReadFiles()
## Select Modules
# Core Modules are always included
core_modules = ("auth", "default")
for mod in core_modules:
modlist.append(mod)
# appadmin and error are part of admin
if "admin" in modlist:
modlist.append("appadmin")
modlist.append("error")
# Select dependent modules
models = current.models
for mod in modlist:
if hasattr(models, mod):
obj = getattr(models, mod)
# Currently only inv module has a depends list
if hasattr(obj, "depends"):
for element in obj.depends:
if element not in modlist:
modlist.append(element)
get_strings_by_module = A.get_strings_by_module
for mod in modlist:
NewStrings += get_strings_by_module(mod)
# Retrieve strings in a file
get_strings_by_file = A.get_strings_by_file
for f in filelist:
NewStrings += get_strings_by_file(f)
# Remove quotes
NewStrings = self.remove_quotes(NewStrings)
# Add database strings
NewStrings += R.get_database_strings(all_template_flag)
# Add user-supplied strings
NewStrings += R.get_user_strings()
# Remove duplicates
NewStrings = self.remove_duplicates(NewStrings)
NewStrings.sort(key=lambda tup: tup[1])
# Retrieve strings from existing w2p language file
OldStrings = self.read_w2p(langfile)
OldStrings.sort(key=lambda tup: tup[0])
# Merge those strings which were already translated earlier
Strings = []
sappend = Strings.append
i = 0
lim = len(OldStrings)
for (l, s) in NewStrings:
while i < lim and OldStrings[i][0] < s:
i += 1
if i != lim and OldStrings[i][0] == s and \
OldStrings[i][1].startswith("*** ") == False:
sappend((l, s, OldStrings[i][1]))
else:
sappend((l, s, ""))
if filetype == "xls":
# Create excel file
return self.write_xls(Strings, langcode)
elif filetype == "po":
# Create pootle file
return self.write_po(Strings)
# ---------------------------------------------------------------------
@staticmethod
def read_csv(fileName):
""" Function to read a CSV file and return a list of rows """
import csv
csv.field_size_limit(2**20) # 1 Mb
data = []
dappend = data.append
f = open(fileName, "rb")
transReader = csv.reader(f)
for row in transReader:
dappend(row)
f.close()
return data
# ---------------------------------------------------------------------
@staticmethod
def read_w2p(fileName):
"""
Function to read a web2py language file and
return a list of translation string pairs
"""
data = read_dict(fileName)
# Convert to list of tuples
# @ToDo: Why?
strings = []
sappend = strings.append
for s in data:
sappend((s, data[s]))
return strings
# ---------------------------------------------------------------------
@staticmethod
def write_csv(fileName, data):
""" Function to write a list of rows into a csv file """
import csv
f = open(fileName, "wb")
# Quote all the elements while writing
transWriter = csv.writer(f, delimiter=" ",
quotechar='"', quoting = csv.QUOTE_ALL)
transWriter.writerow(("location", "source", "target"))
for row in data:
transWriter.writerow(row)
f.close()
# ---------------------------------------------------------------------
def write_po(self, data):
""" Returns a ".po" file constructed from given strings """
from subprocess import call
from tempfile import NamedTemporaryFile
from gluon.contenttype import contenttype
f = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % f.name
self.write_csv(csvfilename, data)
g = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % g.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
h = open(pofilename, "r")
# Modify headers to return the po file for download
filename = "trans.po"
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".po")
response.headers["Content-disposition"] = disposition
h.seek(0)
return h.read()
# ---------------------------------------------------------------------
def write_w2p(self, csvfiles, lang_code, option):
"""
Function to merge multiple translated csv files into one
and then merge/overwrite the existing w2p language file
"""
w2pfilename = os.path.join(current.request.folder, "languages",
"%s.py" % lang_code)
# Dictionary to store translated strings
# with untranslated string as the key
data = {}
errors = 0
for f in csvfiles:
newdata = self.read_csv(f)
# Test: 2 cols or 3?
cols = len(newdata[0])
if cols == 1:
raise SyntaxError("CSV file needs to have at least 2 columns!")
elif cols == 2:
# 1st column is source, 2nd is target
for row in newdata:
data[row[0]] = row[1]
else:
# 1st column is location, 2nd is source, 3rd is target
for row in newdata:
data[row[1]] = row[2]
if option == "m":
# Merge strings with existing .py file
keys = data.keys()
olddata = read_dict(w2pfilename)
for s in olddata:
if s not in keys:
data[s] = olddata[s]
write_dict(w2pfilename, data)
# ---------------------------------------------------------------------
@staticmethod
def write_xls(Strings, langcode):
"""
Function to create a spreadsheet (.xls file) of strings with
location, original string and translated string as columns
"""
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
import xlwt
from gluon.contenttype import contenttype
# Define spreadsheet properties
wbk = xlwt.Workbook("utf-8")
sheet = wbk.add_sheet("Translate")
style = xlwt.XFStyle()
font = xlwt.Font()
font.name = "Times New Roman"
style.font = font
sheet.write(0, 0, "location", style)
sheet.write(0, 1, "source", style)
sheet.write(0, 2, "target", style)
row_num = 1
# Write the data to spreadsheet
for (loc, d1, d2) in Strings:
d2 = d2.decode("string-escape").decode("utf-8")
sheet.write(row_num, 0, loc, style)
try:
sheet.write(row_num, 1, d1, style)
except:
current.log.warning("Invalid source string!", loc)
sheet.write(row_num, 1, "", style)
sheet.write(row_num, 2, d2, style)
row_num += 1
# Set column width
for colx in range(0, 3):
sheet.col(colx).width = 15000
# Initialize output
output = StringIO()
# Save the spreadsheet
wbk.save(output)
# Modify headers to return the xls file for download
filename = "%s.xls" % langcode
disposition = "attachment; filename=\"%s\"" % filename
response = current.response
response.headers["Content-Type"] = contenttype(".xls")
response.headers["Content-disposition"] = disposition
output.seek(0)
return output.read()
# =============================================================================
class Pootle:
"""
Class to synchronise a Pootle server's translation with the local
one
@ToDo: Before uploading file to Pootle, ensure all relevant
untranslated strings are present.
"""
# ---------------------------------------------------------------------
def upload(self, lang_code, filename):
"""
Upload a file to Pootle
"""
# @ToDo try/except error
import mechanize
import re
br = mechanize.Browser()
br.addheaders = [("User-agent", "Firefox")]
br.set_handle_equiv(False)
# Ignore robots.txt
br.set_handle_robots(False)
# Don't add Referer (sic) header
br.set_handle_referer(False)
settings = current.deployment_settings
username = settings.get_L10n_pootle_username()
if username is False:
current.log.error("No login information found")
return
pootle_url = settings.get_L10n_pootle_url()
login_url = "%saccounts/login" % pootle_url
try:
br.open(login_url)
except:
current.log.error("Connecton Error")
return
br.select_form("loginform")
br.form["username"] = username
br.form["password"] = settings.get_L10n_pootle_password()
br.submit()
current_url = br.geturl()
if current_url.endswith("login/"):
current.log.error("Login Error")
return
pattern = "<option value=(.+?)>%s.po" % lang_code
# Process lang_code (if of form ab_cd --> convert to ab_CD)
if len(lang_code) > 2:
lang_code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
link = "%s%s/eden/" % (pootle_url, lang_code)
page_source = br.open(link).read()
# Use Regex to extract the value for field : "upload to"
        regex = re.search(pattern, page_source)
        if regex is None:
            current.log.error("Language file not found on Pootle")
            return
        result = regex.group(0)
        result = re.split(r'[="]', result)
        upload_code = result[2]
try:
br.select_form("uploadform")
# If user is not admin then overwrite option is not there
br.form.find_control(name="overwrite").value = ["overwrite"]
br.form.find_control(name ="upload_to").value = [upload_code]
            br.form.add_file(open(filename), "text/plain", filename)
br.submit()
except:
current.log.error("Error in Uploading form")
return
# ---------------------------------------------------------------------
def download(self, lang_code):
"""
Download a file from Pootle
@ToDo: Allow selection between different variants of language files
"""
import requests
import zipfile
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from subprocess import call
from tempfile import NamedTemporaryFile
code = lang_code
if len(lang_code) > 2:
code = "%s_%s" % (lang_code[:2], lang_code[-2:].upper())
pootle_url = current.deployment_settings.get_L10n_pootle_url()
link = "%s%s/eden/export/zip" % (pootle_url, code)
try:
r = requests.get(link)
except:
current.log.error("Connection Error")
return False
        zipf = zipfile.ZipFile(StringIO(r.content))
zipf.extractall()
file_name_po = "%s.po" % lang_code
file_name_py = "%s.py" % lang_code
f = NamedTemporaryFile(delete=False)
w2pfilename = "%s.py" % f.name
call(["po2web2py", "-i", file_name_po, "-o", w2pfilename])
S = Strings()
path = os.path.join(current.request.folder, "languages", file_name_py)
pystrings = S.read_w2p(path)
pystrings.sort(key=lambda tup: tup[0])
postrings = S.read_w2p(w2pfilename)
# Remove untranslated strings
postrings = [tup for tup in postrings if tup[0] != tup[1]]
postrings.sort(key=lambda tup: tup[0])
os.unlink(file_name_po)
os.unlink(w2pfilename)
return (postrings, pystrings)
# ---------------------------------------------------------------------
def merge_strings(self, postrings, pystrings, preference):
"""
Merge strings from a PO file and a Py file
"""
lim_po = len(postrings)
lim_py = len(pystrings)
i = 0
j = 0
# Store strings which are missing from pootle
extra = []
eappend = extra.append
while i < lim_py and j < lim_po:
if pystrings[i][0] < postrings[j][0]:
if preference == False:
eappend(pystrings[i])
i += 1
elif pystrings[i][0] > postrings[j][0]:
j += 1
# pystrings[i] == postrings[j]
else:
# Pootle is being given preference
if preference:
# Check if string is not empty
if postrings[j][1] and not postrings[j][1].startswith("***"):
pystrings[i] = postrings[j]
                # Py is being given preference
else:
if pystrings[i][1] and not pystrings[i][1].startswith("***"):
postrings[j] = pystrings[i]
i += 1
j += 1
if preference:
return pystrings
else:
# Add strings which were left
while i < lim_py:
extra.append(pystrings[i])
i += 1
# Add extra strings to Pootle list
for st in extra:
postrings.append(st)
postrings.sort(key=lambda tup: tup[0])
return postrings
# ---------------------------------------------------------------------
def merge_pootle(self, preference, lang_code):
# returns a tuple (postrings, pystrings)
ret = self.download(lang_code)
if not ret:
return
from subprocess import call
from tempfile import NamedTemporaryFile
# returns pystrings if preference was True else returns postrings
ret = self.merge_strings(ret[0], ret[1], preference)
S = Strings()
data = []
dappend = data.append
temp_csv = NamedTemporaryFile(delete=False)
csvfilename = "%s.csv" % temp_csv.name
if preference:
# Only python file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
# overwrite option
S.write_w2p([csvfilename], lang_code, "o")
os.unlink(csvfilename)
else:
# Only Pootle file has been changed
for i in ret:
dappend(("", i[0], i[1].decode("string-escape")))
S.write_csv(csvfilename, data)
temp_po = NamedTemporaryFile(delete=False)
pofilename = "%s.po" % temp_po.name
# Shell needed on Win32
# @ToDo: Copy relevant parts of Translate Toolkit internally to avoid external dependencies
call(["csv2po", "-i", csvfilename, "-o", pofilename], shell=True)
self.upload(lang_code, pofilename)
# Clean up extra created files
os.unlink(csvfilename)
os.unlink(pofilename)
# =============================================================================
class TranslateReportStatus(object):
"""
Class to report the percentage of translated strings for
each module for a given language.
"""
# -------------------------------------------------------------------------
@classmethod
def create_master_file(cls):
"""
Create master file of strings and their distribution in modules
"""
try:
import cPickle as pickle
except:
import pickle
# Instantiate the translateAPI
api = TranslateAPI()
# Generate list of modules
modules = api.get_modules()
modules.append("core")
# The list of all strings
all_strings = []
addstring = all_strings.append
# Dictionary of {module: indices of strings used in this module}
indices = {}
# Helper dict for fast lookups
string_indices = {}
index = 0
get_strings_by_module = api.get_strings_by_module
for module in modules:
module_indices = []
addindex = module_indices.append
strings = get_strings_by_module(module)
for (origin, string) in strings:
# Remove outermost quotes around the string
if (string[0] == '"' and string[-1] == '"') or\
(string[0] == "'" and string[-1] == "'"):
string = string[1:-1]
string_index = string_indices.get(string)
if string_index is None:
string_indices[string] = index
addstring(string)
addindex(index)
index += 1
else:
addindex(string_index)
indices[module] = module_indices
# Save all_strings and string_dict as pickle objects in a file
data_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
f = open(data_file, "wb")
pickle.dump(all_strings, f)
pickle.dump(indices, f)
f.close()
# Mark all string counts as dirty
ptable = current.s3db.translate_percentage
current.db(ptable.id > 0).update(dirty=True)
# -------------------------------------------------------------------------
@classmethod
def update_string_counts(cls, lang_code):
"""
Update the translation percentages for all modules for a given
language.
@ToDo: Generate fresh .py files with all relevant strings for this
(since we don't store untranslated strings)
"""
try:
import cPickle as pickle
except:
import pickle
base_dir = current.request.folder
# Read the language file
langfile = "%s.py" % lang_code
langfile = os.path.join(base_dir, "languages", langfile)
lang_strings = read_dict(langfile)
# Retrieve the data stored in master file
data_file = os.path.join(base_dir, "uploads", "temp.pkl")
f = open(data_file, "rb")
all_strings = pickle.load(f)
string_dict = pickle.load(f)
f.close()
db = current.db
ptable = current.s3db.translate_percentage
translated = set()
addindex = translated.add
for index, string in enumerate(all_strings):
translation = lang_strings.get(string)
if translation is None or translation[:4] == "*** ":
continue
elif translation != string or lang_code == "en-gb":
addindex(index)
for module, indices in string_dict.items():
all_indices = set(indices)
num_untranslated = len(all_indices - translated)
num_translated = len(all_indices) - num_untranslated
data = dict(code = lang_code,
module = module,
translated = num_translated,
untranslated = num_untranslated,
dirty=False)
query = (ptable.code == lang_code) & \
(ptable.module == module)
record = db(query).select(ptable._id, limitby=(0, 1)).first()
if record:
record.update_record(**data)
else:
ptable.insert(**data)
return
# -------------------------------------------------------------------------
@classmethod
def get_translation_percentages(cls, lang_code):
"""
Get the percentages of translated strings per module for
the given language code.
@param lang_code: the language code
"""
pickle_file = os.path.join(current.request.folder,
"uploads",
"temp.pkl")
# If master file doesn't exist, create it
if not os.path.exists(pickle_file):
cls.create_master_file()
db = current.db
ptable = current.s3db.translate_percentage
query = (ptable.code == lang_code)
fields = ("dirty", "translated", "untranslated", "module")
rows = db(query).select(*fields)
if not rows or rows.first().dirty:
# Update the string counts
cls.update_string_counts(lang_code)
rows = db(query).select(*fields)
percentage = {}
total_strings = 0
total_translated = 0
total_untranslated = 0
for row in rows:
num_translated = row.translated
num_untranslated = row.untranslated
total_strings += num_translated + num_untranslated
if not num_untranslated:
percentage[row.module] = 100
else:
total = num_translated + num_untranslated
total_translated += num_translated
total_untranslated += num_untranslated
percentage[row.module] = \
round((float(num_translated) / total) * 100, 2)
if not total_untranslated:
percentage["complete_file"] = 100
else:
percentage["complete_file"] = \
round((float(total_translated) / (total_strings)) * 100, 2)
return percentage
# END =========================================================================
|
mit
| 234,777,986,828,744,220
| 37.195965
| 117
| 0.432322
| false
| 5.1396
| false
| false
| false
|
hariseldon99/archives
|
dtwa_ising_longrange/dtwa_ising_longrange/dtwa_ising_longrange.py
|
1
|
45340
|
#!/usr/bin/env python
#Author: Analabha Roy
from __future__ import division, print_function
from mpi4py import MPI
from reductions import Intracomm
from redirect_stdout import stdout_redirected
import sys
import copy
import random
import numpy as np
from scipy.signal import fftconvolve
from scipy.sparse import *
from scipy.integrate import odeint
from pprint import pprint
from tabulate import tabulate
threshold = 1e-4
root = 0
#This is the kronecker delta symbol for vector indices
deltaij = np.eye(3)
#This is the Levi-Civita symbol for vector indices
eijk = np.zeros((3, 3, 3))
eijk[0, 1, 2] = eijk[1, 2, 0] = eijk[2, 0, 1] = 1
eijk[0, 2, 1] = eijk[2, 1, 0] = eijk[1, 0, 2] = -1
def t_deriv(quantities, times):
dt = np.gradient(times)
return np.gradient(quantities, dt)
def drive(t, params):
return params.h0 * np.cos(params.omega * t)
def weyl_hamilt(s,times,param):
"""
Evaluates the Weyl Symbols of the Hamiltonian, H_w
Does this at all times
If |s^a> = (s^a_0, s^a_1 ... s^a_N), and
H_w = -(1/2) * \sum_{nm} J_{nm} (J_x s^n_x s^m_x + J_y s^n_y s^m_y
+ J_z s^n_z s^m_z) - h(t) * \sum_n (h_x s^n_x +h_y s^n_y
+ h_z s^n_z)
"""
N = param.latsize
#s[:, 0:N] = sx , s[:, N:2*N] = sy, s[:, 2*N:3*N] = sz
drvs = drive(times, param)
hw = param.jx * np.dot(s[:,0*N:1*N],param.jmat.dot(s[:,0*N:1*N].T))
hw += param.jy * np.dot(s[:,1*N:2*N],param.jmat.dot(s[:,1*N:2*N].T))
hw += param.jz * np.dot(s[:,2*N:3*N],param.jmat.dot(s[:,2*N:3*N].T))
hw = hw /(2.0 * param.norm)
hw += drvs * (param.hx * np.sum(s[:, 0:N]) +\
param.hy * np.sum(s[:, N:2*N]) + param.hz * np.sum(s[:, 2*N:3*N]))
return -hw
def func_1storder(s, t, param):
"""
    The RHS of the general case, per Schachenmayer eq A2
"""
N = param.latsize
#s[0:N] = sx , s[N:2*N] = sy, s[2*N:3*N] = sz
drv = drive(t, param)
jsx = 2.0 * param.jx * param.jmat.dot(s[0:N])/param.norm
jsx += 2.0 * drv * param.hx
jsy = 2.0 * param.jy * param.jmat.dot(s[N:2*N])/param.norm
jsy += 2.0 * drv * param.hy
jsz = 2.0 * param.jz * param.jmat.dot(s[2*N:3*N])/param.norm
jsz += 2.0 * drv * param.hz
dsxdt = s[N:2*N] * jsz - s[2*N:3*N] * jsy
dsydt = s[2*N:3*N] * jsx - s[0:N] * jsz
dszdt = s[0:N] * jsy - s[N:2*N] * jsx
return np.concatenate((dsxdt, dsydt, dszdt))
def jac_1storder(s, t, param):
"""
Jacobian of the general case. First order.
This is given by 9 NXN submatrices:
J00=J11=J22=0
Although Jacobian is NOT antisymmetric in general! See below
J01 = +J_z diag(J|s^x>) + h(t) h_z - J_y (J#|s^z>)
J10 = -J_z diag(J|s^x>) - h(t) h_z + J_x (J#|s^z>)
J02 = -J_y diag(J|s^y>) - h(t) h_y + J_z (J#|s^y>)
J20 = +J_y diag(J|s^y>) + h(t) h_y - J_x (J#|s^y>)
J12 = +J_x diag(J|s^x>) + h(t) h_x - J_z (J#|s^x>)
J21 = -J_x diag(J|s^x>) - h(t) h_x + J_y (J#|s^x>)
Here, '#' (hash operator) means multiply each row of a matrix by the
corresponding vector element. This is implemented by numpy.multiply()
"""
N = param.latsize
#s[0:N] = sx , s[N:2*N] = sy, s[2*N:3*N] = sz
full_jacobian = np.zeros(shape=(3*N, 3*N))
drivemat = 2.0 * drive(t, param) * np.eye(N)
diag_jsx = np.diagflat((param.jmat.dot(s[0:N])))/param.norm
diag_jsy = np.diagflat((param.jmat.dot(s[N:2*N])))/param.norm
#diag_jsz = np.diagflat((param.jmat.dot(s[2*N:3*N])))/param.norm
hash_jsx = (np.multiply(param.jmat.T, s[0:N]).T)/param.norm
hash_jsy = (np.multiply(param.jmat.T, s[N:2*N]).T)/param.norm
hash_jsz = (np.multiply(param.jmat.T, s[2*N:3*N]).T)/param.norm
full_jacobian[0:N, N:2*N] = param.jz * diag_jsx + drivemat * param.hz\
-param.jy * hash_jsz
full_jacobian[N:2*N, 0:N] = -param.jz * diag_jsx - \
drivemat * param.hz + param.jx * hash_jsz
full_jacobian[0:N, 2*N:3*N] = -param.jy * diag_jsy - drivemat * \
param.hy + param.jz * hash_jsy
full_jacobian[2*N:3*N, 0:N] = param.jy * diag_jsy + drivemat * \
param.hy - param.jx * hash_jsy
full_jacobian[N:2*N, 2*N:3*N] = param.jx * diag_jsx + drivemat * \
param.hx - param.jz * hash_jsx
full_jacobian[2*N:3*N, N:2*N] = -param.jx * diag_jsx - drivemat * \
param.hx + param.jy * hash_jsx
return full_jacobian
def func_2ndorder(s, t, param):
"""
    The RHS of the general case, second order correction, per Lorenzo
"J" is the J_{ij} hopping matrix
-\partial_t |s^x> = -first order + 2 (J^y Jg^{yz} - J^z Jg^{zy})
/norm,
-\partial_t |s^y> = -first order + 2 (-J^z Jg^{zx} + J^x Jg^{xz})
/norm,
-\partial_t |s^z> = -first order + 2 (-J^x Jg^{xy} + J^y Jg^{yx})
/norm.
"""
N = param.latsize
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
sview = s.view()
stensor = sview[0:3*N].reshape(3, N)
gtensor = sview[3*N:].reshape(3, 3, N, N)
gtensor[:,:,range(N),range(N)] = 0.0 #Set the diagonals of g_munu to 0
htensor = np.zeros_like(stensor)
htensor[0].fill(param.hvec[0])
htensor[1].fill(param.hvec[1])
htensor[2].fill(param.hvec[2])
Gtensor = np.einsum("mg,abgn->abmn", param.jmat, gtensor)/param.norm
Mtensor = np.einsum("am,b,mn->abmn", stensor, param.jvec, \
param.jmat)/param.norm
hvec_dressed = htensor + np.einsum("llgm->lm", Mtensor)
dtensor = gtensor + np.einsum("am,bn", stensor, stensor)
dsdt_1 = func_1storder(sview[0:3*N], t, param).reshape(3, N)
dsdt = dsdt_1 - \
2.0 * np.einsum("bcmm,b,abc->am", Gtensor, param.jvec, eijk)
dgdt = -np.einsum("lbmn,abl->abmn", Mtensor, eijk) + \
np.einsum("lanm,abl->abmn", Mtensor, eijk)
dgdt -= np.einsum("lm,kbmn,lka->abmn", hvec_dressed, gtensor, eijk) -\
np.einsum("llnm,kbmn,lka->abmn", Mtensor, gtensor, eijk) +\
np.einsum("ln,akmn,lkb->abmn", hvec_dressed, gtensor, eijk) -\
np.einsum("llmn,akmn,lkb->abmn", Mtensor, gtensor, eijk)
dgdt -= np.einsum("l,km,lbmn,lka->abmn", \
param.jvec, stensor, Gtensor, eijk) + \
np.einsum("l,kn,lanm,lkb->abmn", param.jvec, stensor, \
Gtensor, eijk)
dgdt += np.einsum("almn,lkmn,lkb->abmn", Mtensor, dtensor, eijk)\
+ np.einsum("blnm,lknm,lka->abmn", Mtensor, dtensor, eijk)
#Flatten it before returning
return np.concatenate((dsdt.flatten(), 2.0 * dgdt.flatten()))
def jac_2ndorder(s, t, param):
"""
Jacobian of the general case. Second order.
"""
N = param.latsize
fullsize_2ndorder = 3 * N + 9 * N**2
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
sview = s.view()
stensor = sview[0:3*N].reshape(3, N)
gtensor = sview[3*N:].reshape(3, 3, N, N)
htensor = np.zeros_like(stensor)
htensor[0].fill(param.hvec[0])
htensor[1].fill(param.hvec[1])
htensor[2].fill(param.hvec[2])
jjtensor = np.einsum("a,mn->amn", param.jvec, param.jmat)
sstensor = np.einsum("km,ln->klmn",stensor,stensor)
Mtensor = np.einsum("am,b,mn->abmn", stensor, param.jvec, \
param.jmat)/param.norm
hvec_dressed = htensor + np.einsum("llgm->lm", Mtensor)
full_jacobian = np.zeros(shape=(fullsize_2ndorder, fullsize_2ndorder))
#J00 subblock
full_jacobian[0:3*N, 0:3*N] = jac_1storder(s, t, param)
#J01 subblock. Precalculated
full_jacobian[0:3*N, 3*N:] = param.dsdotdg
#J10 subblock
full_jacobian[3*N:, 0:3*N] = -(np.einsum("pml,kbmn,pka->abpmnl", \
jjtensor,gtensor, eijk) + np.einsum("pnl,akmn,pkb->abpmnl", \
jjtensor, gtensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] -= (np.einsum("qmg,ml,bqng,qpa->abpmnl",\
jjtensor, param.deltamn,gtensor, eijk) + \
np.einsum("qng,nl,aqmg,qpb->abpmnl",jjtensor, param.deltamn, \
gtensor, eijk) ).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("qmn,ml,bqnn,qpa->abpmnl",\
jjtensor, param.deltamn,gtensor, eijk) + \
np.einsum("qnm,nl,aqmm,qpb->abpmnl", jjtensor,param.deltamn, \
gtensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("qmn,ml,pa,qkmn,qkb->abpmnl",\
jjtensor,param.deltamn,deltaij,gtensor+sstensor,eijk) + \
np.einsum("qmn,nl,pb,kqmn,qka->abpmnl", jjtensor,param.deltamn, \
deltaij,gtensor+sstensor,eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] += (np.einsum("pmn,ml,akmn,pkb->abpmnl",\
jjtensor,param.deltamn, sstensor, eijk) + \
np.einsum("pmn,nl,bknm,pka->abpmnl", jjtensor,param.deltamn, \
sstensor, eijk) + np.einsum("kmn,nl,akmm,kpb->abpmnl",\
jjtensor,param.deltamn, sstensor, eijk) + \
np.einsum("kmn,ml,bknn,kpa->abpmnl", jjtensor,param.deltamn, \
sstensor, eijk)).reshape(9*N*N,3*N)
full_jacobian[3*N:, 0:3*N] = 2.0 * \
(full_jacobian[3*N:, 0:3*N]/param.norm)
full_jacobian[3*N:, 0:3*N] += param.dsdotdg.T
#J11 subblock:
full_jacobian[3*N:, 3*N:] = -(np.einsum("qm,mlnhbpqra->abrpmnlh",\
hvec_dressed, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("qqmn,mlnhbpqra->abrpmnlh", \
Mtensor, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] -= (np.einsum("qn,mlnharqpb->abrpmnlh",\
hvec_dressed, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("qqnm,mlnharqpb->abrpmnlh",\
Mtensor, param.delta_eps_tensor)).reshape(9*N*N,9*N*N)
excl_tensor = -np.einsum("qmh,km,nl,br,pka->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qnh,kn,ml,ar,pkb->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qml,km,nh,bp,rka->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
excl_tensor += -np.einsum("qnl,kn,mh,ap,rkb->abrpmnlh",\
jjtensor,stensor, param.deltamn, deltaij,eijk)
#Set the \eta=\mu,\nu components of excl_tensor to 0
excl_tensor[:,:,:,:,range(N),:,:,range(N)] = 0.0
excl_tensor[:,:,:,:,:,range(N),:,range(N)] = 0.0
full_jacobian[3*N:, 3*N:] += excl_tensor.reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] += (np.einsum("rmn,am,ml,nh,rpb->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk) + \
np.einsum("rmn,bn,mh,nl,rpa->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] -= (np.einsum("pmn,am,mh,nl,prb->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk) + \
np.einsum("pmn,bn,ml,nh,pra->abrpmnlh",\
jjtensor,stensor,param.deltamn,param.deltamn,eijk)).reshape(9*N*N,9*N*N)
full_jacobian[3*N:, 3*N:] = 2.0 * (full_jacobian[3*N:, 3*N:]/param.norm)
return full_jacobian
class ParamData:
"""Class to store parameters, precalculated objects,
       filenames, and objects like the Kac norm and the
       time-independent part of the Jacobian. Set s_order to
       true if doing second order dtwa
"""
def __init__(self,pbc=False ,nonorm=True, latsize=101, beta=1.0, \
h0=1.0, omega=0.0, hx=0.0, hy=0.0, hz=0.0,\
jx=0.0, jy=0.0, jz=1.0):
#Default Output file names. Each file dumps a different observable
self.output_magx = "sx_outfile.txt"
self.output_magy = "sy_outfile.txt"
self.output_magz = "sz_outfile.txt"
self.output_sxvar = "sxvar_outfile.txt"
self.output_syvar = "syvar_outfile.txt"
self.output_szvar = "szvar_outfile.txt"
self.output_sxyvar = "sxyvar_outfile.txt"
self.output_sxzvar = "sxzvar_outfile.txt"
self.output_syzvar = "syzvar_outfile.txt"
#Whether to normalize with Kac norm or not
self.nonorm = nonorm
self.latsize = latsize
self.beta = beta #Power law index for long range interactions
self.h0 = h0 # Drive amplitude
self.omega = omega #Drive frequency
self.hx = hx #x transverse field
self.hy = hy #y transverse field
self.hz = hz #z transverse field
self.jx = jx #x hopping
self.jy = jy #y hopping
self.jz = jz #z hopping
self.jvec = np.array([jx, jy, jz])
self.hvec = np.array([hx, hy, hz])
N = self.latsize
self.fullsize_2ndorder = 3 * N + 9 * N**2
self.deltamn = np.eye(N)
# These are the lattice sites for two point density matrix calc.
        self.tpnt_sites = (int(np.floor(N/2)), int(np.floor(N/2)) + 2)
if(pbc):
self.periodic_boundary_conditions = True
self.open_boundary_conditions = False
#This is the dense Jmn hopping matrix with power law decay for
#periodic or open boundary conditions.
J = dia_matrix((N, N))
mid_diag = np.floor(N/2).astype(int)
for i in xrange(1,mid_diag+1):
elem = pow(i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
for i in xrange(mid_diag+1, N):
elem = pow(N-i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
else: #Open boundary conditions
self.periodic_boundary_conditions = False
self.open_boundary_conditions = True
J = dia_matrix((N, N))
for i in xrange(1,N):
elem = pow(i, -self.beta)
J.setdiag(elem, k=i)
J.setdiag(elem, k=-i)
self.jmat = J.toarray()
#This is the optional Kac norm
mid = np.floor(N/2).astype(int)
if self.nonorm:
self.norm = 1.0
else:
self.norm =\
2.0 * np.sum(1/(pow(\
np.arange(1, mid+1), self.beta).astype(float)))
class OutData:
"""Class to store output data"""
def __init__(self, t, sx, sy, sz, sxx, syy, szz, sxy, sxz, syz,\
params):
self.t_output = t
self.sx, self.sy, self.sz = sx, sy, sz
self.sxvar, self.syvar, self.szvar = sxx, syy, szz
self.sxyvar, self.sxzvar, self.syzvar = sxy, sxz, syz
self.__dict__.update(params.__dict__)
def normalize_data(self, w_totals, lsize):
self.sx = self.sx/(w_totals * lsize)
self.sy = self.sy/(w_totals * lsize)
self.sz = self.sz/(w_totals * lsize)
self.sxvar = (1/lsize) + (self.sxvar/(w_totals * lsize * lsize))
self.sxvar = self.sxvar - (self.sx)**2
self.syvar = (1/lsize) + (self.syvar/(w_totals * lsize * lsize))
self.syvar = self.syvar - (self.sy)**2
self.szvar = (1/lsize) + (self.szvar/(w_totals * lsize * lsize))
self.szvar = self.szvar - (self.sz)**2
self.sxyvar = (self.sxyvar/(w_totals * lsize * lsize))
self.sxyvar = self.sxyvar - (self.sx * self.sy)
self.sxzvar = (self.sxzvar/(w_totals * lsize * lsize))
self.sxzvar = self.sxzvar - (self.sx * self.sz)
self.syzvar = (self.syzvar/(w_totals * lsize * lsize))
self.syzvar = self.syzvar - (self.sy * self.sz)
def dump_data(self):
np.savetxt(self.output_magx, \
np.vstack((self.t_output, self.sx)).T, delimiter=' ')
np.savetxt(self.output_magy, \
np.vstack((self.t_output, self.sy)).T, delimiter=' ')
np.savetxt(self.output_magz, \
np.vstack((self.t_output, self.sz)).T, delimiter=' ')
np.savetxt(self.output_sxvar, \
np.vstack((self.t_output, self.sxvar)).T, delimiter=' ')
np.savetxt(self.output_syvar, \
np.vstack((self.t_output, self.syvar)).T, delimiter=' ')
np.savetxt(self.output_szvar, \
np.vstack((self.t_output, self.szvar)).T, delimiter=' ')
np.savetxt(self.output_sxyvar, \
np.vstack((self.t_output, self.sxyvar)).T, delimiter=' ')
np.savetxt(self.output_sxzvar, \
np.vstack((self.t_output, self.sxzvar)).T, delimiter=' ')
np.savetxt(self.output_syzvar, \
np.vstack((self.t_output, self.syzvar)).T, delimiter=' ')
class OutData_ij:
"""
Class to store ij output data
The gij is a numpy array of 3X3 gij matrices at multiple times
"""
def __init__(self, t, sites, sxi, syi, szi, sxj, syj, szj, sy_iplusk,\
syy_k, gij=None):
self.times = t
self.sites = sites
self.sxi, self.syi, self.szi = sxi, syi, szi
self.sxj, self.syj, self.szj = sxj, syj, szj
#Output formatting dictionaries
self.sitespinsdict = {"time": t,\
"sxi": self.sxi.view(),\
"syi": self.syi.view(),\
"szi": self.szi.view(),\
"sxj": self.sxj.view(),\
"syj": self.syj.view(),\
"szj": self.szj.view()}
self.sy_iplusk = sy_iplusk
self.syy_k = syy_k
if gij is not None:
self.gij = gij
v = self.gij.view()
self.sitecorrdict = {"time": t,\
"gxxij": v[:, 0, 0],\
"gxyij": v[:, 0, 1],\
"gxzij": v[:, 0, 2],\
"gyxij": v[:, 1, 0],\
"gyyij": v[:, 1, 1],\
"gyzij": v[:, 1, 2],\
"gzxij": v[:, 2, 0],\
"gzyij": v[:, 2, 1],\
"gzzij": v[:, 2, 2]}
def normalize_data(self, w_totals):
self.sitespinsdict['sxi'] = self.sitespinsdict['sxi']/(w_totals)
self.sitespinsdict['syi'] = self.sitespinsdict['syi']/(w_totals)
self.sitespinsdict['szi'] = self.sitespinsdict['szi']/(w_totals)
self.sitespinsdict['sxj'] = self.sitespinsdict['sxj']/(w_totals)
self.sitespinsdict['syj'] = self.sitespinsdict['syj']/(w_totals)
self.sitespinsdict['szj'] = self.sitespinsdict['szj']/(w_totals)
#Normalize the spatial correlations:
self.sy_iplusk = self.sy_iplusk/(w_totals)
self.syy_k = self.syy_k/(w_totals)
self.syy_k -= np.array([self.sy_iplusk[i] *\
self.sitespinsdict['syi'][i] for i in xrange(self.times.size)])
if hasattr(self, 'sitecorrdict'):
for key in self.sitecorrdict.iterkeys():
if key is not "time":
self.sitecorrdict[key] = self.sitecorrdict[key]/(w_totals)
def dump_data(self):
print("\n\n Tabular dump of site data:\n\n")
print("\n Note that, in the first order case,")
print("the 'gij' columns actually print sijs,")
print("which are s(..)i * s(..)j\n\n")
print("Sites chosen:", self.sites)
print("\n")
print(tabulate(self.sitespinsdict, headers="keys", floatfmt=".6f" ))
print("Spatial correlations from site i = \n", self.sites[0])
print(np.vstack((self.times,self.syy_k.T)).T)
if hasattr(self, 'sitecorrdict'):
print(" ")
print(tabulate(self.sitecorrdict, headers="keys", floatfmt=".6f"))
class Dtwa_System:
"""
This is the class that creates the DTWA system,
has all MPI_Gather routines for aggregating the
samples, and executes the dtwa methods (1st and 2nd order)
Set s_order to true if doing second order dtwa
Set jac to false if you don't want to evaluate the jacobian, since
it may be too big in some cases and cause the routine to crash.
"""
def __init__(self, params, mpicomm, n_t=2000, file_output=True,\
seed_offset=0, s_order=False, jac=False,\
verbose=True, sitedata=False):
"""
        Input default values and make precalculated objects
Comm = MPI Communicator
"""
self.jac = jac
self.__dict__.update(params.__dict__)
self.n_t = n_t
self.file_output = file_output
self.comm=mpicomm
self.seed_offset = seed_offset
self.s_order = s_order
#Booleans for verbosity and for calculating site data
self.verbose = verbose
self.sitedata = sitedata
N = params.latsize
#Only computes these if you want 2nd order
if self.s_order and self.jac:
#Below are the constant subblocks of the 2nd order Jacobian
#The 00 subblock is the first order Jacobian in func below
#The entire 01 subblock, fully time independent (ds_dot/dg):
self.dsdotdg = -np.einsum("p,mh,ml,apr->arpmlh",\
self.jvec, self.jmat, self.deltamn, eijk)
self.dsdotdg += np.einsum("r,ml,mh,arp->arpmlh", \
self.jvec,self.jmat, self.deltamn, eijk)
self.dsdotdg = 2.0 * (self.dsdotdg/self.norm)
self.dsdotdg = self.dsdotdg.reshape(3*N, 9*N**2)
self.delta_eps_tensor = np.einsum("ml,nh,ar,qpb->mlnharqpb",\
self.deltamn,self.deltamn,deltaij,eijk)
self.delta_eps_tensor += np.einsum("mh,nl,ap,qrb->mhnlapqrb",\
self.deltamn,self.deltamn,deltaij,eijk)
#The time independent part of the 10 subblock (dg_dot/ds):
#is the SAME as ds_dot/dg
def sum_reduce_all_data(self, datalist_loc,t, mpcomm):
"""
Does the parallel sum reduction of all data
"""
#Do local sums
sx_locsum = np.sum(data.sx for data in datalist_loc)
sy_locsum = np.sum(data.sy for data in datalist_loc)
sz_locsum = np.sum(data.sz for data in datalist_loc)
sxvar_locsum = np.sum(data.sxvar for data in datalist_loc)
syvar_locsum = np.sum(data.syvar for data in datalist_loc)
szvar_locsum = np.sum(data.szvar for data in datalist_loc)
sxyvar_locsum = np.sum(data.sxyvar for data in datalist_loc)
sxzvar_locsum = np.sum(data.sxzvar for data in datalist_loc)
syzvar_locsum = np.sum(data.syzvar for data in datalist_loc)
#Only root processor will actually get the data
sx_totals = np.zeros_like(sx_locsum) if mpcomm.rank == root\
else None
sy_totals = np.zeros_like(sy_locsum) if mpcomm.rank == root\
else None
sz_totals = np.zeros_like(sz_locsum) if mpcomm.rank == root\
else None
sxvar_totals = np.zeros_like(sxvar_locsum) if mpcomm.rank == root\
else None
syvar_totals = np.zeros_like(syvar_locsum) if mpcomm.rank == root\
else None
szvar_totals = np.zeros_like(szvar_locsum) if mpcomm.rank == root\
else None
sxyvar_totals = np.zeros_like(sxyvar_locsum) if mpcomm.rank == root\
else None
sxzvar_totals = np.zeros_like(sxzvar_locsum) if mpcomm.rank == root\
else None
syzvar_totals = np.zeros_like(syzvar_locsum) if mpcomm.rank == root\
else None
#To prevent conflicts with other comms
duplicate_comm = Intracomm(mpcomm)
sx_totals = duplicate_comm.reduce(sx_locsum, root=root)
sy_totals = duplicate_comm.reduce(sy_locsum, root=root)
sz_totals = duplicate_comm.reduce(sz_locsum, root=root)
sxvar_totals = duplicate_comm.reduce(sxvar_locsum, root=root)
syvar_totals = duplicate_comm.reduce(syvar_locsum, root=root)
szvar_totals = duplicate_comm.reduce(szvar_locsum, root=root)
sxyvar_totals = duplicate_comm.reduce(sxyvar_locsum, root=root)
sxzvar_totals = duplicate_comm.reduce(sxzvar_locsum, root=root)
syzvar_totals = duplicate_comm.reduce(syzvar_locsum, root=root)
if mpcomm.rank == root:
return OutData(t, sx_totals, sy_totals, sz_totals, sxvar_totals, \
syvar_totals, szvar_totals, sxyvar_totals, sxzvar_totals,\
syzvar_totals, self)
else:
return None
def sum_reduce_site_data(self, datalist_loc, t, sites, mpcomm):
"""
Does the parallel sum reduction of site data
"""
sxi_locsum = np.sum(data.sxi for data in datalist_loc)
syi_locsum = np.sum(data.syi for data in datalist_loc)
szi_locsum = np.sum(data.szi for data in datalist_loc)
sxj_locsum = np.sum(data.sxj for data in datalist_loc)
syj_locsum = np.sum(data.syj for data in datalist_loc)
szj_locsum = np.sum(data.szj for data in datalist_loc)
sy_iplusk_locsum = np.sum(data.sy_iplusk for data in datalist_loc)
syy_k_locsum = np.sum(data.syy_k for data in datalist_loc)
try: #This is to take care of the case when gij = None
gijs_locsum = np.sum(data.gij for data in datalist_loc)
except AttributeError:
gijs_locsum = None
sxi_totals = np.zeros_like(sxi_locsum) if mpcomm.rank == root\
else None
syi_totals = np.zeros_like(syi_locsum) if mpcomm.rank == root\
else None
szi_totals = np.zeros_like(szi_locsum) if mpcomm.rank == root\
else None
sxj_totals = np.zeros_like(sxj_locsum) if mpcomm.rank == root\
else None
syj_totals = np.zeros_like(syj_locsum) if mpcomm.rank == root \
else None
szj_totals = np.zeros_like(szj_locsum) if mpcomm.rank == root \
else None
sy_iplusk_totals = np.zeros_like(sy_iplusk_locsum) \
if mpcomm.rank == root else None
syy_k_totals = np.zeros_like(syy_k_locsum) \
if mpcomm.rank == root else None
gijs_totals = np.zeros_like(gijs_locsum) if mpcomm.rank == root \
else None
#To prevent conflicts with other comms
duplicate_comm = Intracomm(mpcomm)
#Broadcast these reductions to root
sxi_totals = duplicate_comm.reduce(sxi_locsum, root=root)
syi_totals = duplicate_comm.reduce(syi_locsum, root=root)
szi_totals = duplicate_comm.reduce(szi_locsum, root=root)
sxj_totals = duplicate_comm.reduce(sxj_locsum, root=root)
syj_totals = duplicate_comm.reduce(syj_locsum, root=root)
szj_totals = duplicate_comm.reduce(szj_locsum, root=root)
sy_iplusk_totals = duplicate_comm.reduce(sy_iplusk_locsum,root=root)
syy_k_totals = duplicate_comm.reduce(syy_k_locsum, root=root)
if gijs_locsum is not None:
gijs_totals = duplicate_comm.reduce(gijs_locsum, root=root)
else:
gijs_totals = None
if mpcomm.rank == root:
return OutData_ij(t, sites, sxi_totals, syi_totals, \
szi_totals, sxj_totals, syj_totals, szj_totals, \
sy_iplusk_totals, syy_k_totals, gijs_totals)
else:
return None
def dtwa_ising_longrange_1storder(self, time_info):
comm = self.comm
N = self.latsize
(t_init, n_cycles, n_steps) = time_info
rank = comm.rank
if rank == root and self.verbose:
pprint("# Run parameters:")
pprint(vars(self), depth=2)
if rank == root and not self.verbose:
pprint("# Starting run ...")
if self.omega == 0:
t_final = t_init + n_cycles
else:
t_final = t_init + (n_cycles * (2.0* np.pi/self.omega))
dt = (t_final-t_init)/(n_steps-1.0)
t_output = np.arange(t_init, t_final, dt)
#Let each process get its chunk of n_t by round robin
nt_loc = 0
iterator = rank
while iterator < self.n_t:
nt_loc += 1
iterator += comm.size
#Scatter unique seeds for generating unique random number arrays :
#each processor gets its own nt_loc seeds, and allocates nt_loc
#initial conditions. Each i.c. is a 2N sized array
#now, each process sends its value of nt_loc to root
all_ntlocs = comm.gather(nt_loc, root=root)
#Let the root process initialize nt unique integers for random seeds
if rank == root:
all_seeds = np.arange(self.n_t, dtype=np.int64)+1
all_ntlocs = np.array(all_ntlocs)
all_displacements = np.roll(np.cumsum(all_ntlocs), root+1)
all_displacements[root] = 0 # First displacement
else:
all_seeds = None
all_displacements = None
local_seeds = np.zeros(nt_loc, dtype=np.int64)
#Root scatters nt_loc sized seed data to that particular process
        #Note: the seed buffers are int64, so a matching MPI datatype is used
        comm.Scatterv([all_seeds, all_ntlocs, all_displacements,\
            MPI.INT64_T], local_seeds)
list_of_local_data = []
if self.sitedata:
list_of_local_ijdata = []
for runcount in xrange(0, nt_loc, 1):
            #Seed numpy's RNG too, since np.random draws the initial spins
            random.seed(local_seeds[runcount] + self.seed_offset)
            np.random.seed(local_seeds[runcount] + self.seed_offset)
#According to Schachenmayer, the wigner function of the quantum
#state generates the below initial conditions classically
sx_init = np.ones(N)
sy_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
sz_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
#Set initial conditions for the dynamics locally to vector
#s_init and store it as [s^x,s^x,s^x, .... s^y,s^y,s^y ...,
#s^z,s^z,s^z, ...]
s_init = np.concatenate((sx_init, sy_init, sz_init))
if self.verbose:
if self.jac:
s, info = odeint(func_1storder, s_init, t_output,\
args=(self,), Dfun=jac_1storder, full_output=True)
else:
s, info = odeint(func_1storder, s_init, t_output,\
args=(self,), Dfun=None, full_output=True)
else:
if self.jac:
s = odeint(func_1storder, s_init, t_output, args=(self,),\
Dfun=jac_1storder)
else:
s = odeint(func_1storder, s_init, t_output, args=(self,),\
Dfun=None)
#Compute expectations <sx> and \sum_{ij}<sx_i sx_j> -<sx>^2 with
#wigner func at t_output values LOCALLY for each initcond and
#store them
sx_expectations = np.sum(s[:, 0:N], axis=1)
sy_expectations = np.sum(s[:, N:2*N], axis=1)
sz_expectations = np.sum(s[:, 2*N:3*N], axis=1)
if self.sitedata:
(i, j) = self.tpnt_sites
sxi, syi, szi = s[:, i], s[:, i+N], s[:, i+2*N]
sxj, syj, szj = s[:, j], s[:, j+N], s[:, j+2*N]
sxxij, syyij, szzij = sxi * sxj, syi * syj, szi * szj
sxyij, sxzij, syzij = sxi * syj, sxi * szj, syi * szj
syxij, szxij, szyij = syi * sxj, szi * sxj, szi * syj
gij = np.array([sxxij, sxyij, sxzij, syxij, syyij, syzij,\
szxij, szyij, szzij]).T.reshape(t_output.size,3,3)
#Calculate Spatial Correlations
sy_iplusk = s[:, N:2*N][:,i:] #This is a matrix
syy_k = np.array([sy_iplusk[t] * syi[t] \
for t in xrange(t_output.size)])# This is also a matrix
localdataij = OutData_ij(t_output, self.tpnt_sites, \
sxi, syi, szi,\
sxj, syj, szj,\
sy_iplusk,\
syy_k,\
gij)
list_of_local_ijdata.append(localdataij)
#Quantum spin variance maps to the classical expression
# (1/N) + (1/N^2)\sum_{i\neq j} S^x_i S^x_j - <S^x>^2 and
# (1/N) + (1/N^2)\sum_{i\neq j} S^y_i S^z_j
# since the i=j terms quantum average to unity
sx_var = (np.sum(s[:, 0:N], axis=1)**2 \
- np.sum(s[:, 0:N]**2, axis=1))
sy_var = (np.sum(s[:, N:2*N], axis=1)**2 \
- np.sum(s[:, N:2*N]**2, axis=1))
sz_var = (np.sum(s[:, 2*N:3*N], axis=1)**2 \
- np.sum(s[:, 2*N:3*N]**2, axis=1))
sxy_var = np.sum([fftconvolve(s[m, 0:N], \
s[m, N:2*N]) for m in xrange(t_output.size)], axis=1)
sxz_var = np.sum([fftconvolve(s[m, 0:N], \
s[m, 2*N:3*N]) for m in xrange(t_output.size)], axis=1)
syz_var = np.sum([fftconvolve(s[m, N:2*N], \
s[m, 2*N:3*N]) for m in xrange(t_output.size)], axis=1)
localdata = OutData(t_output, sx_expectations, sy_expectations,\
sz_expectations, sx_var, sy_var, sz_var, sxy_var, sxz_var, \
syz_var, self)
list_of_local_data.append(localdata)
#After loop above sum reduce (don't forget to average) all locally
#calculated expectations at each time to root
outdat = \
self.sum_reduce_all_data(list_of_local_data, t_output, comm)
if self.sitedata:
sij = self.sum_reduce_site_data(list_of_local_ijdata,\
t_output, self.tpnt_sites, comm)
if rank == root:
sij.normalize_data(self.n_t)
if self.file_output:
sij.dump_data()
if rank == root:
#Dump to file
outdat.normalize_data(self.n_t, N)
if self.file_output:
outdat.dump_data()
if self.verbose:
print(" ")
print("Integration output info:")
pprint(info)
print("""# Cumulative number of Jacobian evaluations
by root:""", \
np.sum(info['nje']))
print('# Done!')
return outdat
else:
return None
def dtwa_ising_longrange_2ndorder(self, time_info, sampling):
old_settings = np.seterr(all='ignore') #Prevent overflow warnings
comm=self.comm
N = self.latsize
(t_init, n_cycles, n_steps) = time_info
rank = comm.rank
if rank == root and self.verbose:
pprint("# Run parameters:")
#Copy params to another object, then delete
#the output that you don't want printed
out = copy.copy(self)
out.dsdotdg = 0.0
out.delta_eps_tensor = 0.0
out.jmat = 0.0
out.deltamn = 0.0
pprint(vars(out), depth=2)
if rank == root and not self.verbose:
pprint("# Starting run ...")
if self.omega == 0:
t_final = t_init + n_cycles
else:
t_final = t_init + (n_cycles * (2.0* np.pi/self.omega))
dt = (t_final-t_init)/(n_steps-1.0)
t_output = np.arange(t_init, t_final, dt)
#Let each process get its chunk of n_t by round robin
nt_loc = 0
iterator = rank
while iterator < self.n_t:
nt_loc += 1
iterator += comm.size
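        #Example (illustrative): with comm.size = 3 and n_t = 8, ranks 0, 1, 2
        #get nt_loc = 3, 3, 2 (trajectory indices 0,3,6 / 1,4,7 / 2,5).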
#Scatter unique seeds for generating unique random number arrays :
#each processor gets its own nt_loc seeds, and allocates nt_loc
#initial conditions. Each i.c. is a 2N sized array
#now, each process sends its value of nt_loc to root
all_ntlocs = comm.gather(nt_loc, root=root)
#Let the root process initialize nt unique integers for random seeds
if rank == root:
all_seeds = np.arange(self.n_t, dtype=np.int64)+1
all_ntlocs = np.array(all_ntlocs)
all_displacements = np.roll(np.cumsum(all_ntlocs), root+1)
all_displacements[root] = 0 # First displacement
else:
all_seeds = None
all_displacements = None
local_seeds = np.zeros(nt_loc, dtype=np.int64)
#Root scatters nt_loc sized seed data to that particular process
comm.Scatterv([all_seeds, all_ntlocs, all_displacements,\
MPI.DOUBLE],local_seeds)
list_of_local_data = []
if self.verbose:
list_of_dhwdt_abs2 = []
if self.sitedata:
list_of_local_ijdata = []
for runcount in xrange(0, nt_loc, 1):
random.seed(local_seeds[runcount] + self.seed_offset)
sx_init = np.ones(N)
if sampling == "spr":
#According to Schachenmayer, the wigner function of the quantum
#state generates the below initial conditions classically
sy_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
sz_init = 2.0 * np.random.randint(0,2, size=N) - 1.0
#Set initial conditions for the dynamics locally to vector
#s_init and store it as [s^x,s^x,s^x, .... s^y,s^y,s^y ...,
#s^z,s^z,s^z, ...]
s_init_spins = np.concatenate((sx_init, sy_init, sz_init))
elif sampling == "1-0":
spin_choices = np.array([(1, 1,0),(1, 0,1),(1, -1,0),(1, 0,-1)])
spins = np.array([random.choice(spin_choices) for i in xrange(N)])
s_init_spins = spins.T.flatten()
elif sampling == "all":
spin_choices_spr = np.array([(1, 1,1),(1, 1,-1),(1, -1,1),(1, -1,-1)])
spin_choices_10 = np.array([(1, 1,0),(1, 0,1),(1, -1,0),(1, 0,-1)])
spin_choices = np.concatenate((spin_choices_10, spin_choices_spr))
spins = np.array([random.choice(spin_choices) for i in xrange(N)])
s_init_spins = spins.T.flatten()
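                #Note: each choice tuple above is (s^x, s^y, s^z) for one site,
                #so spins.T.flatten() yields [all s^x, all s^y, all s^z],
                #matching the layout used by the "spr" branch.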
            else:
                raise ValueError("Unknown sampling scheme: %s" % sampling)
# Set initial correlations to 0.
s_init_corrs = np.zeros(9*N*N)
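            #Layout note: these 9*N*N entries are the flattened correlation
            #tensor g^{ab}_{mu nu}; the code later views them via
            #reshape(..., 3, 3, N, N) (see gij and gt below).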
#Redirect unwanted stdout warning messages to /dev/null
with stdout_redirected():
if self.verbose:
if self.jac:
s, info = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), t_output, \
args=(self,), Dfun=jac_2ndorder, full_output=True)
else:
s, info = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)),t_output, \
args=(self,), Dfun=None, full_output=True)
else:
if self.jac:
s = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), \
t_output, args=(self,), Dfun=jac_2ndorder)
else:
s = odeint(func_2ndorder, \
np.concatenate((s_init_spins, s_init_corrs)), t_output, \
args=(self,), Dfun=None)
            #Compute |dH/dt|^2 for a particular alphavec & weigh it.
            #If the rms over alphavec of these is 0, then each H is const
if self.verbose:
hws = weyl_hamilt(s,t_output, self)
dhwdt = np.array([t_deriv(hw, t_output) for hw in hws])
dhwdt_abs2 = np.square(dhwdt)
list_of_dhwdt_abs2.extend(dhwdt_abs2)
s = np.array(s, dtype="float128")#Widen memory to reduce overflows
#Compute expectations <sx> and \sum_{ij}<sx_i sx_j> -<sx>^2 with
#wigner func at t_output values LOCALLY for each initcond and
#store them
sx_expectations = np.sum(s[:, 0:N], axis=1)
sy_expectations = np.sum(s[:, N:2*N], axis=1)
sz_expectations = np.sum(s[:, 2*N:3*N], axis=1)
if self.sitedata:
(i, j) = self.tpnt_sites
sxi, syi, szi = s[:, i], s[:, i+N], s[:, i+2*N]
sxj, syj, szj = s[:, j], s[:, j+N], s[:, j+2*N]
sview = s.view()
gij = sview[:,3*N:].reshape(\
t_output.size,3, 3, N, N)[:, :, :, i, j]
#Calculate Spatial Correlations
sy_iplusk = s[:, N:2*N][:,i:] #This is a matrix
syy_k = np.array([sy_iplusk[t] * syi[t] \
for t in xrange(t_output.size)])# This is also a matrix
localdataij = OutData_ij(t_output, self.tpnt_sites, \
sxi, syi, szi,\
sxj, syj, szj,\
sy_iplusk,\
syy_k,\
gij)
list_of_local_ijdata.append(localdataij)
#svec is the tensor s^l_\mu
#G = s[3*N:].reshape(3,3,N,N) is the tensor g^{ab}_{\mu\nu}.
s = np.array(s, dtype="float128")#Enlarge in mem
sview = s.view()
gt = sview[:, 3*N:].reshape(s.shape[0], 3, 3, N, N)
gt[:,:,:,range(N),range(N)] = 0.0 #Set diags to 0
#Quantum spin variance
sx_var = np.sum(gt[:,0,0,:,:], axis=(-1,-2))
sx_var += (np.sum(s[:, 0:N], axis=1)**2 \
- np.sum(s[:, 0:N]**2, axis=1))
sy_var = np.sum(gt[:,1,1,:,:], axis=(-1,-2))
sy_var += (np.sum(s[:, N:2*N], axis=1)**2 \
- np.sum(s[:, N:2*N]**2, axis=1))
sz_var = np.sum(gt[:,2,2,:,:], axis=(-1,-2))
sz_var += (np.sum(s[:, 2*N:3*N], axis=1)**2 \
- np.sum(s[:, 2*N:3*N]**2, axis=1))
sxy_var = np.sum(gt[:,0,1,:,:], axis=(-1,-2))
sxy_var += np.sum([fftconvolve(s[m, 0:N], s[m, N:2*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
sxy_var -= np.sum(s[:, 0:N] * s[:, N:2*N], axis=1)
sxz_var = np.sum(gt[:,0,2,:,:], axis=(-1,-2))
sxz_var += np.sum([fftconvolve(s[m, 0:N], s[m, 2*N:3*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
sxz_var -= np.sum(s[:, 0:N] * s[:, 2*N:3*N], axis=1)
syz_var = np.sum(gt[:,1,2,:,:], axis=(-1,-2))
syz_var += np.sum([fftconvolve(s[m, N:2*N], s[m, 2*N:3*N]) \
for m in xrange(t_output.size)], axis=1)
#Remove the diagonal parts
syz_var -= np.sum(s[:, N:2*N] * s[:, 2*N:3*N], axis=1)
localdata = OutData(t_output, sx_expectations, sy_expectations,\
sz_expectations, sx_var, sy_var, sz_var, sxy_var, sxz_var, \
syz_var, self)
list_of_local_data.append(localdata)
        #After the loop above, sum-reduce (and later average) all locally
        #calculated expectations at each time step onto the root process
outdat = \
self.sum_reduce_all_data(list_of_local_data, t_output, comm)
if self.verbose:
dhwdt_abs2_locsum = np.sum(list_of_dhwdt_abs2, axis=0)
dhwdt_abs2_totals = np.zeros_like(dhwdt_abs2_locsum)\
if rank == root else None
if self.sitedata:
sij = self.sum_reduce_site_data(list_of_local_ijdata, t_output,\
self.tpnt_sites, comm)
if rank == root:
sij.normalize_data(self.n_t)
sij.dump_data()
if self.verbose:
temp_comm = Intracomm(comm)
dhwdt_abs2_totals = temp_comm.reduce(dhwdt_abs2_locsum, root=root)
if rank == root:
dhwdt_abs2_totals = dhwdt_abs2_totals/(self.n_t * N * N)
dhwdt_abs_totals = np.sqrt(dhwdt_abs2_totals)
#Dump to file
if rank == root:
outdat.normalize_data(self.n_t, N)
if self.file_output:
outdat.dump_data()
if self.verbose:
print("t-deriv of Hamilt (abs square) with wigner avg: ")
print(" ")
print(tabulate({"time": t_output, \
"dhwdt_abs": dhwdt_abs_totals}, \
headers="keys", floatfmt=".6f"))
if self.jac and self.verbose:
print('# Cumulative number of Jacobian evaluations by root:', \
np.sum(info['nje']))
print('# Done!')
np.seterr(**old_settings) # reset to default
return outdat
else:
np.seterr(**old_settings) # reset to default
return None
def evolve(self, time_info, sampling="spr"):
if self.s_order:
return self.dtwa_ising_longrange_2ndorder(time_info, sampling)
else:
return self.dtwa_ising_longrange_1storder(time_info)
if __name__ == '__main__':
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
#Initiate the parameters in object
p = ParamData(latsize=101, beta=1.0)
#Initiate the DTWA system with the parameters and niter
d = Dtwa_System(p, comm, n_t=2000)
data = d.evolve((0.0, 1.0, 1000))
|
gpl-2.0
| -5,499,009,293,101,981,000
| 43.714004
| 86
| 0.537847
| false
| 2.984073
| false
| false
| false
|
ph147/dcf77
|
reader.py
|
1
|
3135
|
#!/usr/bin/python
import sys
import struct
EPSILON = 0.02
SAMPLE_RATE = 44100
ZERO_BIT_IN_SECS = 0.1
ONE_BIT_IN_SECS = 0.2
NEW_MINUTE_BEEP_THRESHOLD = 1.7
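# DCF77 background: one bit is transmitted per second by reducing the carrier
# amplitude at the start of the second; a 100 ms reduction encodes 0 and a
# 200 ms reduction encodes 1. Second 59 has no reduction, so a gap longer
# than NEW_MINUTE_BEEP_THRESHOLD seconds marks the start of a new minute.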
def read_word(inf):
return struct.unpack('<l', inf.read(struct.calcsize('=l')))[0]
def read_words(inf):
word = read_word(inf)
while True:
yield word
word = read_word(inf)
def average(words, num):
try:
return 1.0*sum(abs(next(words)) for i in xrange(num))/num
except struct.error:
raise EOF
def sgn(num):
return -1 if num < 0 else 1 if num > 0 else 0
def steps(length):
count = 0
while True:
yield count
count += length
def in_vicinity(num, center):
return abs(num-center) < EPSILON
class SignalError(Exception):
pass
class EOF(Exception):
pass
class DCF77(object):
amplitude_factor = 0.3
block_length = 100
def __init__(self, filename=None):
if not filename:
print 'Reading from stdin...'
filename = '/dev/stdin'
else:
print 'Reading from file {}...'.format(filename)
self.filename = filename
self.lasts = [0]*3
self.data = True
self.bits = []
self.start = 0
self.end = 0
self.minute_started = False
def went_down(self, ave):
return ave < self.amplitude_factor*self.lasts[0] and not self.data
def went_up(self, ave):
return self.lasts[0] < self.amplitude_factor*ave and self.data
def start_new_minute(self):
print
if self.minute_started:
yield ''.join(self.bits)
self.bits = []
self.minute_started = True
print '*** New minute started. ***'
def process_carrier(self, step):
self.start = step
time = 1.0*(self.start-self.end)/SAMPLE_RATE
if time > NEW_MINUTE_BEEP_THRESHOLD:
for answer in self.start_new_minute():
yield answer
self.data = True
def append(self, bit):
self.bits.append(bit)
sys.stdout.write(bit)
def process_bit(self, time):
if in_vicinity(time, ZERO_BIT_IN_SECS):
self.append('0')
elif in_vicinity(time, ONE_BIT_IN_SECS):
self.append('1')
else:
raise SignalError
def process_silence(self, step):
self.end = step
time = 1.0*(self.end-self.start)/SAMPLE_RATE
if self.minute_started:
self.process_bit(time)
sys.stdout.flush()
self.data = False
def process_block(self, block, step):
if self.went_down(block):
for answer in self.process_carrier(step):
yield answer
elif self.went_up(block):
self.process_silence(step)
self.lasts.pop(0)
self.lasts.append(block)
def run(self):
with open(self.filename) as inf:
words = read_words(inf)
for step in steps(self.block_length):
ave = average(words, self.block_length)
for answer in self.process_block(ave, step):
yield answer
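# Minimal driver sketch (an assumption, not part of the original module):
# decode a raw stream of little-endian 32-bit samples at 44100 Hz and print
# each completed minute's bit string.
if __name__ == '__main__':
    decoder = DCF77(sys.argv[1] if len(sys.argv) > 1 else None)
    try:
        for minute_bits in decoder.run():
            print minute_bits
    except EOF:
        print 'End of stream.'
    except SignalError:
        print 'Signal lock lost.'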
|
mit
| 750,148,069,711,881,200
| 22.75
| 74
| 0.56555
| false
| 3.582857
| false
| false
| false
|
alfa-addon/addon
|
plugin.video.alfa/servers/crunchyroll.py
|
1
|
3429
|
# -*- coding: utf-8 -*-
from builtins import range
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
#from future import standard_library
#standard_library.install_aliases()
    import urllib.parse as urllib # Very slow in PY2; native in PY3
else:
    import urllib # Use the native PY2 module, which is faster
import base64
import struct
import zlib
from hashlib import sha1
from core import filetools
from core import jsontools
from core import httptools
from core import scrapertools
from platformcode import config, logger
GLOBAL_HEADER = {'User-Agent': 'Mozilla/5.0', 'Accept-Language': '*'}
proxy_i = "https://www.usa-proxy.org/index.php"
proxy = "https://www.usa-proxy.org/"
def test_video_exists(page_url):
logger.info("(page_url='%s')" % page_url)
global data
data = httptools.downloadpage(page_url, headers=GLOBAL_HEADER).data
#logger.error(data)
if "showmedia-trailer-notice" in data:
disp = scrapertools.find_single_match(data, '<a href="/freetrial".*?</span>.*?<span>\s*(.*?)</span>')
disp = disp.strip()
if disp:
disp = "Disponible gratuitamente: %s" % disp
return False, "[Crunchyroll] Error, se necesita cuenta premium. %s" % disp
return True, ""
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
#page_url='https://www.crunchyroll.com/es-es/one-piece/episode-891-climbing-up-a-waterfall-a-great-journey-through-the-land-of-wanos-sea-zone-786643'
logger.error("url=" + page_url)
video_urls = []
media_url = ''
file_sub = ""
idiomas = ['deDE', 'ptBR', 'frFR', 'itIT', 'enUS', 'esES', 'esLA']
index_sub = int(config.get_setting("crunchyrollsub", "crunchyroll"))
idioma_sub = idiomas[index_sub]
raw_data = scrapertools.find_single_match(data, r'"streams":(\[[^\]]+])')
if idioma_sub == 'esES' and not idioma_sub in raw_data:
idioma_sub = 'esLA'
elif idioma_sub == 'esLA' and not idioma_sub in raw_data:
idioma_sub = 'esES'
if idioma_sub not in raw_data:
idioma_sub = 'enUS'
json_data = jsontools.load(raw_data)
#logger.error(json_data)
for elem in json_data:
formato = elem.get('format', '')
if formato in ['vo_adaptive_hls', 'adaptive_hls']:
lang = elem.get('hardsub_lang', '')
audio_lang = elem.get('audio_lang', '')
if lang == idioma_sub:
media_url = elem.get('url', '')
break
if not lang and audio_lang != 'jaJP':
media_url = elem.get('url', '')
break
if not media_url:
return video_urls
m3u_data = httptools.downloadpage(media_url, headers=GLOBAL_HEADER).data.decode('utf-8')
matches = scrapertools.find_multiple_matches(m3u_data, 'TION=\d+x(\d+).*?\s(.*?)\s')
filename = scrapertools.get_filename_from_url(media_url)[-4:]
if matches:
for quality, media_url in matches:
video_urls.append(["%s %sp [crunchyroll]" % (filename, quality), media_url])
else:
video_urls.append(["m3u8 [crunchyroll]", media_url])
return video_urls
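# Usage sketch (illustrative): test_video_exists() must run before
# get_video_url(), since it caches the page HTML in the module-global
# 'data' that get_video_url() then parses:
#   exists, msg = test_video_exists(page_url)
#   if exists:
#       for label, url in get_video_url(page_url):
#           play(label, url)  # hypothetical consumer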
|
gpl-3.0
| 2,660,880,265,300,349,000
| 33.697917
| 153
| 0.585352
| false
| 3.236072
| false
| false
| false
|
pklimai/py-junos-eznc
|
tests/unit/test_factcache.py
|
2
|
10508
|
import unittest2 as unittest
from nose.plugins.attrib import attr
from mock import patch, MagicMock, call
from jnpr.junos.exception import FactLoopError
from jnpr.junos import Device
from ncclient.manager import Manager, make_device_handler
from ncclient.transport import SSHSession
__author__ = "Stacy Smith"
__credits__ = "Jeremy Schulman, Nitin Kumar"
@attr('unit')
class TestFactCache(unittest.TestCase):
@patch('ncclient.manager.connect')
def setUp(self, mock_connect):
mock_connect.side_effect = self._mock_manager_setup
self.dev = Device(host='1.1.1.1', user='rick', password='password123')
self.dev.open()
def test_factcache_unknown_fact(self):
with self.assertRaises(KeyError):
unknown = self.dev.facts['unknown']
def test_factcache_fact_loop(self):
# The personality fact calls the
# model fact.
# Change the callback for the model
# fact to be the same as the personality fact
# in order to induce a fact loop.
self.dev.facts._callbacks['model'] = \
self.dev.facts._callbacks['personality']
# Now, trying to fetch the personality
# fact should cause a FactLoopError
with self.assertRaises(FactLoopError):
personality = self.dev.facts['personality']
def test_factcache_return_unexpected_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_bar_fact
# Now, trying to access the foo fact should cause a
# RunTimeError because the bar fact is also unexpectedly provided
with self.assertRaises(RuntimeError):
foo = self.dev.facts['foo']
@patch('jnpr.junos.factcache.warnings')
def test_factcache_nonmatching_old_and_new_fact(self, mock_warn):
# Set fact style to 'both'
self.dev._fact_style = 'both'
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
# Cache the new-style foo fact
self.dev.facts._cache['foo'] = 'foo'
# Set the old-style foo fact to a different value
self.dev._ofacts['foo'] = 'bar'
# Now, trying to access the foo fact should cause a
# RunTimeWarning because the values of the new and old-style facts
# do not match
foo = self.dev.facts['foo']
mock_warn.assert_has_calls([call.warn(
'New and old-style facts do not match for the foo fact.\n'
' New-style value: foo\n Old-style value: bar\n',
RuntimeWarning)])
def test_factcache_fail_to_return_expected_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_bar_fact
self.dev.facts._callbacks['bar'] = get_bar_fact
# Now, trying to access the foo fact should cause a
# RunTimeError because the foo fact is not provided
with self.assertRaises(RuntimeError):
foo = self.dev.facts['foo']
def test_factcache_delete_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
foo = self.dev.facts['foo']
# Now, trying to delete the foo fact should cause a
# RunTimeError
with self.assertRaises(RuntimeError):
self.dev.facts.pop('foo', None)
def test_factcache_set_fact(self):
# Create a callback for the foo fact.
self.dev.facts._callbacks['foo'] = get_foo_fact
foo = self.dev.facts['foo']
# Now, trying to set the foo fact should cause a
# RunTimeError
with self.assertRaises(RuntimeError):
self.dev.facts['foo'] = 'bar'
def test_factcache_iter_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_foo_bar_fact}
# Now, get the length of the facts
self.assertEqual(len(list(self.dev.facts)), 2)
def test_factcache_len_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
# Now, get the length of the facts
self.assertEqual(len(self.dev.facts), 2)
def test_factcache_string_repr(self):
# Override the callbacks to only support foo and bar facts.
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
# Set values for foo and bar facts
self.dev.facts._cache['foo'] = 'foo'
self.dev.facts._cache['bar'] = {'bar': 'bar'}
# Now, get the string (pretty) representation of the facts
self.assertEqual(str(self.dev.facts), "{'bar': {'bar': 'bar'}, "
"'foo': 'foo'}")
def test_factcache_repr_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact}
# Now, get the length of the facts
self.assertEqual(str(self.dev.facts), "{'bar': 'bar', 'foo': 'foo'}")
def test_factcache_refresh_single_key(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh just the foo fact
self.dev.facts._refresh(keys='foo')
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
def test_factcache_refresh_two_keys(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh the foo and _hidden facts
self.dev.facts._refresh(keys=('foo', '_hidden'))
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], True)
def test_factcache_refresh_unknown_fact(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh just the unknown bar fact which should raise a RuntimeError
with self.assertRaises(RuntimeError):
self.dev.facts._refresh(keys=('bar'))
def test_factcache_refresh_all_facts(self):
# Override the callbacks
self.dev.facts._callbacks = {'foo': get_foo_fact,
'bar': get_bar_fact,
'_hidden': get_hidden_fact}
# Populate the cache
self.dev.facts._cache['foo'] = 'before'
self.dev.facts._cache['bar'] = 'before'
self.dev.facts._cache['_hidden'] = 'before'
# Confirm the cached values
self.assertEqual(self.dev.facts['foo'], 'before')
self.assertEqual(self.dev.facts['bar'], 'before')
self.assertEqual(self.dev.facts['_hidden'], 'before')
# Refresh all facts
self.dev.facts._refresh()
# Confirm the values now
self.assertEqual(self.dev.facts['foo'], 'foo')
self.assertEqual(self.dev.facts['bar'], 'bar')
self.assertEqual(self.dev.facts['_hidden'], True)
@patch('jnpr.junos.device.warnings')
def test_factcache_refresh_exception_on_failure(self, mock_warn):
with self.assertRaises(ValueError):
# Refresh all facts with exception on failure
self.dev.facts._refresh(exception_on_failure=True)
@patch('jnpr.junos.device.warnings')
@patch('jnpr.junos.factcache.warnings')
def test_factcache_refresh_warnings_on_failure(self,
mock_warn,
mock_device_warn):
# Refresh all facts with warnings on failure
self.dev.facts._refresh(warnings_on_failure=True)
mock_warn.assert_has_calls([call.warn(
'Facts gathering is incomplete. To know the reason call '
'"dev.facts_refresh(exception_on_failure=True)"',
RuntimeWarning)])
# mock_warn.assert_called_once('Facts gathering is incomplete. '
# 'To know the reason call '
# '"dev.facts_refresh('
# 'exception_on_failure=True)"',
# RuntimeWarning)
def _mock_manager_setup(self, *args, **kwargs):
if kwargs:
device_params = kwargs['device_params']
device_handler = make_device_handler(device_params)
session = SSHSession(device_handler)
return Manager(session, device_handler)
def get_foo_fact(device):
return {'foo': 'foo'}
def get_foo_bar_fact(device):
return {'foo': 'foo',
'bar': 'bar', }
def get_bar_fact(device):
return {'bar': 'bar', }
def get_hidden_fact(device):
return {'_hidden': True, }
|
apache-2.0
| 7,211,522,377,934,764,000
| 41.54251
| 78
| 0.576418
| false
| 4.01682
| true
| false
| false
|
haphaeu/yoshimi
|
EulerProject/121.py
|
1
|
2562
|
from fractions import Fraction
from fractions import gcd
def nextPermLexic(perm):
# ###########################################################################
#The following algorithm generates the next permutation lexicographically
    #after a given permutation, returned as a new string (the input is
    #not modified, despite the in-place wording of the reference).
#1- Find the largest index k such that a[k] < a[k + 1]. If no such index
# exists, the permutation is the last permutation.
#2- Find the largest index l such that a[k] < a[l]. Since k + 1 is such
# an index, l is well defined and satisfies k < l.
#3- Swap a[k] with a[l].
#4- Reverse the sequence from a[k + 1] up to and including the final
# element a[n].
#
# Written by R.Rossi, 26th/Oct/2011
#
# Reference:
# http://en.wikipedia.org/wiki/Permutation#Generation_in_lexicographic_order
# ###########################################################################
#will return the next permutation
#after 'perm' in lexicographic order
sz=len(perm)
#Step 1: find largest k st a[k]<a[k+1]
k= -666
for i in range(sz-2,-1,-1):
if perm[i] < perm[i+1]:
k=i
break
if k==-666:
#print "\nAchieved last permutation in lexicographic order"
return []
else:
#Step 2: find largest index l such that a[k] < a[l]
l=-666
if k==sz-2:
l=k+1
else:
for i in range(sz-1,k,-1):
if perm[k] < perm[i]:
l=i
break
if l==-666:
print "\nError! Oh my god, what to do?"
return []
else:
#step 3: Swap a[k] with a[l]
tmp=perm[0:k] + perm[l] + perm[k+1:l] + perm[k] + perm[l+1:]
#step 4: reverse a[k+1:]
tmp2=tmp[0:k+1] + tmp[-1:k:-1]
#done.
#save as perm
nextPerm=tmp2
return nextPerm
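# Quick check (illustrative): starting from 'abc', repeated calls enumerate
# the permutations in lexicographic order:
#   'abc' -> 'acb' -> 'bac' -> 'bca' -> 'cab' -> 'cba' -> [] (exhausted)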
# ### MAIN ###
TURNS=15
MXRED=(TURNS-1)/2
winPlays=[]
#generate initial conditions
for i in range(MXRED+1):
nxt='b'*(TURNS-i)+'r'*i
while nxt:
winPlays.append(nxt)
nxt=nextPermLexic(nxt)
#sum the probabilities of all wins
ProbTot = Fraction(0,1)
prob = Fraction(1,1)
for play in winPlays:
for i,disk in enumerate(play):
if disk=='b':
prob *= Fraction(1,i+2)
else:
prob *= Fraction(i+1,i+2)
#print ProbTot, "+", prob,
ProbTot = ProbTot + prob
#print "=", ProbTot
prob = Fraction(1,1)
print "Probability of winning is", ProbTot
print "Required fund", ProbTot.denominator/ProbTot.numerator
|
lgpl-3.0
| -6,066,579,934,885,888,000
| 30.243902
| 77
| 0.541374
| false
| 3.259542
| false
| false
| false
|
ArteliaTelemac/PostTelemac
|
PostTelemac/meshlayertools/toshape/posttelemac_util_extractpts_caduc.py
|
1
|
20503
|
##[01_Telemac]=group
# *************************************************************************
"""
Versions:
0.0 first script
0.2: a single script whether run from the modeler or not
"""
# *************************************************************************
##Type_de_traitement=selection En arriere plan;Modeler;Modeler avec creation de fichiers
##Fichier_resultat_telemac=file
##Temps_a_exploiter_fichier_max_0=number 0.0
##Pas_d_espace_0_si_tous_les_points=number 0.0
##fichier_point_avec_vecteur_vitesse=boolean False
##Parametre_vitesse_X=string UVmax
##Parametre_vitesse_Y=string VVmax
##systeme_de_projection=crs EPSG:2154
##forcage_attribut_fichier_de_sortie=string
##fichier_de_sortie_points=output vector
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from os import path
import numpy as np
from matplotlib.path import Path
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.tools.vector import VectorWriter
import matplotlib.pyplot as plt
from matplotlib import tri
from qgis.utils import *
from PyQt4.QtCore import SIGNAL, Qt
from PyQt4 import QtCore, QtGui
# from utils.files import getFileContent
# from parsers.parserSortie import getValueHistorySortie
# from parsers.parserSELAFIN import getValueHistorySLF, getValuePolylineSLF,subsetVariablesSLF
from parsers.parserSELAFIN import SELAFIN
# from parsers.parserStrings import parseArrayPaires
import threading
from time import ctime
import math
def isFileLocked(file, readLockCheck=False):
"""
Checks to see if a file is locked. Performs three checks
1. Checks if the file even exists
2. Attempts to open the file for reading. This will determine if the file has a write lock.
Write locks occur when the file is being edited or copied to, e.g. a file copy destination
3. If the readLockCheck parameter is True, attempts to rename the file. If this fails the
file is open by some other process for reading. The file can be read, but not written to
or deleted.
@param file:
@param readLockCheck:
"""
if not (os.path.exists(file)):
return False
try:
f = open(file, "r")
f.close()
except IOError:
return True
if readLockCheck:
lockFile = file + ".lckchk"
if os.path.exists(lockFile):
os.remove(lockFile)
try:
os.rename(file, lockFile)
time.sleep(1)
os.rename(lockFile, file)
except WindowsError:
return True
return False
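# Example (illustrative): isFileLocked(path) is True while another process
# holds a write lock on the file; isFileLocked(path, readLockCheck=True)
# additionally detects read locks via a rename round-trip (Windows-oriented,
# since it catches WindowsError).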
# *************************************************************************
def workerFinished(str1):
progress.setText(str(ctime()) + " - Fin du thread - Chargement du fichier resultat")
vlayer = QgsVectorLayer(str1, os.path.basename(str1).split(".")[0], "ogr")
QgsMapLayerRegistry.instance().addMapLayer(vlayer)
class Worker_pts(QtCore.QObject):
def __init__(self, donnees_d_entree):
QtCore.QObject.__init__(self)
self.pathshp = donnees_d_entree["pathshp"]
self.mesh = donnees_d_entree["mesh"]
self.x = donnees_d_entree["x"]
self.y = donnees_d_entree["y"]
self.ztri = donnees_d_entree["ztri"]
self.vlayer = ""
self.pasespace = donnees_d_entree["pasdespace"]
self.vitesse = "0"
self.paramvalueX = donnees_d_entree["paramvalueX"]
self.paramvalueY = donnees_d_entree["paramvalueY"]
self.traitementarriereplan = donnees_d_entree["traitementarriereplan"]
fields = donnees_d_entree["champs"]
if self.paramvalueX != None:
fields.append(QgsField("UV", QVariant.Double))
fields.append(QgsField("VV", QVariant.Double))
fields.append(QgsField("norme", QVariant.Double))
fields.append(QgsField("angle", QVariant.Double))
self.vitesse = "1"
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1 = QgsVectorFileWriter(
self.pathshp,
None,
donnees_d_entree["champs"],
QGis.WKBPoint,
QgsCoordinateReferenceSystem(str(donnees_d_entree["crs"])),
"ESRI Shapefile",
)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2 = VectorWriter(
donnees_d_entree["fichierdesortie_point"],
None,
donnees_d_entree["champs"],
QGis.WKBMultiPoint,
QgsCoordinateReferenceSystem(str(donnees_d_entree["crs"])),
)
def run(self):
strtxt = (
str(ctime())
+ " - Thread - repertoire : "
+ os.path.dirname(self.pathshp)
+ " - fichier : "
+ os.path.basename(self.pathshp)
)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
fet = QgsFeature()
try:
if True:
if self.paramvalueX == None:
boolvitesse = False
else:
boolvitesse = True
                # ------------------------------------- Process all points
if self.pasespace == 0:
noeudcount = len(self.x)
strtxt = str(ctime()) + " - Thread - Traitement des vitesses - " + str(noeudcount) + " noeuds"
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
for k in range(len(self.x)):
if k % 5000 == 0:
strtxt = str(ctime()) + " - Thread - noeud n " + str(k) + "/" + str(noeudcount)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0 * k / noeudcount))
else:
progress.setPercentage(int(100.0 * k / noeudcount))
fet.setGeometry(QgsGeometry.fromPoint(QgsPoint(float(self.x[k]), float(self.y[k]))))
tabattr = []
for l in range(len(self.ztri)):
tabattr.append(float(self.ztri[l][k]))
if boolvitesse:
norme = (
(float(self.ztri[self.paramvalueX][k])) ** 2.0
+ (float(self.ztri[self.paramvalueY][k])) ** 2.0
) ** (0.5)
atanUVVV = math.atan2(
float(self.ztri[self.paramvalueY][k]), float(self.ztri[self.paramvalueX][k])
)
angle = atanUVVV / math.pi * 180.0
if angle < 0:
angle = angle + 360
# angle YML
# angle = atanUVVV*180.0/math.pi+min(atanUVVV,0)/atanUVVV*360.0
tabattr.append(float(self.ztri[self.paramvalueX][k]))
tabattr.append(float(self.ztri[self.paramvalueY][k]))
tabattr.append(norme)
tabattr.append(angle)
fet.setAttributes(tabattr)
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1.addFeature(fet)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2.addFeature(fet)
                # ------------------------------------- Process points on a regular grid spacing
else:
triangul = tri.Triangulation(self.x, self.y, self.mesh)
lineartri = []
for i in range(len(self.ztri)):
lineartri.append(tri.LinearTriInterpolator(triangul, self.ztri[i]))
xmin = np.min(self.x)
xmax = np.max(self.x)
ymin = np.min(self.y)
ymax = np.max(self.y)
pasx = int((xmax - xmin) / self.pasespace)
pasy = int((ymax - ymin) / self.pasespace)
strtxt = (
str(ctime())
+ " - Thread - Traitement des vitesses - pas d espace : "
+ str(self.pasespace)
+ "m - nombre de points : "
+ str(pasx)
+ "*"
+ str(pasy)
+ "="
+ str(pasx * pasy)
)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
compt = 0
for x2 in range(pasx):
xtemp = float(xmin + x2 * self.pasespace)
for y2 in range(pasy):
compt = compt + 1
if (compt) % 5000 == 0:
strtxt = str(ctime()) + " - Thread - noeud n " + str(compt) + "/" + str(pasx * pasy)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0 * compt / (pasy * pasx)))
else:
progress.setPercentage(int(100.0 * compt / (pasy * pasx)))
ytemp = float(ymin + y2 * self.pasespace)
fet.setGeometry(QgsGeometry.fromPoint(QgsPoint(xtemp, ytemp)))
tabattr1 = []
if str(float(lineartri[0].__call__(xtemp, ytemp))) == "nan":
continue
for j in range(len(lineartri)):
tabattr1.append(float(lineartri[j].__call__(xtemp, ytemp)))
if boolvitesse:
VX = float(lineartri[self.paramvalueX].__call__(xtemp, ytemp))
VY = float(lineartri[self.paramvalueY].__call__(xtemp, ytemp))
norme = ((VX) ** 2.0 + (VY) ** 2.0) ** (0.5)
angle = math.atan2(VY, VX) / math.pi * 180.0
if angle < 0:
angle = angle + 360
tabattr1.append(VX)
tabattr1.append(VY)
tabattr1.append(norme)
tabattr1.append(angle)
fet.setAttributes(tabattr1)
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
self.writerw1.addFeature(fet)
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
self.writerw2.addFeature(fet)
# del self.writerw
except Exception, e:
strtxt = str(ctime()) + " ************ PROBLEME CALCUL DES VITESSES : " + str(e)
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
if self.traitementarriereplan == 0:
self.progress.emit(int(100.0))
else:
progress.setPercentage(int(100.0))
if self.traitementarriereplan == 0 or self.traitementarriereplan == 2:
del self.writerw1
if self.traitementarriereplan == 1 or self.traitementarriereplan == 2:
del self.writerw2
strtxt = str(ctime()) + " - Thread - fichier " + self.pathshp + " cree"
if self.traitementarriereplan == 0:
self.status.emit(strtxt)
else:
progress.setText(strtxt)
# self.status.emit("Fichier " + self.nomrept+ '\ '.strip()+ self.nomfilet + " cree")
if self.traitementarriereplan == 0:
self.finished.emit(self.pathshp)
if self.traitementarriereplan == 2:
t = workerFinished(self.pathshp)
progress = QtCore.pyqtSignal(int)
status = QtCore.pyqtSignal(str)
error = QtCore.pyqtSignal(str)
killed = QtCore.pyqtSignal()
finished = QtCore.pyqtSignal(str)
# ****************************************************************************
# *************** Thread launcher class ***********************************
# ****************************************************************************
class traitementSelafin:
def __init__(self, donnees_d_entree):
self.donnees_d_entree = donnees_d_entree
self.thread = QtCore.QThread()
if donnees_d_entree["forcage_attribut_fichier_de_sortie"] == "":
if self.donnees_d_entree["pasdespace"] == 0:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_points_t_"
+ str(int(self.donnees_d_entree["temps"]))
+ str(".shp"),
)
else:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_points_"
+ str(int(self.donnees_d_entree["pasdespace"]))
+ "m_t_"
+ str(int(self.donnees_d_entree["temps"]))
+ str(".shp"),
)
else:
self.donnees_d_entree["pathshp"] = os.path.join(
os.path.dirname(self.donnees_d_entree["pathselafin"]),
os.path.basename(self.donnees_d_entree["pathselafin"]).split(".")[0]
+ "_"
+ str(self.donnees_d_entree["forcage_attribut_fichier_de_sortie"])
+ str(".shp"),
)
if self.donnees_d_entree["fichier_point_avec_vecteur_vitesse"]:
self.donnees_d_entree["Parametre_vitesse_X"] = donnees_d_entree["Parametre_vitesse_X"]
self.donnees_d_entree["Parametre_vitesse_Y"] = donnees_d_entree["Parametre_vitesse_Y"]
else:
self.donnees_d_entree["Parametre_vitesse_X"] = None
self.donnees_d_entree["Parametre_vitesse_Y"] = None
self.worker = ""
def main1(self):
progress.setPercentage(0)
progress.setText(str(ctime()) + " - Initialisation - Debut du script")
        # Load the .res result file ****************************************
slf = SELAFIN(self.donnees_d_entree["pathselafin"])
        # Find the time step to process ***********************************************
test = False
for i, time in enumerate(slf.tags["times"]):
progress.setText(
str(ctime()) + " - Initialisation - Temps present dans le fichier : " + str(np.float64(time))
)
# print str(i) +" "+ str(time) + str(type(time))
if float(time) == float(self.donnees_d_entree["temps"]):
test = True
values = slf.getVALUES(i)
if test:
progress.setText(
str(ctime()) + " - Initialisation - Temps traite : " + str(np.float64(self.donnees_d_entree["temps"]))
)
else:
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur : \
Temps non trouve"
)
        # Find the variables to process ****************************************
test = [False, False]
tabparam = []
donnees_d_entree["champs"] = QgsFields()
for i, name in enumerate(slf.VARNAMES):
progress.setText(str(ctime()) + " - Initialisation - Variable dans le fichier res : " + name.strip())
tabparam.append([i, name.strip()])
donnees_d_entree["champs"].append(QgsField(str(name.strip()).translate(None, "?,!.;"), QVariant.Double))
if self.donnees_d_entree["Parametre_vitesse_X"] != None:
if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_X"].strip():
test[0] = True
self.donnees_d_entree["paramvalueX"] = i
if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_Y"].strip():
test[1] = True
self.donnees_d_entree["paramvalueY"] = i
else:
self.donnees_d_entree["paramvalueX"] = None
self.donnees_d_entree["paramvalueY"] = None
if self.donnees_d_entree["Parametre_vitesse_X"] != None:
if test == [True, True]:
progress.setText(
str(ctime())
+ " - Initialisation - Parametre trouvee : "
+ str(tabparam[self.donnees_d_entree["paramvalueX"]][1]).strip()
+ " "
+ str(tabparam[self.donnees_d_entree["paramvalueY"]][1]).strip()
)
else:
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur : \
Parametre vitesse non trouve"
)
        # Load the mesh topology from the .res file ********************************************
self.donnees_d_entree["mesh"] = np.array(slf.IKLE3)
self.donnees_d_entree["x"] = slf.MESHX
self.donnees_d_entree["y"] = slf.MESHY
        # Check that the shp file does not already exist / is not locked
if isFileLocked(self.donnees_d_entree["pathshp"], True):
raise GeoAlgorithmExecutionException(
str(ctime())
+ " - Initialisation - Erreur :\
Fichier shape deja charge !!"
)
        # Load the data ***********************************
self.donnees_d_entree["ztri"] = []
for i in range(len(tabparam)):
self.donnees_d_entree["ztri"].append(values[i])
        # Launch the thread **************************************************************************************
        self.worker = Worker_pts(donnees_d_entree)
if donnees_d_entree["traitementarriereplan"] == 0:
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.worker.progress.connect(progress.setPercentage)
self.worker.status.connect(progress.setText)
self.worker.finished.connect(workerFinished)
self.worker.finished.connect(self.worker.deleteLater)
self.thread.finished.connect(self.thread.deleteLater)
self.worker.finished.connect(self.thread.quit)
champ = QgsFields()
writercontour = VectorWriter(
self.donnees_d_entree["fichierdesortie_point"],
None,
champ,
QGis.WKBMultiPoint,
QgsCoordinateReferenceSystem(str(self.donnees_d_entree["crs"])),
)
self.thread.start()
else:
self.worker.run()
# *************************************************************************
# ************** Variable initialization ****************************************
# *************************************************************************
|
gpl-3.0
| -5,462,225,697,454,298,000
| 42.809829
| 118
| 0.485441
| false
| 3.91802
| true
| false
| false
|
NinjaMSP/crossbar
|
crossbar/twisted/processutil.py
|
1
|
4681
|
#####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
from twisted.internet.endpoints import _WrapIProtocol, ProcessEndpoint
from twisted.internet.address import _ProcessAddress
from twisted.internet import defer
from twisted.python.runtime import platform
__all__ = ('WorkerProcessEndpoint',)
if platform.isWindows():
# On Windows, we're only using FDs 0, 1, and 2.
class _WorkerWrapIProtocol(_WrapIProtocol):
"""
Wraps an IProtocol into an IProcessProtocol which forwards data
received on Worker._log_fds to WorkerProcess.log().
"""
def childDataReceived(self, childFD, data):
"""
Some data has come in from the process child. If it's one of our
log FDs, log it. Otherwise, let _WrapIProtocol deal with it.
"""
# track bytes received per child FD
self._worker.track_stats(childFD, len(data))
if childFD in self._worker._log_fds:
self._worker.log(childFD, data)
else:
_WrapIProtocol.childDataReceived(self, childFD, data)
else:
# On UNIX-likes, we're logging FD1/2, and using FD3 for our own
# communication.
class _WorkerWrapIProtocol(_WrapIProtocol):
"""
Wraps an IProtocol into an IProcessProtocol which forwards data
received on Worker._log_fds to WorkerProcess.log().
"""
def childDataReceived(self, childFD, data):
"""
Some data has come in from the process child. If it's one of our
log FDs, log it. If it's on FD3, send it to the WAMP connection.
Otherwise, let _WrapIProtocol deal with it.
"""
# track bytes received per child FD
self._worker.track_stats(childFD, len(data))
if childFD in self._worker._log_fds:
self._worker.log(childFD, data)
elif childFD == 3:
self.protocol.dataReceived(data)
else:
_WrapIProtocol.childDataReceived(self, childFD, data)
class WorkerProcessEndpoint(ProcessEndpoint):
"""
A custom process endpoint for workers.
:see: http://twistedmatrix.com/documents/current/api/twisted.internet.endpoints.ProcessEndpoint.html
"""
def __init__(self, *args, **kwargs):
"""
Ctor.
:param worker: The worker this endpoint is being used for.
:type worker: instance of WorkerProcess
"""
self._worker = kwargs.pop('worker')
ProcessEndpoint.__init__(self, *args, **kwargs)
def connect(self, protocolFactory):
"""
See base class.
"""
proto = protocolFactory.buildProtocol(_ProcessAddress())
try:
wrapped = _WorkerWrapIProtocol(proto, self._executable, self._errFlag)
wrapped._worker = self._worker
self._spawnProcess(wrapped,
self._executable, self._args, self._env,
self._path, self._uid, self._gid, self._usePTY,
self._childFDs)
except:
return defer.fail()
else:
return defer.succeed(proto)
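# Usage sketch (hypothetical arguments, for illustration only): the endpoint
# is built like a stock ProcessEndpoint plus the extra 'worker' keyword
# consumed in __init__:
#   ep = WorkerProcessEndpoint(reactor, executable, args=args, worker=worker)
#   d = ep.connect(protocol_factory)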
|
agpl-3.0
| 1,370,651,899,697,463,800
| 37.68595
| 104
| 0.620594
| false
| 4.416038
| false
| false
| false
|
MatthewWilkes/mw4068-packaging
|
src/melange/src/soc/views/helper/params.py
|
1
|
16606
|
#!/usr/bin/env python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Params related methods.
"""
__authors__ = [
'"Madhusudan.C.S" <madhusudancs@gmail.com>',
'"Mario Ferraro" <fadinlight@gmail.com>',
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
import copy
from django import forms
from django.utils.translation import ugettext
from soc.logic import cleaning
from soc.logic import dicts
from soc.models import linkable
from soc.views import helper
from soc.views.helper import access
from soc.views.helper import dynaform
from soc.views.helper import redirects
from soc.views.helper import widgets
DEF_LIST_DESCRIPTION_FMT = ugettext(
'List of %(name_plural)s.')
DEF_CREATE_INSTRUCTION_MSG_FMT = ugettext(
    'Please use this form to select a %(name)s.')
DEF_SUBMIT_MSG_PARAM_NAME = 's'
DEF_SUBMIT_MSG_PROFILE_SAVED = 0
DEF_SUBMIT_MSG_CANNOT_DELETE_ENTITY = 1
# list with all js scripts used for documentary purposes
DEF_JS_USES_LIST = [
'jq',
'jq_ajaqQueue',
'jq_autocomplete',
'jq_bgiframe',
'jq_grid',
'jq_purr',
'jq_spin',
'jq_datetimepicker',
'jq_progressbar',
'jq_thickbox',
'jq_ui_core',
'jlinq',
'json',
'menu',
'melange',
'melangelist',
'melangetooltip',
'melangeautocomplete',
'tinymce',
]
DEF_FIELD_INIT_PARAMS = ['required', 'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial']
def constructParams(params):
"""Constructs a new params dictionary based on params.
Params usage:
The params dictionary is passed to getCreateForm and getEditForm,
see their docstring on how they use it.
rights: The rights value is merged with a default rights
dictionary and then used as rights value.
url_name: The url_name value is used in constructing several
redirects as the first part of the url.
module_name: The module_name value is used in constructing the
location of several templates. It is expected that it matches
the part after "/templates/soc/" for this View.
name_plural: The name_plural argument is provided to the
LIST_DESCRIPTION when constructing the list_description field.
extra_dynainclude: The extra_dynainclude value is used when
constructing the create_dynainclude value.
extra_dynaexclude: The extra_dynaexclude value is used when
constructing the create_dynaexclude value.
logic: The logic value is used as argument to save the scope_logic
and create a create form.
"""
logic = params['logic']
if params.get('rights'):
rights = params['rights']
else:
rights = access.Checker(params)
rights['unspecified'] = ['deny']
rights['allow'] = ['allow']
rights['any_access'] = ['checkIsLoggedIn']
rights['show'] = ['checkIsUser']
rights['create'] = ['checkIsDeveloper']
rights['edit'] = ['checkIsDeveloper']
rights['delete'] = ['checkIsDeveloper']
rights['list'] = ['checkIsDeveloper']
rights['pick'] = ['checkIsUser'] # TODO(SRabbelier): proper check
new_params = {}
new_params['scope_logic'] = logic.getScopeLogic()
if 'name_short' not in params:
params['name_short'] = params['name']
if 'name_plural' not in params:
params['name_plural'] = params['name'] + 's'
if 'module_name' not in params:
params['module_name'] = params['name_short'].replace(' ', '_').lower()
if 'url_name' not in params:
params['url_name'] = params['module_name']
if 'document_prefix' not in params:
params['document_prefix'] = params['url_name']
# Do not expand edit_redirect to allow it to be overwritten without suffix
new_params['edit_redirect'] = '/%(url_name)s/edit/%(suffix)s'
new_params['missing_redirect'] = '/%(url_name)s/create' % params
new_params['delete_redirect'] = '/%(url_name)s/list' % params
new_params['invite_redirect'] = '/request/list'
# new_params['cancel_redirect'] = '/%(url_name)s/list' % params
new_params['public_redirect'] = None
new_params['sidebar'] = None
new_params['sidebar_grouping'] = 'main'
new_params['sidebar_defaults'] = []
new_params['sidebar_developer'] = [
# TODO(SRabbelier): remove create once new list code is in
('/%s/create', 'New %(name)s', 'create'),
('/%s/list', 'List %(name_plural)s', 'list'),
]
new_params['sidebar_additional'] = []
names_sans_link_id = [i for i in logic.getKeyFieldNames() if i != 'link_id']
sans_link_id_pattern = getPattern(names_sans_link_id,
linkable.SCOPE_PATH_ARG_PATTERN)
new_params['link_id_arg_pattern'] = linkable.LINK_ID_ARG_PATTERN
new_params['link_id_pattern_core'] = linkable.LINK_ID_PATTERN_CORE
new_params['scope_path_pattern'] = getScopePattern(params)
new_params['sans_link_id_pattern'] = sans_link_id_pattern
new_params['django_patterns'] = None
new_params['extra_django_patterns'] = []
new_params['django_patterns_defaults'] = []
# Defines the module package that the view is in. If it is not
# already defined in the respective view, it defaults to
# soc.views.models
if not params.get('module_package'):
new_params['module_package'] = 'soc.views.models'
if not params.get('no_edit'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>edit)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.edit', 'Edit %(name_short)s')]
if not params.get('no_delete'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>delete)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.delete', 'Delete %(name_short)s')]
if not params.get('no_show'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>show)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.public', 'Show %(name_short)s')]
if not params.get('no_admin'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>admin)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.admin',
'Show %(name_short)s (admin)')]
if not params.get('no_create_raw'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_create_with_scope'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(scope)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_create_with_key_fields'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if not params.get('no_list_raw'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list)$',
'%(module_package)s.%(module_name)s.list', 'List %(name_plural)s')]
if params.get('pickable'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>pick)$',
'%(module_package)s.%(module_name)s.pick', 'Pick %(name_short)s')]
if params.get('export_content_type'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>export)/%(key_fields)s$',
'%(module_package)s.%(module_name)s.export', 'Export %(name_short)s')]
if params.get('sans_link_id_create'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>create)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.create', 'Create %(name_short)s')]
if params.get('sans_link_id_list'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.list', 'List %(name_plural)s')]
if params.get('sans_link_id_public_list'):
new_params['django_patterns_defaults'] += [
(r'^%(url_name)s/(?P<access_type>list_public)/%(sans_link_id)s$',
'%(module_package)s.%(module_name)s.list_public',
'List %(name_plural)s')]
new_params['public_template'] = 'soc/%(module_name)s/public.html' % params
new_params['export_template'] = 'soc/export.html'
new_params['create_template'] = 'soc/models/edit.html'
new_params['edit_template'] = 'soc/models/edit.html'
new_params['admin_template'] = 'soc/models/admin.html'
new_params['list_template'] = 'soc/models/list.html'
new_params['invite_template'] = 'soc/models/invite.html'
new_params['context'] = None
new_params['cache_pick'] = False
new_params['export_content_type'] = None
new_params['export_extension'] = '.txt'
new_params['csv_fieldnames'] = []
# TODO: Use only the js modules needed instead of js_uses_all
new_params['js_uses_all'] = DEF_JS_USES_LIST
new_params['js_uses_list'] = ['jq', 'menu']
new_params['js_uses_show'] = ['jq', 'menu']
new_params['js_uses_edit'] = ['jq', 'menu', 'tinymce', 'jq_purr',
'jq_spin', 'jq_autocomplete']
new_params['error_public'] = 'soc/%(module_name)s/error.html' % params
new_params['error_export'] = new_params['error_public']
new_params['error_edit'] = new_params['error_public']
new_params['public_row_action'] = {
"type": "redirect_custom",
"parameters": dict(new_window=False),
}
new_params['public_row_extra'] = lambda entity, *args: {
"link": redirects.getEditRedirect(entity, params),
}
  # TODO(ljvderijk): refactor this out of here
new_params['list_params'] = {
'list_description': 'description',
}
new_params['list_description'] = DEF_LIST_DESCRIPTION_FMT % params
new_params['save_message'] = [ugettext('%(name)s saved.' % params),
ugettext('Cannot delete %(name)s.' % params)]
new_params['submit_msg_param_name'] = DEF_SUBMIT_MSG_PARAM_NAME
new_params['edit_params'] = {
DEF_SUBMIT_MSG_PARAM_NAME: DEF_SUBMIT_MSG_PROFILE_SAVED,
}
new_params['cannot_delete_params'] = {
DEF_SUBMIT_MSG_PARAM_NAME: DEF_SUBMIT_MSG_CANNOT_DELETE_ENTITY,
}
new_params['dynabase'] = helper.forms.BaseForm
create_dynaproperties = {
'clean_link_id': cleaning.clean_link_id('link_id'),
'clean_feed_url': cleaning.clean_feed_url,
}
create_dynaproperties.update(params.get('create_extra_dynaproperties', {}))
# dynafields override any dynaproperties
create_dynafields = getDynaFields(params.get('create_dynafields', {}))
create_dynaproperties = dicts.merge(create_dynafields, create_dynaproperties)
new_params['references'] = []
new_params['create_dynainclude'] = [] + params.get('extra_dynainclude', [])
new_params['create_dynaexclude'] = ['scope', 'scope_path'] + \
params.get('extra_dynaexclude', [])
new_params['create_dynaproperties'] = create_dynaproperties
edit_dynaproperties = {
'clean_link_id': cleaning.clean_link_id('link_id'),
'link_id': forms.CharField(widget=helper.widgets.ReadOnlyInput()),
}
edit_dynaproperties.update(params.get('edit_extra_dynaproperties', {}))
# dynafields override any dynaproperties
edit_dynafields = getDynaFields(params.get('edit_dynafields', {}))
edit_dynaproperties = dicts.merge(edit_dynafields, edit_dynaproperties)
new_params['edit_dynainclude'] = None
new_params['edit_dynaexclude'] = None
new_params['edit_dynaproperties'] = edit_dynaproperties
params = dicts.merge(params, new_params)
# These need to be constructed separately, because they require
# parameters that can be defined either in params, or new_params.
if not 'create_form' in params:
params['create_form'] = getCreateForm(params, logic.getModel())
if not 'edit_form' in params:
params['edit_form'] = getEditForm(params, params['create_form'])
if not 'admin_form' in params:
params['admin_form'] = getAdminForm(params['edit_form'])
if not 'key_fields_pattern' in params:
params['key_fields_pattern'] = getKeyFieldsPattern(params)
# merge already done by access.Checker
params['rights'] = rights
return params
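# Illustrative use (hypothetical view module, not from this file): a view
# typically fills in 'logic', 'name', 'url_name', etc., then calls
#   params = constructParams(params)
# and receives back merged rights, URL patterns, templates, and forms.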
def getDynaFields(fields):
"""Constructs a new DynaField using params.
Args:
params: the params dictionary used to extract the dyanfields
param_name: the name of the parameter to use
"""
dynafields = {}
# generate the dynafields
for field in fields:
base = field.pop('base')
name = field.pop('name')
passthrough = field.pop('passthrough', DEF_FIELD_INIT_PARAMS)
dynafield = dynaform.newDynaField(field, base, passthrough)
dynafields[name] = dynafield()
return dynafields
def getCreateForm(params, model):
"""Constructs a new CreateForm using params.
Params usage:
dynabase: The dynabase value is used as the base argument to
dynaform.newDynaForm.
logic: The logic value is used to get the model argument to newDynaForm.
create_dynainclude: same as dynabase, but as dynainclude argument
create_dynaexclude: same as dynabase, but as dynaexclude argument
create_dynaproperties: same as dynabase, but as dynaproperties argument
"""
create_form = dynaform.newDynaForm(
dynabase = params['dynabase'],
dynamodel = model,
dynainclude = params['create_dynainclude'],
dynaexclude = params['create_dynaexclude'],
dynaproperties = params['create_dynaproperties'],
)
if 'extra_key_order' in params:
for field in params['extra_key_order']:
if field in create_form.base_fields.keyOrder:
create_form.base_fields.keyOrder.remove(field)
create_form.base_fields.keyOrder.extend(params['extra_key_order'])
return create_form
def getEditForm(params, base_form):
"""Constructs a new EditForm using params.
Params usage:
create_form: The dynabase value is used as the dynaform argument
to dyanform.extendDynaForm.
edit_dynainclude: same as create_form, but as dynainclude argument
edit_dynaexclude: same as create_form, but as dynaexclude argument
edit_dynaproperties: same as create_form, but as dynaproperties argument
"""
edit_form = dynaform.extendDynaForm(
dynaform = base_form,
dynainclude = params['edit_dynainclude'],
dynaexclude = params['edit_dynaexclude'],
dynaproperties = params['edit_dynaproperties'],
)
return edit_form
def getAdminForm(base_form):
"""Constructs a new AdminForm from base_form.
"""
# extend _and_ deepcopy the base_fields to do a proper copy
admin_form = dynaform.extendDynaForm(dynaform = base_form)
admin_form.base_fields = copy.deepcopy(admin_form.base_fields)
# replace all widgets with PTW's
for _, value in admin_form.base_fields.iteritems():
if not isinstance(value, forms.fields.Field):
continue
value.widget = widgets.PlainTextWidget()
return admin_form
def getKeyFieldsPattern(params):
"""Returns the Django pattern for this View's entity.
"""
logic = params['logic']
if logic.isIdBased():
return r"(?P<id>[0-9]*)"
names = logic.getKeyFieldNames()
scope_path_pattern = params['scope_path_pattern']
return getPattern(names, scope_path_pattern)
def getPattern(names, scope_path_pattern):
"""Returns the Django patterns for the specified names.
Args:
names: the field names that should be included in the pattern
scope_path_pattern: the pattern to use if the name is 'scope_path'
"""
patterns = []
for name in names:
if name == 'scope_path':
pattern = scope_path_pattern
else:
pattern = r'(?P<%s>%s)' % (name, linkable.LINK_ID_PATTERN_CORE)
patterns.append(pattern)
result = '/'.join(patterns)
return result
def getScopePattern(params):
"""Returns the Scope pattern for this entity.
"""
logic = params['logic']
depth = logic.getScopeDepth()
if depth is None:
return linkable.SCOPE_PATH_ARG_PATTERN
regexps = [linkable.LINK_ID_PATTERN_CORE for _ in range(depth)]
regexp = '/'.join(regexps)
return r'(?P<scope_path>%s)' % regexp
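# Worked example (added annotation; CORE stands for the assumed value of
# linkable.LINK_ID_PATTERN_CORE): for a logic reporting a scope depth of 2,
# the join above yields a pattern of the shape
#   r'(?P<scope_path>CORE/CORE)'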
|
apache-2.0
| 4,683,945,472,841,034,000
| 33.239175
| 80
| 0.665422
| false
| 3.350686
| false
| false
| false
|
HIPS/optofit
|
cosyne/make_figure_1.py
|
1
|
17433
|
import os
import copy
import cPickle
import itertools
import numpy as np
seed = np.random.randint(2**16)
# seed = 2958
seed = 58187
# seed = 60017
print "Seed: ", seed
import matplotlib.pyplot as plt
from matplotlib.patches import Path, PathPatch
from hips.inference.particle_mcmc import *
from optofit.cneuron.compartment import SquidCompartment
from optofit.cinference.pmcmc import *
from hips.plotting.layout import *
import brewer2mpl
colors = brewer2mpl.get_map('Set1', 'Qualitative', 9).mpl_colors
logistic = lambda x: 1.0/(1+np.exp(-x))
logit = lambda p: np.log(p/(1-p))
# Set the random seed for reproducibility
np.random.seed(seed)
# Make a simple compartment
hypers = {
'C' : 1.0,
'V0' : -60.0,
'g_leak' : 0.03,
'E_leak' : -65.0}
gp1_hypers = {'D': 2,
'sig' : 1,
'g_gp' : 12.0,
'E_gp' : 50.0,
'alpha_0': 1.0,
'beta_0' : 2.0,
'sigma_kernel': 1.0}
gp2_hypers = {'D' : 1,
'sig' : 1,
'g_gp' : 3.60,
# 'g_gp' : 0,
'E_gp' : -77.0,
'alpha_0': 1.0,
'beta_0' : 2.0,
'sigma_kernel': 1.0}
squid_hypers = {
'C' : 1.0,
'V0' : -60.0,
'g_leak' : 0.03,
'E_leak' : -65.0,
'g_na' : 12.0,
# 'g_na' : 0.0,
'E_na' : 50.0,
'g_kdr' : 3.60,
'E_kdr' : -77.0
}
def sample_squid_model():
squid_body = SquidCompartment(name='body', hypers=squid_hypers)
# Initialize the model
D, I = squid_body.initialize_offsets()
# Set the recording duration
t_start = 0
t_stop = 300.
dt = 0.1
t = np.arange(t_start, t_stop, dt)
T = len(t)
# Make input with injected current steps at 20-40ms, 120-160ms and 220-280ms
inpt = np.zeros((T, I))
inpt[int(20/dt):int(40/dt),:] = 3.
inpt[int(120/dt):int(160/dt),:] = 5.
inpt[int(220/dt):int(280/dt),:] = 7.
inpt += np.random.randn(T, I)
# Set the initial distribution to be Gaussian around the steady state
z0 = np.zeros(D)
squid_body.steady_state(z0)
init = GaussianInitialDistribution(z0, 0.1**2 * np.eye(D))
# Set the proposal distribution using Hodgkin Huxley dynamics
# TODO: Fix the hack which requires us to know the number of particles
N = 100
sigmas = 0.0001*np.ones(D)
# Set the voltage transition dynamics to be a bit noisier
sigmas[squid_body.x_offset] = 0.25
prop = HodgkinHuxleyProposal(T, N, D, squid_body, sigmas, t, inpt)
# Set the observation model to observe only the voltage
etas = np.ones(1)
observed_dims = np.array([squid_body.x_offset]).astype(np.int32)
lkhd = PartialGaussianLikelihood(observed_dims, etas)
# Initialize the latent state matrix to sample N=1 particle
z = np.zeros((T,N,D))
z[0,0,:] = init.sample()
# Initialize the output matrix
x = np.zeros((T,D))
# Sample the latent state sequence
for i in np.arange(0,T-1):
# The interface kinda sucks. We have to tell it that
# the first particle is always its ancestor
prop.sample_next(z, i, np.zeros((N,), dtype=np.int32))
# Sample observations
for i in np.arange(0,T):
lkhd.sample(z,x,i,0)
# Extract the first (and in this case only) particle
z = z[:,0,:].copy(order='C')
# Downsample
t_ds = 0.1
intvl = int(t_ds / dt)
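# Note (added annotation): with dt = 0.1 and t_ds = 0.1 the stride is
# int(0.1/0.1) = 1, so this pass keeps every sample; raise t_ds to
# actually thin the trajectory.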
td = t[::intvl].copy('C')
zd = z[::intvl, :].copy('C')
xd = x[::intvl, :].copy('C')
inptd = inpt[::intvl].copy('C')
return td, zd, xd, inptd
def sausage_plot(ax, t, z_mean, z_std, lw=1, alpha=0.5, color='r'):
"""
Plot a mean trace with a shaded +/- one-standard-deviation envelope.
:param ax: matplotlib axis to draw on
:param t: time axis
:param z_mean: mean trace
:param z_std: pointwise standard deviation of the trace
:return: None
"""
T = len(t)
z_env = np.zeros((T*2,2))
z_env[:,0] = np.concatenate((t, t[::-1]))
z_env[:,1] = np.concatenate((z_mean + z_std, z_mean[::-1] - z_std[::-1]))
ax.add_patch(PathPatch(Path(z_env),
facecolor=color,
alpha=alpha,
edgecolor='none',
linewidth=0))
ax.plot(t, z_mean, color=color, lw=lw)
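# Minimal usage sketch (illustrative, not part of the original script):
#   fig, ax = plt.subplots()
#   sausage_plot(ax, t, V_inf_mean, V_inf_std, color=colors[0])
# draws the posterior mean with a shaded +/- one-standard-deviation band.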
def make_figure_1(t, inpt, z_true, z_smpls, gpna_smpls, gpk_smpls):
"""
Make figure 1: true vs. inferred voltage, channel states and driving current.
:param t: time axis [ms]
:param inpt: injected current
:param z_true: ground-truth latent state trajectory
:param z_smpls: posterior samples of the latent states
:param gpna_smpls: GP samples of the sodium channel dynamics
:param gpk_smpls: GP samples of the potassium channel dynamics
:return: None
"""
# Parse out the true latent states
V_true = z_true[:,0]
m_true = z_true[:,1]
h_true = z_true[:,2]
n_true = z_true[:,3]
na_true = m_true**3 * h_true
k_true = n_true**4
# Extract the inferred states
offset = 6
z_mean = z_smpls[offset:,...].mean(0)
z_std = z_smpls[offset:,...].std(0)
V_inf_mean = z_smpls[offset:,:,0].mean(0)
V_inf_std = z_smpls[offset:,:,0].std(0)
na_inf_mean = logistic(z_smpls[offset:,:,1]).mean(0)
na_inf_std = logistic(z_smpls[offset:,:,1]).std(0)
k_inf_mean = logistic(z_smpls[offset:,:,3]).mean(0)
k_inf_std = logistic(z_smpls[offset:,:,3]).std(0)
# Make the figure
fig = create_figure((6.5,3))
# Plot the true and inferred voltage
V_ax = create_axis_at_location(fig, 0.75, 2.375, 5.25, 0.5,
transparent=True, box=False)
V_ax.plot(t, V_true, 'k', lw=2)
sausage_plot(V_ax, t, V_inf_mean, V_inf_std, color=colors[0])
V_ax.set_ylabel('$V \mathrm{ [mV]}$')
# Plot the true and inferred sodium channel state
na_ax = create_axis_at_location(fig, 0.75, 1.625, 5.25, 0.5,
transparent=True, box=False)
na_ax.plot(t, na_true, 'k', lw=2)
sausage_plot(na_ax, t, na_inf_mean, na_inf_std, color=colors[0])
na_ax.set_ylabel('$\sigma(z_{Na})$')
na_ax.set_ylim([0,0.3])
# Plot the true and inferred sodium channel state
k_ax = create_axis_at_location(fig, 0.75, .875, 5.25, 0.5,
transparent=True, box=False)
k_ax.plot(t, k_true, 'k', lw=2)
sausage_plot(k_ax, t, k_inf_mean, k_inf_std, color=colors[0])
k_ax.set_ylabel('$\sigma(z_{K})$')
k_ax.set_ylim([0,1])
# Plot the driving current
I_ax = create_axis_at_location(fig, 0.75, 0.375, 5.25, 0.25,
transparent=True, box=False)
I_ax.plot(t, inpt, 'k', lw=2)
I_ax.set_ylabel('$I \mathrm{}$')
I_ax.set_yticks([0,4,8])
I_ax.set_ylim([-2,10])
I_ax.set_xlabel('$\mathrm{time [ms]}$')
plt.savefig(os.path.join('cosyne', 'figure1.pdf'))
plt.ioff()
plt.show()
def make_figure_2(gpk_smpls):
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80.
V_max = 50.
Z = np.array(list(
itertools.product(*([np.linspace(z_min, z_max, grid) for _ in range(1)]
+ [np.linspace(V_min, V_max, grid)]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
fig = create_figure((2,2))
ax = create_axis_at_location(fig, .5, .5, 1, 1, box=True, transparent=True)
print "h_lim: ", np.amin(h_mean), " ", np.amax(h_mean)
im = ax.imshow(h_mean, extent=(V_min, V_max, z_max, z_min), cmap='RdGy',
vmin=-3, vmax=3)
ax.set_aspect((V_max-V_min)/(z_max-z_min))
ax.set_ylabel('$z_{K}$')
ax.set_xlabel('$V$')
ax.set_title('$\\frac{\mathrm{d}z_{K}}{\mathrm{d}t}(z_{K},V)$')
ax.set_xticks([-80, -40, 0, 40])
fig.savefig('dk_dt.pdf')
def make_figure_3():
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80.
V_max = 50.
dlogit = lambda x: 1./(x*(1-x))
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
uu = np.linspace(-6,0,1000)
xx = u_to_x(uu)
#g = lambda x: x
#ginv = lambda u: u
#dg_dx = lambda x: 1.0
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
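# Derivation note (added annotation): u = logit(g(x)) with g(x) = x**4, so by
# the chain rule
#   du/dt = (dlogit/dg) * (dg/dx) * (dx/dt),
# where dlogit(p)/dp = 1/(p*(1-p)) is evaluated at p = g(x); alpha and beta
# above are the standard Hodgkin-Huxley potassium-gate rate functions.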
# Plot the change in u as a function of u and V
V = np.linspace(0,(V_max-V_min),100)
fig = create_figure((2,2))
ax = create_axis_at_location(fig, .5, .5, 1, 1, box=True, transparent=True)
ax.imshow(du_dt(uu[:,None], V[None,:]),
extent=[V_min, V_max, uu[-1], uu[0]],
interpolation="none",
cmap='RdGy')
ax.set_xlabel('V')
ax.set_aspect((V_max-V_min)/(z_max-z_min))
ax.set_ylabel('u')
ax.set_title('du_dt(u,V)')
# ax2 = fig.add_subplot(1,2,2)
# ax2.imshow(dx_dt(xx[:,None], V[None,:]),
# extent=[V[0], V[-1], xx[-1], xx[0]],
# interpolation="none",
# cmap=plt.cm.Reds)
# ax2.set_aspect(100)
# ax2.set_xlabel('V')
# ax2.set_ylabel('x')
# ax2.set_title('dx_dt(x,V)')
plt.ioff()
plt.show()
def make_figure_4():
logit = lambda x: np.log(x / (1-x))
logistic = lambda u: np.exp(u) / (1 + np.exp(u))
dlogit = lambda x: 1./(x*(1-x))
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
uu = np.linspace(-6,6,1000)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
# Plot the change in u as a function of u and V
V = np.linspace(0,100,100)
fig = plt.figure()
ax1 = fig.add_subplot(1,2,1)
ax1.imshow(du_dt(uu[:,None], V[None,:]),
extent=[V[0], V[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds)
ax1.set_aspect(20)
ax1.set_xlabel('V')
ax1.set_ylabel('u')
ax1.set_title('du_dt(u,V)')
ax2 = fig.add_subplot(1,2,2)
ax2.imshow(dx_dt(xx[:,None], V[None,:]),
extent=[V[0], V[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds)
ax2.set_aspect(100)
ax2.set_xlabel('V')
ax2.set_ylabel('x')
ax2.set_title('dx_dt(x,V)')
plt.show()
def make_figure_5(gpk_smpls):
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
dlogit = lambda x: 1./(x*(1-x))
uu = np.linspace(-6,6,100)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80
V_max = 50
zz = np.linspace(z_min, z_max, grid)
V_gp = np.linspace(V_min, V_max, grid)
Z = np.array(list(
itertools.product(*([zz for _ in range(1)]
+ [V_gp]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
# Plot the change in u as a function of u and V
def dsig(z):
sigz = logistic(z)
return np.multiply(sigz, 1 - sigz)
df_dt = lambda z, dzdt: np.multiply(dsig(z), dzdt)
fig = plt.figure()
ax1 = fig.add_subplot(2,2,1)
dudt = du_dt(uu[:,None], V_gp[None,:])
v_max = max((np.max(dudt), np.max(h_mean)))
v_min = min((np.min(dudt), np.min(h_mean)))
ax1.imshow(dudt,
extent=[V_gp[0], V_gp[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=v_min,
vmax=v_max)
ax1.set_aspect(20)
ax1.set_xlabel('V')
ax1.set_ylabel('latent state')
ax1.set_title('Ground Truth: dz_dt(z,V)')
ax2 = fig.add_subplot(2,2,3)
ax2.imshow(h_mean,
extent=[V_gp[0], V_gp[-1], uu[-1], uu[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=v_min,
vmax=v_max)
ax2.set_aspect(20)
ax2.set_xlabel('V')
ax2.set_ylabel('latent state')
ax2.set_title('Inferred: dz_dt(z,V)')
ax1 = fig.add_subplot(2,2,2)
ax1.imshow(uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=-1,
vmax=.5)
ax1.set_aspect(100)
ax1.set_xlabel('V')
ax1.set_ylabel('open fraction')
ax1.set_title('Ground Truth: df_dt(f,V)')
ax2 = fig.add_subplot(2,2,4)
ax2.imshow(df_dt(np.array([zz for a in range(grid)]).transpose(), h_smpls[0].reshape((grid, grid))),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
interpolation="none",
cmap=plt.cm.Reds,
vmin=-1,
vmax=.5)
ax2.set_aspect(100)
ax2.set_xlabel('V')
ax2.set_ylabel('open fraction')
ax2.set_title('Inferred: df_dt(f,V)')
plt.show()
def plot_at_x(ax, index):
mean = uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)
mean = mean[index, :]
#std = 0.0001 * np.ones(mean.shape)
voltage = V_gp
color = 'r'
ax.plot(voltage, mean, color=color)
#ax.fill_between(voltage, mean - std, mean + std, color=color, alpha = 0.5)
mean, _, dzdt_low, dzdt_high = gpk_smpls[7][0].predict(Z) #62
mean = mean.reshape((grid, grid))
dzdt_low = dzdt_low.reshape((grid, grid))
dzdt_high = dzdt_high.reshape((grid, grid))
zs = np.array([zz for b in range(grid)]).transpose()
dfdt_mean = df_dt(zs, mean)
dfdt_low = df_dt(zs, dzdt_low)
dfdt_high = df_dt(zs, dzdt_high)
color = 'b'
ax.plot(voltage, dfdt_mean[index, :], color=color)
ax.fill_between(voltage, dfdt_low[index, :], dfdt_high[index, :], color=color, alpha = 0.5)
f, axs = plt.subplots(9, sharex=True)
for i in range(len(axs)):
plot_at_x(axs[i], i*2 + 42)
plt.show()
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.imshow((uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)) - h_mean,
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
cmap=plt.cm.RdGy,
vmin=-.5,
vmax=.5,
)
ax.set_aspect(100)
plt.show()
def make_figure_7(z_smpls, gpk_smpls):
g = lambda x: x**4
ginv = lambda u: u**(1./4)
dg_dx = lambda x: 4*x**3
u_to_x = lambda u: ginv(logistic(u))
x_to_u = lambda x: logit(g(x))
dlogit = lambda x: 1./(x*(1-x))
uu = np.linspace(-6,6,100)
xx = u_to_x(uu)
# Compute dynamics du/dt
alpha = lambda V: 0.01 * (10.01-V) / (np.exp((10.01-V)/10.) - 1)
beta = lambda V: 0.125 * np.exp(-V/80.)
dx_dt = lambda x,V: alpha(V)*(1-x) - beta(V) * x
du_dt = lambda u,V: dlogit(g(u_to_x(u))) * dg_dx(u_to_x(u)) * dx_dt(u_to_x(u),V)
grid = 100
z_min = logit(0.001)
z_max = logit(0.999)
V_min = -80
V_max = 50
zz = np.linspace(z_min, z_max, grid)
V_gp = np.linspace(V_min, V_max, grid)
Z = np.array(list(
itertools.product(*([zz for _ in range(1)]
+ [V_gp]))))
h_smpls = []
for gps in gpk_smpls:
m_pred, _, _, _ = gps[0].predict(Z)
h_smpls.append(m_pred)
h_mean = np.array(h_smpls).mean(0)
h_mean = h_mean.reshape((grid, grid))
# Plot the change in u as a function of u and V
def dsig(z):
sigz = logistic(z)
return np.multiply(sigz, 1 - sigz)
df_dt = lambda z, dzdt: np.multiply(dsig(z), dzdt)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.imshow((uu[:, None] * dg_dx(u_to_x(uu[:, None])) * dx_dt(u_to_x(uu[:, None]), V_gp[None, :]+60)) - df_dt(np.array([zz for a in range(grid)]).transpose(), h_mean),
extent=[V_gp[0], V_gp[-1], xx[-1], xx[0]],
cmap=plt.cm.RdGy
)
ax.set_aspect(100)
ax.scatter(z_smpls[:11, :, 0].reshape((11*3000)), logistic(z_smpls[:11, :, 3].reshape((11*3000))))
ax.set_title("Errors")
plt.show()
# Simulate the squid compartment to get the ground truth
t, z_true, x, inpt = sample_squid_model()
# Load the results of the pMCMC inference
with open('squid2_results5.pkl', 'r') as f:
z_smpls, gpna_smpls, gpk_smpls = cPickle.load(f)
burn = 30
z_smpls = z_smpls[burn:]
gpna_smpls = gpna_smpls[burn:]
gpk_smpls = gpk_smpls[burn:]
make_figure_1(t, inpt, z_true, z_smpls, gpna_smpls, gpk_smpls)
#make_figure_2(gpk_smpls)
#make_figure_3()
#make_figure_4()
make_figure_5(gpk_smpls)
make_figure_7(z_smpls, gpk_smpls)
|
gpl-2.0
| 7,677,128,331,014,776,000
| 29.265625
| 169
| 0.5224
| false
| 2.57694
| false
| false
| false
|
Metonimie/benchmark-scoreboard
|
src/models/__init__.py
|
1
|
1802
|
"""
Author: Denis Nutiu <denis.nutiu@gmail.com>
This file is part of scoreboard-benchmark.
scoreboard-benchmark is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
scoreboard-benchmark is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with scoreboard-benchmark . If not, see <http://www.gnu.org/licenses/>.
"""
from flask_sqlalchemy import SQLAlchemy
import sqlalchemy_utils
db = SQLAlchemy()
class Result(db.Model):
"""
The result model will store benchmark results.
"""
__tablename__ = 'results'
id = db.Column(db.Integer, primary_key=True, index=True)
name = db.Column(db.String(50))
gpu = db.Column(db.String(256))
cpu = db.Column(db.String(256))
log = db.Column(db.Text)
score = db.Column(db.Integer, index=True)
ip = db.Column(sqlalchemy_utils.IPAddressType)
created = db.Column(db.DateTime(timezone=True), server_default=db.func.now())  # Creation timestamp, set server-side.
def __init__(self, name="Anonymous", gpu=None, cpu=None, log=None, ip=None, score=1):
self.name = name
self.gpu = gpu
self.cpu = cpu
self.log = log
self.score = score
self.ip = ip
def __repr__(self):
return self.gpu
__table_args__ = (
db.CheckConstraint(score > 0, name="positive_score_constraint"),
{}
)
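# Minimal usage sketch (illustrative; assumes an initialised Flask app and
# database):
#   result = Result(name='Anon', gpu='GTX 1080', cpu='i7-6700K', score=9001)
#   db.session.add(result)
#   db.session.commit()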
|
lgpl-3.0
| 8,096,559,075,137,097,000
| 33.653846
| 117
| 0.668147
| false
| 3.850427
| false
| false
| false
|
hsavolai/vmlab
|
src/kiwi/log.py
|
1
|
5433
|
#
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2005-2006 Async Open Source
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <jdahlin@async.com.br>
#
"""
Extension to the logging module
This module defines a couple of extensions to the logging module included
in the python standard distribution.
It creates an additional logging handler that print log records on the
standard output. This handler is only showing records which has a level
set to logging.WARNING or higher by default.
The messages printed by this handler can be modified by using the environment
variable called KIWI_LOG.
The syntax for the string which KIWI_LOG points to is the following::
domain ':' level [, domain ':', level]
domain can contain wildcards such as * and ?
level is an integer 1-5 which defines the minimal level:
- B{5}: DEBUG
- B{4}: INFO
- B{3}: WARNING
- B{2}: ERROR
- B{1}: CRITICAL
Examples::
KIWI_LOG="stoq*:5"
will print all the messages in a domain starting with stoq with DEBUG or higher::
KIWI_LOG="kiwi*:4,stoq.*:5"
will print all the messages with INFO or higher in all domains starting with kiwi,
and all the messages in the stoq.* domains which are DEBUG or higher
Inspiration for the syntax is taken from the U{debugging facilities<http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gstreamer-GstInfo.html#id2857358>} of the
U{GStreamer<http://www.gstreamer.net>} multimedia framework.
"""
import fnmatch
import logging
import os
# Globals
_console = None
_filter = None
class LogError(Exception):
pass
class Logger(object):
# Backwards compatibility, we should probably replace the callsites
# with import logging; logging.getLogger(name)
def __new__(cls, name):
return logging.getLogger(name)
class _Logger(logging.Logger):
def __call__(self, message, *args, **kwargs):
self.info(message, *args, **kwargs)
logging.setLoggerClass(_Logger)
class ReversedGlobalFilter(logging.Filter):
"""
It's like a reversed filter, the default behavior
is to not show the message, you need to add custom filters for all
the records you wish to see
"""
def __init__(self):
logging.Filter.__init__(self)
self.filters = []
def add_filter(self, f, level=logging.DEBUG):
self.filters.append((f, level))
def filter(self, record):
for f, level in self.filters:
if (record.levelno >= level and
fnmatch.fnmatch(record.name, f)):
return True
return False
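# Usage sketch (illustrative, not part of the original module): show DEBUG
# records for every 'kiwi.ui.*' domain while everything else stays hidden.
#
#   handler = logging.StreamHandler()
#   log_filter = ReversedGlobalFilter()
#   log_filter.add_filter('kiwi.ui.*', logging.DEBUG)
#   handler.addFilter(log_filter)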
def set_log_file(filename, mask=None):
"""
Set the filename used for logging.
@param filename: name of the file to write log records to
@param mask: optional domain mask; only records whose logger name
matches it are written (same wildcard syntax as KIWI_LOG domains)
"""
file_handler = logging.FileHandler(filename, 'w')
file_handler.setFormatter(logging.Formatter(
'%(asctime)s %(name)-18s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S'))
root = logging.getLogger()
root.addHandler(file_handler)
if mask:
file_filter = ReversedGlobalFilter()
file_filter.add_filter(mask, logging.DEBUG)
file_handler.addFilter(file_filter)
return file_handler.stream
def set_log_level(name, level):
"""
Set the log level.
@param name: logging category
@param level: level
"""
global _filter
_filter.add_filter(name, level)
def _read_log_levels(console_filter):
log_levels = {}
# bootstrap issue, cannot depend on kiwi.environ
log_level = os.environ.get('KIWI_LOG')
if not log_level:
return log_levels
for part in log_level.split(','):
if ':' not in part:
continue
if part.count(':') > 1:
raise LogError("too many : in part %s" % part)
name, level = part.split(':')
try:
level = int(level)
except ValueError:
raise LogError("invalid level: %s" % level)
if level < 0 or level > 5:
raise LogError("level must be between 0 and 5")
level = 50 - (level * 10)
console_filter.add_filter(name, level)
def _create_console():
global _filter, _console
console = logging.StreamHandler()
console.setFormatter(logging.Formatter(
"%(asctime)s %(name)-20s %(message)s", datefmt='%T'))
root = logging.getLogger()
root.addHandler(console)
root.setLevel(logging.DEBUG)
console_filter = ReversedGlobalFilter()
# Always display warnings or higher on the console
console_filter.add_filter('*', logging.WARNING)
console.addFilter(console_filter)
_read_log_levels(console_filter)
# Set globals
_filter = console_filter
_console = console
_create_console()
kiwi_log = Logger('kiwi')
|
gpl-3.0
| -9,216,188,398,400,686,000
| 27.746032
| 181
| 0.674581
| false
| 3.812632
| false
| false
| false
|
DedMemez/ODS-August-2017
|
safezone/GZPlayground.py
|
1
|
3107
|
# toontown.safezone.GZPlayground
from direct.fsm import State
from toontown.safezone import GolfKart
from toontown.toonbase import ToontownGlobals, TTLocalizer
from toontown.toontowngui import TTDialog
import Playground
import sys
class GZPlayground(Playground.Playground):
def __init__(self, loader, parentFSM, doneEvent):
Playground.Playground.__init__(self, loader, parentFSM, doneEvent)
self.parentFSM = parentFSM
self.golfKartBlockDoneEvent = 'golfKartBlockDone'
self.fsm.addState(State.State('golfKartBlock', self.enterGolfKartBlock, self.exitGolfKartBlock, ['walk']))
state = self.fsm.getStateNamed('walk')
state.addTransition('golfKartBlock')
self.golfKartDoneEvent = 'golfKartDone'
self.trolley = None
self.warningDialog = None
return
def destroyWarningDialog(self):
if self.warningDialog:
self.warningDialog.destroy()
self.warningDialog = None
return
def warningDone(self, *args):
self.destroyWarningDialog()
self.fsm.request('walk')
def enterGolfKartBlock(self, golfKart):
if sys.platform == 'android':
base.localAvatar.b_setAnimState('neutral', 1)
self.destroyWarningDialog()
self.warningDialog = TTDialog.TTDialog(text=TTLocalizer.AndroidGolfMessage, command=self.warningDone, style=TTDialog.Acknowledge)
self.warningDialog.show()
return
base.localAvatar.laffMeter.start()
base.localAvatar.b_setAnimState('off', 1)
self.accept(self.golfKartDoneEvent, self.handleGolfKartDone)
self.trolley = GolfKart.GolfKart(self, self.fsm, self.golfKartDoneEvent, golfKart.getDoId())
self.trolley.load()
self.trolley.enter()
def exitGolfKartBlock(self):
base.localAvatar.laffMeter.stop()
self.destroyWarningDialog()
self.ignore(self.golfKartDoneEvent)
if self.trolley:
self.trolley.unload()
self.trolley.exit()
self.trolley = None
return
def detectedGolfKartCollision(self, golfKart):
self.notify.debug('detectedGolfKartCollision()')
self.fsm.request('golfKartBlock', [golfKart])
def handleGolfKartDone(self, doneStatus):
self.notify.debug('handling golf kart done event')
mode = doneStatus['mode']
if mode == 'reject':
self.fsm.request('walk')
elif mode == 'exit':
self.fsm.request('walk')
elif mode == 'golfcourse':
self.doneStatus = {'loader': 'golfcourse',
'where': 'golfcourse',
'hoodId': self.loader.hood.id,
'zoneId': doneStatus['zoneId'],
'shardId': None,
'courseId': doneStatus['courseId']}
messenger.send(self.doneEvent)
else:
self.notify.error('Unknown mode: ' + mode + ' in handleGolfKartDone')
return
|
apache-2.0
| 5,831,154,515,158,108,000
| 37.858974
| 141
| 0.629868
| false
| 3.663915
| false
| false
| false
|
mic4ael/indico
|
indico/modules/events/timetable/blueprint.py
|
1
|
6046
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from indico.modules.events.timetable.controllers.display import (RHTimetable, RHTimetableEntryInfo,
RHTimetableExportDefaultPDF, RHTimetableExportPDF)
from indico.modules.events.timetable.controllers.legacy import (RHLegacyTimetableAddBreak,
RHLegacyTimetableAddContribution,
RHLegacyTimetableAddSession,
RHLegacyTimetableAddSessionBlock,
RHLegacyTimetableBreakREST,
RHLegacyTimetableDeleteEntry,
RHLegacyTimetableEditEntry,
RHLegacyTimetableEditEntryDateTime,
RHLegacyTimetableEditEntryTime,
RHLegacyTimetableEditSession, RHLegacyTimetableFitBlock,
RHLegacyTimetableGetUnscheduledContributions,
RHLegacyTimetableMoveEntry, RHLegacyTimetableReschedule,
RHLegacyTimetableScheduleContribution,
RHLegacyTimetableShiftEntries,
RHLegacyTimetableSwapEntries)
from indico.modules.events.timetable.controllers.manage import (RHCloneContribution, RHManageSessionTimetable,
RHManageTimetable, RHManageTimetableEntryInfo,
RHTimetableREST)
from indico.web.flask.util import make_compat_redirect_func
from indico.web.flask.wrappers import IndicoBlueprint
_bp = IndicoBlueprint('timetable', __name__, template_folder='templates', virtual_template_folder='events/timetable',
url_prefix='/event/<confId>')
# Management
_bp.add_url_rule('/manage/timetable/', 'management', RHManageTimetable)
_bp.add_url_rule('/manage/timetable/', 'timetable_rest', RHTimetableREST, methods=('POST',))
_bp.add_url_rule('/manage/timetable/<int:entry_id>', 'timetable_rest', RHTimetableREST, methods=('PATCH', 'DELETE'))
_bp.add_url_rule('/manage/timetable/session/<int:session_id>/', 'manage_session', RHManageSessionTimetable)
# Timetable legacy operations
_bp.add_url_rule('/manage/timetable/add-session', 'add_session', RHLegacyTimetableAddSession, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/timetable/break/<int:break_id>', 'legacy_break_rest', RHLegacyTimetableBreakREST,
methods=('PATCH',))
with _bp.add_prefixed_rules('/manage/timetable/session/<int:session_id>', '/manage/timetable'):
_bp.add_url_rule('/', 'session_rest', RHLegacyTimetableEditSession,
methods=('PATCH',))
_bp.add_url_rule('/entry/<int:entry_id>/info', 'entry_info_manage', RHManageTimetableEntryInfo)
_bp.add_url_rule('/entry/<int:entry_id>/delete', 'delete_entry', RHLegacyTimetableDeleteEntry, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/move', 'move_entry', RHLegacyTimetableMoveEntry,
methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/shift', 'shift_entries', RHLegacyTimetableShiftEntries, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/swap', 'swap_entries', RHLegacyTimetableSwapEntries, methods=('POST',))
_bp.add_url_rule('/entry/<int:entry_id>/edit/', 'edit_entry', RHLegacyTimetableEditEntry, methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/edit/time', 'edit_entry_time', RHLegacyTimetableEditEntryTime,
methods=('GET', 'POST'))
_bp.add_url_rule('/entry/<int:entry_id>/edit/datetime', 'edit_entry_datetime', RHLegacyTimetableEditEntryDateTime,
methods=('POST',))
_bp.add_url_rule('/block/<block_id>/schedule', 'schedule', RHLegacyTimetableScheduleContribution, methods=('POST',))
_bp.add_url_rule('/block/<block_id>/fit', 'fit_session_block', RHLegacyTimetableFitBlock, methods=('POST',))
_bp.add_url_rule('/not-scheduled', 'not_scheduled', RHLegacyTimetableGetUnscheduledContributions)
_bp.add_url_rule('/schedule', 'schedule', RHLegacyTimetableScheduleContribution, methods=('POST',))
_bp.add_url_rule('/reschedule', 'reschedule', RHLegacyTimetableReschedule, methods=('POST',))
_bp.add_url_rule('/add-break', 'add_break', RHLegacyTimetableAddBreak, methods=('GET', 'POST'))
_bp.add_url_rule('/add-contribution', 'add_contribution', RHLegacyTimetableAddContribution, methods=('GET', 'POST'))
_bp.add_url_rule('/add-session-block', 'add_session_block', RHLegacyTimetableAddSessionBlock,
methods=('GET', 'POST'))
_bp.add_url_rule('/clone-contribution', 'clone_contribution', RHCloneContribution, methods=('POST',))
# Display
_bp.add_url_rule('/timetable/', 'timetable', RHTimetable)
_bp.add_url_rule('/timetable/pdf', 'export_pdf', RHTimetableExportPDF, methods=('GET', 'POST'))
_bp.add_url_rule('/timetable/timetable.pdf', 'export_default_pdf', RHTimetableExportDefaultPDF)
_bp.add_url_rule('/timetable/entry/<int:entry_id>/info', 'entry_info', RHTimetableEntryInfo)
# Legacy URLs
_compat_bp = IndicoBlueprint('compat_timetable', __name__)
_compat_bp.add_url_rule('/conferenceTimeTable.py', 'timetable_modpython', make_compat_redirect_func(_bp, 'timetable'))
|
mit
| 2,053,347,587,523,897,600
| 73.641975
| 120
| 0.606351
| false
| 4.014608
| false
| false
| false
|
savant-nz/carbon
|
Scripts/SCons/Compilers/Clang.sconscript.py
|
1
|
2022
|
#
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not
# distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Creates and returns a build environment that uses Clang. This is done by altering the GCC build environment.
import os
import sys
Import('*')
env = SConscript('GCC.sconscript.py')
env['CC'] = 'clang'
env['CXX'] = 'clang++'
env['LINK'] = 'clang++'
env['CCFLAGS'] += ['-stdlib=libc++', '-Wno-undefined-func-template', '-Wno-undefined-var-template']
env['LINKFLAGS'] += ['-stdlib=libc++']
# Make color diagnostics work when piping Clang output through SCons
if 'TERM' in os.environ:
env['ENV']['TERM'] = os.environ['TERM']
env['CCFLAGS'] += ['-fcolor-diagnostics']
# Access the toolchain through xcrun when building on macOS
if sys.platform == 'darwin':
for key in ['CC', 'CXX', 'LINK', 'AR', 'AS', 'RANLIB']:
env[key] = 'xcrun ' + env[key]
# Extra warnings for strict builds
if isStrictBuild:
env['CCFLAGS'] += ['-Weverything', '-Wno-c++98-compat', '-Wno-disabled-macro-expansion', '-Wno-documentation',
'-Wno-documentation-unknown-command', '-Wno-exit-time-destructors', '-Wno-float-equal',
'-Wno-format-nonliteral', '-Wno-global-constructors', '-Wno-header-hygiene',
'-Wno-implicit-fallthrough', '-Wno-keyword-macro', '-Wno-missing-noreturn',
'-Wno-missing-prototypes', '-Wno-nullable-to-nonnull-conversion', '-Wno-over-aligned',
'-Wno-padded', '-Wno-sign-conversion', '-Wno-switch-enum', '-Wno-unused-template',
'-Wno-weak-vtables']
# Alter GCC's precompiled header support to pass a -include-pch through to Clang
def UsePrecompiledHeader(self, header, **keywords):
self.BuildPrecompiledHeader(header, **keywords)
self['CCFLAGS'] += ['-Xclang', '-include-pch', '-Xclang', self['GCH']]
env.AddMethod(UsePrecompiledHeader)
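# Usage sketch (assumed call site, not part of this file): a project script
# would typically call
#   env.UsePrecompiledHeader('Source/Precompiled.h')
# which builds the PCH via the GCC helper and then injects it into every
# compile with -include-pch.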
Return('env')
|
mpl-2.0
| 6,866,951,723,079,990,000
| 39.44
| 115
| 0.64095
| false
| 3.37
| false
| false
| false
|
jberci/resolwe
|
resolwe/elastic/signals.py
|
1
|
1451
|
""".. Ignore pydocstyle D400.
=======================
Elastic Signal Handlers
=======================
"""
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from guardian.models import GroupObjectPermission, UserObjectPermission
from .builder import index_builder
def _process_permission(perm):
"""Rebuild indexes affected by the given permission."""
# XXX: Optimize: rebuild only permissions, not whole document
codename = perm.permission.codename
if not codename.startswith('view') and not codename.startswith('owner'):
return
index_builder.build(perm.content_object)
@receiver(post_save, sender=UserObjectPermission)
def add_user_permission(sender, instance, **kwargs):
"""Process indexes after adding user permission."""
_process_permission(instance)
@receiver(post_save, sender=GroupObjectPermission)
def add_group_permission(sender, instance, **kwargs):
"""Process indexes after adding group permission."""
_process_permission(instance)
@receiver(post_delete, sender=UserObjectPermission)
def remove_user_permission(sender, instance, **kwargs):
"""Process indexes after removing user permission."""
_process_permission(instance)
@receiver(post_delete, sender=GroupObjectPermission)
def remove_group_permission(sender, instance, **kwargs):
"""Process indexes after removing group permission."""
_process_permission(instance)
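# Illustrative note (not part of the module): granting a view permission via
# django-guardian fires the post_save handler above and reindexes the object,
# e.g. (the codename is an assumption):
#
#   from guardian.shortcuts import assign_perm
#   assign_perm('view_entity', user, entity)  # -> add_user_permission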
|
apache-2.0
| 3,300,003,933,398,636,000
| 29.87234
| 76
| 0.73122
| false
| 4.331343
| false
| false
| false
|
CptSpaceToaster/memegen
|
memegen/services/template.py
|
1
|
1885
|
import logging
from ._base import Service
from ..domain import Template
log = logging.getLogger(__name__)
class TemplateService(Service):
def __init__(self, template_store, **kwargs):
super().__init__(**kwargs)
self.template_store = template_store
def all(self):
"""Get all templates."""
templates = self.template_store.filter()
return templates
def find(self, key):
"""Find a template with a matching key."""
key = Template.strip(key)
# Find an exact match
template = self.template_store.read(key)
if template:
return template
# Else, find an alias match
for template in self.all():
if key in template.aliases_stripped:
return template
# Else, no match
raise self.exceptions.not_found
def validate(self):
"""Ensure all template are valid and conflict-free."""
templates = self.all()
keys = {template.key: template for template in templates}
for template in templates:
log.info("checking template '%s' ...", template)
if not template.validate():
return False
for alias in template.aliases:
log.info("checking alias '%s' -> '%s' ...", alias, template.key)
if alias not in template.aliases_lowercase:
msg = "alias '%s' should be lowercase characters or dashes"
log.error(msg, alias)
return False
try:
existing = keys[alias]
except KeyError:
keys[alias] = template
else:
msg = "alias '%s' already used in template: %s"
log.error(msg, alias, existing)
return False
return True
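# Usage sketch (illustrative; store wiring is an assumption):
#   service = TemplateService(template_store=store)
#   template = service.find('Success-Kid')  # exact key first, then aliases
#   assert service.validate()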
|
mit
| -175,650,737,700,038,140
| 30.416667
| 80
| 0.537931
| false
| 4.934555
| false
| false
| false
|
0--key/lib
|
portfolio/2013_OrSys/dispatcher.py
|
1
|
6451
|
from flask import Flask, render_template, session, redirect, url_for, request
from flask import logging, g
from settings import users
from functions import fetch_pending_orders_data, fetch_products_data,\
fetch_suppliers_data, get_user_id, suppDataCheck, suppDataInsert, \
suppDataUpdate, setActiveTab, appendProduct, checkProduct, throw_product,\
fetch_invoices_data, removeProduct, getSupplierData, revocateOrder,\
sendPurchaseOrder, fetch_held_products, checkOTLock, grasp_product,\
eliminate_product
app = Flask(__name__)
@app.route('/')
def index():
"""Composes operator dashboard"""
if 'username' in session:
user = session['username']
logo = users.get(user).get('img')
else:
return redirect(url_for('login'))
if 'active_tab' not in session: # active tab defining
session['active_tab'] = 'orders' # <-- initial value
o_dataset, pii_data = fetch_pending_orders_data()  # compose the tabs' data
otl = checkOTLock()
agg_products = fetch_products_data(pii_data)
supp_tab_data = fetch_suppliers_data()
p_invoices_tab_data = fetch_invoices_data('pending')
sent_PO_tab_data = fetch_invoices_data('sent')
heldP_tab_data = fetch_held_products()
a_tab = setActiveTab(session['active_tab'])
return render_template(
'index.htm', user=user, logo=logo, orders=o_dataset, o_t_lock=otl,
orders_agg=agg_products, agg_products_qty=len(agg_products),
active=a_tab, supp_data=supp_tab_data, pItab=p_invoices_tab_data,
sItab=sent_PO_tab_data, hTd=heldP_tab_data
)
@app.route('/login', methods=['GET', 'POST'])
def login():
"""A primitive authentication feature"""
if request.method == 'POST':
input_username = request.form['username']
input_password = request.form['password']
if (input_username in users and
users.get(input_username).get('password') == input_password):
session['username'] = input_username
session['userID'] = get_user_id(input_username)
return redirect(url_for('index'))
return render_template('login.htm')
@app.route('/logout')
def logout():
"""LogOut implementation"""
session.pop('username', None)
return redirect(url_for('login'))
@app.route('/addNewSupplier')
def addS_modal_form():
"""Modal for upload data about a new supplier"""
app.logger.debug('This is SupplierForm modal')
sup_data = {'city': 'Sydney', 'province': 'New South Wales'}
return render_template('addNewSupplierForm.htm', sup_data=sup_data)
@app.route('/editSupplier', methods=['GET'])
def editS_modal_form():
"""Modal for upload data about a new supplier"""
app.logger.debug('This is editSupplierForm')
sup_data = getSupplierData(request.args.get('s_id'))
return render_template('editSupplierForm.htm', sup_data=sup_data)
@app.route('/SupplierDataFiller', methods=['GET', 'POST'])
def supplierDataFill():
"""Manipulation with the input data and redirect"""
app.logger.debug('This is supplier data filler')
if request.method == 'POST':
(pure_data, check_up) = suppDataCheck(request.form)
if check_up == 'new':
suppDataInsert(pure_data, session['userID'])
session['active_tab'] = 'supplier'
elif check_up == 'known':
suppDataUpdate(pure_data, session['userID'])
session['active_tab'] = 'supplier'
elif check_up == 'update':
suppDataUpdate(pure_data, session['userID'])
session['active_tab'] = 'supplier'
return redirect(url_for('index'))
@app.route('/appendItem', methods=['GET', 'POST'])
def appendItem():
"""Includes product into invoice and redirect"""
app.logger.debug('This is appendItem to PO process')
if request.method == 'POST':
(prod_properties, check_up) = checkProduct(request.form)
if check_up:
appendProduct(prod_properties, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/removeItem', methods=['GET', 'POST'])
def freeItem():
"""Removes product out from invoice and redirect"""
app.logger.debug('This is freeItem out from PO process')
if request.method == 'POST':
removeProduct(session['userID'], request.form['piID'])
session['active_tab'] = 'invoices'
return redirect(url_for('index'))
@app.route('/toggleOrder', methods=['GET'])
def toggleOrder():
"""Exclude or include order and its products out from
processing and redirect to index page"""
o_id = request.args.get('o_id')
app.logger.debug('This is revOrder id=%s' % (o_id,))
revocateOrder(o_id, session['username'])
session['active_tab'] = 'orders'
return redirect(url_for('index'))
@app.route('/sendPO', methods=['GET'])
def sendPurOrder():
"""Organize application output"""
i_id = request.args.get('i_id')
app.logger.debug('This is send purchase order with id=%s' % (i_id,))
sendPurchaseOrder(i_id, session['username'])
session['active_tab'] = 'invoices'
return redirect(url_for('index'))
@app.route('/graspProduct', methods=['GET'])
def graspProduct():
"""Move product to the pail"""
sku = request.args.get('p_id')
app.logger.debug('This is grasp product with sku=%s and userID=%s' %
(sku, session['userID']))
result = grasp_product(sku, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/throwProduct', methods=['GET'])
def throwProduct():
"""Move product to the agg product tab"""
pipID = request.args.get('p_id')
app.logger.debug('This is throw product with ID=%s out from product pail \
and userID=%s' % (pipID, session['userID']))
result = throw_product(pipID, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
@app.route('/eliminateProduct', methods=['GET'])
def eliminateProduct():
"""Move product to the trash"""
pipID = request.args.get('p_id')
app.logger.debug('This is eliminate product with ID=%s out from product\
pail and userID=%s' % (pipID, session['userID']))
result = eliminate_product(pipID, session['userID'])
session['active_tab'] = 'p_agg'
return redirect(url_for('index'))
app.secret_key = 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT'
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True)
|
apache-2.0
| 8,784,297,107,164,680,000
| 35.862857
| 79
| 0.647341
| false
| 3.490801
| false
| false
| false
|
javiercantero/streamlink
|
src/streamlink/plugins/kingkong.py
|
1
|
2561
|
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate
from streamlink.stream import HTTPStream, HLSStream
API_URL = "https://g-api.langlive.com/webapi/v1/room/info?room_id={0}"
VOD_API_URL = (
"https://g-api.langlive.com/webapi/v1/replayer/detail?live_id={0}")
STATUS_ONLINE = 1
STATUS_OFFLINE = 0
STREAM_WEIGHTS = {
"360P": 360,
"480P": 480,
"720P": 720,
"source": 1080
}
_url_re = re.compile(r"""
https://www\.kingkong\.com\.tw/
(?:
video/(?P<vid>[0-9]+G[0-9A-Za-z]+)|
(?P<channel>[0-9]+)
)
""", re.VERBOSE)
_room_schema = validate.Schema(
{
"data": {
"live_info": {
"live_status": int,
"stream_items": [{
"title": validate.text,
"video": validate.any('', validate.url(
scheme="https",
path=validate.endswith(".flv")
))
}]
}
}
},
validate.get("data")
)
_vod_schema = validate.Schema(
{
"data": {
"live_info": {
"video": validate.text
}
}
},
validate.get("data")
)
class Kingkong(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
@classmethod
def stream_weight(cls, stream):
if stream in STREAM_WEIGHTS:
return STREAM_WEIGHTS[stream], "kingkong"
return Plugin.stream_weight(stream)
def _get_streams(self):
match = _url_re.match(self.url)
vid = match.group("vid")
if vid:
res = http.get(VOD_API_URL.format(vid))
data = http.json(res, schema=_vod_schema)
yield "source", HLSStream(
self.session, data["live_info"]["video"])
return
channel = match.group("channel")
res = http.get(API_URL.format(channel))
room = http.json(res, schema=_room_schema)
if not room:
self.logger.info("Not a valid room url.")
return
live_info = room["live_info"]
if live_info["live_status"] != STATUS_ONLINE:
self.logger.info("Stream currently unavailable.")
return
for item in live_info["stream_items"]:
quality = item["title"]
if quality == u"\u6700\u4f73": # "Best" in Chinese
quality = "source"
yield quality, HTTPStream(self.session, item["video"])
__plugin__ = Kingkong
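# Note (added annotation): with the STREAM_WEIGHTS table above, asking
# streamlink for "best" resolves to the "source" variant (weight 1080)
# whenever the room offers it, e.g. (the channel id is an example):
#   streamlink https://www.kingkong.com.tw/12345 best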
|
bsd-2-clause
| 8,535,942,782,514,645,000
| 25.132653
| 71
| 0.5205
| false
| 3.556944
| false
| false
| false
|
MRtrix3/mrtrix3
|
lib/mrtrix3/fsl.py
|
1
|
6782
|
# Copyright (c) 2008-2021 the MRtrix3 contributors.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Covered Software is provided under this License on an "as is"
# basis, without warranty of any kind, either expressed, implied, or
# statutory, including, without limitation, warranties that the
# Covered Software is free of defects, merchantable, fit for a
# particular purpose or non-infringing.
# See the Mozilla Public License v. 2.0 for more details.
#
# For more details, see http://www.mrtrix.org/.
import os
from distutils.spawn import find_executable
from mrtrix3 import MRtrixError
_SUFFIX = ''
# Functions that may be useful for scripts that interface with FMRIB FSL tools
# FSL's run_first_all script can be difficult to wrap, since it does not provide
# a meaningful return code, and may run via SGE, which then requires waiting for
# the output files to appear.
def check_first(prefix, structures): #pylint: disable=unused-variable
from mrtrix3 import app, path #pylint: disable=import-outside-toplevel
vtk_files = [ prefix + '-' + struct + '_first.vtk' for struct in structures ]
existing_file_count = sum([ os.path.exists(filename) for filename in vtk_files ])
if existing_file_count != len(vtk_files):
if 'SGE_ROOT' in os.environ and os.environ['SGE_ROOT']:
app.console('FSL FIRST job may have been run via SGE; awaiting completion')
app.console('(note however that FIRST may fail silently, and hence this script may hang indefinitely)')
path.wait_for(vtk_files)
else:
app.DO_CLEANUP = False
raise MRtrixError('FSL FIRST has failed; ' + ('only ' if existing_file_count else '') + str(existing_file_count) + ' of ' + str(len(vtk_files)) + ' structures were segmented successfully (check ' + path.to_scratch('first.logs', False) + ')')
# Get the name of the binary file that should be invoked to run eddy;
# this depends on both whether or not the user has requested that the CUDA
# version of eddy be used, and the various names that this command could
# conceivably be installed as.
def eddy_binary(cuda): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
if cuda:
if find_executable('eddy_cuda'):
app.debug('Selected soft-linked CUDA version (\'eddy_cuda\')')
return 'eddy_cuda'
# Cuda versions are now provided with a CUDA trailing version number
# Users may not necessarily create a softlink to one of these and
# call it "eddy_cuda"
# Therefore, hunt through PATH looking for them; if more than one,
# select the one with the highest version number
binaries = [ ]
for directory in os.environ['PATH'].split(os.pathsep):
if os.path.isdir(directory):
for entry in os.listdir(directory):
if entry.startswith('eddy_cuda'):
binaries.append(entry)
max_version = 0.0
exe_path = ''
for entry in binaries:
try:
version = float(entry.lstrip('eddy_cuda'))
if version > max_version:
max_version = version
exe_path = entry
except:
pass
if exe_path:
app.debug('CUDA version ' + str(max_version) + ': ' + exe_path)
return exe_path
app.debug('No CUDA version of eddy found')
return ''
for candidate in [ 'eddy_openmp', 'eddy_cpu', 'eddy', 'fsl5.0-eddy' ]:
if find_executable(candidate):
app.debug(candidate)
return candidate
app.debug('No CPU version of eddy found')
return ''
# In some FSL installations, all binaries get prepended with "fsl5.0-". This function
# makes it more convenient to locate these commands.
# Note that if FSL 4 and 5 are installed side-by-side, the approach taken in this
# function will select the version 5 executable.
def exe_name(name): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
if find_executable(name):
output = name
elif find_executable('fsl5.0-' + name):
output = 'fsl5.0-' + name
app.warn('Using FSL binary \"' + output + '\" rather than \"' + name + '\"; suggest checking FSL installation')
else:
raise MRtrixError('Could not find FSL program \"' + name + '\"; please verify FSL install')
app.debug(output)
return output
# In some versions of FSL, even though we try to predict the names of image files that
# FSL commands will generate based on the suffix() function, the FSL binaries themselves
# ignore the FSLOUTPUTTYPE environment variable. Therefore, the safest approach is:
# Whenever receiving an output image from an FSL command, explicitly search for the path
def find_image(name): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
prefix = os.path.join(os.path.dirname(name), os.path.basename(name).split('.')[0])
if os.path.isfile(prefix + suffix()):
app.debug('Image at expected location: \"' + prefix + suffix() + '\"')
return prefix + suffix()
for suf in ['.nii', '.nii.gz', '.img']:
if os.path.isfile(prefix + suf):
app.debug('Expected image at \"' + prefix + suffix() + '\", but found at \"' + prefix + suf + '\"')
return prefix + suf
raise MRtrixError('Unable to find FSL output file for path \"' + name + '\"')
# For many FSL commands, the format of any output images will depend on the string
# stored in 'FSLOUTPUTTYPE'. This may even override a filename extension provided
# to the relevant command. Therefore use this function to 'guess' what the names
# of images provided by FSL commands will be.
def suffix(): #pylint: disable=unused-variable
from mrtrix3 import app #pylint: disable=import-outside-toplevel
global _SUFFIX
if _SUFFIX:
return _SUFFIX
fsl_output_type = os.environ.get('FSLOUTPUTTYPE', '')
if fsl_output_type == 'NIFTI':
app.debug('NIFTI -> .nii')
_SUFFIX = '.nii'
elif fsl_output_type == 'NIFTI_GZ':
app.debug('NIFTI_GZ -> .nii.gz')
_SUFFIX = '.nii.gz'
elif fsl_output_type == 'NIFTI_PAIR':
app.debug('NIFTI_PAIR -> .img')
_SUFFIX = '.img'
elif fsl_output_type == 'NIFTI_PAIR_GZ':
raise MRtrixError('MRtrix3 does not support compressed NIFTI pairs; please change FSLOUTPUTTYPE environment variable')
elif fsl_output_type:
app.warn('Unrecognised value for environment variable FSLOUTPUTTYPE (\"' + fsl_output_type + '\"): Expecting compressed NIfTIs, but FSL commands may fail')
_SUFFIX = '.nii.gz'
else:
app.warn('Environment variable FSLOUTPUTTYPE not set; FSL commands may fail, or script may fail to locate FSL command outputs')
_SUFFIX = '.nii.gz'
return _SUFFIX
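# Usage sketch (illustrative; the command line is an assumption): scripts
# typically predict an FSL output name with suffix() and then confirm where
# the binary actually wrote it with find_image():
#
#   run.command('bet input' + suffix() + ' brain')
#   brain_image = find_image('brain')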
|
mpl-2.0
| 6,801,620,265,861,604,000
| 42.754839
| 247
| 0.692716
| false
| 3.534132
| false
| false
| false
|
ESS-LLP/erpnext-healthcare
|
erpnext/regional/italy/utils.py
|
1
|
13784
|
from __future__ import unicode_literals
import frappe, json, os
from frappe.utils import flt, cstr
from erpnext.controllers.taxes_and_totals import get_itemised_tax
from frappe import _
from frappe.utils.file_manager import save_file, remove_file
from frappe.desk.form.load import get_attachments
from erpnext.regional.italy import state_codes
def update_itemised_tax_data(doc):
if not doc.taxes: return
itemised_tax = get_itemised_tax(doc.taxes)
for row in doc.items:
tax_rate = 0.0
if itemised_tax.get(row.item_code):
tax_rate = sum([tax.get('tax_rate', 0) for d, tax in itemised_tax.get(row.item_code).items()])
row.tax_rate = flt(tax_rate, row.precision("tax_rate"))
row.tax_amount = flt((row.net_amount * tax_rate) / 100, row.precision("net_amount"))
row.total_amount = flt((row.net_amount + row.tax_amount), row.precision("total_amount"))
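# Worked example (annotation added for clarity): with row.net_amount = 100.0
# and a summed tax_rate of 22%, tax_amount = 100.0 * 22 / 100 = 22.0 and
# total_amount = 122.0.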
@frappe.whitelist()
def export_invoices(filters=None):
saved_xmls = []
invoices = frappe.get_all("Sales Invoice", filters=get_conditions(filters), fields=["*"])
for invoice in invoices:
attachments = get_e_invoice_attachments(invoice)
saved_xmls += [attachment.file_name for attachment in attachments]
zip_filename = "{0}-einvoices.zip".format(frappe.utils.get_datetime().strftime("%Y%m%d_%H%M%S"))
download_zip(saved_xmls, zip_filename)
@frappe.whitelist()
def prepare_invoice(invoice, progressive_number):
#set company information
company = frappe.get_doc("Company", invoice.company)
invoice.progressive_number = progressive_number
invoice.unamended_name = get_unamended_name(invoice)
invoice.company_data = company
company_address = frappe.get_doc("Address", invoice.company_address)
invoice.company_address_data = company_address
#Set invoice type
if invoice.is_return and invoice.return_against:
invoice.type_of_document = "TD04" #Credit Note (Nota di Credito)
invoice.return_against_unamended = get_unamended_name(frappe.get_doc("Sales Invoice", invoice.return_against))
else:
invoice.type_of_document = "TD01" #Sales Invoice (Fattura)
#set customer information
invoice.customer_data = frappe.get_doc("Customer", invoice.customer)
customer_address = frappe.get_doc("Address", invoice.customer_address)
invoice.customer_address_data = customer_address
if invoice.shipping_address_name:
invoice.shipping_address_data = frappe.get_doc("Address", invoice.shipping_address_name)
if invoice.customer_data.is_public_administration:
invoice.transmission_format_code = "FPA12"
else:
invoice.transmission_format_code = "FPR12"
invoice.e_invoice_items = [item for item in invoice.items]
tax_data = get_invoice_summary(invoice.e_invoice_items, invoice.taxes)
invoice.tax_data = tax_data
#Check if stamp duty (Bollo) of 2 EUR exists.
stamp_duty_charge_row = next((tax for tax in invoice.taxes if tax.charge_type == _("Actual") and tax.tax_amount == 2.0 ), None)
if stamp_duty_charge_row:
invoice.stamp_duty = stamp_duty_charge_row.tax_amount
for item in invoice.e_invoice_items:
if item.tax_rate == 0.0 and item.tax_amount == 0.0:
item.tax_exemption_reason = tax_data["0.0"]["tax_exemption_reason"]
return invoice
def get_conditions(filters):
filters = json.loads(filters)
conditions = {"docstatus": 1}
if filters.get("company"): conditions["company"] = filters["company"]
if filters.get("customer"): conditions["customer"] = filters["customer"]
if filters.get("from_date"): conditions["posting_date"] = (">=", filters["from_date"])
if filters.get("to_date"): conditions["posting_date"] = ("<=", filters["to_date"])
if filters.get("from_date") and filters.get("to_date"):
conditions["posting_date"] = ("between", [filters.get("from_date"), filters.get("to_date")])
return conditions
#TODO: Use function from frappe once PR #6853 is merged.
def download_zip(files, output_filename):
from zipfile import ZipFile
input_files = [frappe.get_site_path('private', 'files', filename) for filename in files]
output_path = frappe.get_site_path('private', 'files', output_filename)
with ZipFile(output_path, 'w') as output_zip:
for input_file in input_files:
output_zip.write(input_file, arcname=os.path.basename(input_file))
with open(output_path, 'rb') as fileobj:
filedata = fileobj.read()
frappe.local.response.filename = output_filename
frappe.local.response.filecontent = filedata
frappe.local.response.type = "download"
def get_invoice_summary(items, taxes):
summary_data = frappe._dict()
for tax in taxes:
#Include only VAT charges.
if tax.charge_type == "Actual":
continue
#Charges to appear as items in the e-invoice.
if tax.charge_type in ["On Previous Row Total", "On Previous Row Amount"]:
reference_row = next((row for row in taxes if row.idx == int(tax.row_id or 0)), None)
if reference_row:
items.append(
frappe._dict(
idx=len(items)+1,
item_code=reference_row.description,
item_name=reference_row.description,
rate=reference_row.tax_amount,
qty=1.0,
amount=reference_row.tax_amount,
stock_uom=frappe.db.get_single_value("Stock Settings", "stock_uom") or _("Nos"),
tax_rate=tax.rate,
tax_amount=(reference_row.tax_amount * tax.rate) / 100,
net_amount=reference_row.tax_amount,
taxable_amount=reference_row.tax_amount,
item_tax_rate="{}",
charges=True
)
)
#Check item tax rates if tax rate is zero.
if tax.rate == 0:
for item in items:
item_tax_rate = json.loads(item.item_tax_rate)
if tax.account_head in item_tax_rate:
key = cstr(item_tax_rate[tax.account_head])
summary_data.setdefault(key, {"tax_amount": 0.0, "taxable_amount": 0.0, "tax_exemption_reason": "", "tax_exemption_law": ""})
summary_data[key]["tax_amount"] += item.tax_amount
summary_data[key]["taxable_amount"] += item.net_amount
if key == "0.0":
summary_data[key]["tax_exemption_reason"] = tax.tax_exemption_reason
summary_data[key]["tax_exemption_law"] = tax.tax_exemption_law
if summary_data == {}: #Implies that Zero VAT has not been set on any item.
summary_data.setdefault("0.0", {"tax_amount": 0.0, "taxable_amount": tax.total,
"tax_exemption_reason": tax.tax_exemption_reason, "tax_exemption_law": tax.tax_exemption_law})
else:
item_wise_tax_detail = json.loads(tax.item_wise_tax_detail)
for rate_item in [tax_item for tax_item in item_wise_tax_detail.items() if tax_item[1][0] == tax.rate]:
key = cstr(tax.rate)
if not summary_data.get(key): summary_data.setdefault(key, {"tax_amount": 0.0, "taxable_amount": 0.0})
summary_data[key]["tax_amount"] += rate_item[1][1]
summary_data[key]["taxable_amount"] += sum([item.net_amount for item in items if item.item_code == rate_item[0]])
for item in items:
key = cstr(tax.rate)
if item.get("charges"):
if not summary_data.get(key): summary_data.setdefault(key, {"taxable_amount": 0.0})
summary_data[key]["taxable_amount"] += item.taxable_amount
return summary_data
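#Sketch (illustrative numbers): two items taxed at 22% with net amounts 100
#and 150 collapse to {"22.0": {"tax_amount": 55.0, "taxable_amount": 250.0}},
#keyed by the rate as a string, the shape consumed when rendering the e-invoice.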
#Preflight for successful e-invoice export.
def sales_invoice_validate(doc):
#Validate company
if doc.doctype != 'Sales Invoice':
return
if not doc.company_address:
frappe.throw(_("Please set an Address on the Company '%s'" % doc.company), title=_("E-Invoicing Information Missing"))
else:
validate_address(doc.company_address)
company_fiscal_regime = frappe.get_cached_value("Company", doc.company, 'fiscal_regime')
if not company_fiscal_regime:
frappe.throw(_("Fiscal Regime is mandatory, kindly set the fiscal regime in the company {0}")
.format(doc.company))
else:
doc.company_fiscal_regime = company_fiscal_regime
if not doc.company_tax_id and not doc.company_fiscal_code:
frappe.throw(_("Please set either the Tax ID or Fiscal Code on Company '%s'" % doc.company), title=_("E-Invoicing Information Missing"))
#Validate customer details
customer_type, is_public_administration = frappe.db.get_value("Customer", doc.customer, ["customer_type", "is_public_administration"])
if customer_type == _("Individual"):
if not doc.customer_fiscal_code:
frappe.throw(_("Please set Fiscal Code for the customer '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
else:
if is_public_administration:
if not doc.customer_fiscal_code:
frappe.throw(_("Please set Fiscal Code for the public administration '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
else:
if not doc.tax_id:
frappe.throw(_("Please set Tax ID for the customer '%s'" % doc.customer), title=_("E-Invoicing Information Missing"))
if not doc.customer_address:
frappe.throw(_("Please set the Customer Address"), title=_("E-Invoicing Information Missing"))
else:
validate_address(doc.customer_address)
if not len(doc.taxes):
frappe.throw(_("Please set at least one row in the Taxes and Charges Table"), title=_("E-Invoicing Information Missing"))
else:
for row in doc.taxes:
if row.rate == 0 and row.tax_amount == 0 and not row.tax_exemption_reason:
frappe.throw(_("Row {0}: Please set at Tax Exemption Reason in Sales Taxes and Charges".format(row.idx)),
title=_("E-Invoicing Information Missing"))
for schedule in doc.payment_schedule:
if schedule.mode_of_payment and not schedule.mode_of_payment_code:
schedule.mode_of_payment_code = frappe.get_cached_value('Mode of Payment',
schedule.mode_of_payment, 'mode_of_payment_code')
#Ensure payment details are valid for e-invoice.
def sales_invoice_on_submit(doc, method):
#Validate payment details
if get_company_country(doc.company) not in ['Italy',
'Italia', 'Italian Republic', 'Repubblica Italiana']:
return
if not len(doc.payment_schedule):
frappe.throw(_("Please set the Payment Schedule"), title=_("E-Invoicing Information Missing"))
else:
for schedule in doc.payment_schedule:
if not schedule.mode_of_payment:
frappe.throw(_("Row {0}: Please set the Mode of Payment in Payment Schedule".format(schedule.idx)),
title=_("E-Invoicing Information Missing"))
elif not frappe.db.get_value("Mode of Payment", schedule.mode_of_payment, "mode_of_payment_code"):
frappe.throw(_("Row {0}: Please set the correct code on Mode of Payment {1}".format(schedule.idx, schedule.mode_of_payment)),
title=_("E-Invoicing Information Missing"))
prepare_and_attach_invoice(doc)
def prepare_and_attach_invoice(doc, replace=False):
progressive_name, progressive_number = get_progressive_name_and_number(doc, replace)
invoice = prepare_invoice(doc, progressive_number)
invoice_xml = frappe.render_template('erpnext/regional/italy/e-invoice.xml', context={"doc": invoice}, is_path=True)
invoice_xml = invoice_xml.replace("&", "&")
xml_filename = progressive_name + ".xml"
return save_file(xml_filename, invoice_xml, dt=doc.doctype, dn=doc.name, is_private=True)
@frappe.whitelist()
def generate_single_invoice(docname):
doc = frappe.get_doc("Sales Invoice", docname)
e_invoice = prepare_and_attach_invoice(doc, True)
content = None
with open(frappe.get_site_path('private', 'files', e_invoice.file_name), "r") as f:
content = f.read()
frappe.local.response.filename = e_invoice.file_name
frappe.local.response.filecontent = content
frappe.local.response.type = "download"
#Delete e-invoice attachment on cancel.
def sales_invoice_on_cancel(doc, method):
if get_company_country(doc.company) not in ['Italy',
'Italia', 'Italian Republic', 'Repubblica Italiana']:
return
for attachment in get_e_invoice_attachments(doc):
remove_file(attachment.name, attached_to_doctype=doc.doctype, attached_to_name=doc.name)
def get_company_country(company):
return frappe.get_cached_value('Company', company, 'country')
def get_e_invoice_attachments(invoice):
out = []
attachments = get_attachments(invoice.doctype, invoice.name)
company_tax_id = invoice.company_tax_id if invoice.company_tax_id.startswith("IT") else "IT" + invoice.company_tax_id
for attachment in attachments:
if attachment.file_name and attachment.file_name.startswith(company_tax_id) and attachment.file_name.endswith(".xml"):
out.append(attachment)
return out
def validate_address(address_name):
fields = ["pincode", "city", "country_code"]
data = frappe.get_cached_value("Address", address_name, fields, as_dict=1) or {}
for field in fields:
if not data.get(field):
frappe.throw(_("Please set {0} for address {1}".format(field.replace('-',''), address_name)),
title=_("E-Invoicing Information Missing"))
def get_unamended_name(doc):
attributes = ["naming_series", "amended_from"]
for attribute in attributes:
if not hasattr(doc, attribute):
return doc.name
if doc.amended_from:
return "-".join(doc.name.split("-")[:-1])
else:
return doc.name
def get_progressive_name_and_number(doc, replace=False):
if replace:
for attachment in get_e_invoice_attachments(doc):
remove_file(attachment.name, attached_to_doctype=doc.doctype, attached_to_name=doc.name)
filename = attachment.file_name.split(".xml")[0]
return filename, filename.split("_")[1]
company_tax_id = doc.company_tax_id if doc.company_tax_id.startswith("IT") else "IT" + doc.company_tax_id
progressive_name = frappe.model.naming.make_autoname(company_tax_id + "_.#####")
progressive_number = progressive_name.split("_")[1]
return progressive_name, progressive_number
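#Sketch: for company_tax_id "IT01234567890" the autoname series produces
#names like "IT01234567890_00001", so progressive_number is "00001"; when
#replace is True the number is recovered from the existing attachment instead.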
def set_state_code(doc, method):
if doc.get('country_code'):
doc.country_code = doc.country_code.upper()
if not doc.get('state'):
return
if not (hasattr(doc, "state_code") and doc.country in ["Italy", "Italia", "Italian Republic", "Repubblica Italiana"]):
return
state_codes_lower = {key.lower():value for key,value in state_codes.items()}
state = doc.get('state','').lower()
if state_codes_lower.get(state):
doc.state_code = state_codes_lower.get(state)
|
gpl-3.0
| -7,304,240,070,135,029,000
| 38.495702
| 139
| 0.714089
| false
| 3.127751
| false
| false
| false
|
GoogleChrome/chromium-dashboard
|
pages/intentpreview_test.py
|
1
|
6198
|
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import testing_config # Must be imported before the module under test.
import mock
import flask
import werkzeug
from pages import intentpreview
from internals import models
class IntentEmailPreviewHandlerTest(testing_config.CustomTestCase):
def setUp(self):
self.feature_1 = models.Feature(
name='feature one', summary='sum', category=1, visibility=1,
standardization=1, web_dev_views=1, impl_status_chrome=1,
intent_stage=models.INTENT_IMPLEMENT)
self.feature_1.put()
self.request_path = '/admin/features/launch/%d/%d?intent' % (
models.INTENT_SHIP, self.feature_1.key.integer_id())
self.handler = intentpreview.IntentEmailPreviewHandler()
def tearDown(self):
self.feature_1.key.delete()
def test_get__anon(self):
"""Anon cannot view this preview features, gets redirected to login."""
testing_config.sign_out()
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
actual_response = self.handler.get_template_data(feature_id=feature_id)
self.assertEqual('302 FOUND', actual_response.status)
def test_get__no_existing(self):
"""Trying to view a feature that does not exist gives a 404."""
testing_config.sign_in('user1@google.com', 123567890)
bad_feature_id = self.feature_1.key.integer_id() + 1
with intentpreview.app.test_request_context(self.request_path):
with self.assertRaises(werkzeug.exceptions.NotFound):
self.handler.get_template_data(feature_id=bad_feature_id)
def test_get__no_stage_specified(self):
"""Allowed user can preview intent email for a feature using an old URL."""
request_path = (
'/admin/features/launch/%d?intent' % self.feature_1.key.integer_id())
testing_config.sign_in('user1@google.com', 123567890)
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(request_path):
actual_data = self.handler.get_template_data(feature_id=feature_id)
self.assertIn('feature', actual_data)
self.assertEqual('feature one', actual_data['feature']['name'])
def test_get__normal(self):
"""Allowed user can preview intent email for a feature."""
testing_config.sign_in('user1@google.com', 123567890)
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
actual_data = self.handler.get_template_data(feature_id=feature_id)
self.assertIn('feature', actual_data)
self.assertEqual('feature one', actual_data['feature']['name'])
def test_get_page_data(self):
"""page_data has correct values."""
feature_id = self.feature_1.key.integer_id()
with intentpreview.app.test_request_context(self.request_path):
page_data = self.handler.get_page_data(
feature_id, self.feature_1, models.INTENT_IMPLEMENT)
self.assertEqual(
'http://localhost/feature/%d' % feature_id,
page_data['default_url'])
self.assertEqual(
['motivation'],
page_data['sections_to_show'])
self.assertEqual(
'Intent to Prototype',
page_data['subject_prefix'])
def test_compute_subject_prefix__incubate_new_feature(self):
"""We offer users the correct subject line for each intent stage."""
self.assertEqual(
'Intent stage "None"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_NONE))
self.assertEqual(
'Intent stage "Start incubating"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_INCUBATE))
self.assertEqual(
'Intent to Prototype',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_IMPLEMENT))
self.assertEqual(
'Ready for Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXPERIMENT))
self.assertEqual(
'Intent stage "Evaluate readiness to ship"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_IMPLEMENT_SHIP))
self.assertEqual(
'Intent to Experiment',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXTEND_TRIAL))
self.assertEqual(
'Intent to Ship',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_SHIP))
self.assertEqual(
'Intent to Extend Deprecation Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_REMOVED))
self.assertEqual(
'Intent stage "Shipped"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_SHIPPED))
self.assertEqual(
'Intent stage "Parked"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_PARKED))
def test_compute_subject_prefix__deprecate_feature(self):
"""We offer users the correct subject line for each intent stage."""
self.feature_1.feature_type = models.FEATURE_TYPE_DEPRECATION_ID
self.assertEqual(
'Intent stage "None"',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_NONE))
self.assertEqual(
'Intent to Deprecate and Remove',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_INCUBATE))
self.assertEqual(
'Request for Deprecation Trial',
self.handler.compute_subject_prefix(
self.feature_1, models.INTENT_EXTEND_TRIAL))
|
apache-2.0
| 5,732,362,012,227,741,000
| 36.569697
| 79
| 0.68264
| false
| 3.702509
| true
| false
| false
|
kmadathil/sanskrit_parser
|
sanskrit_parser/base/maheshvara_sutra.py
|
1
|
8576
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Intro
======
Get varnas in a pratyahara:
.. code:: python
>>> from sanskrit_parser.base.maheshvara_sutra import MaheshvaraSutras
>>> MS = MaheshvaraSutras()
>>> jaS = SanskritImmutableString('jaS', encoding=SLP1)
>>> print(MS.getPratyahara(jaS))
jabagaqada
Check if a varna is in a pratyahara:
.. code:: python
>>> g = SanskritImmutableString('g')
>>> print(MS.isInPratyahara(jaS, g))
True
>>> k = SanskritImmutableString('k')
>>> print(MS.isInPratyahara(jaS, k))
False
Command line usage
==================
::
$ python -m sanskrit_parser.base.maheshvara_sutra --encoding SLP1 --pratyahara jaS
aiuR fxk eoN EOc hayavaraw laR YamaNaRanam JaBaY GaQaDaz jabagaqadaS KaPaCaWaTacawatav kapay Sazasar hal
जश्
जबगडद
"""
from __future__ import print_function
from . import sanskrit_base
import re
import six
class MaheshvaraSutras(object):
"""
Singleton MaheshvaraSutras class
Attributes:
MS(SanskritImmutableString) : Internal representation of mAheshvara sutras
MSS(str) : Canonical (SLP1) representation
"""
def __init__(self):
"""
Initialize Maheshvara Sutras object
"""
# Note that a space is deliberately left after each it to help in
# demarcating them.
self.MS = sanskrit_base.SanskritImmutableString(
u'अइउण् ऋऌक् एओङ् ऐऔच् हयवरट् लण् ञमङणनम् झभञ् घढधष् जबगडदश् खफछठथचटतव् कपय् शषसर् हल् ',
sanskrit_base.DEVANAGARI)
# SLP1 version for internal operations
self.MSS = self.MS.canonical()
def __str__(self):
# Use SLP1 for default string output
return self.MSS
def getPratyahara(self, p, longp=True, remove_a=False, dirghas=False):
"""
Return list of varnas covered by a pratyahara
Args:
p(:class:SanskritImmutableString): Pratyahara
longp(boolean :optional:): When True (default), uses long pratyaharas
remove_a(boolean :optional:): When True, removes intermediate 'a'. This is better for computational use
dirghas(boolean :optional:): When True (default=False) adds dirgha vowels to the returned varnas
Returns:
(SanskritImmutableString): List of varnas to the same encoding as p
"""
# SLP1 encoded pratyahara string
ps = p.canonical()
# it - halantyam
pit = ps[-1]
# Non it - all except it
pnit = ps[:-1]
# Non it position
pnpos = self.MSS.find(pnit)
# It position - space added to match it marker in internal
# representation
if longp: # Find last occurence of it
pitpos = self.MSS.rfind(pit + ' ', pnpos)
else: # Find first occurence of it
pitpos = self.MSS.find(pit + ' ', pnpos)
# Substring. This includes intermediate its and spaces
ts = self.MSS[pnpos:pitpos]
# Replace its and spaces
ts = re.sub('. ', '', ts)
# Remove अकारः मुखसुखार्थः
if remove_a:
ts = ts[0] + ts[1:].replace('a', '')
# Add dIrgha vowels if requested
if dirghas:
ts = ts.replace('a', 'aA').replace('i', 'iI').replace('u', 'uU').replace('f', 'fF').replace('x', 'xX')
return sanskrit_base.SanskritImmutableString(ts, sanskrit_base.SLP1)
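# Sketch (SLP1 throughout): for the pratyahara 'ac' the scan runs from the
# first non-it char 'a' up to the it marker 'c ', then re.sub('. ', '', ...)
# strips the intermediate it markers:
#   MS.getPratyahara(sanskrit_base.SanskritImmutableString('ac', sanskrit_base.SLP1)).canonical()
#   -> 'aiufxeoEO'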
def isInPratyahara(self, p, v, longp=True):
"""
Checks whether a given varna is in a pratyahara
Args:
p(SanskritImmutableString): Pratyahara
v(SanskritImmutableString): Varna
longp(boolean :optional:): When True (default), uses long pratyaharas
Returns
boolean: Is v in p?
"""
vs = v.canonical()
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
# So, we change long and pluta vowels to short ones in the input string
# Replace long vowels with short ones (note SLP1 encoding)
vs = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), vs)
# Remove pluta
vs = vs.replace('3', '')
# Convert Pratyahara into String
# the 'a' varna needs special treatment - we remove the
# अकारः मुखसुखार्थः before searching!
pos = self.getPratyahara(p, longp, remove_a=vs[0] == 'a').canonical()
# Check if varna String is in Pratyahara String
return (pos.find(vs) != -1)
def isSavarna(self, v, a):
"""
Checks whether a given varna "a" is savarna to another "v"
Args:
v(SanskritImmutableString): Varna Indicator
a(SanskritImmutableString): Varna
v can be a svara (in which case we return True irrespective of length
of a)
v can be an udit, in which we return True for anything in the group
v can be tapara in which we return true only for the right length
Returns
boolean: Is v savarna to p?
"""
ac = a.canonical()
vc = v.canonical()
# Single
if len(vc) == 1:
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
# So, we change long and pluta vowels to short ones in the input string
# Replace long vowels with short ones (note SLP1 encoding)
ac = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), ac)
# Remove pluta
ac = ac.replace('3', '')
vc = re.sub('[AIUFX]+', lambda m: m.group(0).lower(), vc)
# Remove pluta
vc = vc.replace('3', '')
return ac == vc
elif vc[-1] == "t":
# taparastatkAlasya
return ac == vc[:-1]
# FIXME implement tkArsya para interpretation
elif vc[-1] == "u":
# १ . १ . ६९ अणुदित् सवर्णस्य चाप्रत्ययः
if vc[0] == "k":
vc = "kKgGN"
elif vc[0] == "c":
vc = "cCjJY"
elif vc[0] == "w":
vc = "wWqQR"
elif vc[0] == "t":
vc = "tTdDn"
elif vc[0] == "p":
vc = "pPbBm"
return ac in vc
else:
return ac in vc
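# Sketch: udit indicators expand to their varga, so 'ku' stands for 'kKgGN'
# and isSavarna('ku', 'G') is True; tapara 'at' matches only the short vowel,
# so isSavarna('at', 'a') is True while isSavarna('at', 'A') is False
# (arguments abbreviated here to their canonical SLP1 strings).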
if __name__ == "__main__":
import argparse
def getArgs():
"""
Argparse routine.
Returns args variable
"""
# Parser Setup
parser = argparse.ArgumentParser(description='SanskritImmutableString')
# Pratyahara - print out the list of varnas in this
parser.add_argument('--pratyahara', type=str, default="ik")
# Varna. Optional. Check if this varna is in pratyahara above
parser.add_argument('--varna', type=str, default=None)
# Encoding Optional
parser.add_argument('--encoding', type=str, default=None)
# Short pratyaharas
parser.add_argument('--short', action='store_true')
# Remove intermediate as
parser.add_argument('--remove-a', action='store_true')
# Include dIrghas when returning the pratyAhAra
parser.add_argument('--dirghas', action='store_true', default=False)
parser.add_argument('--output-slp1', action='store_true')
return parser.parse_args()
def main():
args = getArgs()
m = MaheshvaraSutras()
print(m)
if args.encoding is not None:
e = sanskrit_base.SCHEMES[args.encoding]
else:
e = None
p = sanskrit_base.SanskritImmutableString(args.pratyahara, e)
longp = not args.short
if args.output_slp1 is False:
print(six.text_type(p.devanagari()))
print(six.text_type(m.getPratyahara(p, longp, args.remove_a, args.dirghas).devanagari()))
else:
print(six.text_type(p.canonical()))
print(six.text_type(m.getPratyahara(p, longp, args.remove_a, args.dirghas).canonical()))
if args.varna is not None:
v = sanskrit_base.SanskritImmutableString(args.varna, e)
print(u"Is {} in {}?".format(v.devanagari(),
p.devanagari()))
print(m.isInPratyahara(p, v, longp))
main()
|
mit
| -1,527,397,545,974,053,400
| 33.369748
| 116
| 0.566259
| false
| 3.144944
| false
| false
| false
|
to266/hyperspy
|
hyperspy/_components/expression.py
|
1
|
4851
|
from functools import wraps
from hyperspy.component import Component
_CLASS_DOC = \
"""%s component (created with Expression).
.. math::
f(x) = %s
"""
def _fill_function_args(fn):
@wraps(fn)
def fn_wrapped(self, x):
return fn(x, *[p.value for p in self.parameters])
return fn_wrapped
class Expression(Component):
def __init__(self, expression, name, position=None, module="numpy",
autodoc=True, **kwargs):
"""Create a component from a string expression.
It automatically generates the partial derivatives and the
class docstring.
Parameters
----------
expression: str
Component function in SymPy text expression format. See the SymPy
documentation for details. The only additional constraint is that
the variable must be `x`. Also, if `module` is "numexpr" the
functions are limited to those that numexpr supports. See its
documentation for details.
name : str
Name of the component.
position: str, optional
The parameter name that defines the position of the component if
applicable. It enables adjusting the position of the component
interactively in a model.
module: {"numpy", "numexpr"}, default "numpy"
Module used to evaluate the function. numexpr is often faster but
it supports fewer functions.
**kwargs
Keyword arguments can be used to initialise the value of the
parameters.
Methods
-------
compile_function: useful to recompile the function and gradient with
a different module.
Examples
--------
The following creates a Gaussian component and set the initial value
of the parameters:
>>> hs.model.components.Expression(
... expression="height * exp(-(x - x0) ** 2 * 4 * log(2)/ fwhm ** 2)",
... name="Gaussian",
... height=1,
... fwhm=1,
... x0=0,
... position="x0",)
"""
import sympy
self._str_expression = expression
self.compile_function(module=module)
# Initialise component
Component.__init__(self, self._parameter_strings)
self._whitelist['expression'] = ('init', expression)
self._whitelist['name'] = ('init', name)
self._whitelist['position'] = ('init', position)
self._whitelist['module'] = ('init', module)
self.name = name
# Set the position parameter
if position:
self._position = getattr(self, position)
# Set the initial value of the parameters
if kwargs:
for kwarg, value in kwargs.items():
setattr(getattr(self, kwarg), 'value', value)
if autodoc:
self.__doc__ = _CLASS_DOC % (
name, sympy.latex(sympy.sympify(expression)))
def function(self, x):
return self._f(x, *[p.value for p in self.parameters])
def compile_function(self, module="numpy"):
import sympy
from sympy.utilities.lambdify import lambdify
expr = sympy.sympify(self._str_expression)
rvars = sympy.symbols([s.name for s in expr.free_symbols], real=True)
real_expr = expr.subs(
{orig: real_ for (orig, real_) in zip(expr.free_symbols, rvars)})
# just replace with the assumption that all our variables are real
expr = real_expr
eval_expr = expr.evalf()
# Extract parameters
parameters = [
symbol for symbol in expr.free_symbols if symbol.name != "x"]
parameters.sort(key=lambda x: x.name) # to have a reliable order
# Extract x
x, = [symbol for symbol in expr.free_symbols if symbol.name == "x"]
# Create compiled function
self._f = lambdify([x] + parameters, eval_expr,
modules=module, dummify=False)
parnames = [symbol.name for symbol in parameters]
self._parameter_strings = parnames
for parameter in parameters:
grad_expr = sympy.diff(eval_expr, parameter)
setattr(self,
"_f_grad_%s" % parameter.name,
lambdify([x] + parameters,
grad_expr.evalf(),
modules=module,
dummify=False)
)
setattr(self,
"grad_%s" % parameter.name,
_fill_function_args(
getattr(
self,
"_f_grad_%s" %
parameter.name)).__get__(
self,
Expression)
)
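# A minimal usage sketch (hypothetical parameter names): compiling
#   line = Expression("a * x + b", name="Line", a=2, b=1)
# yields line.function(x) plus one gradient per free parameter generated via
# sympy.diff, so line.grad_a(x) evaluates to x and line.grad_b(x) to 1.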
|
gpl-3.0
| 4,370,389,324,684,439,000
| 33.404255
| 78
| 0.545867
| false
| 4.651007
| false
| false
| false
|
rwl/PyCIM
|
PyCIM/PrettyPrintXML.py
|
1
|
1717
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from xml.etree.cElementTree import parse, tostring
def xmlpp(source):
root = parse(source).getroot()
indent(root)
return tostring(root)
def indent(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
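# Usage sketch (assumed file name):
#   print(xmlpp("model.xml"))
# indent() rewrites each element's .text/.tail in place so every nesting
# level gains two spaces before tostring() serializes the tree.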
|
mit
| 6,270,850,627,351,509,000
| 40.902439
| 78
| 0.699476
| false
| 4.107656
| false
| false
| false
|
Enteee/pdml2flow
|
pdml2flow/flow.py
|
1
|
3496
|
# vim: set fenc=utf8 ts=4 sw=4 et :
import json
import dict2xml
from .autovivification import AutoVivification
from .conf import Conf
from .utils import call_plugin
from .logging import *
class Flow():
# The overall frame time
newest_overall_frame_time = 0
@staticmethod
def get_flow_id(frame):
flowid = [frame[d] for d in Conf.FLOW_DEF]
valid = any([type(i) is not AutoVivification for i in flowid])
# check if flowid is empty
if not valid:
return None
return str(flowid)
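# Sketch (assuming Conf.FLOW_DEF = ['ip.src', 'ip.dst']): a frame carrying
# both fields yields the id "['10.0.0.1', '10.0.0.2']"; if every definition
# field is still an empty AutoVivification the frame defines no flow and
# None is returned.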
def __init__(self, first_frame):
first_frame_time = first_frame[Conf.FRAME_TIME]
self.__newest_frame_time = self.__first_frame_time = first_frame_time
self.__id = self.get_flow_id(first_frame)
if Conf.FRAMES_ARRAY:
self.__frames = []
else:
self.__frames = AutoVivification()
self.__framecount = 0
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_new',
self,
first_frame.cast_dicts(dict)
)
self.add_frame(first_frame)
def __hash__(self):
return hash(self.__id)
def __eq__(self, other):
return self.__id == other.__id
@property
def id(self):
return self.__id
@property
def frames(self):
# clean the frame data
if Conf.FRAMES_ARRAY:
self.__frames = [
f.clean_empty()
for f in self.__frames
]
ret = [
f.cast_dicts(dict)
for f in self.__frames
]
else:
self.__frames = self.__frames.clean_empty()
ret = self.__frames.cast_dicts(dict)
return ret
@property
def first_frame_time(self):
return self.__first_frame_time
@property
def newest_frame_time(self):
return self.__newest_frame_time
@property
def framecount(self):
return self.__framecount
def add_frame(self, frame):
# check if frame expands flow length
frame_time = frame[Conf.FRAME_TIME]
self.__first_frame_time = min(self.__first_frame_time, frame_time)
self.__newest_frame_time = max(self.__newest_frame_time, frame_time)
self.__framecount += 1
# Extract data
if Conf.FRAMES_ARRAY:
self.__frames.append(
frame.clean_empty()
)
else:
self.__frames.merge(
frame.clean_empty()
)
if Conf.COMPRESS_DATA:
self.__frames = self.__frames.compress()
debug(
'flow duration: {}'.format(
self.__newest_frame_time - self.__first_frame_time
)
)
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'frame_new',
frame.cast_dicts(dict),
self
)
def not_expired(self):
return self.__newest_frame_time > (Flow.newest_overall_frame_time - Conf.FLOW_BUFFER_TIME)
def expired(self):
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_expired',
self
)
self.end()
def end(self):
for plugin in Conf.PLUGINS:
call_plugin(
plugin,
'flow_end',
self
)
|
apache-2.0
| -4,154,306,614,671,607,300
| 24.705882
| 98
| 0.507151
| false
| 4.103286
| false
| false
| false
|
jyejare/robottelo
|
robottelo/cli/base.py
|
1
|
16328
|
"""Generic base class for cli hammer commands."""
import logging
import re
from wait_for import wait_for
from robottelo import ssh
from robottelo.cli import hammer
from robottelo.config import settings
class CLIError(Exception):
"""Indicates that a CLI command could not be run."""
class CLIBaseError(Exception):
"""Indicates that a CLI command has finished with return code different
from zero.
:param return_code: CLI command return code
:param stderr: contents of the ``stderr``
:param msg: explanation of the error
"""
def __init__(self, return_code, stderr, msg):
self.return_code = return_code
self.stderr = stderr
self.msg = msg
super(CLIBaseError, self).__init__(msg)
self.message = msg
def __str__(self):
"""Include class name, return_code, stderr and msg to string repr so
assertRaisesRegexp can be used to assert error present on any
attribute
"""
return repr(self)
def __repr__(self):
"""Include class name return_code, stderr and msg to improve logging
"""
return '{}(return_code={!r}, stderr={!r}, msg={!r})'.format(
type(self).__name__, self.return_code, self.stderr, self.msg
)
class CLIReturnCodeError(CLIBaseError):
"""Error to be raised when an error occurs due to some validation error
when executing the hammer CLI.
See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
"""
class CLIDataBaseError(CLIBaseError):
"""Error to be raised when an error occurs due to some missing parameter
which causes a database error in hammer
See: https://github.com/SatelliteQE/robottelo/issues/3790 for more details
"""
class Base(object):
"""
@param command_base: base command of hammer.
Output of recent `hammer --help`::
activation-key Manipulate activation keys.
architecture Manipulate architectures.
auth Foreman connection login/logout.
auth-source Manipulate auth sources.
bootdisk Download boot disks
capsule Manipulate capsule
compute-resource Manipulate compute resources.
content-host Manipulate content hosts on the server
content-report View Content Reports
content-view Manipulate content views.
defaults Defaults management
docker Manipulate docker content
domain Manipulate domains.
environment Manipulate environments.
erratum Manipulate errata
fact Search facts.
filter Manage permission filters.
global-parameter Manipulate global parameters.
gpg Manipulate GPG Key actions on the server
host Manipulate hosts.
host-collection Manipulate host collections
hostgroup Manipulate hostgroups.
import Import data exported from a Red Hat Sat..
lifecycle-environment Manipulate lifecycle_environments
location Manipulate locations.
medium Manipulate installation media.
model Manipulate hardware models.
organization Manipulate organizations
os Manipulate operating system.
ostree-branch Manipulate ostree branches
package Manipulate packages.
package-group Manipulate package groups
partition-table Manipulate partition tables.
ping Get the status of the server
product Manipulate products.
proxy Manipulate smart proxies.
puppet-class Search puppet modules.
puppet-module View Puppet Module details.
report Browse and read reports.
repository Manipulate repositories
repository-set Manipulate repository sets on the server
role Manage user roles.
sc-param Manipulate smart class parameters.
settings Change server settings.
shell Interactive shell
subnet Manipulate subnets.
subscription Manipulate subscriptions.
sync-plan Manipulate sync plans
task Tasks related actions.
template Manipulate provisioning templates.
user Manipulate users.
user-group Manage user groups.
@since: 27.Nov.2013
"""
command_base = None # each inherited instance should define this
command_sub = None # specific to instance, like: create, update, etc
command_requires_org = False # True when command requires organization-id
logger = logging.getLogger('robottelo')
_db_error_regex = re.compile(r'.*INSERT INTO|.*SELECT .*FROM|.*violates foreign key')
@classmethod
def _handle_response(cls, response, ignore_stderr=None):
"""Verify ``return_code`` of the CLI command.
Check for a non-zero return code or any stderr contents.
:param response: a ``SSHCommandResult`` object, returned by
:mod:`robottelo.ssh.command`.
:param ignore_stderr: indicates whether to throw a warning in logs if
``stderr`` is not empty.
:returns: contents of ``stdout``.
:raises robottelo.cli.base.CLIReturnCodeError: If return code is
different from zero.
"""
if response.return_code != 0:
full_msg = (
'Command "{0} {1}" finished with return_code {2}\n'
'stderr contains following message:\n{3}'.format(
cls.command_base, cls.command_sub, response.return_code, response.stderr
)
)
error_data = (response.return_code, response.stderr, full_msg)
if cls._db_error_regex.search(full_msg):
raise CLIDataBaseError(*error_data)
raise CLIReturnCodeError(*error_data)
if len(response.stderr) != 0 and not ignore_stderr:
cls.logger.warning('stderr contains following message:\n{0}'.format(response.stderr))
return response.stdout
@classmethod
def add_operating_system(cls, options=None):
"""
Adds OS to record.
"""
cls.command_sub = 'add-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def create(cls, options=None, timeout=None):
"""
Creates a new record using the arguments passed via dictionary.
"""
cls.command_sub = 'create'
if options is None:
options = {}
result = cls.execute(cls._construct_command(options), output_format='csv', timeout=timeout)
# Extract new object ID if it was successfully created
if len(result) > 0 and 'id' in result[0]:
obj_id = result[0]['id']
# Fetch new object
# Some Katello obj require the organization-id for subcommands
info_options = {'id': obj_id}
if cls.command_requires_org:
if 'organization-id' not in options:
tmpl = 'organization-id option is required for {0}.create'
raise CLIError(tmpl.format(cls.__name__))
info_options['organization-id'] = options['organization-id']
# organization creation can take some time
if cls.command_base == 'organization':
new_obj, _ = wait_for(
lambda: cls.info(info_options),
timeout=300,
delay=5,
silent_failure=True,
handle_exception=True,
)
else:
new_obj = cls.info(info_options)
# stdout should be a dictionary containing the object
if len(new_obj) > 0:
result = new_obj
return result
@classmethod
def delete(cls, options=None, timeout=None):
"""Deletes existing record."""
cls.command_sub = 'delete'
return cls.execute(cls._construct_command(options), ignore_stderr=True, timeout=timeout)
@classmethod
def delete_parameter(cls, options=None):
"""
Deletes parameter from record.
"""
cls.command_sub = 'delete-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def dump(cls, options=None):
"""
Displays the content for existing partition table.
"""
cls.command_sub = 'dump'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def _get_username_password(cls, username=None, password=None):
"""Lookup for the username and password for cli command in following
order:
1. ``user`` or ``password`` parameters
2. ``foreman_admin_username`` or ``foreman_admin_password`` attributes
3. foreman.admin.username or foreman.admin.password configuration
:return: A tuple with the username and password found
:rtype: tuple
"""
if username is None:
try:
username = getattr(cls, 'foreman_admin_username')
except AttributeError:
username = settings.server.admin_username
if password is None:
try:
password = getattr(cls, 'foreman_admin_password')
except AttributeError:
password = settings.server.admin_password
return (username, password)
@classmethod
def execute(
cls,
command,
user=None,
password=None,
output_format=None,
timeout=None,
ignore_stderr=None,
return_raw_response=None,
connection_timeout=None,
):
"""Executes the cli ``command`` on the server via ssh"""
user, password = cls._get_username_password(user, password)
time_hammer = False
if settings.performance:
time_hammer = settings.performance.time_hammer
# add time to measure hammer performance
cmd = 'LANG={0} {1} hammer -v {2} {3} {4} {5}'.format(
settings.locale,
'time -p' if time_hammer else '',
'-u {0}'.format(user) if user is not None else '--interactive no',
'-p {0}'.format(password) if password is not None else '',
'--output={0}'.format(output_format) if output_format else '',
command,
)
response = ssh.command(
cmd.encode('utf-8'),
output_format=output_format,
timeout=timeout,
connection_timeout=connection_timeout,
)
if return_raw_response:
return response
else:
return cls._handle_response(response, ignore_stderr=ignore_stderr)
@classmethod
def exists(cls, options=None, search=None):
"""Search for an entity using the query ``search[0]="search[1]"``
Will be used the ``list`` command with the ``--search`` option to do
the search.
If ``options`` argument already have a search key, then the ``search``
argument will not be evaluated. Which allows different search query.
"""
if options is None:
options = {}
if search is not None and 'search' not in options:
options.update({'search': '{0}=\\"{1}\\"'.format(search[0], search[1])})
result = cls.list(options)
if result:
result = result[0]
return result
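# Sketch (hypothetical subclass): Org.exists(search=('name', 'ACME')) runs
# `hammer organization list` with --search='name=\"ACME\"' and returns the
# first CSV row found, or the empty list when nothing matches.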
@classmethod
def info(cls, options=None, output_format=None, return_raw_response=None):
"""Reads the entity information."""
cls.command_sub = 'info'
if options is None:
options = {}
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError('organization-id option is required for {0}.info'.format(cls.__name__))
result = cls.execute(
command=cls._construct_command(options),
output_format=output_format,
return_raw_response=return_raw_response,
)
if not return_raw_response and output_format != 'json':
result = hammer.parse_info(result)
return result
@classmethod
def list(cls, options=None, per_page=True, output_format='csv'):
"""
List information.
@param options: ID (sometimes name works as well) to retrieve info.
"""
cls.command_sub = 'list'
if options is None:
options = {}
if 'per-page' not in options and per_page:
options['per-page'] = 10000
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError('organization-id option is required for {0}.list'.format(cls.__name__))
result = cls.execute(cls._construct_command(options), output_format=output_format)
return result
@classmethod
def puppetclasses(cls, options=None):
"""
Lists all puppet classes.
"""
cls.command_sub = 'puppet-classes'
result = cls.execute(cls._construct_command(options), output_format='csv')
return result
@classmethod
def remove_operating_system(cls, options=None):
"""
Removes OS from record.
"""
cls.command_sub = 'remove-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def sc_params(cls, options=None):
"""
Lists all smart class parameters.
"""
cls.command_sub = 'sc-params'
result = cls.execute(cls._construct_command(options), output_format='csv')
return result
@classmethod
def set_parameter(cls, options=None):
"""
Creates or updates parameter for a record.
"""
cls.command_sub = 'set-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def update(cls, options=None, return_raw_response=None):
"""
Updates existing record.
"""
cls.command_sub = 'update'
result = cls.execute(
cls._construct_command(options),
output_format='csv',
return_raw_response=return_raw_response,
)
return result
@classmethod
def with_user(cls, username=None, password=None):
"""Context Manager for credentials"""
class Wrapper(cls):
"""Wrapper class which defines the foreman admin username and
password to be used when executing any cli command.
"""
foreman_admin_username = username
foreman_admin_password = password
return Wrapper
@classmethod
def _construct_command(cls, options=None):
"""Build a hammer cli command based on the options passed"""
tail = ''
if options is None:
options = {}
for key, val in options.items():
if val is None:
continue
if val is True:
tail += ' --{0}'.format(key)
elif val is not False:
if isinstance(val, list):
val = ','.join(str(el) for el in val)
tail += ' --{0}="{1}"'.format(key, val)
cmd = f"{cls.command_base} {cls.command_sub or ''} {tail.strip()}"
return cmd
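# Sketch (hypothetical options): with command_base 'organization' and
# command_sub 'create',
#   _construct_command({'name': 'ACME', 'label': None, 'verbose': True})
# skips the None value, renders True as a bare flag and quotes the rest:
#   organization create --name="ACME" --verbose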
|
gpl-3.0
| -8,573,411,306,033,115,000
| 33.447257
| 99
| 0.563694
| false
| 4.712266
| false
| false
| false
|
sanjuro/RCJK
|
vendor/django/core/cache/backends/db.py
|
2
|
4332
|
"Database cache backend."
from django.core.cache.backends.base import BaseCache
from django.db import connection, transaction, DatabaseError
import base64, time
from datetime import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
class CacheClass(BaseCache):
def __init__(self, table, params):
BaseCache.__init__(self, params)
self._table = connection.ops.quote_name(table)
max_entries = params.get('max_entries', 300)
try:
self._max_entries = int(max_entries)
except (ValueError, TypeError):
self._max_entries = 300
cull_frequency = params.get('cull_frequency', 3)
try:
self._cull_frequency = int(cull_frequency)
except (ValueError, TypeError):
self._cull_frequency = 3
def get(self, key, default=None):
cursor = connection.cursor()
cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
row = cursor.fetchone()
if row is None:
return default
now = datetime.now()
if row[2] < now:
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
transaction.commit_unless_managed()
return default
value = connection.ops.process_clob(row[1])
return pickle.loads(base64.decodestring(value))
def set(self, key, value, timeout=None):
self._base_set('set', key, value, timeout)
def add(self, key, value, timeout=None):
return self._base_set('add', key, value, timeout)
def _base_set(self, mode, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
cursor = connection.cursor()
cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
num = cursor.fetchone()[0]
now = datetime.now().replace(microsecond=0)
exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
if num > self._max_entries:
self._cull(cursor, now)
encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
try:
result = cursor.fetchone()
if result and (mode == 'set' or
(mode == 'add' and result[1] < now)):
cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table,
[encoded, connection.ops.value_to_db_datetime(exp), key])
else:
cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table,
[key, encoded, connection.ops.value_to_db_datetime(exp)])
except DatabaseError:
# To be threadsafe, updates/inserts are allowed to fail silently
transaction.rollback_unless_managed()
return False
else:
transaction.commit_unless_managed()
return True
def delete(self, key):
cursor = connection.cursor()
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
transaction.commit_unless_managed()
def has_key(self, key):
now = datetime.now().replace(microsecond=0)
cursor = connection.cursor()
cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table,
[key, connection.ops.value_to_db_datetime(now)])
return cursor.fetchone() is not None
def _cull(self, cursor, now):
if self._cull_frequency == 0:
cursor.execute("DELETE FROM %s" % self._table)
else:
cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table,
[connection.ops.value_to_db_datetime(now)])
cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
num = cursor.fetchone()[0]
if num > self._max_entries:
cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
|
apache-2.0
| -6,734,824,525,095,844,000
| 43.204082
| 140
| 0.584949
| false
| 3.992627
| false
| false
| false
|
guori12321/todo
|
todo/generator.py
|
1
|
1176
|
# coding=utf8
"""
Generate a todo-format string from a list of tasks.
from todo.generator import generator
generator.generate(todo) # return str
"""
from models import Task
from models import Todo
class TodoGenerator(object):
"""
Generator from python list to string.
"""
g_newline = "\n"
def g_id(self, v):
return str(v) + "."
def g_done(self, v):
if v is True:
return '(x)'
else:
return ' '
def g_task(self, v):
return v
def gen_task(self, task):
lst = []
lst.append(self.g_id(task.id))
lst.append(self.g_done(task.done))
lst.append(self.g_task(task.content))
return " ".join(lst)
def generate(self, todo):
"""
Generate todo to string format.
e.g.
[<task object>, ..] => "1. (x) do something .."
"""
re = []
for i in todo:
if isinstance(i, Task):
re.append(self.gen_task(i))
else:
raise SyntaxError('Unsupported type: ' + str(type(i)))
return self.g_newline.join(re)
generator = TodoGenerator() # build generator
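# Usage sketch (Task fields assumed to be (id, content, done)):
#   generator.generate([Task(1, 'write docs', True), Task(2, 'ship', False)])
# yields one line per task, e.g. "1. (x) write docs" followed by the undone
# task rendered with blank padding in place of "(x)".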
|
mit
| 8,895,891,163,284,157,000
| 20.381818
| 65
| 0.527211
| false
| 3.68652
| false
| false
| false
|
Gnewt/bhs_sales
|
shirts/models.py
|
1
|
1486
|
from django.db import models
from json_field import JSONField
SMALL = 'S'
MEDIUM = 'M'
LARGE = 'L'
XLARGE = 'XL'
ITEM_SIZE_CHOICES = (
(SMALL, 'Small (S)'),
(MEDIUM, 'Medium (M)'),
(LARGE, 'Large (L)'),
(XLARGE, 'Extra Large (XL)'),
)
class StoreItem(models.Model):
name = models.CharField(max_length=128)
image = models.URLField()
description = models.TextField()
price = models.DecimalField(max_digits=5, decimal_places=2)
def __unicode__(self):
return self.name
class Order(models.Model):
item = models.ForeignKey("shirts.StoreItem")
first_name = models.CharField(max_length=128)
last_name = models.CharField(max_length=128)
size = models.CharField(max_length=2,
choices=ITEM_SIZE_CHOICES)
timestamp = models.DateTimeField(auto_now_add=True)
purchase_price = models.DecimalField(max_digits=5, decimal_places=2)
stripe_charge_id = models.CharField(max_length=64, blank=True, null=True)
notes = models.TextField(blank=True)
STRIPE = 'ST'
OTHER = 'OT'
PAYMENT_METHOD_CHOICES = (
(STRIPE, 'Stripe'),
(OTHER, 'Other'),
)
payment_method = models.CharField(max_length=2,
choices=PAYMENT_METHOD_CHOICES,
default=STRIPE)
def __unicode__(self):
return "%s %s: %s (%s)" % (self.first_name, self.last_name, self.item.name, self.size)
|
mit
| -7,521,620,094,661,558,000
| 29.326531
| 94
| 0.597577
| false
| 3.488263
| false
| false
| false
|
elgambitero/FreeCAD_sf_master
|
src/Mod/Arch/importIFC.py
|
1
|
62430
|
#***************************************************************************
#* *
#* Copyright (c) 2014 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__ = "FreeCAD IFC importer - Enhanced ifcopenshell-only version"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
import os,time,tempfile,uuid,FreeCAD,Part,Draft,Arch,math,DraftVecUtils
if open.__module__ == '__builtin__':
pyopen = open # because we'll redefine open below
# which IFC type must create which FreeCAD type
typesmap = { "Site": ["IfcSite"],
"Building": ["IfcBuilding"],
"Floor": ["IfcBuildingStorey"],
"Structure": ["IfcBeam", "IfcBeamStandardCase", "IfcColumn", "IfcColumnStandardCase", "IfcSlab", "IfcFooting", "IfcPile", "IfcTendon"],
"Wall": ["IfcWall", "IfcWallStandardCase", "IfcCurtainWall"],
"Window": ["IfcWindow", "IfcWindowStandardCase", "IfcDoor", "IfcDoorStandardCase"],
"Roof": ["IfcRoof"],
"Stairs": ["IfcStair", "IfcStairFlight", "IfcRamp", "IfcRampFlight"],
"Space": ["IfcSpace"],
"Rebar": ["IfcReinforcingBar"],
"Equipment": ["IfcFurnishingElement","IfcSanitaryTerminal","IfcFlowTerminal","IfcElectricAppliance"]
}
# which IFC entity (product) is a structural object
structuralifcobjects = (
"IfcStructuralCurveMember", "IfcStructuralSurfaceMember",
"IfcStructuralPointConnection", "IfcStructuralCurveConnection", "IfcStructuralSurfaceConnection",
"IfcStructuralAction", "IfcStructuralPointAction",
"IfcStructuralLinearAction", "IfcStructuralLinearActionVarying", "IfcStructuralPlanarAction"
)
# specific name translations
translationtable = { "Foundation":"Footing",
"Floor":"BuildingStorey",
"Rebar":"ReinforcingBar",
"HydroEquipment":"SanitaryTerminal",
"ElectricEquipment":"ElectricAppliance",
"Furniture":"FurnishingElement",
"Stair Flight":"StairFlight",
"Curtain Wall":"CurtainWall"
}
ifctemplate = """ISO-10303-21;
HEADER;
FILE_DESCRIPTION(('ViewDefinition [CoordinationView]'),'2;1');
FILE_NAME('$filename','$timestamp',('$owner','$email'),('$company'),'IfcOpenShell','IfcOpenShell','');
FILE_SCHEMA(('IFC2X3'));
ENDSEC;
DATA;
#1=IFCPERSON($,$,'$owner',$,$,$,$,$);
#2=IFCORGANIZATION($,'$company',$,$,$);
#3=IFCPERSONANDORGANIZATION(#1,#2,$);
#4=IFCAPPLICATION(#2,'$version','FreeCAD','118df2cf_ed21_438e_a41');
#5=IFCOWNERHISTORY(#3,#4,$,.ADDED.,$,#3,#4,$now);
#6=IFCDIRECTION((1.,0.,0.));
#7=IFCDIRECTION((0.,0.,1.));
#8=IFCCARTESIANPOINT((0.,0.,0.));
#9=IFCAXIS2PLACEMENT3D(#8,#7,#6);
#10=IFCDIRECTION((0.,1.,0.));
#11=IFCGEOMETRICREPRESENTATIONCONTEXT('Plan','Model',3,1.E-05,#9,#10);
#12=IFCDIMENSIONALEXPONENTS(0,0,0,0,0,0,0);
#13=IFCSIUNIT(*,.LENGTHUNIT.,$,.METRE.);
#14=IFCSIUNIT(*,.AREAUNIT.,$,.SQUARE_METRE.);
#15=IFCSIUNIT(*,.VOLUMEUNIT.,$,.CUBIC_METRE.);
#16=IFCSIUNIT(*,.PLANEANGLEUNIT.,$,.RADIAN.);
#17=IFCMEASUREWITHUNIT(IFCPLANEANGLEMEASURE(0.017453292519943295),#16);
#18=IFCCONVERSIONBASEDUNIT(#12,.PLANEANGLEUNIT.,'DEGREE',#17);
#19=IFCUNITASSIGNMENT((#13,#14,#15,#18));
#20=IFCPROJECT('$projectid',#5,'$project',$,$,$,$,(#11),#19);
ENDSEC;
END-ISO-10303-21;
"""
def decode(filename,utf=False):
if isinstance(filename,unicode):
# workaround since ifcopenshell currently can't handle unicode filenames
if utf:
encoding = "utf8"
else:
import sys
encoding = sys.getfilesystemencoding()
filename = filename.encode(encoding)
return filename
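# Sketch: decode(u"Projekt_M\xfcnchen.ifc") returns a bytestring in the
# filesystem encoding (or utf8 when utf=True), working around ifcopenshell's
# inability to handle unicode filenames.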
def doubleClickTree(item,column):
txt = item.text(column)
if "Entity #" in txt:
eid = txt.split("#")[1].split(":")[0]
addr = tree.findItems(eid,0,0)
if addr:
tree.scrollToItem(addr[0])
addr[0].setSelected(True)
def explore(filename=None):
"""explore([filename]): opens a dialog showing
the contents of an IFC file. If no filename is given, a dialog will
pop up to choose a file."""
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
DEBUG = p.GetBool("ifcDebug",False)
try:
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
if not filename:
from PySide import QtGui
filename = QtGui.QFileDialog.getOpenFileName(QtGui.qApp.activeWindow(),'IFC files','*.ifc')
if filename:
filename = filename[0]
from PySide import QtCore,QtGui
filename = decode(filename,utf=True)
if not os.path.exists(filename):
print "File not found"
return
ifc = ifcopenshell.open(filename)
global tree
tree = QtGui.QTreeWidget()
tree.setColumnCount(3)
tree.setWordWrap(True)
tree.header().setDefaultSectionSize(60)
tree.header().resizeSection(0,60)
tree.header().resizeSection(1,30)
tree.header().setStretchLastSection(True)
tree.headerItem().setText(0, "ID")
tree.headerItem().setText(1, "")
tree.headerItem().setText(2, "Item and Properties")
bold = QtGui.QFont()
bold.setWeight(75)
bold.setBold(True)
entities = ifc.by_type("IfcRoot")
entities += ifc.by_type("IfcRepresentation")
entities += ifc.by_type("IfcRepresentationItem")
entities += ifc.by_type("IfcPlacement")
entities += ifc.by_type("IfcProperty")
entities += ifc.by_type("IfcPhysicalSimpleQuantity")
entities += ifc.by_type("IfcMaterial")
entities += ifc.by_type("IfcProductRepresentation")
entities = sorted(entities, key=lambda eid: eid.id())
done = []
for entity in entities:
if hasattr(entity,"id"):
if entity.id() in done:
continue
done.append(entity.id())
item = QtGui.QTreeWidgetItem(tree)
item.setText(0,str(entity.id()))
if entity.is_a() in ["IfcWall","IfcWallStandardCase"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Wall_Tree.svg"))
elif entity.is_a() in ["IfcBuildingElementProxy"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Component.svg"))
elif entity.is_a() in ["IfcColumn","IfcColumnStandardCase","IfcBeam","IfcBeamStandardCase","IfcSlab","IfcFooting","IfcPile","IfcTendon"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Structure_Tree.svg"))
elif entity.is_a() in ["IfcSite"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Site_Tree.svg"))
elif entity.is_a() in ["IfcBuilding"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Building_Tree.svg"))
elif entity.is_a() in ["IfcBuildingStorey"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Floor_Tree.svg"))
elif entity.is_a() in ["IfcWindow","IfcWindowStandardCase","IfcDoor","IfcDoorStandardCase"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Window_Tree.svg"))
elif entity.is_a() in ["IfcRoof"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Roof_Tree.svg"))
elif entity.is_a() in ["IfcExtrudedAreaSolid","IfcClosedShell"]:
item.setIcon(1,QtGui.QIcon(":icons/Tree_Part.svg"))
elif entity.is_a() in ["IfcFace"]:
item.setIcon(1,QtGui.QIcon(":icons/Draft_SwitchMode.svg"))
elif entity.is_a() in ["IfcArbitraryClosedProfileDef","IfcPolyloop"]:
item.setIcon(1,QtGui.QIcon(":icons/Draft_Draft.svg"))
elif entity.is_a() in ["IfcPropertySingleValue","IfcQuantityArea","IfcQuantityVolume"]:
item.setIcon(1,QtGui.QIcon(":icons/Tree_Annotation.svg"))
elif entity.is_a() in ["IfcMaterial"]:
item.setIcon(1,QtGui.QIcon(":icons/Arch_Material.svg"))
item.setText(2,str(entity.is_a()))
item.setFont(2,bold);
i = 0
while True:
try:
argname = entity.attribute_name(i)
except:
break
else:
try:
argvalue = getattr(entity,argname)
except:
print "Error in entity ",entity
break
else:
if not argname in ["Id", "GlobalId"]:
colored = False
if isinstance(argvalue,ifcopenshell.entity_instance):
if argvalue.id() == 0:
t = str(argvalue)
else:
colored = True
t = "Entity #" + str(argvalue.id()) + ": " + str(argvalue.is_a())
elif isinstance(argvalue,list):
t = ""
else:
t = str(argvalue)
t = " " + str(argname) + " : " + str(t)
item = QtGui.QTreeWidgetItem(tree)
item.setText(2,str(t))
if colored:
item.setForeground(2,QtGui.QBrush(QtGui.QColor("#005AFF")))
if isinstance(argvalue,list):
for argitem in argvalue:
colored = False
if isinstance(argitem,ifcopenshell.entity_instance):
if argitem.id() == 0:
t = str(argitem)
else:
colored = True
t = "Entity #" + str(argitem.id()) + ": " + str(argitem.is_a())
else:
t = argitem
t = " " + str(t)
item = QtGui.QTreeWidgetItem(tree)
item.setText(2,str(t))
if colored:
item.setForeground(2,QtGui.QBrush(QtGui.QColor("#005AFF")))
i += 1
d = QtGui.QDialog()
d.setObjectName("IfcExplorer")
d.setWindowTitle("Ifc Explorer")
d.resize(640, 480)
layout = QtGui.QVBoxLayout(d)
layout.addWidget(tree)
tree.itemDoubleClicked.connect(doubleClickTree)
d.exec_()
del tree
return
def open(filename,skip=[],only=[],root=None):
"opens an IFC file in a new document"
docname = os.path.splitext(os.path.basename(filename))[0]
docname = decode(docname,utf=True)
doc = FreeCAD.newDocument(docname)
doc.Label = docname
doc = insert(filename,doc.Name,skip,only,root)
return doc
def insert(filename,docname,skip=[],only=[],root=None):
"""insert(filename,docname,skip=[],only=[],root=None): imports the contents of an IFC file.
skip can contain a list of ids of objects to be skipped, only can restrict the import to
certain object ids (will also get their children) and root can be used to
import only the derivatives of a certain element type (default = IfcProduct)."""
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
DEBUG = p.GetBool("ifcDebug",False)
PREFIX_NUMBERS = p.GetBool("ifcPrefixNumbers",False)
SKIP = p.GetString("ifcSkip","").split(",")
SEPARATE_OPENINGS = p.GetBool("ifcSeparateOpenings",False)
ROOT_ELEMENT = p.GetString("ifcRootElement","IfcProduct")
GET_EXTRUSIONS = p.GetBool("ifcGetExtrusions",False)
MERGE_MATERIALS = p.GetBool("ifcMergeMaterials",False)
if root:
ROOT_ELEMENT = root
MERGE_MODE_ARCH = p.GetInt("ifcImportModeArch",0)
MERGE_MODE_STRUCT = p.GetInt("ifcImportModeStruct",1)
if MERGE_MODE_ARCH > 0:
SEPARATE_OPENINGS = False
GET_EXTRUSIONS = False
if not SEPARATE_OPENINGS:
SKIP.append("IfcOpeningElement")
try:
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
if DEBUG: print "Opening ",filename,"...",
try:
doc = FreeCAD.getDocument(docname)
except:
doc = FreeCAD.newDocument(docname)
FreeCAD.ActiveDocument = doc
if DEBUG: print "done."
global ifcfile # keeping global for debugging purposes
filename = decode(filename,utf=True)
ifcfile = ifcopenshell.open(filename)
from ifcopenshell import geom
settings = ifcopenshell.geom.settings()
settings.set(settings.USE_BREP_DATA,True)
settings.set(settings.SEW_SHELLS,True)
settings.set(settings.USE_WORLD_COORDS,True)
if SEPARATE_OPENINGS:
settings.set(settings.DISABLE_OPENING_SUBTRACTIONS,True)
if MERGE_MODE_STRUCT != 3:
try:
settings.set(settings.INCLUDE_CURVES,True)
except:
FreeCAD.Console.PrintError("Set INCLUDE_CURVES failed. IfcOpenShell seams to be an Outdated Developer Version.\n")
FreeCAD.Console.PrintError("Import of StructuralAnalysisView Entities will not work!\n")
sites = ifcfile.by_type("IfcSite")
buildings = ifcfile.by_type("IfcBuilding")
floors = ifcfile.by_type("IfcBuildingStorey")
products = ifcfile.by_type(ROOT_ELEMENT)
openings = ifcfile.by_type("IfcOpeningElement")
annotations = ifcfile.by_type("IfcAnnotation")
materials = ifcfile.by_type("IfcMaterial")
if DEBUG: print "Building relationships table...",
# building relations tables
objects = {} # { id:object, ... }
additions = {} # { host:[child,...], ... }
groups = {} # { host:[child,...], ... } # used in structural IFC
subtractions = [] # [ [opening,host], ... ]
properties = {} # { host:[property, ...], ... }
colors = {} # { id:(r,g,b) }
shapes = {} # { id:shape } only used for merge mode
structshapes = {} # { id:shape } only used for merge mode
mattable = {} # { objid:matid }
sharedobjects = {} # { representationmapid:object }
for r in ifcfile.by_type("IfcRelContainedInSpatialStructure"):
additions.setdefault(r.RelatingStructure.id(),[]).extend([e.id() for e in r.RelatedElements])
for r in ifcfile.by_type("IfcRelAggregates"):
additions.setdefault(r.RelatingObject.id(),[]).extend([e.id() for e in r.RelatedObjects])
for r in ifcfile.by_type("IfcRelAssignsToGroup"):
groups.setdefault(r.RelatingGroup.id(),[]).extend([e.id() for e in r.RelatedObjects])
for r in ifcfile.by_type("IfcRelVoidsElement"):
subtractions.append([r.RelatedOpeningElement.id(), r.RelatingBuildingElement.id()])
for r in ifcfile.by_type("IfcRelDefinesByProperties"):
for obj in r.RelatedObjects:
if r.RelatingPropertyDefinition.is_a("IfcPropertySet"):
properties.setdefault(obj.id(),[]).extend([e.id() for e in r.RelatingPropertyDefinition.HasProperties])
for r in ifcfile.by_type("IfcRelAssociatesMaterial"):
for o in r.RelatedObjects:
mattable[o.id()] = r.RelatingMaterial.id()
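# Resolve surface colours: each IfcStyledItem wraps an
# IfcPresentationStyleAssignment -> IfcSurfaceStyle -> IfcSurfaceStyleRendering
# chain; the resulting colour is attached either to a product (matched through
# its representation items) or to a material (through
# IfcMaterialDefinitionRepresentation).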
for r in ifcfile.by_type("IfcStyledItem"):
if r.Styles[0].is_a("IfcPresentationStyleAssignment"):
if r.Styles[0].Styles[0].is_a("IfcSurfaceStyle"):
if r.Styles[0].Styles[0].Styles[0].is_a("IfcSurfaceStyleRendering"):
if r.Styles[0].Styles[0].Styles[0].SurfaceColour:
c = r.Styles[0].Styles[0].Styles[0].SurfaceColour
if r.Item:
for p in ifcfile.by_type("IfcProduct"):
if p.Representation:
for it in p.Representation.Representations:
if it.Items:
if it.Items[0].id() == r.Item.id():
colors[p.id()] = (c.Red,c.Green,c.Blue)
elif it.Items[0].is_a("IfcBooleanResult"):
if (it.Items[0].FirstOperand.id() == r.Item.id()):
colors[p.id()] = (c.Red,c.Green,c.Blue)
else:
for m in ifcfile.by_type("IfcMaterialDefinitionRepresentation"):
for it in m.Representations:
if it.Items:
if it.Items[0].id() == r.id():
colors[m.RepresentedMaterial.id()] = (c.Red,c.Green,c.Blue)
if only: # only import a list of IDs and their children
ids = []
while only:
currentid = only.pop()
ids.append(currentid)
if currentid in additions.keys():
only.extend(additions[currentid])
products = [ifcfile[currentid] for currentid in ids]
if DEBUG: print "done."
count = 0
from FreeCAD import Base
progressbar = Base.ProgressIndicator()
progressbar.start("Importing IFC objects...",len(products))
if DEBUG: print "Processing objects..."
# products
for product in products:
pid = product.id()
guid = product.GlobalId
ptype = product.is_a()
if DEBUG: print count+1,"/",len(products)," creating object #",pid," : ",ptype,
name = str(ptype[3:])
if product.Name:
name = product.Name.decode("unicode_escape").encode("utf8")
if PREFIX_NUMBERS: name = "ID" + str(pid) + " " + name
obj = None
baseobj = None
brep = None
shape = None
archobj = True # assume all objects not in structuralifcobjects are architecture
if ptype in structuralifcobjects:
archobj = False
if DEBUG: print " (struct)",
else:
if DEBUG: print " (arch)",
if MERGE_MODE_ARCH == 4 and archobj:
if DEBUG: print " skipped."
continue
if MERGE_MODE_STRUCT == 3 and not archobj:
if DEBUG: print " skipped."
continue
if pid in skip: # user given id skip list
if DEBUG: print " skipped."
continue
if ptype in SKIP: # preferences-set type skip list
if DEBUG: print " skipped."
continue
# detect if this object is sharing its shape
clone = None
store = None
if product.Representation and MERGE_MODE_ARCH == 0 and archobj:
for s in product.Representation.Representations:
if s.RepresentationIdentifier.upper() == "BODY":
if s.Items[0].is_a("IfcMappedItem"):
bid = s.Items[0].MappingSource.id()
if bid in sharedobjects:
clone = sharedobjects[bid]
else:
sharedobjects[bid] = None
store = bid
try:
cr = ifcopenshell.geom.create_shape(settings,product)
brep = cr.geometry.brep_data
except:
pass # IfcOpenShell will yield an error if a given product has no shape, but we don't care
if brep:
if DEBUG: print " ",str(len(brep)/1000),"k ",
shape = Part.Shape()
shape.importBrepFromString(brep)
shape.scale(1000.0) # IfcOpenShell always outputs in meters
if not shape.isNull():
if (MERGE_MODE_ARCH > 0 and archobj) or not archobj:
if ptype == "IfcSpace": # do not add spaces to compounds
if DEBUG: print "skipping space ",pid
elif not archobj:
structshapes[pid] = shape
if DEBUG: print shape.Solids," ",
baseobj = shape
else:
shapes[pid] = shape
if DEBUG: print shape.Solids," ",
baseobj = shape
else:
if clone:
if DEBUG: print "clone ",
else:
if GET_EXTRUSIONS:
ex = Arch.getExtrusionData(shape)
if ex:
print "extrusion ",
baseface = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_footprint")
baseface.Shape = ex[0]
baseobj = FreeCAD.ActiveDocument.addObject("Part::Extrusion",name+"_body")
baseobj.Base = baseface
baseobj.Dir = ex[1]
if FreeCAD.GuiUp:
baseface.ViewObject.hide()
if (not baseobj):
baseobj = FreeCAD.ActiveDocument.addObject("Part::Feature",name+"_body")
baseobj.Shape = shape
else:
if DEBUG: print "null shape ",
if not shape.isValid():
if DEBUG: print "invalid shape ",
#continue
else:
if DEBUG: print " no brep ",
if MERGE_MODE_ARCH == 0 and archobj:
# full Arch objects
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
if clone:
obj = getattr(Arch,"make"+freecadtype)(name=name)
obj.CloneOf = clone
if shape:
v = shape.Solids[0].CenterOfMass.sub(clone.Shape.Solids[0].CenterOfMass)
r = getRotation(product)
if not r.isNull():
v = v.add(clone.Shape.Solids[0].CenterOfMass)
v = v.add(r.multVec(clone.Shape.Solids[0].CenterOfMass.negative()))
obj.Placement.Rotation = r
obj.Placement.move(v)
else:
obj = getattr(Arch,"make"+freecadtype)(baseobj=baseobj,name=name)
if store:
sharedobjects[store] = obj
obj.Label = name
if FreeCAD.GuiUp and baseobj:
if hasattr(baseobj,"ViewObject"):
baseobj.ViewObject.hide()
# setting role
try:
r = ptype[3:]
tr = dict((v,k) for k, v in translationtable.iteritems())
if r in tr.keys():
r = tr[r]
# remove the "StandardCase"
if "StandardCase" in r:
r = r[:-12]
obj.Role = r
except:
pass
# setting uid
if hasattr(obj,"IfcAttributes"):
a = obj.IfcAttributes
a["IfcUID"] = str(guid)
obj.IfcAttributes = a
break
if not obj:
obj = Arch.makeComponent(baseobj,name=name)
if obj:
sols = str(obj.Shape.Solids) if hasattr(obj,"Shape") else ""
if DEBUG: print sols
objects[pid] = obj
elif (MERGE_MODE_ARCH == 1 and archobj) or (MERGE_MODE_STRUCT == 0 and not archobj):
# non-parametric Arch objects
if ptype in ["IfcSite","IfcBuilding","IfcBuildingStorey"]:
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
obj = getattr(Arch,"make"+freecadtype)(baseobj=None,name=name)
elif baseobj:
obj = Arch.makeComponent(baseobj,name=name,delete=True)
elif (MERGE_MODE_ARCH == 2 and archobj) or (MERGE_MODE_STRUCT == 1 and not archobj):
# Part shapes
if ptype in ["IfcSite","IfcBuilding","IfcBuildingStorey"]:
for freecadtype,ifctypes in typesmap.items():
if ptype in ifctypes:
obj = getattr(Arch,"make"+freecadtype)(baseobj=None,name=name)
elif baseobj:
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Shape = shape
if obj:
obj.Label = name
objects[pid] = obj
# properties
if pid in properties:
if hasattr(obj,"IfcAttributes"):
a = obj.IfcAttributes
for p in properties[pid]:
o = ifcfile[p]
if o.is_a("IfcPropertySingleValue"):
a[o.Name.decode("unicode_escape").encode("utf8")] = str(o.NominalValue)
obj.IfcAttributes = a
# color
if FreeCAD.GuiUp and (pid in colors) and hasattr(obj.ViewObject,"ShapeColor"):
if DEBUG: print " setting color: ",int(colors[pid][0]*255),"/",int(colors[pid][1]*255),"/",int(colors[pid][2]*255)
obj.ViewObject.ShapeColor = colors[pid]
# if DEBUG is on, recompute after each shape
if DEBUG: FreeCAD.ActiveDocument.recompute()
count += 1
progressbar.next()
progressbar.stop()
FreeCAD.ActiveDocument.recompute()
if MERGE_MODE_STRUCT == 2:
if DEBUG: print "Joining Structural shapes..."
for host,children in groups.items(): # Structural
if ifcfile[host].is_a("IfcStructuralAnalysisModel"):
compound = []
for c in children:
if c in structshapes.keys():
compound.append(structshapes[c])
del structshapes[c]
if compound:
name = ifcfile[host].Name or "AnalysisModel"
if PREFIX_NUMBERS: name = "ID" + str(host) + " " + name
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Label = name
obj.Shape = Part.makeCompound(compound)
if structshapes: # remaining Structural shapes
obj = FreeCAD.ActiveDocument.addObject("Part::Feature","UnclaimedStruct")
obj.Shape = Part.makeCompound(structshapes.values())
else:
if DEBUG: print "Processing Struct relationships..."
# groups
for host,children in groups.items():
if ifcfile[host].is_a("IfcStructuralAnalysisModel"):
# print host, ' --> ', children
obj = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroup","AnalysisModel")
objects[host] = obj
if host in objects.keys():
cobs = [objects[child] for child in children if child in objects.keys()]
if cobs:
if DEBUG: print "adding ",len(cobs), " object(s) to ", objects[host].Label
Arch.addComponents(cobs,objects[host])
if DEBUG: FreeCAD.ActiveDocument.recompute()
if MERGE_MODE_ARCH > 2: # if ArchObj is compound or ArchObj not imported
FreeCAD.ActiveDocument.recompute()
# cleaning bad shapes
for obj in objects.values():
if obj.isDerivedFrom("Part::Feature"):
if obj.Shape.isNull():
Arch.rebuildArchShape(obj)
if MERGE_MODE_ARCH == 3:
if DEBUG: print "Joining Arch shapes..."
for host,children in additions.items(): # Arch
if ifcfile[host].is_a("IfcBuildingStorey"):
compound = []
for c in children:
if c in shapes.keys():
compound.append(shapes[c])
del shapes[c]
if c in additions.keys():
for c2 in additions[c]:
if c2 in shapes.keys():
compound.append(shapes[c2])
del shapes[c2]
if compound:
name = ifcfile[host].Name or "Floor"
if PREFIX_NUMBERS: name = "ID" + str(host) + " " + name
obj = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
obj.Label = name
obj.Shape = Part.makeCompound(compound)
if shapes: # remaining Arch shapes
obj = FreeCAD.ActiveDocument.addObject("Part::Feature","UnclaimedArch")
obj.Shape = Part.makeCompound(shapes.values())
else:
if DEBUG: print "Processing Arch relationships..."
# subtractions
if SEPARATE_OPENINGS:
for subtraction in subtractions:
if (subtraction[0] in objects.keys()) and (subtraction[1] in objects.keys()):
if DEBUG: print "subtracting ",objects[subtraction[0]].Label, " from ", objects[subtraction[1]].Label
Arch.removeComponents(objects[subtraction[0]],objects[subtraction[1]])
if DEBUG: FreeCAD.ActiveDocument.recompute()
# additions
for host,children in additions.items():
if host in objects.keys():
cobs = [objects[child] for child in children if child in objects.keys()]
if cobs:
if DEBUG and (len(cobs) > 10) and ( not(Draft.getType(objects[host]) in ["Site","Building","Floor"])):
# avoid huge fusions
print "more than 10 shapes to add: skipping."
else:
if DEBUG: print "adding ",len(cobs), " object(s) to ", objects[host].Label
Arch.addComponents(cobs,objects[host])
if DEBUG: FreeCAD.ActiveDocument.recompute()
FreeCAD.ActiveDocument.recompute()
# cleaning bad shapes
for obj in objects.values():
if obj.isDerivedFrom("Part::Feature"):
if obj.Shape.isNull():
Arch.rebuildArchShape(obj)
FreeCAD.ActiveDocument.recompute()
# 2D elements
if DEBUG and annotations: print "Creating 2D geometry..."
for annotation in annotations:
aid = annotation.id()
if aid in skip: continue # user given id skip list
if "IfcAnnotation" in SKIP: continue # preferences-set type skip list
name = "Annotation"
if annotation.Name:
name = annotation.Name.decode("unicode_escape").encode("utf8")
if PREFIX_NUMBERS: name = "ID" + str(aid) + " " + name
shapes2d = []
for repres in annotation.Representation.Representations:
shapes2d.extend(setRepresentation(repres))
if shapes2d:
sh = Part.makeCompound(shapes2d)
pc = str(int((float(count)/(len(products)+len(annotations))*100)))+"% "
if DEBUG: print pc,"creating object ",aid," : Annotation with shape: ",sh
o = FreeCAD.ActiveDocument.addObject("Part::Feature",name)
o.Shape = sh
count += 1
FreeCAD.ActiveDocument.recompute()
# Materials
if DEBUG and materials: print "Creating materials..."
fcmats = {}
for material in materials:
name = "Material"
if material.Name:
name = material.Name.decode("unicode_escape").encode("utf8")
if MERGE_MATERIALS and (name in fcmats.keys()):
mat = fcmats[name]
else:
mat = Arch.makeMaterial(name=name)
mdict = {}
if material.id() in colors:
mdict["Color"] = str(colors[material.id()])
if mdict:
mat.Material = mdict
fcmats[name] = mat
for o,m in mattable.items():
if m == material.id():
if o in objects:
if hasattr(objects[o],"BaseMaterial"):
objects[o].BaseMaterial = mat
FreeCAD.ActiveDocument.recompute()
if FreeCAD.GuiUp:
import FreeCADGui
FreeCADGui.SendMsgToActiveView("ViewFit")
print "Finished importing."
return doc
def export(exportList,filename):
"exports FreeCAD contents to an IFC file"
p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
FORCEBREP = p.GetBool("ifcExportAsBrep",False)
DEBUG = p.GetBool("ifcDebug",False)
try:
global ifcopenshell
import ifcopenshell
except:
FreeCAD.Console.PrintError("IfcOpenShell was not found on this system. IFC support is disabled\n")
return
version = FreeCAD.Version()
owner = FreeCAD.ActiveDocument.CreatedBy
email = ''
if ("@" in owner) and ("<" in owner):
s = owner.split("<")
owner = s[0]
email = s[1].strip(">")
global template
template = ifctemplate.replace("$version",version[0]+"."+version[1]+" build "+version[2])
template = template.replace("$owner",owner)
template = template.replace("$company",FreeCAD.ActiveDocument.Company)
template = template.replace("$email",email)
template = template.replace("$now",str(int(time.time())))
template = template.replace("$projectid",FreeCAD.ActiveDocument.Uid[:22].replace("-","_"))
template = template.replace("$project",FreeCAD.ActiveDocument.Name)
template = template.replace("$filename",filename)
template = template.replace("$timestamp",str(time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime())))
templatefile = tempfile.mkstemp(suffix=".ifc")[1]
of = pyopen(templatefile,"wb")
of.write(template.encode("utf8"))
of.close()
global ifcfile, surfstyles, clones, sharedobjects
ifcfile = ifcopenshell.open(templatefile)
history = ifcfile.by_type("IfcOwnerHistory")[0]
context = ifcfile.by_type("IfcGeometricRepresentationContext")[0]
project = ifcfile.by_type("IfcProject")[0]
objectslist = Draft.getGroupContents(exportList,walls=True,addgroups=True)
objectslist = Arch.pruneIncluded(objectslist)
products = {} # { Name: IfcEntity, ... }
surfstyles = {} # { (r,g,b): IfcEntity, ... }
clones = {} # { Basename:[Clonename1,Clonename2,...] }
sharedobjects = {} # { BaseName: IfcRepresentationMap }
count = 1
# build clones table
for o in objectslist:
b = Draft.getCloneBase(o,strict=True)
if b:
clones.setdefault(b.Name,[]).append(o.Name)
print "clones table: ",clones
print objectslist
# products
for obj in objectslist:
# getting generic data
name = str(obj.Label.encode("utf8"))
description = str(obj.Description) if hasattr(obj,"Description") else ""
# getting uid
uid = None
if hasattr(obj,"IfcAttributes"):
if "IfcUID" in obj.IfcAttributes.keys():
uid = str(obj.IfcAttributes["IfcUID"])
if not uid:
uid = ifcopenshell.guid.compress(uuid.uuid1().hex)
# setting the IFC type + name conversions
if hasattr(obj,"Role"):
ifctype = obj.Role.replace(" ","")
else:
ifctype = Draft.getType(obj)
if ifctype in translationtable.keys():
ifctype = translationtable[ifctype]
ifctype = "Ifc" + ifctype
if ifctype == "IfcGroup":
continue
ifctypes = []
for v in typesmap.values():
ifctypes.extend(v)
if not ifctype in ifctypes:
ifctype = "IfcBuildingElementProxy"
# getting the "Force BREP" flag
brepflag = False
if hasattr(obj,"IfcAttributes"):
if "FlagForceBrep" in obj.IfcAttributes.keys():
if obj.IfcAttributes["FlagForceBrep"] == "True":
brepflag = True
# getting the representation
representation,placement,shapetype = getRepresentation(ifcfile,context,obj,forcebrep=(brepflag or FORCEBREP))
if DEBUG: print str(count).ljust(3)," : ", ifctype, " (",shapetype,") : ",name
# setting the arguments
args = [uid,history,name,description,None,placement,representation,None]
if ifctype in ["IfcSlab","IfcFooting","IfcRoof"]:
args = args + ["NOTDEFINED"]
elif ifctype in ["IfcWindow","IfcDoor"]:
args = args + [obj.Width.Value/1000.0, obj.Height.Value/1000.0]
elif ifctype == "IfcSpace":
args = args + ["ELEMENT","INTERNAL",obj.Shape.BoundBox.ZMin/1000.0]
elif ifctype == "IfcBuildingElementProxy":
args = args + ["ELEMENT"]
elif ifctype == "IfcSite":
latitude = None
longitude = None
elevation = None
landtitlenumber = None
address = None
args = args + ["ELEMENT",latitude,longitude,elevation,landtitlenumber,address]
elif ifctype == "IfcBuilding":
args = args + ["ELEMENT",None,None,None]
elif ifctype == "IfcBuildingStorey":
args = args + ["ELEMENT",obj.Placement.Base.z]
# creating the product
product = getattr(ifcfile,"create"+ifctype)(*args)
products[obj.Name] = product
# additions
if hasattr(obj,"Additions") and (shapetype == "extrusion"):
for o in obj.Additions:
r2,p2,c2 = getRepresentation(ifcfile,context,o,forcebrep=True)
if DEBUG: print " adding ",c2," : ",str(o.Label)
prod2 = ifcfile.createIfcBuildingElementProxy(ifcopenshell.guid.compress(uuid.uuid1().hex),history,str(o.Label),None,None,p2,r2,None,"ELEMENT")
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'Addition','',product,[prod2])
# subtractions
if hasattr(obj,"Subtractions") and (shapetype == "extrusion"):
for o in obj.Subtractions:
r2,p2,c2 = getRepresentation(ifcfile,context,o,forcebrep=True,subtraction=True)
if DEBUG: print " subtracting ",c2," : ",str(o.Label)
prod2 = ifcfile.createIfcOpeningElement(ifcopenshell.guid.compress(uuid.uuid1().hex),history,str(o.Label),None,None,p2,r2,None)
ifcfile.createIfcRelVoidsElement(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'Subtraction','',product,prod2)
# properties
if hasattr(obj,"IfcAttributes"):
props = []
for key in obj.IfcAttributes:
if not (key in ["IfcUID","FlagForceBrep"]):
r = obj.IfcAttributes[key].strip(")").split("(")
if len(r) == 1:
tp = "IfcText"
val = r[0]
else:
tp = r[0]
val = "(".join(r[1:])
val = val.strip("'")
val = val.strip('"')
if DEBUG: print " property ",key," : ",str(val), " (", str(tp), ")"
if tp in ["IfcLabel","IfcText","IfcIdentifier"]:
val = str(val)
elif tp == "IfcBoolean":
if val == ".T.":
val = True
else:
val = False
elif tp == "IfcInteger":
val = int(val)
else:
val = float(val)
props.append(ifcfile.createIfcPropertySingleValue(str(key),None,ifcfile.create_entity(str(tp),val),None))
if props:
pset = ifcfile.createIfcPropertySet(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'PropertySet',None,props)
ifcfile.createIfcRelDefinesByProperties(ifcopenshell.guid.compress(uuid.uuid1().hex),history,None,None,[product],pset)
count += 1
# relationships
sites = []
buildings = []
floors = []
for site in Draft.getObjectsOfType(objectslist,"Site"):
for building in Draft.getObjectsOfType(site.Group,"Building"):
for floor in Draft.getObjectsOfType(building.Group,"Floor"):
children = Draft.getGroupContents(floor,walls=True)
children = Arch.pruneIncluded(children)
children = [products[c.Name] for c in children if c.Name in products.keys()]
floor = products[floor.Name]
ifcfile.createIfcRelContainedInSpatialStructure(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'StoreyLink','',children,floor)
floors.append(floor)
building = products[building.Name]
if floors:
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'BuildingLink','',building,floors)
buildings.append(building)
site = products[site.Name]
if buildings:
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'SiteLink','',site,buildings)
sites.append(site)
if not sites:
if DEBUG: print "adding default site"
sites = [ifcfile.createIfcSite(ifcopenshell.guid.compress(uuid.uuid1().hex),history,"Default Site",'',None,None,None,None,"ELEMENT",None,None,None,None,None)]
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'ProjectLink','',project,sites)
if not buildings:
if DEBUG: print "adding default building"
buildings = [ifcfile.createIfcBuilding(ifcopenshell.guid.compress(uuid.uuid1().hex),history,"Default Building",'',None,None,None,None,"ELEMENT",None,None,None)]
ifcfile.createIfcRelAggregates(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'SiteLink','',sites[0],buildings)
ifcfile.createIfcRelContainedInSpatialStructure(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'BuildingLink','',products.values(),buildings[0])
# materials
materials = {}
for m in Arch.getDocumentMaterials():
mat = ifcfile.createIfcMaterial(m.Label.encode("utf8"))
materials[m.Label] = mat
if "Color" in m.Material:
rgb = tuple([float(f) for f in m.Material['Color'].strip("()").split(",")])
col = ifcfile.createIfcColourRgb(None,rgb[0],rgb[1],rgb[2])
ssr = ifcfile.createIfcSurfaceStyleRendering(col,None,None,None,None,None,None,None,"FLAT")
iss = ifcfile.createIfcSurfaceStyle(None,"BOTH",[ssr])
psa = ifcfile.createIfcPresentationStyleAssignment([iss])
isi = ifcfile.createIfcStyledItem(None,[psa],None)
isr = ifcfile.createIfcStyledRepresentation(context,"Style","Material",[isi])
imd = ifcfile.createIfcMaterialDefinitionRepresentation(None,None,[isr],mat)
relobjs = []
for o in m.InList:
if hasattr(o,"BaseMaterial"):
if o.BaseMaterial:
if o.BaseMaterial.Name == m.Name:
if o.Name in products:
relobjs.append(products[o.Name])
if relobjs:
ifcfile.createIfcRelAssociatesMaterial(ifcopenshell.guid.compress(uuid.uuid1().hex),history,'MaterialLink','',relobjs,mat)
if DEBUG: print "writing ",filename,"..."
filename = decode(filename)
ifcfile.write(filename)
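# Example usage (a minimal sketch; assumes a FreeCAD document containing Arch
# objects; the output path is hypothetical):
#
#   import FreeCAD, importIFC
#   importIFC.export(FreeCAD.ActiveDocument.Objects, "/tmp/model.ifc")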
def getRepresentation(ifcfile,context,obj,forcebrep=False,subtraction=False,tessellation=1):
"""returns an IfcShapeRepresentation object or None"""
import Part,math,DraftGeomUtils,DraftVecUtils
shapes = []
placement = None
productdef = None
shapetype = "no shape"
tostore = False
# check for clones
for k,v in clones.items():
if (obj.Name == k ) or (obj.Name in v):
if k in sharedobjects:
# base shape already exists
repmap = sharedobjects[k]
pla = obj.Placement
axis1 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(1,0,0))))
axis2 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,1,0))))
axis3 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,0,1))))
origin = ifcfile.createIfcCartesianPoint(tuple(FreeCAD.Vector(pla.Base).multiply(0.001)))
transf = ifcfile.createIfcCartesianTransformationOperator3D(axis1,axis2,origin,1.0,axis3)
mapitem = ifcfile.createIfcMappedItem(repmap,transf)
shapes = [mapitem]
solidType = "MappedRepresentation"
shapetype = "clone"
else:
# base shape not yet created
tostore = k
if (not shapes) and (not forcebrep):
profile = None
if hasattr(obj,"Proxy"):
if hasattr(obj.Proxy,"getProfiles"):
p = obj.Proxy.getProfiles(obj,noplacement=True)
extrusionv = obj.Proxy.getExtrusionVector(obj,noplacement=True)
if not DraftVecUtils.isNull(extrusionv):
extrusionv.multiply(0.001) # to meters
if (len(p) == 1) and extrusionv:
p = p[0]
p.scale(0.001) # to meters
r = obj.Proxy.getPlacement(obj)
r.Base = r.Base.multiply(0.001) # to meters
if len(p.Edges) == 1:
pxvc = ifcfile.createIfcDirection((1.0,0.0))
povc = ifcfile.createIfcCartesianPoint((0.0,0.0))
pt = ifcfile.createIfcAxis2Placement2D(povc,pxvc)
# extruded circle
if isinstance(p.Edges[0].Curve,Part.Circle):
profile = ifcfile.createIfcCircleProfileDef("AREA",None,pt, p.Edges[0].Curve.Radius)
# extruded ellipse
elif isinstance(p.Edges[0].Curve,Part.Ellipse):
profile = ifcfile.createIfcEllipseProfileDef("AREA",None,pt, p.Edges[0].Curve.MajorRadius, p.Edges[0].Curve.MinorRadius)
else:
curves = False
for e in p.Edges:
if isinstance(e.Curve,Part.Circle):
curves = True
# extruded polyline
if not curves:
w = Part.Wire(DraftGeomUtils.sortEdges(p.Edges))
pts = [ifcfile.createIfcCartesianPoint(tuple(v.Point)[:2]) for v in w.Vertexes+[w.Vertexes[0]]]
pol = ifcfile.createIfcPolyline(pts)
# extruded composite curve
else:
segments = []
last = None
edges = DraftGeomUtils.sortEdges(p.Edges)
for e in edges:
if isinstance(e.Curve,Part.Circle):
follow = True
if last:
if not DraftVecUtils.equals(last,e.Vertexes[0].Point):
follow = False
last = e.Vertexes[0].Point
else:
last = e.Vertexes[-1].Point
else:
last = e.Vertexes[-1].Point
p1 = math.degrees(-DraftVecUtils.angle(e.Vertexes[0].Point.sub(e.Curve.Center)))
p2 = math.degrees(-DraftVecUtils.angle(e.Vertexes[-1].Point.sub(e.Curve.Center)))
da = DraftVecUtils.angle(e.valueAt(e.FirstParameter+0.1).sub(e.Curve.Center),e.Vertexes[0].Point.sub(e.Curve.Center))
if p1 < 0:
p1 = 360 + p1
if p2 < 0:
p2 = 360 + p2
if da > 0:
follow = not(follow)
xvc = ifcfile.createIfcDirection((1.0,0.0))
ovc = ifcfile.createIfcCartesianPoint(tuple(e.Curve.Center)[:2])
plc = ifcfile.createIfcAxis2Placement2D(ovc,xvc)
cir = ifcfile.createIfcCircle(plc,e.Curve.Radius)
curve = ifcfile.createIfcTrimmedCurve(cir,[ifcfile.createIfcParameterValue(p1)],[ifcfile.createIfcParameterValue(p2)],follow,"PARAMETER")
else:
verts = [vertex.Point for vertex in e.Vertexes]
if last:
if not DraftVecUtils.equals(last,verts[0]):
verts.reverse()
last = e.Vertexes[0].Point
else:
last = e.Vertexes[-1].Point
else:
last = e.Vertexes[-1].Point
pts = [ifcfile.createIfcCartesianPoint(tuple(v)[:2]) for v in verts]
curve = ifcfile.createIfcPolyline(pts)
segment = ifcfile.createIfcCompositeCurveSegment("CONTINUOUS",True,curve)
segments.append(segment)
pol = ifcfile.createIfcCompositeCurve(segments,False)
profile = ifcfile.createIfcArbitraryClosedProfileDef("AREA",None,pol)
if profile:
xvc = ifcfile.createIfcDirection(tuple(r.Rotation.multVec(FreeCAD.Vector(1,0,0))))
zvc = ifcfile.createIfcDirection(tuple(r.Rotation.multVec(FreeCAD.Vector(0,0,1))))
ovc = ifcfile.createIfcCartesianPoint(tuple(r.Base))
lpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
edir = ifcfile.createIfcDirection(tuple(FreeCAD.Vector(extrusionv).normalize()))
shape = ifcfile.createIfcExtrudedAreaSolid(profile,lpl,edir,extrusionv.Length)
shapes.append(shape)
solidType = "SweptSolid"
shapetype = "extrusion"
if not shapes:
# brep representation
fcshape = None
solidType = "Brep"
if subtraction:
if hasattr(obj,"Proxy"):
if hasattr(obj.Proxy,"getSubVolume"):
fcshape = obj.Proxy.getSubVolume(obj)
if not fcshape:
if hasattr(obj,"Shape"):
if obj.Shape:
if not obj.Shape.isNull():
fcshape = obj.Shape
elif hasattr(obj,"Terrain"):
if obj.Terrain:
if hasattr(obj.Terrain,"Shape"):
if obj.Terrain.Shape:
if not obj.Terrain.Shape.isNull():
fcshape = obj.Terrain.Shape
if fcshape:
solids = []
if fcshape.Solids:
dataset = fcshape.Solids
else:
dataset = fcshape.Shells
print "Warning! object contains no solids"
for fcsolid in dataset:
fcsolid.scale(0.001) # to meters
faces = []
curves = False
for fcface in fcsolid.Faces:
for e in fcface.Edges:
if not isinstance(e.Curve,Part.Line):
if e.curvatureAt(e.FirstParameter+(e.LastParameter-e.FirstParameter)/2) > 0.0001:
curves = True
break
if curves:
#shapetype = "triangulated"
#tris = fcsolid.tessellate(tessellation)
#for tri in tris[1]:
# pts = [ifcfile.createIfcCartesianPoint(tuple(tris[0][i])) for i in tri]
# loop = ifcfile.createIfcPolyLoop(pts)
# bound = ifcfile.createIfcFaceOuterBound(loop,True)
# face = ifcfile.createIfcFace([bound])
# faces.append(face)
fcsolid = Arch.removeCurves(fcsolid)
shapetype = "brep"
for fcface in fcsolid.Faces:
loops = []
verts = [v.Point for v in Part.Wire(DraftGeomUtils.sortEdges(fcface.OuterWire.Edges)).Vertexes]
c = fcface.CenterOfMass
v1 = verts[0].sub(c)
v2 = verts[1].sub(c)
n = fcface.normalAt(0,0)
if DraftVecUtils.angle(v2,v1,n) >= 0:
verts.reverse() # inverting verts order if the direction is counterclockwise
pts = [ifcfile.createIfcCartesianPoint(tuple(v)) for v in verts]
loop = ifcfile.createIfcPolyLoop(pts)
bound = ifcfile.createIfcFaceOuterBound(loop,True)
loops.append(bound)
for wire in fcface.Wires:
if wire.hashCode() != fcface.OuterWire.hashCode():
verts = [v.Point for v in Part.Wire(DraftGeomUtils.sortEdges(wire.Edges)).Vertexes]
v1 = verts[0].sub(c)
v2 = verts[1].sub(c)
if DraftVecUtils.angle(v2,v1,DraftVecUtils.neg(n)) >= 0:
verts.reverse()
pts = [ifcfile.createIfcCartesianPoint(tuple(v)) for v in verts]
loop = ifcfile.createIfcPolyLoop(pts)
bound = ifcfile.createIfcFaceBound(loop,True)
loops.append(bound)
face = ifcfile.createIfcFace(loops)
faces.append(face)
shell = ifcfile.createIfcClosedShell(faces)
shape = ifcfile.createIfcFacetedBrep(shell)
shapes.append(shape)
if shapes:
if tostore:
subrep = ifcfile.createIfcShapeRepresentation(context,'Body',solidType,shapes)
xvc = ifcfile.createIfcDirection((1.0,0.0,0.0))
zvc = ifcfile.createIfcDirection((0.0,0.0,1.0))
ovc = ifcfile.createIfcCartesianPoint((0.0,0.0,0.0))
gpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
repmap = ifcfile.createIfcRepresentationMap(gpl,subrep)
pla = FreeCAD.ActiveDocument.getObject(k).Placement
axis1 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(1,0,0))))
axis2 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,1,0))))
origin = ifcfile.createIfcCartesianPoint(tuple(FreeCAD.Vector(pla.Base).multiply(0.001)))
axis3 = ifcfile.createIfcDirection(tuple(pla.Rotation.multVec(FreeCAD.Vector(0,0,1))))
transf = ifcfile.createIfcCartesianTransformationOperator3D(axis1,axis2,origin,1.0,axis3)
mapitem = ifcfile.createIfcMappedItem(repmap,transf)
shapes = [mapitem]
sharedobjects[k] = repmap
solidType = "MappedRepresentation"
# set surface style
if FreeCAD.GuiUp and (not subtraction) and hasattr(obj.ViewObject,"ShapeColor"):
# only set a surface style if the object has no material.
# apparently not needed, no harm in having both.
#m = False
#if hasattr(obj,"BaseMaterial"):
# if obj.BaseMaterial:
# if "Color" in obj.BaseMaterial.Material:
# m = True
#if not m:
rgb = obj.ViewObject.ShapeColor[:3]
if rgb in surfstyles:
psa = surfstyles[rgb]
else:
col = ifcfile.createIfcColourRgb(None,rgb[0],rgb[1],rgb[2])
ssr = ifcfile.createIfcSurfaceStyleRendering(col,None,None,None,None,None,None,None,"FLAT")
iss = ifcfile.createIfcSurfaceStyle(None,"BOTH",[ssr])
psa = ifcfile.createIfcPresentationStyleAssignment([iss])
surfstyles[rgb] = psa
for shape in shapes:
isi = ifcfile.createIfcStyledItem(shape,[psa],None)
xvc = ifcfile.createIfcDirection((1.0,0.0,0.0))
zvc = ifcfile.createIfcDirection((0.0,0.0,1.0))
ovc = ifcfile.createIfcCartesianPoint((0.0,0.0,0.0))
gpl = ifcfile.createIfcAxis2Placement3D(ovc,zvc,xvc)
placement = ifcfile.createIfcLocalPlacement(None,gpl)
representation = ifcfile.createIfcShapeRepresentation(context,'Body',solidType,shapes)
productdef = ifcfile.createIfcProductDefinitionShape(None,None,[representation])
return productdef,placement,shapetype
def setRepresentation(representation):
"""Returns a shape from a 2D IfcShapeRepresentation"""
def getPolyline(ent):
pts = []
for p in ent.Points:
c = p.Coordinates
pts.append(FreeCAD.Vector(c[0],c[1],c[2] if len(c) > 2 else 0))
return Part.makePolygon(pts)
def getCircle(ent):
c = ent.Position.Location.Coordinates
c = FreeCAD.Vector(c[0],c[1],c[2] if len(c) > 2 else 0)
r = ent.Radius
return Part.makeCircle(r,c)
result = []
if representation.is_a("IfcShapeRepresentation"):
for item in representation.Items:
if item.is_a("IfcGeometricCurveSet"):
for el in item.Elements:
if el.is_a("IfcPolyline"):
result.append(getPolyline(el))
elif el.is_a("IfcCircle"):
result.append(getCircle(el))
elif el.is_a("IfcTrimmedCurve"):
base = el.BasisCurve
t1 = el.Trim1[0].wrappedValue
t2 = el.Trim2[0].wrappedValue
if not el.SenseAgreement:
t1,t2 = t2,t1
if base.is_a("IfcPolyline"):
bc = getPolyline(base)
result.append(bc)
elif base.is_a("IfcCircle"):
bc = getCircle(base)
e = Part.ArcOfCircle(bc.Curve,math.radians(t1),math.radians(t2)).toShape()
d = base.Position.RefDirection.DirectionRatios
v = FreeCAD.Vector(d[0],d[1],d[2] if len(d) > 2 else 0)
a = -DraftVecUtils.angle(v)
e.rotate(bc.Curve.Center,FreeCAD.Vector(0,0,1),math.degrees(a))
result.append(e)
return result
def getRotation(entity):
"returns a FreeCAD rotation from an IfcProduct with a IfcMappedItem representation"
try:
rmap = entity.Representation.Representations[0].Items[0].MappingTarget
u = FreeCAD.Vector(rmap.Axis1.DirectionRatios)
v = FreeCAD.Vector(rmap.Axis2.DirectionRatios)
w = FreeCAD.Vector(rmap.Axis3.DirectionRatios)
except AttributeError:
return FreeCAD.Rotation()
import WorkingPlane
p = WorkingPlane.plane(u=u,v=v,w=w)
return p.getRotation().Rotation
|
lgpl-2.1
| 3,035,360,781,126,952,400
| 45.694091
| 181
| 0.530194
| false
| 4.079059
| false
| false
| false
|
greenlin/universal-portfolios
|
universal/result.py
|
1
|
10866
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import pickle
from universal import tools
class PickleMixin(object):
def save(self, filename):
""" Save object as a pickle """
with open(filename, 'wb') as f:
pickle.dump(self, f, -1)
@classmethod
def load(cls, filename):
""" Load pickled object. """
with open(filename, 'rb') as f:
return pickle.load(f)
class AlgoResult(PickleMixin):
""" Results returned by algo's run method. The class containts useful
metrics such as sharpe ratio, mean return, drawdowns, ... and also
many visualizations.
You can specify transactions by setting AlgoResult.fee. Fee is
expressed in a percentages as a one-round fee.
"""
def __init__(self, X, B):
"""
:param X: Price relatives.
:param B: Weights.
"""
# set initial values
self._fee = 0.
self._B = B
self.rf_rate = 0.
self._X = X
# update logarithms, fees, etc.
self._recalculate()
@property
def X(self):
return self._X
@X.setter
def X(self, _X):
self._X = _X
self._recalculate()
@property
def B(self):
return self._B
@B.setter
def B(self, _B):
self._B = _B
self._recalculate()
@property
def fee(self):
return self._fee
@fee.setter
def fee(self, value):
""" Set transaction costs. Fees can be either float or Series
of floats for individual assets with proper indices. """
if isinstance(value, dict):
value = pd.Series(value)
if isinstance(value, pd.Series):
missing = set(self.X.columns) - set(value.index)
assert len(missing) == 0, 'Missing fees for {}'.format(missing)
self._fee = value
self._recalculate()
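# Example (a sketch; the tickers are hypothetical):
#   result.fee = 0.001                              # flat 0.1% per trade
#   result.fee = {'AAPL': 0.001, 'MSFT': 0.0005}    # per-asset fees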
def _recalculate(self):
# calculate return for individual stocks
r = (self.X - 1) * self.B
self.asset_r = r + 1
self.r = r.sum(axis=1) + 1
# stock went bankrupt
self.r[self.r < 0] = 0.
# add fees
if not isinstance(self._fee, float) or self._fee != 0:
fees = (self.B.shift(-1).mul(self.r, axis=0) - self.B * self.X).abs()
fees.iloc[0] = self.B.ix[0]
fees.iloc[-1] = 0.
fees *= self._fee
self.asset_r -= fees
self.r -= fees.sum(axis=1)
self.r_log = np.log(self.r)
@property
def weights(self):
return self.B
@property
def equity(self):
return self.r.cumprod()
@property
def equity_decomposed(self):
""" Return equity decomposed to individual assets. """
return self.asset_r.cumprod()
@property
def asset_equity(self):
return self.X.cumprod()
@property
def total_wealth(self):
return self.r.prod()
@property
def profit_factor(self):
x = self.r_log
up = x[x > 0].sum()
down = -x[x < 0].sum()
return up / down if down != 0 else np.inf
@property
def sharpe(self):
""" Compute annualized sharpe ratio from log returns. If data does
not contain datetime index, assume daily frequency with 252 trading days a year.
"""
return tools.sharpe(self.r_log, rf_rate=self.rf_rate, freq=self.freq())
@property
def information(self):
""" Information ratio benchmarked against uniform CRP portfolio. """
s = self.X.mean(axis=1)
x = self.r_log - np.log(s)
mu, sd = x.mean(), x.std()
freq = self.freq()
if sd > 1e-8:
return mu / sd * np.sqrt(freq)
elif mu > 1e-8:
return np.inf * np.sign(mu)
else:
return 0.
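# In effect: IR = mean(x) / std(x) * sqrt(freq), where
# x = log(portfolio return) - log(benchmark return) and the benchmark is the
# uniform CRP proxy (row mean of the price relatives).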
@property
def growth_rate(self):
return self.r_log.mean() * self.freq()
@property
def volatility(self):
return np.sqrt(self.freq()) * self.r_log.std()
@property
def annualized_return(self):
return np.exp(self.r_log.mean() * self.freq()) - 1
@property
def annualized_volatility(self):
return np.exp(self.r_log).std() * np.sqrt(self.freq())
@property
def drawdown_period(self):
''' Returns the longest drawdown period. Stagnation is a drawdown too. '''
x = self.equity
period = [0.] * len(x)
peak = 0
for i in range(len(x)):
# new peak
if x[i] > peak:
peak = x[i]
period[i] = 0
else:
period[i] = period[i-1] + 1
return max(period) * 252. / self.freq()
@property
def max_drawdown(self):
''' Returns the largest relative drawdown (as a fraction of peak equity). '''
x = self.equity
return max(1. - x / x.cummax())
@property
def winning_pct(self):
x = self.r_log
win = (x > 0).sum()
all_trades = (x != 0).sum()
return float(win) / all_trades
def freq(self, x=None):
""" Number of data items per year. If data does not contain
datetime index, assume daily frequency with 252 trading days a year."""
x = x or self.r
return tools.freq(x.index)
def summary(self, name=None):
return """Summary{}:
Profit factor: {:.2f}
Sharpe ratio: {:.2f}
Information ratio (wrt UCRP): {:.2f}
Annualized return: {:.2f}%
Annualized volatility: {:.2f}%
Longest drawdown: {:.0f} days
Max drawdown: {:.2f}%
Winning days: {:.1f}%
""".format(
'' if name is None else ' for ' + name,
self.profit_factor,
self.sharpe,
self.information,
100 * self.annualized_return,
100 * self.annualized_volatility,
self.drawdown_period,
100 * self.max_drawdown,
100 * self.winning_pct
)
def plot(self, weights=True, assets=True, portfolio_label='PORTFOLIO', **kwargs):
""" Plot equity of all assets plus our strategy.
:param weights: Plot weights as a subplot.
:param assets: Plot asset prices.
:return: List of axes.
"""
res = ListResult([self], [portfolio_label])
if not weights:
ax1 = res.plot(assets=assets, **kwargs)
return [ax1]
else:
plt.figure(1)
ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)
res.plot(assets=assets, ax=ax1, **kwargs)
ax2 = plt.subplot2grid((3, 1), (2, 0), sharex=ax1)
# plot weights as lines
if self.B.values.min() < -0.01:
self.B.plot(ax=ax2, ylim=(min(0., self.B.values.min()), max(1., self.B.values.max())),
legend=False, colormap=plt.get_cmap('jet'))
else:
# fix rounding errors near zero
if self.B.values.min() < 0:
B = self.B - self.B.values.min()
else:
B = self.B
B.plot(ax=ax2, ylim=(0., max(1., B.values.max())),
legend=False, colormap=plt.get_cmap('jet'), kind='area', stacked=True)
plt.ylabel('weights')
return [ax1, ax2]
def hedge(self, result=None):
""" Hedge results with results of other strategy (subtract weights).
:param result: Other result object. Default is UCRP.
:return: New AlgoResult object.
"""
if result is None:
from algos import CRP
result = CRP().run(self.X.cumprod())
return AlgoResult(self.X, self.B - result.B)
def plot_decomposition(self, **kwargs):
""" Decompose equity into components of individual assets and plot
them. Does not take fees into account. """
ax = self.equity_decomposed.plot(**kwargs)
return ax
@property
def importance(self):
ws = self.weights.sum()
return ws / sum(ws)
class ListResult(list, PickleMixin):
""" List of AlgoResults. """
def __init__(self, results=None, names=None):
results = results if results is not None else []
names = names if names is not None else []
super(ListResult, self).__init__(results)
self.names = names
def append(self, result, name):
super(ListResult, self).append(result)
self.names.append(name)
def to_dataframe(self):
""" Calculate equities for all results and return one dataframe. """
eq = {}
for result, name in zip(self, self.names):
eq[name] = result.equity
return pd.DataFrame(eq)
def save(self, filename, **kwargs):
# do not save it with fees
#self.fee = 0.
#self.to_dataframe().to_pickle(*args, **kwargs)
with open(filename, 'wb') as f:
pickle.dump(self, f, -1)
@classmethod
def load(cls, filename):
# df = pd.read_pickle(*args, **kwargs)
# return cls([df[c] for c in df], df.columns)
with open(filename, 'rb') as f:
return pickle.load(f)
@property
def fee(self):
return {name: result.fee for result, name in zip(self, self.names)}
@fee.setter
def fee(self, value):
for result in self:
result.fee = value
def summary(self):
return '\n'.join([result.summary(name) for result, name in zip(self, self.names)])
def plot(self, ucrp=False, bah=False, assets=False, **kwargs):
""" Plot strategy equity.
:param ucrp: Add uniform CRP as a benchmark.
:param bah: Add Buy-And-Hold portfolio as a benchmark.
:param assets: Add asset prices.
:param kwargs: Additional arguments for pd.DataFrame.plot
"""
# NOTE: order of plotting is important because of coloring
# plot portfolio
d = self.to_dataframe()
portfolio = d.copy()
ax = portfolio.plot(linewidth=3., legend=False, **kwargs)
kwargs['ax'] = ax
ax.set_ylabel('Total wealth')
# plot uniform constant rebalanced portfolio
if ucrp:
from algos import CRP
crp_algo = CRP().run(self[0].X.cumprod())
crp_algo.fee = self[0].fee
d['UCRP'] = crp_algo.equity
d[['UCRP']].plot(**kwargs)
# add bah
if bah:
from algos import BAH
bah_algo = BAH().run(self[0].X.cumprod())
bah_algo.fee = self[0].fee
d['BAH'] = bah_algo.equity
d[['BAH']].plot(**kwargs)
# add individual assets
if assets:
self[0].asset_equity.plot(colormap=plt.get_cmap('jet'), **kwargs)
# plot portfolio again to highlight it
kwargs['color'] = 'blue'
portfolio.plot(linewidth=3., **kwargs)
return ax
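# Example usage (a minimal sketch; assumes a price-relative DataFrame X with a
# datetime index and the algos subpackage on the path):
#
#   from universal.algos import CRP
#   result = CRP().run(X.cumprod())     # returns an AlgoResult
#   result.fee = 0.001                  # 0.1% one-round transaction cost
#   print result.summary('CRP')
#   result.plot(assets=True)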
|
mit
| -8,270,731,566,286,813,000
| 28.769863
| 102
| 0.544543
| false
| 3.615973
| false
| false
| false
|
jsharkey13/isaac-selenium-testing
|
isaactest/tests/symbolic_q_text_entry_correct.py
|
1
|
2725
|
import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.isaac import answer_symbolic_q_text_entry, open_accordion_section, submit_login_form, assert_logged_in
from ..utils.i_selenium import assert_tab, image_div
from ..utils.i_selenium import wait_for_xpath_element
from ..tests import TestWithDependency
from selenium.common.exceptions import TimeoutException, NoSuchElementException
__all__ = ["symbolic_q_text_entry_correct"]
#####
# Test : Symbolic Questions Text Entry Correct Answers
#####
@TestWithDependency("SYMBOLIC_Q_TEXT_ENTRY_CORRECT")
def symbolic_q_text_entry_correct(driver, Users, ISAAC_WEB, WAIT_DUR, **kwargs):
"""Test if symbolic questions can be answered correctly with text entry.
- 'driver' should be a Selenium WebDriver.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
"""
assert_tab(driver, ISAAC_WEB)
driver.get(ISAAC_WEB + "/questions/_regression_test_")
time.sleep(WAIT_DUR)
assert_tab(driver, ISAAC_WEB + "/questions/_regression_test_")
time.sleep(WAIT_DUR)
try:
open_accordion_section(driver, 4)
sym_question = driver.find_element_by_xpath("//div[@ng-switch-when='isaacSymbolicQuestion']")
except NoSuchElementException:
log(ERROR, "Can't find the symbolic question; can't continue!")
return False
log(INFO, "Attempt to enter correct answer.")
if not answer_symbolic_q_text_entry(sym_question, "(((x)))", wait_dur=WAIT_DUR):
log(ERROR, "Couldn't answer symbolic Question; can't continue!")
return False
time.sleep(WAIT_DUR)
try:
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacSymbolicQuestion']//h1[text()='Correct!']")
log(INFO, "A 'Correct!' message was displayed as expected.")
wait_for_xpath_element(driver, "(//div[@ng-switch-when='isaacSymbolicQuestion']//p[text()='This is a correct choice. It requires an exact match!'])[2]")
log(INFO, "The editor entered explanation text was correctly shown.")
wait_for_xpath_element(driver, "//div[@ng-switch-when='isaacSymbolicQuestion']//strong[text()='Well done!']")
log(INFO, "The 'Well done!' message was correctly shown.")
log(INFO, "Avoid rate limiting: wait 1 minute.")
time.sleep(WAIT_DUR)
log(PASS, "Symbolic Question 'correct value, correct unit' behavior as expected.")
return True
except TimeoutException:
image_div(driver, "ERROR_symbolic_q_correct")
log(ERROR, "The messages shown for a correct answer were not all displayed; see 'ERROR_symbolic_q_correct.png'!")
return False
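# The decorator above registers this test with the framework; a direct call
# would look roughly like this (a sketch; the driver and URL are assumptions):
#
#   from selenium import webdriver
#   driver = webdriver.Firefox()
#   symbolic_q_text_entry_correct(driver, Users=None,
#                                 ISAAC_WEB="https://test.isaacphysics.org",
#                                 WAIT_DUR=2)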
|
mit
| 8,669,670,329,497,179,000
| 48.545455
| 160
| 0.686972
| false
| 3.614058
| true
| false
| false
|
EricssonResearch/calvin-base
|
calvin/actor/actor.py
|
1
|
36802
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import wrapt
import functools
import time
import copy
from calvin.utilities import calvinuuid
from calvin.actor import actorport
from calvin.utilities.calvinlogger import get_logger
from calvin.utilities.utils import enum
from calvin.runtime.north.calvin_token import Token, ExceptionToken
# from calvin.runtime.north import calvincontrol
from calvin.runtime.north.replicationmanager import ReplicationId
import calvin.requests.calvinresponse as response
from calvin.runtime.south.async import async
from calvin.runtime.north.plugins.authorization_checks import check_authorization_plugin_list
from calvin.utilities.calvin_callback import CalvinCB
from calvin.csparser.port_property_syntax import get_port_property_capabilities, get_port_property_runtime
from calvin.runtime.north.calvinsys import get_calvinsys
from calvin.runtime.north.calvinlib import get_calvinlib
_log = get_logger(__name__)
# Tests in test_manage_decorator.py
def manage(include=None, exclude=None):
"""
Decorator for Actor::init() providing automatic management of state variables.
Usage:
@manage() # Manage every instance variable known upon completion of __init__
@manage(include = []) # Manage nothing
@manage(include = [foo, bar]) # Manage self.foo and self.bar only. Equivalent to @manage([foo, bar])
@manage(exclude = [foo, bar]) # Manage everything except self.foo and self.bar
@manage(exclude = []) # Same as @manage()
@manage(<list>) # Same as @manage(include = <list>)
N.B. If include and exclude are both present, exclude will be disregarded.
"""
if include and type(include) is not list or exclude and type(exclude) is not list:
raise Exception("@manage decorator: Must use list as argument")
include_set = set(include) if include else set()
exclude_set = set(exclude) if exclude else set()
# Using wrapt since we need to preserve the signature of the wrapped function.
# See http://wrapt.readthedocs.org/en/latest/index.html
# FIXME: Since we use wrapt here, we might as well use it in guard and condition too.
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Exclude the instance variables added by superclasses
exclude_set.update(instance.__dict__)
x = wrapped(*args, **kwargs)
if include is None:
# include set not given, so construct the implicit include set
include_set.update(instance.__dict__)
include_set.remove('_managed')
include_set.difference_update(exclude_set)
instance._managed.update(include_set)
return x
return wrapper
def condition(action_input=[], action_output=[]):
"""
Decorator condition specifies the required input data and output space.
Both parameters are lists of port names.
Return value is a tuple (did_fire, output_available, exhaust_list)
"""
tokens_produced = len(action_output)
tokens_consumed = len(action_input)
def wrap(action_method):
@functools.wraps(action_method)
def condition_wrapper(self):
#
# Check if input ports have enough tokens. Note that all([]) evaluates to True
#
input_ok = all(self.inports[portname].tokens_available(1) for portname in action_input)
#
# Check if output port have enough free token slots
#
output_ok = all(self.outports[portname].tokens_available(1) for portname in action_output)
if not input_ok or not output_ok:
return (False, output_ok, ())
#
# Build the arguments for the action from the input port(s)
#
exhausted_ports = set()
exception = False
args = []
for portname in action_input:
port = self.inports[portname]
token, exhaust = port.read()
is_exception_token = isinstance(token, ExceptionToken)
exception = exception or is_exception_token
args.append(token if is_exception_token else token.value )
if exhaust:
exhausted_ports.add(port)
#
# Check for exceptional conditions
#
if exception:
# FIXME: Simplify exception handling
production = self.exception_handler(action_method, args) or ()
else:
#
# Perform the action (N.B. the method may be wrapped in a decorator)
# Action methods not returning a production (i.e. no output ports) return None
# => replace with the empty tuple
#
production = action_method(self, *args) or ()
valid_production = (tokens_produced == len(production))
if not valid_production:
#
# Error condition
#
action = "%s.%s" % (self._type, action_method.__name__)
raise Exception("%s invalid production %s, expected %s" % (action, str(production), str(tuple(action_output))))
#
# Write the results from the action to the output port(s)
#
for portname, retval in zip(action_output, production):
port = self.outports[portname]
port.write_token(retval if isinstance(retval, Token) else Token(retval))
return (True, True, exhausted_ports)
return condition_wrapper
return wrap
def stateguard(action_guard):
"""
Decorator guard refines the criteria for picking an action to run by supplying a function
with THE SAME signature as the guarded action that returns a boolean (True if the action is allowed).
If the specified function is unbound or a lambda expression, you must account for 'self',
e.g. 'lambda self, a, b: a>0'
"""
def wrap(action_method):
@functools.wraps(action_method)
def guard_wrapper(self, *args):
if not action_guard(self):
return (False, True, ())
return action_method(self, *args)
return guard_wrapper
return wrap
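# A minimal sketch of how these decorators combine in an actor subclass
# ('Counter' and its port name are hypothetical, not part of this module):
#
#   class Counter(Actor):
#       @manage(['count'])
#       def init(self):
#           self.count = 0
#
#       @stateguard(lambda self: self.count < 10)
#       @condition(action_input=[], action_output=['integer'])
#       def cnt(self):
#           self.count += 1
#           return (self.count, )
#
#       action_priority = ('cnt', )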
def verify_status(valid_status_list, raise_=False):
"""
Decorator to help with debugging of state transitions.
If a decorated method is called when the actor's status is not in valid_status_list,
the attempt will be logged (or an exception raised if raise_ is True).
"""
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Log or raise if the actor's status does not permit this operation
if not instance.fsm.disable_state_checks and instance.fsm.state() not in valid_status_list:
msg = "Invalid status %s for operation %s" % (instance.fsm, wrapped.__name__)
if raise_:
raise Exception(msg)
else:
_log.info(msg)
x = wrapped(*args, **kwargs)
return x
return wrapper
def _implements_state(obj):
"""Helper method to check if foreign object supports setting/getting state."""
return hasattr(obj, 'state') and callable(getattr(obj, 'state')) and \
hasattr(obj, 'set_state') and callable(getattr(obj, 'set_state'))
class calvinsys(object):
"""
Calvinsys interface exposed to actors
"""
@staticmethod
def open(actor, name, **kwargs):
return get_calvinsys().open(name, actor, **kwargs)
@staticmethod
def can_write(ref):
return get_calvinsys().can_write(ref)
@staticmethod
def write(ref, data):
return get_calvinsys().write(ref, data)
@staticmethod
def can_read(ref):
return get_calvinsys().can_read(ref)
@staticmethod
def read(ref):
return get_calvinsys().read(ref)
@staticmethod
def close(ref):
return get_calvinsys().close(ref)
class calvinlib(object):
"""
CalvinLib interface exposed to actors
"""
@staticmethod
def use(name, **kwargs):
return get_calvinlib().use(name, **kwargs)
class Actor(object):
"""
Base class for all actors
A name must be supplied.
Subclasses need to declare the parameter
calvinsys if they want access to the system
interface on the node; this parameter
will be supplied by the node and not by the user.
"""
# Class variable controls action priority order
action_priority = tuple()
# These are the security variables that will always be serialized, see serialize()/deserialize() below
_security_state_keys = ('_subject_attributes',)
# These are the instance variables that will always be serialized, see serialize()/deserialize() below
_private_state_keys = ('_id', '_name', '_has_started', '_deployment_requirements',
'_signature', '_migration_info', "_port_property_capabilities", "_replication_id")
# Internal state (status)
class FSM(object):
def __init__(self, states, initial, transitions, hooks=None, allow_invalid_transitions=True,
disable_transition_checks=False, disable_state_checks=False):
self.states = states
self._state = initial
self.transitions = transitions
self.hooks = hooks or {}
self.allow_invalid_transitions = allow_invalid_transitions
self.disable_transition_checks = disable_transition_checks
# disable_state_checks is used in the verify_status decorator
self.disable_state_checks = disable_state_checks
def state(self):
return self._state
def transition_to(self, new_state):
if new_state in self.transitions[self._state] or self.disable_transition_checks:
hook = self.hooks.get((self._state, new_state), None)
if hook:
hook()
self._state = new_state
else:
msg = "Invalid transition %s -> %s" % (self, self.printable(new_state))
if self.allow_invalid_transitions:
_log.warning("ALLOWING " + msg)
self._state = new_state
else:
raise Exception(msg)
def printable(self, state):
return self.states.reverse_mapping[state]
def __str__(self):
return self.printable(self._state)
STATUS = enum('LOADED', 'READY', 'PENDING', 'ENABLED', 'DENIED', 'MIGRATABLE')
VALID_TRANSITIONS = {
STATUS.LOADED : [STATUS.READY],
STATUS.READY : [STATUS.PENDING, STATUS.ENABLED, STATUS.DENIED],
STATUS.PENDING : [STATUS.READY, STATUS.PENDING, STATUS.ENABLED],
STATUS.ENABLED : [STATUS.READY, STATUS.PENDING, STATUS.DENIED],
STATUS.DENIED : [STATUS.ENABLED, STATUS.MIGRATABLE, STATUS.PENDING],
STATUS.MIGRATABLE: [STATUS.READY, STATUS.DENIED]
}
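# Status is driven by the FSM above, e.g. (a sketch of valid transitions):
#   fsm = Actor.FSM(Actor.STATUS, Actor.STATUS.LOADED, Actor.VALID_TRANSITIONS)
#   fsm.transition_to(Actor.STATUS.READY)    # valid: LOADED -> READY
#   fsm.transition_to(Actor.STATUS.ENABLED)  # valid: READY -> ENABLED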
test_args = ()
test_kwargs = {}
@property
def id(self):
return self._id
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def migration_info(self):
return self._migration_info
# What are the arguments, really?
def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False,
disable_state_checks=False, actor_id=None, security=None):
"""Should _not_ be overridden in subclasses."""
super(Actor, self).__init__()
self._type = actor_type
self._name = name # optional: human_readable_name
self._id = actor_id or calvinuuid.uuid("ACTOR")
_log.debug("New actor id: %s, supplied actor id %s" % (self._id, actor_id))
self._deployment_requirements = []
self._port_property_capabilities = None
self._signature = None
self._component_members = set([self._id]) # We are only part of component if this is extended
self._managed = set()
self._has_started = False
# self.control = calvincontrol.get_calvincontrol()
self._migration_info = None
self._migrating_to = None # During migration, while still on the previous node, set to the next node's id
self._migration_connected = True # False while setting up the migrated actor, to prevent further migrations
self._last_time_warning = 0.0
self.sec = security
self._subject_attributes = self.sec.get_subject_attributes() if self.sec is not None else None
self.authorization_checks = None
self._replication_id = ReplicationId()
self._exhaust_cb = None
self._pressure_event = 0 # Time of the last pressure event (not part of state, local only)
self.inports = {p: actorport.InPort(p, self, pp) for p, pp in self.inport_properties.items()}
self.outports = {p: actorport.OutPort(p, self, pp) for p, pp in self.outport_properties.items()}
hooks = {
(Actor.STATUS.PENDING, Actor.STATUS.ENABLED): self._will_start,
(Actor.STATUS.ENABLED, Actor.STATUS.PENDING): self.will_stop,
}
self.fsm = Actor.FSM(Actor.STATUS, Actor.STATUS.LOADED, Actor.VALID_TRANSITIONS, hooks,
allow_invalid_transitions=allow_invalid_transitions,
disable_transition_checks=disable_transition_checks,
disable_state_checks=disable_state_checks)
def set_authorization_checks(self, authorization_checks):
self.authorization_checks = authorization_checks
@verify_status([STATUS.LOADED])
def setup_complete(self):
self.fsm.transition_to(Actor.STATUS.READY)
def init(self):
raise Exception("Implementing 'init()' is mandatory.")
def _will_start(self):
"""Ensure will_start() is only called once"""
if not self._has_started:
self.will_start()
self._has_started = True
def will_start(self):
"""Override in actor subclass if actions need to be taken before starting."""
pass
def will_stop(self):
"""Override in actor subclass if actions need to be taken before stopping."""
pass
def will_migrate(self):
"""Override in actor subclass if actions need to be taken before migrating."""
pass
def did_migrate(self):
"""Override in actor subclass if actions need to be taken after migrating."""
pass
def _will_end(self):
if hasattr(self, "will_end") and callable(self.will_end):
self.will_end()
get_calvinsys().close_all(self)
def did_replicate(self, index):
"""Override in actor subclass if actions need to be taken after replication."""
pass
def __str__(self):
ip = ""
for p in self.inports.values():
ip = ip + str(p)
op = ""
for p in self.outports.values():
op = op + str(p)
s = "Actor: '%s' class '%s'\nstatus: %s\ninports: %s\noutports:%s" % (
self._name, self._type, self.fsm, ip, op)
return s
@verify_status([STATUS.READY, STATUS.PENDING, STATUS.ENABLED])
def did_connect(self, port):
"""Called when a port is connected, checks actor is fully connected."""
if self.fsm.state() == Actor.STATUS.ENABLED:
# We were already enabled; that's fine now with dynamic port connections
return
_log.debug("actor.did_connect BEGIN %s %s " % (self._name, self._id))
# If we happen to be in READY, go to PENDING
if self.fsm.state() == Actor.STATUS.READY:
self.fsm.transition_to(Actor.STATUS.PENDING)
# Three non-pathological options:
# have inports, have outports, or have in- and outports
if self.inports:
for p in self.inports.values():
if not p.is_connected():
return
if self.outports:
for p in self.outports.values():
if not p.is_connected():
return
# If we made it here, all ports are connected
self.fsm.transition_to(Actor.STATUS.ENABLED)
_log.debug("actor.did_connect ENABLED %s %s " % (self._name, self._id))
@verify_status([STATUS.ENABLED, STATUS.PENDING, STATUS.DENIED, STATUS.MIGRATABLE])
def did_disconnect(self, port):
"""Called when a port is disconnected, checks actor is fully disconnected."""
# If the actor is MIGRATABLE, return since it will be migrated soon.
_log.debug("Actor %s did_disconnect %s" % (self._id, Actor.STATUS.reverse_mapping[self.fsm.state()]))
if self.fsm.state() == Actor.STATUS.MIGRATABLE:
return
# If we happen to be in ENABLED/DENIED, go to PENDING
if self.fsm.state() != Actor.STATUS.PENDING:
self.fsm.transition_to(Actor.STATUS.PENDING)
# Three non-pathological options:
# have inports, have outports, or have in- and outports
if self.inports:
for p in self.inports.values():
if p.is_connected():
return
if self.outports:
for p in self.outports.values():
if p.is_connected():
return
# If we made it here, all ports are disconnected
self.fsm.transition_to(Actor.STATUS.READY)
def exhaust(self, callback):
self._exhaust_cb = callback
def get_pressure(self):
_log.debug("get_pressure %s" % self._replication_id.measure_pressure())
if not self._replication_id.measure_pressure():
return None
t = time.time()
pressure = {}
for port in self.inports.values():
for e in port.endpoints:
PRESSURE_LENGTH = len(e.pressure)
pressure[port.id + "," + e.peer_id] = {'last': e.pressure_last, 'count': e.pressure_count,
'pressure': [e.pressure[i % PRESSURE_LENGTH] for i in range(
max(0, e.pressure_count - PRESSURE_LENGTH), e.pressure_count)]}
pressure_event = False
for p in pressure.values():
if len(p['pressure']) < 2:
continue
if ((p['pressure'][-1][1] - p['pressure'][-2][1]) < 10 and
p['pressure'][-1][1] > self._pressure_event):
# Less than 10 sec between queue full and not reported, maybe scale out
self._pressure_event = max(p['pressure'][-1][1], self._pressure_event)
pressure_event = True
break
if (p['pressure'][-1][1] < (t - 30) and
p['last'] > p['pressure'][-1][0] + 3 and
p['pressure'][-1][1] > self._pressure_event):
# More than 30 sec since queue full, received at least 3 tokens and not reported, maybe scale in
self._pressure_event = max(p['pressure'][-1][1], self._pressure_event)
pressure_event = True
break
pressure['time'] = t
_log.debug("get_pressure pressure_event:%s, pressure: %s" % (pressure_event, pressure))
return pressure if pressure_event else None
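# Editor's sketch of the dict get_pressure() returns on a pressure event;
# keys and values below are invented for illustration:
#
#   {"<inport_id>,<peer_id>": {"last": 45,
#                              "count": 40,
#                              "pressure": [(37, 1612345678.0), (40, 1612345690.0)]},
#    "time": 1612345699.0}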
#
# FIXME: The following methods (_authorized, _warn_slow_actor, _handle_exhaustion) were
# extracted from fire() to make the logic easier to follow
# FIXME: Responsibility of scheduler, not actor class
#
def _authorized(self):
authorized = self.check_authorization_decision()
if not authorized:
_log.info("Access denied for actor %s(%s)" % ( self._type, self._id))
# The authorization decision is not valid anymore.
# Change actor status to DENIED.
self.fsm.transition_to(Actor.STATUS.DENIED)
# Try to migrate actor.
self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info))
return authorized
def _warn_slow_actor(self, time_spent, start_time):
time_since_warning = start_time - self._last_time_warning
if time_since_warning < 120.0:
return
self._last_time_warning = start_time
_log.warning("%s (%s) actor blocked for %f sec" % (self._name, self._type, time_spent))
def _handle_exhaustion(self, exhausted_ports, output_ok):
_log.debug("actor_fire %s test exhaust %s, %s, %s" % (self._id, self._exhaust_cb is not None, exhausted_ports, output_ok))
for port in exhausted_ports:
# Might result in actor changing to PENDING
try:
port.finished_exhaustion()
except:
_log.exception("FINSIHED EXHAUSTION FAILED")
if (output_ok and self._exhaust_cb is not None and
not any([p.any_outstanding_exhaustion_tokens() for p in self.inports.values()])):
_log.debug("actor %s exhausted" % self._id)
# We are in exhaustion and got all exhaustion tokens from peer ports,
# but stopped firing while outport token slots were available, i.e. exhausted inports or a deadlock
# FIXME handle exhaustion deadlock
# Initiate disconnect of outports and destroy the actor
async.DelayedCall(0, self._exhaust_cb, status=response.CalvinResponse(True))
self._exhaust_cb = None
@verify_status([STATUS.ENABLED])
def fire(self):
"""
Fire an actor.
Returns tuple (did_fire, output_ok, exhausted)
"""
#
# Go over the action priority list once
#
for action_method in self.__class__.action_priority:
did_fire, output_ok, exhausted = action_method(self)
# Action firing should fire the first action that can fire
if did_fire:
break
return did_fire, output_ok, exhausted
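# Editor's sketch of how a scheduler loop might drive fire(); the real Calvin
# scheduler lives outside this class and is more involved:
#
#   while actor.enabled():
#       did_fire, output_ok, exhausted = actor.fire()
#       if not did_fire:
#           break  # nothing runnable, yield back to the scheduler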
def enabled(self):
# We want to run even if not fully connected during exhaustion
r = self.fsm.state() == Actor.STATUS.ENABLED or self._exhaust_cb is not None
if not r:
_log.debug("Actor %s %s not enabled" % (self._name, self._id))
return r
def denied(self):
return self.fsm.state() == Actor.STATUS.DENIED
def migratable(self):
return self.fsm.state() == Actor.STATUS.MIGRATABLE
@verify_status([STATUS.DENIED])
def enable_or_migrate(self):
"""Enable actor if access is permitted. Try to migrate if access still denied."""
if self.check_authorization_decision():
self.fsm.transition_to(Actor.STATUS.ENABLED)
else:
# Try to migrate actor.
self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info))
# DEPRECATED: Only here for backwards compatibility
@verify_status([STATUS.ENABLED])
def enable(self):
self.fsm.transition_to(Actor.STATUS.ENABLED)
@verify_status([STATUS.READY, STATUS.PENDING, STATUS.LOADED])
# DEPRECATED: Only here for backwards compatibility
def disable(self):
self.fsm.transition_to(Actor.STATUS.PENDING)
# TODO verify status should only allow reading connections when and after being fully connected (enabled)
@verify_status([STATUS.ENABLED, STATUS.READY, STATUS.PENDING, STATUS.MIGRATABLE])
def connections(self, node_id):
c = {'actor_id': self._id, 'actor_name': self._name}
inports = {}
for port in self.inports.values():
peers = [
(node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()]
inports[port.id] = peers
c['inports'] = inports
outports = {}
for port in self.outports.values():
peers = [
(node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()]
outports[port.id] = peers
c['outports'] = outports
return c
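# Editor's sketch of the structure connections() returns (ids invented):
#
#   {'actor_id': '<uuid>', 'actor_name': 'src',
#    'inports':  {'<port_id>': [('<node_id>', '<peer_port_id>')]},
#    'outports': {'<port_id>': [('<node_id>', '<peer_port_id>')]}}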
def state(self):
"""Serialize custom state, implement in subclass if necessary"""
return {}
def set_state(self, state):
"""Deserialize and set custom state, implement in subclass if necessary"""
pass
def _private_state(self):
"""Serialize state common to all actors"""
state = {}
state['inports'] = {
port: self.inports[port]._state() for port in self.inports}
state['outports'] = {
port: self.outports[port]._state() for port in self.outports}
state['_component_members'] = list(self._component_members)
# Place requires in state, in the event we become a ShadowActor
state['_requires'] = self.requires if hasattr(self, 'requires') else []
# FIXME: The objects in _private_state_keys are well known, they are private after all,
# and we shouldn't need this generic handler.
for key in self._private_state_keys:
obj = self.__dict__[key]
if _implements_state(obj):
state[key] = obj.state()
else:
state[key] = obj
state["_calvinsys"] = get_calvinsys().serialize(actor=self)
return state
def _set_private_state(self, state):
"""Deserialize and apply state common to all actors"""
if "_calvinsys" in state:
get_calvinsys().deserialize(actor=self, csobjects=state["_calvinsys"])
for port in state['inports']:
# Uses setdefault to support shadow actor
self.inports.setdefault(port, actorport.InPort(port, self))._set_state(state['inports'][port])
for port in state['outports']:
# Uses setdefault to support shadow actor
self.outports.setdefault(port, actorport.OutPort(port, self))._set_state(state['outports'][port])
self._component_members = set(state['_component_members'])
# FIXME: The objects in _private_state_keys are well known, they are private after all,
# and we shouldn't need this generic handler.
for key in self._private_state_keys:
if key not in self.__dict__:
self.__dict__[key] = state.get(key, None)
else:
obj = self.__dict__[key]
if _implements_state(obj):
obj.set_state(state.get(key))
else:
self.__dict__[key] = state.get(key, None)
def _replication_state(self):
return None
def _set_replication_state(self, state):
"""Deserialize and apply state related to a replicating actor """
pass
def _security_state(self):
"""
Serialize security state.
Security state can only contain objects that can be JSON-serialized.
"""
return {'_subject_attributes':self._subject_attributes}
def _set_security_state(self, state):
"""
Deserialize and apply security state.
Security state can only contain objects that can be JSON-serialized.
"""
pass
def _managed_state(self):
"""
Serialize managed state.
Managed state can only contain objects that can be JSON-serialized.
"""
state = {key: self.__dict__[key] for key in self._managed}
return state
def _set_managed_state(self, state):
"""
Deserialize and apply managed state.
Managed state can only contain objects that can be JSON-serialized.
"""
self._managed.update(set(state.keys()))
for key, val in state.iteritems():
self.__dict__[key] = val
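# Editor's sketch: in concrete actor subclasses the @manage decorator (used by
# ShadowActor.init below) is what populates _managed, e.g.
#
#   @manage(['count'])
#   def init(self, count=0):
#       self.count = count  # round-trips through _managed_state()/_set_managed_state()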
def serialize(self):
"""Returns the serialized state of an actor."""
state = {}
state['private'] = self._private_state()
rstate = self._replication_state()
if rstate is not None:
state['replication'] = rstate
state['managed'] = self._managed_state()
state['security'] = self._security_state()
state['custom'] = self.state()
return state
def deserialize(self, state):
"""Restore an actor's state from the serialized state."""
self._set_private_state(state['private'])
self._set_replication_state(state.get('replication', None))
self._set_security_state(state['security'])
self._set_managed_state(state['managed'])
self.set_state(state['custom'])
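# Editor's round-trip sketch: the serialized state is a plain dict, so
# migration can ship it between runtimes and rebuild the actor there.
# SomeActorSubclass is hypothetical.
#
#   state = actor.serialize()            # {'private': ..., 'managed': ..., 'custom': ...}
#   clone = SomeActorSubclass('ns.Type')
#   clone.deserialize(state)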
def exception_handler(self, action, args):
"""Defult handler when encountering ExceptionTokens"""
_log.error("ExceptionToken encountered\n name: %s\n type: %s\n action: %s\n args: %s\n" %
(self._name, self._type, action.__name__, args))
raise Exception("ExceptionToken NOT HANDLED")
def events(self):
return []
def component_add(self, actor_ids):
if not isinstance(actor_ids, (set, list, tuple)):
actor_ids = [actor_ids]
self._component_members.update(actor_ids)
def component_remove(self, actor_ids):
if not isinstance(actor_ids, (set, list, tuple)):
actor_ids = [actor_ids]
self._component_members -= set(actor_ids)
def part_of_component(self):
return len(self._component_members - set([self._id])) > 0
def component_members(self):
return self._component_members
def requirements_add(self, deploy_reqs, extend=False):
if extend:
self._deployment_requirements.extend(deploy_reqs)
else:
self._deployment_requirements = deploy_reqs
def requirements_get(self):
if self._port_property_capabilities is None:
self._port_property_capabilities = self._derive_port_property_capabilities()
capability_port = [{
'op': 'port_property_match',
'kwargs': {'port_property': self._port_property_capabilities},
'type': '+'
}]
if hasattr(self, 'requires') and self.requires:
capability_require = [{
'op': 'actor_reqs_match',
'kwargs': {'requires': self.requires},
'type': '+'
}]
else:
capability_require = []
return (self._deployment_requirements + capability_require +
capability_port + self._replication_id._placement_req)
def _derive_port_property_capabilities(self):
port_property_capabilities = set([])
for port in self.inports.values():
port_property_capabilities.update(get_port_property_capabilities(port.properties))
for port in self.outports.values():
port_property_capabilities.update(get_port_property_capabilities(port.properties))
_log.debug("derive_port_property_capabilities:" + str(port_property_capabilities))
return get_port_property_runtime(port_property_capabilities)
def signature_set(self, signature):
if self._signature is None:
self._signature = signature
def check_authorization_decision(self):
"""Check if authorization decision is still valid"""
if self.authorization_checks:
if any(isinstance(elem, list) for elem in self.authorization_checks):
# If list of lists, True must be found in each list.
for plugin_list in self.authorization_checks:
if not check_authorization_plugin_list(plugin_list):
return False
return True
else:
return check_authorization_plugin_list(self.authorization_checks)
return True
@verify_status([STATUS.DENIED])
def set_migration_info(self, reply):
if reply and reply.status == 200 and reply.data["node_id"]:
self._migration_info = reply.data
self.fsm.transition_to(Actor.STATUS.MIGRATABLE)
_log.info("Migrate actor %s to node %s" % (self._name, self._migration_info["node_id"]))
# Inform the scheduler that the actor is ready to migrate.
get_calvinsys().scheduler_maintenance_wakeup()
else:
_log.info("No possible migration destination found for actor %s" % self._name)
# Try to enable/migrate actor again after a delay.
get_calvinsys().scheduler_maintenance_wakeup(delay=True)
@verify_status([STATUS.MIGRATABLE, STATUS.READY])
def remove_migration_info(self, status):
if status.status != 200:
self._migration_info = None
# FIXME: destroy() in actormanager.py was called before trying to migrate.
# Need to make the actor runnable again before transition to DENIED.
#self.fsm.transition_to(Actor.STATUS.DENIED)
def is_shadow(self):
return False
class ShadowActor(Actor):
"""A shadow actor try to behave as another actor but don't have any implementation"""
def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False,
disable_state_checks=False, actor_id=None, security=None):
self.inport_properties = {}
self.outport_properties = {}
self.calvinsys_state = {}
self.requires = None
self._replication_state_data = None
super(ShadowActor, self).__init__(actor_type, name, allow_invalid_transitions=allow_invalid_transitions,
disable_transition_checks=disable_transition_checks,
disable_state_checks=disable_state_checks, actor_id=actor_id,
security=security)
@manage(['_shadow_args'])
def init(self, **args):
self._shadow_args = args
def is_shadow(self):
return True
def create_shadow_port(self, port_name, port_dir, port_id=None):
# TODO check if we should create port against meta info
if port_dir == "in":
self.inport_properties[port_name] = {}
port = actorport.InPort(port_name, self)
self.inports[port_name] = port
else:
self.outport_properties[port_name] = {}
port = actorport.OutPort(port_name, self)
self.outports[port_name] = port
return port
def enabled(self):
return False
def did_connect(self, port):
# Do nothing
return
def did_disconnect(self, port):
# Do nothing
return
def requirements_get(self):
# Get standard actor requirements first
reqs = super(ShadowActor, self).requirements_get()
if self._signature and hasattr(self, '_shadow_args') and self.requires is None:
# Fresh ShadowActor: needs to find placement based on its signature,
# since the actor's requires list is not known locally
reqs += [{'op': 'shadow_actor_reqs_match',
'kwargs': {'signature': self._signature,
'shadow_params': self._shadow_args.keys()},
'type': '+'}]
return reqs
def _set_private_state(self, state):
"""Pop _calvinsys state, set requires and call super class"""
self.calvinsys_state = state.pop("_calvinsys")
# Done only in ShadowActor since requires is normally part of the real Actor sub-class
self.requires = state['_requires']
super(ShadowActor, self)._set_private_state(state)
def _private_state(self):
"""Call super class and add stored calvinsys state"""
state = super(ShadowActor, self)._private_state()
state["_calvinsys"] = self.calvinsys_state
return state
def _set_replication_state(self, state):
""" Save the replication state, besides ports since they are already handled on the shadow instance """
super(ShadowActor, self)._set_replication_state(state)
# Need a copy since the ports are removed, and they are needed for connect
self._replication_state_data = copy.copy(state)
if state is None:
return
def _replication_state(self):
return self._replication_state_data
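# Editor's usage sketch: a runtime lacking the actor implementation creates a
# shadow with matching ports and keeps it PENDING until placement is found.
#
#   shadow = ShadowActor('ns.Unknown', name='pending')
#   shadow.create_shadow_port('token', 'in')
#   shadow.create_shadow_port('result', 'out')
#   shadow.is_shadow()   # True; enabled() always returns False for shadows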
|
apache-2.0
| 8,187,153,479,713,603,000
| 39.308872
| 130
| 0.606081
| false
| 4.185375
| false
| false
| false
|
hiuwo/acq4
|
acq4/pyqtgraph/opengl/items/GLGridItem.py
|
1
|
1650
|
from OpenGL.GL import *
from .. GLGraphicsItem import GLGraphicsItem
from ... import QtGui
__all__ = ['GLGridItem']
class GLGridItem(GLGraphicsItem):
"""
**Bases:** :class:`GLGraphicsItem <pyqtgraph.opengl.GLGraphicsItem>`
Displays a wire-frame grid.
"""
def __init__(self, size=None, color=None, antialias=True, glOptions='translucent'):
GLGraphicsItem.__init__(self)
self.setGLOptions(glOptions)
self.antialias = antialias
if size is None:
size = QtGui.QVector3D(1,1,1)
self.setSize(size=size)
def setSize(self, x=None, y=None, z=None, size=None):
"""
Set the size of the grid (in its local coordinate system; this does not affect the transform)
Arguments can be x,y,z or size=QVector3D().
"""
if size is not None:
x = size.x()
y = size.y()
z = size.z()
self.__size = [x,y,z]
self.update()
def size(self):
return self.__size[:]
def paint(self):
self.setupGLState()
if self.antialias:
glEnable(GL_LINE_SMOOTH)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST)
glBegin( GL_LINES )
x,y,z = self.size()
glColor4f(1, 1, 1, .3)
for x in range(-10, 11):
glVertex3f(x, -10, 0)
glVertex3f(x, 10, 0)
for y in range(-10, 11):
glVertex3f(-10, y, 0)
glVertex3f( 10, y, 0)
glEnd()
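# Editor's usage sketch (assumes a working pyqtgraph.opengl installation):
#
#   import pyqtgraph.opengl as gl
#   from pyqtgraph.Qt import QtGui
#   app = QtGui.QApplication([])
#   view = gl.GLViewWidget(); view.show()
#   grid = GLGridItem()
#   grid.scale(2, 2, 1)  # scale() is inherited from GLGraphicsItem
#   view.addItem(grid)
#   app.exec_()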
|
mit
| -7,482,402,917,673,212,000
| 27.448276
| 101
| 0.526061
| false
| 3.4375
| false
| false
| false
|
davy39/eric
|
Debugger/VariablesFilterDialog.py
|
1
|
3253
|
# -*- coding: utf-8 -*-
# Copyright (c) 2002 - 2014 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the variables filter dialog.
"""
from __future__ import unicode_literals
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from Debugger.Config import ConfigVarTypeDispStrings
import Preferences
from .Ui_VariablesFilterDialog import Ui_VariablesFilterDialog
class VariablesFilterDialog(QDialog, Ui_VariablesFilterDialog):
"""
Class implementing the variables filter dialog.
It opens a dialog window for the configuration of the variables type
filter to be applied during a debugging session.
"""
def __init__(self, parent=None, name=None, modal=False):
"""
Constructor
@param parent parent widget of this dialog (QWidget)
@param name name of this dialog (string)
@param modal flag to indicate a modal dialog (boolean)
"""
super(VariablesFilterDialog, self).__init__(parent)
if name:
self.setObjectName(name)
self.setModal(modal)
self.setupUi(self)
self.defaultButton = self.buttonBox.addButton(
self.tr("Save Default"), QDialogButtonBox.ActionRole)
lDefaultFilter, gDefaultFilter = Preferences.getVarFilters()
# populate the listboxes and set the default selection
for lb in self.localsList, self.globalsList:
for ts in ConfigVarTypeDispStrings:
lb.addItem(self.tr(ts))
for filterIndex in lDefaultFilter:
itm = self.localsList.item(filterIndex)
itm.setSelected(True)
for filterIndex in gDefaultFilter:
itm = self.globalsList.item(filterIndex)
itm.setSelected(True)
def getSelection(self):
"""
Public slot to retrieve the current selections.
@return A tuple of lists of integer values. The first list is the
locals variables filter, the second the globals variables filter.
"""
lList = []
gList = []
for i in range(self.localsList.count()):
itm = self.localsList.item(i)
if itm.isSelected():
lList.append(i)
for i in range(self.globalsList.count()):
itm = self.globalsList.item(i)
if itm.isSelected():
gList.append(i)
return (lList, gList)
def setSelection(self, lList, gList):
"""
Public slot to set the current selection.
@param lList local variables filter (list of int)
@param gList global variables filter (list of int)
"""
for filterIndex in lList:
itm = self.localsList.item(filterIndex)
itm.setSelected(True)
for filterIndex in gList:
itm = self.globalsList.item(filterIndex)
itm.setSelected(True)
def on_buttonBox_clicked(self, button):
"""
Private slot called by a button of the button box clicked.
@param button button that was clicked (QAbstractButton)
"""
if button == self.defaultButton:
Preferences.setVarFilters(self.getSelection())
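# Editor's usage sketch: exercising the dialog standalone.
#
#   dlg = VariablesFilterDialog(None, 'VarFilters', modal=True)
#   if dlg.exec_() == QDialog.Accepted:
#       localsFilter, globalsFilter = dlg.getSelection()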
|
gpl-3.0
| 3,282,031,345,774,328,300
| 32.193878
| 77
| 0.618813
| false
| 4.331558
| false
| false
| false
|
s-tar/just-a-chat
|
kernel/server.py
|
1
|
3625
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'mr.S'
import bottle
from gevent import monkey
import datetime
from kernel.widget import get as loadWidget
from kernel.helpers import is_ajax
from bottle import default_app, Bottle, route, static_file, ServerAdapter, Jinja2Template, request, error, redirect, jinja2_template as template
from kernel.session import Session
from beaker.middleware import SessionMiddleware
from kernel.user import User
import time
import kernel.db
import sys
import os
monkey.patch_all()
bottle.debug(True)
app = application = default_app()
reload(sys)
sys.setdefaultencoding('UTF8')
template_path = './templates/default/'
bottle.TEMPLATE_PATH.insert(0, template_path)
def run(run=False):
global app
import kernel.module
# redistogo_url = os.getenv('REDISTOGO_URL', None)
# if redistogo_url == None:
# redis_url = '127.0.0.1:6379'
# else:
# redis_url = redistogo_url
# redis_url = redis_url.split('redis://redistogo:')[1]
# redis_url = redis_url.split('/')[0]
# REDIS_PWD, REDIS_HOST = redis_url.split('@', 1)
# redis_url = "%s?password=%s" % (REDIS_HOST, REDIS_PWD)
# session_opts = {
# 'session.type': 'redis',
# 'session.url': redis_url,
# 'session.key': 'just_a_chat',
# 'session.auto': True, }
session_opts = {
'session.type': 'file',
'session.data_dir': './temp/sessions',
'session.cookie_expires': 7*24*60*60,
'session.auto': True}
class BeforeRequestMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, e, h):
e['PATH_INFO'] = e['PATH_INFO'].rstrip('/')
return self.app(e, h)
Jinja2Template.defaults = {
'widget': loadWidget,
'is_ajax': is_ajax,
'modules': kernel.module.modules,
'datetime': datetime
}
Jinja2Template.settings = {
'filters': {
'nl2br': lambda value: value.replace('\n', '<br>\n')
}
}
@app.route('/static/<path:path>')
def static(path):
return static_file(path, './templates/default/static/')
@app.route('/file/<path:path>')
def file(path):
return static_file(path, './files/')
@app.post('/widget/<name:path>')
def widget(name):
try:
data = request.json['data'] if request.json is not None and 'data' in request.json else {}
return loadWidget(name, data, wrap=False)
except ValueError:
bottle.response.status = 404
@app.error(404)
def error404(error):
return template("404")
@app.hook('before_request')
def before_request():
request.session = Session(request.environ)
request.db = kernel.db.Database()
request.user = User(request.session, request.db)
Jinja2Template.defaults['user'] = request.user
@app.hook('after_request')
def after_request():
if 'db' in request:
request.db().close()
app = BeforeRequestMiddleware(app)
app = SessionMiddleware(app, session_opts)
#bottle.run(app, host='192.168.1.2', port=3000)
if run:
import kernel.socket
from socketio.server import SocketIOServer
SocketIOServer(('192.168.1.2', 3000), app).serve_forever()
def get_environment():
if request.environ['PATH_INFO'].startswith('/admin/') or request.environ['PATH_INFO'] == '/admin':
return 'admin'
else:
return 'site'
files_dir = os.path.abspath("./files/")
from modules import *
__all__ = ["app", "session", "files_dir"]
|
mit
| 8,543,817,912,420,767,000
| 27.551181
| 144
| 0.608828
| false
| 3.482229
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_06_01/aio/operations/_connection_monitors_operations.py
|
1
|
45799
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ConnectionMonitorsOperations:
"""ConnectionMonitorsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.ConnectionMonitor",
**kwargs
) -> "_models.ConnectionMonitorResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectionMonitor')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.ConnectionMonitor",
**kwargs
) -> AsyncLROPoller["_models.ConnectionMonitorResult"]:
"""Create or update a connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters that define the operation to create a connection monitor.
:type parameters: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitor
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionMonitorResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
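# Editor's usage sketch (client construction assumed, not shown):
#
#   poller = await client.connection_monitors.begin_create_or_update(
#       "my-rg", "my-watcher", "my-monitor", parameters)
#   result = await poller.result()  # ConnectionMonitorResult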
async def get(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> "_models.ConnectionMonitorResult":
"""Gets a connection monitor by name.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionMonitorResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def update_tags(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.ConnectionMonitorResult":
"""Update tags of the specified connection monitor.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters supplied to update connection monitor tags.
:type parameters: ~azure.mgmt.network.v2019_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionMonitorResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
async def _stop_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
async def begin_stop(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Stops the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
async def _start_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
async def begin_start(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Starts the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
async def _query_initial(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> "_models.ConnectionMonitorQueryResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
# Construct URL
url = self._query_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
async def begin_query(
self,
resource_group_name: str,
network_watcher_name: str,
connection_monitor_name: str,
**kwargs
) -> AsyncLROPoller["_models.ConnectionMonitorQueryResult"]:
"""Query a snapshot of the most recent connection states.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name given to the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionMonitorQueryResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorQueryResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._query_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
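# Illustrative call pattern for the poller above (a sketch, not part of the
# generated code; the `network_client` name is an assumption):
#
#   poller = await network_client.connection_monitors.begin_query(
#       resource_group_name="rg", network_watcher_name="nw",
#       connection_monitor_name="cm")
#   states = await poller.result()  # ConnectionMonitorQueryResult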
def list(
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs
) -> AsyncIterable["_models.ConnectionMonitorListResult"]:
"""Lists all connection monitors for the specified Network Watcher.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ConnectionMonitorListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ConnectionMonitorListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors'} # type: ignore
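# Illustrative iteration over the pager above (a sketch; `network_client` is
# an assumed name):
#
#   async for monitor in network_client.connection_monitors.list("rg", "nw"):
#       print(monitor.name)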
|
mit
| -1,777,509,022,329,166,800
| 52.069525
| 242
| 0.658551
| false
| 4.347318
| true
| false
| false
|
keepkey/python-keepkey
|
keepkeylib/client.py
|
1
|
50402
|
# This file is part of the TREZOR project.
#
# Copyright (C) 2012-2016 Marek Palatinus <slush@satoshilabs.com>
# Copyright (C) 2012-2016 Pavol Rusnak <stick@satoshilabs.com>
# Copyright (C) 2016 Jochen Hoenicke <hoenicke@gmail.com>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
#
# The script has been modified for KeepKey Device.
from __future__ import print_function, absolute_import
import os
import sys
import time
import binascii
import hashlib
import unicodedata
import json
import getpass
import copy
from mnemonic import Mnemonic
from . import tools
from . import mapping
from . import messages_pb2 as proto
from . import messages_eos_pb2 as eos_proto
from . import messages_nano_pb2 as nano_proto
from . import messages_cosmos_pb2 as cosmos_proto
from . import messages_ripple_pb2 as ripple_proto
from . import messages_tendermint_pb2 as tendermint_proto
from . import messages_thorchain_pb2 as thorchain_proto
from . import types_pb2 as types
from . import eos
from . import nano
from .debuglink import DebugLink
# try:
# from PIL import Image
# SCREENSHOT = True
# except:
# SCREENSHOT = False
SCREENSHOT = False
DEFAULT_CURVE = 'secp256k1'
# monkeypatching: text formatting of protobuf messages
tools.monkeypatch_google_protobuf_text_format()
def get_buttonrequest_value(code):
# Converts integer code to its string representation of ButtonRequestType
return [ k for k, v in types.ButtonRequestType.items() if v == code][0]
def pprint(msg):
msg_class = msg.__class__.__name__
msg_size = msg.ByteSize()
"""
msg_ser = msg.SerializeToString()
msg_id = mapping.get_type(msg)
msg_json = json.dumps(protobuf_json.pb2json(msg))
"""
if isinstance(msg, proto.FirmwareUpload):
return "<%s> (%d bytes):\n" % (msg_class, msg_size)
else:
return "<%s> (%d bytes):\n%s" % (msg_class, msg_size, msg)
def log(msg):
sys.stderr.write(msg + '\n')
sys.stderr.flush()
def log_cr(msg):
sys.stdout.write('\r' + msg)
sys.stdout.flush()
def format_mnemonic(word_pos, character_pos):
return "WORD %d: %s" % (word_pos, character_pos * '*')
def getch():
try:
import termios
except ImportError:
# Non-POSIX. Return msvcrt's (Windows') getch.
import msvcrt
return msvcrt.getch()
# POSIX system. Create and return a getch that manipulates the tty.
import sys, tty
def _getch():
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(fd)
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
return _getch()
class CallException(Exception):
def __init__(self, code, message):
super(CallException, self).__init__()
self.args = [code, message]
class PinException(CallException):
pass
class field(object):
# Decorator extracts single value from
# protobuf object. If the field is not
# set, the field's default value is returned.
def __init__(self, field):
self.field = field
def __call__(self, f):
def wrapped_f(*args, **kwargs):
ret = f(*args, **kwargs)
ret.HasField(self.field)
return getattr(ret, self.field)
return wrapped_f
class expect(object):
# Decorator checks if the method
# returned one of expected protobuf messages
# or raises an exception
def __init__(self, *expected):
self.expected = expected
def __call__(self, f):
def wrapped_f(*args, **kwargs):
ret = f(*args, **kwargs)
if not isinstance(ret, self.expected):
raise Exception("Got %s, expected %s" % (ret.__class__, self.expected))
return ret
return wrapped_f
def session(f):
# Decorator wraps a BaseClient method
# with session activation / deactivation
def wrapped_f(*args, **kwargs):
client = args[0]
try:
client.transport.session_begin()
return f(*args, **kwargs)
finally:
client.transport.session_end()
return wrapped_f
def normalize_nfc(txt):
if sys.version_info[0] < 3:
if isinstance(txt, unicode):
return unicodedata.normalize('NFC', txt)
if isinstance(txt, str):
return unicodedata.normalize('NFC', txt.decode('utf-8'))
else:
if isinstance(txt, bytes):
return unicodedata.normalize('NFC', txt.decode('utf-8'))
if isinstance(txt, str):
return unicodedata.normalize('NFC', txt)
raise Exception('unicode/str or bytes/str expected')
class BaseClient(object):
# Implements very basic layer of sending raw protobuf
# messages to device and getting its response back.
def __init__(self, transport, **kwargs):
self.transport = transport
self.verbose = False
super(BaseClient, self).__init__() # *args, **kwargs)
def cancel(self):
self.transport.write(proto.Cancel())
@session
def call_raw(self, msg):
self.transport.write(msg)
return self.transport.read_blocking()
@session
def call(self, msg):
resp = self.call_raw(msg)
handler_name = "callback_%s" % resp.__class__.__name__
handler = getattr(self, handler_name, None)
if handler != None:
msg = handler(resp)
if msg == None:
raise Exception("Callback %s must return protobuf message, not None" % handler)
resp = self.call(msg)
return resp
def callback_Failure(self, msg):
if msg.code in (types.Failure_PinInvalid,
types.Failure_PinCancelled, types.Failure_PinExpected):
raise PinException(msg.code, msg.message)
raise CallException(msg.code, msg.message)
def close(self):
self.transport.close()
class DebugWireMixin(object):
def call_raw(self, msg):
log("SENDING " + pprint(msg))
resp = super(DebugWireMixin, self).call_raw(msg)
log("RECEIVED " + pprint(resp))
return resp
class TextUIMixin(object):
# This class demonstrates easy text-based UI
# integration between the device and wallet.
# You can implement similar functionality
# by implementing your own GuiMixin with
# graphical widgets for every type of these callbacks.
def __init__(self, *args, **kwargs):
super(TextUIMixin, self).__init__(*args, **kwargs)
self.character_request_first_pass = True
def callback_ButtonRequest(self, msg):
# log("Sending ButtonAck for %s " % get_buttonrequest_value(msg.code))
return proto.ButtonAck()
def callback_RecoveryMatrix(self, msg):
if self.recovery_matrix_first_pass:
self.recovery_matrix_first_pass = False
log("Use the numeric keypad to describe positions. For the word list use only left and right keys. The layout is:")
log(" 7 8 9 7 | 9")
log(" 4 5 6 4 | 6")
log(" 1 2 3 1 | 3")
while True:
character = getch()
if character in ('\x03', '\x04'):
return proto.Cancel()
if character in ('\x08', '\x7f'):
return proto.WordAck(word='\x08')
# ignore middle column if only 6 keys requested.
if (msg.type == types.WordRequestType_Matrix6 and
character in ('2', '5', '8')):
continue
if (ord(character) >= ord('1') and ord(character) <= ord('9')):
return proto.WordAck(word=character)
def callback_PinMatrixRequest(self, msg):
if msg.type == 1:
desc = 'current PIN'
elif msg.type == 2:
desc = 'new PIN'
elif msg.type == 3:
desc = 'new PIN again'
else:
desc = 'PIN'
log("Use the numeric keypad to describe number positions. The layout is:")
log(" 7 8 9")
log(" 4 5 6")
log(" 1 2 3")
log("Please enter %s: " % desc)
pin = getpass.getpass('')
return proto.PinMatrixAck(pin=pin)
def callback_PassphraseRequest(self, msg):
log("Passphrase required: ")
passphrase = getpass.getpass('')
log("Confirm your Passphrase: ")
if passphrase == getpass.getpass(''):
passphrase = normalize_nfc(passphrase)
return proto.PassphraseAck(passphrase=passphrase)
else:
log("Passphrase did not match! ")
exit()
def callback_CharacterRequest(self, msg):
if self.character_request_first_pass:
self.character_request_first_pass = False
log("Use recovery cipher on device to input mnemonic. Words are autocompleted at 3 or 4 characters.")
log("(use spacebar to progress to next word after match, use backspace to correct bad character or word entries)")
# format mnemonic for console
formatted_console = format_mnemonic(msg.word_pos + 1, msg.character_pos)
# clear the runway before we display formatted mnemonic
log_cr(' ' * 14)
log_cr(formatted_console)
while True:
character = getch().lower()
# capture escape
if character in ('\x03', '\x04'):
return proto.Cancel()
character_ascii = ord(character)
if character_ascii >= 97 and character_ascii <= 122 \
and msg.character_pos != 4:
# capture characters a-z
return proto.CharacterAck(character=character)
elif character_ascii == 32 and msg.word_pos < 23 \
and msg.character_pos >= 3:
# capture spaces
return proto.CharacterAck(character=' ')
elif (character_ascii == 8 or character_ascii == 127) \
and (msg.word_pos > 0 or msg.character_pos > 0):
# capture backspaces
return proto.CharacterAck(delete=True)
elif character_ascii == 13 and msg.word_pos in (11, 17, 23):
# capture returns
log("")
return proto.CharacterAck(done=True)
class DebugLinkMixin(object):
# This class implements automatic responses
# and other functionality for unit tests
# for various callbacks, created in order
# to automatically pass unit tests.
#
# This mixin should be used only
# of unit testing, because it will fail to work
# without special DebugLink interface provided
# by the device.
def __init__(self, *args, **kwargs):
super(DebugLinkMixin, self).__init__(*args, **kwargs)
self.debug = None
self.in_with_statement = 0
self.button_wait = 0
self.screenshot_id = 0
# Always press Yes and provide correct pin
self.setup_debuglink(True, True)
self.auto_button = True
# Do not expect any specific response from device
self.expected_responses = None
# Use blank passphrase
self.set_passphrase('')
def close(self):
super(DebugLinkMixin, self).close()
if self.debug:
self.debug.close()
def set_debuglink(self, debug_transport):
self.debug = DebugLink(debug_transport)
def set_buttonwait(self, secs):
self.button_wait = secs
def __enter__(self):
# For usage in with/expected_responses
self.in_with_statement += 1
return self
def __exit__(self, _type, value, traceback):
self.in_with_statement -= 1
if _type != None:
# Another exception raised
return False
# return isinstance(value, TypeError)
# Evaluate missed responses in 'with' statement
if self.expected_responses != None and len(self.expected_responses):
raise Exception("Some of expected responses didn't come from device: %s" % \
[ pprint(x) for x in self.expected_responses ])
# Cleanup
self.expected_responses = None
return False
def set_expected_responses(self, expected):
if not self.in_with_statement:
raise Exception("Must be called inside 'with' statement")
self.expected_responses = expected
def setup_debuglink(self, button, pin_correct):
self.button = button # True -> YES button, False -> NO button
self.pin_correct = pin_correct
def set_passphrase(self, passphrase):
self.passphrase = normalize_nfc(passphrase)
def set_mnemonic(self, mnemonic):
self.mnemonic = normalize_nfc(mnemonic).split(' ')
def call_raw(self, msg):
if SCREENSHOT and self.debug:
layout = self.debug.read_layout()
im = Image.new("RGB", (128, 64))
pix = im.load()
for x in range(128):
for y in range(64):
rx, ry = 127 - x, 63 - y
if (ord(layout[rx + (ry // 8) * 128]) & (1 << (ry % 8))) > 0:
pix[x, y] = (255, 255, 255)
im.save('scr%05d.png' % self.screenshot_id)
self.screenshot_id += 1
resp = super(DebugLinkMixin, self).call_raw(msg)
self._check_request(resp)
return resp
def _check_request(self, msg):
if self.expected_responses != None:
try:
expected = self.expected_responses.pop(0)
except IndexError:
raise CallException(types.Failure_Other,
"Got %s, but no message has been expected" % pprint(msg))
if msg.__class__ != expected.__class__:
raise CallException(types.Failure_Other,
"Expected %s, got %s" % (pprint(expected), pprint(msg)))
fields = expected.ListFields() # only filled (including extensions)
for field, value in fields:
if not msg.HasField(field.name) or getattr(msg, field.name) != value:
raise CallException(types.Failure_Other,
"Expected %s, got %s" % (pprint(expected), pprint(msg)))
def callback_ButtonRequest(self, msg):
if self.verbose:
log("ButtonRequest code: " + get_buttonrequest_value(msg.code))
if self.auto_button:
if self.verbose:
log("Pressing button " + str(self.button))
if self.button_wait:
if self.verbose:
log("Waiting %d seconds " % self.button_wait)
time.sleep(self.button_wait)
self.debug.press_button(self.button)
return proto.ButtonAck()
def callback_PinMatrixRequest(self, msg):
if self.pin_correct:
pin = self.debug.read_pin_encoded()
else:
pin = '444222'
return proto.PinMatrixAck(pin=pin)
def callback_PassphraseRequest(self, msg):
if self.verbose:
log("Provided passphrase: '%s'" % self.passphrase)
return proto.PassphraseAck(passphrase=self.passphrase)
class ProtocolMixin(object):
PRIME_DERIVATION_FLAG = 0x80000000
VENDORS = ('keepkey.com',)
def __init__(self, *args, **kwargs):
super(ProtocolMixin, self).__init__(*args, **kwargs)
self.init_device()
self.tx_api = None
def set_tx_api(self, tx_api):
self.tx_api = tx_api
def get_tx_api(self):
return self.tx_api
def init_device(self):
self.features = expect(proto.Features)(self.call)(proto.Initialize())
if str(self.features.vendor) not in self.VENDORS:
raise Exception("Unsupported device")
def _get_local_entropy(self):
return os.urandom(32)
def _convert_prime(self, n):
# Convert minus signs to uint32 with flag
return [ int(abs(x) | self.PRIME_DERIVATION_FLAG) if x < 0 else x for x in n ]
@staticmethod
def expand_path(n):
# Convert string of bip32 path to list of uint32 integers with prime flags
# 0/-1/1' -> [0, 0x80000001, 0x80000001]
if not n:
return []
n = n.split('/')
# m/a/b/c => a/b/c
if n[0] == 'm':
n = n[1:]
# coin_name/a/b/c => 44'/SLIP44_constant'/a/b/c
# https://github.com/satoshilabs/slips/blob/master/slip-0044.md
coins = {
"Bitcoin": 0,
"Testnet": 1,
"Litecoin": 2,
"Dogecoin": 3,
"Dash": 5,
"Namecoin": 7,
"Bitsend": 91,
"Groestlcoin": 17,
"Zcash": 133,
"BitcoinCash": 145,
"Bitcore": 160,
"Megacoin": 217,
"Bitcloud": 218,
"Axe": 4242,
}
if n[0] in coins:
n = ["44'", "%d'" % coins[n[0]] ] + n[1:]
path = []
for x in n:
prime = False
if x.endswith("'"):
x = x.replace('\'', '')
prime = True
if x.startswith('-'):
prime = True
x = abs(int(x))
if prime:
x |= ProtocolMixin.PRIME_DERIVATION_FLAG
path.append(x)
return path
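# Worked examples for expand_path (values follow from the rules above;
# 0x80000000 is the hardening flag, so 44' == 2147483692):
#   expand_path("m/44'/0'/0'/0/0") -> [2147483692, 2147483648, 2147483648, 0, 0]
#   expand_path("Bitcoin/0/1")     -> [2147483692, 2147483648, 0, 1]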
@expect(proto.PublicKey)
def get_public_node(self, n, ecdsa_curve_name=DEFAULT_CURVE, show_display=False, coin_name=None, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
if not ecdsa_curve_name:
ecdsa_curve_name=DEFAULT_CURVE
return self.call(proto.GetPublicKey(address_n=n, ecdsa_curve_name=ecdsa_curve_name, show_display=show_display, coin_name=coin_name, script_type=script_type))
@field('address')
@expect(proto.Address)
def get_address(self, coin_name, n, show_display=False, multisig=None, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
if multisig:
return self.call(proto.GetAddress(address_n=n, coin_name=coin_name, show_display=show_display, multisig=multisig, script_type=script_type))
else:
return self.call(proto.GetAddress(address_n=n, coin_name=coin_name, show_display=show_display, script_type=script_type))
@field('address')
@expect(proto.EthereumAddress)
def ethereum_get_address(self, n, show_display=False, multisig=None):
n = self._convert_prime(n)
return self.call(proto.EthereumGetAddress(address_n=n, show_display=show_display))
@session
def ethereum_sign_tx(self, n, nonce, gas_price, gas_limit, value, to=None, to_n=None, address_type=None, exchange_type=None, data=None, chain_id=None):
from keepkeylib.tools import int_to_big_endian
n = self._convert_prime(n)
if address_type == types.TRANSFER: #Ethereum transfer transaction
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value),
to_address_n=to_n,
address_type=address_type
)
elif address_type == types.EXCHANGE: #Ethereum exchange transaction
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value),
to_address_n=to_n,
exchange_type=exchange_type,
address_type=address_type
)
else:
msg = proto.EthereumSignTx(
address_n=n,
nonce=int_to_big_endian(nonce),
gas_price=int_to_big_endian(gas_price),
gas_limit=int_to_big_endian(gas_limit),
value=int_to_big_endian(value)
)
if to:
msg.to = to
if data:
msg.data_length = len(data)
data, chunk = data[1024:], data[:1024]
msg.data_initial_chunk = chunk
if chain_id:
msg.chain_id = chain_id
response = self.call(msg)
while response.HasField('data_length'):
data_length = response.data_length
data, chunk = data[data_length:], data[:data_length]
response = self.call(proto.EthereumTxAck(data_chunk=chunk))
if address_type:
return response.signature_v, response.signature_r, response.signature_s, response.hash, response.signature_der
else:
return response.signature_v, response.signature_r, response.signature_s
@expect(eos_proto.EosPublicKey)
def eos_get_public_key(self, address_n, show_display=True, legacy=True):
msg = eos_proto.EosGetPublicKey(
address_n=address_n,
show_display=show_display,
kind = eos_proto.EOS if legacy else eos_proto.EOS_K1
)
return self.call(msg)
@session
def eos_sign_tx_raw(self, msg, actions):
response = self.call(msg)
for common, action in actions:
if isinstance(action, eos_proto.EosActionTransfer):
msg = eos_proto.EosTxActionAck(common=common, transfer=action)
elif isinstance(action, eos_proto.EosActionDelegate):
msg = eos_proto.EosTxActionAck(common=common, delegate=action)
elif isinstance(action, eos_proto.EosActionUndelegate):
msg = eos_proto.EosTxActionAck(common=common, undelegate=action)
elif isinstance(action, eos_proto.EosActionRefund):
msg = eos_proto.EosTxActionAck(common=common, refund=action)
elif isinstance(action, eos_proto.EosActionBuyRam):
msg = eos_proto.EosTxActionAck(common=common, buy_ram=action)
elif isinstance(action, eos_proto.EosActionBuyRamBytes):
msg = eos_proto.EosTxActionAck(common=common, buy_ram_bytes=action)
elif isinstance(action, eos_proto.EosActionSellRam):
msg = eos_proto.EosTxActionAck(common=common, sell_ram=action)
elif isinstance(action, eos_proto.EosActionVoteProducer):
msg = eos_proto.EosTxActionAck(common=common, vote_producer=action)
elif isinstance(action, eos_proto.EosActionUpdateAuth):
msg = eos_proto.EosTxActionAck(common=common, update_auth=action)
elif isinstance(action, eos_proto.EosActionDeleteAuth):
msg = eos_proto.EosTxActionAck(common=common, delete_auth=action)
elif isinstance(action, eos_proto.EosActionUnlinkAuth):
msg = eos_proto.EosTxActionAck(common=common, unlink_auth=action)
elif isinstance(action, eos_proto.EosActionLinkAuth):
msg = eos_proto.EosTxActionAck(common=common, link_auth=action)
elif isinstance(action, eos_proto.EosActionNewAccount):
msg = eos_proto.EosTxActionAck(common=common, new_account=action)
elif isinstance(action, eos_proto.EosActionUnknown):
msg = eos_proto.EosTxActionAck(common=common, unknown=action)
else:
raise Exception("Unknown EOS Action")
response = self.call(msg)
if not isinstance(response, eos_proto.EosSignedTx):
raise Exception("Unexpected EOS signing response")
return response
@session
def eos_sign_tx(self, n, transaction):
tx = eos.parse_transaction_json(copy.deepcopy(transaction))
header = eos_proto.EosTxHeader(
expiration=tx.expiration,
ref_block_num=tx.ref_block_num,
ref_block_prefix=tx.ref_block_prefix,
max_net_usage_words=tx.net_usage_words,
max_cpu_usage_ms=tx.max_cpu_usage_ms,
delay_sec=tx.delay_sec)
msg = eos_proto.EosSignTx(
address_n=n,
chain_id=tx.chain_id,
header=header,
num_actions=tx.num_actions)
response = self.call(msg)
try:
while isinstance(response, eos_proto.EosTxActionRequest):
a = eos.parse_action(tx.actions.pop(0))
if isinstance(a, list):
while len(a) and isinstance(response, eos_proto.EosTxActionRequest):
response = self.call(a.pop(0))
else:
response = self.call(a)
except IndexError:
# pop from empty list
raise Exception("Unexpected EOS signing response")
if not isinstance(response, eos_proto.EosSignedTx):
raise Exception("Unexpected EOS signing response")
return response
@expect(nano_proto.NanoAddress)
def nano_get_address(self, coin_name, address_n, show_display=False):
msg = nano_proto.NanoGetAddress(
coin_name=coin_name,
address_n=address_n,
show_display=show_display)
return self.call(msg)
@expect(nano_proto.NanoSignedTx)
def nano_sign_tx(
self, coin_name, address_n,
grandparent_hash=None,
parent_link=None,
parent_representative=None,
parent_balance=None,
link_hash=None,
link_recipient=None,
link_recipient_n=None,
representative=None,
balance=None,
):
parent_block = None
if (grandparent_hash is not None or
parent_link is not None or
parent_representative is not None or
parent_balance is not None):
parent_block = nano_proto.NanoSignTx.ParentBlock(
parent_hash=grandparent_hash,
link=parent_link,
representative=parent_representative,
balance=nano.encode_balance(parent_balance),
)
msg = nano_proto.NanoSignTx(
coin_name=coin_name,
address_n=address_n,
parent_block=parent_block,
link_hash=link_hash,
link_recipient=link_recipient,
link_recipient_n=link_recipient_n,
representative=representative,
balance=nano.encode_balance(balance),
)
return self.call(msg)
@field('address')
@expect(cosmos_proto.CosmosAddress)
def cosmos_get_address(self, address_n, show_display=False):
return self.call(
cosmos_proto.CosmosGetAddress(address_n=address_n, show_display=show_display)
)
@session
def cosmos_sign_tx(
self,
address_n,
account_number,
chain_id,
fee,
gas,
msgs,
memo,
sequence,
exchange_types=None
):
resp = self.call(cosmos_proto.CosmosSignTx(
address_n=address_n,
account_number=account_number,
chain_id=chain_id,
fee_amount=fee,
gas=gas,
memo=memo,
sequence=sequence,
msg_count=len(msgs)
))
for (msg, exchange_type) in zip(msgs, exchange_types or [None] * len(msgs)):
if not isinstance(resp, cosmos_proto.CosmosMsgRequest):
raise CallException(
"Cosmos.ExpectedMsgRequest",
"Message request expected but not received.",
)
if msg['type'] == "cosmos-sdk/MsgSend":
if len(msg['value']['amount']) != 1:
raise CallException("Cosmos.MsgSend", "Multiple amounts per msg not supported")
denom = msg['value']['amount'][0]['denom']
if denom != 'uatom':
raise CallException("Cosmos.MsgSend", "Unsupported denomination: " + denom)
resp = self.call(cosmos_proto.CosmosMsgAck(
send=cosmos_proto.CosmosMsgSend(
from_address=msg['value']['from_address'],
to_address=msg['value']['to_address'],
amount=int(msg['value']['amount'][0]['amount']),
address_type=types.EXCHANGE if exchange_type is not None else types.SPEND,
exchange_type=exchange_type
)
))
else:
raise CallException(
"Cosmos.UnknownMsg",
"Cosmos message %s is not yet supported" % (msg['type'],)
)
if not isinstance(resp, cosmos_proto.CosmosSignedTx):
raise CallException(
"Cosmos.UnexpectedEndOfOperations",
"Reached end of operations without a signature.",
)
return resp
@field('address')
@expect(thorchain_proto.ThorchainAddress)
def thorchain_get_address(self, address_n, show_display=False, testnet=False):
return self.call(
thorchain_proto.ThorchainGetAddress(address_n=address_n, show_display=show_display, testnet=testnet)
)
@session
def thorchain_sign_tx(
self,
address_n,
account_number,
chain_id,
fee,
gas,
msgs,
memo,
sequence,
exchange_types=None,
testnet=None
):
resp = self.call(thorchain_proto.ThorchainSignTx(
address_n=address_n,
account_number=account_number,
chain_id=chain_id,
fee_amount=fee,
gas=gas,
memo=memo,
sequence=sequence,
msg_count=len(msgs),
testnet=testnet
))
for (msg, exchange_type) in zip(msgs, exchange_types or [None] * len(msgs)):
if not isinstance(resp, thorchain_proto.ThorchainMsgRequest):
raise CallException(
"Thorchain.ExpectedMsgRequest",
"Message request expected but not received.",
)
if msg['type'] == "thorchain/MsgSend":
if len(msg['value']['amount']) != 1:
raise CallException("Thorchain.MsgSend", "Multiple amounts per send msg not supported")
denom = msg['value']['amount'][0]['denom']
if denom != 'rune':
raise CallException("Thorchain.MsgSend", "Unsupported denomination: " + denom)
resp = self.call(thorchain_proto.ThorchainMsgAck(
send=thorchain_proto.ThorchainMsgSend(
from_address=msg['value']['from_address'],
to_address=msg['value']['to_address'],
amount=int(msg['value']['amount'][0]['amount']),
address_type=types.EXCHANGE if exchange_type is not None else types.SPEND,
exchange_type=exchange_type
)
))
elif msg['type'] == "thorchain/MsgDeposit":
if len(msg['value']['coins']) != 1:
raise CallException("Thorchain.MsgDeposit", "Multiple coins per deposit msg not supported")
asset = msg['value']['coins'][0]['asset']
if asset != 'THOR.RUNE':
raise CallException("Thorchain.MsgDeposit", "Unsupported asset: " + asset)
resp = self.call(thorchain_proto.ThorchainMsgAck(
deposit=thorchain_proto.ThorchainMsgDeposit(
asset=asset,
amount=int(msg['value']['coins'][0]['amount']),
memo=msg['value']['memo'],
signer=msg['value']['signer']
)
))
else:
raise CallException(
"Thorchain.UnknownMsg",
"Thorchain message %s is not yet supported" % (msg['type'],)
)
if not isinstance(resp, thorchain_proto.ThorchainSignedTx):
raise CallException(
"Thorchain.UnexpectedEndOfOperations",
"Reached end of operations without a signature.",
)
return resp
@field('address')
@expect(ripple_proto.RippleAddress)
def ripple_get_address(self, address_n, show_display=False):
return self.call(
ripple_proto.RippleGetAddress(address_n=address_n, show_display=show_display)
)
@session
@expect(ripple_proto.RippleSignedTx)
def ripple_sign_tx(self, address_n, msg):
msg.address_n = address_n
return self.call(msg)
@field('entropy')
@expect(proto.Entropy)
def get_entropy(self, size):
return self.call(proto.GetEntropy(size=size))
@field('message')
@expect(proto.Success)
def ping(self, msg, button_protection=False, pin_protection=False, passphrase_protection=False):
msg = proto.Ping(message=msg,
button_protection=button_protection,
pin_protection=pin_protection,
passphrase_protection=passphrase_protection)
return self.call(msg)
def get_device_id(self):
return self.features.device_id
@field('message')
@expect(proto.Success)
def apply_settings(self, label=None, language=None, use_passphrase=None, homescreen=None):
settings = proto.ApplySettings()
if label != None:
settings.label = label
if language:
settings.language = language
if use_passphrase != None:
settings.use_passphrase = use_passphrase
out = self.call(settings)
self.init_device() # Reload Features
return out
@field('message')
@expect(proto.Success)
def apply_policy(self, policy_name, enabled):
policy = types.PolicyType(policy_name=policy_name, enabled=enabled)
apply_policies = proto.ApplyPolicies(policy=[policy])
out = self.call(apply_policies)
self.init_device() # Reload Features
return out
@field('message')
@expect(proto.Success)
def clear_session(self):
return self.call(proto.ClearSession())
@field('message')
@expect(proto.Success)
def change_pin(self, remove=False):
ret = self.call(proto.ChangePin(remove=remove))
self.init_device() # Re-read features
return ret
@expect(proto.MessageSignature)
def sign_message(self, coin_name, n, message, script_type=types.SPENDADDRESS):
n = self._convert_prime(n)
# Convert message to UTF8 NFC (seems to be a bitcoin-qt standard)
message = normalize_nfc(message).encode("utf-8")
return self.call(proto.SignMessage(coin_name=coin_name, address_n=n, message=message, script_type=script_type))
@expect(proto.SignedIdentity)
def sign_identity(self, identity, challenge_hidden, challenge_visual, ecdsa_curve_name=DEFAULT_CURVE):
return self.call(proto.SignIdentity(identity=identity, challenge_hidden=challenge_hidden, challenge_visual=challenge_visual, ecdsa_curve_name=ecdsa_curve_name))
def verify_message(self, coin_name, address, signature, message):
# Convert message to UTF8 NFC (seems to be a bitcoin-qt standard)
message = normalize_nfc(message).encode("utf-8")
try:
resp = self.call(proto.VerifyMessage(address=address, signature=signature, message=message, coin_name=coin_name))
except CallException as e:
resp = e
if isinstance(resp, proto.Success):
return True
return False
@field('value')
@expect(proto.CipheredKeyValue)
def encrypt_keyvalue(self, n, key, value, ask_on_encrypt=True, ask_on_decrypt=True, iv=b''):
n = self._convert_prime(n)
return self.call(proto.CipherKeyValue(address_n=n,
key=key,
value=value,
encrypt=True,
ask_on_encrypt=ask_on_encrypt,
ask_on_decrypt=ask_on_decrypt,
iv=iv))
@field('value')
@expect(proto.CipheredKeyValue)
def decrypt_keyvalue(self, n, key, value, ask_on_encrypt=True, ask_on_decrypt=True, iv=b''):
n = self._convert_prime(n)
return self.call(proto.CipherKeyValue(address_n=n,
key=key,
value=value,
encrypt=False,
ask_on_encrypt=ask_on_encrypt,
ask_on_decrypt=ask_on_decrypt,
iv=iv))
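# Illustrative round-trip using the two helpers above (a sketch; the path and
# key label are arbitrary, and the CipherKeyValue protocol expects `value` to
# be a multiple of 16 bytes):
#   n = client.expand_path("m/44'/0'/0'")
#   blob = client.encrypt_keyvalue(n, "my label", b"16-byte-payload!")
#   assert client.decrypt_keyvalue(n, "my label", blob) == b"16-byte-payload!"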
def _prepare_sign_tx(self, coin_name, inputs, outputs):
tx = types.TransactionType()
tx.inputs.extend(inputs)
tx.outputs.extend(outputs)
txes = {None: tx}
txes[b''] = tx
force_bip143 = ['BitcoinGold', 'BitcoinCash', 'BitcoinSV']
if coin_name in force_bip143:
return txes
known_hashes = []
for inp in inputs:
if inp.prev_hash in txes:
continue
if inp.script_type in (types.SPENDP2SHWITNESS,
types.SPENDWITNESS):
continue
if not self.tx_api:
raise Exception('TX_API not defined')
prev_tx = self.tx_api.get_tx(binascii.hexlify(inp.prev_hash).decode('utf-8'))
txes[inp.prev_hash] = prev_tx
return txes
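# Note: the mapping returned above keys previous transactions by prev_hash
# bytes, while the new unsigned transaction is stored under both None and b''
# so that sign_tx() below can resolve TxRequests that carry no tx_hash.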
@session
def sign_tx(self, coin_name, inputs, outputs, version=None, lock_time=None, debug_processor=None):
start = time.time()
txes = self._prepare_sign_tx(coin_name, inputs, outputs)
# Prepare and send initial message
tx = proto.SignTx()
tx.inputs_count = len(inputs)
tx.outputs_count = len(outputs)
tx.coin_name = coin_name
if version is not None:
tx.version = version
if lock_time is not None:
tx.lock_time = lock_time
res = self.call(tx)
# Prepare structure for signatures
signatures = [None] * len(inputs)
serialized_tx = b''
counter = 0
while True:
counter += 1
if isinstance(res, proto.Failure):
raise CallException("Signing failed")
if not isinstance(res, proto.TxRequest):
raise CallException("Unexpected message")
# If there's some part of signed transaction, let's add it
if res.HasField('serialized') and res.serialized.HasField('serialized_tx'):
if self.verbose:
log("RECEIVED PART OF SERIALIZED TX (%d BYTES)" % len(res.serialized.serialized_tx))
serialized_tx += res.serialized.serialized_tx
if res.HasField('serialized') and res.serialized.HasField('signature_index'):
if signatures[res.serialized.signature_index] != None:
raise Exception("Signature for index %d already filled" % res.serialized.signature_index)
signatures[res.serialized.signature_index] = res.serialized.signature
if res.request_type == types.TXFINISHED:
# Device didn't ask for more information, finish workflow
break
# Device asked for one more information, let's process it.
if not res.details.tx_hash:
current_tx = txes[None]
else:
current_tx = txes[bytes(res.details.tx_hash)]
if res.request_type == types.TXMETA:
msg = types.TransactionType()
msg.version = current_tx.version
msg.lock_time = current_tx.lock_time
msg.inputs_cnt = len(current_tx.inputs)
if res.details.tx_hash:
msg.outputs_cnt = len(current_tx.bin_outputs)
else:
msg.outputs_cnt = len(current_tx.outputs)
msg.extra_data_len = len(current_tx.extra_data) if current_tx.extra_data else 0
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXINPUT:
msg = types.TransactionType()
msg.inputs.extend([current_tx.inputs[res.details.request_index], ])
if debug_processor is not None:
# msg needs to be deep copied so when it's modified
# the other messages stay intact
from copy import deepcopy
msg = deepcopy(msg)
# If debug_processor function is provided,
# pass thru it the request and prepared response.
# This is useful for tests, see test_msg_signtx
msg = debug_processor(res, msg)
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXOUTPUT:
msg = types.TransactionType()
if res.details.tx_hash:
msg.bin_outputs.extend([current_tx.bin_outputs[res.details.request_index], ])
else:
msg.outputs.extend([current_tx.outputs[res.details.request_index], ])
if debug_processor != None:
# msg needs to be deep copied so when it's modified
# the other messages stay intact
from copy import deepcopy
msg = deepcopy(msg)
# If debug_processor function is provided,
# pass thru it the request and prepared response.
# This is useful for tests, see test_msg_signtx
msg = debug_processor(res, msg)
res = self.call(proto.TxAck(tx=msg))
continue
elif res.request_type == types.TXEXTRADATA:
o, l = res.details.extra_data_offset, res.details.extra_data_len
msg = types.TransactionType()
msg.extra_data = current_tx.extra_data[o:o + l]
res = self.call(proto.TxAck(tx=msg))
continue
if None in signatures:
raise Exception("Some signatures are missing!")
if self.verbose:
log("SIGNED IN %.03f SECONDS, CALLED %d MESSAGES, %d BYTES" % \
(time.time() - start, counter, len(serialized_tx)))
return (signatures, serialized_tx)
@field('message')
@expect(proto.Success)
def wipe_device(self):
ret = self.call(proto.WipeDevice())
self.init_device()
return ret
@field('message')
@expect(proto.Success)
def recovery_device(self, use_trezor_method, word_count, passphrase_protection, pin_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
if use_trezor_method:
raise Exception("Trezor-style recovery is no longer supported")
elif word_count not in (12, 18, 24):
raise Exception("Invalid word count. Use 12/18/24")
res = self.call(proto.RecoveryDevice(word_count=int(word_count),
passphrase_protection=bool(passphrase_protection),
pin_protection=bool(pin_protection),
label=label,
language=language,
enforce_wordlist=True,
use_character_cipher=True))
self.init_device()
return res
@field('message')
@expect(proto.Success)
def test_recovery_seed(self, word_count, language):
if not self.features.initialized:
raise Exception("Device must already be initialized in order to perform test recovery")
elif word_count not in (12, 18, 24):
raise Exception("Invalid word count. Use 12/18/24")
res = self.call(proto.RecoveryDevice(word_count=int(word_count),
language=language,
enforce_wordlist=True,
use_character_cipher=True,
dry_run=True))
self.init_device()
return res
@field('message')
@expect(proto.Success)
@session
def reset_device(self, display_random, strength, passphrase_protection, pin_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
# Begin with device reset workflow
msg = proto.ResetDevice(display_random=display_random,
strength=strength,
language=language,
passphrase_protection=bool(passphrase_protection),
pin_protection=bool(pin_protection),
label=label)
resp = self.call(msg)
if not isinstance(resp, proto.EntropyRequest):
raise Exception("Invalid response, expected EntropyRequest")
external_entropy = self._get_local_entropy()
if self.verbose:
log("Computer generated entropy: " + binascii.hexlify(external_entropy).decode('ascii'))
ret = self.call(proto.EntropyAck(entropy=external_entropy))
self.init_device()
return ret
@field('message')
@expect(proto.Success)
def load_device_by_mnemonic(self, mnemonic, pin, passphrase_protection, label, language, skip_checksum=False):
m = Mnemonic('english')
if not skip_checksum and not m.check(mnemonic):
raise Exception("Invalid mnemonic checksum")
# Convert mnemonic to UTF8 NKFD
mnemonic = Mnemonic.normalize_string(mnemonic)
# Convert mnemonic to ASCII stream
mnemonic = normalize_nfc(mnemonic)
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
resp = self.call(proto.LoadDevice(mnemonic=mnemonic, pin=pin,
passphrase_protection=passphrase_protection,
language=language,
label=label,
skip_checksum=skip_checksum))
self.init_device()
return resp
@field('message')
@expect(proto.Success)
def load_device_by_xprv(self, xprv, pin, passphrase_protection, label, language):
if self.features.initialized:
raise Exception("Device is initialized already. Call wipe_device() and try again.")
if xprv[0:4] not in ('xprv', 'tprv'):
raise Exception("Unknown type of xprv")
if len(xprv) < 100 or len(xprv) > 112:
raise Exception("Invalid length of xprv")
node = types.HDNodeType()
data = binascii.hexlify(tools.b58decode(xprv, None))
if data[90:92] != b'00':
raise Exception("Contain invalid private key")
checksum = binascii.hexlify(hashlib.sha256(hashlib.sha256(binascii.unhexlify(data[:156])).digest()).digest()[:4])
if checksum != data[156:]:
raise Exception("Checksum doesn't match")
# version 0488ade4
# depth 00
# fingerprint 00000000
# child_num 00000000
# chaincode 873dff81c02f525623fd1fe5167eac3a55a049de3d314bb42ee227ffed37d508
# privkey 00e8f32e723decf4051aefac8e2c93c9c5b214313817cdb01a1494b917c8436b35
# checksum e77e9d71
node.depth = int(data[8:10], 16)
node.fingerprint = int(data[10:18], 16)
node.child_num = int(data[18:26], 16)
node.chain_code = binascii.unhexlify(data[26:90])
node.private_key = binascii.unhexlify(data[92:156]) # skip 0x00 indicating privkey
resp = self.call(proto.LoadDevice(node=node,
pin=pin,
passphrase_protection=passphrase_protection,
language=language,
label=label))
self.init_device()
return resp
def firmware_update(self, fp):
if self.features.bootloader_mode == False:
raise Exception("Device must be in bootloader mode")
resp = self.call(proto.FirmwareErase())
if isinstance(resp, proto.Failure) and resp.code == types.Failure_FirmwareError:
return False
data = fp.read()
data_hash = hashlib.sha256(data).digest()
resp = self.call(proto.FirmwareUpload(payload_hash=data_hash, payload=data))
if isinstance(resp, proto.Success):
return True
elif isinstance(resp, proto.Failure) and resp.code == types.Failure_FirmwareError:
return False
raise Exception("Unexpected result %s" % resp)
class KeepKeyClient(ProtocolMixin, TextUIMixin, BaseClient):
pass
class KeepKeyClientVerbose(ProtocolMixin, TextUIMixin, DebugWireMixin, BaseClient):
pass
class KeepKeyDebuglinkClient(ProtocolMixin, DebugLinkMixin, BaseClient):
pass
class KeepKeyDebuglinkClientVerbose(ProtocolMixin, DebugLinkMixin, DebugWireMixin, BaseClient):
pass
|
lgpl-3.0
| -2,209,451,958,689,590,800
| 36.169617
| 168
| 0.574699
| false
| 4.058786
| false
| false
| false
|
Kalle0x12/Test2
|
csr_test.py
|
1
|
2910
|
from __future__ import print_function
import lis_wrapper
import numpy as np
import scipy.sparse
# Define a symmetric 8 x 8 dense upper triangular matrix first.
# This matrix is part of the examples which come with Intel's MKL library
# and is used here for historical reasons.
# A:
# 7.0, 1.0, 2.0, 7.0,
# -4.0, 8.0, 2.0,
# 1.0, 5.0,
# 7.0, 9.0,
# 5.0, 1.0, 5.0,
# -1.0, 5.0,
# 11.0,
# 5.0
A = np.zeros((8, 8), dtype=np.float64)
A[0, 0] = 7.0
A[0, 2] = 1.0
A[0, 5] = 2.0
A[0, 6] = 7.0
A[1, 1] = -4.0
A[1, 2] = 8.0
A[1, 4] = 2.0
A[2, 2] = 1.0
A[2, 7] = 5.0
A[3, 3] = 7.0
A[3, 6] = 9.0
A[4, 4] = 5.0
A[4, 5] = 1.0
A[4, 6] = 5.0
A[5, 5] = -1.0
A[5, 7] = 5.0
A[6, 6] = 11.0
A[7, 7] = 5.0
# print "Dense matrix:"
print(A)
# Dense matrix to sparse matrix in CSR format
Acsr = scipy.sparse.csr_matrix(A)
print("Sparse upper triangular CSR matrix:")
print("values: ", Acsr.data)
# Indices are 0 based
print("index: ", Acsr.indices)
print("pointer: ", Acsr.indptr)
# LIS Manual: Appendix File Formats
# "Note that both the upper and lower triangular entries need to be stored
# irrespective of whether the matrix is symmetric or not."
# Convert the upper triangular CSR matrix Acsr to 'full' CSR matrix Acsr_full
Acsr_full = Acsr + Acsr.T - scipy.sparse.diags(Acsr.diagonal())
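# Why the diagonal is subtracted: A + A.T double-counts it. Minimal sketch:
#   M = [[2, 1],     M + M.T = [[4, 1],     full = [[2, 1],
#        [0, 3]]                [1, 6]]             [1, 3]]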
print()
print("Sparse 'full' CSR matrix:")
print("values: ", Acsr_full.data)
# Indices are 0 based
print("index: ", Acsr_full.indices)
print("pointer: ", Acsr_full.indptr)
# initial guess for solution x
x = np.zeros(8)
# right hand side
b = np.ones(8)
info = 1 # make LIS more verbose
tol = 1e-6 # convergence tolerance
max_iter = 10000 # maximum number of iterations
logfname = "residuals.log" # log
# in lis_cmd following parameters are set:
# -i cg : conjugate gradient solver
# -p ssor : SSOR preconditioner
# -tol : convergence tolerance
# -maxiter : maximum number of iterations
# -ssor_w 1.0 : relaxation coefficient w (0 < w < 2)
# -initx_zeros 0 : don't set initial values for x to 0. The initial guess is passed by x to LIS
# -print mem : Save the residual history to logfile
lis_cmd = "-i cg -tol %e -maxiter %d -p ssor -ssor_w 1.0 -initx_zeros 0 -print mem" % (tol, max_iter)
lis_wrapper.lis(Acsr_full.data, Acsr_full.indices, Acsr_full.indptr, x, b, info, lis_cmd, logfname)
# check solution x with original dense matrix A first
# convert upper triangular matrix A to 'full' matrix
y = (A + A.T - np.eye(A.shape[0]) * A.diagonal()).dot(x)
assert (np.allclose(b, y))
# check solution with sparse matrix Acsr_full
y = Acsr_full.dot(x)
assert (np.allclose(b, y))
print("Solution x: ", x)
print()
print("A * x:", y)
print("b :", b)
|
gpl-3.0
| -5,039,573,383,625,121,000
| 26.714286
| 101
| 0.604467
| false
| 2.712022
| false
| false
| false
|
alobbs/autome
|
chief/chief-client.py
|
1
|
1722
|
#!/usr/bin/env python3
import conf
import argparse
import time
import requests
import tabulate
CLEAR = "\x1b[2J\x1b[1;1H"
def get(path):
url = "http://localhost:{}".format(conf.CHIEF_API_PORT)
r = requests.get(url + path)
return r.json()
def table(info, *a, **ka):
if 'tablefmt' not in ka:
ka['tablefmt'] = "fancy_grid"
if type(info) == dict:
info = [list(i) for i in info.items()]
return tabulate.tabulate(info, [], *a, **ka)
elif type(info) == list and type(info[0]) == dict:
headers = sorted(info[0].keys())
values = []
for e in info:
values.append([e.get(k, '') for k in headers])
return tabulate.tabulate(values, headers, *a, **ka)
return tabulate.tabulate(info, *a, **ka)
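# Illustrative calls (shapes only; the exact fancy_grid rendering depends on
# the installed tabulate version):
#   table({"jobs": 3, "uptime": "2d"})    -> two-column key/value grid
#   table([{"id": 1, "name": "sync"}])    -> grid with sorted dict keys as headers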
def do(args):
now = time.strftime("%h %d, %H:%M")
if args.cmd == "jobs":
print(now + '\n' + table(get("/jobs/list")))
elif args.cmd == "run":
assert args.job, "--job required"
url = "/jobs/run/{}".format(args.job)
print(now + '\n' + table(get(url)))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("cmd", choices=["jobs", "run", "ping"])
parser.add_argument("--auto", type=int)
parser.add_argument("--job")
args = parser.parse_args()
try:
do(args)
while args.auto:
time.sleep(args.auto)
print(CLEAR)
try:
do(args)
except requests.exceptions.ConnectionError:
now = time.strftime("%h %d, %H:%M")
print(now + " - [ERROR] Autome API server not reachable")
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
|
mit
| 8,255,673,079,585,231,000
| 24.323529
| 73
| 0.546458
| false
| 3.324324
| false
| false
| false
|
liberiun/cynin-intranet
|
src/ubify.spaces/ubify/spaces/browser/mindmap.py
|
1
|
5085
|
###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at support@cynapse.com with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# legal@cynapse.com
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Five import BrowserView
from zope.interface import implements
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from ubify.policy import CyninMessageFactory as _
from ubify.spaces.interfaces import IMindMappable
from ubify.spaces.config import mindmapshowabletypes
from Products.CMFCore.utils import getToolByName
import logging
from ubify.policy.config import spacesdefaultaddablenonfolderishtypes
class SpaceFreeMindMap(BrowserView):
"""Contains backend code the xml template in mindmap.pt
"""
template = ViewPageTemplateFile('mindmap.xml')
recurse = ViewPageTemplateFile('mindmap_recurse.xml')
def __call__(self):
self.logger = logging.getLogger()
self.isfullview = False
self.showleafitems = False
if 'fullviewmapdata' in self.request.steps:
self.isfullview = True
self.typetool= getToolByName(self.context, 'portal_types')
if self.isfullview:
portal = self.context.portal_url.getPortalObject()
mnode = portal
return self.template(mainnode=portal)
else:
if self.context.portal_type == 'ContentRoot':
portal = self.context.portal_url.getPortalObject()
mnode = portal
return self.template(mainnode=portal)
else:
return self.template(mainnode=self.context)
def getTypeIcon(self,obj):
object_typename = obj.portal_type
object_typeobj = self.typetool[object_typename]
fulliconpath = object_typeobj.icon_expr
#self.logger.info('returned typeicon: %s' % (fulliconpath))
return fulliconpath
def getChildren(self,obj):
"""Gets the immediate children of the passed object"""
cat = getToolByName(obj, 'portal_catalog')
currpath = '/'.join(obj.getPhysicalPath())
display_portal_types = mindmapshowabletypes
#import pdb; pdb.set_trace()
if self.showleafitems:
display_portal_types = mindmapshowabletypes + spacesdefaultaddablenonfolderishtypes
else:
if self.context.portal_type == 'Plone Site' or obj.portal_type in ('ContentRoot','ContentSpace'):
display_portal_types = mindmapshowabletypes
else:
display_portal_types = mindmapshowabletypes + spacesdefaultaddablenonfolderishtypes
catresults = cat.searchResults({'path': {'query': currpath, 'depth': 1},'portal_type':display_portal_types})
return catresults
def pathsort(x,y):
""" Sorts by path of object first and then by string"""
#DKG: Unused. Was written to sort a mybrains list based on the paths of the objects in it.
xpath = x.getPath()
ypath = y.getPath()
xsplit = xpath.split('/')
ysplit = ypath.split('/')
if len(xsplit) > len(ysplit):
return 1
elif len(xsplit) < len(ysplit):
return -1
else: #objects are peers in path
if xpath > ypath:
return 1
elif xpath < ypath:
return -1
else: #objects are having same path!?!?!
return 0
|
gpl-3.0
| -5,295,264,525,636,877,000
| 40.341463
| 116
| 0.661357
| false
| 4.003937
| false
| false
| false
|
volalex/endlessctf
|
scoreboard/models.py
|
1
|
2683
|
# encoding: UTF-8
import os
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.dispatch import receiver
class Category(models.Model):
title = models.CharField(max_length=50)
position = models.IntegerField(name='position', unique=True)
def __str__(self):
return self.title
class Meta:
ordering = ('position',)
class News(models.Model):
title = models.CharField(max_length=50, name="title", verbose_name="Заголовок новости")
text = models.TextField(name="text", verbose_name="Текст новости")
create_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.title
class Meta:
ordering = ('create_date',)
verbose_name_plural = "News"
class Task(models.Model):
name = models.CharField(max_length=100, blank=False)
score = models.IntegerField(name='score', blank=False)
category = models.ForeignKey(Category, blank=False)
text = models.TextField(name='text', blank=False)
task_file = models.FileField(verbose_name="Task file", upload_to="task_files", blank=True)
flag = models.CharField(max_length=100, blank=False)
is_enabled = models.BooleanField(default=False)
def __str__(self):
return self.name
def gen_file_link(self):
if self.task_file:
return "<a href='%s'>File</a>" % self.task_file.url
else:
return ""
@receiver(models.signals.post_delete, sender=Task)
def auto_delete_file_on_delete(sender, instance, **kwargs):
"""Deletes file from filesystem
when corresponding `Task` object is deleted.
"""
try:
if instance.task_file:
if os.path.isfile(instance.task_file.path):
os.remove(instance.task_file.path)
except AttributeError:
pass
@receiver(models.signals.pre_save, sender=Task)
def auto_delete_file_on_change(sender, instance, **kwargs):
"""Deletes file from filesystem
when corresponding `Task` object is changed.
"""
if not instance.pk:
return False
try:
old_file = Task.objects.get(pk=instance.pk).task_file
except Task.DoesNotExist:
return False
if not old_file:
return False
new_file = instance.task_file
if not old_file == new_file:
if os.path.isfile(old_file.path):
os.remove(old_file.path)
class SolvedTasks(models.Model):
task = models.ForeignKey(Task)
team = models.ForeignKey(User)
solved_at = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = (('task', 'team'),)
|
gpl-2.0
| 6,566,926,016,593,865,000
| 26.947368
| 94
| 0.654991
| false
| 3.667127
| false
| false
| false
|
jetuk/pywr
|
examples/two_reservoir_moea.py
|
1
|
8924
|
"""
This example shows the trade-off (pareto frontier) of deficit against cost by altering a reservoir control curve.
Two types of control curve are possible. The first is a monthly control curve containing one value for each
month. The second is a harmonic control curve with cosine terms around a mean. Both Parameter objects
are part of pywr.parameters.
Inspyred is used in this example to perform a multi-objective optimisation using the NSGA-II algorithm. The
script should be run twice (once with --harmonic) to generate results for both types of control curve. Following
this --plot can be used to generate an animation and PNG of the pareto frontier.
"""
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime
import inspyred
from pywr.core import Model, Input, Output, Link, Storage
from pywr.parameters import ArrayIndexedParameter, MonthlyProfileParameter, AnnualHarmonicSeriesParameter
from pywr.parameters.control_curves import ControlCurveParameter
from pywr.recorders import TotalDeficitNodeRecorder, TotalFlowNodeRecorder, AggregatedRecorder
from pywr.optimisation.moea import InspyredOptimisationModel
def create_model(harmonic=True):
# import flow timeseries for catchments
flow = pd.read_csv(os.path.join('data', 'thames_stochastic_flow.gz'))
flow['Date'] = flow['Date'].apply(pd.to_datetime)
flow.set_index('Date', inplace=True)
# resample input to weekly average
flow = flow.resample('7D', how='mean')
flow_parameter = ArrayIndexedParameter(flow['flow'].values)
model = InspyredOptimisationModel(
solver='glpk',
start=flow.index[0],
end=flow.index[365*10], # roughly 10 years
timestep=datetime.timedelta(7), # weekly time-step
)
catchment1 = Input(model, 'catchment1', min_flow=flow_parameter, max_flow=flow_parameter)
catchment2 = Input(model, 'catchment2', min_flow=flow_parameter, max_flow=flow_parameter)
reservoir1 = Storage(model, 'reservoir1', min_volume=3000, max_volume=20000, volume=16000)
reservoir2 = Storage(model, 'reservoir2', min_volume=3000, max_volume=20000, volume=16000)
if harmonic:
control_curve = AnnualHarmonicSeriesParameter(0.5, [0.5], [0.0], mean_upper_bounds=1.0, amplitude_upper_bounds=1.0)
else:
control_curve = MonthlyProfileParameter(np.array([0.0]*12), lower_bounds=0.0, upper_bounds=1.0)
control_curve.is_variable = True
controller = ControlCurveParameter(reservoir1, control_curve, [0.0, 10.0])
transfer = Link(model, 'transfer', max_flow=controller, cost=-500)
demand1 = Output(model, 'demand1', max_flow=45.0, cost=-101)
demand2 = Output(model, 'demand2', max_flow=20.0, cost=-100)
river1 = Link(model, 'river1')
river2 = Link(model, 'river2')
# compensation flows from reservoirs
compensation1 = Link(model, 'compensation1', max_flow=5.0, cost=-9999)
compensation2 = Link(model, 'compensation2', max_flow=5.0, cost=-9998)
terminator = Output(model, 'terminator', cost=1.0)
catchment1.connect(reservoir1)
catchment2.connect(reservoir2)
reservoir1.connect(demand1)
reservoir2.connect(demand2)
reservoir2.connect(transfer)
transfer.connect(reservoir1)
reservoir1.connect(river1)
reservoir2.connect(river2)
river1.connect(terminator)
river2.connect(terminator)
reservoir1.connect(compensation1)
reservoir2.connect(compensation2)
compensation1.connect(terminator)
compensation2.connect(terminator)
r1 = TotalDeficitNodeRecorder(model, demand1)
r2 = TotalDeficitNodeRecorder(model, demand2)
r3 = AggregatedRecorder(model, [r1, r2], agg_func="mean")
r3.is_objective = True
r4 = TotalFlowNodeRecorder(model, transfer)
r4.is_objective = True
return model
def moea_main(prng=None, display=False, harmonic=False):
from random import Random
from time import time
if prng is None:
prng = Random()
prng.seed(time())
script_name = os.path.splitext(os.path.basename(__file__))[0]
stats_file = open('{}-{}-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
problem = create_model(harmonic=harmonic)
problem.setup()
ea = inspyred.ec.emo.NSGA2(prng)
ea.variator = [inspyred.ec.variators.blend_crossover,
inspyred.ec.variators.gaussian_mutation]
ea.terminator = inspyred.ec.terminators.generation_termination
ea.observer = [
inspyred.ec.observers.file_observer,
]
final_pop = ea.evolve(generator=problem.generator,
evaluator=problem.evaluator,
pop_size=25,
bounder=problem.bounder,
maximize=False,
max_generations=50,
statistics_file=stats_file,
individuals_file=individuals_file)
# Save the final population archive to CSV files
stats_file = open('{}-{}-final-statistics-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
individuals_file = open('{}-{}-final-individuals-file.csv'.format(script_name, 'harmonic' if harmonic else 'monthly'), 'w')
inspyred.ec.observers.file_observer(ea.archive, 'final', None,
args={'statistics_file': stats_file, 'individuals_file': individuals_file})
if display:
final_arc = ea.archive
print('Best Solutions: \n')
for f in final_arc:
print(f)
x = []
y = []
for f in final_arc:
x.append(f.fitness[0])
y.append(f.fitness[1])
plt.scatter(x, y, c='b')
plt.xlabel('Total demand deficit [Ml/d]')
plt.ylabel('Total Transferred volume [Ml/d]')
title = 'Harmonic Control Curve' if harmonic else 'Monthly Control Curve'
plt.savefig('{0} Example ({1}).pdf'.format(ea.__class__.__name__, title), format='pdf')
plt.show()
return ea
def load_individuals(filename):
""" Read an inspyred individuals file in to two pandas.DataFrame objects.
There is one DataFrame for the objectives and another for the variables.
"""
import ast
index = []
all_objs = []
all_vars = []
with open(filename, 'r') as f:
for row in f.readlines():
gen, pop_id, objs, vars = ast.literal_eval(row.strip())
index.append((gen, pop_id))
all_objs.append(objs)
all_vars.append(vars)
index = pd.MultiIndex.from_tuples(index, names=['generation', 'individual'])
return pd.DataFrame(all_objs, index=index), pd.DataFrame(all_vars, index=index)
def animate_generations(objective_data, colors):
"""
Animate the pareto frontier plot over the saved generations.
"""
import matplotlib.animation as animation
def update_line(gen, dfs, ax, xmax, ymax):
ax.cla()
artists = []
for i in range(gen+1):
for c, key in zip(colors, sorted(dfs.keys())):
df = dfs[key]
scat = ax.scatter(df.loc[i][0], df.loc[i][1], alpha=0.8**(gen-i), color=c,
label=key if i == gen else None, clip_on=True, zorder=100)
artists.append(scat)
ax.set_title('Generation: {:d}'.format(gen))
ax.set_xlabel('Total demand deficit [Ml/d]')
ax.set_ylabel('Total Transferred volume [Ml/d]')
ax.set_xlim(0, xmax)
ax.set_ylim(0, ymax)
ax.legend()
ax.grid()
return artists
fig, ax = plt.subplots(figsize=(10, 10))
last_gen = list(objective_data.values())[0].index[-1][0]
last_gen = int(last_gen)
xmax = max(df.loc[last_gen][0].max() for df in objective_data.values())
ymax = max(df.loc[last_gen][1].max() for df in objective_data.values())
line_ani = animation.FuncAnimation(fig, update_line, last_gen+1,
fargs=(objective_data, ax, xmax, ymax), interval=400, repeat=False)
line_ani.save('generations.mp4', bitrate=1024,)
fig.savefig('generations.png')
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--harmonic', action='store_true', help='Use an harmonic control curve.')
parser.add_argument('--plot', action='store_true', help='Plot the pareto frontier.')
args = parser.parse_args()
if args.plot:
objs, vars = {}, {}
for cctype in ('monthly', 'harmonic'):
objs[cctype], vars[cctype] = load_individuals('two_reservoir_moea-{}-individuals-file.csv'.format(cctype))
animate_generations(objs, ('b', 'r'))
plt.show()
else:
moea_main(display=True, harmonic=args.harmonic)
|
gpl-3.0
| 277,628,569,962,579,800
| 37.465517
| 127
| 0.651277
| false
| 3.44423
| false
| false
| false
|
ver228/tierpsy-tracker
|
tierpsy/gui/BatchProcessing_ui.py
|
1
|
13026
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'BatchProcessing.ui'
#
# Created by: PyQt5 UI code generator 5.12.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_BatchProcessing(object):
def setupUi(self, BatchProcessing):
BatchProcessing.setObjectName("BatchProcessing")
BatchProcessing.resize(594, 504)
self.centralwidget = QtWidgets.QWidget(BatchProcessing)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setObjectName("gridLayout_2")
self.p_video_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_video_dir_root.setObjectName("p_video_dir_root")
self.gridLayout_2.addWidget(self.p_video_dir_root, 0, 2, 1, 1)
self.pushButton_videosDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_videosDir.setObjectName("pushButton_videosDir")
self.gridLayout_2.addWidget(self.pushButton_videosDir, 0, 1, 1, 1)
self.p_tmp_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_tmp_dir_root.setObjectName("p_tmp_dir_root")
self.gridLayout_2.addWidget(self.p_tmp_dir_root, 5, 2, 1, 1)
self.p_videos_list = QtWidgets.QLineEdit(self.centralwidget)
self.p_videos_list.setEnabled(True)
self.p_videos_list.setObjectName("p_videos_list")
self.gridLayout_2.addWidget(self.p_videos_list, 1, 2, 1, 1)
self.p_mask_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_mask_dir_root.setObjectName("p_mask_dir_root")
self.gridLayout_2.addWidget(self.p_mask_dir_root, 2, 2, 1, 1)
self.pushButton_tmpDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_tmpDir.setObjectName("pushButton_tmpDir")
self.gridLayout_2.addWidget(self.pushButton_tmpDir, 5, 1, 1, 1)
self.pushButton_txtFileList = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_txtFileList.setEnabled(True)
self.pushButton_txtFileList.setObjectName("pushButton_txtFileList")
self.gridLayout_2.addWidget(self.pushButton_txtFileList, 1, 1, 1, 1)
self.pushButton_masksDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_masksDir.setObjectName("pushButton_masksDir")
self.gridLayout_2.addWidget(self.pushButton_masksDir, 2, 1, 1, 1)
self.p_results_dir_root = QtWidgets.QLineEdit(self.centralwidget)
self.p_results_dir_root.setObjectName("p_results_dir_root")
self.gridLayout_2.addWidget(self.p_results_dir_root, 3, 2, 1, 1)
self.pushButton_paramFile = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_paramFile.setObjectName("pushButton_paramFile")
self.gridLayout_2.addWidget(self.pushButton_paramFile, 4, 1, 1, 1)
self.pushButton_resultsDir = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_resultsDir.setObjectName("pushButton_resultsDir")
self.gridLayout_2.addWidget(self.pushButton_resultsDir, 3, 1, 1, 1)
self.checkBox_txtFileList = QtWidgets.QCheckBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_txtFileList.sizePolicy().hasHeightForWidth())
self.checkBox_txtFileList.setSizePolicy(sizePolicy)
self.checkBox_txtFileList.setText("")
self.checkBox_txtFileList.setObjectName("checkBox_txtFileList")
self.gridLayout_2.addWidget(self.checkBox_txtFileList, 1, 0, 1, 1)
self.checkBox_tmpDir = QtWidgets.QCheckBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBox_tmpDir.sizePolicy().hasHeightForWidth())
self.checkBox_tmpDir.setSizePolicy(sizePolicy)
self.checkBox_tmpDir.setText("")
self.checkBox_tmpDir.setObjectName("checkBox_tmpDir")
self.gridLayout_2.addWidget(self.checkBox_tmpDir, 5, 0, 1, 1)
self.p_json_file = QtWidgets.QComboBox(self.centralwidget)
self.p_json_file.setEditable(True)
self.p_json_file.setObjectName("p_json_file")
self.gridLayout_2.addWidget(self.p_json_file, 4, 2, 1, 1)
self.verticalLayout.addLayout(self.gridLayout_2)
self.verticalLayout_2.addLayout(self.verticalLayout)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.gridLayout_3 = QtWidgets.QGridLayout()
self.gridLayout_3.setObjectName("gridLayout_3")
self.p_is_debug = QtWidgets.QCheckBox(self.centralwidget)
self.p_is_debug.setObjectName("p_is_debug")
self.gridLayout_3.addWidget(self.p_is_debug, 5, 2, 1, 1)
self.label_numMaxProc = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_numMaxProc.sizePolicy().hasHeightForWidth())
self.label_numMaxProc.setSizePolicy(sizePolicy)
self.label_numMaxProc.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_numMaxProc.setWordWrap(True)
self.label_numMaxProc.setObjectName("label_numMaxProc")
self.gridLayout_3.addWidget(self.label_numMaxProc, 2, 1, 1, 1)
self.label = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setObjectName("label")
self.gridLayout_3.addWidget(self.label, 0, 3, 1, 1)
self.p_force_start_point = QtWidgets.QComboBox(self.centralwidget)
self.p_force_start_point.setObjectName("p_force_start_point")
self.gridLayout_3.addWidget(self.p_force_start_point, 1, 3, 1, 3)
self.p_copy_unfinished = QtWidgets.QCheckBox(self.centralwidget)
self.p_copy_unfinished.setObjectName("p_copy_unfinished")
self.gridLayout_3.addWidget(self.p_copy_unfinished, 5, 1, 1, 1)
self.p_max_num_process = QtWidgets.QSpinBox(self.centralwidget)
self.p_max_num_process.setObjectName("p_max_num_process")
self.gridLayout_3.addWidget(self.p_max_num_process, 3, 1, 1, 1)
self.p_pattern_exclude = QtWidgets.QLineEdit(self.centralwidget)
self.p_pattern_exclude.setObjectName("p_pattern_exclude")
self.gridLayout_3.addWidget(self.p_pattern_exclude, 1, 2, 1, 1)
self.label_2 = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setObjectName("label_2")
self.gridLayout_3.addWidget(self.label_2, 2, 3, 1, 1)
self.p_end_point = QtWidgets.QComboBox(self.centralwidget)
self.p_end_point.setObjectName("p_end_point")
self.gridLayout_3.addWidget(self.p_end_point, 3, 3, 1, 3)
self.p_pattern_include = QtWidgets.QLineEdit(self.centralwidget)
self.p_pattern_include.setObjectName("p_pattern_include")
self.gridLayout_3.addWidget(self.p_pattern_include, 1, 1, 1, 1)
self.label_patternExc = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_patternExc.sizePolicy().hasHeightForWidth())
self.label_patternExc.setSizePolicy(sizePolicy)
self.label_patternExc.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_patternExc.setWordWrap(True)
self.label_patternExc.setObjectName("label_patternExc")
self.gridLayout_3.addWidget(self.label_patternExc, 0, 2, 1, 1)
self.label_patternIn = QtWidgets.QLabel(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_patternIn.sizePolicy().hasHeightForWidth())
self.label_patternIn.setSizePolicy(sizePolicy)
self.label_patternIn.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_patternIn.setWordWrap(True)
self.label_patternIn.setObjectName("label_patternIn")
self.gridLayout_3.addWidget(self.label_patternIn, 0, 1, 1, 1)
self.p_is_copy_video = QtWidgets.QCheckBox(self.centralwidget)
self.p_is_copy_video.setObjectName("p_is_copy_video")
self.gridLayout_3.addWidget(self.p_is_copy_video, 6, 1, 1, 1)
self.p_unmet_requirements = QtWidgets.QCheckBox(self.centralwidget)
self.p_unmet_requirements.setObjectName("p_unmet_requirements")
self.gridLayout_3.addWidget(self.p_unmet_requirements, 6, 2, 1, 1)
self.p_only_summary = QtWidgets.QCheckBox(self.centralwidget)
self.p_only_summary.setObjectName("p_only_summary")
self.gridLayout_3.addWidget(self.p_only_summary, 7, 1, 1, 1)
self.pushButton_start = QtWidgets.QPushButton(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(18)
self.pushButton_start.setFont(font)
self.pushButton_start.setObjectName("pushButton_start")
self.gridLayout_3.addWidget(self.pushButton_start, 5, 3, 3, 1)
self.horizontalLayout_2.addLayout(self.gridLayout_3)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
BatchProcessing.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(BatchProcessing)
self.menubar.setGeometry(QtCore.QRect(0, 0, 594, 22))
self.menubar.setObjectName("menubar")
BatchProcessing.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(BatchProcessing)
self.statusbar.setObjectName("statusbar")
BatchProcessing.setStatusBar(self.statusbar)
self.retranslateUi(BatchProcessing)
QtCore.QMetaObject.connectSlotsByName(BatchProcessing)
def retranslateUi(self, BatchProcessing):
_translate = QtCore.QCoreApplication.translate
BatchProcessing.setWindowTitle(_translate("BatchProcessing", "Batch Processing"))
self.pushButton_videosDir.setText(_translate("BatchProcessing", "Original Videos Dir"))
self.pushButton_tmpDir.setText(_translate("BatchProcessing", "Temporary Dir"))
self.pushButton_txtFileList.setText(_translate("BatchProcessing", "Individual Files List"))
self.pushButton_masksDir.setText(_translate("BatchProcessing", "Masked Videos Dir"))
self.pushButton_paramFile.setText(_translate("BatchProcessing", "Parameters File"))
self.pushButton_resultsDir.setText(_translate("BatchProcessing", "Tracking Results Dir"))
self.p_is_debug.setText(_translate("BatchProcessing", "Print debug information"))
self.label_numMaxProc.setText(_translate("BatchProcessing", "Maximum Number of Processes"))
self.label.setText(_translate("BatchProcessing", "Analysis Start Point"))
self.p_copy_unfinished.setText(_translate("BatchProcessing", "Copy Unfinished Analysis"))
self.label_2.setText(_translate("BatchProcessing", "Analysis End Point"))
self.label_patternExc.setText(_translate("BatchProcessing", "File Pattern to Exclude"))
self.label_patternIn.setText(_translate("BatchProcessing", "File Pattern to Include"))
self.p_is_copy_video.setText(_translate("BatchProcessing", "Copy Raw Videos to Temp Dir"))
self.p_unmet_requirements.setText(_translate("BatchProcessing", "Print Unmet Requirements"))
self.p_only_summary.setText(_translate("BatchProcessing", "Only Display Progress Summary"))
self.pushButton_start.setText(_translate("BatchProcessing", "START"))
|
mit
| -3,348,856,700,559,731,000
| 63.167488
| 109
| 0.720482
| false
| 3.805434
| false
| false
| false
|
wli/django-allauth
|
allauth/socialaccount/providers/paypal/provider.py
|
1
|
1138
|
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class PaypalAccount(ProviderAccount):
def get_avatar_url(self):
return self.account.extra_data.get('picture')
def to_str(self):
return self.account.extra_data.get('name',
super(PaypalAccount, self).to_str())
class PaypalProvider(OAuth2Provider):
id = 'paypal'
name = 'Paypal'
account_class = PaypalAccount
def get_default_scope(self):
# See: https://developer.paypal.com/docs/integration/direct/identity/attributes/ # noqa
return ['openid', 'email']
def extract_uid(self, data):
return str(data['user_id'])
def extract_common_fields(self, data):
# See: https://developer.paypal.com/docs/api/#get-user-information
return dict(first_name=data.get('given_name', ''),
last_name=data.get('family_name', ''),
email=data.get('email'))
providers.registry.register(PaypalProvider)
|
mit
| 2,561,766,692,793,844,700
| 32.470588
| 96
| 0.654657
| false
| 3.924138
| false
| false
| false
|
danirus/django-comments-xtd
|
django_comments_xtd/admin.py
|
1
|
1729
|
from __future__ import unicode_literals
from django.contrib import admin
from django_comments import get_model
from django_comments.admin import CommentsAdmin
from django_comments.models import CommentFlag
from django_comments_xtd.models import XtdComment, BlackListedDomain
class XtdCommentsAdmin(CommentsAdmin):
list_display = ('cid', 'thread_level', 'nested_count', 'name',
'content_type', 'object_pk', 'ip_address', 'submit_date',
'followup', 'is_public', 'is_removed')
list_display_links = ('cid',)
list_filter = ('content_type', 'is_public', 'is_removed', 'followup')
fieldsets = (
(None, {'fields': ('content_type', 'object_pk', 'site')}),
('Content', {'fields': ('user', 'user_name', 'user_email',
'user_url', 'comment', 'followup')}),
('Metadata', {'fields': ('submit_date', 'ip_address',
'is_public', 'is_removed')}),
)
date_hierarchy = 'submit_date'
ordering = ('thread_id', 'order')
search_fields = ['object_pk', 'user__username', 'user_name', 'user_email',
'comment']
def thread_level(self, obj):
rep = '|'
if obj.level:
rep += '-' * obj.level
rep += " c%d to c%d" % (obj.id, obj.parent_id)
else:
rep += " c%d" % obj.id
return rep
def cid(self, obj):
return 'c%d' % obj.id
class BlackListedDomainAdmin(admin.ModelAdmin):
search_fields = ['domain']
if get_model() is XtdComment:
admin.site.register(XtdComment, XtdCommentsAdmin)
admin.site.register(CommentFlag)
admin.site.register(BlackListedDomain, BlackListedDomainAdmin)
|
bsd-2-clause
| 6,129,600,062,988,580,000
| 34.285714
| 78
| 0.585309
| false
| 3.655391
| false
| false
| false
|
marcharper/python-ternary
|
examples/scatter_colorbar.py
|
1
|
4211
|
"""An example of the colorbar display on the scatter plot."""
import ternary
import matplotlib.pyplot as plt
def _en_to_enth(energy, concs, A, B, C):
"""Converts an energy to an enthalpy.
Converts energy to enthalpy using the following formula:
Enthalpy = energy - (energy contribution from A) - (energy contribution from B) -
(energy contribution from C)
An absolute value is taken afterward for convenience.
Parameters
----------
energy : float
The energy of the structure
concs : list of floats
The concentrations of each element
A : float
The energy of pure A
B : float
The energy of pure B
C : float
The energy of pure C
Returns
-------
enth : float
The enthalpy of formation.
"""
enth = abs(energy - concs[0]*A - concs[1] * B - concs[2] * C)
return enth
def _energy_to_enthalpy(energy):
"""Converts energy to enthalpy.
This function take the energies stored in the energy array and
converts them to formation enthalpy.
Parameters
---------
energy : list of lists of floats
Returns
-------
enthalpy : list of lists containing the enthalpies.
"""
pureA = [energy[0][0], energy[0][1]]
pureB = [energy[1][0], energy[1][1]]
pureC = [energy[2][0], energy[2][1]]
enthalpy = []
for en in energy:
c = en[2]
conc = [float(i) / sum(c) for i in c]
CE = _en_to_enth(en[0], conc, pureA[0], pureB[0], pureC[0])
VASP = _en_to_enth(en[1], conc, pureA[1], pureB[1], pureC[1])
enthalpy.append([CE, VASP, c])
return enthalpy
def _find_error(vals):
"""Find the errors in the energy values.
This function finds the errors in the enthalpys.
Parameters
----------
vals : list of lists of floats
Returns
-------
err_vals : list of lists containing the errors.
"""
err_vals = []
for en in vals:
c = en[2]
conc = [float(i) / sum(c) for i in c]
err = abs(en[0] - en[1])
err_vals.append([conc, err])
return err_vals
def _read_data(fname):
"""Reads data from file.
Reads the data in 'fname' into a list where each list entry contains
[energy predicted, energy calculated, list of concentrations].
Parameters
----------
fname : str
The name and path to the data file.
Returns
-------
energy : list of lists of floats
A list of the energies and the concentrations.
"""
energy = []
with open(fname,'r') as f:
for line in f:
CE = abs(float(line.strip().split()[0]))
VASP = abs(float(line.strip().split()[1]))
conc = [i for i in line.strip().split()[2:]]
conc_f = []
for c in conc:
if '[' in c and ']' in c:
conc_f.append(int(c[1:-1]))
elif '[' in c:
conc_f.append(int(c[1:-1]))
elif ']' in c or ',' in c:
conc_f.append(int(c[:-1]))
else:
conc_f.append(int(c))
energy.append([CE, VASP, conc_f])
return energy
def conc_err_plot(fname):
"""Plots the error in the CE data.
This plots the error in the CE predictions within a ternary concentration diagram.
Parameters
----------
fname : string containing the input file name.
"""
energies = _read_data(fname)
enthalpy = _energy_to_enthalpy(energies)
this_errors = _find_error(enthalpy)
points = []
colors = []
for er in this_errors:
concs = er[0]
points.append((concs[0] * 100, concs[1] * 100, concs[2] * 100))
colors.append(er[1])
scale = 100
figure, tax = ternary.figure(scale=scale)
tax.boundary(linewidth=1.0)
tax.set_title("Errors in Convex Hull Predictions.", fontsize=20)
tax.gridlines(multiple=10, color="blue")
tax.scatter(points, vmax=max(colors), colormap=plt.cm.viridis, colorbar=True, c=colors, cmap=plt.cm.viridis)
tax.show()
if __name__ == "__main__":
conc_err_plot('sample_data/scatter_colorbar.txt')
|
mit
| 334,574,868,188,572,740
| 24.521212
| 112
| 0.560912
| false
| 3.462993
| false
| false
| false
|
ITCase/ps_gallery
|
pyramid_sacrud_gallery/tests/test_models.py
|
1
|
3733
|
# -*- coding: utf-8 -*-
#
# Copyright © 2014 Petr Zelenin (po.zelenin@gmail.com)
#
# Distributed under terms of the MIT license.
import hashlib
import os
import unittest
import transaction
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension
from pyramid_sacrud.exceptions import SacrudMessagedException
from . import (
add_fixture,
Base,
Gallery, GalleryItem, GalleryItemM2M,
TEST_DATABASE_CONNECTION_STRING,
)
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
def add_data(session):
galleries = [
{'pk': 1, 'name': 'Best gallery',
'description': 'Full description of gallery'},
{'pk': 2, 'name': 'Another best gallery',
'description': 'Another full description of gallery'},
]
add_fixture(Gallery, galleries, session)
items = []
gallery_items_m2m = []
for gallery in galleries:
for x in xrange(1, 10):
image = '{name}-{salt}.jpg'.format(name=x, salt=gallery['pk'])
image_abspath = GalleryItem.get_upload_path()
image_hash_base = os.path.join(image_abspath, image)
image_hash = hashlib.md5(image_hash_base).hexdigest()
items.append({
'image': image,
'description': 'This is image with hash "%s"' % image_hash
})
gallery_items_m2m.append({
'gallery_id': gallery['pk'],
'item_id': image_hash,
})
add_fixture(GalleryItem, items, session)
add_fixture(GalleryItemM2M, gallery_items_m2m, session)
class TestGallery(unittest.TestCase):
def setUp(self):
engine = create_engine(TEST_DATABASE_CONNECTION_STRING)
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
with transaction.manager:
add_data(DBSession)
def tearDown(self):
DBSession.remove()
def test_mixins_attrs(self):
"""Check mixins attrs auto apply to classes."""
self.assertEqual(Gallery.get_pk(), 'pk')
self.assertEqual(Gallery.get_db_pk(), 'id')
self.assertEqual(Gallery.__tablename__, 'gallery')
self.assertEqual(GalleryItem.get_pk(), 'pk')
self.assertEqual(GalleryItem.get_db_pk(), 'id')
self.assertEqual(GalleryItem.__tablename__, 'galleryitem')
self.assertEqual(GalleryItemM2M.__tablename__, 'galleryitemm2m')
def test_instances_attrs(self):
"""Check attrs and methods available only for instances."""
gallery = DBSession.query(Gallery).first()
self.assertEqual(gallery.__repr__(), gallery.name)
self.assertEqual(gallery.get_val_pk(), 1)
image = DBSession.query(GalleryItem).filter(GalleryItem.pk == 1).one()
self.assertIn(image.image_hash, image.__repr__())
def test_mixins_fks(self):
"""Check GalleryItemM2MMixin has ForeignKeys to GalleryMixin
and GalleryItemMixin."""
self.assertTrue(hasattr(GalleryItemM2M, 'gallery_id'))
self.assertTrue(hasattr(GalleryItemM2M, 'item_id'))
def test_access_by_relations(self):
"""Check relations between GalleryMixin and GalleryItemMixin."""
gallery = DBSession.query(Gallery).first()
self.assertEqual(len(gallery.items), 9)
def test_unique_image_hash(self):
"""Check of deny to add non-unique image_hash."""
image = GalleryItem(image='1-1.jpg')
DBSession.add(image)
with self.assertRaises(SacrudMessagedException) as cm:
DBSession.query(GalleryItem).all()
self.assertIn('This image was uploaded earlier.', str(cm.exception))
|
mit
| -1,741,868,866,605,887,200
| 33.238532
| 78
| 0.643891
| false
| 3.724551
| true
| false
| false
|
siavashk/pycpd
|
testing/affine_test.py
|
1
|
1194
|
import numpy as np
from numpy.testing import assert_array_almost_equal
from pycpd import AffineRegistration
def test_2D():
B = np.array([[1.0, 0.5], [0, 1.0]])
t = np.array([0.5, 1.0])
Y = np.loadtxt('data/fish_target.txt')
X = np.dot(Y, B) + np.tile(t, (np.shape(Y)[0], 1))
reg = AffineRegistration(**{'X': X, 'Y': Y})
TY, (B_reg, t_reg) = reg.register()
assert_array_almost_equal(B, B_reg)
assert_array_almost_equal(t, t_reg)
assert_array_almost_equal(X, TY)
def test_3D():
B = np.array([[1.0, 0.5, 0.0], [0, 1.0, 0.0], [0.0, 0.0, 1.0]])
t = np.array([0.5, 1.0, -2.0])
fish_target = np.loadtxt('data/fish_target.txt')
Y1 = np.zeros((fish_target.shape[0], fish_target.shape[1] + 1))
Y1[:, :-1] = fish_target
Y2 = np.ones((fish_target.shape[0], fish_target.shape[1] + 1))
Y2[:, :-1] = fish_target
Y = np.vstack((Y1, Y2))
X = np.dot(Y, B) + np.tile(t, (np.shape(Y)[0], 1))
reg = AffineRegistration(**{'X': X, 'Y': Y})
TY, (B_reg, t_reg) = reg.register()
assert_array_almost_equal(B, B_reg)
assert_array_almost_equal(t, t_reg)
assert_array_almost_equal(X, TY)
|
mit
| 1,301,905,175,649,250,300
| 30.421053
| 72
| 0.577889
| false
| 2.417004
| false
| false
| false
|
tomprince/gemrb
|
gemrb/GUIScripts/Spellbook.py
|
1
|
16543
|
# -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2011 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# a library of any functions for spell(book) managment
import GemRB
import CommonTables
from GUIDefines import *
from ie_stats import *
from ie_action import ACT_LEFT, ACT_RIGHT
from ie_spells import SP_IDENTIFY, SP_SURGE, LSR_KNOWN, LSR_LEVEL, LSR_STAT
from ie_restype import RES_2DA
#################################################################
# this is in the operator module of the standard python lib
def itemgetter(*items):
if len(items) == 1:
item = items[0]
def g(obj):
return obj[item]
else:
def g(obj):
return tuple(obj[item] for item in items)
return g
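# Example: itemgetter('SpellType')(spell) returns spell['SpellType'], while
# itemgetter('BookType', 'SpellLevel')(spell) returns both values as a tuple;
# SortUsableSpells below uses these callables as sort keys.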
#################################################################
# routines for the actionbar spell access code
def GetUsableMemorizedSpells(actor, BookType):
memorizedSpells = []
spellResRefs = []
for level in range (9):
spellCount = GemRB.GetMemorizedSpellsCount (actor, BookType, level, False)
for i in range (spellCount):
Spell0 = GemRB.GetMemorizedSpell (actor, BookType, level, i)
if not Spell0["Flags"]:
# depleted, so skip
continue
if Spell0["SpellResRef"] in spellResRefs:
# add another one, so we can get the count more cheaply later
spellResRefs.append (Spell0["SpellResRef"])
continue
spellResRefs.append (Spell0["SpellResRef"])
Spell = GemRB.GetSpell(Spell0["SpellResRef"])
Spell['BookType'] = BookType # just another sorting key
Spell['SpellIndex'] = GemRB.GetSpelldataIndex (actor, Spell["SpellResRef"], 1<<BookType) # crucial!
if Spell['SpellIndex'] == -1:
print "Error, memorized spell not found!", Spell["SpellResRef"]
Spell['SpellIndex'] += 1000 * 1<<BookType
memorizedSpells.append (Spell)
if not len(memorizedSpells):
return []
# count and remove the duplicates
memorizedSpells2 = []
for spell in memorizedSpells:
if spell["SpellResRef"] in spellResRefs:
spell['MemoCount'] = spellResRefs.count(spell["SpellResRef"])
while spell["SpellResRef"] in spellResRefs:
spellResRefs.remove(spell["SpellResRef"])
memorizedSpells2.append(spell)
return memorizedSpells2
def GetKnownSpells(actor, BookType):
knownSpells = []
spellResRefs = []
for level in range (9):
spellCount = GemRB.GetKnownSpellsCount (actor, BookType, level)
for i in range (spellCount):
Spell0 = GemRB.GetKnownSpell (actor, BookType, level, i)
if Spell0["SpellResRef"] in spellResRefs:
continue
spellResRefs.append (Spell0["SpellResRef"])
Spell = GemRB.GetSpell(Spell0["SpellResRef"])
Spell['BookType'] = BookType # just another sorting key
Spell['MemoCount'] = 0
Spell['SpellIndex'] = 1000 * 1<<BookType # this gets assigned properly later
knownSpells.append (Spell)
return knownSpells
def SortUsableSpells(memorizedSpells):
# sort it by using the spldisp.2da table
layout = CommonTables.SpellDisplay.GetValue ("USE_ROW", "ROWS")
layout = CommonTables.SpellDisplay.GetRowName (layout)
key1 = CommonTables.SpellDisplay.GetValue (layout, "KEY1")
key2 = CommonTables.SpellDisplay.GetValue (layout, "KEY2")
key3 = CommonTables.SpellDisplay.GetValue (layout, "KEY3")
if key1:
if key3 and key2:
memorizedSpells = sorted(memorizedSpells, key=itemgetter(key1, key2, key3))
elif key2:
memorizedSpells = sorted(memorizedSpells, key=itemgetter(key1, key2))
else:
memorizedSpells = sorted(memorizedSpells, key=itemgetter(key1))
return memorizedSpells
# Sets up all the (12) action buttons for a player character with different spell or innate icons.
# It also sets up the scroll buttons left and right if needed.
# If Start is supplied, it will skip the first few items (used when scrolling through the list)
# BookType is a spellbook type bitfield (1-priest, 2-mage, 4-innate)
# FIXME: iwd2 has even more types
# Offset is a control ID offset here for iwd2 purposes
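# Example: SetupSpellIcons (Window, (1<<IE_SPELL_TYPE_PRIEST)|(1<<IE_SPELL_TYPE_WIZARD))
# fills the bar with the usable memorized priest and mage spells of the first
# selected actor.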
def SetupSpellIcons(Window, BookType, Start=0, Offset=0):
actor = GemRB.GameGetFirstSelectedActor ()
# construct the spellbook of usable (not depleted) memorized spells
# the getters expect the BookType as: 0 priest, 1 mage, 2 innate
# we almost need bitfield support for cleric/mages and the like
if BookType == -1:
# Nahal's reckless dweomer can use any known spell
allSpells = GetKnownSpells (actor, IE_SPELL_TYPE_WIZARD)
else:
allSpells = []
if BookType & (1<<IE_SPELL_TYPE_PRIEST): #1
allSpells = GetUsableMemorizedSpells (actor, IE_SPELL_TYPE_PRIEST)
if BookType & (1<<IE_SPELL_TYPE_WIZARD): #2
allSpells += GetUsableMemorizedSpells (actor, IE_SPELL_TYPE_WIZARD)
if BookType & (1<<IE_SPELL_TYPE_INNATE): #4
allSpells += GetUsableMemorizedSpells (actor, IE_SPELL_TYPE_INNATE)
if not len(allSpells):
raise AttributeError ("Error, unknown BookType passed to SetupSpellIcons: %d! Bailing out!" %(BookType))
if BookType == -1:
memorizedSpells = allSpells
# reset Type, so we can choose the surgy spell instead of just getting a redraw of the action bar
GemRB.SetVar("Type", 3)
else:
memorizedSpells = SortUsableSpells(allSpells)
# start creating the controls
import GUICommonWindows
# TODO: ASCOL, ROWS
#AsCol = CommonTables.SpellDisplay.GetValue (layout, "AS_COL")
#Rows = CommonTables.SpellDisplay.GetValue (layout, "ROWS")
More = len(memorizedSpells) > 12
if not More and Start:
More = True
# scroll left button
if More:
Button = Window.GetControl (Offset)
Button.SetText ("")
if Start:
#Button.SetActionIcon(globals(), ACT_LEFT, 0)
GUICommonWindows.SetActionIconWorkaround (Button, ACT_LEFT, 0)
Button.SetState (IE_GUI_BUTTON_UNPRESSED)
else:
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
Button.SetTooltip ("")
Button.SetState (IE_GUI_BUTTON_DISABLED)
# disable all spells if fx_disable_spellcasting was run with the same type
# but only if there are any spells of that type to disable
disabled_spellcasting = GemRB.GetPlayerStat(actor, IE_CASTING, 0)
actionLevel = GemRB.GetVar ("ActionLevel")
#order is: mage, cleric, innate, class, song, (defaults to 1, item)
spellSections = [2, 4, 8, 16, 16]
# create the spell icon buttons
buttonCount = 12 - More # GUIBT_COUNT in PCStatsStruct
for i in range (buttonCount):
Button = Window.GetControl (i+Offset+More)
if i+Start >= len(memorizedSpells):
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
Button.SetText ("")
Button.SetTooltip ("")
continue
Spell = memorizedSpells[i+Start]
spellType = Spell['SpellType']
if spellType > 4:
spellType = 1
else:
spellType = spellSections[spellType]
if BookType == -1:
Button.SetVarAssoc ("Spell", Spell['SpellIndex']+i+Start)
else:
Button.SetVarAssoc ("Spell", Spell['SpellIndex'])
# disable spells that should be cast from the inventory or can't be cast while silenced or ...
# see splspec.2da for all the reasons; silence is handled elsewhere
specialSpell = GemRB.CheckSpecialSpell(actor, Spell['SpellResRef'])
specialSpell = (specialSpell & SP_IDENTIFY) or ((specialSpell & SP_SURGE) and actionLevel == 5)
if specialSpell or (disabled_spellcasting&spellType):
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.EnableBorder(1, 0)
#Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommonWindows.UpdateActionsWindow) # noop if it worked or not :)
else:
Button.SetState (IE_GUI_BUTTON_UNPRESSED)
Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUICommonWindows.SpellPressed)
if Spell['SpellResRef']:
Button.SetSprites ("guibtbut", 0, 0,1,2,3)
Button.SetSpellIcon (Spell['SpellResRef'], 1)
Button.SetFlags (IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_ALIGN_BOTTOM|IE_GUI_BUTTON_ALIGN_RIGHT, OP_SET)
Button.SetTooltip (Spell['SpellName'])
if Spell['MemoCount'] > 0 and BookType != -1:
Button.SetText (str(Spell['MemoCount']))
else:
Button.SetText ("")
# scroll right button
if More:
Button = Window.GetControl (Offset+buttonCount)
GUICommonWindows.SetActionIconWorkaround (Button, ACT_RIGHT, buttonCount)
Button.SetText ("")
if len(memorizedSpells) - Start > 10:
Button.SetState (IE_GUI_BUTTON_UNPRESSED)
else:
Button.SetState (IE_GUI_BUTTON_DISABLED)
Button.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
Button.SetTooltip ("")
#################################################################
# routines used during character generation and levelup
#################################################################
def GetMageSpells (Kit, Alignment, Level):
MageSpells = []
SpellType = 99
Table = GemRB.LoadTable ("aligns")
v = Table.FindValue (3, Alignment)
Usability = Kit | Table.GetValue(v, 5)
SpellsTable = GemRB.LoadTable ("spells")
for i in range(SpellsTable.GetValue ("MAGE", str(Level), 1) ):
SpellName = "SPWI%d%02d"%(Level,i+1)
ms = GemRB.GetSpell (SpellName, 1)
if ms == None:
continue
if Usability & ms['SpellExclusion']:
SpellType = 0
else:
SpellType = 1
if Kit & (1 << ms['SpellSchool']+5): # of matching specialist school
SpellType = 2
# Wild mage spells are of normal schools, so we have to find them
# separately. Generalists can learn any spell but the wild ones, so
# we check if the mage is wild and if a generalist wouldn't be able
# to learn the spell.
if Kit == 0x8000 and (0x4000 & ms['SpellExclusion']):
SpellType = 2
MageSpells.append ([SpellName, SpellType])
return MageSpells
def GetLearnableMageSpells (Kit, Alignment, Level):
Learnable = []
for Spell in GetMageSpells (Kit, Alignment, Level):
if Spell[1]:
Learnable.append (Spell[0])
return Learnable
def GetLearnablePriestSpells (Class, Alignment, Level):
Learnable =[]
Table=GemRB.LoadTable("aligns")
v = Table.FindValue(3, Alignment)
#usability is the bitset we look for
Usability=Table.GetValue(v, 5)
SpellsTable = GemRB.LoadTable ("spells")
for i in range(SpellsTable.GetValue ("PRIEST", str (Level), 1) ):
SpellName = "SPPR%d%02d"%(Level,i+1)
ms = GemRB.GetSpell(SpellName, 1)
if ms == None:
continue
if Class & ms['SpellDivine']:
continue
if Usability & ms['SpellExclusion']:
continue
Learnable.append (SpellName)
return Learnable
# there is no separate druid spell table in the originals
#FIXME: try to do this in a non-hard way?
def GetPriestSpellTable(tablename):
if not GemRB.HasResource (tablename, RES_2DA):
if tablename == "MXSPLDRU":
return "MXSPLPRS"
return tablename
def SetupSpellLevels (pc, TableName, Type, Level):
#don't die on a missing reference
tmp = GetPriestSpellTable(TableName)
if tmp != TableName:
SetupSpellLevels (pc, tmp, Type, Level)
return
Table = GemRB.LoadTable (TableName)
for i in range(Table.GetColumnCount ()):
# do a string lookup since some tables don't have entries for all levels
value = Table.GetValue (str(Level), str(i+1), 1)
# specialist mages get an extra spell if they already know that level
# FIXME: get a general routine to find specialists
school = GemRB.GetVar("MAGESCHOOL")
if Type == IE_SPELL_TYPE_WIZARD and school != 0:
if value > 0:
value += 1
GemRB.SetMemorizableSpellsCount (pc, value, Type, i)
return
def UnsetupSpellLevels (pc, TableName, Type, Level):
#don't die on a missing reference
tmp = GetPriestSpellTable(TableName)
if tmp != TableName:
UnsetupSpellLevels (pc, tmp, Type, Level)
return
Table = GemRB.LoadTable (TableName)
for i in range(Table.GetColumnCount ()):
GemRB.SetMemorizableSpellsCount (pc, 0, Type, i)
return
# Returns -1 if not found; otherwise, the index of the spell
def HasSpell (Actor, SpellType, Level, Ref):
# loop through each spell in the spell level and check for a matching ref
for i in range (GemRB.GetKnownSpellsCount (Actor, SpellType, Level)):
Spell = GemRB.GetKnownSpell(Actor, SpellType, Level, i)
if Spell["SpellResRef"].upper() == Ref.upper(): # ensure case is the same
return i
# not found
return -1
def CannotLearnSlotSpell ():
pc = GemRB.GameGetSelectedPCSingle ()
# disqualify sorcerors immediately
if GemRB.GetPlayerStat (pc, IE_CLASS) == 19:
return LSR_STAT
import GUICommon
if GUICommon.GameIsPST():
import GUIINV
slot, slot_item = GUIINV.ItemHash[GemRB.GetVar ('ItemButton')]
else:
slot_item = GemRB.GetSlotItem (pc, GemRB.GetVar ("ItemButton"))
spell_ref = GemRB.GetItem (slot_item['ItemResRef'], pc)['Spell']
spell = GemRB.GetSpell (spell_ref)
# maybe she already knows this spell
if HasSpell (pc, IE_SPELL_TYPE_WIZARD, spell['SpellLevel']-1, spell_ref) != -1:
return LSR_KNOWN
# level check (needs enough intelligence for this level of spell)
dumbness = GemRB.GetPlayerStat (pc, IE_INT)
if spell['SpellLevel'] > GemRB.GetAbilityBonus (IE_INT, 1, dumbness):
return LSR_LEVEL
return 0
def LearnPriestSpells (pc, level, mask):
"""Learns all the priest spells through the given spell level.
Mask distinguishes clerical and druidic spells."""
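    # The mask filters on each spell's SpellDivine bits; see
    # GetLearnablePriestSpells above for how a spell is excluded. For example,
    # LearnPriestSpells (pc, 7, mask) teaches every non-excluded priest spell
    # of levels 1 through 7 that the character does not already know.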
if level > 7: # make sure we don't have too high a level
level = 7
# go through each level
alignment = GemRB.GetPlayerStat (pc, IE_ALIGNMENT)
for i in range (level):
learnable = GetLearnablePriestSpells (mask, alignment, i+1)
for spell in learnable:
# if the spell isn't learned, learn it
if HasSpell (pc, IE_SPELL_TYPE_PRIEST, i, spell) < 0:
GemRB.LearnSpell (pc, spell)
return
def RemoveKnownSpells (pc, type, level1=1, level2=1, noslots=0, kit=0):
"""Removes all known spells of a given type between two spell levels.
If noslots is true, all memorization counts are set to 0.
Kit is used to identify the priest spell mask of the spells to be removed;
this is only used when removing spells in a dualclass."""
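    # Example: RemoveKnownSpells (pc, IE_SPELL_TYPE_WIZARD, 1, 9, noslots=1)
    # forgets every known wizard spell and zeroes all memorization counts.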
# choose the correct limit based upon class type
if type == IE_SPELL_TYPE_WIZARD:
limit = 9
elif type == IE_SPELL_TYPE_PRIEST:
limit = 7
# make sure that we get the original kit, if we have one
if kit:
            import GUICommon
            originalkit = GUICommon.GetKitIndex (pc)
if originalkit: # kitted; find the class value
originalkit = CommonTables.KitList.GetValue (originalkit, 7)
else: # just get the class value
originalkit = GemRB.GetPlayerStat (pc, IE_CLASS)
# this is is specifically for dual-classes and will not work to remove only one
# spell type from a ranger/cleric multi-class
if CommonTables.ClassSkills.GetValue (originalkit, 0, 0) != "*": # knows druid spells
originalkit = 0x8000
elif CommonTables.ClassSkills.GetValue (originalkit, 1, 0) != "*": # knows cleric spells
originalkit = 0x4000
else: # don't know any other spells
originalkit = 0
# don't know how this would happen, but better to be safe
if originalkit == kit:
originalkit = 0
elif type == IE_SPELL_TYPE_INNATE:
limit = 1
else: # can't do anything if an improper spell type is sent
return 0
# make sure we're within parameters
if level1 < 1 or level2 > limit or level1 > level2:
return 0
# remove all spells for each level
for level in range (level1-1, level2):
# we need the count because we remove each spell in reverse order
count = GemRB.GetKnownSpellsCount (pc, type, level)
mod = count-1
for spell in range (count):
# see if we need to check for kit
if type == IE_SPELL_TYPE_PRIEST and kit:
# get the spell's ref data
ref = GemRB.GetKnownSpell (pc, type, level, mod-spell)
ref = GemRB.GetSpell (ref['SpellResRef'], 1)
# we have to look at the originalkit as well specifically for ranger/cleric dual-classes
# we wouldn't want to remove all cleric spells and druid spells if we lost our cleric class
# only the cleric ones
if kit&ref['SpellDivine'] or (originalkit and not originalkit&ref['SpellDivine']):
continue
# remove the spell
GemRB.RemoveSpell (pc, type, level, mod-spell)
# remove memorization counts if desired
if noslots:
GemRB.SetMemorizableSpellsCount (pc, 0, type, level)
# return success
return 1
|
gpl-2.0
| -1,405,279,672,849,003,800
| 34.423983
| 112
| 0.708457
| false
| 2.921243
| false
| false
| false
|
hkernbach/arangodb
|
3rdParty/V8/v5.7.492.77/src/js/macros.py
|
1
|
9259
|
# Copyright 2006-2009 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Dictionary that is passed as defines for js2c.py.
# Used for defines that must be defined for all native JS files.
define NONE = 0;
define READ_ONLY = 1;
define DONT_ENUM = 2;
define DONT_DELETE = 4;
# 2^53 - 1
define kMaxSafeInteger = 9007199254740991;
# 2^32 - 1
define kMaxUint32 = 4294967295;
# Type query macros.
#
# Note: We have special support for typeof(foo) === 'bar' in the compiler.
# It will *not* generate a runtime typeof call for the most important
# values of 'bar'.
macro IS_ARRAY(arg) = (%_IsArray(arg));
macro IS_ARRAYBUFFER(arg) = (%_ClassOf(arg) === 'ArrayBuffer');
macro IS_BOOLEAN(arg) = (typeof(arg) === 'boolean');
macro IS_DATAVIEW(arg) = (%_ClassOf(arg) === 'DataView');
macro IS_DATE(arg) = (%IsDate(arg));
macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
macro IS_FUNCTION(arg) = (%IsFunction(arg));
macro IS_GENERATOR(arg) = (%_ClassOf(arg) === 'Generator');
macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
macro IS_MAP(arg) = (%_ClassOf(arg) === 'Map');
macro IS_MAP_ITERATOR(arg) = (%_ClassOf(arg) === 'Map Iterator');
macro IS_NULL(arg) = (arg === null);
macro IS_NULL_OR_UNDEFINED(arg) = (arg == null);
macro IS_NUMBER(arg) = (typeof(arg) === 'number');
macro IS_OBJECT(arg) = (typeof(arg) === 'object');
macro IS_PROXY(arg) = (%_IsJSProxy(arg));
macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
macro IS_SET(arg) = (%_ClassOf(arg) === 'Set');
macro IS_SET_ITERATOR(arg) = (%_ClassOf(arg) === 'Set Iterator');
macro IS_SHAREDARRAYBUFFER(arg) = (%_ClassOf(arg) === 'SharedArrayBuffer');
macro IS_SIMD_VALUE(arg) = (%IsSimdValue(arg));
macro IS_STRING(arg) = (typeof(arg) === 'string');
macro IS_SYMBOL(arg) = (typeof(arg) === 'symbol');
macro IS_TYPEDARRAY(arg) = (%_IsTypedArray(arg));
macro IS_UNDEFINED(arg) = (arg === (void 0));
macro IS_WEAKMAP(arg) = (%_ClassOf(arg) === 'WeakMap');
macro IS_WEAKSET(arg) = (%_ClassOf(arg) === 'WeakSet');
# Macro for ES queries of the type: "Type(O) is Object."
macro IS_RECEIVER(arg) = (%_IsJSReceiver(arg));
# Macro for ES queries of the type: "IsCallable(O)"
macro IS_CALLABLE(arg) = (typeof(arg) === 'function');
# Macro for ES6 CheckObjectCoercible
# Will throw a TypeError of the form "[functionName] called on null or undefined".
macro CHECK_OBJECT_COERCIBLE(arg, functionName) = if (IS_NULL(%IS_VAR(arg)) || IS_UNDEFINED(arg)) throw %make_type_error(kCalledOnNullOrUndefined, functionName);
# Inline macros. Use %IS_VAR to make sure arg is evaluated only once.
macro NUMBER_IS_NAN(arg) = (!%_IsSmi(%IS_VAR(arg)) && !(arg == arg));
macro NUMBER_IS_FINITE(arg) = (%_IsSmi(%IS_VAR(arg)) || ((arg == arg) && (arg != 1/0) && (arg != -1/0)));
macro TO_BOOLEAN(arg) = (!!(arg));
macro TO_INTEGER(arg) = (%_ToInteger(arg));
macro TO_INT32(arg) = ((arg) | 0);
macro TO_UINT32(arg) = ((arg) >>> 0);
macro INVERT_NEG_ZERO(arg) = ((arg) + 0);
macro TO_LENGTH(arg) = (%_ToLength(arg));
macro TO_STRING(arg) = (%_ToString(arg));
macro TO_NUMBER(arg) = (%_ToNumber(arg));
macro TO_OBJECT(arg) = (%_ToObject(arg));
macro HAS_OWN_PROPERTY(obj, key) = (%_Call(ObjectHasOwnProperty, obj, key));
# Private names.
macro IS_PRIVATE(sym) = (%SymbolIsPrivate(sym));
macro HAS_PRIVATE(obj, key) = HAS_OWN_PROPERTY(obj, key);
macro HAS_DEFINED_PRIVATE(obj, sym) = (!IS_UNDEFINED(obj[sym]));
macro GET_PRIVATE(obj, sym) = (obj[sym]);
macro SET_PRIVATE(obj, sym, val) = (obj[sym] = val);
# To avoid ES2015 Function name inference.
macro ANONYMOUS_FUNCTION(fn) = (0, (fn));
# Constants. The compiler constant folds them.
define INFINITY = (1/0);
define UNDEFINED = (void 0);
# Macros implemented in Python.
python macro CHAR_CODE(str) = ord(str[1]);
# For messages.js
# Matches Script::Type from objects.h
define TYPE_NATIVE = 0;
define TYPE_EXTENSION = 1;
define TYPE_NORMAL = 2;
# Matches Script::CompilationType from objects.h
define COMPILATION_TYPE_HOST = 0;
define COMPILATION_TYPE_EVAL = 1;
define COMPILATION_TYPE_JSON = 2;
# Must match PropertyFilter in property-details.h
define PROPERTY_FILTER_NONE = 0;
define PROPERTY_FILTER_ONLY_ENUMERABLE = 2;
define PROPERTY_FILTER_SKIP_STRINGS = 8;
define PROPERTY_FILTER_SKIP_SYMBOLS = 16;
# Use for keys, values and entries iterators.
define ITERATOR_KIND_KEYS = 1;
define ITERATOR_KIND_VALUES = 2;
define ITERATOR_KIND_ENTRIES = 3;
macro FIXED_ARRAY_GET(array, index) = (%_FixedArrayGet(array, (index) | 0));
macro FIXED_ARRAY_SET(array, index, value) = (%_FixedArraySet(array, (index) | 0, value));
# TODO(adamk): Find a more robust way to force Smi representation.
macro FIXED_ARRAY_SET_SMI(array, index, value) = (FIXED_ARRAY_SET(array, index, (value) | 0));
macro ORDERED_HASH_TABLE_BUCKET_COUNT(table) = (FIXED_ARRAY_GET(table, 0));
macro ORDERED_HASH_TABLE_ELEMENT_COUNT(table) = (FIXED_ARRAY_GET(table, 1));
macro ORDERED_HASH_TABLE_SET_ELEMENT_COUNT(table, count) = (FIXED_ARRAY_SET_SMI(table, 1, count));
macro ORDERED_HASH_TABLE_DELETED_COUNT(table) = (FIXED_ARRAY_GET(table, 2));
macro ORDERED_HASH_TABLE_SET_DELETED_COUNT(table, count) = (FIXED_ARRAY_SET_SMI(table, 2, count));
macro ORDERED_HASH_TABLE_BUCKET_AT(table, bucket) = (FIXED_ARRAY_GET(table, 3 + (bucket)));
macro ORDERED_HASH_TABLE_SET_BUCKET_AT(table, bucket, entry) = (FIXED_ARRAY_SET(table, 3 + (bucket), entry));
macro ORDERED_HASH_TABLE_HASH_TO_BUCKET(hash, numBuckets) = (hash & ((numBuckets) - 1));
macro ORDERED_HASH_SET_ENTRY_TO_INDEX(entry, numBuckets) = (3 + (numBuckets) + ((entry) << 1));
macro ORDERED_HASH_SET_KEY_AT(table, entry, numBuckets) = (FIXED_ARRAY_GET(table, ORDERED_HASH_SET_ENTRY_TO_INDEX(entry, numBuckets)));
macro ORDERED_HASH_SET_CHAIN_AT(table, entry, numBuckets) = (FIXED_ARRAY_GET(table, ORDERED_HASH_SET_ENTRY_TO_INDEX(entry, numBuckets) + 1));
macro ORDERED_HASH_MAP_ENTRY_TO_INDEX(entry, numBuckets) = (3 + (numBuckets) + ((entry) * 3));
macro ORDERED_HASH_MAP_KEY_AT(table, entry, numBuckets) = (FIXED_ARRAY_GET(table, ORDERED_HASH_MAP_ENTRY_TO_INDEX(entry, numBuckets)));
macro ORDERED_HASH_MAP_VALUE_AT(table, entry, numBuckets) = (FIXED_ARRAY_GET(table, ORDERED_HASH_MAP_ENTRY_TO_INDEX(entry, numBuckets) + 1));
macro ORDERED_HASH_MAP_CHAIN_AT(table, entry, numBuckets) = (FIXED_ARRAY_GET(table, ORDERED_HASH_MAP_ENTRY_TO_INDEX(entry, numBuckets) + 2));
# Must match OrderedHashTable::kNotFound.
define NOT_FOUND = -1;
# Check whether debug is active.
define DEBUG_IS_ACTIVE = (%_DebugIsActive() != 0);
# UseCounters from include/v8.h
define kUseAsm = 0;
define kBreakIterator = 1;
define kLegacyConst = 2;
define kMarkDequeOverflow = 3;
define kStoreBufferOverflow = 4;
define kSlotsBufferOverflow = 5;
define kForcedGC = 7;
define kSloppyMode = 8;
define kStrictMode = 9;
define kRegExpPrototypeStickyGetter = 11;
define kRegExpPrototypeToString = 12;
define kRegExpPrototypeUnicodeGetter = 13;
define kIntlV8Parse = 14;
define kIntlPattern = 15;
define kIntlResolved = 16;
define kPromiseChain = 17;
define kPromiseAccept = 18;
define kPromiseDefer = 19;
define kHtmlCommentInExternalScript = 20;
define kHtmlComment = 21;
define kSloppyModeBlockScopedFunctionRedefinition = 22;
define kForInInitializer = 23;
define kArrayProtectorDirtied = 24;
define kArraySpeciesModified = 25;
define kArrayPrototypeConstructorModified = 26;
define kArrayInstanceProtoModified = 27;
define kArrayInstanceConstructorModified = 28;
define kLegacyFunctionDeclaration = 29;
define kRegExpPrototypeSourceGetter = 30;
define kRegExpPrototypeOldFlagGetter = 31;
|
apache-2.0
| -6,143,239,776,302,362,000
| 45.762626
| 161
| 0.696836
| false
| 3.143973
| false
| false
| false
|
enthought/traitsbackendqt
|
enthought/traits/ui/qt4/image_enum_editor.py
|
1
|
12386
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2009, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Evan Patterson
# Date: 08/11/2009
#
#------------------------------------------------------------------------------
""" Defines the various image enumeration editors for the PyQt user interface
toolkit.
"""
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
from enthought.qt import QtCore, QtGui
# FIXME: ToolkitEditorFactory is a proxy class defined here just for backward
# compatibility. The class has been moved to the
# enthought.traits.ui.editors.image_enum_editor file.
from enthought.traits.ui.editors.image_enum_editor import ToolkitEditorFactory
from editor import Editor
from enum_editor import BaseEditor as BaseEnumEditor
from enum_editor import SimpleEditor as SimpleEnumEditor
from enum_editor import RadioEditor as CustomEnumEditor
from helper import pixmap_cache
#-------------------------------------------------------------------------------
# 'BaseImageEnumEditor' class:
#-------------------------------------------------------------------------------
class BaseEditor(object):
""" The base class for the different styles of ImageEnumEditor.
"""
def get_pixmap(self, name):
""" Get a pixmap representing a possible object traits value.
"""
factory = self.factory
name = ''.join((factory.prefix, name, factory.suffix))
return pixmap_cache(name, factory._image_path)
#-------------------------------------------------------------------------------
# 'ReadonlyEditor' class:
#-------------------------------------------------------------------------------
class ReadonlyEditor(BaseEditor, BaseEnumEditor):
""" Read-only style of image enumeration editor, which displays a single
static image representing the object trait's value.
"""
#---------------------------------------------------------------------------
# Finishes initializing the editor by creating the underlying toolkit
# widget:
#---------------------------------------------------------------------------
def init(self, parent):
""" Finishes initializing the editor by creating the underlying toolkit
widget.
"""
self.control = QtGui.QLabel()
self.control.setPixmap(self.get_pixmap(self.str_value))
self.set_tooltip()
#---------------------------------------------------------------------------
# Updates the editor when the object trait changes external to the editor:
#---------------------------------------------------------------------------
def update_editor(self):
""" Updates the editor when the object trait changes externally to the
editor.
"""
self.control.setPixmap(self.get_pixmap(self.str_value))
#-------------------------------------------------------------------------------
# 'SimpleEditor' class:
#-------------------------------------------------------------------------------
class SimpleEditor(BaseEditor, SimpleEnumEditor):
""" Simple style of image enumeration editor, which displays a combo box.
"""
#---------------------------------------------------------------------------
# Returns the QComboBox used for the editor control:
#---------------------------------------------------------------------------
def create_combo_box(self):
""" Returns the QComboBox used for the editor control.
"""
control = ImageEnumComboBox(self)
control.setSizePolicy(QtGui.QSizePolicy.Maximum,
QtGui.QSizePolicy.Maximum)
return control
#---------------------------------------------------------------------------
# Updates the editor when the object trait changes external to the editor:
#---------------------------------------------------------------------------
    def update_editor(self):
""" Updates the editor when the object trait changes externally to the
editor.
"""
if self._no_enum_update == 0:
self._no_enum_update += 1
try:
index = self.names.index(self.inverse_mapping[self.value])
            except (KeyError, ValueError):  # value not in the mapping
self.control.setCurrentIndex(-1)
else:
cols = self.factory.cols
rows = (len(self.names) + cols - 1) / cols
row, col = index / cols, index % cols
self.control.setModelColumn(col)
self.control.setCurrentIndex(row)
self._no_enum_update -= 1
#---------------------------------------------------------------------------
# Rebuilds the contents of the editor whenever the original factory
# object's 'values' trait changes:
#---------------------------------------------------------------------------
def rebuild_editor(self):
""" Rebuilds the contents of the editor whenever the original factory
object's **values** trait changes.
"""
self.control.model().reset()
#-------------------------------------------------------------------------------
# 'CustomEditor' class:
#-------------------------------------------------------------------------------
class CustomEditor(BaseEditor, CustomEnumEditor):
""" Simple style of image enumeration editor, which displays a combo box.
"""
# Is the button layout row-major or column-major? This value overrides the
# default.
row_major = True
#---------------------------------------------------------------------------
# Returns the QAbstractButton used for the radio button:
#---------------------------------------------------------------------------
def create_button(self, name):
""" Returns the QAbstractButton used for the radio button.
"""
button = QtGui.QToolButton()
button.setAutoExclusive(True)
button.setCheckable(True)
pixmap = self.get_pixmap(name)
button.setIcon(QtGui.QIcon(pixmap))
button.setIconSize(pixmap.size())
return button
#-------------------------------------------------------------------------------
# Custom Qt objects used in the SimpleEditor:
#-------------------------------------------------------------------------------
class ImageEnumComboBox(QtGui.QComboBox):
""" A combo box which displays images instead of text.
"""
def __init__(self, editor, parent=None):
""" Reimplemented to store the editor and set a delegate for drawing the
items in the popup menu. If there is more than one column, use a
TableView instead of ListView for the popup.
"""
QtGui.QComboBox.__init__(self, parent)
self._editor = editor
model = ImageEnumModel(editor, self)
self.setModel(model)
delegate = ImageEnumItemDelegate(editor, self)
if editor.factory.cols > 1:
view = ImageEnumTablePopupView(self)
view.setItemDelegate(delegate)
self.setView(view)
# Unless we force it, the popup for a combo box will not be wider
# than the box itself, so we set a high minimum width.
width = 0
for col in xrange(self._editor.factory.cols):
width += view.sizeHintForColumn(col)
view.setMinimumWidth(width)
else:
self.setItemDelegate(delegate)
def paintEvent(self, event):
""" Reimplemented to draw the ComboBox frame and paint the image
centered in it.
"""
painter = QtGui.QStylePainter(self)
painter.setPen(self.palette().color(QtGui.QPalette.Text))
option = QtGui.QStyleOptionComboBox()
self.initStyleOption(option)
painter.drawComplexControl(QtGui.QStyle.CC_ComboBox, option)
editor = self._editor
pixmap = editor.get_pixmap(editor.inverse_mapping[editor.value])
arrow = self.style().subControlRect(QtGui.QStyle.CC_ComboBox, option,
QtGui.QStyle.SC_ComboBoxArrow)
option.rect.setWidth(option.rect.width() - arrow.width())
target = QtGui.QStyle.alignedRect(QtCore.Qt.LeftToRight,
QtCore.Qt.AlignCenter,
pixmap.size(), option.rect)
painter.drawPixmap(target, pixmap)
def sizeHint(self):
""" Reimplemented to set a size hint based on the size of the larget
image.
"""
size = QtCore.QSize()
for name in self._editor.names:
size = size.expandedTo(self._editor.get_pixmap(name).size())
option = QtGui.QStyleOptionComboBox()
self.initStyleOption(option)
size = self.style().sizeFromContents(QtGui.QStyle.CT_ComboBox, option,
size, self)
return size
class ImageEnumTablePopupView(QtGui.QTableView):
def __init__(self, parent):
""" Configure the appearence of the table view.
"""
QtGui.QTableView.__init__(self, parent)
hheader = self.horizontalHeader()
hheader.setResizeMode(QtGui.QHeaderView.ResizeToContents)
hheader.hide()
vheader = self.verticalHeader()
vheader.setResizeMode(QtGui.QHeaderView.ResizeToContents)
vheader.hide()
self.setShowGrid(False)
class ImageEnumItemDelegate(QtGui.QStyledItemDelegate):
""" An item delegate which draws only images.
"""
def __init__(self, editor, parent):
""" Reimplemented to store the editor.
"""
QtGui.QStyledItemDelegate.__init__(self, parent)
self._editor = editor
def displayText(self, value, locale):
""" Reimplemented to display nothing.
"""
return ''
def paint(self, painter, option, mi):
""" Reimplemented to draw images.
"""
# Delegate to our superclass to draw the background
QtGui.QStyledItemDelegate.paint(self, painter, option, mi)
# Now draw the pixmap
name = mi.data(QtCore.Qt.DisplayRole)
pixmap = self._get_pixmap(name)
if pixmap is not None:
target = QtGui.QStyle.alignedRect(QtCore.Qt.LeftToRight,
QtCore.Qt.AlignCenter,
pixmap.size(), option.rect)
painter.drawPixmap(target, pixmap)
def sizeHint(self, option, mi):
""" Reimplemented to define a size hint based on the size of the pixmap.
"""
name = mi.data(QtCore.Qt.DisplayRole)
pixmap = self._get_pixmap(name)
if pixmap is None:
return QtCore.QSize()
return pixmap.size()
def _get_pixmap(self, name):
return self._editor.get_pixmap(name)
class ImageEnumModel(QtCore.QAbstractTableModel):
""" A table model for use with the 'simple' style ImageEnumEditor.
"""
def __init__(self, editor, parent):
""" Reimplemented to store the editor.
"""
super(ImageEnumModel, self).__init__(parent)
self._editor = editor
def rowCount(self, mi):
""" Reimplemented to return the number of rows.
"""
cols = self._editor.factory.cols
result = (len(self._editor.names) + cols - 1) / cols
return result
def columnCount(self, mi):
""" Reimplemented to return the number of columns.
"""
return self._editor.factory.cols
def data(self, mi, role):
""" Reimplemented to return the data.
"""
if role == QtCore.Qt.DisplayRole:
index = mi.row() * self._editor.factory.cols + mi.column()
if index < len(self._editor.names):
return self._editor.names[index]
return None
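For context, a minimal usage sketch of these editors — assuming the era's enthought.traits.ui.api exports ImageEnumEditor; the trait values, image prefix, and suffix below are invented, and images named '<prefix><value><suffix>' are expected on the factory's image path:
from enthought.traits.api import Enum, HasTraits
from enthought.traits.ui.api import Item, View, ImageEnumEditor

class Shape(HasTraits):
    kind = Enum('square', 'circle', 'triangle')
    view = View(
        Item('kind',
             editor=ImageEnumEditor(values=['square', 'circle', 'triangle'],
                                    prefix='shape_', suffix='_icon', cols=3),
             # 'custom' selects the radio-button grid; 'simple' the combo box.
             style='custom'))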
|
gpl-2.0
| -7,069,525,519,631,933,000
| 37.110769
| 80
| 0.516793
| false
| 4.980298
| false
| false
| false
|
realitix/vulkan
|
setup.py
|
1
|
1143
|
from setuptools import setup
with open("README.md") as file:
long_description = file.read()
setup(
name='vulkan',
version='1.1.99.1',
description='Ultimate Python binding for Vulkan API',
author='realitix',
author_email='realitix@gmail.com',
packages=['vulkan'],
    long_description=long_description,
long_description_content_type="text/markdown",
include_package_data=True,
install_requires=['cffi>=1.10'],
setup_requires=['cffi>=1.10'],
url='https://github.com/realitix/vulkan',
keywords='Graphics,3D,Vulkan,cffi',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: Android",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Natural Language :: English",
"Topic :: Multimedia :: Graphics",
"Topic :: Scientific/Engineering",
"Topic :: Software Development :: Libraries :: Python Modules",
],
cffi_modules=["vulkan/vulkan_build.py:ffi"]
)
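The cffi_modules line hands the binding work to an out-of-line cffi builder that setuptools invokes at build time. Schematically — this is the general shape such a module takes, not the real vulkan_build.py — it looks like:
# Hypothetical out-of-line cffi builder, for illustration only.
from cffi import FFI

ffi = FFI()
ffi.cdef("""
    int example_function(int x);  /* declarations of the bound C API */
""")
ffi.set_source("vulkan._example", '#include "example.h"')

if __name__ == "__main__":
    ffi.compile(verbose=True)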
|
apache-2.0
| -5,513,329,181,179,121,000
| 31.657143
| 71
| 0.634296
| false
| 3.81
| false
| false
| false
|
VerosK/django-dashing
|
dashing/widgets.py
|
1
|
2802
|
# -*- coding: utf-8 -*-
import json
from django.http import HttpResponse
from django.views.generic import View
class JSONResponseMixin(object):
"""
A mixin that can be used to render a JSON response.
"""
def render_to_json_response(self, context, **response_kwargs):
"""
Returns a JSON response, transforming 'context' to make the payload.
"""
return HttpResponse(
self.convert_context_to_json(context),
content_type='application/json',
**response_kwargs
)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return json.dumps(context)
class Widget(JSONResponseMixin, View):
    def get(self, request, *args, **kwargs):
        context = self.get_context()
        return self.render_to_response(context)
def render_to_response(self, context, **response_kwargs):
return self.render_to_json_response(context, **response_kwargs)
class NumberWidget(Widget):
title = ''
more_info = ''
updated_at = ''
change_rate = ''
value = ''
def get_title(self):
return self.title
def get_more_info(self):
return self.more_info
def get_updated_at(self):
return self.updated_at
def get_change_rate(self):
return self.change_rate
def get_value(self):
return self.value
def get_context(self):
return {
'title': self.get_title(),
'more_info': self.get_more_info(),
'updated_at': self.get_updated_at(),
'change_rate': self.get_change_rate(),
'value': self.get_value(),
}
class ListWidget(Widget):
title = ''
more_info = ''
updated_at = ''
data = []
def get_title(self):
return self.title
def get_more_info(self):
return self.more_info
def get_updated_at(self):
return self.updated_at
def get_data(self):
return self.data
def get_context(self):
return {
'title': self.get_title(),
'more_info': self.get_more_info(),
'updated_at': self.get_updated_at(),
'data': self.get_data(),
}
class GraphWidget(Widget):
title = ''
more_info = ''
value = ''
data = []
def get_title(self):
return self.title
def get_more_info(self):
return self.more_info
def get_value(self):
return self.value
def get_data(self):
return self.data
def get_context(self):
return {
'title': self.get_title(),
'more_info': self.get_more_info(),
'value': self.get_value(),
'data': self.get_data(),
}
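Wiring one of these up takes a small subclass plus a URL route; for example (the widget name, value, and URL pattern here are invented for illustration):
from dashing.widgets import NumberWidget

class VisitorsWidget(NumberWidget):
    title = 'Visitors'
    more_info = 'last 24 hours'

    def get_value(self):
        # Replace with a real query, e.g. Visit.objects.count().
        return '1024'

# urls.py (Django 1.x style, matching this code's era):
# from django.conf.urls import url
# urlpatterns = [url(r'^widgets/visitors/$', VisitorsWidget.as_view())]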
|
bsd-3-clause
| 4,210,653,318,595,151,000
| 22.745763
| 81
| 0.568879
| false
| 3.875519
| false
| false
| false
|