| repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1) | license (stringclasses 15) | size (int64 6-947k) | score (float64 0-0.34) | prefix (stringlengths 0-8.16k) | middle (stringlengths 3-512) | suffix (stringlengths 0-8.17k) |
|---|---|---|---|---|---|---|---|---|
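Each row pairs one repository file with a fill-in-the-middle split: concatenating prefix + middle + suffix restores the original source. A minimal sketch of consuming such rows, assuming a hypothetical JSON-lines serialization of the table (the header above does not specify one, so the file name is illustrative):

import json

def reassemble(row):
    # prefix + middle + suffix is the whole file for a FIM-style row
    return row["prefix"] + row["middle"] + row["suffix"]

with open("fim_rows.jsonl") as f:          # hypothetical file name
    for line in f:
        row = json.loads(line)
        print(row["repo_name"], row["path"], len(reassemble(row)))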
| hkariti/ansible | lib/ansible/modules/system/systemd.py | Python | gpl-3.0 | 18,087 | 0.002764 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Brian Coca <bcoca@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
module: systemd
author:
- Ansible Core Team
version_added: "2.2"
short_description: Manage services
description:
- Controls systemd services on remote hosts.
options:
name:
description:
- Name of the service. When using in a chroot environment you always need to specify the full name, e.g. C(crond.service).
aliases: [ service, unit ]
state:
description:
- C(started)/C(stopped) are idempotent actions that will not run commands unless necessary.
C(restarted) will always bounce the service. C(reloaded) will always reload.
choices: [ reloaded, restarted, started, stopped ]
enabled:
description:
- Whether the service should start on boot. B(At least one of state and enabled are required.)
type: bool
force:
description:
- Whether to override existing symlinks.
type: bool
version_added: 2.6
masked:
description:
- Whether the unit should be masked or not. A masked unit is impossible to start.
type: bool
daemon_reload:
description:
- Run daemon-reload before doing any other operations, to make sure systemd has read any changes.
type: bool
default: 'no'
aliases: [ daemon-reload ]
user:
description:
- Run systemctl talking to the service manager of the calling user, rather than the service manager
of the system.
type: bool
default: 'no'
no_block:
description:
- Do not synchronously wait for the requested operation to finish.
Enqueued job will continue without Ansible blocking on its completion.
type: bool
default: 'no'
version_added: "2.3"
notes:
- Since 2.4, one of the following options is required: 'state', 'enabled', 'masked', 'daemon_reload'; all except 'daemon_reload' also require 'name'.
- Before 2.4, 'name' was always required.
requirements:
- A system managed by systemd.
'''
EXAMPLES = '''
- name: Make sure a service is running
systemd:
state: started
name: httpd
- name: stop service cron on debian, if running
systemd:
name: cron
state: stopped
- name: restart service cron on centos, in all cases, also issue daemon-reload to pick up config changes
systemd:
state: restarted
daemon_reload: yes
name: crond
- name: reload service httpd, in all cases
systemd:
name: httpd
state: reloaded
- name: enable service httpd and ensure it is not masked
systemd:
name: httpd
enabled: yes
masked: no
- name: enable a timer for dnf-automatic
systemd:
name: dnf-automatic.timer
state: started
enabled: True
- name: just force systemd to reread configs (2.4 and above)
systemd:
daemon_reload: yes
'''
RETURN = '''
status:
description: A dictionary with the key=value pairs returned from `systemctl show`
returned: success
type: complex
contains: {
"ActiveEnterTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ActiveEnterTimestampMonotonic": "8135942",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "auditd.service systemd-user-sessions.service time-sync.target systemd-journald.socket basic.target system.slice",
"AllowIsolate": "no",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "1000",
"CPUAccounting": "no",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "1024",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "18446744073709551615",
"ConditionResult": "yes",
"ConditionTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ConditionTimestampMonotonic": "7902742",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/crond.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"Delegate": "no",
"Description": "Command Scheduler",
"DevicePolicy": "auto",
"EnvironmentFile": "/etc/sysconfig/crond (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "595",
"ExecMainStartTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ExecMainStartTimestampMonotonic": "8134990",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/crond ; argv[]=/usr/sbin/crond -n $CRONDARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FragmentPath": "/usr/lib/systemd/system/crond.service",
"GuessMainPID": "yes",
"IOScheduling": "0",
"Id": "crond.service",
"IgnoreOnIsolate": "no",
"IgnoreOnSnapshot": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"InactiveExitTimestampMonotonic": "8135942",
"JobTimeoutUSec": "0",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "18446744073709551615",
"LimitCORE": "18446744073709551615",
"LimitCPU": "18446744073709551615",
"LimitDATA": "18446744073709551615",
"LimitFSIZE": "18446744073709551615",
"LimitLOCKS": "18446744073709551615",
"LimitMEMLOCK": "65536",
"LimitMSGQUEUE": "819200",
"LimitNICE": "0",
"LimitNOFILE": "4096",
"LimitNPROC": "3902",
"LimitRSS": "18446744073709551615",
"LimitRTPRIO": "0",
"LimitRTTIME": "18446744073709551615",
"LimitSIGPENDING": "3902",
"LimitSTACK": "18446744073709551615",
"LoadState": "loaded",
"MainPID": "595",
"MemoryAccounting": "no",
"MemoryLimit": "18446744073709551615",
"MountFlags": "0",
"Names": "crond.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureIsolate": "no",
"PermissionsStartOnly": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"Requires": "basic.target",
"Restart": "no",
"RestartUSec": "100ms",
"Result": "success",
"RootDirectoryStartOnly": "no",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitInterval": "10000000",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"Sysl
| sgarrity/bedrock | bedrock/newsletter/redirects.py | Python | mpl-2.0 | 432 | 0.009259 |
from bedrock.redirects.util import redirect
redirectpatterns = (
# bug 926629
redirect(r'^newsletter/about_mobile(?:/(?:index\.html)?)?$', 'newsletter.subscribe'),
redirect(r'^newsletter/about_mozilla(?:/(?:index\.html)?)?$', 'mozorg.contribute.index'),
redirect(r'^newsletter/new(?:/(?:index\.html)?)?$', 'newsletter.subscribe'),
redirect(r'^newsletter/ios(?:/(?:index\.html)?)?$', 'firefox.mobile.index'),
)
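The (?:/(?:index\.html)?)? tail lets each pattern above match the bare path, the trailing-slash form, and the /index.html form; a quick self-contained check with re:

import re

# one of the patterns from redirectpatterns above
pattern = re.compile(r'^newsletter/new(?:/(?:index\.html)?)?$')
for path in ('newsletter/new', 'newsletter/new/', 'newsletter/new/index.html'):
    assert pattern.match(path)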
| jason-weirather/py-seq-tools | seqtools/cli/legacy/fastq_bgzf_index.py | Python | apache-2.0 | 6,010 | 0.030283 |
#!/usr/bin/python
import sys, argparse, StringIO, re, gzip
from multiprocessing import Pool, cpu_count, Queue
from Bio.Format.BGZF import is_bgzf, reader as BGZF_reader, get_block_bounds
from Bio.Format.Fastq import FastqEntry
# Create an index for bgzf zipped fastq files.
# Pre: A fastq file that has been compressed by bgzf
# Post: the Pre file, with the extension .bgi added.
# the index is gzipped
# <name> <blockStart> <innerStart> <dataSize> <read length>
# Be cautious that name could contain spaces and tabs
# global
blocks = {}
ncount = 1
def main():
global blocks
parser = argparse.ArgumentParser(description="Take a bgzf compressed fastq file and make an index",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('input_file',help="BGZF compressed fastq file")
parser.add_argument('--threads',type=int,default=1,help="number of threads")
args = parser.parse_args()
if not is_bgzf(args.input_file):
sys.stderr.write("ERROR: not a proper BGZF compressed file\n")
sys.exit()
z = 0
sys.stderr.write("scanning block starts\n")
bs = get_block_bounds(args.input_file)
blocks[bs[0][0]] = [[bs[0][1],-1]]
sys.stderr.write("scanning for new lines\n")
z = 0
#if args.threads > 1:
# p = Pool(processes=args.threads)
#results = []
#for xs in [bs[j:j+args.threads*10] for j in range(0,len(bs),args.threads*10)]:
for bounds in bs:
#print xs
#for bounds in xs:
z += 1
#if args.threads > 1:
# nls = p.apply_async(get_nls,args=(xs,args.input_file,z,))
#else:
# nls = Queue()
# nls.put(get_nls(xs,args.input_file,z))
v = get_nls(bounds,args.input_file,z)
do_nls_output(v)
#results.append(nls)
sys.stderr.write(str(z)+'/'+str(len(bs))+"\r")
#sys.exit()
#if args.threads > 1:
# p.close()
# p.join()
sys.stderr.write("\n")
sys.stderr.write("Traverse blocks and writing index\n")
of = gzip.open(args.input_file+'.bgi','w')
z = 0
for block in sorted(blocks):
z+=1
sys.stderr.write(str(z)+'/'+str(len(blocks))+"\r")
if len(blocks[block]) == 0: continue
bend = blocks[block][0][0]
starts = [x[1]+1 for x in blocks[block]]
with open(args.input_file,'rb') as inf:
inf.seek(block)
bytes = inf.read(bend-block)
s = StringIO.StringIO(bytes)
v = BGZF_reader(s)
ubytes = v.read(70000)
# now we can find all the new starts
# do all but the last
#print ubytes[starts[-2]:]
for i in range(len(starts)-1):
if starts[i] >= len(ubytes): #problem
sys.stderr.write("Problem start\n")
sys.exit()
m = re.match('([^\n]+)\n([^\n]+)(\n[^\n]+\n[^\n]+)',ubytes[starts[i]:])
if not m:
sys.stderr.write("Problem overlap\n")
sys.exit()
else:
if m.group(1)[0] != '@':
sys.stderr.write("failed to parse last\n")
sys.exit()
of.write(m.group(1)[1:]+"\t"+str(block)+"\t"+str(starts[i])+"\t"+str(len(m.group(1))+len(m.group(2))+len(m.group(3))+2)+"\t"+str(len(m.group(2)))+"\n")
with open(args.input_file,'rb') as inf:
v2 = BGZF_reader(inf,blockStart=block,innerStart=starts[-1]-1)
spc = v2.read(1)
if spc != "\n":
sys.stderr.write("expected newline\n")
sys.exit()
cur = v2.get_block_start()
inn = v2.get_inner_start()
buffer = ''
for i in range(0,4):
while True:
c = v2.read(1)
if len(c) == 0: break
buffer += c
if c == "\n": break
if buffer == "":
break
m = re.match('([^\n]+)\n([^\n]+)',buffer)
if not m:
sys.stderr.write("failed to parse last\n"+buffer+"\n")
sys.exit()
if m.group(1)[0] != '@':
sys.stderr.write("failed to parse last\n"+buffer+"\n")
sys.exit()
of.write(m.group(1)[1:]+"\t"+str(cur)+"\t"+str(inn)+"\t"+str(len(buffer))+"\t"+str(len(m.group(2)))+"\n")
sys.stderr.write("\n")
sys.exit()
buffer = ''
with open(args.input_file) as inf:
#inf.seek(bs[i])
reader = BGZF_reader(inf)
while True:
cur = reader.get_block_start()
inn = reader.get_inner_start()
fq = readfastq(reader)
z += 1
if not fq: break
if z%1000 == 0: sys.stderr.write("Indexed "+str(z)+" reads\r")
of.write(fq['name']+"\t"+str(cur)+"\t"+str(inn)+"\n")
inf.close()
sys.stderr.write("\n")
of.close()
def get_nls(bounds,fname,i):
with open(fname,'rb') as inf:
inf.seek(bounds[0])
bytes = inf.read(bounds[1]-bounds[0])
s = StringIO.StringIO(bytes)
#v = BGZF_reader(inf,blockStart=bound[0],innerStart=0)
v = BGZF_reader(s)
ubytes = v.read(70000) # always less than 65K by definition
p = re.compile('\n')
nls = [m.start() for m in p.finditer(ubytes)]
breaks = []
for j in range(len(nls)):
breaks.append([bounds[0],bounds[1],nls[j]])
return breaks
def do_nls_output(results):
global blocks
global ncount
#local = {}
#for y in [x for x in results]:
# local[y[0]] = y[1]
#for i in sorted(local):
# for e in local[i]:
for e in results:
#print e
#print ncount
useval = False
if ncount%4 == 0: useval = True
ncount += 1
if not useval: continue
if e[0] not in blocks: blocks[e[0]] = []
blocks[e[0]].append([e[1],e[2]])
#only every fourth newline is a start
#breaks = [breaks[i] for i in range(0,len(breaks),4)]
#sys.stderr.write("Reducing to new lines indicating starts\n")
#blocks = {}
#for i in range(0,len(breaks),4):
# if breaks[i][0] not in blocks: blocks[breaks[i][0]] = []
# blocks[breaks[i][0]].append([breaks[i][1],breaks[i][2]])
def readfastq(reader):
buffer = ''
for i in range(0,4):
v = ''
while v!="\n":
v = reader.read(1)
if len(v) == 0: return False
buffer += v
if len(buffer) == 0: return False
return FastqEntry(buffer.rstrip().split("\n"))
if __name__=="__main__":
main()
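A hedged sketch of reading the .bgi index the script writes, per the format comment at its top (name, blockStart, innerStart, dataSize, read length; tab-separated and gzipped). The index path is hypothetical, and because the comment warns that names may contain tabs, the numeric fields are peeled off from the right (Python 2, matching the script):

import gzip

with gzip.open("reads.fastq.gz.bgi") as idx:   # hypothetical index path
    for line in idx:
        # the name may contain tabs, so take the 4 numeric fields off the right
        name, block_start, inner_start, data_size, read_len = \
            line.rstrip("\n").rsplit("\t", 4)
        print("%s starts in block %s at offset %s" % (name, block_start, inner_start))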
| alexissmirnov/donomo | donomo_archive/lib/reportlab/pdfbase/_fontdata.py | Python | bsd-3-clause | 61,719 | 0.04992 |
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfbase/_fontdata.py
#$Header $
__version__=''' $Id: _fontdata.py 3052 2007-03-07 14:04:49Z rgbecker $ '''
__doc__="""
database of font related things
standardFonts tuple of the 14 standard string font names
standardEncodings tuple of the known standard encoding names
encodings a mapping object from standard encoding names (and minor variants)
to the encoding vectors, i.e. the tuple of string glyph names
widthsByFontGlyph fontname x glyphname --> width of glyph
widthVectorsByFont fontName -> vector of widths
"""
import UserDict, os, sys
# mapping of name to width vector, starts empty until fonts are added
# e.g. widths['Courier'] = [...600,600,600,...]
widthVectorsByFont = {}
fontsByName = {}
fontsByBaseEnc = {}
# this is a list of the standard 14 font names in Acrobat Reader
standardFonts = (
'Courier', 'Courier-Bold', 'Courier-Oblique', 'Courier-BoldOblique',
'Helvetica', 'Helvetica-Bold', 'Helvetica-Oblique', 'Helvetica-BoldOblique',
'Times-Roman', 'Times-Bold', 'Times-Italic', 'Times-BoldItalic',
'Symbol','ZapfDingbats')
standardFontAttributes = {
#family, bold, italic defined for basic ones
'Courier':('Courier',0,0),
'Courier-Bold':('Courier',1,0),
'Courier-Oblique':('Courier',0,1),
'Courier-BoldOblique':('Courier',1,1),
'Helvetica':('Helvetica',0,0),
'Helvetica-Bold':('Helvetica',1,0),
'Helvetica-Oblique':('Helvetica',0,1),
'Helvetica-BoldOblique':('Helvetica',1,1),
'Times-Roman':('Times-Roman',0,0),
'Times-Bold':('Times-Roman',1,0),
'Times-Italic':('Times-Roman',0,1),
'Times-BoldItalic':('Times-Roman',1,1),
'Symbol':('Symbol',0,0),
'ZapfDingbats':('ZapfDingbats',0,0)
}
#this maps fontnames to the equivalent filename root.
_font2fnrMapWin32 = {
'symbol': 'Sy______',
'zapfdingbats': 'Zd______',
'helvetica': '_a______',
'helvetica-bold': '_ab_____',
'helvetica-boldoblique': '_abi____',
'helvetica-oblique': '_ai_____',
'times-bold': '_eb_____',
'times-bolditalic': '_ebi____',
'times-italic': '_ei_____',
'times-roman': '_er_____',
'courier-bold': 'cob_____',
'courier-boldoblique': 'cobo____',
'courier': 'com_____',
'courier-oblique': 'coo_____',
}
if sys.platform in ('linux2',):
_font2fnrMapLinux2 ={
'symbol': 'Symbol',
'zapfdingbats': 'ZapfDingbats',
'helvetica': 'Arial',
'helvetica-bold': 'Arial-Bold',
'helvetica-boldoblique': 'Arial-BoldItalic',
'helvetica-oblique': 'Arial-Italic',
'times-bold': 'TimesNewRoman-Bold',
'times-bolditalic':'TimesNewRoman-BoldItalic',
'times-italic': 'TimesNewRoman-Italic',
'times-roman': 'TimesNewRoman',
'courier-bold': 'Courier-Bold',
'courier-boldoblique': 'Courier-BoldOblique',
'courier': 'Courier',
'courier-oblique': 'Courier-Oblique',
}
_font2fnrMap = _font2fnrMapLinux2
for k, v in _font2fnrMap.items():
if k in _font2fnrMapWin32.keys():
_font2fnrMapWin32[v.lower()] = _font2fnrMapWin32[k]
del k, v
else:
_font2fnrMap = _font2fnrMapWin32
def _findFNR(fontName):
return _font2fnrMap[fontName.lower()]
from reportlab.rl_config import T1SearchPath
from reportlab.lib.utils import rl_isfile
def _searchT1Dirs(n,rl_isfile=rl_isfile,T1SearchPath=T1SearchPath):
assert T1SearchPath!=[], "No Type-1 font search path"
for d in T1SearchPath:
f = os.path.join(d,n)
if rl_isfile(f): return f
return None
del T1SearchPath, rl_isfile
def findT1File(fontName,ext='.pfb'):
if sys.platform in ('linux2',) and ext=='.pfb':
try:
f = _searchT1Dirs(_findFNR(fontName))
if f: return f
except:
pass
try:
f = _searchT1Dirs(_font2fnrMapWin32[fontName.lower()]+ext)
if f: return f
except:
pass
return _searchT1Dirs(_findFNR(fontName)+ext)
# this lists the predefined font encodings - WinAnsi and MacRoman. We have
# not added MacExpert - it's possible, but would complicate life and nobody
# is asking. StandardEncoding means something special.
standardEncodings = ('WinAnsiEncoding','MacRomanEncoding','StandardEncoding','SymbolEncoding','ZapfDingbatsEncoding','PDFDocEncoding', 'MacExpertEncoding')
#this is the global mapping of standard encodings to name vectors
class _Name2StandardEncodingMap(UserDict.UserDict):
'''Trivial fake dictionary with some [] magic'''
_XMap = {'winansi':'WinAnsiEncoding','macroman': 'MacRomanEncoding','standard':'StandardEncoding','symbol':'SymbolEncoding', 'zapfdingbats':'ZapfDingbatsEncoding','pdfdoc':'PDFDocEncoding', 'macexpert':'MacExpertEncoding'}
def __setitem__(self,x,v):
y = x.lower()
if y[-8:]=='encoding': y = y[:-8]
y = self._XMap[y]
if y in self.keys(): raise IndexError, 'Encoding %s is already set' % y
self.data[y] = v
def __getitem__(self,x):
y = x.lower()
if y[-8:]=='encoding': y = y[:-8]
y = self._XMap[y]
return self.data[y]
encodings = _Name2StandardEncodingMap()
encodings['WinAnsiEncoding'] = (
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, 'space', 'exclam',
'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand',
'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma',
'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four',
'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less',
'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F',
'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright',
'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f',
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright',
'asciitilde', 'bullet', 'Euro', 'bullet', 'quotesinglbase', 'florin',
'quotedblbase', 'ellipsis', 'dagger', 'daggerdbl', 'circumflex',
'perthousand', 'Scaron', 'guilsinglleft', 'OE', 'bullet', 'Zcaron',
'bullet', 'bullet', 'quoteleft', 'quoteright', 'quotedblleft',
'quotedblright', 'bullet', 'endash', 'emdash', 'tilde', 'trademark',
'scaron', 'guilsinglright', 'oe', 'bullet', 'zcaron', 'Ydieresis',
'space', 'exclamdown', 'cent', 'sterling', 'currency', 'yen', 'brokenbar',
'section', 'dieresis', 'copyright', 'ordfeminine', 'guillemotleft',
'logicalnot', 'hyphen', 'registered', 'macron', 'degree', 'plusminus',
'twosuperior', 'threesuperior', 'acute', 'mu', 'paragraph', 'periodcentered',
'cedilla', 'onesuperior', 'ordmasculine', 'guillemotright', 'onequarter',
'onehalf', 'threequarters', 'questiondown', 'Agrave', 'Aacute',
'Acircumflex', 'Atilde', 'Adieresis', 'Aring', 'AE', 'Ccedilla',
'Egrave', 'Eacute', 'Ecircumflex', 'Ed
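A hedged note on the [] magic _Name2StandardEncodingMap implements above: lookups are lower-cased, an optional 'encoding' suffix is stripped, and the key is mapped back to its canonical name, so variant spellings reach the same vector (assumes a reportlab of this vintage is importable under its usual package path):

from reportlab.pdfbase._fontdata import encodings

# all of these normalize to the canonical 'WinAnsiEncoding' entry
assert encodings['WinAnsiEncoding'] is encodings['winansi']
assert encodings['winansi'] is encodings['WINANSIENCODING']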
| Zyell/home-assistant | tests/components/test_rfxtrx.py | Python | mit | 4,095 | 0 |
"""Th tests for the Rfxtrx component."""
# pylint: disable=too-many-public-methods,protected-access
import unittest
import time
from homeassistant.bootstrap import _setup_component
from homeassistant.components import rfxtrx as rfxtrx
from tests.common import get_test_home_assistant
class TestRFXTRX(unittest.TestCase):
"""Test the Rfxtrx component."""
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant(0)
def tearDown(self):
"""Stop everything that was started."""
rfxtrx.RECEIVED_EVT_SUBSCRIBERS = []
rfxtrx.RFX_DEVICES = {}
if rfxtrx.RFXOBJECT:
rfxtrx.RFXOBJECT.close_connection()
self.hass.stop()
def test_default_config(self):
"""Test configuration."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}
}))
self.assertTrue(_setup_component(self.hass, 'sensor', {
'sensor': {'platform': 'rfxtrx',
'automatic_add': True,
'devices': {}}}))
while len(rfxtrx.RFX_DEVICES) < 1:
time.sleep(0.1)
self.assertEqual(len(rfxtrx.RFXOBJECT.sensors()), 1)
def test_valid_config(self):
"""Test configuration."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}}))
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True,
'debug': True}}))
def test_invalid_config(self):
"""Test configuration."""
self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {}
}))
self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'invalid_key': True}}))
def test_fire_event(self):
"""Test fire event."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}
}))
self.assertTrue(_setup_component(self.hass, 'switch', {
'switch': {'platform': 'rfxtrx',
'automatic_add': True,
'devices':
{'0b1100cd0213c7f210010f51': {
'name': 'Test',
rfxtrx.ATTR_FIREEVENT: True}
}}}))
calls = []
def record_event(event):
"""Add recorded event to set."""
calls.append(event)
self.hass.bus.listen(rfxtrx.EVENT_BUTTON_PRESSED, record_event)
entity = rfxtrx.RFX_DEVICES['213c7f216']
self.assertEqual('Test', entity.name)
self.assertEqual('off', entity.state)
self.assertTrue(entity.should_fire_event)
event = rfxtrx.get_rfx_object('0b1100cd0213c7f210010f51')
event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18,
0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70])
rfxtrx.RECEIVED_EVT_SUBSCRIBERS[0](event)
self.hass.pool.block_till_done()
self.assertEqual(event.values['Command'], "On")
self.assertEqual('on', entity.state)
self.assertEqual(1, len(rfxtrx.RFX_DEVICES))
self.assertEqual(1, len(calls))
self.assertEqual(calls[0].data,
{'entity_id': 'switch.test', 'state': 'on'})
| seraphln/wheel | wheel/modules/posts/commands/listposts.py | Python | gpl-3.0 | 316 | 0 |
# -*- coding: utf-8 -*-
import click
from ..models import Post
@click.command()
@click.option('--title', default=None, help='Title of the Post')
def cli(title):
"Prints a list of posts"
posts = Post.objects
if title:
posts = posts(title=title)
for post in posts:
click.echo(post)
| AndrewSallans/osf.io | framework/exceptions/__init__.py | Python | apache-2.0 | 2,900 | 0.007241 |
# -*- coding: utf-8 -*-
'''Custom exceptions for the framework.'''
import copy
import httplib as http
from flask import request
class FrameworkError(Exception):
"""Base class from which framework-related errors inherit."""
pass
class HTTPError(FrameworkError):
error_msgs = {
http.BAD_REQUEST: {
'message_short': 'Bad request',
'message_long': ('If this should not have occurred and the issue persists, '
'please report it to <a href="mailto:support@osf.io">support@osf.io</a>.'),
},
http.UNAUTHORIZED: {
'message_short': 'Unauthorized',
'message_long': 'You must <a href="/login/">log in</a> to access this resource.',
},
http.FORBIDDEN: {
'message_short': 'Forbidden',
'message_long': ('You do not have permission to perform this action. '
'If this should not have occurred and the issue persists, '
'please report it to <a href="mailto:support@osf.io">support@osf.io</a>.'),
},
http.NOT_FOUND: {
'message_short': 'Page not found',
'message_long': ('The requested resource could not be found. If this '
'should not have occurred and the issue persists, please report it '
'to <a href="mailto:support@osf.io">support@osf.io</a>.'),
},
http.GONE: {
'message_short': 'Resource deleted',
'message_long': ('The requested resource has been deleted. If this should '
'not have occurred and the issue persists, please report it to '
'<a href="mailto:support@osf.io">support@osf.io</a>.'),
},
}
def __init__(self, code, message=None, redirect_url=None, data=None):
super(HTTPError, self).__init__(message)
self.code = code
self.redirect_url = redirect_url
self.data = data or {}
try:
self.referrer = request.referrer
except RuntimeError:
self.referrer = None
def to_data(self):
data = copy.deepcopy(self.data)
if self.code in self.error_msgs:
data = {
'message_short': self.error_msgs[self.code]['message_short'],
'message_long': self.error_msgs[self.code]['message_long']
}
else:
data['message_short'] = 'Unable to resolve'
data['message_long'] = ('OSF was unable to resolve your request. If this '
'issue persists, please report it to '
'<a href="mailto:support@osf.io">support@osf.io</a>.')
data.update(self.data)
data['code'] = self.code
data['referrer'] = self.referrer
return data
class PermissionsError(FrameworkError):
"""Raised if an action cannot be performed due to insufficient permissions
"""
pass
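A hedged usage sketch of the HTTPError defined above: outside a Flask request context the referrer lookup falls back to None, and to_data() fills in the canned message pair for known codes (Python 2, matching the httplib import):

import httplib as http
from framework.exceptions import HTTPError

try:
    raise HTTPError(http.NOT_FOUND, redirect_url='/search/')
except HTTPError as error:
    payload = error.to_data()
    # -> "404 Page not found (referrer=None)" outside a request context
    print("%s %s (referrer=%s)" % (payload['code'], payload['message_short'], payload['referrer']))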
| matk86/pymatgen | pymatgen/io/tests/test_adf.py | Python | mit | 10,190 | 0.000196 |
from __future__ import print_function, absolute_import
from pymatgen.io.adf import AdfKey, AdfTask, AdfOutput, AdfInput
from pymatgen.core.structure import Molecule
import unittest
import os
from os.path import join
__author__ = 'Xin Chen, chenxin13@mails.tsinghua.edu.cn'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files', 'molecules')
geometry_string = """GEOMETRY
smooth conservepoints
optim all cartesian
iterations 250
step rad=0.15 angle=10.0
hessupd BFGS
converge e=0.001 grad=0.0003 rad=0.01 angle=0.5
END
"""
zlmfit_string = """ZLMFIT
AtomDepQuality
10 good
12 normal
subend
END
"""
atoms_string = """ATOMS
O -0.90293455 0.66591421 0.00000000
H 0.05706545 0.66591421 0.00000000
H -1.22338913 1.57085004 0.00000000
END
"""
h2oxyz = """3
0.0
O -0.90293455 0.66591421 0.0
H 0.05706545 0.66591421 0.0
H -1.22338913 1.57085004 0.0
"""
rhb18xyz = """19
0.0
Rh -0.453396 -0.375115 0.000000
B 0.168139 3.232791 0.000000
B -0.270938 1.639058 0.000000
B 0.206283 2.604044 1.459430
B 0.404410 1.880136 2.866764
B -0.103309 0.887485 1.655272
B 0.436856 0.371367 3.299887
B 0.016593 -0.854959 1.930982
B 0.563233 -1.229713 3.453066
B 0.445855 -2.382027 2.415013
B 0.206283 2.604044 -1.459430
B 0.404410 1.880136 -2.866764
B -0.103309 0.887485 -1.655272
B 0.436856 0.371367 -3.299887
B 0.563233 -1.229713 -3.453066
B 0.016593 -0.854959 -1.930982
B 0.200456 -2.309538 -0.836316
B 0.200456 -2.309538 0.836316
B 0.445855 -2.382027 -2.415013
"""
def readfile(file_object):
"""
Return the content of the file as a string.
Parameters
----------
file_object : file or str
The file to read. This can be either a File object or a file path.
Returns
-------
content : str
The content of the file.
"""
if hasattr(file_object, "read"):
return file_object.read()
elif isinstance(file_object, str):
f = open(file_object, "r")
content = f.read()
f.close()
return content
else:
raise ValueError("``file_object`` must be a string or a file object!")
class AdfKeyTest(unittest.TestCase):
def test_simple(self):
unrestricted = AdfKey("unrestricted")
self.assertEqual(str(unrestricted).strip(), 'UNRESTRICTED')
def test_options(self):
charge = AdfKey("charge", [-1, 0])
charge_string = "CHARGE -1 0\n"
self.assertEqual(str(charge), "CHARGE -1 0\n")
self.assertEqual(str(AdfKey.from_dict(charge.as_dict())), charge_string)
def test_subkeys(self):
smooth = AdfKey("smooth", ["conservepoints"])
optim = AdfKey("optim", ["all", "cartesian"])
iterations = AdfKey("iterations", [250])
step = AdfKey("step", [("rad", 0.15), ("angle", 10.0)])
hessupd = AdfKey("hessupd", ["BFGS"])
converge = AdfKey("converge", [("e", 1.0e-3), ("grad", 3.0e-4),
("rad", 1.0e-2), ("angle", 0.5)])
geo = AdfKey("geometry", subkeys=[smooth, optim, iterations, step,
hessupd, converge])
self.assertEqual(str(geo), geometry_string)
self.assertEqual(str(AdfKey.from_dict(geo.as_dict())), geometry_string)
self.assertTrue(geo.has_subkey("optim"))
def test_end(self):
geo = AdfKey("Geometry")
self.assertEqual(str(geo), "GEOMETRY\nEND\n")
def test_subkeys_subkeys(self):
atom_dep_quality = AdfKey("AtomDepQuality",
subkeys=[AdfKey("10", ["good"]),
AdfKey("12", ["normal"])])
zlmfit = AdfKey("zlmfit", subkeys=[atom_dep_quality])
self.assertEqual(str(zlmfit), zlmfit_string)
self.assertEqual(str(AdfKey.from_dict(zlmfit.as_dict())), zlmfit_string)
def test_from_string(self):
k1 = AdfKey.from_string("CHARGE -1 0")
self.assertEqual(k1.key, "CHARGE")
self.assertListEqual(k1.options, [-1, 0])
k2 = AdfKey.from_string("step rad=0.15 angle=10.0")
self.assertEqual(k2.key, "step")
self.assertListEqual(k2.options[0], ['rad', 0.15])
self.assertListEqual(k2.options[1], ['angle', 10.0])
k3 = AdfKey.from_string("GEOMETRY\noptim all\niterations 100\nEND\n")
self.assertEqual(k3.key, "GEOMETRY")
self.assertEqual(k3.subkeys[0].options[0], "all")
self.assertEqual(k3.subkeys[1].options[0], 100)
k4 = AdfKey.from_string(
"""SCF
iterations 300
converge 1.0e-7 1.0e-7
mixing 0.2
diis n=100 ok=0.0001 cyc=100 cx=5.0 cxx=10.0
END"""
)
self.assertEqual(k4.key, "SCF")
self.assertEqual(k4.subkeys[0].key, "iterations")
self.assertEqual(k4.subkeys[1].key, "converge")
self.assertEqual(k4.subkeys[1].options[0], 1E-7)
self.assertEqual(k4.subkeys[2].options[0], 0.2)
def test_option_operations(self):
k1 = AdfKey("Charge", [-1, 0])
k1.add_option(2)
self.assertListEqual(k1.options, [-1, 0, 2])
k1.remove_option(0)
self.assertListEqual(k1.options, [0, 2])
k2 = AdfKey.from_string("step rad=0.15 angle=10.0")
k2.add_option(["length", 0.1])
self.assertListEqual(k2.options[2], ["length", 0.1])
k2.remove_option("rad")
self.assertListEqual(k2.options[0], ["angle", 10.0])
def test_atom_block_key(self):
block = AdfKey("atoms")
o = Molecule.from_str(h2oxyz, "xyz")
for site in o:
block.add_subkey(AdfKey(str(site.specie), list(site.coords)))
self.assertEqual(str(block), atoms_string)
energy_task = """TITLE ADF_RUN
UNITS
length angstrom
angle degree
END
XC
GGA PBE
END
BASIS
type DZ
core small
END
SCF
iterations 300
END
GEOMETRY SinglePoint
END
"""
class AdfTaskTest(unittest.TestCase):
def test_energy(self):
task = AdfTask()
self.assertEqual(str(task), energy_task)
def test_serialization(self):
task = AdfTask()
o = AdfTask.from_dict(task.as_dict())
self.assertEqual(task.title, o.title)
self.assertEqual(task.basis_set, o.basis_set)
self.assertEqual(task.scf, o.scf)
self.assertEqual(task.geo, o.geo)
self.assertEqual(task.operation, o.operation)
self.assertEqual(task.units, o.units)
self.assertEqual(str(task), str(o))
rhb18 = {"title": "RhB18",
"basis_set": AdfKey.from_string("BASIS\ntype TZP\ncore small\nEND"),
"xc": AdfKey.from_string("XC\nHybrid PBE0\nEND"),
"units": AdfKey.from_string("UNITS\nlength angstrom\nEND"),
"other_directives": [AdfKey.from_string("SYMMETRY"),
AdfKey.from_string("RELATIVISTIC scalar zora"),
AdfKey.from_string("INTEGRATION 6.0 6.0 6.0"),
AdfKey.from_string("SAVE TAPE21"),
AdfKey.from_string("A1FIT 10.0")],
"geo_subkeys": [AdfKey.from_string("optim all"),
AdfKey.from_string("iterations 300"),
AdfKey.from_string("step rad=0.15 angle=10.0"),
AdfKey.from_string("hessupd BFGS")],
"scf": AdfKey.from_string(
"""SCF
iterations 300
converge 1.0e-7 1.0e-7
mixing 0.2
lshift 0.0
diis n=100 ok=0.0001 cyc=100 cx=5.0 cxx=10.0
END"""
)}
class AdfInputTest(unittest.TestCase):
def setUp(self):
self.tempfile = "./adf.temp"
def test_main(self):
o = Molecule.from_str(rhb18xyz, "xyz")
o.set_charge_and_spin(-1, 3)
task = AdfTask("optimize", **rhb18)
inp = AdfInput(task)
inp.write_file(o, self.tempfile)
| erh3cq/hyperspy | hyperspy/tests/component/test_exponential.py | Python | gpl-3.0 | 3,100 | 0.000323 |
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import itertools
import numpy as np
import pytest
from hyperspy.components1d import Exponential
from hyperspy.signals import Signal1D
from hyperspy.utils import stack
TRUE_FALSE_2_TUPLE = [p for p in itertools.product((True, False), repeat=2)]
def test_function():
g = Exponential()
g.A.value = 10000.
g.tau.value = 200.
test_value = 200.
test_result = g.A.value * np.exp(-test_value / g.tau.value)
np.testing.assert_allclose(g.function(0.), g.A.value)
np.testing.assert_allclose(g.function(test_value), test_result)
@pytest.mark.parametrize(("lazy"), (True, False))
@pytest.mark.parametrize(("uniform"), (True, False))
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters_binned(only_current, binned, lazy, uniform):
s = Signal1D(np.empty((100,)))
s.axes_manager.signal_axes[0].is_binned = binned
axis = s.axes_manager.signal_axes[0]
axis.scale = 0.2
axis.offset = 15.
g1 = Exponential(A=10005.7, tau=214.3)
s.data = g1.function(axis.axis)
if not uniform:
axis.convert_to_non_uniform_axis()
if lazy:
s = s.as_lazy()
g2 = Exponential()
if binned and uniform:
factor = axis.scale
elif binned:
factor = np.gradient(axis.axis)
else:
factor = 1
assert g2.estimate_parameters(s, axis.low_value, axis.high_value,
only_current=only_current)
assert g2._axes_manager[-1].is_binned == binned
np.testing.assert_allclose(g1.A.value, g2.A.value * factor, rtol=0.05)
np.testing.assert_allclose(g1.tau.value, g2.tau.value)
@pytest.mark.parametrize(("lazy"), (True, False))
@pytest.mark.parametrize(("binned"), (True, False))
def test_function_nd(binned, lazy):
s = Signal1D(np.empty((100,)))
axis = s.axes_manager.signal_axes[0]
axis.scale = 0.2
axis.offset = 15
g1 = Exponential(A=10005.7, tau=214.3)
s.data = g1.function(axis.axis)
s.axes_manager.signal_axes[0].is_binned = binned
s2 = stack([s] * 2)
if lazy:
s2 = s2.as_lazy()
g2 = Exponential()
factor = axis.scale if binned else 1.
g2.estimate_parameters(s2, axis.low_value, axis.high_value, False)
assert g2._axes_manager[-1].is_binned == binned
np.testing.assert_allclose(g2.function_nd(axis.axis) * factor, s2.data, rtol=0.05)
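A hedged illustration of the binned-signal factor logic used in both tests above: a binned uniform axis scales by the constant bin width (axis.scale), a binned non-uniform axis by the local spacing via np.gradient, and the two agree when the spacing happens to be uniform:

import numpy as np

axis_values = 15. + 0.2 * np.arange(100)   # mirrors scale=0.2, offset=15
uniform_factor = 0.2                        # constant bin width
per_bin_factor = np.gradient(axis_values)   # local bin widths
assert np.allclose(per_bin_factor, uniform_factor)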
| scheib/chromium | third_party/blink/web_tests/external/wpt/webdriver/tests/refresh/refresh.py | Python | bsd-3-clause | 3,290 | 0.000608 |
import pytest
from webdriver.error import NoSuchElementException, StaleElementReferenceException
from tests.support.asserts import assert_error, assert_success
def refresh(session):
return session.transport.send(
"POST", "session/{session_id}/refresh".format(**vars(session)))
def test_null_response_value(session, inline):
session.url = inline("<div>")
response = refresh(session)
value = assert_success(response)
assert value is None
def test_no_top_browsing_context(session, closed_window):
response = refresh(session)
assert_error(response, "no such window")
def test_no_browsing_context(session, closed_frame, inline):
url = inline("<div id=foo>")
session.url = url
element = session.find.css("#foo", all=False)
response = refresh(session)
assert_success(response)
with pytest.raises(StaleElementReferenceException):
element.property("id")
assert session.url == url
assert session.find.css("#foo", all=False)
def test_basic(session, inline):
url = inline("<div id=foo>")
session.url = url
element = session.find.css("#foo", all=False)
response = refresh(session)
assert_success(response)
with pytest.raises(StaleElementReferenceException):
element.property("id")
assert session.url == url
assert session.find.css("#foo", all=False)
def test_dismissed_beforeunload(session, inline):
url_beforeunload = inline("""
<input type="text">
<script>
window.addEventListener("beforeunload", function (event) {
event.preventDefault();
});
</script>
""")
session.url = url_beforeunload
element = session.find.css("input", all=False)
element.send_keys("bar")
response = refresh(session)
assert_success(response)
with pytest.raises(StaleElementReferenceException):
element.property("id")
session.find.css("input", all=False)
def test_history_pushstate(session, inline):
pushstate_page = inline("""
<script>
function pushState() {
history.pushState({foo: "bar"}, "", "#pushstate");
}
</script>
<a onclick="javascript:pushState();">click</a>
""")
session.url = pushstate_page
session.find.css("a", all=False).click()
assert session.url == "{}#pushstate".format(pushstate_page)
assert session.execute_script("return history.state;") == {"foo": "bar"}
session.execute_script("""
let elem = window.document.createElement('div');
window.document.body.appendChild(elem);
""")
element = session.find.css("div", all=False)
response = refresh(session)
assert_success(response)
assert session.url == "{}#pushstate".format(pushstate_page)
assert session.execute_script("return history.state;") == {"foo": "bar"}
with pytest.raises(StaleElementReferenceException):
element.property("id")
def test_refresh_switches_to_parent_browsing_context(session, create_frame, inline):
session.url = inline("<div id=foo>")
session.switch_frame(create_frame())
with pytest.raises(NoSuchElementException):
session.find.css("#foo", all=False)
response = refresh(session)
assert_success(response)
session.find.css("#foo", all=False)
| ddm/pcbmode | pcbmode/utils/excellon.py | Python | mit | 4,661 | 0.00708 |
#!/usr/bin/python
import os
import re
from lxml import etree as et
import pcbmode.config as config
from . import messages as msg
# pcbmode modules
from . import utils
from .point import Point
def makeExcellon(manufacturer='default'):
"""
"""
ns = {'pcbmode':config.cfg['ns']['pcbmode'],
'svg':config.cfg['ns']['svg']}
# Open the board's SVG
svg_in = utils.openBoardSVG()
drills_layer = svg_in.find("//svg:g[@pcbmode:sheet='drills']",
namespaces=ns)
excellon = Excellon(drills_layer)
# Save to file
base_dir = os.path.join(config.cfg['base-dir'],
config.cfg['locations']['build'],
'production')
base_name = "%s_rev_%s" % (config.brd['config']['name'],
config.brd['config']['rev'])
filename_info = config.cfg['manufacturers'][manufacturer]['filenames']['drills']
add = '_%s.%s' % ('drills',
filename_info['plated'].get('ext') or 'txt')
filename = os.path.join(base_dir, base_name + add)
with open(filename, "wb") as f:
for line in excellon.getExcellon():
f.write(line)
class Excellon():
"""
"""
def __init__(self, svg):
"""
"""
self._svg = svg
self._ns = {'pcbmode':config.cfg['ns']['pcbmode'],
'svg':config.cfg['ns']['svg']}
# Get all drill paths except for the ones used in the
# drill-index
drill_paths = self._svg.findall(".//svg:g[@pcbmode:type='component-shapes']//svg:path",
namespaces=self._ns)
drills_dict = {}
for drill_path in drill_paths:
diameter = drill_path.get('{'+config.cfg['ns']['pcbmode']+'}diameter')
location = self._getLocation(drill_path)
if diameter not in drills_dict:
drills_dict[diameter] = {}
drills_dict[diameter]['locations'] = []
drills_dict[diameter]['locations'].append(location)
self._preamble = self._createPreamble()
self._content = self._createContent(drills_dict)
self._postamble = self._createPostamble()
def getExcellon(self):
return (self._preamble+
self._content+
self._postamble)
def _createContent(self, drills):
"""
"""
ex = []
for i, diameter in enumerate(drills):
# This is probably not necessary, but I'm not 100% certain
# that the item order of a dict is guaranteed. If not,
# the result can be quite devastating: the drill
# diameters would be wrong!
# Drill index must be greater than 0
drills[diameter]['index'] = i+1
ex.append("T%dC%s\n" % (i+1, diameter))
ex.append('M95\n') # End of a part program header
for diameter in drills:
ex.append("T%s\n" % drills[diameter]['index'])
for coord in drills[diameter]['locations']:
ex.append(self._getPoint(coord))
return ex
def _createPreamble(self):
"""
"""
ex = []
ex.append('M48\n') # Beginning of a part program header
ex.append('METRIC,TZ\n') # Metric, trailing zeros
ex.append('G90\n') # Absolute mode
ex.append('M71\n') # Metric measuring mode
return ex
def _createPostamble(self):
"""
"""
ex = []
ex.append('M30\n') # End of Program, rewind
return ex
def _getLocation(self, path):
"""
Returns the location of a path, factoring in all the transforms of
its ancestors, and its own transform
"""
location = Point()
# We need to get the transforms of all ancestors that have
# one in order to get the location correctly
ancestors = path.xpath("ancestor::*[@transform]")
for ancestor in ancestors:
transform = ancestor.get('transform')
transform_data = utils.parseTransform(transform)
# Add them up
location += transform_data['location']
# Add the transform of the path itself
transform = path.get('transform')
if transform != None:
transform_data = utils.parseTransform(transform)
location += transform_data['location']
return location
def _getPoint(self, point):
"""
Converts a Point type into an Excellon coordinate
"""
return "X%.6fY%.6f\n" % (point.x, -point.y)
| mementum/backtrader | backtrader/feeds/rollover.py | Python | gpl-3.0 | 6,892 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from datetime import datetime
import backtrader as bt
class MetaRollOver(bt.DataBase.__class__):
def __init__(cls, name, bases, dct):
'''Class has already been created ... register'''
# Initialize the class
super(MetaRollOver, cls).__init__(name, bases, dct)
def donew(cls, *args, **kwargs):
'''Intercept const. to copy timeframe/compression from 1st data'''
# Create the object and set the params in place
_obj, args, kwargs = super(MetaRollOver, cls).donew(*args, **kwargs)
if args:
_obj.p.timeframe = args[0]._timeframe
_obj.p.compression = args[0]._compression
return _obj, args, kwargs
class RollOver(bt.with_metaclass(MetaRollOver, bt.DataBase)):
'''Class that rolls over to the next future when a condition is met
Params:
- ``checkdate`` (default: ``None``)
This must be a *callable* with the following signature::
checkdate(dt, d):
Where:
- ``dt`` is a ``datetime.datetime`` object
- ``d`` is the current data feed for the active future
Expected Return Values:
- ``True``: as long as the callable returns this, a switchover can
happen to the next future
If a commodity expires on the 3rd Friday of March, ``checkdate`` could
return ``True`` for the entire week in which the expiration takes
place.
- ``False``: the expiration cannot take place
- ``checkcondition`` (default: ``None``)
**Note**: This will only be called if ``checkdate`` has returned
``True``
If ``None`` this will evaluate to ``True`` (execute roll over)
internally
Else this must be a *callable* with this signature::
checkcondition(d0, d1)
Where:
- ``d0`` is the current data feed for the active future
- ``d1`` is the data feed for the next expiration
Expected Return Values:
- ``True``: roll-over to the next future
Following with the example from ``checkdate``, this could say that the
roll-over can only happen if the *volume* from ``d0`` is already less
than the volume from ``d1``
- ``False``: the expiration cannot take place
'''
params = (
# ('rolls', []), # array of futures to roll over
('checkdate', None), # callable
('checkcondition', None), # callable
)
def islive(self):
'''Returns ``True`` to notify ``Cerebro`` that preloading and runonce
should be deactivated'''
return True
def __init__(self, *args):
self._rolls = args
def start(self):
super(RollOver, self).start()
for d in self._rolls:
d.setenvironment(self._env)
d._start()
# put the references in a separate list to have pops
self._ds = list(self._rolls)
self._d = self._ds.pop(0) if self._ds else None
self._dexp = None
self._dts = [datetime.min for xx in self._ds]
def stop(self):
super(RollOver, self).stop()
for d in self._rolls:
d.stop()
def _gettz(self):
'''To be overriden by subclasses which may auto-calculate the
timezone'''
if self._rolls:
return self._rolls[0]._gettz()
return bt.utils.date.Localizer(self.p.tz)
def _checkdate(self, dt, d):
if self.p.checkdate is not None:
return self.p.checkdate(dt, d)
return False
def _checkcondition(self, d0, d1):
if self.p.checkcondition is not None:
return self.p.checkcondition(d0, d1)
return True
def _load(self):
while self._d is not None:
_next = self._d.next()
if _next is None: # no values yet, more will come
continue
if _next is False: # no values from current data src
if self._ds:
self._d = self._ds.pop(0)
self._dts.pop(0)
else:
self._d = None
continue
dt0 = self._d.datetime.datetime() # current dt for active data
# Synchronize other datas using dt0
for i, d_dt in enumerate(zip(self._ds, self._dts)):
d, dt = d_dt
while dt < dt0:
if d.next() is None:
continue
self._dts[i] = dt = d.datetime.datetime()
# Move expired future as much as needed
while self._dexp is not None:
if not self._dexp.next():
self._dexp = None
break
if self._dexp.datetime.datetime() < dt0:
continue
if self._dexp is None and self._checkdate(dt0, self._d):
# rule has been met ... check other factors only if 2 datas
# still there
if self._ds and self._checkcondition(self._d, self._ds[0]):
# Time to switch to next data
self._dexp = self._d
self._d = self._ds.pop(0)
self._dts.pop(0)
# Fill the line and tell we die
self.lines.datetime[0] = self._d.lines.datetime[0]
self.lines.open[0] = self._d.lines.open[0]
self.lines.high[0] = self._d.lines.high[0]
self.lines.low[0] = self._d.lines.low[0]
self.lines.close[0] = self._d.lines.close[0]
self.lines.volume[0] = self._d.lines.volume[0]
self.lines.openinterest[0] = self._d.lines.openinterest[0]
return True
# Out of the loop -> self._d is None, no data feed to return from
return False
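A hedged sketch of the two callables the RollOver docstring describes, reusing its own examples (expiry on the third Friday, volume migration). The assumption that the active contract expires in the current calendar month is purely illustrative:

import datetime

def third_friday(year, month):
    d = datetime.date(year, month, 15)      # the 3rd Friday falls on the 15th-21st
    while d.weekday() != 4:                  # 4 == Friday
        d += datetime.timedelta(days=1)
    return d

def checkdate(dt, d):
    # True for the whole ISO week containing the (assumed) expiry
    expiry = third_friday(dt.year, dt.month)
    return dt.date().isocalendar()[:2] == expiry.isocalendar()[:2]

def checkcondition(d0, d1):
    # roll only once volume has migrated to the next contract
    return d0.volume[0] < d1.volume[0]

# rolled = bt.feeds.RollOver(d_mar, d_jun, checkdate=checkdate, checkcondition=checkcondition)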
| jeffshek/betterself | events/migrations/0004_auto_20171223_0859.py | Python | mit | 1,984 | 0.00252 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-23 08:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0003_auto_20171221_0336'),
]
operations = [
migrations.AlterField(
model_name='dailyproductivitylog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
),
migrations.AlterField(
model_name='sleeplog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
),
migrations.AlterField(
model_name='supplementlog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
migrations.AlterField(
model_name='useractivitylog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
migrations.AlterField(
model_name='usermoodlog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
]
| shelag/piggybank | saving/migrations/0001_initial.py | Python | mit | 1,468 | 0.004087 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Movement',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('date_pub', models.DateField(verbose_name='Data inserimento', auto_now_add=True)),
('text', models.CharField(verbose_name='Descrizione', max_length=200)),
('amount', models.DecimalField(decimal_places=2, max_digits=10)),
('currency', models.CharField(choices=[('EUR', 'EUR'), ('USD', 'USD')], max_length=3)),
('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('word', models.CharField(max_length=50)),
('slug', models.CharField(max_length=100)),
],
),
migrations.AddField(
model_name='movement',
name='tag',
field=models.ManyToManyField(to='saving.Tag'),
),
]
| beiko-lab/gengis | bin/Lib/site-packages/wx-2.8-msw-unicode/wx/tools/Editra/src/ed_cmdbar.py | Python | gpl-3.0 | 43,285 | 0.000924 |
###############################################################################
# Name: ed_cmdbar.py                                                          #
# Purpose: Creates a small slit panel that holds small controls for searching #
# and other actions. #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
This class creates a custom panel that can hide and show different controls
based on an id value. The panel is generally between 24-32 pixels in height but
can grow to fit the controls inserted in it. The background is painted with
a gradient using system defined colors.
@summary: The buffer's CommandBar control with search/goto line/command entry
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: ed_cmdbar.py 67402 2011-04-06 13:34:14Z CJP $"
__revision__ = "$Revision: 67402 $"
#--------------------------------------------------------------------------#
# Imports
import os
import sys
import glob
import re
import wx
# Local Imports
import util
import ed_glob
import ed_search
import ed_event
import ed_msg
import ebmlib
import eclib
from profiler import Profile_Get, Profile_Set
_ = wx.GetTranslation
#--------------------------------------------------------------------------#
# Close Button Bitmap
from extern.embeddedimage import PyEmbeddedImage
XButton = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAIAAACQKrqGAAAAA3NCSVQICAjb4U/gAAAB6UlE"
"QVQokW2SvWsTYRjAn7tcctdLc7kmxtJqj8ZECIqi7eJHhywVxLWLSBctXQKFOhSE0IJKB0EH"
"wf/BwUkEBxEFFR0cSoei1ZaSJjQ56Zn0vDd3977v8zrcUTr0mZ6PH8+39Onxw97Xz0FnL2w1"
"4DhJnbLU4ZHs1Snpza0bk1cum0MFLvwoJmg/pmjs6bW7a5u7StDZM8Zu0v0W3W/HAGMAgJxF"
"pmYaxumc+nNLDlsNUBKceNJAKj27KI2OIWfImWKVcnMPuKr53QOmJsVfWz7sSZ+pqZWJ7L26"
"YpUUq5SfX1YrE4XZRR+pwikAKAAgBBMQkPevkuMVWdPz88sAIGs6+sR5+/IwlwIA0CXBrsM2"
"toPm/ZMrz2RNBwD0SaO+4G/9AACeG41R9o8wL6CuLwXs6Jow5Mz1OSJ3XMG4DAAiZIgidfb8"
"yOrzqC76RNb08Scv9HMXAAAoFyGNx0Kk2dt3I25nqbazVIvo/J05ABAsAMTETEYrX7pIbNv7"
"8iFZLLeePiKbG7TT9ta+y7lCY7UuM6oNGZ1mW3p9fXKqeq3/xz6wm8yNz8MRIyWRSg6amfTg"
"wHqzp+hW0XUcI3NCy6QBQGAYNRfVZYgJztxeH3LDilmd/vXxHVn/5m3/PvZd0mfKulU0q9P/"
"AeP28JG84F5KAAAAAElFTkSuQmCC")
#-----------------------------------------------------------------------------#
# Globals
ID_CLOSE_BUTTON = wx.NewId()
ID_SEARCH_NEXT = wx.NewId()
ID_SEARCH_PRE = wx.NewId()
ID_FIND_ALL = wx.NewId()
ID_MATCH_CASE = wx.NewId()
ID_WHOLE_WORD = wx.NewId()
ID_REGEX = wx.NewId()
#-----------------------------------------------------------------------------#
class CommandBarBase(eclib.ControlBar):
"""Base class for control bars"""
def __init__(self, parent):
super(CommandBarBase, self).__init__(parent,
style=eclib.CTRLBAR_STYLE_GRADIENT)
if wx.Platform == '__WXGTK__':
self.SetWindowStyle(eclib.CTRLBAR_STYLE_DEFAULT)
self.SetVMargin(2, 2)
# Attributes
self._parent = parent
self._menu = None
self._menu_enabled = True
self.ctrl = None
self.close_b = eclib.PlateButton(self, ID_CLOSE_BUTTON,
bmp=XButton.GetBitmap(),
style=eclib.PB_STYLE_NOBG)
# Setup
self.AddControl(self.close_b, wx.ALIGN_LEFT)
# Event Handlers
self.Bind(wx.EVT_BUTTON, self.OnClose, self.close_b)
self.Bind(wx.EVT_CONTEXT_MENU, self.OnContext)
self.Bind(wx.EVT_MENU, self.OnContextMenu)
def OnClose(self, evt):
"""Handles events from the buttons on the bar
@param evt: Event that called this handler
"""
e_id = evt.GetId()
if e_id == ID_CLOSE_BUTTON:
self.Hide()
else:
evt.Skip()
def OnContext(self, evt):
"""Show the custom menu"""
if self._menu_enabled:
if self._menu is None:
# Lazy init the menu
self._menu = wx.Menu(_("Customize"))
# Ensure the label is disabled (wxMSW Bug)
item = self._menu.GetMenuItems()[0]
self._menu.Enable(item.GetId(), False)
to_menu = list()
for child in self.GetChildren():
if self.IsCustomizable(child):
to_menu.append(child)
if len(to_menu):
to_menu.sort(key=wx.Window.GetLabel)
for item in to_menu:
if not item.GetLabel():
continue
self._menu.Append(item.GetId(),
item.GetLabel(),
kind=wx.ITEM_CHECK)
self._menu.Check(item.GetId(), item.IsShown())
self.PopupMenu(self._menu)
else:
evt.Skip()
def OnContextMenu(self, evt):
"""Hide and Show controls"""
e_id = evt.GetId()
ctrl = self.FindWindowById(e_id)
if ctrl is not None:
self.ShowControl(ctrl.GetName(), not ctrl.IsShown())
self.Layout()
# Update the persistent configuration
key = self.GetConfigKey()
if key is not None:
cfg = Profile_Get('CTRLBAR', default=dict())
state = self.GetControlStates()
cfg[key] = state
def EnableMenu(self, enable=True):
"""Enable the popup customization menu
@keyword enable: bool
"""
self._menu_enabled = enable
if not enable and self._menu is not None:
self._menu.Destroy()
self._menu = None
def GetConfigKey(self):
"""Get the key to use for the layout config persistence.
@return: string
@note: override in subclasses
"""
return None
def GetControlStates(self):
"""Get the map of control name id's to their shown state True/False
@return: dict()
"""
state = dict()
for child in self.GetChildren():
if self.IsCustomizable(child):
state[child.GetName()] = child.IsShown()
return state
def SetControlStates(self, state):
"""Set visibility state of the customizable controls
@param state: dict(ctrl_name=bool)
"""
for name, show in state.iteritems():
self.ShowControl(name, show)
self.Layout()
def Hide(self):
"""Hides the control and notifies the parent
@postcondition: commandbar is hidden
@todo: don't reference nb directly here
"""
super(CommandBarBase, self).Hide()
self._parent.SendSizeEvent()
nb = self._parent.GetNotebook()
ctrl = nb.GetCurrentCtrl()
if ctrl:
ctrl.SetFocus()
return True
def ShowControl(self, ctrl_name, show=True):
"""Show/Hide a control
@param ctrl_name: string
@note: assumes all left aligned controls
"""
sizer = self.GetControlSizer()
next = False
for item in sizer.GetChildren():
if next:
if item.IsSpacer():
item.Show(show)
break
if item.Window and item.Window.GetName() == ctrl_name:
item.Show(show)
next = True
def IsCustomizable(self, ctrl):
"""Is the control of a type that can be customized
@param ctrl: wx.Window
@return: bool
"""
ok = (ctrl is not self.close_b)
ok = ok and (isinstance(ctrl, wx.CheckBox) or \
isinstance(ctrl, eclib.PlateButton))
return ok
def SetControl(self, ctrl):
"""Set the main control of this command bar
@param ctrl: window
"""
self.ctr
| winterDroid/android-drawable-importer-intellij-plugin | json_generator/__init__.py | Python | apache-2.0 | 30 | 0 |
__author__ = 'marcprengemann'
| starwels/starwels | test/functional/wallet_hd.py | Python | mit | 5,199 | 0.003847 |
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
import shutil
import os
class WalletHDTest(StarwelsTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
        self.extra_args = [[], ['-keypool=0']]
    def run_test(self):
tmpdir = self.options.tmpdir
# Make sure can't switch off usehd after wallet creation
self.stop_node(1)
self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
# Make sure we use hd, keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
assert_equal(len(masterkeyid), 40)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(tmpdir + "/hd.bak")
#self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
# Derive some HD addresses and remember the last
# Also send funds to each add
self.nodes[0].generate(101)
hd_add = None
num_hd_adds = 300
for i in range(num_hd_adds):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].validateaddress(hd_add)
assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete regtest directory
        # otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallets/wallet.dat"))
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for _ in range(num_hd_adds):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'")
assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
# Try a RPC based rescan
self.stop_node(1)
shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallet.dat"))
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
# send a tx and make sure its using the internal chain for the changeoutput
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:7], "m/0'/1'")
if __name__ == '__main__':
    WalletHDTest().main()
eternnoir/pyTelegramBotAPI | examples/asynchronous_telebot/callback_data_examples/simple_products_example.py | Python | gpl-2.0 | 2,831 | 0.003535
# -*- coding: utf-8 -*-
"""
This Example will show you how to use CallbackData
"""
from telebot.callback_data import CallbackData, CallbackDataFilter
from telebot import types
from telebot.async_telebot import AsyncTeleBot
from telebot.asyncio_filters import AdvancedCustomFilter
API_TOKEN = 'TOKEN'
PRODUCTS = [
{'id': '0', 'name': 'xiaomi mi 10', 'price': 400},
{'id': '1', 'name': 'samsung s20', 'price': 800},
{'id': '2', 'name': 'iphone 13', 'price': 1300}
]
bot = AsyncTeleBot(API_TOKEN)
products_factory = CallbackData('product_id', prefix='products')
def products_keyboard():
return types.InlineKeyboardMarkup(
keyboard=[
[
types.InlineKeyboardButton(
text=product['name'],
callback_data=products_factory.new(product_id=product["id"])
)
]
for product in PRODUCTS
]
)
def back_keyboard():
return types.InlineKeyboardMarkup(
keyboard=[
[
types.InlineKeyboardButton(
text='⬅',
callback_data='back'
)
]
]
)
class ProductsCallbackFilter(AdvancedCustomFilter):
key = 'config'
async def check(self, call: types.CallbackQuery, config: CallbackDataFilter):
return config.check(query=call)
@bot.message_handler(commands=['products'])
async def products_command_handler(message: types.Message):
await bot.send_message(message.chat.id, 'Products:', reply_markup=products_keyboard())
# Only product with field - product_id = 2
@bot.callback_query_handler(func=None, config=products_factory.filter(product_id='2'))
async def product_one_callback(call: types.CallbackQuery):
await bot.answer_callback_query(callback_query_id=call.id, text='Not available :(', show_alert=True)
# Any other products
@bot.callback_query_handler(func=None, config=products_factory.filter())
async def products_callback(call: types.CallbackQuery):
callback_data: dict = products_factory.parse(callback_data=call.data)
product_id = int(callback_data['product_id'])
product = PRODUCTS[product_id]
text = f"Product name: {product['name']}\n" \
f"Product price: {product['price']}"
await bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
text=text, reply_markup=back_keyboard())
@bot.callback_query_handler(func=lambda c: c.data == 'back')
async def back_callback(call: types.CallbackQuery):
await bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
text='Products:', reply_markup=products_keyboard())
bot.add_custom_filter(ProductsCallbackFilter())
import asyncio
asyncio.run(bot.polling())
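# Usage note (a sketch, assuming CallbackData's default ':' separator): the
# factory encodes the prefix plus each declared part into the callback string,
# and parse() reverses the mapping, as the handlers above rely on.
#
#     products_factory.new(product_id='2')                 # -> 'products:2'
#     products_factory.parse(callback_data='products:2')
#     # -> a dict containing {'product_id': '2'}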
shailcoolboy/Warp-Trinity | ResearchApps/Measurement/warpnet_coprocessors/phy_logger/examples/twoNode_cfoLogging.py | Python | bsd-2-clause | 6,164 | 0.034069
from warpnet_framework.warpnet_client import *
from warpnet_framework.warpnet_common_params import *
from warpnet_experiment_structs import *
from twisted.internet import reactor
from datetime import *
from numpy import log10, linspace
import time
import sys
mods = [[2,2,2100,78-1]]
pktLens = [1412]; #range(1412, 91, -240) #[1440:-120:120]-28
time_on = 5*60
time_off = 0
numItrs = 1
fileName_offset = 50
#cfo = 2**20
cfo = 2**17
txGain = 55
minChanMag_D = 20
class ScriptMaster:
def startup(self):
stderr_log = open("exp_err.log", "a")
stderr_log.write("\r\n####################################################################\r\n")
stderr_log.write("%s started at %s\r\n" % (sys.argv[0], datetime.now()))
stderr_log.write("####################################################################\r\n\r\n")
stderr_log.flush()
sys.stderr = stderr_log
er_log = MyDataLogger('results/twoNode_realCFO_v%d_logging.txt' % (fileName_offset))
er_log.log("%s" % (datetime.now()) )
er_log.log("CFO: %d, Time on: %d, time off: %d, numIttrs: %d, fn_offset: %d\r\n" % (cfo, time_on, time_off, numItrs, fileName_offset))
er_log.log("Continuous test of actual CFO on emulator kits\r\n")
registerWithServer()
nodes = dict()
#WARP Nodes
createNode(nodes, Node(0, NODE_PCAP))
createNode(nodes, Node(2, NODE_PCAP))
#BER processor "node"
createNode(nodes, Node(98, NODE_PCAP)) #PHY logger
connectToServer(nodes)
controlStruct = ControlStruct()
nodes[0].addStruct('controlStruct', controlStruct)
nodes[2].addStruct('controlStruct', controlStruct)
phyCtrl0 = PHYctrlStruct()
phyCtrl1 = PHYctrlStruct()
nodes[0].addStruct('phyCtrlStruct', phyCtrl0)
nodes[2].addStruct('phyCtrlStruct', phyCtrl1)
cmdStructStart = CommandStruct(COMMANDID_STARTTRIAL, 0)
nodes[0].addStruct('cmdStructStart', cmdStructStart)
cmdStructStop = CommandStruct(COMMANDID_STOPTRIAL, 0)
nodes[0].addStruct('cmdStructStop', cmdStructStop)
cmdStructResetPER = CommandStruct(COMMANDID_RESET_PER, 0)
nodes[0].addStruct('cmdStructResetPER', cmdStructResetPER)
nodes[2].addStruct('cmdStructResetPER', cmdStructResetPER)
perStruct0 = ObservePERStruct()
perStruct1 = ObservePERStruct()
nodes[0].addStruct('perStruct', perStruct0)
nodes[2].addStruct('perStruct', perStruct1)
logParams = LogParams()
nodes[98].addStruct('logParams', logParams)
sendRegistrations(nodes)
controlStruct.packetGeneratorPeriod = mods[0][2]
controlStruct.packetGeneratorLength = pktLens[0]
        controlStruct.channel = 9
controlStruct.txPower = txGain
controlStruct.modOrderHeader = mods[0][0]
controlStruct.modOrderPayload = mods[0][1]
        #PHYCtrl params:
#param0: txStartOut delay
#param1: artificial txCFO
#param2: minPilotChanMag
#param3:
# [0-0x01]: PHYCTRL_BER_EN: enable BER reporting
# [1-0x02]: PHYCTRL_CFO_EN: enable CFO reporting
# [2-0x04]: PHYCTRL_PHYDUMP_EN: enable Rx PHY dumping
# [3-0x08]: PHYTRCL_EXTPKTDET_EN: use only ext pkt det
# [4-0x10]: PHYCTRL_COOP_EN: 0=nonCoop, 1=coopMode
# [5-0x20]: PHYCTRL_CFO_CORR_EN: 0=bypass CFO correction, 1=enable CFO correction
# [6-0x40]: PHYCTRL_SWAP_ANT: 0=AntA, 1=AntA_Swapped
#param4:
# [ 7:0]: src re-Tx delay
# [ 7:0]: relay AF Tx delay (only used when in COOP_TESTING)
# [15:8]: relay DF Tx delay (only used when in COOP_TESTING)
#param5: (0 ignores)
# [17: 0]: AGC IIR coef FB
#param6: (0 ignores)
# [31:16]: H_BA minEstMag (UFix16_15)
# [15: 0]: H_AA minEstMag (UFix16_15)
#param7: (0 ignores)
# [27:16]: AF blank stop
# [11: 0]: AF blank start
#param8: (0 ignores)
# [17: 0]: AGC IIR coef Gain
#param9: (Tx pkt types)
# [31: 0]: OR'd combination of PHYCTRL_TX_*
phyCtrl0.param0 = 32+12
phyCtrl0.param1 = cfo #(2**19 ~ 1.2e-4)
phyCtrl0.param2 = 0xFFF
# phyCtrl0.param3 = (PHYCTRL_COOP_EN | PHYCTRL_BER_EN)
phyCtrl0.param3 = (0) #PHYCTRL_COOP_EN)
# phyCtrl0.param4 = (251-2) #v21 timing; #######reTxDly/FFToffset: 251/12, 249/10
phyCtrl0.param4 = 255 #v22 timing
phyCtrl0.param5 = 0
phyCtrl0.param6 = 0
phyCtrl0.param7 = 0
phyCtrl0.param8 = 0
# phyCtrl0.param9 = (PHYCTRL_TX_NC | PHYCTRL_TX_DF | PHYCTRL_TX_AF | PHYCTRL_TX_AFGH | PHYCTRL_TX_DFGH | PHYCTRL_TX_NCMHOP)
phyCtrl0.param9 = (PHYCTRL_TX_NC)
phyCtrl1.param0 = 0
phyCtrl1.param1 = 0
phyCtrl1.param2 = minChanMag_D
# phyCtrl1.param3 = (PHYCTRL_CFO_CORR_EN | PHYCTRL_PHYDUMP_EN)
phyCtrl1.param3 = (PHYCTRL_PHYDUMP_EN)
phyCtrl1.param4 = 0
phyCtrl1.param5 = 0x20000
phyCtrl1.param6 = 1000 | (1000<<16)
phyCtrl1.param7 = 0
phyCtrl1.param8 = 0x20000
phyCtrl1.param9 = 0
nodes[0].sendToNode('phyCtrlStruct')
nodes[2].sendToNode('phyCtrlStruct')
nodes[0].sendToNode('controlStruct')
nodes[2].sendToNode('controlStruct')
nodes[0].sendToNode('cmdStructResetPER')
nodes[2].sendToNode('cmdStructResetPER')
trialInd = -1 #Increment before first trial, which should be trialNum=0
pktLen = pktLens[0];
#Experiment Loops
for ittr in range(1,numItrs+1):
print("Starting iteration %d of %d at %s" % (ittr, numItrs, datetime.now().strftime("%H:%M:%S")))
trialInd += 1
#Stop any traffic that might be running
nodes[0].sendToNode('cmdStructStop')
logParams.fileSuffix = fileName_offset+trialInd
logParams.param0 = ittr
logParams.param1 = 0
logParams.param2 = 0
logParams.param3 = 0
nodes[98].sendToNode('logParams')
#Reset the PER counters at all nodes
nodes[0].sendToNode('cmdStructResetPER')
nodes[2].sendToNode('cmdStructResetPER')
#Start the trial
nodes[0].sendToNode('cmdStructStart')
#Run until minTime elapses
time.sleep(time_on)
nodes[0].sendToNode('cmdStructStop')
time.sleep(time_off)
if not reactor.running:
return
print("############################################")
print("############# Experiment Done! #############")
print("############################################")
reactor.callFromThread(reactor.stop)
sm = ScriptMaster()
stdio.StandardIO(CmdReader()) #if interactive shell is needed
factory = WARPnetClient(sm.startup);
reactor.connectTCP('localhost', 10101, factory)
reactor.run()
SciLifeLab/scilifelab | tests/pm/test_default.py | Python | mit | 2,421 | 0.008674
import os
from cement.core import backend, handler, output
from cement.utils import test, shell
from scilifelab.pm import PmApp
from data import setup_data_files
from empty_files import setup_empty_files
## Set default configuration
filedir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
config_defaults = backend.defaults('production', 'archive', 'config', 'project','log', 'db')
config_defaults['production']['root'] = os.path.join(filedir, "data", "production")
config_defaults['archive']['root'] = os.path.join(filedir, "data", "archive")
config_defaults['project']['root'] = os.path.join(filedir, "data", "projects")
config_defaults['project']['repos'] = os.path.join(filedir, "data", "repos")
config_defaults['config']['ignore'] = ["slurm*", "tmp*"]
config_defaults['log']['level'] = "INFO"
config_defaults['log']['file'] = os.path.join(filedir, "data", "log", "pm.log")
config_defaults['db']['url'] = "localhost"
config_defaults['db']['user'] = "u"
config_defaults['db']['password'] = "p"
config_defaults['db']['samples'] = "samples-test"
config_defaults['db']['flowcells'] = "flowcells-test"
config_defaults['db']['projects'] = "projects-test"
def safe_makedir(dname):
"""Make directory"""
if not os.path.exists(dname):
try:
os.makedirs(dname)
except OSError:
if not os.path.isdir(dname):
raise
else:
print "Directory %s already exists" % dname
return dname
## Output handler for tests
class PmTestOutputHandler(output.CementOutputHandler):
class Meta:
label = 'pmtest'
def render(self, data, template = None):
for key in data:
if data[key]:
print "{} => {}".format(key, data[key].getvalue())
## Testing app
class PmTestApp(PmApp):
class Meta:
argv = []
config_files = []
config_defaults = config_defaults
output_handler = PmTestOutputHandler
## Main pm test
class PmTest(test.CementTestCase):
app_class = PmTestApp
app = None
OUTPUT_FILES = []
def setUp(self):
setup_data_files()
setup_empty_files()
def _run_app(self):
try:
self.app.setup()
with self.app.log.log_setup.applicationbound():
self.app.run()
self.app.render(self.app._output_data)
finally:
self.app.close()
williamFalcon/pytorch-lightning | tests/loggers/test_wandb.py | Python | apache-2.0 | 9,157 | 0.001747
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pickle
from argparse import ArgumentParser
from unittest import mock
import pytest
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.helpers import BoringModel
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_init(wandb):
"""Verify that basic functionality of wandb logger works.
Wandb doesn't work well with pytest so we have to mock it out here."""
# test wandb.init called when there is no W&B run
wandb.run = None
logger = WandbLogger(
name="test_name", save_dir="test_save_dir", version="test_id", project="test_project", resume="never"
)
logger.log_metrics({"acc": 1.0})
wandb.init.assert_called_once_with(
name="test_name", dir="test_save_dir", id="test_id", project="test_project", resume="never", anonymous=None
)
wandb.init().log.assert_called_once_with({"acc": 1.0})
# test wandb.init and setting logger experiment externally
wandb.run = None
run = wandb.init()
logger = WandbLogger(experiment=run)
assert logger.experiment
# test wandb.init not called if there is a W&B run
wandb.init().log.reset_mock()
wandb.init.reset_mock()
wandb.run = wandb.init()
logger = WandbLogger()
# verify default resume value
assert logger._wandb_init["resume"] == "allow"
with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
_ = logger.experiment
logger.log_metrics({"acc": 1.0}, step=3)
wandb.init.assert_called_once()
wandb.init().log.assert_called_once_with({"acc": 1.0, "trainer/global_step": 3})
# continue training on same W&B run and offset step
logger.finalize("success")
logger.log_metrics({"acc": 1.0}, step=6)
wandb.init().log.assert_called_with({"acc": 1.0, "trainer/global_step": 6})
# log hyper parameters
logger.log_hyperparams({"test": None, "nested": {"a": 1}, "b": [2, 3, 4]})
wandb.init().config.update.assert_called_once_with(
{"test": "None", "nested/a": 1, "b": [2, 3, 4]}, allow_val_change=True
)
# watch a model
logger.watch("model", "log", 10, False)
wandb.init().watch.assert_called_once_with("model", log="log", log_freq=10, log_graph=False)
assert logger.name == wandb.init().project_name()
assert logger.version == wandb.init().id
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_pickle(wandb, tmpdir):
"
|
""
Verify that pickling trainer with wandb logger works.
Wandb doesn't work well with pytest so we have to mock it out here.
"""
class Experiment:
id = "the_id"
step = 0
dir = "wandb"
def project_name(self):
return "the_project_name"
wandb.run = None
    wandb.init.return_value = Experiment()
logger = WandbLogger(id="the_id", offline=True)
trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, logger=logger)
# Access the experiment to ensure it's created
assert trainer.logger.experiment, "missing experiment"
assert trainer.log_dir == logger.save_dir
pkl_bytes = pickle.dumps(trainer)
trainer2 = pickle.loads(pkl_bytes)
assert os.environ["WANDB_MODE"] == "dryrun"
assert trainer2.logger.__class__.__name__ == WandbLogger.__name__
assert trainer2.logger.experiment, "missing experiment"
wandb.init.assert_called()
assert "id" in wandb.init.call_args[1]
assert wandb.init.call_args[1]["id"] == "the_id"
del os.environ["WANDB_MODE"]
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_dirs_creation(wandb, tmpdir):
"""Test that the logger creates the folders and files in the right place."""
logger = WandbLogger(save_dir=str(tmpdir), offline=True)
assert logger.version is None
assert logger.name is None
# mock return values of experiment
wandb.run = None
logger.experiment.id = "1"
logger.experiment.project_name.return_value = "project"
for _ in range(2):
_ = logger.experiment
assert logger.version == "1"
assert logger.name == "project"
assert str(tmpdir) == logger.save_dir
assert not os.listdir(tmpdir)
version = logger.version
model = BoringModel()
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=1, limit_train_batches=3, limit_val_batches=3)
assert trainer.log_dir == logger.save_dir
trainer.fit(model)
assert trainer.checkpoint_callback.dirpath == str(tmpdir / "project" / version / "checkpoints")
assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {"epoch=0-step=2.ckpt"}
assert trainer.log_dir == logger.save_dir
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_log_model(wandb, tmpdir):
"""Test that the logger creates the folders and files in the right place."""
wandb.run = None
model = BoringModel()
# test log_model=True
logger = WandbLogger(log_model=True)
logger.experiment.id = "1"
logger.experiment.project_name.return_value = "project"
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
trainer.fit(model)
wandb.init().log_artifact.assert_called_once()
# test log_model='all'
wandb.init().log_artifact.reset_mock()
wandb.init.reset_mock()
logger = WandbLogger(log_model="all")
logger.experiment.id = "1"
logger.experiment.project_name.return_value = "project"
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
trainer.fit(model)
assert wandb.init().log_artifact.call_count == 2
# test log_model=False
wandb.init().log_artifact.reset_mock()
wandb.init.reset_mock()
logger = WandbLogger(log_model=False)
logger.experiment.id = "1"
logger.experiment.project_name.return_value = "project"
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
trainer.fit(model)
assert not wandb.init().log_artifact.called
# test correct metadata
import pytorch_lightning.loggers.wandb as pl_wandb
pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
wandb.init().log_artifact.reset_mock()
wandb.init.reset_mock()
wandb.Artifact.reset_mock()
logger = pl_wandb.WandbLogger(log_model=True)
logger.experiment.id = "1"
logger.experiment.project_name.return_value = "project"
trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
trainer.fit(model)
wandb.Artifact.assert_called_once_with(
name="model-1",
type="model",
metadata={
"score": None,
"original_filename": "epoch=1-step=5-v3.ckpt",
"ModelCheckpoint": {
"monitor": None,
"mode": "min",
"save_last": None,
"save_top_k": 1,
"save_weights_only": False,
"_every_n_train_steps": 0,
},
},
)
def test_wandb_sanitize_callable_params(tmpdir):
"""
    Callback functions are not serializable. Therefore, we give them a chance
    to return something and, if the returned type is not accepted, return None.
"""
opt = "--max_epochs 1".split(" ")
parser = ArgumentParser()
parser = Trainer.add_argparse_args(parent_parser=parser)
params = parser.parse_args(opt)
    def return_something():
        return "something"
cga-harvard/worldmap | worldmap/management/commands/fix_migrated_layers.py | Python | gpl-3.0 | 4,316 | 0.001854
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import sys
import requests
from django.core.management.base import BaseCommand
from geonode.geoserver.helpers import gs_catalog, ogc_server_settings
from geonode.layers.models import Layer
from geonode.people.models import Profile
def fix_layer_in_gwc(layer):
print 'Fixing %s in GWC' % layer.alternate
headers = {'Content-Type': 'application/xml'}
url = ogc_server_settings.public_url
user = gs_catalog.username
password = gs_catalog.password
url_xml = "%sgwc/rest/layers/%s.xml" % (url, layer.alternate)
print url_xml
resp = requests.get(url=url_xml, auth=(user, password))
xml = resp.content
xml_fixed = xml.replace('<mimeFormats>\n <string>image/png</string>\n </mimeFormats>', '<mimeFormats>\n <string>image/png</string>\n <string>image/png8</string>\n </mimeFormats>')
data = xml_fixed.replace('\n', '')
print requests.post(url_xml, data=data, headers=headers, auth=(user, password)).text
def is_gs_resource_valid(layer):
gs_resource = gs_catalog.get_resource(
layer.name,
store=layer.store,
workspace=layer.workspace)
if gs_resource:
return True
else:
return False
class Command(BaseCommand):
"""
Fixes migrated WorldMap layers (from 1.2 to 2.8.x).
This includes:
1) layer.save to generate syles, links and thumbnails and sync with sync_geofence
2) fixes GWC
The command detects also broken GeoNode layers (layer without a resource in GeoServer)
"""
help = 'Fixes migrated WorldMap layers (from 1.2 to 2.8.x)'
def add_arguments(self, parser):
parser.add_argument(
'--layername',
dest='layername',
default=None,
help='Filter by a layername.',
)
        parser.add_argument(
            '--owner',
            dest='owner',
            default=None,
            help='Filter by an owner.',
        )
        # handle() below reads options['remove']; define the flag here so
        # broken layers can optionally be deleted.
        parser.add_argument(
            '--remove',
            dest='remove',
            action='store_true',
            default=False,
            help='Remove broken layers.',
        )
def handle(self, **options):
if options['layername']:
layers = Layer.objects.filter(name__icontains=options['layername'])
else:
layers = Layer.objects.all()
if options['owner']:
layers = layers.filter(owner=Profile.objects.filter(username=options['owner']))
layers_count = layers.count()
count = 0
layer_errors = []
for layer in layers:
count += 1
try:
print 'Fixing layer %s/%s: %s owned by %s' % (count,
layers_count,
layer.alternate,
layer.owner.username)
if is_gs_resource_valid(layer):
print 'Saving %s layer' % layer.alternate
layer.save()
fix_layer_in_gwc(layer)
else:
print 'Layer %s is broken' % layer.alternate
                    layer_errors.append(layer)
if options['remove']:
print 'Removing this layer...'
layer.delete()
            except Exception:
print("Unexpected error:", sys.exc_info()[0])
print '\n***** Layers with errors: %s in a total of %s *****' % (len(layer_errors), layers_count)
for layer_error in layer_errors:
            print '%s by %s' % (layer_error.alternate, layer_error.owner.username)
Gilbert88/mesos | src/python/cli_new/lib/cli/util.py | Python | apache-2.0 | 13,337 | 0
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A collection of helper functions used by the CLI and its Plugins.
"""
import imp
import importlib
import ipaddress
import json
import os
import re
import textwrap
import urllib.parse
from kazoo.client import KazooClient
from cli.exceptions import CLIException
def import_modules(package_paths, module_type):
"""
Looks for python packages under `package_paths` and imports
them as modules. Returns a dictionary of the basename of the
`package_paths` to the imported modules.
"""
modules = {}
for package_path in package_paths:
# We put the imported module into the namespace of
# "mesos.<module_type>.<>" to keep it from cluttering up
# the import namespace elsewhere.
package_name = os.path.basename(package_path)
package_dir = os.path.dirname(package_path)
module_name = "cli." + module_type + "." + package_name
try:
module = importlib.import_module(module_name)
except Exception:
obj, filename, data = imp.find_module(package_name, [package_dir])
module = imp.load_module(module_name, obj, filename, data)
modules[package_name] = module
return modules
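# Example (hypothetical paths and module_type): two plugin packages imported
# from disk are keyed by their basename in the returned dictionary.
#
#     modules = import_modules(["/plugins/example", "/plugins/other"], "plugins")
#     modules["example"]   # -> the module imported as "cli.plugins.example"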
def get_module(modules, import_path):
"""
Given a modules dictionary returned by `import_modules()`,
return a reference to the module at `import_path` relative
to the base module. For example, get_module(modules, "example.stuff")
will return a reference to the "stuff" module inside the
imported "example" plugin.
"""
import_path = import_path.split('.')
try:
module = modules[import_path[0]]
if len(import_path) > 1:
            module = getattr(module, ".".join(import_path[1:]))
except Exception as exception:
raise CLIException("Un
|
able to get module: {error}"
.format(error=str(exception)))
return module
def completions(comp_words, current_word, argv):
"""
Helps autocomplete by returning the appropriate
completion words under three conditions.
1) Returns `comp_words` if the completion word is
potentially in that list.
2) Returns an empty list if there is no possible
completion.
3) Returns `None` if the autocomplete is already done.
"""
comp_words += ["-h", "--help", "--version"]
if not argv:
return comp_words
if len(argv) == 1:
if argv[0] not in comp_words and current_word:
return comp_words
if argv[0] in comp_words and current_word:
return comp_words
if argv[0] not in comp_words and not current_word:
return []
if argv[0] in comp_words and not current_word:
return None
if len(argv) > 1 and argv[0] not in comp_words:
return []
if len(argv) > 1 and argv[0] in comp_words:
return None
raise CLIException("Unreachable")
def format_commands_help(cmds):
"""
Helps format plugin commands for display.
"""
longest_cmd_name = max(list(cmds.keys()), key=len)
help_string = ""
for cmd in sorted(cmds.keys()):
# For the top-level entry point, `cmds` is a single-level
# dictionary with `short_help` as the values. For plugins,
# `cmds` is a two-level dictionary, where `short_help` is a
# field in each sub-dictionary.
short_help = cmds[cmd]
if isinstance(short_help, dict):
short_help = short_help["short_help"]
num_spaces = len(longest_cmd_name) - len(cmd) + 2
help_string += " %s%s%s\n" % (cmd, " " * num_spaces, short_help)
return help_string
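# For example (command names assumed), each command is padded so the help
# strings line up two spaces past the longest name:
#
#     format_commands_help({'agent': 'Interact with agents',
#                           'task': 'Interact with tasks'})
#     # -> "  agent  Interact with agents\n  task   Interact with tasks\n"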
def format_subcommands_help(cmd):
"""
Helps format plugin subcommands for display.
"""
arguments = " ".join(cmd["arguments"])
short_help = cmd["short_help"]
long_help = textwrap.dedent(cmd["long_help"].rstrip())
long_help = " " + "\n ".join(long_help.lstrip().split('\n'))
flags = cmd["flags"]
flags["-h --help"] = "Show this screen."
flag_string = ""
    if flags:
longest_flag_name = max(list(flags.keys()), key=len)
for flag in sorted(flags.keys()):
num_spaces = len(longest_flag_name) - len(flag) + 2
flag_string += " %s%s%s\n" % (flag, " " * num_spaces, flags[flag])
flag_string = flag_string.rstrip()
return (arguments, short_help, long_help, flag_string)
def join_plugin_paths(settings, config):
"""
Return all the plugin paths combined
from both settings and the config file.
"""
builtin_paths = settings.PLUGINS
try:
config_paths = config.plugins()
except Exception as exception:
raise CLIException("Error: {error}.".format(error=str(exception)))
return builtin_paths + config_paths
def sanitize_address(address):
"""
Sanitize an address, ensuring that it has a format recognizable by the CLI.
"""
# Try and parse the address to make sure it is parseable.
try:
parsed = urllib.parse.urlparse(address)
except Exception as exception:
raise CLIException("Unable to parse address: {error}"
.format(error=str(exception)))
# Since we allow addresses to be specified without an
# explicit scheme, some fields in the parsed address may
# be missing. Patch it up to force an implicit HTTP scheme.
if parsed.scheme == "" and parsed.netloc == "":
address = "http://{addr}".format(addr=address)
elif parsed.scheme == "" and parsed.netloc != "":
address = "http:{addr}".format(addr=address)
# Try and parse the address again to make sure it
# now has all the parts we expect and that they are valid.
try:
parsed = urllib.parse.urlparse(address)
except Exception as exception:
raise CLIException("Unable to parse address: {error}"
.format(error=str(exception)))
# We only support HTTP and HTTPS schemes.
if parsed.scheme != "http" and parsed.scheme != "https":
raise CLIException("Invalid scheme '{scheme}' in address"
.format(scheme=parsed.scheme))
# There must be a hostname present.
if parsed.hostname == "":
raise CLIException("Missing hostname in address")
# We do not support IPv6 in the hostname (yet).
try:
ipaddress.IPv6Address(parsed.hostname)
raise CLIException("IPv6 addresses are unsupported")
    except ValueError:
pass
valid_ip_v4_address = False
# We either accept IPv4 addresses, or DNS names as the hostname. In the
# check below we try and parse the hostname as an IPv4 address, if this
# does not succeed, then we assume the hostname is formatted as a DNS name.
try:
ipaddress.IPv4Address(parsed.hostname)
valid_ip_v4_address = True
    except ValueError:
pass
# If we have an IPv4 address then we require a port to be specified.
if valid_ip_v4_address and parsed.port is None:
raise CLIException("Addresses formatted as IP must contain a port")
# We allow ports for both IPv4 addresses and DNS
# names, but they must be in a specific range.
if parsed.port and (parsed.port < 0 or parsed.port > 65535):
raise CLIException("Port '{port}' is out of range"
                           .format(port=parsed.port))
    return address
dr4ke616/LazyTorrent | application/lib/the_pirate_bay/utils.py | Python | gpl-3.0 | 2,327 | 0
from collections import OrderedDict
from purl import URL as PURL
def URL(base, path, segments=None, defaults=None):
"""
    URL segment handler capable of getting and setting segments by name. The
URL is constructed by joining base, path and segments.
For each segment a property capable of getting and setting that segment is
    created dynamically.
"""
# Make a copy of the Segments class
url_class = type(Segments.__name__, Segments.__bases__,
dict(Segments.__dict__))
segments = [] if segments is None else segments
defaults = [] if defaults is None else defaults
# For each segment attach a property capable of getting and setting it
for segment in segments:
setattr(url_class, segment, url_class._segment(segment))
# Instantiate the class with the actual parameters
return url_class(base, path, segments, defaults)
class Segments(object):
"""
URL segment handler, not intended for direct use. The URL is constructed by
joining base, path and segments.
"""
def __init__(self, base, path, segments, defaults):
# Preserve the base URL
self.base = PURL(base, path=path)
# Map the segments and defaults lists to an ordered dict
self.segments = OrderedDict(zip(segments, defaults))
def build(self):
# Join base segments and segments
segments = self.base.path_segments() + tuple(self.segments.values())
# Create a new URL with the segments replaced
url = self.base.path_segments(segments)
return url
def full_path(self):
full_path = self.build().as_string()
full_path = full_path.replace(self.base.host(), '')
full_path = full_path.replace(self.base.scheme(), '')
return full_path[4:]
def __str__(self):
return self.build().as_string()
def _get_segment(self, segment):
return self.segments[segment]
def _set_segment(self, segment, value):
self.segments[segment] = value
@classmethod
def _segment(cls, segment):
"""
Returns a property capable of setting and getting a segment.
"""
return property(
fget=lambda x: cls._get_segment(x, segment),
fset=lambda x, v: cls._set_segment(x, segment, v),
)
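# Minimal usage sketch (base URL and segment names made up): each declared
# segment becomes a property on the returned object.
#
#     url = URL('http://example.com', 'search',
#               segments=['query', 'page'], defaults=['', '0'])
#     url.query = 'ubuntu'   # set a segment through its generated property
#     url.page               # -> '0'
#     str(url)               # -> the base, path and segments joined together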
offbye/PiBoat | pyboat/gps_test.py | Python | apache-2.0 | 604 | 0.003333
#!/usr/bin/python2.7
# -*- encoding: UTF-8 -*-
# gps_test created on 15/8/22 at 12:44 AM
# Copyright 2014 offbye@gmail.com
"""
"""
__author__ = ['"Xitao":<offbye@gmail.com>']
import gps3
gps_connection = gps3.GPSDSocket()
gps_fix = gps3.Fix()
try:
for new_data in gps_connection:
if new_data:
gps_fix.refresh(new_data)
print(gps_fix.TPV['time'])
print(gps_fix.TPV['lat'])
print(gps_fix.TPV['lon'])
print(gps_fix.SKY['gdop'])
except KeyboardInterrupt:
gps_connection.close()
    print('\nTerminated by user\nGood Bye.\n')
pannkotsky/groupmate | backend/apps/users/login_backend.py | Python | mit | 618 | 0
from .models import EmailUser
class EmailOrPhoneModelBackend:
def authenticate(self, username=None, password=None):
if '@' in username:
            kwargs = {'email__iexact': username}
else:
kwargs = {'phone': username}
try:
user = EmailUser.objects.get(**kwargs)
if user.check_password(password):
return user
except EmailUser.DoesNotExist:
return None
def get_user(self, user_id):
try:
return EmailUser.objects.get(pk=user_id)
        except EmailUser.DoesNotExist:
return None
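# To take effect, the backend would typically be registered in the Django
# settings (dotted path assumed from this app's location):
#
#     AUTHENTICATION_BACKENDS = [
#         'apps.users.login_backend.EmailOrPhoneModelBackend',
#     ]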
AcroManiac/AcroLink | Server/DjangoServer/manage.py | Python | gpl-3.0 | 810 | 0
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoServer.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
EricSchles/python-route53 | tests/test_util.py | Python | mit | 916 | 0.009825
import unittest
from tests.test_basic import BaseTestCase
from datetime import timedelta, datetime, tzinfo
class UTC(tzinfo):
"""UTC"""
def utcoffset(self, dt):
return timedelta(0)
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return timedelta(0)
class UtilTestCase(BaseTestCase):
"""
Tests utils
"""
def test_parse_iso_8601_time_str(self):
"""
At times, Amazon hands us a timestamp with no microseconds.
"""
import datetime
from route53.util import parse_iso_8601_time_str
        self.assertEqual(parse_iso_8601_time_str('2013-07-28T01:00:01Z'),
datetime.datetime(2013, 7, 28, 1, 0, 1, 0, \
tzinfo=UTC()))
self.assertEqual(parse_iso_8601_time_str('2013-07-28T01:00:01.001Z'),
datetime.datetime(2013, 7, 28, 1, 0, 1, 1000, \
tzinfo=UTC()))
suutari/shoop | shuup_setup_utils/parsing.py | Python | agpl-3.0 | 928 | 0
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
def get_test_requirements_from_tox_ini(path):
result = []
between_begin_and_end = False
    with open(os.path.join(path, 'tox.ini'), 'rt') as fp:
for line in fp:
if line.strip() == '# BEGIN testing deps':
between_begin_and_end = True
elif line.strip() == '# END testing deps' or not line[0].isspace():
between_begin_and_end = False
elif between_begin_and_end:
result.append(line.strip())
return result
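# The parser above expects a marked block inside tox.ini, e.g. (contents
# assumed):
#
#     deps =
#         # BEGIN testing deps
#         pytest
#         pytest-cov
#         # END testing deps
#
# which would yield ['pytest', 'pytest-cov'].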
def get_long_description(path):
"""
Get long description from file.
"""
if path:
with open(path, 'rt') as fp:
return fp.read()
return None
jianghuaw/nova | nova/api/openstack/placement/handler.py | Python | apache-2.0 | 9,268 | 0
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handlers for placement API.
Individual handlers are associated with URL paths in the
ROUTE_DECLARATIONS dictionary. At the top level each key is a Routes
compliant path. The value of that key is a dictionary mapping
individual HTTP request methods to a Python function representing a
simple WSGI application for satisfying that request.
The ``make_map`` method processes ROUTE_DECLARATIONS to create a
Routes.Mapper, including automatic handlers to respond with a
405 when a request is made against a valid URL with an invalid
method.
"""
import routes
import webob
from oslo_log import log as logging
from nova.api.openstack.placement.handlers import aggregate
from nova.api.openstack.placement.handlers import allocation
from nova.api.openstack.placement.handlers import allocation_candidate
from nova.api.openstack.placement.handlers import inventory
from nova.api.openstack.placement.handlers import resource_class
from nova.api.openstack.placement.handlers import resource_provider
from nova.api.openstack.placement.handlers import root
from nova.api.openstack.placement.handlers import trait
from nova.api.openstack.placement.handlers import usage
from nova.api.openstack.placement import policy
from nova.api.openstack.placement import util
from nova import exception
from nova.i18n import _
LOG = logging.getLogger(__name__)
# URLs and Handlers
# NOTE(cdent): When adding URLs here, do not use regex patterns in
# the path parameters (e.g. {uuid:[0-9a-zA-Z-]+}) as that will lead
# to 404s that are controlled outside of the individual resources
# and thus do not include specific information on the why of the 404.
ROUTE_DECLARATIONS = {
'/': {
'GET': root.home,
},
# NOTE(cdent): This allows '/placement/' and '/placement' to
# both work as the root of the service, which we probably want
# for those situations where the service is mounted under a
# prefix (as it is in devstack). While weird, an empty string is
# a legit key in a dictionary and matches as desired in Routes.
'': {
'GET': root.home,
},
'/resource_classes': {
'GET': resource_class.list_resource_classes,
'POST': resource_class.create_resource_class
},
'/resource_classes/{name}': {
'GET': resource_class.get_resource_class,
'PUT': resource_class.update_resource_class,
'DELETE': resource_class.delete_resource_class,
},
'/resource_providers': {
'GET': resource_provider.list_resource_providers,
'POST': resource_provider.create_resource_provider
},
'/resource_providers/{uuid}': {
'GET': resource_provider.get_resource_provider,
'DELETE': resource_provider.delete_resource_provider,
'PUT': resource_provider.update_resource_provider
},
'/resource_providers/{uuid}/inventories': {
'GET': inventory.get_inventories,
'POST': inventory.create_inventory,
'PUT': inventory.set_inventories,
'DELETE': inventory.delete_inventories
},
'/resource_providers/{uuid}/inventories/{resource_class}': {
'GET': inventory.get_inventory,
'PUT': inventory.update_inventory,
'DELETE': inventory.delete_inventory
},
'/resource_providers/{uuid}/usages': {
'GET': usage.list_usages
},
'/resource_providers/{uuid}/aggregates': {
'GET': aggregate.get_aggregates,
'PUT': aggregate.set_aggregates
},
'/resource_providers/{uuid}/allocations': {
'GET': allocation.list_for_resource_provider,
},
'/allocations/{consumer_uuid}': {
'GET': allocation.list_for_consumer,
'PUT': allocation.set_allocations,
'DELETE': allocation.delete_allocations,
},
'/allocation_candidates': {
'GET': allocation_candidate.list_allocation_candidates,
},
'/traits': {
'GET': trait.list_traits,
},
'/traits/{name}': {
'GET': trait.get_trait,
'PUT': trait.put_trait,
'DELETE': trait.delete_trait,
},
'/resource_providers/{uuid}/traits': {
'GET': trait.list_traits_for_resource_provider,
'PUT': trait.update_traits_for_resource_provider,
'DELETE': trait.delete_traits_for_resource_provider
},
'/usages': {
'GET': usage.get_total_usages,
},
}
def dispatch(environ, start_response, mapper):
"""Find a matching route for the current request.
If no match is found, raise a 404 response.
If there is a matching route, but no matching handler
for the given method, raise a 405.
"""
result = mapper.match(environ=environ)
if result is None:
raise webob.exc.HTTPNotFound(
json_formatter=util.json_error_formatter)
# We can't reach this code without action being present.
handler = result.pop('action')
environ['wsgiorg.routing_args'] = ((), result)
return handler(environ, start_response)
def handle_405(environ, start_response):
"""Return a 405 response when method is not allowed.
If _methods are in routing_args, send an allow header listing
the methods that are possible on the provided URL.
"""
_methods = util.wsgi_path_item(environ, '_methods')
headers = {}
if _methods:
# Ensure allow header is a python 2 or 3 native string (thus
# not unicode in python 2 but stay a string in python 3)
# In the process done by Routes to save the allowed methods
# to its routing table they become unicode in py2.
headers['allow'] = str(_methods)
# Use Exception class as WSGI Application. We don't want to raise here.
response = webob.exc.HTTPMethodNotAllowed(
_('The method specified is not allowed for this resource.'),
headers=headers, json_formatter=util.json_error_formatter)
return response(environ, start_response)
def make_map(declarations):
"""Process route declarations to create a Route Mapper."""
mapper = routes.Mapper()
for route, targets in declarations.items():
allowed_methods = []
for method in targets:
mapper.connect(route, action=targets[method],
conditions=dict(method=[method]))
allowed_methods.append(method)
allowed_methods = ', '.join(allowed_methods)
mapper.connect(route, action=handle_405, _methods=allowed_methods)
return mapper
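# Rough illustration (environ trimmed to the relevant keys): a GET to /traits
# resolves to its handler, while an unsupported method on the same path falls
# through to the handle_405 catch-all route added above.
#
#     mapper = make_map(ROUTE_DECLARATIONS)
#     mapper.match(environ={'PATH_INFO': '/traits', 'REQUEST_METHOD': 'GET'})
#     # -> {'action': trait.list_traits}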
class PlacementHandler(object):
"""Serve Placement API.
Dispatch to handlers defined in ROUTE_DECLARATIONS.
"""
def __init__(self, **local_config):
# NOTE(cdent): Local config currently unused.
self._map = make_map(ROUTE_DECLARATIONS)
def __call__(self, environ, start_response):
# All requests but '/' require admin.
if environ['PATH_INFO'] != '/':
context = environ['placement.context']
# TODO(cdent): Using is_admin everywhere (except /) is
# insufficiently flexible for future use case but is
# convenient for initial exploration.
if not policy.placement_authorize(context, 'placement'):
raise webob.exc.HTTPForbidden(
_('admin required'),
json_formatter=util.json_error_formatter)
# Check that an incoming request with a content-length header
# that is an integer > 0 and not empty, also has a content-type
# header that is not empty. If not raise a 400.
clen = environ.get('CONTENT_LENGTH')
try:
if clen and (int(clen) > 0) and not environ.get('CONTENT_TYPE'):
                raise webob.exc.HTTPBadRequest(
                    _('content-type header required when content-length > 0'),
                    json_formatter=util.json_error_formatter)
        except ValueError:
            raise webob.exc.HTTPBadRequest(
                _('content-length header must be an integer'),
                json_formatter=util.json_error_formatter)
        return dispatch(environ, start_response, self._map)
joergdietrich/astropy | astropy/utils/console.py | Python | bsd-3-clause | 33,248 | 0.000211
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
__all__ = [
'isatty', 'color_print', 'human_time', 'human_file_size',
'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
'terminal_size']
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
"""Singleton class given access to IPython streams, etc."""
@classproperty
def get_ipython(cls):
try:
from IPython import get_ipython
except ImportError:
pass
return get_ipython
@classproperty
    def OutStream(cls):
if not hasattr(cls, '_OutStream'):
cls._OutStream = None
try:
cls.get_ipython()
except NameError:
return None
try:
from ipykernel.iostream import OutStream
except ImportError:
try:
from IPython.zmq.iostream import OutStream
except ImportError:
from IPython import version_info
if version_info[0] >= 4:
return None
try:
from IPython.kernel.zmq.iostream import OutStream
except ImportError:
return None
cls._OutStream = OutStream
return cls._OutStream
@classproperty
def ipyio(cls):
if not hasattr(cls, '_ipyio'):
try:
from IPython.utils import io
except ImportError:
cls._ipyio = None
else:
cls._ipyio = io
return cls._ipyio
@classproperty
def IOStream(cls):
if cls.ipyio is None:
return None
else:
return cls.ipyio.IOStream
@classmethod
def get_stream(cls, stream):
return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
"""
This utility function contains the logic to determine what streams to use
by default for standard out/err.
Typically this will just return `sys.stdout`, but it contains additional
logic for use in IPython on Windows to determine the correct stream to use
(usually ``IPython.util.io.stdout`` but only if sys.stdout is a TTY).
"""
if stderr:
stream = 'stderr'
else:
stream = 'stdout'
sys_stream = getattr(sys, stream)
if not isatty(sys_stream) or _IPython.OutStream is None:
return sys_stream
# Our system stream is an atty and we're in ipython.
ipyio_stream = _IPython.get_stream(stream)
if ipyio_stream is not None and isatty(ipyio_stream):
# Use the IPython console output stream
return ipyio_stream
else:
# sys.stdout was set to some other non-TTY stream (a file perhaps)
# so just use it directly
return sys_stream
def isatty(file):
"""
Returns `True` if ``file`` is a tty.
Most built-in Python file-like objects have an `isatty` member,
but some user-defined types may not, so this assumes those are not
ttys.
"""
if (multiprocessing.current_process().name != 'MainProcess' or
threading.current_thread().getName() != 'MainThread'):
return False
if hasattr(file, 'isatty'):
return file.isatty()
# Use two isinstance calls to only evaluate IOStream when necessary.
if (_IPython.OutStream is None or
(not isinstance(file, _IPython.OutStream) and
not isinstance(file, _IPython.IOStream))):
return False
# File is an IPython OutStream or IOStream. Check whether:
# - File name is 'stdout'; or
# - File wraps a Console
if getattr(file, 'name', None) == 'stdout':
return True
if hasattr(file, 'stream'):
# On Windows, in IPython 2 the standard I/O streams will wrap
# pyreadline.Console objects if pyreadline is available; this should
# be considered a TTY.
try:
            from pyreadline.console import Console as PyreadlineConsole
except ImportError:
return False
return isinstance(file.stream, PyreadlineConsole)
return False
def terminal_size(file=None):
"""
Returns a tuple (height, width) containing the height and width of
the terminal.
    This function will look for the width and height in multiple areas
before falling back on the width and height in astropy's
configuration.
"""
if file is None:
file = _get_stdout()
try:
s = struct.pack(str("HHHH"), 0, 0, 0, 0)
x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
(lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
if lines > 12:
lines -= 6
if width > 10:
width -= 1
if lines <= 0 or width <= 0:
raise Exception('unable to get terminal size')
return (lines, width)
except Exception:
try:
# see if POSIX standard variables will work
return (int(os.environ.get('LINES')),
int(os.environ.get('COLUMNS')))
except TypeError:
# fall back on configuration variables, or if not
# set, (25, 80)
lines = conf.max_lines
width = conf.max_width
if lines is None:
lines = 25
if width is None:
width = 80
return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
    This won't actually affect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
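# For example, 'blue' maps to the '0;34' code, so (escapes shown literally):
#
#     _color_text('Here is a message', 'blue')
#     # -> '\033[0;34mHere is a message\033[0m'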
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
"""Write the supplied string with the given write function like
    ``write(s)``, but use a writer for the locale's preferred encoding in case
    of a UnicodeEncodeError. Failing that, attempt to write with 'utf-8' or
    'latin-1'.
    """
yanheven/ceilometer | ceilometer/alarm/evaluator/combination.py | Python | apache-2.0 | 4,511 | 0
#
# Copyright 2013 eNovance <licensing@enovance.com>
#
# Authors: Mehdi Abaakouk <mehdi.abaakouk@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from ceilometer.alarm import evaluator
from ceilometer.i18n import _
from ceilometer.openstack.common import log
LOG = log.getLogger(__name__)
COMPARATORS = {'and': all, 'or': any}
class CombinationEvaluator(evaluator.Evaluator):
def _get_alarm_state(self, alarm_id):
try:
alarm = self._client.alarms.get(alarm_id)
except Exception:
LOG.exception(_('alarm retrieval failed'))
return None
return alarm.state
def _sufficient_states(self, alarm, states):
"""Check for the sufficiency of the data for evaluation.
Ensure that there is sufficient data for evaluation,
transitioning to unknown otherwise.
"""
# note(sileht): alarm can be evaluated only with
# stable state of other alarm
alarms_missing_states = [alarm_id for alarm_id, state in states
if not state or state == evaluator.UNKNOWN]
sufficient = len(alarms_missing_states) == 0
if not sufficient and alarm.rule['operator'] == 'or':
            # if operator is 'or' and at least one alarm is in 'alarm' state,
            # then the combined alarm's state should be 'alarm'
sufficient = bool([alarm_id for alarm_id, state in states
if state == evaluator.ALARM])
if not sufficient and alarm.state != evaluator.UNKNOWN:
reason = (_('Alarms %(alarm_ids)s'
' are in unknown state') %
{'alarm_ids': ",".join(alarms_missing_states)})
reason_data = self._reason_data(alarms_missing_states)
self._refresh(alarm, evaluator.UNKNOWN, reason, reason_data)
return sufficient
@staticmethod
def _reason_data(alarm_ids):
"""Create a reason data dictionary for this evaluator type."""
return {'type': 'combination', 'alarm_ids': alarm_ids}
@classmethod
def _reason(cls, alarm, state, underlying_states):
"""Fabricate reason string."""
transition = alarm.state != state
alarms_to_report = [alarm_id for alarm_id, alarm_state
in underlying_states
                            if alarm_state == state]
reason_data = cls._reason_data(alarms_to_report)
if transition:
return (_('Transition to %(state)s due to alarms'
                      ' %(alarm_ids)s in state %(state)s') %
{'state': state,
'alarm_ids': ",".join(alarms_to_report)}), reason_data
return (_('Remaining as %(state)s due to alarms'
' %(alarm_ids)s in state %(state)s') %
{'state': state,
'alarm_ids': ",".join(alarms_to_report)}), reason_data
def _transition(self, alarm, underlying_states):
"""Transition alarm state if necessary."""
op = alarm.rule['operator']
if COMPARATORS[op](s == evaluator.ALARM
for __, s in underlying_states):
state = evaluator.ALARM
else:
state = evaluator.OK
continuous = alarm.repeat_actions
reason, reason_data = self._reason(alarm, state, underlying_states)
if alarm.state != state or continuous:
self._refresh(alarm, state, reason, reason_data)
def evaluate(self, alarm):
if not self.within_time_constraint(alarm):
LOG.debug(_('Attempted to evaluate alarm %s, but it is not '
'within its time constraint.') % alarm.alarm_id)
return
states = zip(alarm.rule['alarm_ids'],
itertools.imap(self._get_alarm_state,
alarm.rule['alarm_ids']))
if self._sufficient_states(alarm, states):
self._transition(alarm, states)
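# Illustration of the COMPARATORS table driving _transition (state values
# assumed to be the evaluator module's 'alarm'/'ok' strings):
#
#     states = [('a1', 'alarm'), ('a2', 'ok')]
#     all(s == 'alarm' for _, s in states)   # 'and' rule -> False -> OK
#     any(s == 'alarm' for _, s in states)   # 'or' rule  -> True  -> ALARM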
daicang/Leetcode-solutions | 487-max-consecutive-ones-ii.py | Python | mit | 773 | 0.006468
class Solution(object):
def findMaxConsecutiveOnes(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
# sliding window
max_len = 0
li = 0
ri = 0
l = 0
inserted = False
for ri, rval in enumerate(nums):
if rval == 1:
l += 1
max_len = max(max_len, l)
else:
if not inserted:
inserted = True
l += 1
max_len = max(max_len, l)
else:
while nums[li] == 1:
li += 1
li += 1
l = ri-li+1
max_len = max(max_len, l)
return max_len
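# A quick self-check of the sliding-window solution above; the expected
# answers are assumptions based on the problem statement (longest run of 1s
# after flipping at most one 0):
if __name__ == '__main__':
    assert Solution().findMaxConsecutiveOnes([1, 0, 1, 1, 0]) == 4
    assert Solution().findMaxConsecutiveOnes([1, 1, 1]) == 3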
|
chris48s/UK-Polling-Stations
|
polling_stations/apps/data_collection/management/commands/import_bradford.py
|
Python
|
bsd-3-clause
| 408
| 0.009804
|
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = 'E08000032'
    addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsv'
elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
|
noemis-fr/custom
|
account_banking_natixis_direct_debit/__openerp__.py
|
Python
|
gpl-3.0
| 1,248
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
##############################################################################
{
    'name': 'Account Banking Natixis Direct Debit',
'summary': 'Create Natixis files for Direct Debit',
'version': '7.0.0.2.0',
'license': 'AGPL-3',
'author': "Mind And Go",
'website': "http://www.mind-and-go.com",
    'category': 'Banking addons',
'depends': [
'account_direct_debit',
'account_banking_pain_base',
'account_payment_partner',
'account'
],
'external_dependencies': {
'python': ['unidecode', 'lxml'],
},
'data': [
'views/account_banking_natixis_view.xml',
'views/company_view.xml',
'wizard/export_natixis_view.xml',
'security/ir.model.access.csv',
'views/invoice.xml',
'data/payment_type_natixis.xml',
'views/account_payment_view.xml',
'views/partner.xml',
'views/natixis_file_sequence.xml',
],
'demo': ['sepa_direct_debit_demo.xml'],
'description': '''
Module to export direct debit payment orders in Natixis TXT file format.
''',
'active': False,
'installable': True,
}
|
0x0all/scikit-learn
|
sklearn/preprocessing/label.py
|
Python
|
bsd-3-clause
| 28,286
| 0
|
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Mathieu Blondel <mathieu@mblondel.org>
# Olivier Grisel <olivier.grisel@ensta.org>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Joel Nothman <joel.nothman@gmail.com>
# Hamzeh Alsalhi <ha258@cornell.edu>
# License: BSD 3 clause
from collections import defaultdict
import itertools
import array
import warnings
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..utils.fixes import np_version
from ..utils.fixes import sparse_min_max
from ..utils.fixes import astype
from ..utils.fixes import in1d
from ..utils import deprecated, column_or_1d
from ..utils.validation import check_array
from ..utils.multiclass import unique_labels
from ..utils.multiclass import type_of_target
from ..externals import six
zip = six.moves.zip
map = six.moves.map
__all__ = [
'label_binarize',
'LabelBinarizer',
'LabelEncoder',
]
def _check_numpy_unicode_bug(labels):
"""Check that user is not subject to an old numpy bug
Fixed in master before 1.7.0:
https://github.com/numpy/numpy/pull/243
"""
if np_version[:3] < (1, 7, 0) and labels.dtype.kind == 'U':
raise RuntimeError("NumPy < 1.7.0 does not implement searchsorted"
" on unicode data correctly. Please upgrade"
" NumPy to use LabelEncoder with unicode inputs.")
class LabelEncoder(BaseEstimator, TransformerMixin):
"""Encode labels with value between 0 and n_classes-1.
Attributes
----------
classes_ : array of shape (n_class,)
Holds the label for each class.
Examples
--------
`LabelEncoder` can be used to normalize labels.
>>> from sklearn import preprocessing
>>> le = preprocessing.LabelEncoder()
>>> le.fit([1, 2, 2, 6])
LabelEncoder()
>>> le.classes_
array([1, 2, 6])
>>> le.transform([1, 1, 2, 6]) #doctest: +ELLIPSIS
array([0, 0, 1, 2]...)
>>> le.inverse_transform([0, 0, 1, 2])
array([1, 1, 2, 6])
It can also be used to transform non-numerical labels (as long as they are
hashable and comparable) to numerical labels.
>>> le = preprocessing.LabelEncoder()
>>> le.fit(["paris", "paris", "tokyo", "amsterdam"])
LabelEncoder()
>>> list(le.classes_)
    ['amsterdam', 'paris', 'tokyo']
>>> le.transform(["tokyo", "tokyo", "paris"]) #doctest: +ELLIPSIS
array([2, 2, 1]...)
>>> list(le.inverse_transform([2, 2, 1]))
['tokyo', 'tokyo', 'paris']
"""
def _check_fitted(self):
if not hasattr(self, "classes_"):
raise ValueError("LabelEncoder was not fitted yet.
|
")
def fit(self, y):
"""Fit label encoder
Parameters
----------
y : array-like of shape (n_samples,)
Target values.
Returns
-------
self : returns an instance of self.
"""
y = column_or_1d(y, warn=True)
_check_numpy_unicode_bug(y)
self.classes_ = np.unique(y)
return self
def fit_transform(self, y):
"""Fit label encoder and return encoded labels
Parameters
----------
y : array-like of shape [n_samples]
Target values.
Returns
-------
y : array-like of shape [n_samples]
"""
y = column_or_1d(y, warn=True)
_check_numpy_unicode_bug(y)
self.classes_, y = np.unique(y, return_inverse=True)
return y
def transform(self, y):
"""Transform labels to normalized encoding.
Parameters
----------
y : array-like of shape [n_samples]
Target values.
Returns
-------
y : array-like of shape [n_samples]
"""
self._check_fitted()
classes = np.unique(y)
_check_numpy_unicode_bug(classes)
if len(np.intersect1d(classes, self.classes_)) < len(classes):
diff = np.setdiff1d(classes, self.classes_)
raise ValueError("y contains new labels: %s" % str(diff))
return np.searchsorted(self.classes_, y)
def inverse_transform(self, y):
"""Transform labels back to original encoding.
Parameters
----------
y : numpy array of shape [n_samples]
Target values.
Returns
-------
y : numpy array of shape [n_samples]
"""
self._check_fitted()
y = np.asarray(y)
return self.classes_[y]
class LabelBinarizer(BaseEstimator, TransformerMixin):
"""Binarize labels in a one-vs-all fashion
Several regression and binary classification algorithms are
available in the scikit. A simple way to extend these algorithms
to the multi-class classification case is to use the so-called
one-vs-all scheme.
At learning time, this simply consists in learning one regressor
or binary classifier per class. In doing so, one needs to convert
multi-class labels to binary labels (belong or does not belong
to the class). LabelBinarizer makes this process easy with the
transform method.
At prediction time, one assigns the class for which the corresponding
model gave the greatest confidence. LabelBinarizer makes this easy
with the inverse_transform method.
Parameters
----------
neg_label : int (default: 0)
Value with which negative labels must be encoded.
pos_label : int (default: 1)
Value with which positive labels must be encoded.
sparse_output : boolean (default: False)
True if the returned array from transform is desired to be in sparse
CSR format.
Attributes
----------
classes_ : array of shape [n_class]
Holds the label for each class.
y_type_ : str,
Represents the type of the target data as evaluated by
utils.multiclass.type_of_target. Possible type are 'continuous',
'continuous-multioutput', 'binary', 'multiclass',
        'multiclass-multioutput', 'multilabel-sequences',
'multilabel-indicator', and 'unknown'.
multilabel_ : boolean
True if the transformer was fitted on a multilabel rather than a
multiclass set of labels. The multilabel_ attribute is deprecated
and will be removed in 0.18
sparse_input_ : boolean,
True if the input data to transform is given as a sparse matrix, False
otherwise.
indicator_matrix_ : str
        'sparse' when the input data to transform is a multilabel-indicator and
is sparse, None otherwise. The indicator_matrix_ attribute is
deprecated as of version 0.16 and will be removed in 0.18
Examples
--------
>>> from sklearn import preprocessing
>>> lb = preprocessing.LabelBinarizer()
>>> lb.fit([1, 2, 6, 4, 2])
LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False)
>>> lb.classes_
array([1, 2, 4, 6])
>>> lb.transform([1, 6])
array([[1, 0, 0, 0],
[0, 0, 0, 1]])
Binary targets transform to a column vector
>>> lb = preprocessing.LabelBinarizer()
>>> lb.fit_transform(['yes', 'no', 'no', 'yes'])
array([[1],
[0],
[0],
[1]])
Passing a 2D matrix for multilabel classification
>>> import numpy as np
>>> lb.fit(np.array([[0, 1, 1], [1, 0, 0]]))
LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False)
>>> lb.classes_
array([0, 1, 2])
>>> lb.transform([0, 1, 2, 1])
array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0]])
See also
--------
label_binarize : function to perform the transform operation of
LabelBinarizer with fixed classes.
"""
def __init__(self, neg_label=0, pos_label=1, sparse_output=False):
if neg_label >= pos_label:
raise ValueError("neg_label={0} must be strictly less than "
"pos_label={1}.".format(neg_label, pos_label))
if sparse_output and (pos_label == 0 or neg_label != 0):
raise ValueError("Sparse binarization is only support
|
mhaessig/servo
|
tests/wpt/web-platform-tests/tools/webdriver/webdriver/transport.py
|
Python
|
mpl-2.0
| 3,140
| 0.001274
|
import httplib
import json
import urlparse
import error
class Response(object):
"""Describes an HTTP response received from a remote en"Describes an HTTP
response received from a remote end whose body has been read and parsed as
appropriate."""
def __init__(self, status, body):
self.status = status
self.body = body
def __repr__(self):
return "wdclient.Response(status=%d, body=%s)" % (self.status, self.body)
@classmethod
def from_http_response(cls, http_response):
status = http_response.status
body = http_response.read()
# SpecID: dfn-send-a-response
#
# > 3. Set the response's header with name and value with the following
# > values:
# >
# > "Content-Type"
# > "application/json; charset=utf-8"
# > "cache-control"
        # >       "no-cache"
        if body:
            try:
                body = json.loads(body)
            except:
                raise error.UnknownErrorException("Failed to decode body as json:\n%s" % body)
        return cls(status, body)
class ToJsonEncoder(json.JSONEncoder):
def default(self, obj):
return getattr(obj.__class__, "json", json.JSONEncoder().default)(obj)
class HTTPWireProtocol(object):
"""Transports messages (commands and responses) over the WebDriver
wire protocol.
"""
def __init__(self, host, port, url_prefix="/", timeout=None):
"""Construct interface for communicating with the remote server.
        :param host: Host of the remote WebDriver server.
        :param port: Port of the remote WebDriver server.
        :param url_prefix: Path prefix under which the server's endpoints live.
        :param timeout: Optional timeout in seconds for HTTP requests.
"""
self.host = host
self.port = port
self.url_prefix = url_prefix
self._timeout = timeout
def url(self, suffix):
return urlparse.urljoin(self.url_prefix, suffix)
def send(self, method, uri, body=None, headers=None):
"""Send a command to the remote.
:param method: `GET`, `POST`, or `DELETE`.
:param uri: Relative endpoint of the requests URL path.
:param body: Body of the request. Defaults to an empty
dictionary if ``method`` is `POST`.
:param headers: Additional headers to include in the request.
:return: Instance of ``wdclient.Response`` describing the
HTTP response received from the remote end.
"""
if body is None and method == "POST":
body = {}
if isinstance(body, dict):
body = json.dumps(body, cls=ToJsonEncoder)
if isinstance(body, unicode):
body = body.encode("utf-8")
if headers is None:
headers = {}
url = self.url(uri)
kwargs = {}
if self._timeout is not None:
kwargs["timeout"] = self._timeout
conn = httplib.HTTPConnection(
self.host, self.port, strict=True, **kwargs)
conn.request(method, url, body, headers)
try:
response = conn.getresponse()
return Response.from_http_response(response)
finally:
conn.close()
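# A minimal usage sketch (host, port, and endpoint are assumptions; a
# WebDriver-compatible server must be listening for this to succeed):
if __name__ == "__main__":
    client = HTTPWireProtocol("localhost", 4444, url_prefix="/")
    resp = client.send("POST", "session", body={"capabilities": {}})
    print(resp.status)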
|
googleapis/python-tasks
|
samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_get_task_async.py
|
Python
|
apache-2.0
| 1,437
| 0.000696
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTask
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-tasks
# [START cloudtasks_v2beta2_generated_CloudTasks_GetTask_async]
from google.cloud import tasks_v2beta2
async def sample_get_task():
# Create a client
client = tasks_v2beta2.CloudTasksAsyncClient()
# Initialize request argument(s)
request = tasks_v2beta2.GetTaskRequest(
name="name_value",
)
# Make the request
response = await client.get_task(request=request)
# Handle the response
print(response)
# [END cloudtasks_v2beta2_generated_CloudTasks_GetTask_async]
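# The snippet above only defines the coroutine; a minimal driver (not part
# of the generated code, and requiring installed credentials) could be:
if __name__ == "__main__":
    import asyncio
    asyncio.run(sample_get_task())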
|
openstack/cinder
|
cinder/tests/unit/policies/test_default_volume_types.py
|
Python
|
apache-2.0
| 12,078
| 0.000662
|
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from http import HTTPStatus
from unittest import mock
import uuid
import ddt
from webob import exc
from cinder.api import api_utils
from cinder.api import microversions as mv
from cinder.api.v3 import default_types
from cinder import db
from cinder.policies import default_types as default_type_policies
from cinder.tests.unit.api import fakes as fake_api
from cinder.tests.unit import fake_constants
from cinder.tests.unit.policies import base
from cinder.tests.unit.policies import test_base
from cinder.tests.unit import utils as test_utils
class FakeProject(object):
def __init__(self, id=None, name=None):
if id:
self.id = id
else:
self.id = uuid.uuid4().hex
self.name = name
self.description = 'fake project description'
self.domain_id = 'default'
class DefaultVolumeTypesPolicyTests(test_base.CinderPolicyTests):
class FakeDefaultType:
project_id = fake_constants.PROJECT_ID
volume_type_id = fake_constants.VOLUME_TYPE_ID
def setUp(self):
super(DefaultVolumeTypesPolicyTests, self).setUp()
self.volume_type = self._create_fake_type(self.admin_context)
self.project = FakeProject()
# Need to mock out Keystone so the functional tests don't require other
# services
_keystone_client = mock.MagicMock()
_keystone_client.version = 'v3'
_keystone_client.projects.get.side_effect = self._get_project
_keystone_client_get = mock.patch(
'cinder.api.api_utils._keystone_client',
lambda *args, **kwargs: _keystone_client)
_keystone_client_get.start()
self.addCleanup(_keystone_client_get.stop)
def _get_project(self, project_id, *args, **kwargs):
return self.project
def test_system_admin_can_set_default(self):
system_admin_context = self.system_admin_context
path = '/v3/default-types/%s' % system_admin_context.project_id
body = {
'default_type':
{"volume_type": self.volume_type.id}
}
response = self._get_request_response(system_admin_context,
path, 'PUT', body=body,
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.OK, response.status_int)
def test_project_admin_can_set_default(self):
admin_context = self.admin_context
path = '/v3/default-types/%s' % admin_context.project_id
body = {
'default_type':
{"volume_type": self.volume_type.id}
}
response = self._get_request_response(admin_context,
path, 'PUT', body=body,
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.OK, response.status_int)
@mock.patch.object(db, 'project_default_volume_type_get',
return_value=FakeDefaultType())
def test_system_admin_can_get_default(self, mock_default_get):
system_admin_context = self.system_admin_context
path = '/v3/default-types/%s' % system_admin_context.project_id
response = self._get_request_response(system_admin_context,
path, 'GET',
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.OK, response.status_int)
def test_project_admin_can_get_default(self):
admin_context = self.admin_context
path = '/v3/default-types/%s' % admin_context.project_id
body = {
'default_type':
{"volume_type": self.volume_type.id}
}
self._get_request_response(admin_context,
path, 'PUT', body=body,
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
path = '/v3/default-types/%s' % admin_context.project_id
response = self._get_request_response(admin_context,
path, 'GET',
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.OK, response.status_int)
def test_system_admin_can_get_all_default(self):
system_admin_context = self.system_admin_context
path = '/v3/default-types'
response = self._get_request_response(system_admin_context,
path, 'GET',
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.OK, response.status_int)
def test_system_admin_can_unset_default(self):
system_admin_context = self.system_admin_context
        path = '/v3/default-types/%s' % system_admin_context.project_id
        response = self._get_request_response(system_admin_context,
path, 'DELETE',
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.NO_CONTENT, response.status_int)
def test_project_admin_can_unset_default(self):
admin_context = self.admin_context
path = '/v3/default-types/%s' % admin_context.project_id
response = self._get_request_response(admin_context,
path, 'DELETE',
microversion=
mv.DEFAULT_TYPE_OVERRIDES)
self.assertEqual(HTTPStatus.NO_CONTENT, response.status_int)
@ddt.ddt
class DefaultVolumeTypesPolicyTest(base.BasePolicyTest):
authorized_admins = [
'system_admin',
'legacy_admin',
'project_admin',
]
unauthorized_admins = [
'legacy_owner',
'system_member',
'system_reader',
'system_foo',
'project_member',
'project_reader',
'project_foo',
'other_project_member',
'other_project_reader',
]
# Basic policy test is without enforcing scope (which cinder doesn't
# yet support) and deprecated rules enabled.
def setUp(self, enforce_scope=False, enforce_new_defaults=False,
*args, **kwargs):
super().setUp(enforce_scope, enforce_new_defaults, *args, **kwargs)
self.controller = default_types.DefaultTypesController()
self.api_path = '/v3/default-types/%s' % (self.project_id)
self.api_version = mv.DEFAULT_TYPE_OVERRIDES
def _create_volume_type(self):
vol_type = test_utils.create_volume_type(self.project_admin_context,
name='fake_vol_type',
testcase_instance=self)
return vol_type
@ddt.data(*base.all_users)
@mock.patch.object(api_utils, 'get_project')
def test_default_type_set_policy(self, user_id, fake_project):
vol_type = self._create_volume_type()
fake_project.return_value = FakeProject(id=self.project_id)
rule_name = default_type_policies.CREATE_UPDATE_POLICY
url = self.api_path
|
syncloud/platform
|
src/test/snap/test_snap.py
|
Python
|
gpl-3.0
| 1,740
| 0.000575
|
from syncloud_platform.snap.models import App, AppVersions
from syncloud_platform.snap.snap import join_apps
def test_join_apps():
installed_app1 = App()
installed_app1.id = 'id1'
installed_app_version1 = AppVersions()
installed_app_version1.installed_version = 'v1'
installed_app_version1.current_version = None
installed_app_version1.app = installed_app1
installed_app2 = App()
installed_app2.id = 'id2'
installed_app_version2 = AppVersions()
    installed_app_version2.installed_version = 'v1'
installed_app_version2.current_version = None
installed_app_version2.app = installed_app2
installed_apps = [installed_app_version1, installed_app_version2]
store_app2 = App()
store_app2.id = 'id2'
    store_app_version2 = AppVersions()
store_app_version2.installed_version = None
store_app_version2.current_version = 'v2'
store_app_version2.app = store_app2
store_app3 = App()
store_app3.id = 'id3'
store_app_version3 = AppVersions()
store_app_version3.installed_version = None
store_app_version3.current_version = 'v2'
store_app_version3.app = store_app3
store_apps = [store_app_version2, store_app_version3]
all_apps = sorted(join_apps(installed_apps, store_apps), key=lambda app: app.app.id)
assert len(all_apps) == 3
assert all_apps[0].app.id == 'id1'
assert all_apps[0].installed_version == 'v1'
assert all_apps[0].current_version is None
assert all_apps[1].app.id == 'id2'
assert all_apps[1].installed_version == 'v1'
assert all_apps[1].current_version == 'v2'
assert all_apps[2].app.id == 'id3'
assert all_apps[2].installed_version is None
assert all_apps[2].current_version == 'v2'
|
plaidml/plaidml
|
plaidbench/plaidbench/cli.py
|
Python
|
apache-2.0
| 3,879
| 0.002578
|
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import click
from . import core
def _find_frontends():
result = {}
prefix = 'frontend_'
suffix = '.py'
dirname = os.path.dirname(__file__)
for fname in os.listdir(dirname):
if fname.startswith(prefix) and fname.endswith(suffix):
result[fname[len(prefix):-len(suffix)]] = os.path.join(dirname, fname)
return result
_FRONTENDS = _find_frontends()
def _get_frontend_mod(name):
try:
fname = _FRONTENDS[name]
except KeyError:
return None
mod = {'__file__': fname}
with open(fname) as f:
code = compile(f.read(), fname, 'exec')
eval(code, mod)
return mod
class _PlaidbenchCommand(click.MultiCommand):
def list_commands(self, ctx):
return _FRONTENDS.keys()
def get_command(self, ctx, name):
return _get_frontend_mod(name)['cli']
@click.command(cls=_PlaidbenchCommand)
@click.option('-v', '--verbose', count=True)
@click.option('-n',
              '--examples',
              type=int,
              default=None,
              help='Number of examples to use (over all epochs)')
@click.option(
'--blanket-run',
is_flag=True,
help='Run all networks at a range of batch sizes, ignoring the --batch-size and --examples '
'options and the choice of network.')
@click.option('--results',
type=click.Path(exists=False, file_okay=False, dir_okay=True),
default=os.path.join(tempfile.gettempdir(), 'plaidbench_results'),
help='Destination directory for results output')
@click.option('--callgrind/--no-callgrind',
default=False,
help='Invoke callgrind during timing runs')
@click.option('--epochs', type=int, default=1, help="Number of epochs per test")
@click.option('--batch-size', type=int, default=1)
@click.option('--timeout-secs', type=int, default=None)
@click.option('--warmup/--no-warmup', default=True, help='Do warmup runs before main timing')
@click.option('--kernel-timing/--no-kernel-timing', default=True, help='Emit kernel timing info')
@click.option('--print-stacktraces/--no-print-stacktraces',
default=False,
help='Print a stack trace if an exception occurs')
@click.pass_context
def plaidbench(ctx, verbose, examples, blanket_run, results, callgrind, epochs, batch_size,
timeout_secs, warmup, print_stacktraces, kernel_timing):
"""PlaidML Machine Learning Benchmarks
plaidbench runs benchmarks for a variety of ML framework, framework backend,
and neural network combinations.
For more information, see http://www.github.com/plaidml/plaidbench
"""
runner = ctx.ensure_object(core.Runner)
if blanket_run:
runner.param_builder = core.BlanketParamBuilder(epochs)
runner.reporter = core.BlanketReporter(os.path.expanduser(results))
runner.reporter.configuration['train'] = False
else:
runner.param_builder = core.ExplicitParamBuilder(batch_size, epochs, examples)
runner.reporter = core.ExplicitReporter(results)
runner.verbose = verbose
runner.callgrind = callgrind
runner.warmup = warmup
runner.kernel_timing = kernel_timing
runner.print_stacktraces = print_stacktraces
runner.timeout_secs = timeout_secs
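# No entry point is defined in this module; a conventional click driver
# (an assumption, the real entry point may live in setup.py) would be:
if __name__ == '__main__':
    plaidbench()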
|
hcarvalhoalves/SublimeHaskell
|
util.py
|
Python
|
mit
| 2,159
| 0.012043
|
import sublime
if int(sublime.version()) < 3000:
import ghci
import ghcmod
import haskell_docs
import hdevtools
import sublime_haskell_common as common
import symbols
else:
import SublimeHaskell.ghci as ghci
import SublimeHaskell.ghcmod as ghcmod
import SublimeHaskell.haskell_docs as haskell_docs
import SublimeHaskell.hdevtools as hdevtools
    import SublimeHaskell.sublime_haskell_common as common
import SublimeHaskell.symbols as symbols
def symbol_info(filename, module_name, symbol_name, cabal = None, no_ghci = False):
result = None
if hdevtools.hdevtools_enabled():
result = hdevtools.hdevtools_info(filename, symbol_name, cabal = cabal)
if not result and ghcmod.ghcmod_enabled():
result = ghcmod.ghcmod_info(filename, module_name, symbol_name, cabal = cabal)
if not result and not filename and not no_ghci:
result = ghci.ghci_info(module_name, symbol_name, cabal = cabal)
return result
def load_docs(decl):
"""
Tries to load docs for decl
"""
if decl.docs is None:
decl.docs = haskell_docs.haskell_docs(decl.module.name, decl.name)
def refine_type(decl, no_ghci = True):
"""
Refine type for sources decl
"""
if decl.location:
if decl.what == 'function' and not decl.type:
info = symbol_info(decl.location.filename, decl.module.name, decl.name, None, no_ghci = no_ghci)
if info:
decl.type = info.type
def refine_decl(decl):
"""
Refine decl information.
"""
# Symbol from cabal, try to load detailed info with ghci
if not decl.location:
load_docs(decl)
if decl.what == 'declaration':
decl_detailed = ghci.ghci_info(decl.module.name, decl.name)
if decl_detailed:
decl.__dict__.update(decl_detailed.__dict__)
# Symbol from sources, concrete type if it's not specified
else:
refine_type(decl, False)
def browse_module(module_name, cabal = None):
"""
Returns symbols.Module with all declarations
"""
return ghcmod.ghcmod_browse_module(module_name, cabal = cabal)
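# A minimal usage sketch (the file, module and symbol names are placeholders;
# this only works inside Sublime Text with hdevtools or ghc-mod configured):
#
#   info = symbol_info('src/Main.hs', 'Main', 'main')
#   if info:
#       print(info.type)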
|
tanium/pytan
|
BUILD/doc/source/examples/export_resultset_csv_sensor_false_code.py
|
Python
|
mit
| 3,040
| 0.002303
|
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443" # optional
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["export_format"] = u'csv'
kwargs["header_add_sensor"] = False
# setup the arguments for handler.ask()
ask_kwargs = {
'qtype': 'manual',
'sensors': [
"Computer Name", "IP Route Details", "IP Address",
'Folder Contents{folderPath=C:\Program Files}',
],
}
# ask the question that will provide the resultset that we want to use
print "...CALLING: handler.ask() with args {}".format(ask_kwargs)
response = handler.ask(**ask_kwargs)
# store the resultset object as the obj we want to export into kwargs
kwargs['obj'] = response['question_results']
# export the object to a string
# (we could just as easily export to a file using export_to_report_file)
print "...CALLING: handler.export_obj() with args {}".format(kwargs)
out = handler.export_obj(**kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
out = out.splitlines()[0:15]
out.append('..trimmed for brevity..')
out = '\n'.join(out)
print "...OUTPUT: print the export_str returned from export_obj():"
print out
|
connect1ngdots/AppHtmlME
|
AppHtmlME.workflow/Scripts/apphtml_settings.py
|
Python
|
mit
| 1,540
| 0.00411
|
# vim: fileencoding=utf-8
"""
AppHtml settings
@author Toshiya NISHIO(http://www.toshiya240.com)
"""
defaultTemplate = {
'1) 小さいボタン': '${badgeS}',
'2) 大きいボタン': '${badgeL}',
'3) テキストのみ': '${textonly}',
"4) アイコン付き(小)": u"""<span class="appIcon"><img class="appIconImg" height="60" src="${icon60url}" style="float:left;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeS" style="display:inline-block; margin:6px">${badgeS}</span><br style="clear:both;">
""",
"5) アイコン付き(大)": u"""<span class="appIcon"><img class="appIconImg" height="100" src="${icon100url}" style="float:left;;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeL" style="display:i
|
nline-block; margin:4px">${badgeL}</span><br style="clear:both;">
"""
}
settings = {
'phg': "",
'cnt': 8,
'scs': {
'iphone': 320,
'ipad': 320,
'mac': 480
},
'template': {
'software': defaultTemplate,
'iPadSoftware': defaultTemplate,
'macSoftware': defaultTemplate,
'song': defaultTemplate,
'album': defaultTemplate,
'movie': defaultTemplate,
        'ebook': defaultTemplate
}
}
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/cblas/package.py
|
Python
|
lgpl-2.1
| 2,365
| 0
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cblas(Package):
"""The BLAS (Basic Linear Algebra Subprograms) are routines that
provide standard building blocks for performing basic vector and
matrix operations."""
homepage = "http://www.netlib.org/blas/_cblas/"
# tarball has no version, but on the date below, this MD5 was correct.
version('2015-06-06', '1e8830f622d2112239a4a8a83b84209a',
url='http://www.netlib.org/blas/blast-forum/cblas.tgz')
    depends_on('blas')
    parallel = False
def patch(self):
mf = FileFilter('Makefile.in')
mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' %
self.spec['blas'].prefix.lib)
mf.filter('^CC =.*', 'CC = cc')
mf.filter('^FC =.*', 'FC = f90')
def install(self, spec, prefix):
make('all')
mkdirp(prefix.lib)
mkdirp(prefix.include)
# Rename the generated lib file to libcblas.a
install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib)
install('./include/cblas.h', '%s' % prefix.include)
install('./include/cblas_f77.h', '%s' % prefix.include)
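# A minimal usage sketch (shell command, not Python; choosing openblas as
# the BLAS provider is an assumption, any Spack BLAS provider works):
#
#   $ spack install cblas ^openblas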
|
matrixise/epcon
|
microblog/dataaccess.py
|
Python
|
bsd-2-clause
| 5,734
| 0.007499
|
# -*- coding: UTF-8 -*-
from django.core.cache import cache
from django.db.models.signals import post_delete, post_save
import functools
import hashlib
WEEK = 7 * 24 * 60 * 60 # 1 week
def cache_me(key=None, ikey=None, signals=(), models=(), timeout=WEEK):
def hashme(k):
if isinstance(k, unicode):
k = k.encode('utf-8')
return hashlib.md5(k).hexdigest()
def decorator(f):
def invalidate(sender, **kwargs):
if ikey is None:
ks = (f.__name__,)
elif callable(ikey):
k = ikey(sender, **kwargs)
if isinstance(k, basestring):
ks = (k,)
else:
ks = k
else:
ks = (ikey,)
if ks:
cache.delete_many(map(hashme, ks))
if ikey or (ikey is None and key is None):
for s in signals:
s.connect(invalidate, weak=False)
for m in models:
post_save.connect(invalidate, sender=m, weak=False)
post_delete.connect(invalidate, sender=m, weak=False)
def _key(*args, **kwargs):
if key is None:
k = f.__name__
elif callable(key):
k = key(*args, **kwargs)
else:
k = key % args
return hashme(k)
@functools.wraps(f)
def wrapper(*args, **kwargs):
k = _key(*args, **kwargs)
data = cache.get(k)
if data is None:
data = f(*args, **kwargs)
cache.set(k, data, timeout)
return data
wrapper.cachekey = _key
return wrapper
return decorator
from collections import defaultdict
from django.conf import settings as dsettings
from django.core.urlresolvers import reverse
from microblog import models
from microblog import settings
from taggit.models import TaggedItem
import django_comments as comments
def _i_post_list(sender, **kw):
ks = []
for l in dsettings.LANGUAGES:
ks.append('m:post_list:%s' % l[0])
return ks
@cache_me(models=(models.Post,),
key='m:post_list:%s',
ikey=_i_post_list)
def post_list(lang):
qs = models.Post.objects\
.all()\
.byLanguage(lang)\
.order_by('-date')\
.select_related('category', 'author')
return list(qs)
@cache_me(models=(models.Post,))
def tag_map():
tmap = defaultdict(set)
items = TaggedItem.objects\
.filter(content_type__app_label='microblog', content_type__model='post')\
.select_related('tag')
for o in items:
tmap[o.object_id].add(o.tag)
return tmap
@cache_me(models=(models.Post,),
key = 'm:tagged_posts:%s',
ikey = 'm:tagged_posts:%s')
def tagged_posts(name):
"""
    returns the posts tagged with the given tag
"""
posts = TaggedItem.objects\
.filter(content_type__app_label='microblog', content_type__model='post')\
.filter(tag__name__iexact=name)\
.values_list('object_id', flat=True)
return set(posts)
def _i_post_data(sender, **kw):
if sender is models.Post:
pid = kw['instance'].id
elif sender is comments.get_model():
o = kw['instance']
if o.content_type.app_label == 'microblog' and o.content_type.model == 'post':
pid = o.object_pk
else:
pid = None
else:
pid = kw['instance'].post_id
ks = []
if pid:
for l in dsettings.LANGUAGES:
ks.append('m:post_data:%s%s' % (pid, l[0]))
return ks
@cache_me(models=(models.Post, models.PostContent, comments.get_model()),
key='m:post_data:%s%s',
ikey=_i_post_data)
def post_data(pid, lang):
post = models.Post.objects\
.select_related('author', 'category')\
.get(id=pid)
try:
content = post.content(lang=lang, fallback=True)
except models.PostContent.DoesNotExist:
content = None
comment_list = comments.get_model().objects\
.filter(content_type__app_label='microblog', content_type__model='post')\
.filter(object_pk=pid, is_public=True)
burl = models.PostContent.build_absolute_url(post, content)
return {
'post': post,
'content': content,
        'url': dsettings.DEFAULT_URL_PREFIX + reverse(burl[0], args=burl[1], kwargs=burl[2]),
'comments': list(comment_list),
'tags': list(post.tags.all()),
}
def _i_get_reactions(sender, **kw):
if sender is models.Trackback:
return 'm:reaction:%s' % kw['instance'].content_id
else:
return 'm:reaction:%s' % kw['instance'].object_id
if settings.MICROBLOG_PINGBACK_SERVER:
deco = cache_me(models=(models.Trackback,),
key='m:reactions:%s',
ikey=_i_get_reactions)
else:
from pingback.models import Pingback
deco = cache_me(models=(models.Trackback, Pingback),
key='m:reactions:%s',
ikey=_i_get_reactions)
@deco
def get_reactions(cid):
trackbacks = models.Trackback.objects.filter(content=cid)
if settings.MICROBLOG_PINGBACK_SERVER:
from pingback.models import Pingback
        # Unfortunately the pingbacks_for_object method wants an object, not an id
content = models.PostContent.objects.get(id=cid)
pingbacks = Pingback.objects.pingbacks_for_object(content).filter(approved=True)
else:
pingbacks = []
reactions = sorted(list(trackbacks) + list(pingbacks), key=lambda r: r.date, reverse=True)
    # normalize the reactions, making sure each one has an excerpt
for ix, r in enumerate(reactions):
if not hasattr(r, 'excerpt'):
r.excerpt = r.content
return reactions
|
gitlitz/pygame-with-interpreter
|
drawable.py
|
Python
|
gpl-3.0
| 750
| 0.064
|
from static import tools
class DrawAble(object):
def __init__(self,image,position,zIndex=0,activated=True):
self.image=image
self.position=position
self._zIndex=zIndex
        self.__activated=None
        self.activated=activated
    def __del__(self):
self.activated=False
#zindex
def __getZIndex(self):
return self._zIndex
zIndex=property(__getZIndex)
#enabled
def _disable(self):
tools.spritebatch.remove(self)
def _enable(self):
tools.spritebatch.add(self)
def __setActivated(self,b):
if self.__activated!=b:
self.__activated=b
if b:
self._enable()
else:
self._disable()
def __getActivated(self):
return self.__activated
activated=property(__getActivated,__setActivated)
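# A minimal usage sketch (the image value is a placeholder; tools.spritebatch
# must already be initialised by the game before DrawAble is instantiated):
if __name__ == "__main__":
    d = DrawAble(image=None, position=(0, 0), zIndex=1)
    d.activated = False  # setting the property removes it from the sprite batch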
|
Elvirita/reposelvira
|
elviraae/ec/edu/itsae/conn/DBcon.py
|
Python
|
gpl-2.0
| 604
| 0.006623
|
#-*- coding:utf-8 -*-
'''
Created on 18/2/2015
@author: PC06
'''
from flaskext.mysql import MySQL
from flask import Flask
class DBcon():
'''
classdocs
'''
def __init__(self):
'''
Constructor
'''
pass
def conexion(self):
mysql = MySQL()
app = Flask(__name__)
app.config['MYSQL_DATABASE_USER'] = 'python'
app.config['MYSQL_DATABASE_PASSWORD'] = '123456'
app.config['MYSQL_DATABASE_DB'] = 'ventas'
app.config['MYSQL_DATABASE_HOST'] = 'localhost'
mysql.init_app(app)
return mysql
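# A minimal usage sketch (that connect() returns a DB-API connection with a
# cursor() method is an assumption about flask-mysql's API):
if __name__ == "__main__":
    mysql = DBcon().conexion()
    cursor = mysql.connect().cursor()
    cursor.execute("SELECT 1")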
|
juju-solutions/charms.reactive
|
charms/reactive/flags.py
|
Python
|
apache-2.0
| 9,936
| 0.000805
|
from charmhelpers.cli import cmdline
from charmhelpers.core import hookenv
from charmhelpers.core import unitdata
from charms.reactive.bus import FlagWatch
from charms.reactive.trace import tracer
__all__ = [
'set_flag',
'clear_flag',
'toggle_flag',
'register_trigger',
'is_flag_set',
'all_flags_set',
'any_flags_set',
'get_flags',
'get_unset_flags',
'set_state', # DEPRECATED
'remove_state', # DEPRECATED
'toggle_state', # DEPRECATED
'is_state', # DEPRECATED
'all_states', # DEPRECATED
'get_states', # DEPRECATED
'any_states', # DEPRECATED
]
TRIGGER_CALLBACKS = {}
class State(str):
"""
.. deprecated:: 0.5.0
Use flag instead
A reactive state that can be set.
States are essentially just strings, but this class should be used to enable them
to be discovered and introspected, for documentation, composition, or linting.
This should be used with :class:`StateList`.
"""
pass
class StateList(object):
"""
.. deprecated:: 0.5.0
use flag instead
Base class for a set of states that can be set by a relation or layer.
This class should be used so that they can be discovered and introspected,
for documentation, composition, or linting.
Example usage::
        class MyRelation(RelationBase):
            class states(StateList):
                connected = State('{relation_name}.connected')
                available = State('{relation_name}.available')
"""
pass
@cmdline.subcommand()
@cmdline.no_output
def set_flag(flag, value=None):
"""set_flag(flag)
Set the given flag as active.
:param str flag: Name of flag to set.
.. note:: **Changes to flags are reset when a handler crashes.** Changes to
flags happen immediately, but they are only persisted at the end of a
complete and successful run of the reactive framework. All unpersisted
changes are discarded when a hook crashes.
"""
old_flags = get_flags()
unitdata.kv().update({flag: value}, prefix='reactive.states.')
if flag not in old_flags:
tracer().set_flag(flag)
FlagWatch.change(flag)
trigger = _get_trigger(flag, None)
for flag_name in trigger['set_flag']:
set_flag(flag_name)
for flag_name in trigger['clear_flag']:
clear_flag(flag_name)
for callback in trigger['callbacks']:
callback()
@cmdline.subcommand()
@cmdline.no_output
def clear_flag(flag):
"""
Clear / deactivate a flag.
:param str flag: Name of flag to set.
.. note:: **Changes to flags are reset when a handler crashes.** Changes to
flags happen immediately, but they are only persisted at the end of a
complete and successful run of the reactive framework. All unpersisted
changes are discarded when a hook crashes.
"""
old_flags = get_flags()
unitdata.kv().unset('reactive.states.%s' % flag)
unitdata.kv().set('reactive.dispatch.removed_state', True)
if flag in old_flags:
tracer().clear_flag(flag)
FlagWatch.change(flag)
trigger = _get_trigger(None, flag)
for flag_name in trigger['set_flag']:
set_flag(flag_name)
for flag_name in trigger['clear_flag']:
clear_flag(flag_name)
for callback in trigger['callbacks']:
callback()
@cmdline.subcommand()
@cmdline.no_output
def toggle_flag(flag, should_set):
"""
Helper that calls either :func:`set_flag` or :func:`clear_flag`,
depending on the value of `should_set`.
Equivalent to::
if should_set:
set_flag(flag)
else:
clear_flag(flag)
:param str flag: Name of flag to toggle.
:param bool should_set: Whether to set the flag, or clear it.
.. note:: **Changes to flags are reset when a handler crashes.** Changes to
flags happen immediately, but they are only persisted at the end of a
complete and successful run of the reactive framework. All unpersisted
changes are discarded when a hook crashes.
"""
if should_set:
set_flag(flag)
else:
clear_flag(flag)
@cmdline.subcommand()
@cmdline.no_output
def register_trigger(when=None, when_not=None, set_flag=None, clear_flag=None, callback=None):
"""
Register a trigger to set or clear a flag when a given flag is set.
Note: Flag triggers are handled at the same time that the given flag is set.
:param str when: Flag to trigger on when it is set.
:param str when_not: Flag to trigger on when it is cleared.
:param str set_flag: If given, this flag will be set when the relevant flag is changed.
:param str clear_flag: If given, this flag will be cleared when the relevant flag is changed.
:param str callback: If given, this callback will be invoked when the relevant flag is changed.
Note: Exactly one of either `when` or `when_not`, and at least one of
`set_flag`, `clear_flag`, or `callback` must be provided.
"""
if not any((when, when_not)):
raise ValueError('Must provide one of when or when_not')
if all((when, when_not)):
raise ValueError('Only one of when or when_not can be provided')
if not any((set_flag, clear_flag, callback)):
raise ValueError('Must provide at least one of set_flag, clear_flag, or callback')
trigger = _get_trigger(when, when_not)
if set_flag and set_flag not in trigger['set_flag']:
trigger['set_flag'].append(set_flag)
if clear_flag and clear_flag not in trigger['clear_flag']:
trigger['clear_flag'].append(clear_flag)
if callback and callback not in trigger['callbacks']:
trigger['callbacks'].append(callback)
_save_trigger(when, when_not, trigger)
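# A minimal usage sketch for register_trigger (the flag names are invented
# for illustration):
#
#   register_trigger(when='db.connected', set_flag='app.ready')
#   set_flag('db.connected')   # 'app.ready' is now set as well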
def _get_trigger(when, when_not):
if when is not None:
key = 'reactive.flag_set_triggers.{}'.format(when)
elif when_not is not None:
key = 'reactive.flag_clear_triggers.{}'.format(when_not)
triggers = unitdata.kv().get(key, {
'set_flag': [],
'clear_flag': [],
})
triggers['callbacks'] = TRIGGER_CALLBACKS.get(key, [])
return triggers
def _save_trigger(when, when_not, data):
if when is not None:
key = 'reactive.flag_set_triggers.{}'.format(when)
elif when_not is not None:
key = 'reactive.flag_clear_triggers.{}'.format(when_not)
TRIGGER_CALLBACKS[key] = data.pop('callbacks')
return unitdata.kv().set(key, data)
@cmdline.subcommand()
@cmdline.test_command
def is_flag_set(flag):
"""Assert that a flag is set"""
return any_flags_set(flag)
@cmdline.subcommand()
@cmdline.test_command
def all_flags_set(*desired_flags):
"""Assert that all desired_flags are set"""
active_flags = get_flags()
return all(flag in active_flags for flag in desired_flags)
@cmdline.subcommand()
@cmdline.test_command
def any_flags_set(*desired_flags):
"""Assert that any of the desired_flags are set"""
active_flags = get_flags()
return any(flag in active_flags for flag in desired_flags)
@cmdline.subcommand()
def get_flags():
"""
Return a list of all flags which are set.
"""
flags = unitdata.kv().getrange('reactive.states.', strip=True) or {}
return sorted(flags.keys())
@cmdline.subcommand()
def get_unset_flags(*desired_flags):
"""Check if any of the provided flags missing and return them if so.
:param desired_flags: list of reactive flags
:type desired_flags: non-keyword args, str
:returns: list of unset flags filtered from the parameters shared
:rtype: List[str]
"""
flags = unitdata.kv().getrange('reactive.states.', strip=True) or {}
return sorted(set(desired_flags) - flags.keys())
def _get_flag_value(flag, default=None):
return unitdata.kv().get('reactive.states.%s' % flag, default)
# DEPRECATED
@cmdline.subcommand()
@cmdline.no_output
def set_state(state, value=None):
"""
.. deprecated:: 0.5.0
Alias of :func:`set_flag`.
"""
set_flag(state, value)
@cmdline.subcommand()
@cmdline.no_output
def remove_state(state):
"""
.. deprecated:: 0.5.0
       Alias of :func:`clear_flag`.
    """
    clear_flag(state)
|
dsimandl/teamsurmandl
|
gallery/tasks.py
|
Python
|
mit
| 1,465
| 0.001365
|
import time
import zipfile
from io import BytesIO
from django.utils.image import Image as D_Image
from django.core.files.base import ContentFile
from celery import task
from .models import Image
@task
def upload_zip(to_upload):
print("In the zip!")
zip = zipfile.ZipFile(to_upload.zip_file)
bad_file = zip.testzip()
if bad_file:
zip.close()
raise Exception('"%s" in zip archive is corrupt' % bad_file)
count = 1
for file_name in sorted(zip.namelist()):
if file_name.startswith('__') or file_name.startswith('.'):
continue
data = zip.read(file_name)
if not len(data):
continue
try:
file = BytesIO(data)
opened = D_Image.open(file)
opened.verify()
except Exception:
raise Exception('"%s" is a bad image file' % format(file_name))
if not to_upload.title:
title = '_'.join([format(file_name), str(count)])
else:
title = '_'.join([to_upload.title, str(count)])
image = Image(title=title,
created=time.time(),
public=to_upload.public,
user=to_upload.user, )
content_file = ContentFile(data)
image.image.save(file_name, content_file)
image.save()
image.albums.add(to_upload.albums)
image.save()
count += 1
zip.close()
return "Zip file uploaded!!"
|
tensorflow/kfac
|
kfac/python/kernel_tests/periodic_inv_cov_update_kfac_opt_test.py
|
Python
|
apache-2.0
| 3,178
| 0.003776
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for l.d.tf.optimizers.python.PeriodicInvCovUpdateKfacOpt class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import sonnet as snt
import tensorflow.compat.v1 as tf
from kfac.python.ops import layer_collection
from kfac.python.ops.kfac_utils import periodic_inv_cov_update_kfac_opt
from kfac.python.ops.tensormatch import graph_search
_BATCH_SIZE = 128
def _construct_layer_collection(layers, all_logits, var_list):
for idx, logits in enumerate(all_logits):
tf.logging.info("Registering logits: %s", logits)
    with tf.variable_scope(tf.get_variable_scope(), reuse=(idx > 0)):
layers.register_categorical_predictive_distribution(
logits, name="register_logits")
batch_size = all_logits[0].shape.as_list()[0]
vars_to_register = var_list if var_list else tf.trainable_variables()
graph_search.register_layers(layers, vars_to_register, batch_size)
class PeriodicInvCovUpdateKfacOptTest(tf.test.TestCase):
def test_train(self):
image = tf.random_uniform(shape=(_BATCH_SIZE, 784), maxval=1.)
labels = tf.random_uniform(shape=(_BATCH_SIZE,), maxval=10, dtype=tf.int32)
labels_one_hot = tf.one_hot(labels, 10)
model = snt.Sequential([snt.BatchFlatten(), snt.nets.MLP([128, 128, 10])])
logits = model(image)
all_losses = tf.nn.softmax_cross_entropy_with_logits_v2(
logits=logits, labels=labels_one_hot)
loss = tf.reduce_mean(all_losses)
layers = layer_collection.LayerCollection()
optimizer = periodic_inv_cov_update_kfac_opt.PeriodicInvCovUpdateKfacOpt(
invert_every=10,
cov_update_every=1,
learning_rate=0.03,
cov_ema_decay=0.95,
damping=100.,
layer_collection=layers,
momentum=0.9,
num_burnin_steps=0,
placement_strategy="round_robin")
_construct_layer_collection(layers, [logits], tf.trainable_variables())
train_step = optimizer.minimize(loss)
counter = optimizer.counter
max_iterations = 50
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
coord = tf.train.Coordinator()
tf.train.start_queue_runners(sess=sess, coord=coord)
for iteration in range(max_iterations):
sess.run([loss, train_step])
counter_ = sess.run(counter)
self.assertEqual(counter_, iteration + 1.0)
if __name__ == "__main__":
tf.disable_v2_behavior()
tf.test.main()
|
qedsoftware/commcare-hq
|
corehq/apps/app_manager/views/multimedia.py
|
Python
|
bsd-3-clause
| 2,218
| 0.000451
|
from django.contrib import messages
from django.http import Http404, HttpResponse
from django.shortcuts import render
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.app_manager.decorators import require_deploy_apps, \
require_can_edit_apps
from corehq.apps.app_manager.xform import XForm
from corehq.util.view_utils import set_file_download
from dimagi.utils.logging import notify_exception
from dimagi.utils.subprocess_timeout import ProcessTimedOut
@require_can_edit_apps
def multimedia_list_download(request, domain, app_id):
    app = get_app(domain, app_id)
include_audio = request.GET.get("audio", True)
include_images = request.GET.get("images", True)
strip_jr = request.GET.get("strip_jr", True)
filelist = []
for m in app.get_modules():
for f in m.get_forms():
parsed = XForm(f.source)
parsed.validate()
if include_images:
filelist.extend(parsed.image_references)
if include_audio:
filelist.extend(parsed.audio_references)
if strip_jr:
filelist = [s.replace("jr://file/", "") for s in filelist if s]
response = HttpResponse()
set_file_download(response, 'list.txt')
response.write("\n".join(sorted(set(filelist))))
return response
@require_deploy_apps
def multimedia_ajax(request, domain, app_id, template='app_manager/v1/partials/multimedia_ajax.html'):
app = get_app(domain, app_id)
if app.get_doc_type() == 'Application':
try:
multimedia_state = app.check_media_state()
except ProcessTimedOut:
notify_exception(request)
messages.warning(request, (
"We were unable to check if your forms had errors. "
"Refresh the page and we will try again."
))
multimedia_state = {
'has_media': False,
'has_form_errors': True,
'has_missing_refs': False,
}
context = {
'multimedia_state': multimedia_state,
'domain': domain,
'app': app,
}
return render(request, template, context)
else:
raise Http404()
|
mrshelly/openerp71313
|
openerp/addons/hr_attendance/wizard/hr_attendance_error.py
|
Python
|
agpl-3.0
| 2,918
| 0.002742
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_attendance_error(osv.osv_memory):
_name = 'hr.attendance.error'
_description = 'Print Error Attendance Report'
_columns = {
'init_date': fields.date('Starting Date', required=True),
'end_date': fields.date('Ending Date', required=True),
'max_delay': fields.integer('Max. Delay (Min)', required=True)
}
_defaults = {
'init_date': lambda *a: time.strftime('%Y-%m-%d'),
        'end_date': lambda *a: time.strftime('%Y-%m-%d'),
'max_delay': 120,
}
def print_report(self, cr, uid, ids, context=None):
emp_ids = []
data_error = self.read(cr, uid, ids, context=context)[0]
date_from = data_error['init_date']
date_to = data_error['end_date']
cr.execute("SELECT id FROM hr
|
_attendance WHERE employee_id IN %s AND to_char(name,'YYYY-mm-dd')<=%s AND to_char(name,'YYYY-mm-dd')>=%s AND action IN %s ORDER BY name" ,(tuple(context['active_ids']), date_to, date_from, tuple(['sign_in','sign_out'])))
attendance_ids = [x[0] for x in cr.fetchall()]
if not attendance_ids:
raise osv.except_osv(_('No Data Available !'), _('No records are found for your selection!'))
attendance_records = self.pool.get('hr.attendance').browse(cr, uid, attendance_ids, context=context)
for rec in attendance_records:
if rec.employee_id.id not in emp_ids:
emp_ids.append(rec.employee_id.id)
data_error['emp_ids'] = emp_ids
datas = {
'ids': [],
'model': 'hr.employee',
'form': data_error
}
return {
'type': 'ir.actions.report.xml',
'report_name': 'hr.attendance.error',
'datas': datas,
}
hr_attendance_error()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
zhou13/qtunneler
|
qtunneler.py
|
Python
|
gpl-3.0
| 16,827
| 0.002853
|
#!/bin/env python3
"""
Copyright (C) 2014 by Yichao Zhou <broken.zhou AT gmail DOT com>
License: http://www.gnu.org/licenses/gpl.html GPL version 3 or higher
Any comments are welcome through email and github!
"""
import codecs
import re
import random
import os
import pexpect
import sys
import string
import time
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtCore import Qt, pyqtSignal
try:
from PyQt4.QtCore import QString
except ImportError:
QString = str
RETRY_DELAY = [ 5, 5, 10, 20, 60 ]
STATE_NOTCONNECTED = 0
STATE_CONNECTING = 1
STATE_CONNECTED = 2
class SSHThread(QtCore.QThread):
# signal
messageLogged = pyqtSignal(QString)
stateSwitched = pyqtSignal(int)
portOccupied = pyqtSignal()
answerCondition = QtCore.QWaitCondition()
SSH_UNKNOWN = 0
SSH_FAIL = 1
SSH_EXIT = 2
SSH_OK = 3
SSH_RETRY = 4
def __init__(self, app):
super(SSHThread, self).__init__()
self.app = app
self.retryCount = 0
def log(self, msg):
self.messageLogged.emit(msg)
def switchState(self, state):
self.stateSwitched.emit(state)
def wait(self, timeout=30):
"""Try to establish a connection and return current status
Args:
timeout: set the timeout of pexpect
Returns:
SSH_OK: This connection is successful.
SSH_FAIL: Connection failed due to some reason, retry.
            SSH_EXIT: Fatal error or user terminated the process.
                Thread should exit now.
SSH_UNKNOWN: SSH does not return enough information
"""
index = self.ssh.expect([
pexpect.TIMEOUT, #0
"ssh: connect t
|
o host", #1
"Permission denied (publickey)", #2
"The authenticity of host", #3
"s password: ", #4
pexpect.EOF, #5
"execing", #6
"connection ok", #7
"restarting ssh", #8
"channel_setup_fwd_listener: cannot listen to port", #9
"Address already in use", #10
], timeout=timeout)
if index == 0:
# Nothing happens
return self.SSH_UNKNOWN
elif index == 1:
return self.SSH_FAIL
elif index == 2:
self.log("Permission denied; Is your private key correct?")
return self.SSH_EXIT
elif index == 3:
# We need to prompt the user to accept the server's fingerprint
self.log("Please check server's fingerprint")
self.ssh.sendline("yes")
return self.wait(timeout=timeout)
elif index == 4:
# Send password
return self.sendPassword(timeout=timeout)
elif index == 5:
# Process terminated
self.log("Process terminated")
return self.SSH_EXIT
elif index == 6:
self.log("Connecting...")
return self.SSH_UNKNOWN
elif index == 7:
# connection ok
return self.SSH_OK
elif index == 8:
self.log("Connection is down. Please wait for reconnecting")
return self.SSH_FAIL
elif index == 9:
portNumber = self.app.socksPortEdit.text()
self.log("Cannot listen on SOCKS5 port {}".format(portNumber))
return self.SSH_EXIT
elif index == 10:
portNumber = self.app.monitorPort
self.log("Bad luck: monitor port is already in use".format(portNumber))
self.app.chooseMonitorPort()
return self.SSH_RETRY
def sendPassword(self, timeout):
self.ssh.sendline(self.app.passphraseLineEdit.text())
self.log("Password is sent")
index = self.ssh.expect([
pexpect.TIMEOUT, #0
"Permission denied", #1
"ssh: connect to host", #2
pexpect.EOF, #3
"connection ok", #4
], timeout=timeout)
if index == 0:
return self.SSH_UNKNOWN
elif index == 1:
self.log("Permission denied; Is your password correct?")
self.app.disconnect()
return self.SSH_EXIT
elif index == 2:
return self.SSH_FAIL
elif index == 3:
self.log("Process terminated")
return self.SSH_EXIT
elif index == 4:
return self.SSH_OK
def checkState(self, result):
"""Check the result of wait(), and handle it.
Returns:
True: if this thread should exit
False: otherwise
"""
if result == self.SSH_OK:
self.switchState(STATE_CONNECTED)
return False
elif result == self.SSH_EXIT:
self.switchState(STATE_NOTCONNECTED)
self.app.disconnect()
return True
elif result == self.SSH_UNKNOWN:
return False
elif result == self.SSH_FAIL:
# SSH_FAIL: Retry
self.switchState(STATE_CONNECTING)
return False
elif result == self.SSH_RETRY:
self.connect()
return False
def connect(self):
self.app.disconnect()
self.switchState(STATE_CONNECTING)
command = self.app.getSSHCommand()
env = self.app.getSSHEnv()
self.log(command)
self.ssh = pexpect.spawn(command, env=env)
def disconnect(self):
while hasattr(self, 'ssh') and not self.ssh.terminate(force=True):
pass
self.switchState(STATE_NOTCONNECTED)
def run(self):
self.connect()
first = True
while True:
state = self.wait(timeout=15)
if self.checkState(state):
return
if state == self.SSH_FAIL:
first = True
if state == self.SSH_OK and first:
first = False
self.log("Connection is established")
class QTunneler(QtGui.QWidget):
def __init__(self):
super(QTunneler, self).__init__()
self.initUI()
self.sshThread = SSHThread(self)
self.state = STATE_NOTCONNECTED
self.loadConfig()
self.sshThread.messageLogged.connect(self.log)
self.sshThread.stateSwitched.connect(self.switchState)
self.autoStartup.stateChanged.connect(self.saveConfig)
self.autoConnect.stateChanged.connect(self.saveConfig)
self.usingPolipo.stateChanged.connect(self.saveConfig)
self.remeberPassowrd.stateChanged.connect(self.saveConfig)
self.promptOnExit.stateChanged.connect(self.saveConfig)
def initUI(self): #{{{
expandingPolicy = QtGui.QSizePolicy.Expanding
vertical = QtGui.QVBoxLayout()
editLayout = QtGui.QGridLayout()
vertical.addLayout(editLayout)
# editLayout.setSpacing(5)
self.serverAddressLabel = QtGui.QLabel("Server Address:")
self.serverAddressLabel.setSizePolicy(expandingPolicy, expandingPolicy)
self.serverAddressLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
self.serverAddressLineEdit = QtGui.QLineEdit()
editLayout.addWidget(self.serverAddressLabel, 0, 0)
editLayout.addWidget(self.serverAddressLineEdit, 0, 1)
self.serverPortLabel = QtGui.QLabel("Server Port:")
self.serverPortLabel.setSizePolicy(expandingPolicy, expandingPolicy)
self.serverPortLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
self.serverPortLineEdit = QtGui.QLineEdit()
editLayout.addWidget(self.serverPortLabel, 0, 2)
editLayout.addWidget(self.serverPortLineEdit, 0, 3)
self.usernameLabel = QtGui.QLabel("Username:")
self.usernameLabel.setSizePolicy(expandingPolicy, expandingPolicy)
self.usernameLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
self.usernameLineEdit = QtGui.QL
|
TheOstrichIO/tomato-cmd
|
slugify.py
|
Python
|
apache-2.0
| 4,674
| 0.001498
|
# -*- coding: utf-8 -*-
"""
Copyright © Val Neekman ([Neekware Inc.](http://neekware.com))
[ info@neekware.com, [@vneekman](https://twitter.com/vneekman) ]
All rights reserved.
Redistribution and use in source and binary forms,
with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of this project nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
source: https://github.com/un33k/python-slugify
"""
__version__ = '0.0.7'
__all__ = ['slugify']
import re
import unicodedata
import types
import sys
from htmlentitydefs import name2codepoint
from unidecode import unidecode
# character entity reference
CHAR_ENTITY_REXP = re.compile('&(%s);' % '|'.join(name2codepoint))
# decimal character reference
DECIMAL_REXP = re.compile(r'&#(\d+);')
# hexadecimal character reference
HEX_REXP = re.compile(r'&#x([\da-fA-F]+);')
REPLACE1_REXP = re.compile(r'[\']+')
REPLACE2_REXP = re.compile(r'[^-a-z0-9]+')
REMOVE_REXP = re.compile('-{2,}')
def smart_truncate(string, max_length=0, word_boundaries=False, separator=' '):
""" Truncate a string """
string = string.strip(separator)
if not max_length:
return string
if len(string) < max_length:
return string
if not word_boundaries:
return string[:max_length].strip(separator)
if separator not in string:
return string[:max_length]
truncated = ''
for word in string.split(separator):
if word:
next_len = len(truncated) + len(word) + len(separator)
if next_len <= max_length:
truncated += '{0}{1}'.format(word, separator)
if not truncated:
truncated = string[:max_length]
return truncated.strip(separator)
def slugify(text, entities=True, decimal=True, hexadecimal=True, max_length=0,
word_boundary=False, separator='-'):
""" Make a slug from the given text """
# text to unicode
if type(text) != types.UnicodeType:
text = unicode(text, 'utf-8', 'ignore')
# decode unicode
text = unidecode(text)
# text back to unicode
if type(text) != types.UnicodeType:
text = unicode(text, 'utf-8', 'ignore')
# character entity reference
if entities:
text = CHAR_ENTITY_REXP.sub(lambda m:
unichr(name2codepoint[m.group(1)]), text)
# decimal character reference
if decimal:
try:
text = DECIMAL_REXP.sub(lambda m: unichr(int(m.group(1))), text)
except:
pass
# hexadecimal character reference
if hexadecimal:
try:
text = HEX_REXP.sub(lambda m: unichr(int(m.group(1), 16)), text)
except:
pass
# translate
text = unicodedata.normalize('NFKD', text)
if sys.version_info < (3,):
text = text.encode('ascii', 'ignore')
# replace unwanted characters
text = REPLACE1_REXP.sub('', text.lower())
# replace apostrophes with nothing rather than with '-'
text = REPLACE2_REXP.sub('-', text.lower())
# remove redundant -
text = REMOVE_REXP.sub('-', text).strip('-')
# smart truncate if requested
if max_length > 0:
text = smart_truncate(text, max_length, word_boundary, '-')
if separator != '-':
text = text.replace('-', separator)
return text
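# Usage sketch (hypothetical inputs; this module targets Python 2, so the
# results are unicode strings):
#
# slugify(u'Hello, World!') # -> u'hello-world'
# slugify(u'a b c', max_length=3, word_boundary=True) # -> u'a'
# slugify(u'Hello World', separator=u'_') # -> u'hello_world'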
|
reinforceio/tensorforce
|
tensorforce/environments/cartpole.py
|
Python
|
apache-2.0
| 8,953
| 0.002457
|
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
from tensorforce.environments import Environment
class CartPole(Environment):
"""
Based on OpenAI Gym version
(https://github.com/openai/gym/blob/master/gym/envs/classic_control/cartpole.py)
"""
def __init__(
self,
# Physics parameters
pole_mass=(0.05, 0.5), # 0.1
pole_length=(0.1, 1.0), # 0.5, actually half the pole's length
cart_mass=(0.5, 1.5),
relative_force=(0.75, 1.5), # 1.0, relative to gravity
gravity=9.8,
# State space
state_velocities=True,
state_location=False, # true
state_initial_max_angle=0.05,
state_initial_max_angle_velocity=0.05,
state_initial_max_velocity=0.05,
# Action space
action_timedelta=0.02,
action_continuous=False,
action_noop=True # false
):
super().__init__()
# Physics parameters
if isinstance(pole_mass, tuple):
assert len(pole_mass) == 2 and 0.0 < pole_mass[0] < pole_mass[1]
self.pole_mass_range = (float(pole_mass[0]), float(pole_mass[1]))
else:
assert pole_mass > 0.0
self.pole_mass_range = (float(pole_mass), float(pole_mass))
if isinstance(pole_length, tuple):
assert len(pole_length) == 2 and 0.0 < pole_length[0] < pole_length[1]
self.pole_length_range = (float(pole_length[0]), float(pole_length[1]))
else:
assert pole_length > 0.0
self.pole_length_range = (float(pole_length), float(pole_length))
if isinstance(cart_mass, tuple):
assert len(cart_mass) == 2 and 0.0 < cart_mass[0] < cart_mass[1]
self.cart_mass_range = (float(cart_mass[0]), float(cart_mass[1]))
else:
assert cart_mass > 0.0
self.cart_mass_range = (float(cart_mass), float(cart_mass))
if isinstance(relative_force, tuple):
assert len(relative_force) == 2 and 0.0 < relative_force[0] < relative_force[1]
self.relative_force_range = (float(relative_force[0]), float(relative_force[1]))
else:
assert relative_force > 0.0
self.relative_force_range = (float(relative_force), float(relative_force))
assert gravity > 0.0
self.gravity = float(gravity)
# State space
state_indices = [2]
self.state_velocities = bool(state_velocities)
if self.state_velocities:
state_indices.append(3)
state_indices.append(1)
self.state_location = bool(state_location)
if self.state_location:
state_indices.append(0)
self.state_indices = np.array(state_indices, np.int32)
self.state_initials = np.array([[
0.0, float(state_initial_max_velocity),
float(state_initial_max_angle), float(state_initial_max_angle_velocity)
]], dtype=np.float32)
# Action space
self.action_timedelta = float(action_timedelta) # in seconds
assert not action_continuous or action_noop
self.action_continuous = bool(action_continuous)
self.action_noop = bool(action_noop)
# State bounds
angle_bound = float(np.pi) / 4.0
max_angle_acc_in_zero = self.relative_force_range[1] * self.gravity / \
(self.cart_mass_range[0] + self.pole_mass_range[0]) / \
self.pole_length_range[0] / \
(4.0 / 3.0 - self.pole_mass_range[1] / (self.cart_mass_range[0] + self.pole_mass_range[0]))
min_angle_acc_in_zero = self.relative_force_range[0] * self.gravity / \
(self.cart_mass_range[1] + self.pole_mass_range[1]) / \
self.pole_length_range[1] / \
(4.0 / 3.0 - self.pole_mass_range[0] / (self.cart_mass_range[1] + self.pole_mass_range[1]))
max_loc_acc_in_zero = (self.relative_force_range[1] * self.gravity - \
self.pole_mass_range[0] * self.pole_length_range[0] * min_angle_acc_in_zero) / \
(self.cart_mass_range[0] + self.pole_mass_range[0])
angle_vel_bound = max_angle_acc_in_zero * self.action_timedelta * 10.0
loc_vel_bound = max_loc_acc_in_zero * self.action_timedelta * 10.0
if self.state_location:
loc_bound = loc_vel_bound
else:
loc_bound = np.inf
self.state_bounds = np.array(
[[loc_bound, loc_vel_bound, angle_bound, angle_vel_bound]], dtype=np.float32
)
assert (self.state_bounds > 0.0).all()
def states(self):
return dict(
type='float', shape=tuple(self.state_indices.shape),
min_value=-self.state_bounds[0, self.state_indices],
max_value=self.state_bounds[0, self.state_indices]
)
def actions(self):
if self.action_continuous:
return dict(type='float', shape=())
elif self.action_noop:
return dict(type='int', shape=(), num_values=3)
else:
return dict(type='int', shape=(), num_values=2)
def is_vectorizable(self):
return True
def reset(self, num_parallel=None):
# Physics parameters
self.pole_mass = float(np.random.uniform(low=self.pole_mass_range[0], high=self.pole_mass_range[1]))
self.pole_length = float(np.random.uniform(low=self.pole_length_range[0], high=self.pole_length_range[1]))
self.cart_mass = float(np.random.uniform(low=self.cart_mass_range[0], high=self.cart_mass_range[1]))
self.relative_force = float(np.random.uniform(low=self.relative_force_range[0], high=self.relative_force_range[1]))
if num_parallel is None:
initials = np.tile(self.state_initials, reps=(1, 1))
self.state = np.random.uniform(low=-initials, high=initials)
self.parallel_indices = None
return self.state[0, self.state_indices]
else:
initials = np.tile(self.state_initials, reps=(num_parallel, 1))
self.state = np.random.uniform(low=-initials, high=initials)
self.parallel_indices = np.arange(num_parallel)
return self.parallel_indices, self.state[:, self.state_indices]
def execute(self, actions):
assert self.state.shape[0] > 0
# Split state into components
loc = self.state[:, 0]
loc_vel = self.state[:, 1]
angle = self.state[:, 2]
angle_vel = self.state[:, 3]
# Make action continuous
actions = np.asarray(actions)
if self.parallel_indices is None:
actions = np.expand_dims(actions, axis=0)
else:
assert actions.shape[0] == self.parallel_indices.shape[0]
if self.action_continuous:
force = actions
else:
force = np.where(actions == 2, 0.0, np.where(actions == 1, 1.0, -1.0))
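# Discrete action mapping used above: 0 -> push left (-1.0), 1 -> push
# right (+1.0), 2 -> no-op (only available when action_noop is True).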
force *= self.relative_force * self.gravity
# Compute accelerations (https://coneural.org/florian/papers/05_cart_pole.pdf)
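# In the paper's notation (theta = pole angle, x = cart location, M = total
# mass, m = pole mass, l = half pole length, F = force), the code below
# implements:
# B = (F + m*l*theta_vel^2*sin(theta)) / M ("bracket")
# theta_acc = (g*sin(theta) - cos(theta)*B) / (l*(4/3 - m*cos(theta)^2/M))
# x_acc = B - m*l*theta_acc*cos(theta) / M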
cos_angle = np.cos(angle)
sin_angle = np.sin(angle)
total_mass = self.cart_mass + self.pole_mass
pole_mass_length = self.pole_mass * self.pole_length
bracket = (force + pole_mass_length * angle_vel * angle_vel * sin_angle) / total_mass
denom = self.pole_length * (4.0 / 3.0 - (self.pole_mass * cos_angle * cos_angle) / total_mass)
angle_acc = (self.gravity * sin_angle - cos_angle * bracket) / denom
loc_acc = bracket - pole_mass_length * angle
|
blancltd/django-latest-tweets
|
latest_tweets/migrations/0011_photo_image_file.py
|
Python
|
bsd-3-clause
| 434
| 0.002304
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('latest_tweets', '0010_photo_unique'),
]
operations = [
migrations.AddField(
model_name='photo',
name='image_file',
field=models.ImageField(blank=True, upload_to='latest_tweets/photo'),
),
]
|
botswana-harvard/microbiome
|
microbiome/apps/mb_maternal/tests/test_maternal_locator.py
|
Python
|
gpl-2.0
| 847
| 0.002361
|
from edc_constants.constants import YES, NEG
from .base_test_case import BaseTestCase
from .factories import MaternalConsentFactory, MaternalEligibilityFactory, PostnatalEnrollmentFactory
class TestMaternalLocator(BaseTestCase):
def setUp(self):
super(TestMaternalLocator, self).setUp()
self.maternal_eligibility = MaternalEligibilityFactory()
self.maternal_consent = MaternalConsentFactory(registered_subject=self.maternal_eligibility.registered_subject)
self.registered_subject = self.maternal_consent.registered_subject
PostnatalEnrollmentFactory(
registered_subject=self.registered_subject,
current_hiv_status=NEG,
evidence_hiv_status=YES,
rapid_test_done=YES,
rapid_test_result=NEG)
def test_maternal_locator(self):
pass
|
iafan/zing
|
pootle/core/views/api.py
|
Python
|
gpl-3.0
| 10,141
| 0.000197
|
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import json
import operator
from django.core.exceptions import PermissionDenied
from django.db.models import ProtectedError, Q
from django.forms.models import modelform_factory
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django.views.generic import View
from pootle.core.http import (
JsonResponse, JsonResponseBadRequest, JsonResponseForbidden,
JsonResponseNotFound
)
class JSONDecodeError(ValueError):
pass
class APIView(View):
"""View to implement internal RESTful APIs.
Based on djangbone https://github.com/af/djangbone
"""
# Model on which this view operates. Setting this is required
model = None
# Base queryset for accessing data. If `None`, model's default manager will
# be used
base_queryset = None
# Set this to restrict the view to a subset of the available methods
restrict_to_methods = None
# Field names to be included
fields = ()
# Individual forms to use for each method. By default it'll auto-populate
# model forms built using `self.model` and `self.fields`
add_form_class = None
edit_form_class = None
# Permission classes implement logic to determine whether the request
# should be permitted. Empty list means no permission-checking.
permission_classes = []
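# A hypothetical permission class matching the protocol expected by
# get_permissions()/check_permissions() below:
#
# class IsSuperUser(object):
#     def has_permission(self, request, view):
#         return request.user.is_superuser
#     def has_object_permission(self, request, view, obj):
#         return request.user.is_superuser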
# Tuple of sensitive field names that will be excluded from any serialized
# responses
sensitive_field_names = ('password', 'pw')
# Set to an integer to enable GET pagination
page_size = None
# HTTP GET parameter to use for accessing pages
page_param_name = 'p'
# HTTP GET parameter to use for search queries
search_param_name = 'q'
# Field names in which searching will be allowed
search_fields = None
@property
def allowed_methods(self):
methods = [m for m in self.http_method_names if hasattr(self, m)]
if self.restrict_to_methods is not None:
restricted_to = map(lambda x: x.lower(), self.restrict_to_methods)
methods = filter(lambda x: x in restricted_to, methods)
return methods
def __init__(self, *args, **kwargs):
if self.model is None:
raise ValueError('No model class specified.')
self.pk_field_name = self.model._meta.pk.name
if self.base_queryset is None:
self.base_queryset = self.model._default_manager
self._init_fields()
self._init_forms()
return super(APIView, self).__init__(*args, **kwargs)
def _init_fields(self):
if len(self.fields) < 1:
form = self.add_form_class or self.edit_form_class
if form is not None:
self.fields = form._meta.fields
else:  # Assume all fields by default
self.fields = (f.name for f in self.model._meta.fields)
self.serialize_fields = (f for f in self.fields if
f not in self.sensitive_field_names)
def _init_forms(self):
if 'post' in self.allowed_methods and self.add_form_class is None:
self.add_form_class = modelform_factory(self.model,
fields=self.fields)
if 'put' in self.allowed_methods and self.edit_form_class is None:
self.edit_form_class = modelform_factory(self.model,
fields=self.fields)
@cached_property
def request_data(self):
try:
return json.loads(self.request.body)
except ValueError:
raise JSONDecodeError
def get_permissions(self):
"""Returns permission handler instances required for a particular view."""
return [permission() for permission in self.permission_classes]
def check_permissions(self, request):
"""Checks whether the view is allowed to process the request or not.
"""
for permission in self.get_permissions():
if not permission.has_permission(request, self):
raise PermissionDenied
def check_object_permissions(self, request, obj):
for permission in self.get_permissions():
if not permission.has_object_permission(request, self, obj):
raise PermissionDenied
def handle_exception(self, exc):
"""Handles response exceptions."""
if isinstance(exc, Http404):
return JsonResponseNotFound({
'msg': 'Not found',
})
if isinstance(exc, PermissionDenied):
return JsonResponseForbidden({
'msg': 'Permission denied.',
})
if isinstance(exc, JSONDecodeError):
return JsonResponseBadRequest({
'msg': 'Invalid JSON data',
})
raise
def dispatch(self, request, *args, **kwargs):
try:
self.check_permissions(request)
if request.method.lower() in self.allowed_methods:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
return handler(request, *args, **kwargs)
except Exception as exc:
return self.handle_exception(exc)
def get(self, request, *args, **kwargs):
"""GET handler."""
if self.kwargs.get(self.pk_field_name, None) is not None:
obj = self.get_object()
return JsonResponse(self.object_to_values(obj))
return self.get_collection(request, *args, **kwargs)
def get_object(self):
"""Returns a single model instance."""
obj = get_object_or_404(
self.base_queryset, pk=self.kwargs[self.pk_field_name],
)
self.check_object_permissions(self.request, obj)
return obj
def get_collection(self, request, *args, **kwargs):
"""Retrieve a full collection."""
return JsonResponse(self.qs_to_values(self.base_queryset))
def get_form_kwargs(self):
kwargs = {
'data': self.request_data,
}
if (self.pk_field_name in self.kwargs and
self.kwargs[self.pk_field_name] is not None):
kwargs.update({
'instance': self.get_object(),
})
return kwargs
def post(self, request, *args, **kwargs):
"""Creates a new model instance.
The form to be used can be customized by setting
`self.add_form_class`. By default a model form will be used with
the fields from `self.fields`.
"""
form = self.add_form_class(**self.get_form_kwargs())
if form.is_valid():
new_object = form.save()
return JsonResponse(self.object_to_values(new_object))
return self.form_invalid(form)
def put(self, request, *args, **kwargs):
"""Update the current model."""
if self.pk_field_name not in self.kwargs:
return self.status_msg('PUT is not supported for collections',
status=405)
form = self.edit_form_class(**self.get_form_kwargs())
if form.is_valid():
updated_object = form.save()
return JsonResponse(self.object_to_values(updated_object))
return self.form_invalid(form)
def delete(self, request, *args, **kwargs):
"""Delete the model and return its JSON representation."""
if self.pk_field_name not in kwargs:
return self.status_msg('DELETE is not supported for collections',
status=405)
obj = self.get_object()
try:
obj.delete()
return JsonResponse({})
except ProtectedError as e:
|
ifduyue/sentry
|
tests/sentry/models/test_groupresolution.py
|
Python
|
bsd-3-clause
| 2,921
| 0
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
from datetime import timedelta
from django.utils import timezone
from sentry.models import GroupResolution
from sentry.testutils import TestCase
class GroupResolutionTest(TestCase):
def setUp(self):
super(GroupResolutionTest, self).setUp()
self.old_release = self.create_release(
version='a',
project=self.project,
)
self.old_release.update(
date_added=timezone.now() - timedelta(minutes=30),
)
self.new_release = self.create_release(
version='b',
project=self.project,
)
self.group = self.create_group()
def test_in_next_release_with_new_release(self):
GroupResolution.objects.create(
release=self.old_release,
group=self.group,
type=GroupResolution.Type.in_next_release,
)
assert not GroupResolution.has_resolution(self.group, self.new_release)
def test_in_next_release_with_same_release(self):
GroupResolution.objects.create(
release=self.old_release,
group=self.group,
type=GroupResolution.Type.in_next_release,
)
assert GroupResolution.has_resolution(self.group, self.old_release)
def test_in_next_release_with_old_release(self):
GroupResolution.objects.create(
release=self.new_release,
group=self.group,
type=GroupResolution.Type.in_next_release,
)
assert GroupResolution.has_resolution(self.group, self.old_release)
def test_in_release_with_new_release(self):
GroupResolution.objects.create(
release=self.old_release,
group=self.group,
type=GroupResolution.Type.in_release,
)
assert not GroupResolution.has_resolution(self.group, self.new_release)
def test_in_release_with_current_release(self):
GroupResolution.objects.create(
release=self.old_release,
group=self.group,
type=GroupResolution.Type.in_release,
)
assert not GroupResolution.has_resolution(self.group, self.old_release)
def test_in_release_with_old_release(self):
GroupResolution.objects.create(
release=self.new_release,
group=self.group,
type=GroupResolution.Type.in_release,
)
assert GroupResolution.has_resolution(self.group, self.old_release)
def test_no_release_with_resolution(self):
GroupResolution.objects.create(
release=self.new_release,
group=self.group,
type=GroupResolution.Type.in_release,
)
assert GroupResolution.has_resolution(self.group, None)
def test_no_release_with_no_resolution(self):
assert not GroupResolution.has_resolution(self.group, None)
|
veikman/cbg
|
cbg/content/card.py
|
Python
|
gpl-3.0
| 4,586
| 0.000436
|
# -*- coding: utf-8 -*-
'''A module to represent the text content of a playing card at a high level.'''
# This file is part of CBG.
#
# CBG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CBG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CBG. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2014-2016 Viktor Eikman
import itertools
import logging
from cbg.content import elements
from cbg.content import field
class Card(elements.DerivedFromSpec, field.Layout):
'''The content of a unique playing card, as a "master" of sorts.
Content is tracked by field type, and each field type has its own
class, listed in the "plan" class attribute.
If there is a field corresponding to a title, it should generally
be populated first and use the "key_title" attribute of the card
class as its key, because that way its content will appear in
exception messages etc., to help debug subsequent problems.
The number of copies in a deck is tracked at the deck level, not here.
'''
_untitled_base = 'untitled card'
_untitled_iterator = itertools.count(start=1)
def layout(self):
'''Put data from incoming raws into empty fields.'''
if not self.specification:
s = 'No specification data for the "{}" card.'
raise self.SpecificationError(s.format(self))
try:
super().layout()
except:
s = 'An error occurred while processing the "{}" card.'
logging.error(s.format(self))
raise
if self.specification:
for key, value in self.specification.items():
s = 'Unrecognized data key "{}" not consumed: "{}".'
logging.error(s.format(key, value))
s = 'Specification data for the "{}" card was not consumed.'
raise self.SpecificationError(s.format(self))
def not_in_spec(self):
s = 'Specification of "{}" card inadequate for basic layout.'
raise self.SpecificationError(s.format(self))
@property
def title(self):
'''Quick access to the card's title field's processed value, if any.
In the absence of a title field, for the moment, use a stable
generated title.
'''
try:
field = str(self.child_by_key_required(self.key_title))
if field:
# In spec.
return str(field)
except:
pass
try:
return self._generated_title
except AttributeError:
return self._untitled_base
@property
def card(self):
'''An override of a field method.'''
return self
@property
def _sorting_signature(self):
'''Salient properties of self, for sorting purposes.
To be overridden for card types with other salient properties.
'''
return str(self.deck), str(self)
def __eq__(self, other):
'''Used for sorting (as performed by decks).'''
try:
return self._sorting_signature == other._sorting_signature
except AttributeError:
return False
def __lt__(self, other):
'''Used for sorting (as performed by decks).
Notice that this method and __eq__ cannot be used with
functools.total_ordering, because that decorator will not override
inherited comparison methods from our parent classes.
'''
try:
return self._sorting_signature < other._sorting_signature
except AttributeError:
s = 'Tried to sort {} relative to incompatible {}.'
raise TypeError(s.format(type(self), type(other)))
def __ne__(self, other):
return not self == other
def __le__(self, other):
return self < other or self == other
def __gt__(self, other):
return not self < other
def __ge__(self, other):
return self > other or self == other
def __str__(self):
return self.title
def __hash__(self):
'''Treat as if immutable, because decks are counters (hash tables).'''
return hash(id(self))
|
mheap/ansible
|
lib/ansible/module_utils/network/f5/bigip.py
|
Python
|
gpl-3.0
| 3,738
| 0.001338
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import time
try:
from f5.bigip import ManagementRoot
from icontrol.exceptions import iControlUnexpectedHTTPError
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
try:
from library.module_utils.network.f5.common import F5BaseClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.icontrol import iControlRestSession
except ImportError:
from ansible.module_utils.network.f5.common import F5BaseClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.icontrol import iControlRestSession
class F5Client(F5BaseClient):
def __init__(self, *args, **kwargs):
super(F5Client, self).__init__(*args, **kwargs)
self.provider = self.merge_provider_params()
@property
def api(self):
exc = None
if self._client:
return self._client
for x in range(0, 10):
try:
result = ManagementRoot(
self.provider['server'],
self.provider['user'],
self.provider['password'],
port=self.provider['server_port'],
verify=self.provider['validate_certs'],
token='tmos'
)
self._client = result
return self._client
except Exception as ex:
exc = ex
time.sleep(1)
error = 'Unable to connect to {0} on port {1}.'.format(
self.provider['server'], self.provider['server_port']
)
if exc is not None:
error += ' The reported error was "{0}".'.format(str(exc))
raise F5ModuleError(error)
class F5RestClient(F5BaseClient):
def __init__(self, *args, **kwargs):
super(F5RestClient, self).__init__(*args, **kwargs)
self.provider = self.merge_provider_params()
@property
def api(self):
exc = None
if self._client:
return self._client
for x in range(0, 10):
try:
url = "https://{0}:{1}/mgmt/shared/authn/login".format(
self.provider['server'], self.provider['server_port']
)
payload = {
'username': self.provider['user'],
'password': self.provider['password'],
'loginProviderName': self.provider['auth_provider'] or 'tmos'
}
session = iControlRestSession()
session.verify = self.provider['validate_certs']
response = session.post(url, json=payload)
if response.status not in [200]:
raise F5ModuleError('Status code: {0}. Unexpected Error: {1} for uri: {2}\nText: {3}'.format(
response.status, response.reason, response.url, response._content
))
session.headers['X-F5-Auth-Token'] = response.json()['token']['token']
self._client = session
return self._client
except Exception as ex:
exc = ex
time.sleep(1)
error = 'Unable to connect to {0} on port {1}.'.format(
self.provider['server'], self.provider['server_port']
)
if exc is not None:
error += ' The reported error was "{0}".'.format(str(exc))
raise F5ModuleError(error)
|
theshammy/GenAn
|
src/concepts/query.py
|
Python
|
mit
| 1,811
| 0.009939
|
from textx.exceptions import TextXSemanticError
def query_processor(query):
if query.condition is not None:
query.condition.conditionName = adapter_for_query(query)
for query in query.parent.queries:
if (not hasattr(query, 'property')) and (query.sortBy not in query.parent.properties):
line, col = query.parent._tx_metamodel.parser.pos_to_linecol(
object._tx_position)
raise TextXSemanticError("ERROR: (at %d, %d) Object %s has
|
no property named %s." %
(line, col, query.parent.object.name, query.parent.
|
property.name))
elif (not hasattr(query, 'sortBy')) and (query.sortBy not in query.parent.properties):
line, col = query.parent._tx_metamodel.parser.pos_to_linecol(
object._tx_position)
raise TextXSemanticError("ERROR: (at %d, %d) Object %s has no property named %s." %
(line, col, query.parent.object.name, query.parent.property.name))
else:
return True
def adapter_for_query(queryObject):
try:
return {
'lowerThan': 'lt',
'greaterThan': 'gt',
'lessEqual': 'le',
'greaterEqual': 'ge',
'equal': 'e'
}[queryObject.condition.conditionName]
except KeyError:
return queryObject.condition.conditionName
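# For example, a condition named 'lowerThan' is adapted to the short form
# 'lt'; names outside the mapping are returned unchanged.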
class Query(object):
def __init__(self, parent, name, property=None, condition=None, sortBy=None, order=None, rangeFrom=None, rangeTo=None):
self.name = name
self.parent = parent
self.property = property
self.condition = condition
self.sortBy = sortBy
self.order = order
self.rangeFrom = rangeFrom
self.rangeTo = rangeTo
|
supistar/Botnyan
|
model/qrcreator.py
|
Python
|
mit
| 520
| 0
|
# -*- encoding:utf8 -*-
import cStringIO
import qrcode
class QRCodeCreator():
def __init__(self):
pass
def create(self, message):
qr = qrcode.QRCode(
version=1,
error_correction=qrcode.constants.ERROR_CORRECT_L,
box_size=10,
border=4,
)
qr.add_data(message)
qr.make(fit=True)
img = qr.make_image()
img_buf = cStringIO.StringIO()
img.save(img_buf)
img_buf.seek(0)
return img_buf
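# Usage sketch (assumes the third-party 'qrcode' package with PIL support is
# installed; the file name is illustrative):
#
# creator = QRCodeCreator()
# buf = creator.create('https://example.com')
# with open('code.png', 'wb') as f:
#     f.write(buf.read())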
|
jalanb/dotjab
|
src/python/__init__.py
|
Python
|
mit
| 48
| 0
|
#!/usr/bin/env python
__version__ = '0.8.53'
|
stdgregwar/elve
|
personaltypes.py
|
Python
|
lgpl-3.0
| 2,750
| 0.000727
|
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
# This is a place to add your own dumpers for testing purposes.
# Any contents here will be picked up by GDB and LLDB based
# debugging in Qt Creator automatically. This code is not used
# when debugging with CDB on Windows.
# NOTE: This file will get overwritten when updating Qt Creator.
#
# To add dumpers that don't get overwritten, copy this file here
# to a safe location outside the Qt Creator installation and
# make this location known to Qt Creator using the Debugger /
# GDB / Dumper customization / Additional file setting.
# Example to display a simple type
# template<typename U, typename V> struct MapNode
# {
# U key;
# V data;
# }
#
# def qdump__MapNode(d, value):
# d.putValue("This is the value column contents")
# d.putNumChild(2)
# if d.isExpanded():
# with Children(d):
# # Compact simple case.
# d.putSubItem("key", value["key"])
# # Same effect, with more customization possibilities.
# with SubItem(d, "data")
# d.putItem("data", value["data"])
# Check http://doc.qt.io/qtcreator/creator-debugging-helpers.html
# for more details or look at qttypes.py, stdtypes.py, boosttypes.py
# for more complex examples.
from dumper import *
#def qdump__Pin(d,value):
# d.putValue('%s %s' % (value['id'].integer(), value['index'].integer()))
# d.putNumChild(2)
# if d.isExpanded():
# with Children(d):
# d.putSubItem("id",value["id"])
# d.putSubItem("index",value["index"])
######################## Your code below #######################
|
quamilek/django-custard
|
custard/tests/test.py
|
Python
|
mit
| 13,067
| 0.004362
|
from __future__ import unicode_literals
import django
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, Client
from django.test.client import RequestFactory
from django.test.utils import override_settings
from custard.conf import (CUSTOM_TYPE_TEXT, CUSTOM_TYPE_INTEGER,
CUSTOM_TYPE_BOOLEAN, CUSTOM_TYPE_FLOAT,
CUSTOM_TYPE_DATE, CUSTOM_TYPE_DATETIME,
CUSTOM_TYPE_TIME, settings)
from custard.builder import CustomFieldsBuilder
from custard.utils import import_class
from .models import (SimpleModelWithManager, SimpleModelWithoutManager,
CustomFieldsModel, CustomValuesModel, builder)
#==============================================================================
class SimpleModelWithManagerForm(builder.create_modelform()):
class Meta:
model = SimpleModelWithManager
fields = '__all__'
#class ExampleAdmin(admin.ModelAdmin):
# form = ExampleForm
# search_fields = ('name',)
#
# def get_search_results(self, request, queryset, search_term):
# queryset, use_distinct = super(ExampleAdmin, self).get_search_results(request, queryset, search_term)
# queryset |= self.model.objects.search(search_term)
# return queryset, use_distinct
#
# admin.site.register(Example, ExampleAdmin)
#==============================================================================
class CustomModelsTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.simple_with_manager_ct = ContentType.objects.get_for_model(SimpleModelWithManager)
self.simple_without_manager_ct = ContentType.objects.get_for_model(SimpleModelWithoutManager)
self.cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='text_field',
label="Text field",
data_type=CUSTOM_TYPE_TEXT)
self.cf.save()
self.cf2 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='another_text_field',
label="Text field 2",
data_type=CUSTOM_TYPE_TEXT,
required=True,
searchable=False)
self.cf2.clean()
self.cf2.save()
self.cf3 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='int_field', label="Integer field",
data_type=CUSTOM_TYPE_INTEGER)
self.cf3.save()
self.cf4 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='boolean_field', label="Boolean field",
data_type=CUSTOM_TYPE_BOOLEAN)
self.cf4.save()
self.cf5 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='float_field', label="Float field",
data_type=CUSTOM_TYPE_FLOAT)
self.cf5.save()
self.cf6 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='date_field', label="Date field",
data_type=CUSTOM_TYPE_DATE)
self.cf6.save()
self.cf7 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='datetime_field', label="Datetime field",
data_type=CUSTOM_TYPE_DATETIME)
self.cf7.save()
self.cf8 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='time_field', label="Time field",
data_type=CUSTOM_TYPE_TIME)
self.cf8.save()
self.obj = SimpleModelWithManager.objects.create(name='old test')
self.obj.save()
def tearDown(self):
CustomFieldsModel.objects.all().delete()
def test_import_class(self):
self.assertEqual(import_class('custard.builder.CustomFieldsBuilder'), CustomFieldsBuilder)
def test_model_repr(self):
self.assertEqual(repr(self.cf), "<CustomFieldsModel: text_field>")
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="abcdefg")
val.save()
self.assertEqual(repr(val), "<CustomValuesModel: text_field: abcdefg>")
@override_settings(CUSTOM_CONTENT_TYPES=['simplemodelwithmanager'])
def test_field_creation(self):
builder2 = CustomFieldsBuilder('tests.CustomFieldsModel',
'tests.CustomValuesModel',
settings.CUSTOM_CONTENT_TYPES)
class TestCustomFieldsModel(builder2.create_fields()):
class Meta:
app_label = 'tests'
self.assertQuerysetEqual(ContentType.objects.filter(builder2.content_types_query),
ContentType.objects.filter(Q(name__in=['simplemodelwithmanager'])))
def test_mixin(self):
self.assertIn(self.cf, self.obj.get_custom_fields())
self.assertIn(self.cf, SimpleModelWithManager.get_model_custom_fields())
self.assertEqual(self.cf, self.obj.get_custom_field('text_field'))
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="123456")
val.save()
self.assertEqual("123456", self.obj.get_custom_value('text_field'))
self.obj.set_custom_value('text_field', "abcdefg")
self.assertEqual("abcdefg", self.obj.get_custom_value('text_field'))
val.delete()
def test_field_model_clean(self):
cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='another_text_field',
label="Text field already present",
data_type=CUSTOM_TYPE_INTEGER)
with self.assertRaises(ValidationError):
cf.full_clean()
cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
name='name',
label="Text field already in model",
data_type=CUSTOM_TYPE_TEXT)
with self.assertRaises(ValidationError):
cf.full_clean()
def test_value_model_clean(self):
val = CustomValuesModel.objects.create(custom_field=self.cf2,
object_id=self.obj.pk)
val.value = "qwertyuiop"
val.save()
val = CustomValuesModel.objects.create(custom_field=self.cf2,
object_id=self.obj.pk)
val.value = "qwertyuiop"
with self.assertRaises(ValidationError):
val.full_clean()
def test_value_creation(self):
val = CustomValuesModel.objects.create(custom_field=self.cf,
object_id=self.obj.pk,
value="qwertyuiop")
val.save()
self.assertEqual(val.content_type, self.simple_with_manager_ct)
self.assertEqual(val.content_type, val.custom_
|
trickvi/budgetdatapackage
|
tests/test_resource.py
|
Python
|
gpl-3.0
| 3,567
| 0
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import budgetdatapackage
import datapackage
import datetime
from nose.tools import raises
from datapackage import compat
class TestBudgetResource(object):
def setup(self):
self.values = {
'currency': 'ISK',
'dateLastUpdated': '2014-04-22',
'datePublished': '1982-04-22',
'fiscalYear': '2014',
'granularity': 'transactional',
'status': 'approved',
'type': 'expenditure',
'location': 'IS',
'url': 'http://iceland.is/budgets.csv'}
def test_create_resource(self):
resource = budgetdatapackage.BudgetResource(**self.values)
assert resource.currency == self.values['currency']
last_updated = datetime.datetime.strptime(
self.values['dateLastUpdated'], '%Y-%m-%d').date()
assert resource.dateLastUpdated == last_updated
published = datetime.datetime.strptime(
self.values['datePublished'], '%Y-%m-%d').date()
assert resource.datePublished == published
assert resource.fiscalYear == self.values['fiscalYear']
assert resource.granularity == self.values['granularity']
assert resource.status == self.values['status']
assert resource.type == self.values['type']
assert resource.location == self.values['location']
assert resource.url == self.values['url']
assert resource.standard == '1.0.0-alpha'
def test_resource_can_be_used_with_datapackage(self):
"""Checks if it's possible to create a datapackage with a
budget resource"""
moneys = budgetdatapackage.BudgetResource(**self.values)
finances = datapackage.DataPackage(
name="finances", license="PDDL", resources=[moneys])
assert finances.name == "finances"
assert len(finances.resources) == 1
assert finances.resources[0].granularity == self.values['granularity']
@raises(ValueError)
def test_create_resource_missing_required_field(self):
del self.values['fiscalYear']
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_currency(self):
self.values['currency'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_dateLastPublished(self):
self.values['dateLastUpdated'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_datePublished(self):
self.values['datePublished'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_fiscalYear(self):
self.values['fiscalYear'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_granularity(self):
self.values['granularity'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_status(self):
self.values['status'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_type(self):
self.values['type'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
@raises(ValueError)
def test_bad_location(self):
self.values['location'] = 'batman'
budgetdatapackage.BudgetResource(**self.values)
|
sit/dht
|
tools/vischat.py
|
Python
|
mit
| 1,908
| 0.021488
|
import asynchat
import socket
import errno
class vischat (asynchat.async_chat):
def __init__ (self, host, port):
self.host = host
self.port = port
self.outstanding = []
self.lines = []
self.buffer = ""
asynchat.async_chat.__init__ (self)
def handle_connect (self):
err = self.getsockopt (socket.SOL_SOCKET, socket.SO_ERROR)
if err == errno.ECONNREFUSED:
self.connect_cb (None)
else:
self.connect_cb (self)
def start_connect (self, cb):
self.create_socket (socket.AF_INET, socket.SOCK_STREAM)
self.connect ((self.host, self.port))
self.set_terminator ("\n")
self.connect_cb = cb
def collect_incoming_data (self, data):
self.buffer += data
def found_terminator (self):
# Assumes that vis handles all request in order.
# print "### %s" % self.buffer
if self.buffer[0] == '.':
z = self.outstanding.pop (0)
if z:
z (self.lines)
self.lines = []
else:
self.lines.append (self.buffer)
self.buffer = ""
# Each command here is a front-end to a real message that could get
# sent to vis, and a callback that should notify
def list (self, cb):
self.push ("list\n")
self.outstanding.append (cb)
def arc (self, a, b):
self.push ("arc %s %s\n" % (a, b))
self.outstanding.append (None)
def arrow (self, a, b):
self.push ("arrow %s %s\n" % (a, b))
self.outstanding.append (None)
def reset (self):
self.push ("reset\n")
self.outstanding.append (None)
def highlight (self, a):
self.push ("highlight %s\n" % a)
self.outstanding.append (None)
def select (self, a):
self.push ("select %s\n" % a)
self.outstanding.append (None)
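# Usage sketch (hypothetical host/port; asynchat clients run inside an
# asyncore event loop):
#
# import asyncore
# def on_connect(conn):
#     if conn is not None:
#         conn.list(lambda lines: handle(lines))  # handle() is hypothetical
# v = vischat('localhost', 10000)
# v.start_connect(on_connect)
# asyncore.loop()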
|
alexsilva/helicon-zoofcgi
|
zoofcgi.py
|
Python
|
mit
| 34,047
| 0.001351
|
# encoding: utf-8
# FastCGI-to-WSGI bridge for files/pipes transport (not socket)
#
# Copyright (c) 2002, 2003, 2005, 2006 Allan Saddi <allan@saddi.com>
# Copyright (c) 2011 - 2013 Ruslan Keba <ruslan@helicontech.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__author__ = 'Allan Saddi <allan@saddi.com>, Ruslan Keba <ruslan@helicontech.com>'
import msvcrt
import struct
import os
import logging
import sys
import traceback
import datetime
import urllib
from optparse import OptionParser
# debug flag
__dbg__ = False
# Constants from the spec.
FCGI_LISTENSOCK_FILENO = 0
FCGI_HEADER_LEN = 8
FCGI_VERSION_1 = 1
FCGI_BEGIN_REQUEST = 1
FCGI_ABORT_REQUEST = 2
FCGI_END_REQUEST = 3
FCGI_PARAMS = 4
FCGI_STDIN = 5
FCGI_STDOUT = 6
FCGI_STDERR = 7
FCGI_DATA = 8
FCGI_GET_VALUES = 9
FCGI_GET_VALUES_RESULT = 10
FCGI_UNKNOWN_TYPE = 11
FCGI_MAXTYPE = FCGI_UNKNOWN_TYPE
FCGI_NULL_REQUEST_ID = 0
FCGI_KEEP_CONN = 1
FCGI_RESPONDER = 1
FCGI_AUTHORIZER = 2
FCGI_FILTER = 3
FCGI_REQUEST_COMPLETE = 0
FCGI_CANT_MPX_CONN = 1
FCGI_OVERLOADED = 2
FCGI_UNKNOWN_ROLE = 3
FCGI_MAX_CONNS = 'FCGI_MAX_CONNS'
FCGI_MAX_REQS = 'FCGI_MAX_REQS'
FCGI_MPXS_CONNS = 'FCGI_MPXS_CONNS'
FCGI_Header = '!BBHHBx'
FCGI_BeginRequestBody = '!HB5x'
FCGI_EndRequestBody = '!LB3x'
FCGI_UnknownTypeBody = '!B7x'
FCGI_EndRequestBody_LEN = struct.calcsize(FCGI_EndRequestBody)
FCGI_UnknownTypeBody_LEN = struct.calcsize(FCGI_UnknownTypeBody)
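# FCGI_Header packs, in network byte order: version (B), record type (B),
# request id (H), content length (H), padding length (B) and one reserved
# pad byte -- FCGI_HEADER_LEN (8) bytes in total.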
FCGI_HEADER_NAMES = (
'ERROR TYPE: 0',
'BEGIN_REQUEST',
'ABORT_REQUEST',
'END_REQUEST',
'PARAMS',
'STDIN',
'STDOUT',
'STDERR',
'DATA',
'GET_VALUES',
'GET_VALUES_RESULT',
'UNKNOWN_TYPE',
)
class InputStream(object):
"""
File-like object representing FastCGI input streams (FCGI_STDIN and
FCGI_DATA). Supports the minimum methods required by WSGI spec.
"""
def __init__(self, conn):
self._conn = conn
# See Server.
self._shrinkThreshold = conn.server.inputStreamShrinkThreshold
self._buf = ''
self._bufList = []
self._pos = 0 # Current read position.
self._avail = 0 # Number of bytes currently available.
self._eof = False # True when server has sent EOF notification.
def _shrinkBuffer(self):
"""Gets rid of already read data (since we can't rewind)."""
if self._pos >= self._shrinkThreshold:
self._buf = self._buf[self._pos:]
self._avail -= self._pos
self._pos = 0
assert self._avail >= 0
def _waitForData(self):
"""Waits for more data to become available."""
self._conn.process_input()
def read(self, n=-1):
if self._pos == self._avail and self._eof:
return ''
while True:
if n < 0 or (self._avail - self._pos) < n:
# Not enough data available.
if self._eof:
# And there's no more coming.
newPos = self._avail
break
else:
# Wait for more data.
self._waitForData()
continue
else:
newPos = self._pos + n
break
# Merge buffer list, if necessary.
if self._bufList:
self._buf += ''.join(self._bufList)
self._bufList = []
r = self._buf[self._pos:newPos]
self._pos = newPos
self._shrinkBuffer()
return r
def readline(self, length=None):
if self._pos == self._avail and self._eof:
return ''
while True:
# Unfortunately, we need to merge the buffer list early.
if self._bufList:
self._buf += ''.join(self._bufList)
self._bufList = []
# Find newline.
i = self._buf.find('\n', self._pos)
if i < 0:
# Not found?
if self._eof:
# No more data coming.
newPos = self._avail
break
else:
if length is not None and len(self._buf) >= length + self._pos:
newPos = self._pos + length
break
# Wait for more to come.
self._waitForData()
continue
else:
newPos = i + 1
break
r = self._buf[self._pos:newPos]
self._pos = newPos
self._shrinkBuffer()
return r
def readlines(self, sizehint=0):
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def __iter__(self):
return self
def next(self):
r = self.readline()
if not r:
raise StopIteration
return r
def add_data(self, data):
if not data:
self._eof = True
else:
self._bufList.append(data)
self._avail += len(data)
class OutputStream(object):
"""
FastCGI output stream (FCGI_STDOUT/FCGI_STDERR). By default, calls to
write() or writelines() immediately result in Records being sent back
to the server. Buffering should be done in a higher level!
"""
def __init__(self, conn, req, type, buffered=False):
self._conn = conn
self._req = req
self._type = type
self._buffered = buffered
self._bufList = [] # Used if buffered is True
self.dataWritten = False
self.closed = False
def _write(self, data):
length = len(data)
while length:
to_write = min(length, self._req.server.maxwrite - FCGI_HEADER_LEN)
rec = Record(self._type, self._req.requestId)
rec.contentLength = to_write
rec.contentData = data[:to_write]
self._conn.writeRecord(rec)
data = data[to_write:]
length -= to_write
def write(self, data):
assert not self.closed
if not data:
return
self.dataWritten = True
if self._buffered:
self._bufList.append(data)
else:
self._write(data)
def writelines(self, lines):
assert not self.closed
for line in lines:
self.write(line)
def flush(self):
# Only need to flush if this OutputStream is actually buffered.
if self._buffered:
data = ''.join(self._bufList)
self._bufList = []
self._write(data)
# Though available, the following should NOT be called by WSGI apps.
def close(self):
"""Sends end-of-stream notification, if necessary."""
if not self.closed and self.dataWritte
|
leepa/django-paypal-driver
|
paypal/driver.py
|
Python
|
gpl-2.0
| 14,675
| 0.000409
|
# -*- coding: utf-8 -*-
# Pluggable PayPal NVP (Name Value Pair) API implementation for Django.
# This file includes the PayPal driver class that maps NVP API methods to such
# simple functions.
# Feel free to distribute, modify or use any open or closed project without
# any permission.
# Author: Ozgur Vatansever
# Email: ozgurvt@gmail.com
from cgi import parse_qs
import urllib
import urllib2
from django.conf import settings
# Exception messages
TOKEN_NOT_FOUND_ERROR = (
"PayPal error occured. There is no TOKEN info to finish performing PayPal "
"payment process. We haven't charged your money yet."
)
NO_PAYERID_ERROR = (
"PayPal error occured. There is no PAYERID info to finish performing "
"PayPal payment process. We haven't charged your money yet."
)
GENERIC_PAYPAL_ERROR = (
"There occured an error while performing PayPal checkout process. We "
"apologize for the inconvenience. We haven't charged your money yet."
)
GENERIC_PAYMENT_ERROR = (
"Transaction failed. Check out your order details again."
)
GENERIC_REFUND_ERROR = (
"An error occured, we can not perform your refund request"
)
class PayPal(object):
"""
Pluggable Python PayPal Driver that implements NVP (Name Value Pair) API
methods.
There are simply 3 main methods to be executed in order to finish the
PayPal payment process.
Those are:
1) SetExpressCheckout
2) GetExpressCheckoutDetails (optional)
3) DoExpressCheckoutPayment
:param username: The PayPal username
:type username: string
:param password: The PayPal password
:type password: string
:param signature: The PayPal signature
:type signature: string
"""
def __init__(
self,
username=None,
password=None,
signature=None
):
self.credientials = {
"USER": username or getattr(settings, "PAYPAL_USER", None),
"PWD": password or getattr(settings, "PAYPAL_PASSWORD", None),
"SIGNATURE": signature or getattr(
settings, "PAYPAL_SIGNATURE", None
),
"VERSION": "53.0",
}
# Second step is to set the API end point and redirect urls correctly.
if getattr(settings, "PAYPAL_DEBUG", False):
self.NVP_API_ENDPOINT = "https://api-3t.sandbox.paypal.com/nvp"
self.PAYPAL_REDIRECT_URL = (
"https://www.sandbox.paypal.com/cgi-bin/webscr?"
"cmd=_express-checkout&token="
)
else:
self.NVP_API_ENDPOINT = "https://api-3t.paypal.com/nvp"
self.PAYPAL_REDIRECT_URL = (
"https://www.paypal.com/cgi-bin/webscr?"
"cmd=_express-checkout&token="
)
# initialization
        self.signature = urllib.urlencode(self.credentials) + '&'
self.setexpresscheckouterror = None
self.getexpresscheckoutdetailserror = None
self.doexpresscheckoutpaymenterror = None
self.refundtransactionerror = None
self.apierror = None
self.api_response = None
self.token = None
self.response = None
self.refund_response = None
def _get_value_from_qs(self, qs, value):
"""
Gets a value from a querystring dict
This is a private helper function, so DO NOT call this explicitly.
"""
raw = qs.get(value)
        if isinstance(raw, list):
            try:
                return raw[0]
            except IndexError:
                return None
else:
return raw
def paypal_url(self, token=None):
"""
Returns a 'redirect url' for PayPal payments.
        If the token is None, this function returns None instead of a URL.
"""
token = token if token is not None else self.token
if not token:
return None
return self.PAYPAL_REDIRECT_URL + token
def SetExpressCheckout(
self,
amount,
currency,
return_url,
cancel_url,
**kwargs
):
"""
To set up an Express Checkout transaction, you must invoke the
SetExpressCheckout API to provide sufficient information to initiate
the payment flow and redirect to PayPal if the operation was successful
@currency: Look at 'https://cms.paypal.com/us/cgi-bin/?
cmd=_render-content&content_ID=developer/
e_howto_api_nvp_currency_codes'
@amount: should be string with the following format '10.00'
@return_url: should be in the format
scheme://hostname[:uri (optional)]
@cancel_url: should be in the format
scheme://hostname[:uri (optional)]
@returns bool
If you want to add extra parameters, you can define them in **kwargs
dict. For instance:
         - SetExpressCheckout(
               '10.00',
               'USD',
               'http://www.test.com/return/',
               'http://www.test.com/cancel/',
               **{'SHIPTOSTREET': 'T Street', 'SHIPTOSTATE': 'T State'}
           )
"""
parameters = {
'METHOD': 'SetExpressCheckout',
'NOSHIPPING': 1,
'PAYMENTACTION': 'Sale',
'RETURNURL': return_url,
'CANCELURL': cancel_url,
'AMT': amount,
'CURRENCYCODE': currency,
}
parameters.update(kwargs)
query_string = self.signature + urllib.urlencode(parameters)
        response = urllib2.urlopen(self.NVP_API_ENDPOINT, query_string).read()
response_dict = parse_qs(response)
self.api_response = response_dict
state = self._get_value_from_qs(response_dict, "ACK")
if state in ["Success", "SuccessWithWarning"]:
            self.token = self._get_value_from_qs(response_dict, "TOKEN")
return True
self.setexpresscheckouterror = GENERIC_PAYPAL_ERROR
self.apierror = self._get_value_from_qs(
response_dict, "L_LONGMESSAGE0"
)
return False
"""
If SetExpressCheckout is successfull use TOKEN to redirect to the browser
to the address BELOW:
- https://www.sandbox.paypal.com/cgi-bin/webscr?
cmd=_express-checkout&token=TOKEN (for development only URL)
"""
def GetExpressCheckoutDetails(self, token):
"""
        This method performs the NVP API method that is responsible for
        getting the payment details. Returns True if the checkout details
        were fetched successfully, otherwise False.
All of the parameters are REQUIRED.
@returns bool
"""
token = self.token if token is None else token
if token is None:
            self.getexpresscheckoutdetailserror = TOKEN_NOT_FOUND_ERROR
return False
parameters = {
'METHOD': "GetExpressCheckoutDetails",
'TOKEN': token,
}
query_string = self.signature + urllib.urlencode(parameters)
response = urllib2.urlopen(self.NVP_API_ENDPOINT, query_string).read()
response_dict = parse_qs(response)
self.api_response = response_dict
state = self._get_value_from_qs(response_dict, "ACK")
        if state not in ["Success", "SuccessWithWarning"]:
self.getexpresscheckoutdetailserror = self._get_value_from_qs(
response_dict, "L_SHORTMESSAGE0"
)
self.apierror = self.getexpresscheckoutdetailserror
return False
return True
def DoExpressCheckoutPayment(
self,
currency,
amount,
token=None,
payerid=None,
**kwargs
):
"""
        This method performs the NVP API method that is responsible for
        doing the actual payment.
All of the parameters are REQUIRED.
@currency: Look at 'https://cms.paypal.com/us/cgi-bin/?
cmd=_render-content&content_ID=developer/
e_howto_api_nvp_currency_codes'
@amount : should be string with the following format '10.00'
        @token : token that will come from the response of SetExpressCheckout
        @payerid : PAYERID that PayPal appends to the return_url
        @returns bool
        """
        # Minimal body following the same request/response pattern as
        # SetExpressCheckout and GetExpressCheckoutDetails above.
        token = token if token is not None else self.token
        if not token:
            self.doexpresscheckoutpaymenterror = TOKEN_NOT_FOUND_ERROR
            return False
        if not payerid:
            self.doexpresscheckoutpaymenterror = NO_PAYERID_ERROR
            return False
        parameters = {
            'METHOD': 'DoExpressCheckoutPayment',
            'PAYMENTACTION': 'Sale',
            'TOKEN': token,
            'PAYERID': payerid,
            'AMT': amount,
            'CURRENCYCODE': currency,
        }
        parameters.update(kwargs)
        query_string = self.signature + urllib.urlencode(parameters)
        response = urllib2.urlopen(self.NVP_API_ENDPOINT, query_string).read()
        response_dict = parse_qs(response)
        self.api_response = response_dict
        state = self._get_value_from_qs(response_dict, "ACK")
        if state in ["Success", "SuccessWithWarning"]:
            self.response = response_dict
            return True
        self.doexpresscheckoutpaymenterror = GENERIC_PAYMENT_ERROR
        self.apierror = self._get_value_from_qs(
            response_dict, "L_LONGMESSAGE0"
        )
        return False
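# Illustrative sketch (not part of the driver): the three-step Express
# Checkout flow from the class docstring. Credentials, amounts and URLs
# below are placeholders.
def _example_express_checkout():
    paypal = PayPal(username='api.user', password='secret', signature='sig')
    ok = paypal.SetExpressCheckout(
        '10.00', 'USD',
        'http://www.test.com/return/', 'http://www.test.com/cancel/'
    )
    if not ok:
        return None
    redirect_url = paypal.paypal_url()  # send the buyer here to approve
    # ...after the buyer returns with a PAYERID:
    paypal.GetExpressCheckoutDetails(paypal.token)  # optional
    paypal.DoExpressCheckoutPayment('USD', '10.00',
                                    token=paypal.token, payerid='PAYERID')
    return redirect_url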
|
t3dev/odoo
|
addons/partner_autocomplete/models/res_partner.py
|
Python
|
gpl-3.0
| 7,237
| 0.001935
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import json
from odoo import api, fields, models, exceptions, _
from odoo.addons.iap import jsonrpc
from requests.exceptions import ConnectionError, HTTPError
from odoo.addons.iap.models.iap import InsufficientCreditError
_logger = logging.getLogger(__name__)
DEFAULT_ENDPOINT = 'https://partner-autocomplete.odoo.com'
class ResPartner(models.Model):
_name = 'res.partner'
_inherit = 'res.partner'
partner_gid = fields.Integer('Company database ID')
additional_info = fields.Char('Additional info')
@api.model
def _replace_location_code_by_id(self, record):
record['country_id'], record['state_id'] = self._find_country_data(
state_code=record.pop('state_code', False),
state_name=record.pop('state_name', False),
country_code=record.pop('country_code', False),
country_name=record.pop('country_name', False)
)
return record
@api.model
def _format_data_company(self, company):
self._replace_location_code_by_id(company)
if company.get('child_ids'):
child_ids = []
for child in company.get('child_ids'):
child_ids.append(self._replace_location_code_by_id(child))
company['child_ids'] = child_ids
if company.get('additional_info'):
company['additional_info'] = json.dumps(company['additional_info'])
return company
@api.model
def _find_country_data(self, state_code, state_name, country_code, country_name):
country = self.env['res.country'].search([['code', '=ilike', country_code]])
if not country:
country = self.env['res.country'].search([['name', '=ilike', country_name]])
state_id = {}
country_id = {}
if country:
country_id = {
'id': country.id,
'display_name': country.display_name
}
if state_name or state_code:
                state = self.env['res.country.state'].search([
                    ('country_id', '=', country_id.get('id')),
                    '|',
                    ('name', '=ilike', state_name),
                    ('code', '=ilike', state_code)
                ], limit=1)
if state:
state_id = {
'id': state.id,
'display_name': state.display_name
}
else:
_logger.info('Country code not found: %s', country_code)
return country_id, state_id
@api.model
def get_endpoint(self):
url = self.env['ir.config_parameter'].sudo().get_param('iap.partner_autocomplete.endpoint', DEFAULT_ENDPOINT)
url += '/iap/partner_autocomplete'
return url
@api.model
def _rpc_remote_api(self, action, params, timeout=15):
        if self.env.registry.in_test_mode():
return False, 'Insufficient Credit'
url = '%s/%s' % (self.get_endpoint(), action)
account = self.env['iap.account'].get('partner_autocomplete')
params.update({
'db_uuid': self.env['ir.config_parameter'].sudo().get_param('database.uuid'),
'account_token': account.account_token,
'country_code': self.env.user.company_id.country_id.code,
'zip': self.env.user.company_id.zip,
})
try:
return jsonrpc(url=url, params=params, timeout=timeout), False
except (ConnectionError, HTTPError, exceptions.AccessError) as exception:
_logger.error('Autocomplete API error: %s' % str(exception))
return False, str(exception)
except InsufficientCreditError as exception:
_logger.warning('Insufficient Credits for Autocomplete Service: %s' % str(exception))
return False, 'Insufficient Credit'
@api.model
def autocomplete(self, query):
suggestions, error = self._rpc_remote_api('search', {
'query': query,
})
        if suggestions:
            return list(suggestions)
        return []
@api.model
def enrich_company(self, company_domain, partner_gid, vat):
response, error = self._rpc_remote_api('enrich', {
'domain': company_domain,
'partner_gid': partner_gid,
'vat': vat,
})
if response and response.get('company_data'):
result = self._format_data_company(response.get('company_data'))
else:
result = {}
if error:
result.update({
'error': True,
'error_message': error
})
return result
@api.model
def read_by_vat(self, vat):
vies_vat_data, error = self._rpc_remote_api('search_vat', {
'vat': vat,
})
if vies_vat_data:
return [self._format_data_company(vies_vat_data)]
else:
return []
@api.model
def _is_company_in_europe(self, country_code):
country = self.env['res.country'].search([('code', '=ilike', country_code)])
if country:
country_id = country.id
europe = self.env.ref('base.europe')
if not europe:
europe = self.env["res.country.group"].search([('name', '=', 'Europe')], limit=1)
if not europe or country_id not in europe.country_ids.ids:
return False
return True
def _is_vat_syncable(self, vat):
vat_country_code = vat[:2]
partner_country_code = self.country_id and self.country_id.code
return self._is_company_in_europe(vat_country_code) and (partner_country_code == vat_country_code or not partner_country_code)
def _is_synchable(self):
already_synched = self.env['res.partner.autocomplete.sync'].search([('partner_id', '=', self.id), ('synched', '=', True)])
return self.is_company and self.partner_gid and not already_synched
def _update_autocomplete_data(self, vat):
self.ensure_one()
if vat and self._is_synchable() and self._is_vat_syncable(vat):
self.env['res.partner.autocomplete.sync'].sudo().add_to_queue(self.id)
@api.model_create_multi
def create(self, vals_list):
partners = super(ResPartner, self).create(vals_list)
if len(vals_list) == 1:
partners._update_autocomplete_data(vals_list[0].get('vat', False))
if partners.additional_info:
partners.message_post_with_view(
'partner_autocomplete.additional_info_template',
values=json.loads(partners.additional_info),
subtype_id=self.env.ref('mail.mt_note').id,
)
partners.write({'additional_info': False})
return partners
@api.multi
def write(self, values):
res = super(ResPartner, self).write(values)
if len(self) == 1:
self._update_autocomplete_data(values.get('vat', False))
return res
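# Illustrative sketch (not part of the module): exercising the helpers
# above from an Odoo shell, where `env` is the usual environment. The query
# and domain values are placeholders.
def _example_autocomplete(env):
    partners = env['res.partner']
    suggestions = partners.autocomplete('Odoo')
    company = partners.enrich_company('odoo.com', partner_gid=0, vat=False)
    if company.get('error'):
        _logger.warning(company['error_message'])
    return suggestions, company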
|
stephenrjones/geoq
|
geoq/core/managers.py
|
Python
|
mit
| 1,155
| 0.001732
|
# -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django.contrib.gis.db import models
class AOIManager(models.GeoManager):
def add_filters(self, **kwargs):
"""
Returns the queryset with new filters
"""
return super(AOIManager, self).get_query_set().filter(**kwargs)
def unassigned(self):
"""
Returns unassigned AOIs.
"""
return self.add_filters(status='Unassigned')
def assigned(self):
"""
Returns assigned AOIs.
"""
return self.add_filters(status='Assigned')
def in_work(self):
"""
Returns AOIs in work.
"""
return self.add_filters(status='In Work')
def submitted(self):
"""
        Returns submitted AOIs.
"""
return self.add_filters(status='Submitted')
def completed(self):
"""
Returns completed AOIs.
"""
return self.add_filters(status='Completed')
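# Illustrative sketch (not part of the manager): a model adopts the manager
# via `objects`, after which the helpers chain like any queryset. The AOI
# model passed in is an assumption for the example.
def _example_aoi_counts(AOI):
    # AOI is assumed to declare: objects = AOIManager()
    return (AOI.objects.unassigned().count(),
            AOI.objects.in_work().count(),
            AOI.objects.completed().count())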
|
arnaudsj/pebl
|
src/pebl/learner/greedy.py
|
Python
|
mit
| 4,765
| 0.003568
|
"""Learner that implements a greedy learning algorithm"""
import time
from pebl import network, result, evaluator
from pebl.util import *
from pebl.learner.base import *
class GreedyLearnerStatistics:
def __init__(self):
self.restarts = -1
self.iterations = 0
self.unimproved_iterations = 0
self.best_score = 0
self.start_time = time.time()
@property
def runtime(self):
return time.time() - self.start_time
class GreedyLearner(Learner):
#
# Parameters
#
_params = (
config.IntParameter(
'greedy.max_iterations',
"""Maximum number of iterations to run.""",
default=1000
),
config.IntParameter(
'greedy.max_time',
"""Maximum learner runtime in seconds.""",
default=0
),
config.IntParameter(
'greedy.max_unimproved_iterations',
"""Maximum number of iterations without score improvement before
a restart.""",
default=500
),
config.StringParameter(
'greedy.seed',
'Starting network for a greedy search.',
default=''
)
)
def __init__(self, data_=None, prior_=None, **options):
"""
Create a learner that uses a greedy learning algorithm.
The algorithm works as follows:
        1. Start with a random network
        2. Make a small, local change and rescore the network
        3. If the new network scores better, accept it; otherwise reject it.
        4. Steps 2-3 are repeated till the restarting criteria is met, at
           which point we begin again with a new random network (step 1)
Any config param for 'greedy' can be passed in via options.
Use just the option part of the parameter name.
For more information about greedy learning algorithms, consult:
1. http://en.wikipedia.org/wiki/Greedy_algorithm
2. D. Heckerman. A Tutorial on Learning with Bayesian Networks.
Microsoft Technical Report MSR-TR-95-06, 1995. p.35.
"""
super(GreedyLearner, self).__init__(data_, prior_)
self.options = options
config.setparams(self, options)
if not isinstance(self.seed, network.Network):
self.seed = network.Network(self.data.variables, self.seed)
def run(self):
"""Run the learner.
Returns a LearnerResult instance. Also sets self.result to that
instance.
"""
        # max_time and max_iterations are mutually exclusive stopping criteria
if 'max_time' not in self.options:
_stop = self._stop_after_iterations
else:
_stop = self._stop_after_time
self.stats = GreedyLearnerStatistics()
self.result = result.LearnerResult(self)
self.evaluator = evaluator.fromconfig(self.data, self.seed, self.prior)
self.evaluator.score_network(self.seed.copy())
first = True
self.result.start_run()
while not _stop():
self._run_without_restarts(_stop, self._restart,
randomize_net=(not first))
first = False
self.result.stop_run()
return self.result
def _run_without_restarts(self, _stop, _restart, randomize_net=True):
self.stats.restarts += 1
self.stats.unimproved_iterations = 0
if randomize_net:
self.evaluator.randomize_network()
# set the default best score
self.stats.best_score = self.evaluator.score_network()
# continue learning until time to stop or restart
while not (_restart() or _stop()):
self.stats.iterations += 1
try:
curscore = self._alter_network_randomly_and_score()
except CannotAlterNetworkException:
return
self.result.add_network(self.evaluator.network, curscore)
if curscore <= self.stats.best_score:
# score did not improve, undo network alteration
self.stats.unimproved_iterations += 1
self.evaluator.restore_network()
else:
self.stats.best_score = curscore
self.stats.unimproved_iterations = 0
#
# Stopping and restarting criteria
#
def _stop_after_time(self):
return self.stats.runtime >= self.max_time
def _stop_after_iterations(self):
return self.stats.iterations >= self.max_iterations
def _restart(self):
return self.stats.unimproved_iterations >= self.max_unimproved_iterations
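# Illustrative sketch (not part of pebl): a minimal greedy run. The data
# file name is a placeholder, and data.fromfile/result.tohtml are assumed
# to be the usual pebl entry points.
def _example_greedy_run():
    from pebl import data
    dataset = data.fromfile('data.txt')  # placeholder path
    learner = GreedyLearner(dataset, max_iterations=5000)
    result = learner.run()
    result.tohtml('example-result')  # writes an HTML report
    return result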
|
dmayle/YAMLTrak
|
yamltrak/commands.py
|
Python
|
gpl-3.0
| 11,746
| 0.004001
|
# Copyright 2009 Douglas Mayle
# This file is part of YAMLTrak.
# YAMLTrak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# YAMLTrak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with YAMLTrak. If not, see <http://www.gnu.org/licenses/>.
import os
import textwrap
from termcolor import colored
from yamltrak.argparse import ArgumentParser
from yamltrak import IssueDB, NoRepository, NoIssueDB
def guess_issue_id(issuedb):
    related = issuedb.related(detail=True)
    if not related:
        print colored('No linked issues found, please specify one.',
                      None, attrs=['reverse'])
        import sys
        sys.exit(1)
    if len(related) > 1:
print colored('Too many linked issues found, please specify one.', None, attrs=['reverse'])
for issueid in related:
print colored(textwrap.fill('Issue: %s' % issueid,
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
print colored(textwrap.fill(related[issueid].get('title', '').upper(),
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
import sys
sys.exit(1)
issueid = related.keys()[0]
# Prompt user?
print "Found only one issue."
print colored(textwrap.fill('Issue: %s' % issueid,
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
print colored(textwrap.fill(related[issueid].get('title', '').upper(),
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
verification = raw_input("Do you want to use this issue? (Y/[N]) ")
if verification.lower() in ['y', 'yes', 'yeah', 'oui', 'uh-huh', 'sure', 'why not?', 'meh']:
return issueid
    print 'Aborting'
import sys
sys.exit(1)
def unpack_new(issuedb, args):
# We should be able to avoid this somehow by using an object dictionary.
skeleton_new = issuedb.skeleton_new
issue = {}
for field in skeleton_new:
issue[field] = getattr(args, field, None)
if issue[field] is None:
issue[field] = skeleton_new[field]
newid = issuedb.new(issue=issue)
print 'Added new issue: %s' % newid
def unpack_list(issuedb, args):
issues = issuedb.issues(status=args.status)
for id, issue in issues.iteritems():
# Try to use color for clearer output
color = None
if 'high' in issue.get('priority',''):
color = 'red'
elif 'normal' in issue.get('priority',''):
pass
elif 'low' in issue.get('priority',''):
color = 'blue'
else:
color = 'red'
# We'll use status indicators on indent for estimate
        if 'long' in issue.get('estimate', {}).get('scale', '').lower():
            indent = '>>>>'
        elif 'medium' in issue.get('estimate', {}).get('scale', '').lower():
            indent = '> > '
        elif 'short' in issue.get('estimate', {}).get('scale', '').lower():
indent = '> '
else:
indent = '===='
print colored('Issue: %s' % id, color, attrs=['reverse'])
print colored(textwrap.fill(issue.get('title', '').upper(),
initial_indent=indent, subsequent_indent=indent), color, attrs=[])
# print colored(textwrap.fill(issue.get('description',''),
# initial_indent=indent, subsequent_indent=indent), color)
print colored(textwrap.fill(issue.get('estimate',{}).get('text',''),
initial_indent=indent, subsequent_indent=indent), color)
def unpack_edit(issuedb, args):
if not args.id:
args.id = guess_issue_id(issuedb)
skeleton = issuedb.skeleton
issue = issuedb.issue(id=args.id, detail=False)[0]['data']
newissue = {}
for field in skeleton:
newissue[field] = getattr(args, field, None) or issue.get(field, skeleton[field])
issuedb.edit(id=args.id, issue=newissue)
def unpack_show(issuedb, args):
if not args.id:
args.id = guess_issue_id(issuedb)
issuedata = issuedb.issue(id=args.id, detail=args.detail)
if not issuedata or not issuedata[0].get('data'):
print 'No such issue found'
return
issue = issuedata[0]['data']
print '\nIssue: %s' % args.id
if 'title' in issue:
print textwrap.fill(issue.get('title', '').upper(), initial_indent='', subsequent_indent='')
if 'description' in issue:
print textwrap.fill(issue['description'], initial_indent='', subsequent_indent='')
print ''
for field in sorted(issue.keys()):
if field in ['title', 'description']:
continue
print textwrap.fill('%s: %s' % (field.upper(), issue[field]), initial_indent='', subsequent_indent=' ')
if issue.get('diff'):
for changeset in issue['diff'][0].iteritems():
print 'Added: %s - %s' % (changeset[0].upper(), changeset[1])
for changeset in issue['diff'][1].iteritems():
print 'Removed: %s' % changeset[0].upper()
for changeset in issue['diff'][2].iteritems():
print 'Changed: %s - %s' % (changeset[0].upper(), changeset[1][1])
else:
# No uncommitted changes
pass
for version in issuedata[1:]:
print '\nChangeset: %s' % version['node']
print 'Committed by: %s on %s' % (version['user'], version['date'])
print 'Linked files:'
for filename in version['files']:
print ' %s' % filename
if version.get('diff'):
for changeset in version['diff'][0].iteritems():
print 'Added: %s - %s' % (changeset[0].upper(), changeset[1])
for changeset in version['diff'][1].iteritems():
print 'Removed: %s' % changeset[0].upper()
for changeset in version['diff'][2].iteritems():
print 'Changed: %s - %s' % (changeset[0].upper(), changeset[1][1])
def unpack_related(issuedb, args):
relatedissues = issuedb.related(filenames=args.files, detail=True)
for issueid, issue in relatedissues.iteritems():
print colored(textwrap.fill('Issue: %s' % issueid,
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
print colored(textwrap.fill(issue.get('title', '').upper(),
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
def unpack_dbinit(issuedb, args):
try:
issuedb = IssueDB(args.repository, dbinit=True)
except NoRepository:
# This means that there was no repository here.
print 'Unable to find a repository.'
import sys
sys.exit(1)
except NoIssueDB:
# Whoops
print 'Error initializing issued database'
import sys
sys.exit(1)
print 'Initialized issue database'
def unpack_close(issuedb, args):
if not args.id:
args.id = guess_issue_id(issuedb)
issuedb.close(args.id, args.comment)
def unpack_purge(issuedb, args):
pass
def unpack_burndown(issuedb, args):
pass
def main():
"""Parse the command line options and react to them."""
try:
issuedb = IssueDB(os.getcwd())
except NoRepository:
# This means that there was no repository here.
print 'Unable to find a repository.'
import sys
sys.exit(1)
except NoIssueDB:
# This means no issue database was found. We give the option to
# initialize one.
parser = ArgumentParser(prog='yt', description='YAMLTrak is a distributed version controlled issue tracker.')
subparsers = parser.add_subparsers(help=None, dest='command')
parser_dbinit = subparsers.add_parser('dbinit',
help="Initialize the issue database.")
parser_dbinit.set_defaults(func=unpack_dbinit)
args = parser.parse_args()
# We don't have a valid database, so we call with none.
args.repository = os.getcwd()
args.func(None, args)
        return
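# Illustrative sketch (not part of YAMLTrak): the unpack_* handlers above
# are dispatched through argparse subcommands; a minimal wiring for the
# 'list' command might look like this (flag names are assumptions).
def _example_wire_list_command(issuedb):
    parser = ArgumentParser(prog='yt')
    subparsers = parser.add_subparsers(dest='command')
    parser_list = subparsers.add_parser('list', help='List issues.')
    parser_list.add_argument('--status', default='open')
    parser_list.set_defaults(func=unpack_list)
    args = parser.parse_args(['list'])
    args.func(issuedb, args)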
|
dana-i2cat/felix
|
optin_manager/src/python/openflow/optin_manager/sfa/methods/Update.py
|
Python
|
apache-2.0
| 1,376
| 0.009448
|
from openflow.optin_manager.sfa.util.method import Method
from openflow.optin_manager.sfa.trust.credential import Credential
from openflow.optin_manager.sfa.util.parameter import Parameter
class Update(Method):
"""
Update an object in the registry. Currently, this only updates the
PLC information associated with the record. The SFA fields (name, type,
GID) are fixed.
@param cred credential string specifying rights of the caller
@param record a record dictionary to be updated
@return 1 if successful, faults otherwise
"""
interfaces = ['registry']
accepts = [
Parameter(dict, "Record dictionary to be updated"),
Parameter(str, "Credential string"),
]
    returns = Parameter(int, "1 if successful")
def call(self, record_dict, creds):
# validate the cred
valid_creds = self.api.auth.checkCredentials(creds, "update")
# verify permissions
hrn = record_dict.get('hrn', '')
self.api.auth.verify_object_permission(hrn)
# log
origin_hrn = Credential(string=valid_creds[0]).get_gid_caller().get_hrn()
        self.api.logger.info(
            "interface: %s\tcaller-hrn: %s\ttarget-hrn: %s\tmethod-name: %s"
            % (self.api.interface, origin_hrn, hrn, self.name))
return self.api.manager.Update(self.api, record_dict)
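# Illustrative sketch (not part of SFA): a registry client invoking the
# method above over XML-RPC. The endpoint URL, record fields and credential
# string are placeholders.
def _example_update_call(cred_string):
    import xmlrpclib
    registry = xmlrpclib.Server('https://registry.example.org:12345/')
    record = {'hrn': 'plc.site.slice', 'url': 'http://example.org'}
    return registry.Update(record, cred_string)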
|
scowcron/ImagesOfNetwork
|
images_of/entrypoints/discord_announce_bot.py
|
Python
|
mit
| 1,599
| 0.005629
|
import click
from images_of import command, settings, Reddit
from images_of.discord_announcer import DiscordBot, DiscordBotSettings
@command
@click.option('-G', '--no-github', is_flag=True, help='Do not process github events')
@click.option('-M', '--no-modlog', is_flag=True, help='Do not process network modlog events')
@click.option('-O', '--no-oc', is_flag=True, help='Do not process network for OC submissions')
@click.option('-I', '--no-inbox', is_flag=True, help='Do not process inbox for messages/replies')
@click.option('-F', '--no-falsepositives', is_flag=True, help='Do not announce false-positive reports')
@click.option('-r', '--run-interval', help='Number of minutes to process items', default=1)
@click.option('-s', '--stats-interval', help='Number of minutes to send stats info', default=15)
def main(no_github, no_modlog, no_oc, no_inbox, no_falsepositives, run_interval, stats_interval):
"""Discord Announcer Bot to relay specified information to designated Discord channels."""
reddit = Reddit('{} Discord Announcer v1.1 - /u/{}'
.format(settings.NETWORK_NAME, settings.USERNAME))
reddit.oauth()
discobot = DiscordBot(reddit)
    botsettings = DiscordBotSettings()
botsettings.DO_GITHUB = not no_github
    botsettings.DO_MODLOG = not no_modlog
botsettings.DO_OC = not no_oc
botsettings.DO_INBOX = not no_inbox
botsettings.DO_FALSEPOS = not no_falsepositives
botsettings.RUN_INTERVAL = run_interval
botsettings.STATS_INTERVAL = stats_interval
discobot.run(botsettings)
if __name__ == '__main__':
main()
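# Illustrative sketch (not part of the bot): assuming the @command
# decorator yields a standard click command, it can be smoke-tested
# in-process with click's CliRunner.
def _example_invoke():
    from click.testing import CliRunner
    runner = CliRunner()
    return runner.invoke(main, ['--no-github', '--run-interval', '2'])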
|
bebraw/speccer
|
speccer/__init__.py
|
Python
|
mit
| 86
| 0.011905
|
# -*- coding: utf-8 -*-
__author__ = 'Juho Vepsäläinen'
__version__ = '0.7.5-dev'
|
hugombarreto/credibility_allocation
|
allocators/optimization_allocation.py
|
Python
|
mit
| 3,025
| 0.000661
|
import numpy as np
import scipy.optimize as opt
import unittest
def allocate(a, c, initial_guess):
def sum_positives(x):
return sum(i for i in x if i > 0)
def sum_negatives(x):
return -sum(i for i in x if i < 0)
pareto_ref = min(sum_positives(a), sum_negatives(a))
    def sqr_sum(o, c):
aux = o + c
return aux.dot(aux)
def bound_n(x):
if x < 0:
return x, 0
else:
return x, x
def bound_p(x):
if x < 0:
return x, x
else:
return 0, x
bound = bound_p if sum_positives(a) > sum_negatives(a) else bound_n
def pareto(x):
return sum_positives(x) + sum_negatives(x) - 2 * pareto_ref
bounds = [bound(i) for i in a]
initial_guess[:] = a # ignore initial_guess using a as initial_guess
solution = opt.minimize(
lambda x: sqr_sum(x, c),
initial_guess,
method='SLSQP',
bounds=bounds,
constraints=({'type': 'eq', 'fun': lambda x: sum(x)},
{'type': 'eq', 'fun': pareto}),
options={'maxiter': 1000, 'disp': False}
)
solution = map(lambda x: 0 if np.isnan(x) else int(round(x)), solution.x)
initial_guess[:] = solution
class TestAllocate(unittest.TestCase):
def test_lack_resource_int_reputation(self): # sum a > 0
desire = [1, 3, 2, -1, -2, 1]
reputation = [-10, -3, -5, 2, 4, 12]
output = desire[:]
allocate(desire, reputation, output)
self.assertEqual(output, [1, 0, 2, -1, -2, 0])
self.assertEqual(desire, [1, 3, 2, -1, -2, 1])
self.assertEqual(reputation, [-10, -3, -5, 2, 4, 12])
desire = [-1, 3, -2, 1, 2, -1]
reputation = [10, 3, 5, -2, -4, -12]
output = desire[:]
allocate(desire, reputation, output)
self.assertEqual(output, [-1, 1, -2, 1, 2, -1])
desire = [-1, 3, -2, 3, 3, -1]
reputation = [10, 3, 7, -4, -4, -12]
output = desire[:]
allocate(desire, reputation, output)
        self.assertEqual(output, [-1, 0, -2, 2, 2, -1])
def test_lack_resources_float_reputation(self):
desire = [1, 3, 2, 2, -3, 1, -5, 3, 0]
reputation = [-6.2, -3.1, -3.1, -2.2, 8.6, 12.2, -4.3, 6.0, -7.9]
output = desire[:]
allocate(desire, reputation, output)
self.assertEqual(output, [1, 3, 2, 2, -3, 0, -5, 0, 0])
def test_lack_consumer(self): # sum a < 0
desire = [1, -3, 2, -1, -2, 1]
        reputation = [-10, -3, -5, 2, 4, 12]
output = desire[:]
allocate(desire, reputation, output)
self.assertEqual(output, [1, -1, 2, -1, -2, 1])
def test_efficient(self): # sum a = 0
desire = [1, -3, 2, -1, -2, 3]
reputation = [-10, -3, -5, 2, 4, 12]
output = desire[:]
allocate(desire, reputation, output)
self.assertEqual(output, [1, -3, 2, -1, -2, 3])
if __name__ == "__main__":
unittest.main()
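# Illustrative sketch (not part of the module): calling allocate() directly.
# `output` is modified in place, mirroring the first test case above.
def _example_allocation():
    desire = [1, 3, 2, -1, -2, 1]
    reputation = [-10, -3, -5, 2, 4, 12]
    output = desire[:]
    allocate(desire, reputation, output)
    return output  # expected [1, 0, 2, -1, -2, 0] per the first test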
|
AgusRumayor/pypriorapi
|
order.py
|
Python
|
gpl-3.0
| 6,154
| 0.039812
|
import falcon
import msgpack
import json
from btree import BinaryTree
import ZODB, ZODB.FileStorage
import transaction
from persistent import Persistent
import uuid
import urllib
import btree
from pprint import pprint
class Collection (object):
def on_post(self, req, resp):
# req.stream corresponds to the WSGI wsgi.input environ variable,
# and allows you to read bytes from the request body.
#
# See also: PEP 3333
if req.content_length in (None, 0):
# Nothing to do
print "nothin"
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
pprint(body)
req.context['doc'] = json.loads(body.decode('utf-8'))
token = str(uuid.uuid4())
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
unordered_list = req.context['doc']['data']
root.tree = BinaryTree(unordered_list.pop())
tree = root.tree
tree.unordered_list = unordered_list
#tree.setList()
            if len(unordered_list) < 2:
                raise falcon.HTTPBadRequest('Insufficient data', 'We need more than 2 data elements')
except (ValueError, UnicodeDecodeError):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
tree.current = tree
tree.treeroot = tree.current
tree.next = tree.unordered_list.pop()
tree.ordered = False
tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"self":"/order/"},
{'order':'/order/%s'%(urllib.quote(token))},
{'lt':'/order/%s/%s/%s'%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{'gt':'/order/%s/%s/%s'%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)
def on_get(self, req, resp, token):
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
if hasattr(root, 'tree'):
tree = root.tree
else:
resp.body = "Initialize first"
connection.close()
db.close()
storage.close()
return
lst = list(btree.inorder(tree))
tree.jresp = {'data':lst, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"new":"/order/"},
{"self":"/order/%s"%(urllib.quote(token))},
{"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)
def on_put(self, req, resp, token):
if req.content_length in (None, 0):
# Nothing to do
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
req.context['doc'] = json.loads(body.decode('utf-8'))
left = req.context['doc']['left']
right = req.context['doc']['right']
except (ValueError, UnicodeDecodeError):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
if hasattr(root, 'tree'):
tree = root.tree
else:
resp.body = "Initialize first"
connection.close()
db.close()
storage.close()
return
if tree.next not in [left, right]:
resp.body = json.dumps(tree.jresp)
connection.close()
db.close()
storage.close()
return
if left == tree.current.getNodeValue():
if tree.current.getRightChild() == None:
tree.current.insertRight(right)
tree.current = tree.treeroot
if len(tree.unordered_list)>0:
tree.next = tree.unordered_list.pop()
else:
tree.ordered = True
tree.next = "None"
else:
tree.current = tree.current.getRightChild()
elif right == tree.current.getNodeValue():
if tree.current.getLeftChild()== None:
tree.current.insertLeft(left)
tree.current = tree.treeroot
if len(tree.unordered_list)>0:
tree.next = tree.unordered_list.pop()
else:
tree.ordered = True
tree.next = "None"
else:
tree.current = tree.current.getLeftChild()
tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"new":"/order/"},
                      {"order":"/order/%s"%(urllib.quote(token))},
{"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)
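# Illustrative sketch (not part of the API): mounting the resource on a
# falcon application. The route templates mirror the 'links' entries built
# above; how the app is served is left to the deployment.
def _example_app():
    app = falcon.API()
    collection = Collection()
    app.add_route('/order/', collection)
    app.add_route('/order/{token}', collection)
    return app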
|
oaraque/sentiment-analysis
|
pattern/run.py
|
Python
|
mit
| 639
| 0.001565
|
import csv
import sys
import codecs
from pattern.en import sentiment
input_, output_ = str(sys.argv[1]), str(sys.argv[2])
with codecs.open(output_, 'w') as fout:
writer = csv.writer(fout, delimiter='\t')
    with codecs.open(input_, 'r') as fin:
        for l_i, line in enumerate(fin):
line = line.strip()
result = sentiment(line)[0]
prediction = None
if result > 0:
prediction = 1
elif result < 0:
prediction = -1
elif result == 0:
prediction = 0
writer.writerow([l_i, prediction])
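# Illustrative sketch (not part of the script): pattern.en.sentiment returns
# a (polarity, subjectivity) pair, and the loop above thresholds polarity
# into {-1, 0, 1}.
def _example_polarity(text):
    polarity, subjectivity = sentiment(text)
    return 1 if polarity > 0 else (-1 if polarity < 0 else 0)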
|
dbbhattacharya/kitsune
|
vendor/packages/sqlalchemy/lib/sqlalchemy/sql/expression.py
|
Python
|
bsd-3-clause
| 152,008
| 0.003592
|
# expression.py
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Defines the base components of SQL expression trees.
All components are derived from a common base class
:class:`ClauseElement`. Common behaviors are organized
based on class hierarchies, in some cases via mixins.
All object construction from this package occurs via functions which
in some cases will construct composite :class:`ClauseElement` structures
together, and in other cases simply return a single :class:`ClauseElement`
constructed directly. The function interface affords a more "DSL-ish"
feel to constructing SQL expressions and also allows future class
reorganizations.
Even though classes are not constructed directly from the outside,
most classes which have additional public methods are considered to be
public (i.e. have no leading underscore). Other classes which are
"semi-public" are marked with a single leading underscore; these
classes usually have few or no public methods and are less guaranteed
to stay the same in future releases.
"""
import itertools, re
from operator import attrgetter
from sqlalchemy import util, exc #, types as sqltypes
from sqlalchemy.sql import operators
from sqlalchemy.sql.visitors import Visitable, cloned_traverse
import operator
functions, sql_util, sqltypes = None, None, None
DefaultDialect = None
__all__ = [
'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
'literal', 'literal_column', 'not_', 'null', 'or_', 'outparam',
'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'union',
'union_all', 'update', ]
PARSE_AUTOCOMMIT = util._symbol('PARSE_AUTOCOMMIT')
def desc(column):
"""Return a descending ``ORDER BY`` clause e
|
lement.
e.g.::
order_by = [desc(table1.mycol)]
"""
return _UnaryExpression(column, modifier=operators.desc_op)
def asc(column):
"""Return an ascending ``ORDER BY`` clause element.
e.g.::
order_by = [asc(table1.mycol)]
"""
return _UnaryExpression(column, modifier=operators.asc_op)
def outerjoin(left, right, onclause=None):
"""Return an ``OUTER JOIN`` clause element.
The returned object is an instance of :class:`Join`.
Similar functionality is also available via the :func:`outerjoin()`
method on any :class:`FromClause`.
left
The left side of the join.
right
The right side of the join.
onclause
Optional criterion for the ``ON`` clause, is derived from
foreign key relationships established between left and right
otherwise.
To chain joins together, use the :func:`join()` or :func:`outerjoin()`
methods on the resulting :class:`Join` object.
"""
return Join(left, right, onclause, isouter=True)
def join(left, right, onclause=None, isouter=False):
"""Return a ``JOIN`` clause element (regular inner join).
The returned object is an instance of :class:`Join`.
Similar functionality is also available via the :func:`join()` method
on any :class:`FromClause`.
left
The left side of the join.
right
The right side of the join.
onclause
Optional criterion for the ``ON`` clause, is derived from
foreign key relationships established between left and right
otherwise.
To chain joins together, use the :func:`join()` or :func:`outerjoin()`
methods on the resulting :class:`Join` object.
"""
return Join(left, right, onclause, isouter)
def select(columns=None, whereclause=None, from_obj=[], **kwargs):
"""Returns a ``SELECT`` clause element.
Similar functionality is also available via the :func:`select()`
method on any :class:`FromClause`.
The returned object is an instance of :class:`Select`.
All arguments which accept :class:`ClauseElement` arguments also accept
string arguments, which will be converted as appropriate into
either :func:`text()` or :func:`literal_column()` constructs.
:param columns:
A list of :class:`ClauseElement` objects, typically
:class:`ColumnElement` objects or subclasses, which will form the
columns clause of the resulting statement. For all members which are
instances of :class:`Selectable`, the individual :class:`ColumnElement`
members of the :class:`Selectable` will be added individually to the
columns clause. For example, specifying a
:class:`~sqlalchemy.schema.Table` instance will result in all the
contained :class:`~sqlalchemy.schema.Column` objects within to be added
to the columns clause.
This argument is not present on the form of :func:`select()`
available on :class:`~sqlalchemy.schema.Table`.
:param whereclause:
A :class:`ClauseElement` expression which will be used to form the
``WHERE`` clause.
:param from_obj:
A list of :class:`ClauseElement` objects which will be added to the
``FROM`` clause of the resulting statement. Note that "from" objects are
automatically located within the columns and whereclause ClauseElements.
Use this parameter to explicitly specify "from" objects which are not
automatically locatable. This could include
:class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
      or :class:`Join` objects whose presence will supersede that of the
:class:`~sqlalchemy.schema.Table` objects already located in the other
clauses.
:param autocommit:
Deprecated. Use .execution_options(autocommit=<True|False>)
to set the autocommit option.
:param prefixes:
a list of strings or :class:`ClauseElement` objects to include
directly after the SELECT keyword in the generated statement,
for dialect-specific query features.
:param distinct=False:
when ``True``, applies a ``DISTINCT`` qualifier to the columns
clause of the resulting statement.
:param use_labels=False:
when ``True``, the statement will be generated using labels
for each column in the columns clause, which qualify each
column with its parent table's (or aliases) name so that name
conflicts between columns in different tables don't occur.
The format of the label is <tablename>_<column>. The "c"
collection of the resulting :class:`Select` object will use these
names as well for targeting column members.
:param for_update=False:
when ``True``, applies ``FOR UPDATE`` to the end of the
resulting statement. Certain database dialects also support
alternate values for this parameter, for example mysql
supports "read" which translates to ``LOCK IN SHARE MODE``,
and oracle supports "nowait" which translates to ``FOR UPDATE
NOWAIT``.
:param correlate=True:
indicates that this :class:`Select` object should have its
contained :class:`FromClause` elements "correlated" to an enclosing
:class:`Select` object. This means that any :class:`ClauseElement`
instance within the "froms" collection of this :class:`Select`
which is also present in the "froms" collection of an
enclosing select will not be rendered in the ``FROM`` clause
of this select statement.
:param group_by:
a list of :class:`ClauseElement` objects which will comprise the
``GROUP BY`` clause of the resulting select.
:param having:
a :class:`ClauseElement` that will comprise the ``HAVING`` clause
of the resulting select when ``GROUP BY`` is used.
:param order_by:
a scalar or list of :class:`ClauseElement` objects which will
      comprise the ``ORDER BY`` clause of the resulting select.
    """
    return Select(columns, whereclause=whereclause, from_obj=from_obj,
                  **kwargs)
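# Illustrative sketch (not part of this module): composing the constructs
# documented above. The table and column definitions are placeholders.
def _example_select():
    from sqlalchemy import Table, Column, Integer, MetaData
    meta = MetaData()
    users = Table('users', meta, Column('id', Integer, primary_key=True))
    addresses = Table('addresses', meta,
                      Column('id', Integer, primary_key=True),
                      Column('user_id', Integer))
    return select(
        [users],
        users.c.id == 1,
        from_obj=[outerjoin(users, addresses,
                            users.c.id == addresses.c.user_id)])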
|
AllieDeford/radremedy
|
remedy/admin_views/reviewview.py
|
Python
|
bsd-3-clause
| 4,932
| 0.00588
|
"""
reviewview.py
Contains administrative views for working with reviews.
"""
from admin_helpers import *
from flask import flash
from flask.ext.admin.actions import action
from flask.ext.admin.contrib.sqla import ModelView
from wtforms import IntegerField, validators
import remedy.rad.reviewservice
from remedy.rad.models import Review
class ReviewView(AdminAuthMixin, ModelView):
"""
An administrative view for working with resource reviews.
"""
# Disable model creation
can_create = False
column_select_related_list = (Review.resource, Review.user)
column_default_sort = (Review.date_created, True)
column_sortable_list = ('composite_rating', 'visible', ('date_created', Review.date_created))
column_list = ('composite_rating', 'resource.name', 'user.username',
'visible', 'date_created')
column_labels = {
'composite_rating': 'Comp. Rating',
'rating': 'Provider Rating',
'staff_rating': 'Staff Rating',
'intake_rating': 'Intake Rating',
'resource.name': 'Resource',
'user.username': 'User',
'visible': 'Visible',
'date_created': 'Date Created',
'ip': 'IP'
}
column_descriptions = dict(composite_rating='The average of the rating fields.')
column_searchable_list = ('text',)
column_filters = ('visible','composite_rating','rating','staff_rating',
'intake_rating','ip',)
form_excluded_columns = ('date_created','is_old_review','old_reviews',
'new_review_id','new_review', 'composite_rating')
# Mark fields visible but read-only. If we use
# "disabled" this ends up clearing out the value.
form_widget_args = {
'ip': {
'readonly': 'readonly'
}
}
def scaffold_form(self):
"""
        Sets up the review form to ensure that the rating fields
        behave on a 1-5 scale.
"""
form_class = super(ReviewView, self).scaffold_form()
form_class.rating = IntegerField('Provider Rating', validators=[
validators.Optional(),
validators.NumberRange(min=1, max=5)
])
form_class.staff_rating = IntegerField('Staff Rating', validators=[
validators.Optional(),
validators.NumberRange(min=1, max=5)
])
form_class.intake_rating = IntegerField('Intake Rating', validators=[
validators.Optional(),
validators.NumberRange(min=1, max=5)
])
return form_class
def delete_model(self, model):
"""
Deletes the specified review.
Args:
model: The review to delete.
"""
try:
remedy.rad.reviewservice.delete(self.session, model)
flash('Review deleted.')
return True
except Exception as ex:
if not super(ReviewView, self).handle_view_exception(ex):
flash(gettext('Failed to delete model. %(error)s', error=str(ex)), 'error')
log.exception('Failed to delete model')
self.session.rollback()
return False
@action('togglevisible',
'Toggle Visibility',
'Are you sure you wish to toggle visibility for the selected reviews?')
def action_togglevisible(self, ids):
"""
Attempts to toggle visibility for each of the specified reviews.
Args:
ids: The list of review IDs, indicating which reviews
should have their visibility toggled.
"""
# Load all reviews by the set of IDs
target_reviews = self.get_query().filter(self.model.id.in_(ids)).all()
# Build a list of all the results
results = []
if len(target_reviews) > 0:
for review in target_reviews:
# Build a helpful string to use for messages.
review_str = 'review #' + str(review.id) + ' (' + review.resource.name + \
' by ' + review.user.username + ')'
visible_status = ''
try:
if not review.visible:
review.visible = True
visible_status = ' as visible'
else:
review.visible = False
visible_status = ' as not visible'
except Exception as ex:
results.append('Error changing ' + review_str + ': ' + str(ex))
else:
results.append('Marked ' + review_str + visible_status + '.')
# Save our changes.
self.session.commit()
else:
results.append('No reviews were selected.')
# Flash the results of everything
flash("\n".join(msg for msg in results))
def __init__(self, session, **kwargs):
super(ReviewView, self).__init__(Review, session, **kwargs)
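# Illustrative sketch (not part of the view): registering ReviewView with a
# Flask-Admin instance. The `app` and `db` objects are placeholders for the
# application's own instances.
def _example_register_view(app, db):
    from flask.ext.admin import Admin
    admin = Admin(app, name='Remedy Admin')
    admin.add_view(ReviewView(db.session, name='Reviews'))
    return admin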
|
jlaine/django-timegraph
|
timegraph/tests/test_timegraph.py
|
Python
|
bsd-2-clause
| 13,243
| 0.003248
|
# -*- coding: utf-8 -*-
#
# django-timegraph - monitoring graphs for django
# Copyright (c) 2011-2012, Wifirst
# Copyright (c) 2013, Jeremy Lainé
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import shutil
import tempfile
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
import timegraph
from timegraph.models import format_value, Graph, Metric
def setup_test_environment():
timegraph.original_rrd_root = settings.TIMEGRAPH_RRD_ROOT
settings.TIMEGRAPH_RRD_ROOT = tempfile.mkdtemp()
def teardown_test_environment():
shutil.rmtree(settings.TIMEGRAPH_RRD_ROOT)
settings.TIMEGRAPH_RRD_ROOT = timegraph.original_rrd_root
del timegraph.original_rrd_root
class TestFormat(TestCase):
def test_format_none(self):
self.assertEquals(format_value(None, 'b'), '')
self.assertEquals(format_value(None, ''), '')
self.assertEquals(format_value('', 'b'), '')
self.assertEquals(format_value('', ''), '')
self.assertEquals(format_value(object(), 'b'), '')
self.assertEquals(format_value(object(), ''), '')
def test_format_byte(self):
self.assertEquals(format_value(0, 'b'), '0 b')
self.assertEquals(format_value(1, 'b'), '1 b')
self.assertEquals(format_value(1024, 'b'), '1.0 kib')
self.assertEquals(format_value(2048, 'b'), '2.0 kib')
self.assertEquals(format_value(1048576, 'b'), '1.0 Mib')
self.assertEquals(format_value(1073741824, 'b'), '1.0 Gib')
self.assertEquals(format_value(1099511627776, 'b'), '1.0 Tib')
self.assertEquals(format_value(1125899906842624, 'b'), '1.0 Pib')
self.assertEquals(format_value(1152921504606846976, 'b'), '1.0 Eib')
# FIXME: there seems to be a rounding error
#self.assertEquals(format_value(1180591620717411303424, 'b'), '1.0 Zib')
#self.assertEquals(format_value(1208925819614629174706176, 'b'), '1.0 Yib')
self.assertEquals(format_value(1237940039285380274899124224, 'b'), '1024.0 Yib')
def test_format_int(self):
self.assertEquals(format_value(0, 's'), '0 s')
self.assertEquals(format_value(1, 's'), '1 s')
self.assertEquals(format_value(10, 's'), '10 s')
self.assertEquals(format_value(100, 's'), '100 s')
self.assertEquals(format_value(1000, 's'), '1.0 ks')
self.assertEquals(format_value(10000, 's'), '10.0 ks')
self.assertEquals(format_value(100000, 's'), '100.0 ks')
self.assertEquals(format_value(1000000, 's'), '1.0 Ms')
self.assertEquals(format_value(10000000, 's'), '10.0 Ms')
self.assertEquals(format_value(100000000, 's'), '100.0 Ms')
self.assertEquals(format_value(1000000000, 's'), '1.0 Gs')
self.assertEquals(format_value(10000000000, 's'), '10.0 Gs')
self.assertEquals(format_value(100000000000, 's'), '100.0 Gs')
self.assertEquals(format_value(1000000000000, 's'), '1.0 Ts')
self.assertEquals(format_value(10000000000000, 's'), '10.0 Ts')
self.assertEquals(format_value(100000000000000, 's'), '100.0 Ts')
self.assertEquals(format_value(1000000000000000, 's'), '1.0 Ps')
self.assertEquals(format_value(10000000000000000, 's'), '10.0 Ps')
self.assertEquals(format_value(100000000000000000, 's'), '100.0 Ps')
self.assertEquals(format_value(1000000000000000000, 's'), '1.0 Es')
self.assertEquals(format_value(10000000000000000000, 's'), '10.0 Es')
self.assertEquals(format_value(100000000000000000000, 's'), '100.0 Es')
self.assertEquals(format_value(1000000000000000000000, 's'), '1.0 Zs')
self.assertEquals(format_value(10000000000000000000000, 's'), '10.0 Zs')
self.assertEquals(format_value(1000000000000000000000000, 's'), '1.0 Ys')
self.assertEquals(format_value(10000000000000000000000000, 's'), '10.0 Ys')
self.assertEquals(format_value(100000000000000000000000000, 's'), '100.0 Ys')
self.assertEquals(format_value(1000000000000000000000000000, 's'), '1000.0 Ys')
self.assertEquals(format_value(10000000000000000000000000000, 's'), '10000.0 Ys')
def test_format_float(self):
self.assertEquals(format_value(0.0, 's'), '0.0 s')
self.assertEquals(format_value(0.00000000000000000000000001, 's'), u'0.0 ys')
self.assertEquals(format_value(0.0000000000000000000000001, 's'), u'0.1 ys')
self.assertEquals(format_value(0.000000000000000000000001, 's'), u'1.0 ys')
self.assertEquals(format_value(0.00000000000000000000001, 's'), u'10.0 ys')
self.assertEquals(format_value(0.0000000000000000000001, 's'), u'100.0 ys')
self.assertEquals(format_value(0.000000000000000000001, 's'), u'1.0 zs')
self.assertEquals(format_value(0.00000000000000000001, 's'), u'10.0 zs')
self.assertEquals(format_value(0.0000000000000000001, 's'), u'100.0 zs')
self.assertEquals(format_value(0.000000000000000001, 's'), u'1.0 as')
self.assertEquals(format_value(0.00000000000000001, 's'), u'10.0 as')
self.assertEquals(format_value(0.0000000000000001, 's'), u'100.0 as')
        self.assertEquals(format_value(0.000000000000001, 's'), u'1.0 fs')
self.assertEquals(format_value(0.00000000000001, 's'), u'10.0 fs')
self.assertEquals(format_value(0.0000000000001, 's'), u'100.0 fs')
        self.assertEquals(format_value(0.000000000001, 's'), u'1.0 ps')
self.assertEquals(format_value(0.00000000001, 's'), u'10.0 ps')
self.assertEquals(format_value(0.0000000001, 's'), u'100.0 ps')
self.assertEquals(format_value(0.000000001, 's'), u'1.0 ns')
self.assertEquals(format_value(0.00000001, 's'), u'10.0 ns')
self.assertEquals(format_value(0.0000001, 's'), u'100.0 ns')
self.assertEquals(format_value(0.000001, 's'), u'1.0 µs')
self.assertEquals(format_value(0.00001, 's'), u'10.0 µs')
self.assertEquals(format_value(0.0001, 's'), u'100.0 µs')
self.assertEquals(format_value(0.001, 's'), '1.0 ms')
self.assertEquals(format_value(0.01, 's'), '10.0 ms')
self.assertEquals(format_value(0.1, 's'), '100.0 ms')
self.assertEquals(format_value(1.0, 's'), '1.0 s')
self.assertEquals(format_value(10.0, 's'), '10.0 s')
self.assertEquals(format_value(100.0, 's'), '100.0 s')
self.assertEquals(format_value(1000.0, 's'), '1.0 ks')
self.assertEquals(format_value(10000.0, 's'), '10.0 ks')
self.assertEquals(format_value(100000.0, 's'), '100.0 ks')
self.assertEquals(format_value(1000000.0, 's'), '1.0 Ms')
self.assertEquals(format_value(10000000.0, 's'), '10.0 Ms')
self.assertEquals(format_value(100000000.0, 's'), '100.0 Ms')
        self.assertEquals(format_value(1000000000.0, 's'), '1.0 Gs')
|
voriux/Flexget
|
flexget/plugins/plugin_change_warn.py
|
Python
|
mit
| 3,389
| 0.002951
|
from __future__ import unicode_literals, division, absolute_import
import logging
import sys
import os
from flexget import plugin
from flexget.event import event
log = logging.getLogger('change')
found_deprecated = False
class ChangeWarn(object):
"""
Gives warning if user has deprecated / changed configuration in the root level.
Will be replaced by root level validation in the future!
Contains ugly hacks, better to include all deprecation warnings here during 1.0 BETA phase
"""
def on_task_start(self, task, config):
global found_deprecated
if 'torrent_size' in task.config:
log.critical('Plugin torrent_size is deprecated, use content_size instead')
found_deprecated = True
if 'nzb_size' in task.config:
log.critical('Plugin nzb_size is deprecated, use content_size instead')
found_deprecated = True
if found_deprecated:
task.manager.scheduler.shutdown(finish_queue=False)
task.abort('Deprecated config.')
@event('plugin.register')
def register_plugin():
plugin.register(ChangeWarn, 'change_warn', builtin=True, api_ver=2)
# check that no old plugins are in pre-compiled form (pyc)
try:
import os.path
plugin_dirs = (os.path.normpath(sys.path[0] + '/../flexget/plugins/'),
os.path.normpath(sys.path[0] + '/../flexget/plugins/input/'))
for plugin_dir in plugin_dirs:
for name in os.listdir(plugin_dir):
require_clean = False
if name.startswith('module'):
require_clean = True
if name == 'csv.pyc':
            require_clean = True
if 'resolver' in name:
require_clean = True
if 'filter_torrent_size' in name:
require_clean = True
if 'filter_nzb_size' in name:
require_clean = True
if 'module_priority' in name:
require_clean = True
if 'ignore_feed' in name:
            require_clean = True
if 'module_manual' in name:
require_clean = True
if 'output_exec' in name:
require_clean = True
if 'plugin_adv_exec' in name:
require_clean = True
if 'output_transmissionrpc' in name:
require_clean = True
if require_clean:
log.critical('-' * 79)
log.critical('IMPORTANT: Your installation has some files from older FlexGet!')
log.critical('')
log.critical(' Please remove all pre-compiled .pyc and .pyo files from %s' % plugin_dir)
log.critical(' Offending file: %s' % name)
log.critical('')
log.critical(' After getting rid of these FlexGet should run again normally')
from flexget import __version__ as version
if version == '{git}':
log.critical('')
log.critical(' If you are using bootstrapped git checkout you can run:')
log.critical(' bin/paver clean_compiled')
log.critical('')
log.critical('-' * 79)
found_deprecated = True
break
except:
pass
|
dbbhattacharya/kitsune
|
vendor/packages/logilab-common/db.py
|
Python
|
bsd-3-clause
| 2,037
| 0.003436
|
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Wrappers to get actually replaceable DBAPI2 compliant modules and
database connection whatever the database and client lib used.
Currently support:
- postgresql (pgdb, psycopg, psycopg2, pyPgSQL)
- mysql (MySQLdb)
- sqlite (pysqlite2, sqlite, sqlite3)
just use the `get_connection` function from this module to get a
wrapped connection. If multiple drivers for a database are available,
you can control which one you want to use using the
`set_prefered_driver` function.
Additional helpers are also provided for advanced functionalities such
as listing existing users or databases, creating database... Get the
helper for your database using the `get_adv_func_helper` function.
"""
__docformat__ = "restructuredtext en"
from warnings import warn
warn('this module is deprecated, use logilab.database instead',
     DeprecationWarning, stacklevel=1)
from logilab.database import (get_connection, set_prefered_driver,
get_dbapi_compliant_module as _gdcm,
get_db_helper as _gdh)
def get_dbapi_compliant_module(driver, *args, **kwargs):
module = _gdcm(driver, *args, **kwargs)
module.adv_func_helper = _gdh(driver)
return module
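A minimal usage sketch for these wrappers; the driver family and preferred-driver names below are assumptions, and additional connection keywords (host, database, user...) are omitted since the real signatures live in logilab.database:
from logilab.common.db import (get_connection, set_prefered_driver,
                               get_dbapi_compliant_module)

set_prefered_driver('postgres', 'psycopg2')      # driver names are assumptions
module = get_dbapi_compliant_module('postgres')  # DBAPI2-compliant module
helper = module.adv_func_helper                  # attached by the wrapper above
cnx = get_connection('postgres')                 # DSN kwargs omitted in this sketch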
|
xavierdutreilh/robots.midgar.fr
|
services/crawler/tests/conftest.py
|
Python
|
mit
| 564
| 0.001773
|
from concurrent.futures import ThreadPoolExecutor
import grpc
import pytest
from crawler.services import Library
import crawler_pb2_grpc
@pytest.fixture(name='grpc_client', scope='session', autouse=True)
def setup_grpc_client():
server = grpc.server(ThreadPoolExecutor(max_workers=4))
    crawler_pb2_grpc.add_LibraryServicer_to_server(Library(), server)
port = server.add_insecure_port('[::]:0')
server.start()
with grpc.insecure_channel(f'localhost:{port}') as channel:
yield crawler_pb2_grpc.LibraryStub(channel)
server.stop(0)
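Tests then receive the stub simply by naming the fixture; the RPC and message types in this sketch are hypothetical, since the crawler .proto is not shown:
import crawler_pb2  # generated messages, assumed to accompany crawler_pb2_grpc

def test_library_answers(grpc_client):
    # 'Fetch' and 'FetchRequest' are placeholder names for a real RPC/message
    response = grpc_client.Fetch(crawler_pb2.FetchRequest(url='http://example.com'))
    assert response is not None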
|
josh-perry/NDSHomebrewDownloader
|
main.py
|
Python
|
mit
| 3,662
| 0.001638
|
__author__ = "Josh Perry"
import os
import shutil
import urllib2
import zipfile
import rarfile
from bs4 import BeautifulSoup
dumping_directory = r"W:\Plugin repo\dump"
def save_file(url, filename):
filename = "out/" + filename
# Check if we already have it
    if os.path.isfile(filename + ".zip") or os.path.isfile(filename + ".rar") or os.path.isfile(filename + ".nds"):
print "We already have " + filename + ", skipping"
return
print("Downloading... " + filename)
try:
f = open(filename, 'wb')
f.write(urllib2.urlopen(url).read())
f.close()
if zipfile.is_zipfile(filename):
print(filename + " is a zip file")
shutil.move(filename, filename + ".zip")
elif rarfile.is_rarfile(filename):
print(filename + " is a rar file")
shutil.move(filename, filename + ".rar")
else:
print(filename + " is an nds file")
shutil.move(filename, filename + ".nds")
except urllib2.URLError:
print "Failed to download: " + filename
def nds_homebrew_hive():
base_url = "http://www.ndshb.com"
# Apps #
page_template = "http://www.ndshb.com/index.php/component/jdownloads/viewcategory/3-apps?start={0}"
os.mkdir("out")
os.mkdir("out/ndshb_apps")
for i in range(0, 7):
page = page_template.format(i * 10)
f = urllib2.urlopen(page)
soup = BeautifulSoup(f.read(), "html.parser")
for link in soup.find_all(class_="jd_download_url"):
url = link["href"]
filename = url.split('/')[-1].split("?")[0]
save_file(base_url + url, "ndshb_apps/" + filename)
# Games #
page_template = "http://www.ndshb.com/index.php/component/jdownloads/viewcategory/4-games?start={0}"
if not os.path.isdir("
|
out/ndshb_games"):
os.mkdir("out/ndshb_games")
for i in range(0, 10):
page = page_template.format(i * 10)
        f = urllib2.urlopen(page)
soup = BeautifulSoup(f.read(), "html.parser")
for link in soup.find_all(class_="jd_download_url"):
url = link["href"]
filename = url.split('/')[-1].split("?")[0]
save_file(base_url + url, "ndshb_games/" + filename)
def process_files(directory):
for root, directories, files in os.walk(directory):
for f in files:
try:
original = os.path.join(root, f)
output = os.path.join(dumping_directory, f)
# Extract zip files
if f.endswith(".zip"):
with zipfile.ZipFile(original, "r") as z:
os.mkdir(output[:-3])
z.extractall(output[:-3])
# Extract rar files
elif f.endswith(".rar"):
with rarfile.RarFile(original, "r") as z:
os.mkdir(output[:-3])
z.extractall(output[:-3])
# Just copy nds files
elif f.endswith(".nds"):
os.mkdir(output[:-3])
shutil.copy(original, os.path.join(output[:-3], f))
except (zipfile.BadZipfile, rarfile.BadRarFile):
print "Bad archive: " + f
continue
except Exception as e:
print e
continue
print "Processed " + f
        # no manual recursion needed: os.walk already visits every subdirectory
def main():
nds_homebrew_hive()
if not os.path.isdir(dumping_directory):
os.mkdir(dumping_directory)
process_files("out")
if __name__ == "__main__":
main()
|
Rivares/MyBlog
|
blog/urls.py
|
Python
|
apache-2.0
| 446
| 0.002242
|
__author__ = 'Conscience'
from django.conf.urls import url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^$', views.post_list, name='post_list'),
url(r'^post/(?P<pk>[0-9]+)/$', views.post_detail, name='post_detail'),
url(r'^post/$', views.post_list, name='post_list'),
url(r'^post/new/$', views.post_new, name='post_new'),
    url(r'^post/(?P<pk>[0-9]+)/edit/$', views.post_edit, name='post_edit'),
]
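Because every pattern is named, URLs can be built with reverse() instead of being hard-coded; a small sketch (the pk value is arbitrary):
from django.urls import reverse  # django.core.urlresolvers.reverse on older Django

detail_url = reverse('post_detail', kwargs={'pk': 42})  # -> '/post/42/'
edit_url = reverse('post_edit', args=[42])              # -> '/post/42/edit/'
new_url = reverse('post_new')                           # -> '/post/new/'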
|
lcoandrade/DsgTools
|
gui/CustomWidgets/BasicInterfaceWidgets/buttonPropWidget.py
|
Python
|
gpl-2.0
| 27,350
| 0.00106
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2017-08-24
git sha : $Format:%H$
copyright : (C) 2017 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : borba.philipe@eb.mil.br
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from functools import partial
from qgis.core import Qgis
from qgis.utils import iface
from qgis.PyQt import uic
from qgis.PyQt.QtGui import QIcon, QColor, QKeySequence
from qgis.PyQt.QtCore import Qt, QSize, pyqtSlot, pyqtSignal, QSettings
from qgis.PyQt.QtWidgets import (QWidget,
QSpinBox,
QLineEdit,
QCheckBox,
QComboBox,
QPushButton,
QHBoxLayout,
QMessageBox,
QDoubleSpinBox)
from DsgTools.core.Utils.utils import Utils, MessageRaiser
from DsgTools.core.GeometricTools.layerHandler import LayerHandler
from DsgTools.gui.ProductionTools.Toolboxes.CustomFeatureToolBox.customButtonSetup import CustomButtonSetup, CustomFeatureButton
FORM_CLASS, _ = uic.loadUiType(os.path.join(
os.path.dirname(__file__), 'buttonPropWidget.ui'))
utils = Utils()
class ButtonPropWidget(QWidget, FORM_CLASS):
# col enum
COL_COUNT = 5
ATTR_COL, VAL_COL, PK_COL, EDIT_COL, IGNORED_COL = range(COL_COUNT)
def __init__(self, parent=None, button=None):
"""
Class constructor.
:param parent: (QtWidgets.*) any widget that 'contains' this tool.
:param buttonProps: (CustomFeatureButton) button to be managed.
"""
super(ButtonPropWidget, self).__init__(parent)
self.setupUi(self)
self.button = button or CustomFeatureButton()
self.fillToolComboBox()
self.colorCheckBox.toggled.connect(self.mColorButton.setEnabled)
self.tooltipCheckBox.toggled.connect(self.toolTipLineEdit.setEnabled)
self.categoryCheckBox.toggled.connect(self.categoryLineEdit.setEnabled)
self.keywordCheckBox.toggled.connect(self.keywordLineEdit.setEnabled)
self.shortcutCheckBox.toggled.connect(self.shortcutWidget.setEnabled)
self.mMapLayerComboBox.layerChanged.connect(self.updateFieldTable)
self.attributeTableWidget.setHorizontalHeaderLabels([
self.tr("Attribute"), self.tr("Value"), self.tr("PK"),
self.tr("Editable"), self.tr("Ignored")
])
self.updateFieldTable()
def confirmAction(self, msg, title=None, showNo=True):
"""
Raises a message box that asks for user confirmation.
:param msg: (str) message requesting for confirmation to be shown.
:param showNo: (bool) whether No button should be exposed.
:return: (bool) whether action was confirmed.
"""
mb = QMessageBox()
title = title or self.tr("Confirm action")
if showNo:
return QMessageBox.question(
self, title, msg, QMessageBox.Yes | QMessageBox.No
) == QMessageBox.Yes
else:
return QMessageBox.question(
self, title, msg, QMessageBox.Ok) == QMessageBox.Ok
def fillToolComboBox(self):
"""
        Sets up the available feature extraction tools in the GUI.
"""
self.toolComboBox.clear()
# make sure those keys are EXACTLY the same as in "supportedTools"
# method, from CustomFeatureButton
tools = {
self.tr("QGIS default feature extraction tool"): QIcon(""),
self.tr("DSGTools: Free Hand Acquisition"): \
QIcon(':/plugins/DsgTools/icons/free_hand.png'),
self.tr("QGIS Circle extraction tool"): \
QIcon(':/plugins/DsgTools/icons/circle.png'),
self.tr("DSGTools: Right Degree Angle Digitizing"): \
QIcon(':/plugins/DsgTools/icons/home.png')
}
for idx, (tool, icon) in enumerate(tools.items()):
self.toolComboBox.insertItem(idx, tool)
if idx != 0:
self.toolComboBox.setItemIcon(idx, icon)
def setButtonName(self, name):
"""
Sets button name to GUI.
:param name: (str) name to be set to GUI.
"""
self.nameLineEdit.setText(name)
def buttonName(self):
"""
Reads button name from GUI.
:return: (str) button name read from GUI.
"""
return self.nameLineEdit.text().strip()
def setDigitizingTool(self, tool):
"""
Sets button's digitizing tool to GUI.
:param tool: (str) a supported digitizing tool to be set.
"""
tool = CustomFeatureButton().supportedTools()[tool]
self.toolComboBox.setCurrentText(tool)
def digitizingTool(self):
"""
Reads current digitizing tool.
:return: (str) current digitizing tool.
"""
        tools = {v: k for k, v in
                 CustomFeatureButton().supportedTools().items()}
return tools[self.toolComboBox.currentText()]
def setUseColor(self, useColor):
"""
        Sets whether button will have a custom color set as read from GUI.
:param useColor: (bool) whether button should use a custom color
palette.
"""
self.colorCheckBox.setChecked(useColor)
def useColor(self):
"""
Reads whether button will have a custom color from GUI.
:return: (bool) whether button should use a custom color
palette.
"""
return self.colorCheckBox.isChecked()
def setColor(self, color):
"""
Sets custom color to the color widget.
:param color: (str/tuple) color to be set.
"""
if isinstance(color, str):
color = QColor(color)
else:
color = QColor(*color)
self.mColorButton.setColor(color)
def color(self):
"""
Reads custom color to be set to widget as read from GUI.
:return: (tuple) color to be used.
"""
return self.mColorButton.color().getRgb()
def setUseToolTip(self, useToolTip):
"""
Defines if button will have a tool tip assigned to it as read from GUI.
:param useToolTip: (bool) whether button will have a tool tip assigned.
"""
self.tooltipCheckBox.setChecked(useToolTip)
def useToolTip(self):
"""
Reads if the button will have a tool tip assigned to it from GUI.
:return: (bool) whether the button will have a tool tip assigned.
"""
return self.tooltipCheckBox.isChecked()
def setToolTip(self, tooltip):
"""
Sets a tool tip for the active button widget.
:param tooltip: (str) tool tip to be set.
"""
self.toolTipLineEdit.setText(tooltip)
def toolTip(self):
"""
Reads the tool tip for the button from GUI.
        :return: (str) tool tip read from GUI.
"""
return self.toolTipLineEdit.text()
def setUseCategory(self, useCat):
"""
Sets button's category/group to GUI.
:param useCat: (b
|
tradebyte/paci
|
paci/helpers/__init__.py
|
Python
|
mit
| 37
| 0
|
"""
|
Init file for the paci h
|
elpers"""
|
pegasus-isi/pegasus
|
packages/pegasus-python/src/Pegasus/service/_serialize.py
|
Python
|
apache-2.0
| 1,770
| 0.00113
|
import logging
from json import JSONEncoder, dumps
from flask import current_app, make_response, request
__all__ = (
"serialize",
"jsonify",
)
log = logging.getLogger(__name__)
def serialize(rv):
log.debug("Serializing output")
if rv is None or (isinstance(rv, str) and not len(rv)):
log.info("No content")
rv = make_response("", 204)
elif (
isinstance(rv, current_app.response_class)
or callable(rv)
or isinstance(rv, str)
):
        ...  # already a Response, a callable, or a non-empty string: pass through
else:
log.info("Serializing")
rv = jsonify(rv)
if request.method == "POST":
make_response(rv, 201)
return rv
def jsonify(*args, **kwargs):
if args and kwargs:
raise TypeError("jsonify() behavior undefined when passed both args and kwargs")
elif len(args) == 1: # single args are passed directly to dumps()
data = args[0]
else:
data = args or kwargs
pretty_print = bool(
        request.args.get(
            "pretty-print", current_app.config["JSONIFY_PRETTYPRINT_REGULAR"]
)
)
indent = None
separators = (",", ":")
cls = current_app.json_encoder or JSONEncoder
if pretty_print is True and request.is_xhr is False:
indent = 2
separators = (", ", ": ")
if hasattr(request, "operation") and request.operation.produces:
mime_type = request.operation.produces[0]
elif "JSONIFY_MIMET
|
YPE" in current_app.config:
mime_type = current_app.config["JSONIFY_MIMETYPE"]
else:
mime_type = "application/json; charset=utf-8"
json_str = dumps(data, indent=indent, separators=separators, cls=cls) + "\n"
json_str.encode("utf-8")
return current_app.response_class(json_str, mimetype=mime_type)
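In a Flask app these helpers wrap view return values; a minimal sketch, assuming the module is importable under the path shown above and the pretty-print config key is set:
from flask import Flask
from Pegasus.service._serialize import serialize  # the module above

app = Flask(__name__)
app.config.setdefault("JSONIFY_PRETTYPRINT_REGULAR", False)

@app.route("/status", methods=["GET", "POST"])
def status():
    # dicts fall through to jsonify(); a POST gets 201, an empty body 204
    return serialize({"state": "ok"})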
|
zzzzrrr/openmelee
|
ai/ai.py
|
Python
|
gpl-3.0
| 2,647
| 0.005667
|
#
# Copyright (c) 2009 Mason Green & Tom Novelli
#
# This file is part of OpenMelee.
#
# OpenMelee is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# OpenMelee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenMelee. If not, see <http://www.gnu.org/licenses/>.
#
from math import atan2, pi
from steer import Steer
from engine import draw_line
class AI(object):
def __init__(self, ship, enemy, actors):
self.ship = ship
self.steer = Steer(ship, actors)
self.max_prediction_time = 0.25
self.planet = ship.melee.planet
self.enemy = enemy
# Elementary steering AI
def update(self):
st = self.steer.collision_threat(2.5)
self.range = (self.ship.body.position - self.enemy.body.position).length
range2 = (self.ship.body.position - self.planet.body.position).length
margin = self.planet.radius + self.ship.radius * 2.0
if st is None and range2 > margin:
self.chase()
return
if st:
self.avoid(st)
def chase(self):
st = self.steer.target(self.enemy, self.max_prediction_time)
#p1 = self.ship.body.position
#draw_line(p1.x, p1.y, st.x, st.y)
st = self.ship.body.get_local_point(st)
# Ship's heading is 180 off rigid body's heading => add pi
angle = atan2(st.x, st.y) + pi
if self.range < 50 and (angle < 0.05 or angle > 6.233):
self.ship.fire()
if angle > 0.05 and angle < 6.233:
if angle >= 0.05 and angle < pi:
self.ship.turn_right()
else:
                self.ship.turn_left()
else:
self.ship.body.angular_velocity = 0.0
if self.range > 5.0:
self.ship.thrust()
def avoid(self, st):
k = self.ship.body.get_local_point(st)
angle = atan2(k.x, k.y) + pi
t = self.ship.body.linear_velocity.cross(st)
if self.range < 50 and (angle < 0.05 or angle > 6.233):
self.ship.fire()
if t >= 0:
self.ship.turn_right()
else:
self.ship.turn_left()
self.ship.thrust()
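Both chase() and avoid() share the same heading test; isolated, with the thresholds copied from the code (6.233 is roughly 2*pi - 0.05), it looks like this:
from math import atan2, pi

def heading_error(local_x, local_y):
    # the ship's nose is 180 degrees off the body heading, hence the +pi;
    # the result lies in (0, 2*pi]
    return atan2(local_x, local_y) + pi

def in_fire_window(angle):
    # fire only when the target sits within ~0.05 rad of dead ahead
    return angle < 0.05 or angle > 6.233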
|
thiagopena/PySIGNFe
|
pysignfe/nfe/manual_300/__init__.py
|
Python
|
lgpl-2.1
| 1,984
| 0.017713
|
# -*- coding: utf-8 -*-
ESQUEMA_ATUAL = u'pl_005f'
#
# Envelopes SOAP
#
from .soap_100 import SOAPEnvio as SOAPEnvio_110
from .soap_100 import SOAPRetorno as SOAPRetorno_110
#
# Emissão de NF-e
#
from .nfe_110 import NFe as NFe_110
from .nfe_110 import NFRef as NFRef_110
from .nfe_110 import Det as Det_110
from .nfe_110 import DI as DI_110
from .nfe_110 import Adi as Adi_110
from .nfe_110 import Med as Med_110
from .nfe_110 import Arma as Arma_110
from .nfe_110 import Reboque as Reboque_110
from .nfe_110 import Vol as Vol_110
from .nfe_110 import Lacres as Lacres_110
from .nfe_110 import Dup as Dup_110
from .nfe_110 import ObsCont as ObsCont_110
from .nfe_110 import ObsFisco as ObsFisco_110
from .nfe_110 import ProcRef as ProcRef_110
#
# Envio de lote de NF-e
#
from .envinfe_110 import EnviNFe as EnviNFe_110
from .envinfe_110 import RetEnviNFe as RetEnviNFe_110
#
# Consulta do recibo do lote de NF-e
#
from .consrecinfe_110 import ConsReciNFe as ConsReciNFe_110
from .consrecinfe_110 import RetConsReciNFe as RetConsReciNFe_110
from .consrecinfe_110 import ProtNFe as ProtNFe_110
from .consrecinfe_110 import ProcNFe as ProcNFe_110
#
# Cancelamento de NF-e
#
from .cancnfe_107 import CancNFe as CancNFe_107
from .cancnfe_107 import RetCancNFe as RetCancNFe_107
from .cancnfe_107 import ProcCancNFe as ProcCancNFe_107
#
# Inutilização de NF-e
#
from .inutnfe_107 import InutNFe as InutNFe_107
from .inutnfe_107 import RetInutNFe as RetInutNFe_107
from .inutnfe_107 import ProcInutNFe as ProcInutNFe_107
#
# Consulta a situação de NF-e
#
from .conssitnfe_107 import ConsSitNFe as ConsSitNFe_107
from .conssitnfe_107 import RetConsSitNFe as RetConsSitNFe_107
#
# Consulta a situação do serviço
#
from .consstatserv_107 import ConsStatServ as ConsStatServ_107
from .consstatserv_107 import RetConsStatServ as RetConsStatServ_107
#
# Consulta cadastro
#
from .conscad_101 import ConsCad as ConsCad_101
from .conscad_101 import RetConsCad as RetConsCad_101
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py
|
Python
|
mit
| 3,725
| 0.005101
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import asyncio
import os
from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates import CertificateContentType, CertificatePolicy, WellKnownIssuerNames
from azure.keyvault.certificates.aio import CertificateClient
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://docs.microsoft.com/en-us/azure/key-vault/quick-create-cli)
#
# 2. azure-keyvault-certificates and azure-identity packages (pip install these)
#
# 3. Set up your environment to use azure-identity's DefaultAzureCredential. To authenticate a service principal with
# environment variables, set AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, and AZURE_TENANT_ID
#    (See https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/keyvault/azure-keyvault-administration#authenticate-the-client)
#
# 4. A PFX certificate on your machine. Set an environment variable, PFX_CERT_PATH, with the path to this certificate.
#
# 5. A PEM-formatted certificate on your machine. Set an environment variable, PEM_CERT_PATH, with the path to this
# certificate.
#
# ----------------------------------------------------------------------------------------------------------
# Sample - demonstrates importing a PFX and PEM-formatted certificate into Azure Key Vault
#
# 1. Import an existing PFX certificate (import_certificate)
#
# 2. Import an existing PEM-formatted certificate (import_certificate)
#
# ----------------------------------------------------------------------------------------------------------
async def run_sample():
# Instantiate a certificate client that will be used to call the service.
# Here we use the DefaultAzureCredential, but any azure-identity credential can be used.
VAULT_URL = os.environ["VAULT_URL"]
credential = DefaultAzureCredential()
client = CertificateClient(vault_url=VAULT_URL, credential=credential)
# Let's import a PFX certificate first.
# Assuming you already have a PFX containing your key pair, you can import it into Key Vault.
# You can do this without setting a policy, but the policy is needed if you want the private key to be exportable
# or to configure actions when a certificate is close to expiration.
pfx_cert_name = "pfxCert"
with open(os.environ["PFX_CERT_PATH"], "rb") as f:
pfx_cert_bytes = f.read()
imported_pfx_cert = await client.import_certificate(
certificate_name=pfx_cert_name, certificate_bytes=pfx_cert_bytes
)
print("PFX certificate '{}' imported successfully.".format(imported_pfx_cert.name))
# Now let's import a PEM-formatted certificate.
# To import a PEM-formatted certificate, you must provide a CertificatePolicy that sets the content_type to
# CertificateContentType.pem or the certificate will fail to import (the default content type is PFX).
pem_cert_name = "pemCert"
with open(os.environ["PEM_CERT_PATH"], "rb") as f:
pem_cert_bytes = f.read()
pem_cert_policy = CertificatePolicy(issuer_name=WellKnownIssuerNames.self, content_type=CertificateContentType.pem)
imported_pem_cert = await client.import_certificate(
certificate_name=pem_cert_name, certificate_bytes=pem_cert_bytes, policy=pem_cert_policy
)
print("PEM-formatted certificate '{}' imported successfully.".format(imported_pem_cert.name))
await credential.close()
await client.close()
if __name__ == "__main__":
loop = asyncio.get_event_loop()
loop.run_until_complete(run_sample())
loop.close()
|
jos4uke/getSeqFlankBlatHit
|
lib/python2.7/site-packages/pybedtools/test/tfuncs.py
|
Python
|
gpl-2.0
| 462
| 0.004329
|
import pybedtools
import os
testdir = os.path.dirname(__file__)
test_tempdir = os.path.join(os.path.abspath(testdir), 'tmp')
unwriteable = os.path.join(os.path.abspath(testdir), 'unwriteable')
def setup():
if not os.path.exists(test_tempdir):
os.system('mkdir -p %s' % test_tempdir)
pybedtools.set_tempdir(test_tempdir)
def teardown():
if os.path.exists(test_tempdir):
os.system('rm -r %s' % test_tempdir)
pybedtools.cleanup()
|
djorda9/Simulated-Conversations
|
vagrant/simcon/templatetags/generatelink_extras.py
|
Python
|
mit
| 376
| 0.007979
|
from django import template
register = template.Library()
# Used to create the generated link url for the templates
@register.filter
def get_link_filter(obj, arg):
base = obj.get_link(arg)
return base
# Used to create the base url of the generated link for the templates
@register.filter
def get_base_link_filter(obj):
    base = obj.get_base_link()
return base
|
rcbops/glance-buildpackage
|
glance/registry/db/migration.py
|
Python
|
apache-2.0
| 3,992
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from migrate.versioning import api as versioning_api
# See LP bug #719834. sqlalchemy-migrate changed location of
# exceptions.py after 0.6.0.
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
from migrate import exceptions as versioning_exceptions
from glance.common import exception
logger = logging.getLogger('glance.registry.db.migration')
def db_version(conf):
"""
Return the database's current migration number
:param conf: conf dict
:retval version number
"""
    repo_path = get_migrate_repo_path()
sql_connection = conf.sql_connection
try:
return versioning_api.db_version(sql_connection, repo_path)
except versioning_exceptions.DatabaseNotControlledError, e:
msg = (_("database '%(sql_connection)s' is not under "
"migration control") % locals())
        raise exception.DatabaseMigrationError(msg)
def upgrade(conf, version=None):
"""
Upgrade the database's current migration level
:param conf: conf dict
:param version: version to upgrade (defaults to latest)
:retval version number
"""
db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path()
sql_connection = conf.sql_connection
version_str = version or 'latest'
logger.info(_("Upgrading %(sql_connection)s to version %(version_str)s") %
locals())
return versioning_api.upgrade(sql_connection, repo_path, version)
def downgrade(conf, version):
"""
Downgrade the database's current migration level
:param conf: conf dict
:param version: version to downgrade to
:retval version number
"""
db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path()
sql_connection = conf.sql_connection
logger.info(_("Downgrading %(sql_connection)s to version %(version)s") %
locals())
return versioning_api.downgrade(sql_connection, repo_path, version)
def version_control(conf):
"""
Place a database under migration control
:param conf: conf dict
"""
sql_connection = conf.sql_connection
try:
_version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e:
msg = (_("database '%(sql_connection)s' is already under migration "
"control") % locals())
raise exception.DatabaseMigrationError(msg)
def _version_control(conf):
"""
Place a database under migration control
:param conf: conf dict
"""
repo_path = get_migrate_repo_path()
sql_connection = conf.sql_connection
return versioning_api.version_control(sql_connection, repo_path)
def db_sync(conf, version=None):
"""
Place a database under migration control and perform an upgrade
:param conf: conf dict
:retval version number
"""
try:
_version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e:
pass
upgrade(conf, version=version)
def get_migrate_repo_path():
"""Get the path for the migrate repository."""
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
return path
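Callers only need an object exposing a sql_connection attribute; a minimal sketch (the class and DSN are placeholders, and sqlalchemy-migrate must be installed):
class Conf(object):
    sql_connection = 'sqlite:///glance_test.sqlite'  # placeholder DSN

conf = Conf()
db_sync(conf)             # put the db under version control, then upgrade to latest
print(db_version(conf))   # current migration number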
|
LarsBV/kate_plugin_template
|
my_plugin.py
|
Python
|
bsd-2-clause
| 1,396
| 0.012894
|
# -*- coding: utf-8 -*-
# Author: <Your name>
# License see LICENSE
from PyKDE4.kdecore import i18n
from PyKDE4.kdeui import KAction, KIcon
from PyQt4.QtCore import QObject
from PyQt4.QtGui import QMenu
from libkatepate.errors import showOk, showError
import kate
class MyPlugin(QObject):
def __init__(self):
QObject.__init__(self)
self.window = kate.mainInterfaceWindow().window()
showOk('MyPlugin inits!')
# self.act = KAction(KIcon("reload"), i18n("Auto Reload"), self)
# self.act.setObjectName("test")
# self.window.actionCollection().addAction(self.act.objectName(), self.act)
# self.window.findChild(QMenu, 'view').addAction(self.act)
# if not self.act.objectName() in kate.configuration:
# kate.configuration[self.act.objectName()] = "alt+r"
        # self.act.setShortcut(kate.configuration[self.act.objectName()])
# self.act.setCheckable(True)
# self.act.setChecked(False)
        # self.act.changed.connect(self.onActionChange)
# self.act.toggled.connect(self.toggle)
# kate.mainInterfaceWindow().viewChanged.connect(self.onViewChanged)
def onActionChange(self):
kate.configuration[self.sender().objectName()] = self.sender().shortcut().toString()
kate.configuration.save()
print(self.sender().objectName() + ': Save ' + kate.configuration[self.sender().objectName()])
|
saltstack/salt
|
salt/utils/preseed.py
|
Python
|
apache-2.0
| 2,707
| 0.002586
|
"""
Utilities for managing Debian preseed
.. versionadded:: 2015.8.0
"""
import shlex
import salt.utils.files
import salt.utils.stringutils
import salt.utils.yaml
def mksls(src, dst=None):
"""
Convert a preseed file to an SLS file
"""
ps_opts = {}
    with salt.utils.files.fopen(src, "r") as fh_:
for line in fh_:
            line = salt.utils.stringutils.to_unicode(line)
if line.startswith("#"):
continue
if not line.strip():
continue
comps = shlex.split(line)
if comps[0] not in ps_opts.keys():
ps_opts[comps[0]] = {}
cmds = comps[1].split("/")
pointer = ps_opts[comps[0]]
for cmd in cmds:
pointer = pointer.setdefault(cmd, {})
pointer["type"] = comps[2]
if len(comps) > 3:
pointer["argument"] = comps[3]
sls = {}
# Set language
# ( This looks like it maps to something else )
sls[ps_opts["d-i"]["languagechooser"]["language-name-fb"]["argument"]] = {
"locale": ["system"]
}
# Set keyboard
# ( This looks like it maps to something else )
sls[ps_opts["d-i"]["kbd-chooser"]["method"]["argument"]] = {"keyboard": ["system"]}
# Set timezone
timezone = ps_opts["d-i"]["time"]["zone"]["argument"]
sls[timezone] = {"timezone": ["system"]}
if ps_opts["d-i"]["tzconfig"]["gmt"]["argument"] == "true":
sls[timezone]["timezone"].append("utc")
# Set network
if "netcfg" in ps_opts["d-i"].keys():
iface = ps_opts["d-i"]["netcfg"]["choose_interface"]["argument"]
sls[iface] = {}
sls[iface]["enabled"] = True
if ps_opts["d-i"]["netcfg"]["confirm_static"] == "true":
sls[iface]["proto"] = "static"
elif ps_opts["d-i"]["netcfg"]["disable_dhcp"] == "false":
sls[iface]["proto"] = "dhcp"
sls[iface]["netmask"] = ps_opts["d-i"]["netcfg"]["get_netmask"]["argument"]
sls[iface]["domain"] = ps_opts["d-i"]["netcfg"]["get_domain"]["argument"]
sls[iface]["gateway"] = ps_opts["d-i"]["netcfg"]["get_gateway"]["argument"]
sls[iface]["hostname"] = ps_opts["d-i"]["netcfg"]["get_hostname"]["argument"]
sls[iface]["ipaddress"] = ps_opts["d-i"]["netcfg"]["get_ipaddress"]["argument"]
sls[iface]["nameservers"] = ps_opts["d-i"]["netcfg"]["get_nameservers"][
"argument"
]
if dst is not None:
with salt.utils.files.fopen(dst, "w") as fh_:
salt.utils.yaml.safe_dump(sls, fh_, default_flow_style=False)
else:
return salt.utils.yaml.safe_dump(sls, default_flow_style=False)
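To make the parsing concrete: each preseed line has the shape 'owner question/path type [value]', and the question path becomes nested dict keys; a sketch (the file path is a placeholder):
# one line of a preseed file and its parsed form:
#   d-i time/zone string Europe/Berlin
# -> ps_opts['d-i']['time']['zone'] == {'type': 'string',
#                                       'argument': 'Europe/Berlin'}
# on a complete preseed file:
yaml_text = mksls('/path/to/preseed.cfg')  # returns YAML when dst is None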
|