| repo_name (string) | path (string) | copies (string) | size (string) | content (string) | license (string) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) | ratio (float64) | config_test (bool) | has_no_keywords (bool) | few_assignments (bool) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
anjos/popster
|
popster/qnap.py
|
1
|
8677
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Utilities to use the QNAP Container Station API
See documentation here: http://qnap-dev.github.io/container-station-api/index.html
"""
import os
import sys
import pickle
import requests
import getpass
import warnings
import contextlib
import logging
logger = logging.getLogger(__name__)
import pkg_resources
SESSION_FILE = os.path.expanduser("~/.qnap-auth.pickle")
@contextlib.contextmanager
def no_ssl_warnings(verify):
    if not verify:
        warnings.filterwarnings("ignore", "Unverified HTTPS request")
    try:
        yield
    finally:
        # reset even if the wrapped block raises
        if not verify:
            warnings.resetwarnings()
def api(session, server, url, verb="get", data=None, json=None, verify=False):
"""Calls the container station API with a given url and data dictionary
Parameters:
session (requests.Session): A previously opened session with the
authentication cookies to use
server (str): The server to reach
    url (str): The URL to call on the container station API, relative to
      the address ``/containerstation/api/v1``, which is always
      prepended.
verb (str, Optional): One of the HTTP verbs to query the URL with. If not
specified, defaults to ``get``. Any verb available in
:py:class:`requests.Session` is acceptable.
    data (dict, Optional): A dictionary of form data to pass to the API
    json (dict, Optional): A JSON payload to pass to the API
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
    requests.Response: An object that contains the reply from the HTTP API
      call, including the returned status code
"""
url = server + "/containerstation/api/v1" + url
logger.debug(f"{verb.upper()} {url}")
with no_ssl_warnings(verify):
return getattr(session, verb)(url, data=data, json=json, verify=verify)
def login(server, username, password, verify=False):
"""Logs-in the server, if a session file is not available yet.
Parameters:
server (str): The server to reach
username (str): The user identifier to use for login
password (str): The user password for login
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
    requests.Session: A restored or new session after successfully
authenticating to the server
"""
if os.path.exists(SESSION_FILE):
logger.debug(f"Session file ({SESSION_FILE}) exists - trying to use it")
with open(SESSION_FILE, "rb") as f:
session = pickle.load(f)
result = api(session, server, "/login_refresh", verify=verify)
if "error" in result.json():
logout(server, verify=verify)
if not os.path.exists(SESSION_FILE):
logger.debug(f"Session file ({SESSION_FILE}) does not exist - logging-in")
session = requests.Session()
data = dict(username=username, password=password)
result = api(
session, server, "/login", verb="post", data=data, verify=verify
)
if result.status_code != 200:
raise RuntimeError(
"Login request failed with status code %d" % result.status_code
)
response = result.json()
if response.get("username") != username:
raise RuntimeError(
"Login request for user %s failed (%s is "
"logged in)" % (username, response.get("username"))
)
with open(SESSION_FILE, "wb") as f:
pickle.dump(session, f)
return session
def logout(server, verify=False):
"""Logs the user out
Parameters:
server (str): The server to reach
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
"""
if not os.path.exists(SESSION_FILE):
logger.error(f"No session file exists at {SESSION_FILE} - not logging out")
return
logger.debug("Logging out...")
with open(SESSION_FILE, "rb") as f:
session = pickle.load(f)
result = api(session, server, "/logout", verb="put", verify=verify)
response = result.json()
if os.path.exists(SESSION_FILE):
logger.debug(f"Removing {SESSION_FILE}...")
os.unlink(SESSION_FILE)
session.close() # close all connections
@contextlib.contextmanager
def session(server, username, password, verify=False):
"""Context manager that opens and closes a connection to the NAS"""
yield login(server, username, password, verify=verify)
logout(server, verify=verify)
def system(server, session=None, verify=False):
"""Checks system information
Parameters:
server (str): The server to reach
    session (requests.Session): A previously opened session to use
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
dict: A valid JSON object, decoded into Python
"""
return api(session, server, "/system", verify=verify).json()
def get_containers(session, server, verify=False):
"""Gets all information on available containers
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
list of dict: Containing information about all running containers
"""
return api(session, server, "/container", verify=verify).json()
def inspect_container(session, server, id_, verify=False):
"""Gets all information on the container with the given identifier
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    id_ (str): The identifier of the container to inspect
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
list: A valid JSON object, decoded into Python
"""
return api(
session, server, "/container/docker/%s/inspect" % id_, verify=verify
).json()
def stop_container(session, server, id_, verify=False):
"""Stops the container with the given identifier
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    id_ (str): The identifier of the container to stop
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
list: A valid JSON object, decoded into Python
"""
return api(
session,
server,
"/container/docker/%s/stop" % id_,
verb="put",
verify=verify,
).json()
def remove_container(session, server, id_, verify=False):
"""Removes the container with the given identifier
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    id_ (str): The identifier of the container to be removed
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
Returns:
list: A valid JSON object, decoded into Python
"""
return api(
session,
server,
"/container/docker/%s" % id_,
verb="delete",
verify=verify,
).json()
def create_container(
session,
server,
name,
options,
image="anjos/popster",
tag="v%s" % pkg_resources.require("popster")[0].version,
verify=False,
):
"""Creates a container with an existing image
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    name (str): The name of the container to create
    options (dict): A dictionary of options that will be passed to the API
    image (str): The name of the image to use (e.g.: 'anjos/popster')
    tag (str): Tag to be used for the above image (e.g.: 'v1.2.3')
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
"""
info = dict(type="docker", name=name, image=image, tag=tag,)
# prepares new container information
info.update(options)
response = api(
session, server, "/container", verb="post", json=info, verify=verify
)
return response.json()
def retrieve_logs(session, server, id_, tail=1000, verify=False):
"""Retrieves the logs from container
Parameters:
    session (requests.Session): A previously opened session to use
    server (str): The server to reach
    id_ (str): The identifier of the container to retrieve logs from
    tail (int, Optional): How many trailing log lines to retrieve
    verify (bool, Optional): Whether to pass ``verify=True`` to requests calls
"""
return api(
session,
server,
"/container/docker/%s/logs?tail=%d" % (id_, tail),
verify=verify,
).json()
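# Minimal usage sketch: list containers through the ``session`` context
# manager above. The server URL and username below are placeholders, not
# values from this project.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    server = "https://nas.example.com:5001"
    with session(server, "admin", getpass.getpass("Password: ")) as s:
        for container in get_containers(s, server):
            print(container.get("name"), container.get("state"))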
|
gpl-3.0
| -3,796,160,623,227,330,000
| 22.837912
| 83
| 0.659329
| false
| 4.143744
| false
| false
| false
|
ionelmc/virtualenv
|
virtualenv/_compat.py
|
1
|
1342
|
from __future__ import absolute_import, division, print_function
try:
FileNotFoundError = FileNotFoundError
except NameError: # pragma: no cover
FileNotFoundError = OSError
# Python 2.6 does not have check_output, so we'll backport this just for
# Python 2.6
import subprocess
try:
from subprocess import check_output
except ImportError: # pragma: no cover
def check_output(*popenargs, **kwargs):
if "stdout" in kwargs:
raise ValueError(
"stdout argument not allowed, it will be overridden."
)
if "input" in kwargs:
if "stdin" in kwargs:
raise ValueError(
"stdin and input arguments may not both be used."
)
inputdata = kwargs["input"]
del kwargs["input"]
kwargs["stdin"] = subprocess.PIPE
else:
inputdata = None
process = subprocess.Popen(
*popenargs,
stdout=subprocess.PIPE,
**kwargs
)
try:
output, unused_err = process.communicate(inputdata)
except:
process.kill()
process.wait()
raise
retcode = process.poll()
if retcode:
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
            # Python 2.6's CalledProcessError takes (returncode, cmd)
            raise subprocess.CalledProcessError(retcode, cmd)
return output
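# Minimal usage sketch (hypothetical command; works with either the stdlib
# check_output or the 2.6 fallback defined above):
if __name__ == "__main__":
    print(check_output(["echo", "hello"]))  # -> "hello\n"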
|
mit
| 4,296,613,407,612,718,600
| 29.5
| 72
| 0.5693
| false
| 4.89781
| false
| false
| false
|
henryscala/plain_sequence_chart
|
src/canvas.py
|
1
|
5513
|
import array
class Canvas:
BLANK=' '
HLINE='-'
VLINE='|'
HLARROW='<'
HRARROW='>'
VUARROW='^'
VDARROW='v'
INTERSECT='+'
XINTERSECT='*'
WAVEVLLINE='('
WAVEVRLINE=')'
WAVEHLINE='~'
def __init__(self,col,row):
self.row=row
self.column=col
self.canvas=array.array('b',[ord(self.BLANK)]*(row*col))
def __draw(self,col,row,char):
self.canvas[self.column*row+col]=ord(char)
def reset(self):
for i in range(self.column*self.row):
            self.canvas[i]=ord(self.BLANK)
def output(self):
for i in range(self.row):
lineStart=self.column*i
line=self.canvas[lineStart:lineStart+self.column].tostring().decode('utf-8')
line = line.rstrip()
if len(line) > 0:
print (line)
def point(self,col,row,char):
self.__draw(col,row,char)
def hline(self,col,row,length,direction=None,arrow=None,hChar=HLINE):
start=col
stop=col+length
if direction:
start=col-length+1
stop=col+1
for i in range(start,stop):
self.point(i,row,hChar)
if arrow:
if direction:
self.point(start,row,self.HLARROW)
else:
self.point(stop-1,row,self.HRARROW)
def vline(self,col,row,length,direction=None,arrow=None,vChar=VLINE):
start=row
stop=row+length
if direction:
start=row-length+1
stop=row+1
for i in range(start,stop):
self.point(col,i,vChar)
if arrow:
if direction:
self.point(col,start,Canvas.VUARROW)
else:
self.point(col,stop-1,Canvas.VDARROW)
def rect(self,col,row,width,height):
self.vline(col,row,height)
self.vline(col+width-1,row,height)
self.hline(col+1,row+height-1,width-2)
self.hline(col+1,row,width-2)
def waveRect(self,col,row,width,height):
self.vline(col,row,height,vChar=self.WAVEVLLINE)
self.vline(col+width-1,row,height,vChar=self.WAVEVRLINE)
self.hline(col+1,row+height-1,width-2,hChar=self.WAVEHLINE)
self.hline(col+1,row,width-2,hChar=self.WAVEHLINE)
def text(self,col,row,astr,center=None):
left=col
if center:
left=col-len(astr)//2
for i in range(len(astr)):
self.point(left+i,row,astr[i])
    def __textRect(self,astr,width=None):
        strlen=len(astr)
if not width :
cols=strlen
rows=1
elif strlen<=width:
cols=width
rows=1
else:
cols=width
rows=strlen//width
remain=strlen % width
if remain:
rows +=1
return (cols,rows)
def rectText(self,col,row,astr,width=None,center=None):
cols,rows=self.__textRect(astr,width)
for i in range(rows):
line=astr[cols*i:cols*i+cols]
if center:
self.text(col,row+1+i,line,center)
left=col-cols//2-1
top=row
width=cols+2
height=rows+2
self.rect(left,top,width,height)
else:
self.text(col+1,row+1+i,line,center)
left=col
top=row
width=cols+2
height=rows+2
self.rect(left,top,width,height)
return (width,height)
def waveRectText(self,col,row,astr,width=None,center=None):
cols,rows=self.__textRect(astr,width)
for i in range(rows):
line=astr[cols*i:cols*i+cols]
if center:
self.text(col,row+1+i,line,center)
left=col-cols//2-1
top=row
width=cols+2
height=rows+2
self.waveRect(left,top,width,height)
else:
self.text(col+1,row+1+i,line,center)
left=col
top=row
width=cols+2
height=rows+2
self.waveRect(left,top,width,height)
return (width,height)
def ordAt(self,col,row):
return self.canvas[self.column*row+col]
def isRowBlank(self,row):
for c in range(self.column):
if self.ordAt(c,row)!=ord(self.BLANK):
return False
return True
def isColumnBlank(self,column):
for r in range(self.row):
if self.ordAt(column,r)!=ord(self.BLANK):
return False
return True
def shiftLeft(self,fromColumn, numOfColumn=1):
for r in range(self.row):
for c in range(fromColumn,self.column):
self.point(c - numOfColumn, r, chr(self.ordAt(c,r)))
def shiftTop(self,fromRow, numOfRow=1):
for c in range(self.column):
for r in range(fromRow,self.row):
self.point(c, r-numOfRow, chr(self.ordAt(c,r)))
def trimLeftTop(self):
while self.isColumnBlank(0):
self.shiftLeft(1)
while self.isRowBlank(0):
self.shiftTop(1)
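# Minimal usage sketch: draw a boxed label plus an arrowed line and print it.
if __name__=='__main__':
    c=Canvas(40,8)
    c.rectText(2,1,'hello')        # box sized to fit the text
    c.hline(12,2,10,arrow=True)    # line along row 2 ending in a '>' arrow
    c.output()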
|
gpl-3.0
| 2,124,022,629,261,501,000
| 29.627778
| 92
| 0.498821
| false
| 3.591531
| false
| false
| false
|
fragaria/BorIS
|
boris/services/migrations/0019_workforclient.py
|
1
|
1264
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('services', '0018_delete_pregnancytest'),
]
operations = [
migrations.CreateModel(
name='WorkForClient',
fields=[
('service_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='services.Service')),
('contact_institution', models.BooleanField(default=False, verbose_name='a) kontakt s institucemi')),
('message', models.BooleanField(default=False, verbose_name='b) zpr\xe1va, doporu\u010den\xed')),
('search_information', models.BooleanField(default=False, verbose_name='c) vyhled\xe1v\xe1n\xed a zji\u0161\u0165ov\xe1n\xed informac\xed pro klienta')),
('case_conference', models.BooleanField(default=False, verbose_name='d) p\u0159\xedpadov\xe1 konference')),
],
options={
'verbose_name': 'Pr\xe1ce ve prosp\u011bch klienta',
'verbose_name_plural': 'Pr\xe1ce ve prosp\u011bch klienta',
},
bases=('services.service',),
),
]
|
mit
| -2,708,543,291,182,270,000
| 42.586207
| 169
| 0.612342
| false
| 3.653179
| false
| false
| false
|
morgenfree/column-store-tbat-exp-python
|
prepare/updateData.py
|
1
|
5865
|
'''
updateBun bun to update
bat_file
update_file
'''
__author__ = 'fyu'
import time
from config import *
BUFFERING_SIZE=1048576
#BUFFERING_SIZE=10
def updateBATFast(bat_file_name,update_file_name):
bat_file=open(bat_file_name,'r+')
update_file=open(update_file_name,'r')
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
updateLineNum=long(updateLineNumStr)
bat_file.seek((updateLineNum-1)*len(updateLine))
bat_file.write(updateLine)
bat_file.seek(0)
bat_file.close()
update_file.close()
def updateBATFast2(bat_file_name,update_file_name):
with open(bat_file_name, 'r+', buffering=BUFFERING_SIZE) as bat_file:
with open(update_file_name, 'r', buffering=BUFFERING_SIZE) as update_file:
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
updateLineNum=long(updateLineNumStr)
bat_file.seek((updateLineNum-1)*len(updateLine))
bat_file.write(updateLine)
bat_file.seek(0)
# bat_file.close()
# update_file.close()
def updateBAT1(bat_file_name,update_file_name):
bat_file=open(bat_file_name,'r+', buffering=BUFFERING_SIZE)
update_file=open(update_file_name,'r', buffering=BUFFERING_SIZE)
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
updateLineNum=long(updateLineNumStr)
currentLineNum=1
while currentLineNum < updateLineNum: # simulating seeking next line
bat_file.seek(len(updateLine),1)
currentLineNum+=1
# bat_file.seek((currentLineNum-1)*len(updateLine))
# bat_file.seek((updateLineNum-1)*len(updateLine))
# print '%d\n' % currentLineNum
bat_file.write(updateLine)
bat_file.seek(0)
bat_file.close()
update_file.close()
def updateBAT2(bat_file_name,update_file_name):
bat_file=open(bat_file_name,'r+', buffering=BUFFERING_SIZE)
update_file=open(update_file_name,'r', buffering=BUFFERING_SIZE)
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
updateLineNum=long(updateLineNumStr)
currentLineNum=1
while currentLineNum < updateLineNum: # simulating seeking next line
#print '%d\n' % bat_file.tell()
bat_file.seek(1,1)
currentLineNum+=1
# bat_file.seek((currentLineNum-1)*len(updateLine))
# bat_file.seek((updateLineNum-1)*len(updateLine))
print '%d\n' % currentLineNum
bat_file.write(updateLine)
bat_file.seek(0)
bat_file.close()
update_file.close()
def updateBAT3(bat_file_name,update_file_name):
with open(bat_file_name,'r+', buffering=BUFFERING_SIZE) as bat_file:
with open(update_file_name,'r', buffering=BUFFERING_SIZE) as update_file:
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
updateLineNum=long(updateLineNumStr)
currentLineNum=1
updateLineLength=len(updateLine)
while currentLineNum < updateLineNum: # simulating seeking next line
bat_file.seek(updateLineLength, 1)
currentLineNum += 1
#print '%d\n' % currentLineNum
bat_file.write(updateLine)
bat_file.seek(0)
def updateTBAT(tbat_file_name,update_file_name):
updateTimeStamp=time.time()
tbat_file=open(tbat_file_name,'a', buffering=BUFFERING_SIZE)
update_file=open(update_file_name,'r', buffering=BUFFERING_SIZE)
for updateLine in update_file:
updateLine='%10g,%s' %(updateTimeStamp,updateLine)
# print updateLine
tbat_file.write(updateLine)
tbat_file.close()
update_file.close()
'''
def updateTBAT(tbat_file_name,update_file_name):
updateTimeStamp=time.time()
tbat_file=open(tbat_file_name,'a')
update_file=open(update_file_name,'r')
for updateLine in update_file:
updateLine='%10g,%s' %(updateTimeStamp,updateLine)
# print updateLine
tbat_file.write(updateLine)
tbat_file.close()
update_file.close()
'''
if __name__=='__main__':
bat_time_start=time.time()
    updateBATFast(bat_file_name,update_file_name)  # updateBAT only exists in the commented-out variants below
bat_time=time.time()-bat_time_start
print 'bat update time:'+str(bat_time)
tbat_time_start=time.time()
updateTBAT(tbat_file_name,update_file_name)
tbat_time=time.time()-tbat_time_start
print 'tbat update time:'+str(tbat_time)
overhead=(bat_time)/tbat_time*100
print 'overhead=%g%%' % (overhead)
'''
def updateBAT(bat_file_name,update_file_name):
bat_file=open(bat_file_name,'r+')
update_file=open(update_file_name,'r')
for updateLine in update_file:
(updateLineNumStr,updateValue)=updateLine.split(',')
#print updateLineNumStr+','+updateValue
updateLineNum=long(updateLineNumStr)
bat_file.seek((updateLineNum-1)*len(updateLine))
bat_file.write(updateLine)
bat_file.close()
update_file.close()
'''
'''
def updateBAT(bat_file_name,update_file_name):
bat_file=open(bat_file_name,'r+')
update_file=open(update_file_name,'r')
for updateLine in update_file:
updateLineNum=long(updateLine.split(',')[0])
seekLine=0
bat_file.seek(0)
for currentLine in bat_file: # simulate seeking the line to change
currentLineNum=long(currentLine.split(',')[0])
if currentLineNum == updateLineNum:
#print 'change line: %d' % (currentLineNum)
bat_file.seek(seekLine*len(currentLine))
bat_file.write(updateLine)
break
else:
seekLine+=1
bat_file.close()
update_file.close()
'''
|
gpl-2.0
| -2,389,603,193,760,424,000
| 35.434783
| 84
| 0.637852
| false
| 3.300506
| false
| false
| false
|
Samsung/ADBI
|
idk/cachebuilder/sections.py
|
1
|
1516
|
from collections import namedtuple
import logging
from elftools.elf.constants import SH_FLAGS
Section = namedtuple('Section', 'id name type addr offset size flags')
class Sections(object):
def __init__(self, debuginfo):
self.debuginfo = debuginfo
def iter_sections():
# omit first section - it is always null section
for idx in range(1, self.debuginfo.elf.num_sections()):
section = self.debuginfo.elf.get_section(idx)
h = section.header
yield Section(idx, section.name, h['sh_type'], h['sh_addr'], h['sh_offset'], h['sh_size'], h['sh_flags'])
self.sections = list(iter_sections())
def addr2fo(self, addr):
'''Convert given virtual address to file offset.'''
for section in [s for s in self.sections if s.flags & SH_FLAGS.SHF_ALLOC]:
lo = section.addr
hi = lo + section.size
if lo <= addr < hi:
offset = addr - lo
return section.offset + offset
raise ValueError('Address %x is invalid.' % addr)
def store(self, conn):
logging.debug('Storing ELF sections')
query = 'insert into sections(id, name, type, addr, offset, size, flags) values (?, ?, ?, ?, ?, ?, ?)'
items = ((section.id, section.name, section.type, section.addr, section.offset, section.size, section.flags)
for section in self.sections )
conn.executemany(query, items)
conn.commit()
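# Minimal usage sketch: the expected ``debuginfo`` object only needs an
# ``elf`` attribute holding a pyelftools ELFFile; the binary path and the
# stand-in namedtuple are placeholders for illustration.
if __name__ == '__main__':
    from elftools.elf.elffile import ELFFile
    DebugInfo = namedtuple('DebugInfo', 'elf')
    with open('/bin/ls', 'rb') as f:
        sections = Sections(DebugInfo(elf=ELFFile(f)))
        alloc = [s for s in sections.sections if s.flags & SH_FLAGS.SHF_ALLOC][0]
        print('%s starts at file offset 0x%x' % (alloc.name, sections.addr2fo(alloc.addr)))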
|
apache-2.0
| 7,481,293,933,677,235,000
| 41.138889
| 121
| 0.591689
| false
| 4.075269
| false
| false
| false
|
yinwenpeng/rescale
|
wenpeng.py
|
1
|
1057
|
#!/usr/bin/env python
import os
import sys
import threading
import time
def Traverse(rootDir):
fileNo=0
for lists in os.listdir(rootDir):
path = os.path.join(rootDir, lists)
#print path
if os.path.isdir(path):
Traverse(path)
elif os.path.isfile(path):
file = open(path)
#ReadFile(file)
            ReadFile(file, -1)  # ReadFile is assumed to be defined elsewhere
fileNo+=1
'''
if fileNo > 0:
return
'''
def calc_froebius_norm(m):
time.sleep(1)
return m
def calc_norm(m, i, norms):
print >> sys.stderr, 'Starting thread', i
norm = calc_froebius_norm(m)
norms[i] = norm
def main():
matrixes = [1, 2, 3, 4]
norms = [0] * len(matrixes)
threads = []
for i, m in enumerate(matrixes):
t = threading.Thread(target=calc_norm, args=(m, i, norms))
t.start()
threads.append(t)
    for thread in threads:
        thread.join()
print >> sys.stderr, norms
if __name__ == '__main__':
main()
|
gpl-3.0
| -4,074,478,246,556,086,300
| 17.875
| 66
| 0.512772
| false
| 3.387821
| false
| false
| false
|
OpenMOOC/moocng
|
moocng/media_contents/__init__.py
|
1
|
2157
|
import json
from django.conf import settings
from . import handlers
def media_content_get_iframe_code(handler, content_id, **kwargs):
handler = handlers.get_handler(handler)
return handler.get_iframe_code(content_id, **kwargs)
def media_content_get_thumbnail_url(handler, content_id, **kwargs):
handler = handlers.get_handler(handler)
return handler.get_thumbnail_url(content_id, **kwargs)
def media_content_get_iframe_template(handler, content_id, **kwargs):
handler = handlers.get_handler(handler)
return handler.get_iframe_template(content_id, **kwargs)
def media_content_get_js_code(handler, **kwargs):
handler = handlers.get_handler(handler)
return handler.get_javascript_code(**kwargs)
def media_content_get_last_frame(handler, content_id, tmpdir, **kwargs):
handler = handlers.get_handler(handler)
return handler.get_last_frame(content_id, tmpdir, **kwargs)
def media_content_extract_id(handler, url, **kwargs):
handler = handlers.get_handler(handler)
return handler.extract_id(url, **kwargs)
def media_contents_javascripts(**kwargs):
course = kwargs.get('course', None)
handlers_ids = []
if course:
if course.promotion_media_content_type:
handlers_ids.append(course.promotion_media_content_type)
for unit in course.unit_set.all():
for kq in unit.knowledgequantum_set.all():
handlers_ids.append(kq.media_content_type)
for question in kq.question_set.all():
handlers_ids.append(question.solution_media_content_type)
handlers_ids = list(set(handlers_ids))
html = "<script>MEDIA_CONTENT_TYPES = %s;</script>" % json.dumps(dict([(item['id'], item) for item in settings.MEDIA_CONTENT_TYPES]))
for handler_id in handlers_ids:
handler = handlers.get_handler(handler_id)
html += handler.get_javascript_code(**kwargs)
return html
def get_media_content_types_choices():
choices = []
for handler_dict in settings.MEDIA_CONTENT_TYPES:
choices.append((handler_dict['id'], handler_dict.get('name', handler_dict['id'])))
return choices
|
apache-2.0
| -2,980,033,750,012,387,300
| 32.703125
| 137
| 0.685211
| false
| 3.637437
| false
| false
| false
|
SimonBiggs/electronfactors
|
test/test_poi.py
|
1
|
1335
|
# Copyright (C) 2015 Simon Biggs
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# http://www.gnu.org/licenses/.
import numpy as np
from electronfactors.ellipse.equivalent import poi_distance_method
def test_centre_of_square():
XCoords = np.array([-3, 3, 3, -3])
YCoords = np.array([3, 3, -3, -3])
poi = poi_distance_method(
XCoords=XCoords, YCoords=YCoords
)
assert np.abs(poi[0]) < 0.1
assert np.abs(poi[1]) < 0.1
def test_centre_of_arbitrary_cutout():
XCoords = np.array([-1, -0.2, 0, 0.7, 1, 0]) * 4 + 1
YCoords = np.array([0, -1, -.8, 0, .6, 1]) * 4 - 1
poi = poi_distance_method(
XCoords=XCoords, YCoords=YCoords
)
assert np.abs(poi[0] - 0.92) < 0.1
assert np.abs(poi[1] + 0.62) < 0.1
|
agpl-3.0
| -1,580,463,007,600,693,000
| 31.560976
| 66
| 0.677903
| false
| 3.19378
| false
| false
| false
|
bletham/fstimer
|
fstimer/gui/preregister.py
|
1
|
5850
|
#fsTimer - free, open source software for race timing.
#Copyright 2012-15 Ben Letham
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
#The author/copyright holder can be contacted at bletham@gmail.com
'''Handling of the preregistration setup window'''
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import fstimer.gui
import os
from fstimer.gui.util_classes import MsgDialog
from fstimer.gui.util_classes import GtkStockButton
class PreRegistrationWin(Gtk.Window):
    '''Handling of the preregistration setup window'''
def __init__(self, path, set_registration_file_cb, handle_registration_cb):
        '''Builds and displays the preregistration window, which sets the
        computer's registration ID and optionally loads a pre-registration json'''
super(PreRegistrationWin, self).__init__(Gtk.WindowType.TOPLEVEL)
self.path = path
self.set_registration_file_cb = set_registration_file_cb
self.modify_bg(Gtk.StateType.NORMAL, fstimer.gui.bgcolor)
fname = os.path.abspath(
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'../data/icon.png'))
self.set_icon_from_file(fname)
self.set_title('fsTimer - ' + os.path.basename(path))
self.set_position(Gtk.WindowPosition.CENTER)
self.connect('delete_event', lambda b, jnk: self.hide())
self.set_border_width(10)
# Start with some intro text.
prereglabel1 = Gtk.Label('Give a unique number to each computer used for registration.\nSelect a pre-registration file, if available.')
# Continue to the spinner
preregtable = Gtk.Table(3, 2, False)
preregtable.set_row_spacings(5)
preregtable.set_col_spacings(5)
preregtable.set_border_width(10)
regid = Gtk.Adjustment(value=1, lower=1, upper=99, step_incr=1)
regid_btn = Gtk.SpinButton(digits=0, climb_rate=0)
regid_btn.set_adjustment(regid)
preregtable.attach(regid_btn, 0, 1, 0, 1)
preregtable.attach(Gtk.Label(label="This computer's registration number"), 1, 2, 0, 1)
preregbtnFILE = Gtk.Button('Select pre-registration')
preregbtnFILE.connect('clicked', self.file_selected)
preregtable.attach(preregbtnFILE, 0, 1, 2, 3)
self.preregfilelabel = Gtk.Label(label='')
self.preregfilelabel.set_markup('<span color="blue">No pre-registration selected.</span>')
preregtable.attach(self.preregfilelabel, 1, 2, 2, 3)
## buttons
prereghbox = Gtk.HBox(True, 0)
preregbtnOK = GtkStockButton('ok',"OK")
preregbtnOK.connect('clicked', self.preregister_ok_cb, regid_btn, handle_registration_cb)
preregbtnCANCEL = GtkStockButton('close',"Close")
preregbtnCANCEL.connect('clicked', lambda b: self.hide())
prereghbox.pack_start(preregbtnOK, False, False, 5)
prereghbox.pack_start(preregbtnCANCEL, False, False, 5)
#Vbox
preregvbox = Gtk.VBox(False, 0)
preregbtnhalign = Gtk.Alignment.new(1, 0, 0, 0)
preregbtnhalign.add(prereghbox)
preregvbox.pack_start(prereglabel1, False, False, 5)
preregvbox.pack_start(preregtable, False, False, 5)
preregvbox.pack_start(preregbtnhalign, False, False, 5)
self.add(preregvbox)
self.show_all()
def file_selected(self, jnk_unused):
'''Handle selection of a pre-reg file using a filechooser.'''
chooser = Gtk.FileChooserDialog(title='Select pre-registration file', parent=self, action=Gtk.FileChooserAction.OPEN, buttons=('Cancel', Gtk.ResponseType.CANCEL, 'OK', Gtk.ResponseType.OK))
ffilter = Gtk.FileFilter()
ffilter.set_name('Registration files')
ffilter.add_pattern('*_registration_*.json')
chooser.add_filter(ffilter)
chooser.set_current_folder(self.path)
response = chooser.run()
if response == Gtk.ResponseType.OK:
filename = chooser.get_filename()
try:
self.set_registration_file_cb(filename)
self.preregfilelabel.set_markup('<span color="blue">Pre-registration '+os.path.basename(filename)+' loaded.</span>')
except (IOError, ValueError):
self.preregfilelabel.set_markup('<span color="red">ERROR! Failed to load '+os.path.basename(filename)+'.</span>')
chooser.destroy()
return
def preregister_ok_cb(self, jnk_unused, regid_btn, handle_registration_cb):
'''If OK is pushed on the pre-register window.'''
#First check if the file already exists
regid = regid_btn.get_value_as_int()
filename = os.path.join(self.path, os.path.basename(self.path)+'_registration_'+str(regid)+'.json')
if os.path.exists(filename):
#Raise a warning window
md = MsgDialog(self, 'warning', ['ok', 'cancel'], 'Proceed?', "A file with this registration number already exists.\nIf you continue it will be overwritten!")
resp = md.run()
md.destroy()
#Check the result.
if resp == Gtk.ResponseType.CANCEL:
#Do nothing.
return
#Else, continue on.
handle_registration_cb(regid)
|
gpl-3.0
| 8,722,225,096,570,457,000
| 49.008547
| 197
| 0.665128
| false
| 3.683879
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_07_01/operations/_operations.py
|
1
|
4744
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class Operations(object):
"""Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.OperationListResult"]
"""Lists all of the available Network Rest API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.OperationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('OperationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.Network/operations'} # type: ignore
|
mit
| 4,323,221,576,189,648,400
| 42.522936
| 133
| 0.640388
| false
| 4.610301
| false
| false
| false
|
azurer100/monitor
|
monitor/collector.py
|
1
|
7231
|
#!/usr/bin/env python
# encoding: utf-8
'''
monitor.collector -- shortdesc
monitor.collector is a description
It defines classes_and_methods
@author: Yi
@copyright: 2016 MY. All rights reserved.
'''
import ConfigParser
import socket, time, string, logging
import MySQLdb
from encode import Encode
logging.basicConfig(level=logging.DEBUG,
filename='logs/collector.log',
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%d %b %Y %H:%M:%S')
bufsize = 1500
port = 10514
sql_linux_fs = "INSERT INTO linux_file_monitor_info(`access_time`,`operator_status`,`operator_path`,`process_name`,`exec_user`,`original_user`,`local_ip`,`file_md5`,`container_oid`,`aciton`,`status`) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
sql_linux_ps = "INSERT INTO linux_process_monitor_info(`access_time`,`process_status`,`file_path`,`pid`,`process_name`,`ppid`,`parent_process_name`,`exec_user`,`original_user`,`local_ip`,`file_md5`,`aciton`) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
sql_linux_net = "INSERT INTO linux_network_monitor_info(`access_time`,`loacl_address`,`foreign_address`,`state`,`protolcol`,`pid`,`progame_name`,`network_status`,`container_oid`,`aciton`) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
sql_linux_cmd = "INSERT INTO linux_command_monitor_info(`access_time`,`exec_command`,`exec_result`,`exec_user`,`original_user`,`local_ip`,`user_ip`,`operator_status`,`container_oid`,`aciton`) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
class Config:
def __init__(self, path = "./collector.ini"):
cf = ConfigParser.ConfigParser()
cf.read(path)
#return all section
secs = cf.sections()
logging.info("config sections: %s" % secs)
encode = cf.get("other", "encode")
self.db_host = cf.get("db", "host")
self.db_port = cf.getint("db", "port")
self.db_user = cf.get("db", "user")
if(encode == "0"):
self.db_pw = cf.get("db", "pw")
self.db_pw_b = Encode.encrypt(self.db_pw)
else:
self.db_pw_b = cf.get("db", "pw")
self.db_pw = Encode.decrypt(self.db_pw_b)
self.db_name = cf.get("db", "name")
self.sl_host = cf.get("syslog", "host")
self.sl_port = cf.getint("syslog", "port")
#modify one value and write to file
cf.set("db", "pw", self.db_pw_b)
cf.set("other", "encode", "1")
cf.write(open(path, "w"))
def linux_fs(ip, syslog):
items = syslog.split(" ")
file_path_action = items[0]
file_name = items[1]
process_name = items[2]
exec_user = items[3]
ori_user = items[4]
file_md5 = items[5]
return (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()), file_path_action, file_name, process_name, exec_user, ori_user,
ip, file_md5, None, "1", None)
def linux_ps(ip, syslog):
items = syslog.split(" ")
return (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(items[1]))), items[0], items[2], items[3], items[4], items[5],
items[6], items[7], items[8], ip, "", "1")
def linux_net(ip, syslog):
items = syslog.split(" ")
return (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(items[0]))), items[1], items[2], items[3], items[4], (items[5] if items[5] != "" else None),
items[6], items[7], None, "1")
def linux_cmd(ip, syslog):
items = syslog.split(" ")
return (time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(items[0]))), items[1], items[2], items[3], items[4], ip, items[5],
None, None, "1")
def main():
logging.info("starting collector...")
config = Config()
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((config.sl_host, config.sl_port))
except Exception, e:
logging.error("error bind syslog port: " + str(e.args))
try:
conn = MySQLdb.connect(host=config.db_host, db=config.db_name, port=config.db_port, user=config.db_user, passwd=config.db_pw,
connect_timeout=10, use_unicode=True, autocommit=True)
curs = conn.cursor()
except Exception, e:
logging.error("mysql can not be connected: " + str(e.args))
logging.info("syslog is start to collect")
try:
while 1:
try:
data, addr = sock.recvfrom(bufsize)
syslog = str(data)
logging.debug("syslog: %s" % syslog)
# <131> Jul 26 11:34:47 2016 ubuntu linux_fs: hello 1 1 1 1 1 1 1
n = syslog.find('>')
                severity=string.atoi(syslog[1:n])&0x0007
facility=(string.atoi(syslog[1:n])&0x03f8)>>3
syslog_msg = syslog[27:]
host = syslog_msg[:syslog_msg.find(' ')]
syslog_msg = syslog[28+len(host) :]
who = syslog_msg[:syslog_msg.find(': ')]
syslog_msg = syslog[30+len(host + who) :]
if (who == "linux_fs"):
param = linux_fs(addr[0], syslog_msg)
curs.execute(sql_linux_fs, param)
if (who == "linux_ps"):
param1 = linux_ps(addr[0], syslog_msg)
curs.execute(sql_linux_ps, param1)
if (who == "linux_net"):
param2 = linux_net(addr[0], syslog_msg)
curs.execute(sql_linux_net, param2)
if (who == "linux_cmd"):
param3 = linux_cmd(addr[0], syslog_msg)
curs.execute(sql_linux_cmd, param3)
logging.info("syslog: %s" % syslog_msg)
except socket.error:
logging.error("syslog collection failed")
pass
except Exception, e:
logging.error("syslog stop: " + str(e.args))
sock.close()
curs.close()
conn.close()
# sys.exit()
time.sleep(10)
main()
if __name__ == '__main__':
main()
syslog_serverty={ 0:"emergency",
1:"alert",
2:"critical",
3:"error",
4:"warning",
5:"notice",
6:"info",
7:"debug"
}
syslog_facility={ 0:"kernel",
1:"user",
2:"mail",
3:"daemaon",
4:"auth",
5:"syslog",
6:"lpr",
7:"news",
8:"uucp",
9:"cron",
10:"authpriv",
11:"ftp",
12:"ntp",
13:"security",
14:"console",
15:"cron",
16:"local 0",
17:"local 1",
18:"local 2",
19:"local 3",
20:"local 4",
21:"local 5",
22:"local 6",
23:"local 7"
}
|
mit
| -5,116,564,051,706,430,000
| 36.466321
| 252
| 0.493016
| false
| 3.488181
| true
| false
| false
|
tensorflow/probability
|
tensorflow_probability/python/bijectors/square.py
|
1
|
2677
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Square bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.bijectors import bijector
from tensorflow_probability.python.internal import assert_util
__all__ = [
'Square',
]
class Square(bijector.AutoCompositeTensorBijector):
"""Compute `g(X) = X^2`; X is a positive real number.
g is a bijection between the non-negative real numbers (R_+) and the
non-negative real numbers.
#### Examples
```python
bijector.Square().forward(x=[[1., 0], [2, 1]])
# Result: [[1., 0], [4, 1]], i.e., x^2
bijector.Square().inverse(y=[[1., 4], [9, 1]])
# Result: [[1., 2], [3, 1]], i.e., sqrt(y).
```
"""
def __init__(self, validate_args=False, name='square'):
"""Instantiates the `Square` bijector.
Args:
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str` name given to ops managed by this object.
"""
parameters = dict(locals())
with tf.name_scope(name) as name:
super(Square, self).__init__(
forward_min_event_ndims=0,
validate_args=validate_args,
parameters=parameters,
name=name)
@classmethod
def _is_increasing(cls):
return True
@classmethod
def _parameter_properties(cls, dtype):
return dict()
def _forward(self, x):
with tf.control_dependencies(self._assertions(x)):
return tf.square(x)
def _inverse(self, y):
with tf.control_dependencies(self._assertions(y)):
return tf.sqrt(y)
def _forward_log_det_jacobian(self, x):
with tf.control_dependencies(self._assertions(x)):
return np.log(2.) + tf.math.log(x)
def _assertions(self, t):
if not self.validate_args:
return []
return [assert_util.assert_non_negative(
t, message='All elements must be non-negative.')]
|
apache-2.0
| 9,180,104,289,838,083,000
| 27.784946
| 78
| 0.653343
| false
| 3.7493
| false
| false
| false
|
recursionbane/tensorflow-prebuilt-classifier
|
predict.py
|
1
|
1260
|
import sys
# Check and fail early!
if (len(sys.argv) != 2):
print('\nERROR: Must supply the image you want to run prediction on!\n')
exit(-1)
import tensorflow as tf
image_path = sys.argv[1]
# Read in the image_data
image_data = tf.gfile.FastGFile(image_path, 'rb').read()
# Loads label file, strips off carriage return
label_lines = [line.rstrip() for line
in tf.gfile.GFile("retrained_labels.txt")]
# Unpersists graph from file
with tf.gfile.FastGFile("retrained_graph.pb", 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
_ = tf.import_graph_def(graph_def, name='')
with tf.Session() as sess:
# Feed the image_data as input to the graph and get first prediction
softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
predictions = sess.run(softmax_tensor, \
{'DecodeJpeg/contents:0': image_data})
# Sort to show labels of first prediction in order of confidence
top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]
for node_id in top_k:
human_string = label_lines[node_id]
score = predictions[0][node_id]
print('%s (score = %.5f)' % (human_string, score))
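# Usage sketch: python predict.py <image.jpg>
# Assumes retrained_labels.txt and retrained_graph.pb (produced by a prior
# TensorFlow retraining run) are present in the working directory.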
|
gpl-3.0
| 7,123,609,686,781,757,000
| 30.358974
| 73
| 0.629365
| false
| 3.36
| false
| false
| false
|
poeticcapybara/pythalesians
|
pythalesians-examples/bokeh_examples.py
|
1
|
4452
|
__author__ = 'saeedamen'
#
# Copyright 2015 Thalesians Ltd. - http//www.thalesians.com / @thalesians
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and limitations under the License.
#
"""
bokeh_examples
Shows how to plot using Bokeh library.
"""
import datetime
from pythalesians.market.loaders.lighttimeseriesfactory import LightTimeSeriesFactory
from pythalesians.market.requests.timeseriesrequest import TimeSeriesRequest
from pythalesians.timeseries.calcs.timeseriescalcs import TimeSeriesCalcs
from pythalesians.graphics.graphs.plotfactory import PlotFactory
from pythalesians.graphics.graphs.graphproperties import GraphProperties
if True:
time_series_request = TimeSeriesRequest(
start_date = "01 Jan 2013", # start date
finish_date = datetime.date.today(), # finish date
freq = 'daily', # daily data
        data_source = 'google',                     # use Google as data source
tickers = ['Apple', 'S&P500 ETF'], # ticker (Thalesians)
fields = ['close'], # which fields to download
vendor_tickers = ['aapl', 'spy'], # ticker (Google)
        vendor_fields = ['Close'],                  # which Google fields to download
cache_algo = 'internet_load_return') # how to return data
ltsf = LightTimeSeriesFactory()
tsc = TimeSeriesCalcs()
df = tsc.create_mult_index_from_prices(ltsf.harvest_time_series(time_series_request))
gp = GraphProperties()
gp.html_file_output = "output_data/apple.htm"
gp.title = "S&P500 vs Apple"
# plot first with PyThalesians and then Bokeh
# just needs 1 word to change
gp.display_legend = False
pf = PlotFactory()
pf.plot_generic_graph(df, type = 'line', adapter = 'pythalesians', gp = gp)
pf.plot_generic_graph(df, type = 'line', adapter = 'bokeh', gp = gp)
# test simple Bokeh bar charts - monthly returns over past 6 months
if True:
from datetime import timedelta
ltsf = LightTimeSeriesFactory()
end = datetime.datetime.utcnow()
start = end - timedelta(days=180)
tickers = ['S&P500', 'FTSE', 'Nikkei']
vendor_tickers = ['SPX Index', 'UKX Index', 'NKY Index']
time_series_request = TimeSeriesRequest(
start_date = start, # start date
finish_date = datetime.date.today(), # finish date
freq = 'daily', # daily data
data_source = 'bloomberg', # use Bloomberg as data source
tickers = tickers, # ticker (Thalesians)
fields = ['close'], # which fields to download
vendor_tickers = vendor_tickers, # ticker (Bloomberg)
vendor_fields = ['PX_LAST'], # which Bloomberg fields to download
cache_algo = 'internet_load_return') # how to return data
daily_vals = ltsf.harvest_time_series(time_series_request)
# resample for end of month
daily_vals = daily_vals.resample('BM')
daily_vals = daily_vals / daily_vals.shift(1) - 1
daily_vals.index = [str(x.year) + '/' + str(x.month) for x in daily_vals.index]
daily_vals = daily_vals.drop(daily_vals.head(1).index)
pf = PlotFactory()
gp = GraphProperties()
gp.source = 'Thalesians/BBG'
gp.html_file_output = "output_data/equities.htm"
gp.title = 'Recent monthly changes in equity markets'
gp.scale_factor = 2
gp.display_legend = True
gp.chart_type = ['bar', 'scatter', 'line']
gp.x_title = 'Dates'
gp.y_title = 'Pc'
# plot using Bokeh then PyThalesians
pf.plot_bar_graph(daily_vals * 100, adapter = 'bokeh', gp = gp)
pf.plot_bar_graph(daily_vals * 100, adapter = 'pythalesians', gp = gp)
|
apache-2.0
| -1,957,251,320,556,141,600
| 41.009434
| 121
| 0.612534
| false
| 3.795396
| false
| false
| false
|
johnnykv/heralding
|
heralding/capabilities/ftp.py
|
1
|
3906
|
# Copyright (C) 2017 Johnny Vestergaard <jkv@unixcluster.dk>
#
# Rewritten by Aniket Panse <contact@aniketpanse.in>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Aniket Panse <contact@aniketpanse.in> grants Johnny Vestergaard <jkv@unixcluster.dk>
# a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable
# copyright license to reproduce, prepare derivative works of, publicly
# display, publicly perform, sublicense, relicense, and distribute [the] Contributions
# and such derivative works.
import logging
from heralding.capabilities.handlerbase import HandlerBase
logger = logging.getLogger(__name__)
TERMINATOR = '\r\n'
class FtpHandler:
"""Handles a single FTP connection"""
def __init__(self, reader, writer, options, session):
self.banner = options['protocol_specific_data']['banner']
    self.max_logins = int(options['protocol_specific_data']['max_attempts'])
self.syst_type = options['protocol_specific_data']['syst_type']
self.authenticated = False
self.writer = writer
self.reader = reader
self.serve_flag = True
self.session = session
self.state = None
self.user = None
async def getcmd(self):
cmd = await self.reader.readline()
return str(cmd, 'utf-8')
async def serve(self):
await self.respond('220 ' + self.banner)
while self.serve_flag:
resp = await self.getcmd()
if not resp:
self.stop()
break
else:
try:
cmd, args = resp.split(' ', 1)
except ValueError:
cmd = resp
args = None
else:
args = args.strip('\r\n')
cmd = cmd.strip('\r\n')
cmd = cmd.upper()
# List of commands allowed before a login
unauth_cmds = ['USER', 'PASS', 'QUIT', 'SYST']
meth = getattr(self, 'do_' + cmd, None)
if not meth:
await self.respond('500 Unknown Command.')
else:
if not self.authenticated:
if cmd not in unauth_cmds:
await self.respond('503 Login with USER first.')
continue
await meth(args)
self.state = cmd
async def do_USER(self, arg):
self.user = arg
await self.respond('331 Now specify the Password.')
async def do_PASS(self, arg):
if self.state != 'USER':
await self.respond('503 Login with USER first.')
return
passwd = arg
self.session.add_auth_attempt(
'plaintext', username=self.user, password=passwd)
await self.respond('530 Authentication Failed.')
    if self.session.get_number_of_login_attempts() >= self.max_logins:
self.stop()
async def do_SYST(self, arg):
await self.respond('215 {0}'.format(self.syst_type))
async def do_QUIT(self, arg):
await self.respond('221 Bye.')
self.serve_flag = False
self.stop()
async def respond(self, msg):
msg += TERMINATOR
msg_bytes = bytes(msg, 'utf-8')
self.writer.write(msg_bytes)
await self.writer.drain()
def stop(self):
self.session.end_session()
class ftp(HandlerBase):
def __init__(self, options, loop):
super().__init__(options, loop)
self._options = options
async def execute_capability(self, reader, writer, session):
ftp_cap = FtpHandler(reader, writer, self._options, session)
await ftp_cap.serve()
|
gpl-3.0
| 5,418,834,045,856,934,000
| 30.248
| 86
| 0.658474
| false
| 3.674506
| false
| false
| false
|
hayalasalah/adhan.py
|
adhan/adhan.py
|
1
|
3946
|
"""
adhan.py - The main interface for using the API.
Copyright (C) 2015 Zuhair Parvez
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import math
from datetime import datetime, timedelta
from functools import partial
# from adhan import calculations, methods
from .calculations import (
compute_time_at_sun_angle,
compute_zuhr_utc,
time_at_shadow_length,
)
from .methods import ASR_STANDARD
SUNRISE_ANGLE = 0.833
SUNSET_ANGLE = 0.833
def floating_point_to_datetime(day, fp_time):
"""Convert a floating point time to a datetime."""
result = datetime(year=day.year, month=day.month, day=day.day)
result += timedelta(minutes=math.ceil(60 * fp_time))
return result
def adhan(day, location, parameters, timezone_offset=0):
"""Calculate adhan times given the parameters.
This function will compute the adhan times for a certain location on
certain day. The method for calculating the prayers as well as the time for
Asr can also be specified. The timezone offset naively adds the specified
number of hours to each time that is returned.
:param day: The datetime.date to calculate for
:param location: 2-tuple of floating point coordiantes for latitude and
longitude of location in degrees
:param parameters: A dictionary-like object of parameters for computing
adhan times. Commonly used calculation methods are
available in the adhan.methods module
:param timezone_offset: The number of hours to add to each prayer time
to account for timezones. Can be floating point
"""
latitude, longitude = location
#
# To reduce a little repetitiveness, using a partial function that has the
# day and latitude already set
#
time_at_sun_angle = partial(
compute_time_at_sun_angle,
day=day,
latitude=latitude
)
zuhr_time = compute_zuhr_utc(day, longitude)
shuruq_time = zuhr_time - time_at_sun_angle(angle=SUNRISE_ANGLE)
maghrib_time = zuhr_time + time_at_sun_angle(angle=SUNSET_ANGLE)
fajr_time = zuhr_time - time_at_sun_angle(angle=parameters['fajr_angle'])
#
# Most methods define Isha as a certain angle the sun has to be below
# the horizon, but some methods define it as a certain number of minutes
# after Maghrib
#
if parameters.get('isha_delay', None):
isha_time = maghrib_time + parameters['isha_delay']
else:
isha_time = (
zuhr_time +
time_at_sun_angle(angle=parameters['isha_angle'])
)
#
# Default to standard Asr method if not specified
#
asr_multiplier = parameters.get('asr_multiplier', ASR_STANDARD)
asr_time = zuhr_time + time_at_shadow_length(
day=day, latitude=latitude, multiplier=asr_multiplier
)
offset = timedelta(minutes=60 * timezone_offset)
return {
'fajr': floating_point_to_datetime(day, fajr_time) + offset,
'zuhr': floating_point_to_datetime(day, zuhr_time) + offset,
'shuruq': floating_point_to_datetime(day, shuruq_time) + offset,
'asr': floating_point_to_datetime(day, asr_time) + offset,
'maghrib': floating_point_to_datetime(day, maghrib_time) + offset,
'isha': floating_point_to_datetime(day, isha_time) + offset,
}
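# Minimal usage sketch: the ISNA preset import is an assumption about the
# companion ``methods`` module; coordinates and offset are placeholders.
#
#     from datetime import date
#     from adhan import adhan
#     from adhan.methods import ISNA
#
#     times = adhan(day=date.today(), location=(30.25, -97.75),
#                   parameters=ISNA, timezone_offset=-6)
#     # -> dict with keys 'fajr', 'shuruq', 'zuhr', 'asr', 'maghrib', 'isha'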
|
lgpl-3.0
| -2,185,333,171,867,021,300
| 33.920354
| 79
| 0.688039
| false
| 3.733207
| false
| false
| false
|
kubeflow/kfp-tekton
|
sdk/python/tests/compiler/testdata/recursion_while.py
|
1
|
1971
|
# Copyright 2020 kubeflow.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp import dsl
from kfp_tekton.compiler import TektonCompiler
class Coder:
def empty(self):
return ""
TektonCompiler._get_unique_id_code = Coder.empty
def flip_coin_op():
"""Flip a coin and output heads or tails randomly."""
return dsl.ContainerOp(
name='Flip coin',
image='python:alpine3.6',
command=['sh', '-c'],
arguments=['python -c "import random; result = \'heads\' if random.randint(0,1) == 0 '
'else \'tails\'; print(result)" | tee /tmp/output'],
file_outputs={'output': '/tmp/output'}
)
def print_op(msg):
"""Print a message."""
return dsl.ContainerOp(
name='Print',
image='alpine:3.6',
command=['echo', msg],
)
@dsl._component.graph_component
def flip_component(flip_result, maxVal):
with dsl.Condition(flip_result == 'heads'):
print_flip = print_op(flip_result)
flipA = flip_coin_op().after(print_flip)
flip_component(flipA.output, maxVal)
@dsl.pipeline(
name='recursion pipeline',
description='shows how to use graph_component and recursion.'
)
def flipcoin(maxVal=12):
flip_out = flip_coin_op()
flip_loop = flip_component(flip_out.output, maxVal)
print_op('cool, it is over. %s' % flip_out.output).after(flip_loop)
if __name__ == '__main__':
TektonCompiler().compile(flipcoin, __file__.replace('.py', '.yaml'))
|
apache-2.0
| 2,473,245,731,787,266,600
| 28.41791
| 94
| 0.663115
| false
| 3.47007
| false
| false
| false
|
omnbmh/pi6x
|
webapps/weibo/demo.py
|
1
|
2953
|
#! /usr/bin/env python
# -*- coding:utf-8 -*- #
import time
#sys.path.insert(0, 'tweibo.zip')
from tweibo import *
# Replace with your own APPKEY
APP_KEY = "100628862"
APP_SECRET = "021e18ea097817f15a819a45c0e5c592"
CALLBACK_URL = "http://127.0.0.1:8000"
# First follow the OAuth instructions at https://github.com/upbit/tweibo-pysdk/wiki/OAuth2Handler to fill in ACCESS_TOKEN and OPENID
ACCESS_TOKEN = "c3337750b56e1ee3d35e669ebdea0eef"
OPENID = "99A960D0C781A65640DD2A1BE48CCD6A"
IMG_EXAMPLE = "example.png"
# The returned text is unicode; set the default encoding to utf8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
def access_token_test():
""" 访问get_access_token_url()的URL并授权后,会跳转callback页面,其中包含如下参数:
#access_token=00000000000ACCESSTOKEN0000000000&expires_in=8035200&openid=0000000000000OPENID0000000000000&openkey=0000000000000OPENKEY000000000000&refresh_token=0000000000REFRESHTOKEN00000000&state=
    Save the access_token and openid from it and call
oauth.set_access_token(access_token)
oauth.set_openid(openid)
    to complete the initialization of OAuth2Handler(). The access_token and related values can be persisted.
"""
oauth = OAuth2Handler()
oauth.set_app_key_secret(APP_KEY, APP_SECRET, CALLBACK_URL)
print oauth.get_access_token_url()
def tweibo_test():
oauth = OAuth2Handler()
oauth.set_app_key_secret(APP_KEY, APP_SECRET, CALLBACK_URL)
oauth.set_access_token(ACCESS_TOKEN)
oauth.set_openid(OPENID)
api = API(oauth)
#api = API(oauth, host="127.0.0.1", port=8888) # Init API() with proxy
# GET /t/show
#tweet1 = api.get.t__show(format="json", id=301041004850688)
#print ">> %s: %s" % (tweet1.data.nick, tweet1.data.text)
# POST /t/add
#content_str = "[from PySDK] %s says: %s" % (tweet1.data.nick, tweet1.data.origtext)
#tweet2 = api.post.t__add(format="json", content=content_str, clientip="10.0.0.1")
#print ">> time=%s, http://t.qq.com/p/t/%s" % (tweet2.data.time, tweet2.data.id)
# GET /statuses/user_timeline
#user_timeline = api.get.statuses__user_timeline(format="json", name="qqfarm", reqnum=3, pageflag=0, lastid=0, pagetime=0, type=3, contenttype=0)
#for idx, tweet in enumerate(user_timeline.data.info):
# print "[%d] http://t.qq.com/p/t/%s, (type:%d) %s" % (idx+1, tweet.id, tweet.type, tweet.text)
# UPLOAD /t/upload_pic
pic1 = api.upload.t__upload_pic(format="json", pic_type=2, pic=open(IMG_EXAMPLE, "rb"))
print ">> IMG: %s" % (pic1.data.imgurl)
# POST /t/add_pic_url
content_str2 = "[from PySDK] add pic demo: %s, time %s" % (IMG_EXAMPLE, time.time())
pic_urls = "%s" % (pic1.data.imgurl)
tweet_pic1 = api.post.t__add_pic_url(format="json", content=content_str2, pic_url=pic_urls, clientip="10.0.0.1")
print ">> time=%s, http://t.qq.com/p/t/%s" % (tweet_pic1.data.time, tweet_pic1.data.id)
if __name__ == '__main__':
#access_token_test()
tweibo_test()
|
mit
| 7,072,474,937,092,337,000
| 39.042857
| 206
| 0.674634
| false
| 2.428943
| false
| false
| false
|
MicrosoftGenomics/FaST-LMM
|
fastlmm/util/runner/LocalMultiThread.py
|
1
|
2490
|
'''
Runs a distributable job on multiple processors. Returns the value of the job.
See SamplePi.py for examples.
'''
from fastlmm.util.runner import *
import os
import logging
try:
import dill as pickle
except:
logging.warning("Can't import dill, so won't be able to clusterize lambda expressions. If you try, you'll get this error 'Can't pickle <type 'function'>: attribute lookup __builtin__.function failed'")
import cPickle as pickle
import subprocess, sys, os.path
import threading
import fastlmm.util.util as util
from Queue import PriorityQueue
class LocalMultiThread: # implements IRunner
'''Designed so that reduce will start running as soon as the 1st task as finished
'''
    def __init__(self, taskcount, mkl_num_threads=None, just_one_process=False):
        if taskcount <= 0:
            raise Exception("Expect taskcount to be positive")
self.taskcount = taskcount
self.just_one_process = just_one_process
        if mkl_num_threads is not None:
os.environ['MKL_NUM_THREADS'] = str(mkl_num_threads)
def _result_sequence(self,thread_list,priority_queue,shaped_distributable):
for thread in thread_list:
if not self.just_one_process:
thread.join()
result_sequence = priority_queue.get()[1]
for result in result_sequence:
yield result
def run(self, distributable):
JustCheckExists().input(distributable)
priority_queue = PriorityQueue()
thread_list = []
shaped_distributable = shape_to_desired_workcount(distributable, self.taskcount)
for taskindex in xrange(self.taskcount):
def _target(taskindex=taskindex):
result_list = []
for work in work_sequence_for_one_index(shaped_distributable, self.taskcount, taskindex):
result_list.append(run_all_in_memory(work))
priority_queue.put((taskindex,result_list))
if not self.just_one_process:
thread = threading.Thread(target=_target,name=str(taskindex))
thread_list.append(thread)
thread.start()
else:
thread_list.append(None)
_target()
result_sequence = self._result_sequence(thread_list, priority_queue,shaped_distributable)
result = shaped_distributable.reduce(result_sequence)
JustCheckExists().output(distributable)
return result
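# Editor's note: a hedged usage sketch, not from the original source. Given a
# ``distributable`` object of the kind shape_to_desired_workcount() and
# run_all_in_memory() expect, the runner is driven roughly like this:
#
#     runner = LocalMultiThread(taskcount=4, mkl_num_threads=1)
#     result = runner.run(distributable)  # reduce() starts on first result
#
# Passing just_one_process=True keeps the same code path but executes the
# tasks serially in the calling thread, which is handy for debugging.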
|
apache-2.0
| -3,851,306,007,927,130,000
| 37.90625
| 205
| 0.64739
| false
| 4.081967
| false
| false
| false
|
riddlezyc/geolab
|
src/energyforce/metad/calc.py
|
1
|
7123
|
#coding=utf-8
import numpy as np
# Compute the minimum-image distance between two atoms under periodic boundary conditions (PBC)
def dis_pbc(a, b, c, alpha, beta, gamma, t1, t2):
cosalpha = np.cos(alpha * np.pi / 180)
sinalpha = np.sin(alpha * np.pi / 180)
cosbeta = np.cos(beta * np.pi / 180)
cosgamma = np.cos(gamma * np.pi / 180)
singamma = np.sin(gamma * np.pi / 180)
Ax = a
Bx = b * cosgamma
By = b * singamma
Cx = cosbeta
Cy = (cosalpha - cosbeta * cosgamma) / singamma
Cz = np.sqrt(1.0 - Cx * Cx - Cy * Cy)
Cx = c * Cx
Cy = c * Cy
Cz = c * Cz
xmin = np.abs(t1[0] - t2[0]) - Ax * np.round(np.abs(t1[0] - t2[0]) / Ax)
ymin = np.abs(t1[1] - t2[1]) - By * np.round(np.abs(t1[1] - t2[1]) / By)
zmin = np.abs(t1[2] - t2[2]) - Cz * np.round(np.abs(t1[2] - t2[2]) / Cz)
return np.sqrt(xmin * xmin + ymin * ymin + zmin * zmin)
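# Editor's note: a hedged worked example, not in the original file. For a
# cubic cell with a = b = c = 10 and all angles 90 degrees, atoms at x = 1
# and x = 9 are 2 apart under the minimum-image convention, not 8:
#
#     >>> dis_pbc(10, 10, 10, 90, 90, 90, (1.0, 0.0, 0.0), (9.0, 0.0, 0.0))
#     2.0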
# Works for a general (triclinic) cell, i.e. takes a, b, c, alpha, beta, gamma as input
def dis(pbca, pbcb, pbcc, t1, t2):
xmin = np.abs(t1[0] - t2[0]) - pbca * np.round(np.abs(t1[0] - t2[0]) / pbca)
ymin = np.abs(t1[1] - t2[1]) - pbcb * np.round(np.abs(t1[1] - t2[1]) / pbcb)
zmin = np.abs(t1[2] - t2[2]) - pbcc * np.round(np.abs(t1[2] - t2[2]) / pbcc)
return xmin,ymin,np.sqrt(xmin * xmin + ymin * ymin + zmin * zmin)
# For orthorhombic cells: only a, b, c are needed since alpha, beta, gamma are all 90 degrees
def dis_npbc(t1, t2):
xmin = t1[0] - t2[0]
ymin = t1[1] - t2[1]
zmin = t1[2] - t2[2]
return np.sqrt(xmin * xmin + ymin * ymin + zmin * zmin)
# For systems where periodicity is not considered
def plane(h1, h2, o):
x1, y1, z1 = [h1[0]-o[0], h1[1]-o[1], h1[2]-o[2]]
x2, y2, z2 = [h2[0]-o[0], h2[1]-o[1], h2[2]-o[2]]
b = (x2 * z1 - x1 * z2) / (y2 * z1 - y1 * z2)
c = (x1 * y2 - x2 * y1) / (y2 * z1 - y1 * z2)
return np.arccos(c / np.sqrt(1 + b * b + c * c)) * 180 / np.pi
# From the coordinates of the 3 atoms of a water molecule, compute the angle (in degrees) between the normal of the plane they define and the z direction
def polar(h1, h2, o):
x1 = (h1[0] + h2[0]) * 0.5
y1 = (h1[1] + h2[1]) * 0.5
z1 = (h1[2] + h2[2]) * 0.5
x, y, z = [x1-o[0], y1-o[1], z1-o[2]]
r = np.sqrt(x * x + y * y + z * z)
    return np.arccos(z / r) * 180 / np.pi
# Take the midpoint of the 2 H atoms as the positive charge center and the O coordinate as the negative one; their connecting line gives the dipole direction. Returns the angle (in degrees) between this direction and z
def ori(o,h):
x, y, z = [h[0]-o[0], h[1]-o[1], h[2]-o[2]]
r = np.sqrt(x * x + y * y + z * z)
return np.arccos(np.abs(z / r)) * 180 / np.pi
# For the angle (in degrees) between an OH group (or any other two atoms) and the z axis
def plane_abs(h1, h2, o):
x1, y1, z1 = [h1[0]-o[0], h1[1]-o[1], h1[2]-o[2]]
x2, y2, z2 = [h2[0]-o[0], h2[1]-o[1], h2[2]-o[2]]
b = (x2 * z1 - x1 * z2) / (y2 * z1 - y1 * z2)
c = (x1 * y2 - x2 * y1) / (y2 * z1 - y1 * z2)
return np.arccos(np.abs(c / np.sqrt(1 + b * b + c * c))) * 180 / np.pi
# From the coordinates of the 3 atoms of a water molecule, compute the angle (in degrees) between the normal of the plane they define and the z direction
# The absolute value is used, so the result lies in [0, 90] degrees
def polar_abs(h1, h2, o):
x1 = (h1[0] + h2[0]) * 0.5
y1 = (h1[1] + h2[1]) * 0.5
z1 = (h1[2] + h2[2]) * 0.5
x, y, z = [x1-o[0], y1-o[1], z1-o[2]]
r = np.sqrt(x * x + y * y + z * z)
return np.arccos(np.abs(z / r )) * 180 / np.pi
# Take the midpoint of the 2 H atoms as the positive charge center and the O coordinate as the negative one; their connecting line gives the dipole direction. Returns the angle (in degrees) between this direction and z
# The absolute value is used, so the result lies in [0, 90] degrees
def hbond_pbc(a, b, c, alpha, beta, gamma, donor, h, acceptor):
cosalpha = np.cos(alpha * np.pi / 180)
sinalpha = np.sin(alpha * np.pi / 180)
cosbeta = np.cos(beta * np.pi / 180)
cosgamma = np.cos(gamma * np.pi / 180)
singamma = np.sin(gamma * np.pi / 180)
Ax = a
Bx = b * cosgamma
By = b * singamma
Cx = cosbeta
Cy = (cosalpha - cosbeta * cosgamma) / singamma
Cz = np.sqrt(1.0 - Cx * Cx - Cy * Cy)
Cx = c * Cx
Cy = c * Cy
Cz = c * Cz
    # Distance between H and the acceptor
xmin = np.abs(h[0] - acceptor[0]) - Ax * np.round(np.abs(h[0] - acceptor[0]) / Ax)
ymin = np.abs(h[1] - acceptor[1]) - By * np.round(np.abs(h[1] - acceptor[1]) / By)
zmin = np.abs(h[2] - acceptor[2]) - Cz * np.round(np.abs(h[2] - acceptor[2]) / Cz)
    # O-O distance
# xmin = np.abs(donor[0] - acceptor[0]) - Ax * np.round(np.abs(donor[0] - acceptor[0]) / Ax)
# ymin = np.abs(donor[1] - acceptor[1]) - By * np.round(np.abs(donor[1] - acceptor[1]) / By)
# zmin = np.abs(donor[2] - acceptor[2]) - Cz * np.round(np.abs(donor[2] - acceptor[2]) / Cz)
r = np.sqrt(xmin * xmin + ymin * ymin + zmin * zmin)
# x1 = donor[0] - h[0] - Ax * np.round((donor[0] - h[0]) / Ax)
# y1 = donor[1] - h[1] - By * np.round((donor[1] - h[1]) / By)
# z1 = donor[2] - h[2] - Cz * np.round((donor[2] - h[2]) / Cz)
#
# x2 = (h[0] - acceptor[0]) - Ax * np.round((h[0] - acceptor[0]) / Ax)
# y2 = (h[1] - acceptor[1]) - By * np.round((h[1] - acceptor[1]) / By)
# z2 = (h[2] - acceptor[2]) - Cz * np.round((h[2] - acceptor[2]) / Cz)
x1 = acceptor[0] - donor[0] - Ax * np.round((acceptor[0] - donor[0]) / Ax)
y1 = acceptor[1] - donor[1] - By * np.round((acceptor[1] - donor[1]) / By)
z1 = acceptor[2] - donor[2] - Cz * np.round((acceptor[2] - donor[2]) / Cz)
x2 = (h[0] - donor[0]) - Ax * np.round((h[0] - donor[0]) / Ax)
y2 = (h[1] - donor[1]) - By * np.round((h[1] - donor[1]) / By)
z2 = (h[2] - donor[2]) - Cz * np.round((h[2] - donor[2]) / Cz)
dh = np.array([x1, y1, z1])
da = np.array([x2, y2, z2])
angle = np.arccos(sum(dh * da) / (np.sqrt(sum(dh * dh)) * np.sqrt(sum(da * da)))) * 180 / np.pi
return r, angle
# Computing distances and angles separately is expensive, so this standalone function handles hydrogen bonds.
# It only returns the computed r and angle (in degrees) without applying any criterion; the caller compares them against the chosen cutoffs.
def get_cell(a, b, c, alpha, beta, gamma):
cosalpha = np.cos(alpha * np.pi / 180)
sinalpha = np.sin(alpha * np.pi / 180)
cosbeta = np.cos(beta * np.pi / 180)
cosgamma = np.cos(gamma * np.pi / 180)
singamma = np.sin(gamma * np.pi / 180)
Ax = a
Ay = 0
Az = 0
Bx = b * cosgamma
By = b * singamma
Bz = 0
Cx = cosbeta
Cy = (cosalpha - cosbeta * cosgamma) / singamma
Cz = np.sqrt(1.0 - Cx * Cx - Cy * Cy)
Cx = c * Cx
Cy = c * Cy
Cz = c * Cz
return Ax, Ay, Az, Bx, By, Bz, Cx, Cy, Cz
# auto correlation function
def acf(d1):
d1unbiased = d1 - np.mean(d1)
d1norm = np.sum(d1unbiased**2)
ac = np.correlate(d1unbiased,d1unbiased,"same")/d1norm
    return ac[len(ac) // 2:]
#acf2 is very slow
def acf2(x, length):
return np.array([1] + [np.corrcoef(x[:-i], x[i:])[0,1] \
for i in range(1, length)])
#auto correlation time
def act(x):
t = 0
for i in range(len(x)):
if x[i]<=0.001:
t=i
break
return t
|
gpl-3.0
| 4,224,248,938,134,856,700
| 30.00495
| 99
| 0.503944
| false
| 1.88704
| false
| false
| false
|
StochasticNumerics/mimclib
|
tests/matern/echo_test_cmd.py
|
1
|
1481
|
#!/usr/bin/python
import numpy as np
import argparse
parser = argparse.ArgumentParser(add_help=True)
parser.register('type', 'bool',
lambda v: v.lower() in ("yes", "true", "t", "1"))
parser.add_argument("-db", type="bool", action="store", default=False)
parser.add_argument("-qoi_dim", type=int, action="store",
default=1, help="MIMC dim")
parser.add_argument("-qoi_df_nu", type=float, action="store",
default=3.5, help="MIMC dim")
args, unknowns = parser.parse_known_args()
if args.qoi_dim:
base = "\
mimc_run.py -mimc_TOL {TOL} -qoi_seed 0 \
-qoi_problem 0 -qoi_sigma 0.2 \
-mimc_min_dim {qoi_dim} -qoi_dim {qoi_dim} -qoi_df_nu {qoi_df_nu} \
-qoi_x0 0.3 0.4 0.6 -ksp_rtol 1e-25 -ksp_type gmres \
-qoi_a0 0 -qoi_f0 1 \
-qoi_scale 10 -qoi_df_sig 0.5 -mimc_M0 1 \
-mimc_beta {beta} -mimc_gamma {gamma} -mimc_h0inv 3 \
-mimc_bayes_fit_lvls 3 -mimc_moments 1 -mimc_bayesian False \
".format(TOL="{TOL}", qoi_df_nu=args.qoi_df_nu, qoi_dim=args.qoi_dim,
beta=" ".join([str("2")]*args.qoi_dim),
gamma=" ".join([str("1")]*args.qoi_dim))
else:
assert False
base += " ".join(unknowns)
if not args.db:
cmd_single = "python " + base + " -mimc_verbose 10 -db False "
print(cmd_single.format(TOL=0.001))
else:
cmd_multi = "python " + base + " -mimc_verbose 0 -db True -db_tag {tag} "
    print(cmd_multi.format(tag="misc_matern_d{:d}_nu{:.2g}".format(args.qoi_dim, args.qoi_df_nu), TOL=1e-10))
|
gpl-2.0
| -6,952,863,911,539,885,000
| 36.974359
| 108
| 0.6158
| false
| 2.381029
| false
| true
| false
|
UAVCAN/pyuavcan
|
pyuavcan/transport/_data_specifier.py
|
1
|
1424
|
# Copyright (c) 2019 UAVCAN Consortium
# This software is distributed under the terms of the MIT License.
# Author: Pavel Kirienko <pavel@uavcan.org>
from __future__ import annotations
import enum
import dataclasses
@dataclasses.dataclass(frozen=True)
class DataSpecifier:
"""
The data specifier defines what category and type of data is exchanged over a transport session.
See the abstract transport model for details.
"""
@dataclasses.dataclass(frozen=True)
class MessageDataSpecifier(DataSpecifier):
SUBJECT_ID_MASK = 2 ** 13 - 1
subject_id: int
def __post_init__(self) -> None:
if not (0 <= self.subject_id <= self.SUBJECT_ID_MASK):
raise ValueError(f"Invalid subject ID: {self.subject_id}")
@dataclasses.dataclass(frozen=True)
class ServiceDataSpecifier(DataSpecifier):
class Role(enum.Enum):
REQUEST = enum.auto()
"""
Request output role is for clients.
Request input role is for servers.
"""
RESPONSE = enum.auto()
"""
Response output role is for servers.
Response input role is for clients.
"""
SERVICE_ID_MASK = 2 ** 9 - 1
service_id: int
role: Role
def __post_init__(self) -> None:
assert self.role in self.Role
if not (0 <= self.service_id <= self.SERVICE_ID_MASK):
raise ValueError(f"Invalid service ID: {self.service_id}")
|
mit
| -7,368,559,777,367,193,000
| 26.921569
| 100
| 0.649579
| false
| 3.797333
| false
| false
| false
|
adbuerger/PECas
|
test/test_systems.py
|
1
|
3895
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 Adrian Bürger
#
# This file is part of PECas.
#
# PECas is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PECas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PECas. If not, see <http://www.gnu.org/licenses/>.
# Test the classes for system definitions
import pecas
import casadi as ca
import unittest
class TestSystemsInit(unittest.TestCase):
def test_basic_system_init(self):
self.t = ca.MX.sym("t", 1)
self.u = ca.MX.sym("u", 1)
self.p = ca.MX.sym("p", 1)
self.phi = ca.MX.sym("phi", 1)
self.g = ca.MX.sym("g", 1)
sys = pecas.systems.BasicSystem(p = self.p, phi = self.phi)
sys.show_system_information(showEquations = True)
sys = pecas.systems.BasicSystem(t = self.t, p = self.p, phi = self.phi)
sys.show_system_information(showEquations = True)
sys = pecas.systems.BasicSystem(t = self.t, u = self.u, p = self.p, \
phi = self.phi)
sys.show_system_information(showEquations = True)
sys = pecas.systems.BasicSystem(t = self.t, u = self.u, p = self.p, \
phi = self.phi, g = self.g)
sys.show_system_information(showEquations = True)
self.assertRaises(TypeError, pecas.systems.BasicSystem)
self.assertRaises(TypeError, pecas.systems.BasicSystem, p = None)
self.assertRaises(TypeError, pecas.systems.BasicSystem, phi = None)
def test_explode_system_init(self):
self.t = ca.MX.sym("t", 1)
self.u = ca.MX.sym("u", 1)
self.x = ca.MX.sym("x", 1)
self.p = ca.MX.sym("p", 1)
self.eps_e = ca.MX.sym("eps_e", 1)
self.eps_u = ca.MX.sym("eps_u", 1)
self.phi = ca.MX.sym("phi", 1)
self.f = ca.MX.sym("f", 1)
sys = pecas.systems.ExplODE(x = self.x, p = self.p, \
eps_e = self.eps_e, phi = self.phi, f = self.f)
sys.show_system_information(showEquations = True)
sys = pecas.systems.ExplODE(t = self.t, x = self.x, p = self.p, \
eps_e = self.eps_e, phi = self.phi, f = self.f)
sys.show_system_information(showEquations = True)
sys = pecas.systems.ExplODE(t = self.t, u = self.u, x = self.x, \
p = self.p, eps_e = self.eps_e, phi = self.phi, f = self.f)
sys.show_system_information(showEquations = True)
sys = pecas.systems.ExplODE(t = self.t, u = self.u, x = self.x,\
p = self.p, eps_e = self.eps_e, eps_u = self.eps_u, \
phi = self.phi, f = self.f)
sys.show_system_information(showEquations = True)
self.assertRaises(TypeError, pecas.systems.ExplODE)
self.assertRaises(TypeError, pecas.systems.ExplODE, x = None)
self.assertRaises(TypeError, pecas.systems.ExplODE, p = None)
self.assertRaises(TypeError, pecas.systems.ExplODE, w = None)
self.assertRaises(TypeError, pecas.systems.ExplODE, phi = None)
self.assertRaises(TypeError, pecas.systems.ExplODE, f = None)
        # explicit time dependency is not allowed:
self.assertRaises(NotImplementedError, pecas.systems.ExplODE, \
t = self.t, u = self.u, x = self.x, \
p = self.p, eps_e = self.eps_e, phi = self.phi, f = self.t)
def test_implade_system_init(self):
self.assertRaises(NotImplementedError, pecas.systems.ImplDAE)
|
lgpl-3.0
| -6,181,232,457,055,610,000
| 37.186275
| 79
| 0.621983
| false
| 3.125201
| true
| false
| false
|
EdDev/vdsm
|
lib/vdsm/storage/mailbox.py
|
1
|
33976
|
#
# Copyright 2009-2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import os
import errno
import time
import threading
import struct
import logging
import uuid
from six.moves import queue
from vdsm.config import config
from vdsm.storage import misc
from vdsm.storage import task
from vdsm.storage.exception import InvalidParameterException
from vdsm.storage.threadPool import ThreadPool
from vdsm import concurrent
from vdsm import constants
__author__ = "ayalb"
__date__ = "$Mar 9, 2009 5:25:07 PM$"
CHECKSUM_BYTES = 4
MAILBOX_SIZE = 4096
PACKED_UUID_SIZE = 16
VOLUME_MAX_SIZE = 0xFFFFFFFF  # 32 bit unsigned max size
SIZE_CHARS = 16
MESSAGE_VERSION = "1"
MESSAGE_SIZE = 64
CLEAN_MESSAGE = "\1" * MESSAGE_SIZE
EXTEND_CODE = "xtnd"
BLOCK_SIZE = 512
REPLY_OK = 1
EMPTYMAILBOX = MAILBOX_SIZE * "\0"
SLOTS_PER_MAILBOX = int(MAILBOX_SIZE / MESSAGE_SIZE)
# Last message slot is reserved for metadata (checksum, extendable mailbox,
# etc)
MESSAGES_PER_MAILBOX = SLOTS_PER_MAILBOX - 1
_zeroCheck = misc.checksum(EMPTYMAILBOX, CHECKSUM_BYTES)
# Assumes CHECKSUM_BYTES equals 4!!!
pZeroChecksum = struct.pack('<l', _zeroCheck)
def dec2hex(n):
return "%x" % n
def runTask(args):
    if isinstance(args, tuple):
        cmd = args[0]
        args = args[1:]
    else:
        cmd = args
        args = ()  # empty tuple so that *args below unpacks cleanly
ctask = task.Task(id=None, name=cmd)
ctask.prepare(cmd, *args)
def _mboxExecCmd(*args, **kwargs):
return misc.execCmd(*args, **kwargs)
class SPM_Extend_Message:
log = logging.getLogger('storage.SPM.Messages.Extend')
def __init__(self, volumeData, newSize, callbackFunction=None):
if ('poolID' not in volumeData) or \
('domainID' not in volumeData) or \
('volumeID' not in volumeData):
self.log.error('create extend msg failed for volume: %s, size:'
' %d', '-'.join(volumeData.values()), newSize)
raise InvalidParameterException('volumeData dictionary',
volumeData)
if (newSize < 0) or (newSize > VOLUME_MAX_SIZE):
raise InvalidParameterException('volumeSize', newSize)
misc.validateUUID(volumeData['domainID'], 'domainID')
misc.validateUUID(volumeData['volumeID'], 'volumeID')
self.pool = volumeData['poolID']
self.volumeData = volumeData
self.newSize = str(dec2hex(newSize))
self.callback = callbackFunction
# Message structure is rigid (order must be kept and is relied upon):
# Version (1 byte), OpCode (4 bytes), Domain UUID (16 bytes), Volume
        # UUID (16 bytes), Requested size (16 bytes), Padding to 64 bytes (11
        # bytes)
domain = misc.packUuid(volumeData['domainID'])
volume = misc.packUuid(volumeData['volumeID'])
# Build base payload
payload = MESSAGE_VERSION + EXTEND_CODE + domain + volume + \
self.newSize.rjust(SIZE_CHARS, "0")
# Pad payload with zeros
self.payload = payload.ljust(MESSAGE_SIZE, "0")
self.log.debug('new extend msg created: domain: %s, volume: %s',
volumeData['domainID'], volumeData['volumeID'])
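    # Editor's note: a hedged layout sketch, not in the original source. The
    # resulting 64-byte payload, by offset, is:
    #
    #     payload[0:1]    message version ("1")
    #     payload[1:5]    opcode ("xtnd")
    #     payload[5:21]   packed domain UUID
    #     payload[21:37]  packed volume UUID
    #     payload[37:53]  new size, zero-padded hex
    #     payload[53:64]  "0" padding from ljust()
    #
    # checkReply() below relies on this via sizeOffset = 5 + 2 * 16.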
def __getitem__(self, index):
return self.payload[index]
def checkReply(self, reply):
# Sanity check - Make sure reply is for current message
sizeOffset = 5 + 2 * PACKED_UUID_SIZE
if (self.payload[0:sizeOffset] != reply[0:sizeOffset]):
self.log.error("SPM_Extend_Message: Reply message volume data "
"(domainID + volumeID) differs from request "
"message, reply : %s, orig: %s", reply,
self.payload)
raise RuntimeError('Incorrect reply')
# if self.payload[sizeOffset:sizeOffset + PACKED_UUID_SIZE] > \
# reply[sizeOffset:sizeOffset + PACKED_UUID_SIZE]):
# self.log.error("SPM_Extend_Message: New size is smaller than "
# "requested size")
# raise RuntimeError('Request failed')
return REPLY_OK
@classmethod
def processRequest(cls, pool, msgID, payload):
cls.log.debug("processRequest, payload:" + repr(payload))
sdOffset = 5
volumeOffset = sdOffset + PACKED_UUID_SIZE
sizeOffset = volumeOffset + PACKED_UUID_SIZE
volume = {}
volume['poolID'] = pool.spUUID
volume['domainID'] = misc.unpackUuid(
payload[sdOffset:sdOffset + PACKED_UUID_SIZE])
volume['volumeID'] = misc.unpackUuid(
payload[volumeOffset:volumeOffset + PACKED_UUID_SIZE])
size = int(payload[sizeOffset:sizeOffset + SIZE_CHARS], 16)
cls.log.info("processRequest: extending volume %s "
"in domain %s (pool %s) to size %d", volume['volumeID'],
volume['domainID'], volume['poolID'], size)
msg = None
try:
try:
pool.extendVolume(volume['domainID'], volume['volumeID'], size)
msg = SPM_Extend_Message(volume, size)
except:
cls.log.error("processRequest: Exception caught while trying "
"to extend volume: %s in domain: %s",
volume['volumeID'], volume['domainID'],
exc_info=True)
msg = SPM_Extend_Message(volume, 0)
finally:
pool.spmMailer.sendReply(msgID, msg)
return {'status': {'code': 0, 'message': 'Done'}}
class HSM_Mailbox:
log = logging.getLogger('storage.Mailbox.HSM')
def __init__(self, hostID, poolID, inbox, outbox, monitorInterval=2):
self._hostID = str(hostID)
self._poolID = str(poolID)
self._monitorInterval = monitorInterval
self._queue = queue.Queue(-1)
self._inbox = inbox
if not os.path.exists(self._inbox):
self.log.error("HSM_Mailbox create failed - inbox %s does not "
"exist" % repr(self._inbox))
raise RuntimeError("HSM_Mailbox create failed - inbox %s does not "
"exist" % repr(self._inbox))
self._outbox = outbox
if not os.path.exists(self._outbox):
self.log.error("HSM_Mailbox create failed - outbox %s does not "
"exist" % repr(self._outbox))
raise RuntimeError("HSM_Mailbox create failed - outbox %s does "
"not exist" % repr(self._outbox))
self._mailman = HSM_MailMonitor(self._inbox, self._outbox, hostID,
self._queue, monitorInterval)
self.log.debug('HSM_MailboxMonitor created for pool %s' % self._poolID)
def sendExtendMsg(self, volumeData, newSize, callbackFunction=None):
msg = SPM_Extend_Message(volumeData, newSize, callbackFunction)
if str(msg.pool) != self._poolID:
raise ValueError('PoolID does not correspond to Mailbox pool')
self._queue.put(msg)
def stop(self):
if self._mailman:
self._mailman.immStop()
self._mailman.tp.joinAll(waitForTasks=False)
else:
self.log.warning("HSM_MailboxMonitor - No mail monitor object "
"available to stop")
def wait(self, timeout=None):
return self._mailman.wait(timeout)
def flushMessages(self):
if self._mailman:
self._mailman.immFlush()
else:
self.log.warning("HSM_MailboxMonitor - No mail monitor object "
"available to flush")
class HSM_MailMonitor(object):
log = logging.getLogger('storage.MailBox.HsmMailMonitor')
def __init__(self, inbox, outbox, hostID, queue, monitorInterval):
# Save arguments
tpSize = config.getint('irs', 'thread_pool_size') / 2
waitTimeout = wait_timeout(monitorInterval)
maxTasks = config.getint('irs', 'max_tasks')
self.tp = ThreadPool("mailbox-hsm", tpSize, waitTimeout, maxTasks)
self._stop = False
self._flush = False
self._queue = queue
self._activeMessages = {}
self._monitorInterval = monitorInterval
self._hostID = int(hostID)
self._used_slots_array = [0] * MESSAGES_PER_MAILBOX
self._outgoingMail = EMPTYMAILBOX
self._incomingMail = EMPTYMAILBOX
# TODO: add support for multiple paths (multiple mailboxes)
self._inCmd = [constants.EXT_DD,
'if=' + str(inbox),
'iflag=direct,fullblock',
'bs=' + str(MAILBOX_SIZE),
'count=1',
'skip=' + str(self._hostID)
]
self._outCmd = [constants.EXT_DD,
'of=' + str(outbox),
'iflag=fullblock',
'oflag=direct',
'conv=notrunc',
'bs=' + str(MAILBOX_SIZE),
'count=1',
'seek=' + str(self._hostID)
]
self._init = False
self._initMailbox() # Read initial mailbox state
self._msgCounter = 0
self._sendMail() # Clear outgoing mailbox
self._thread = concurrent.thread(self.run, name="mailbox-hsm",
log=self.log)
self._thread.start()
def _initMailbox(self):
# Sync initial incoming mail state with storage view
(rc, out, err) = _mboxExecCmd(self._inCmd, raw=True)
if rc == 0:
self._incomingMail = out
self._init = True
else:
self.log.warning("HSM_MailboxMonitor - Could not initialize "
"mailbox, will not accept requests until init "
"succeeds")
def immStop(self):
self._stop = True
def immFlush(self):
self._flush = True
def wait(self, timeout=None):
self._thread.join(timeout=timeout)
return not self._thread.is_alive()
def _handleResponses(self, newMsgs):
rc = False
for i in range(0, MESSAGES_PER_MAILBOX):
# Skip checking non used slots
if self._used_slots_array[i] == 0:
continue
# Skip empty return messages (messages with version 0)
start = i * MESSAGE_SIZE
# First byte of message is message version.
# Check return message version, if 0 then message is empty
if newMsgs[start] in ['\0', '0']:
continue
for j in range(start, start + MESSAGE_SIZE):
if newMsgs[j] != self._incomingMail[j]:
break
# If search exhausted then message hasn't changed since last read
# and can be skipped
if j == (start + MESSAGE_SIZE - 1):
continue
#
# We only get here if there is a novel reply so we can remove the
# message from the active list and the outgoing mail and handle the
# reply
#
rc = True
newMsg = newMsgs[start:start + MESSAGE_SIZE]
if newMsg == CLEAN_MESSAGE:
del self._activeMessages[i]
self._used_slots_array[i] = 0
self._msgCounter -= 1
self._outgoingMail = self._outgoingMail[0:start] + \
MESSAGE_SIZE * "\0" + self._outgoingMail[start +
MESSAGE_SIZE:]
continue
msg = self._activeMessages[i]
self._activeMessages[i] = CLEAN_MESSAGE
self._outgoingMail = self._outgoingMail[0:start] + \
CLEAN_MESSAGE + self._outgoingMail[start + MESSAGE_SIZE:]
try:
self.log.debug("HSM_MailboxMonitor(%s/%s) - Checking reply: "
"%s", self._msgCounter, MESSAGES_PER_MAILBOX,
repr(newMsg))
msg.checkReply(newMsg)
if msg.callback:
try:
id = str(uuid.uuid4())
if not self.tp.queueTask(id, runTask, (msg.callback,
msg.volumeData)):
raise Exception()
except:
self.log.error("HSM_MailMonitor: exception caught "
"while running msg callback, for "
"message: %s, callback function: %s",
repr(msg.payload), msg.callback,
exc_info=True)
except RuntimeError as e:
self.log.error("HSM_MailMonitor: exception: %s caught while "
"checking reply for message: %s, reply: %s",
str(e), repr(msg.payload), repr(newMsg))
except:
self.log.error("HSM_MailMonitor: exception caught while "
"checking reply from SPM, request was: %s "
"reply: %s", repr(msg.payload), repr(newMsg),
exc_info=True)
# Finished processing incoming mail, now save mail to compare against
# next batch
self._incomingMail = newMsgs
return rc
def _checkForMail(self):
# self.log.debug("HSM_MailMonitor - checking for mail")
# self.log.debug("Running command: " + str(self._inCmd))
(rc, in_mail, err) = misc.execCmd(self._inCmd, raw=True)
if rc:
raise RuntimeError("_handleResponses.Could not read mailbox - rc "
"%s" % rc)
if (len(in_mail) != MAILBOX_SIZE):
raise RuntimeError("_handleResponses.Could not read mailbox - len "
"%s != %s" % (len(in_mail), MAILBOX_SIZE))
# self.log.debug("Parsing inbox content: %s", in_mail)
return self._handleResponses(in_mail)
def _sendMail(self):
self.log.info("HSM_MailMonitor sending mail to SPM - " +
str(self._outCmd))
chk = misc.checksum(
self._outgoingMail[0:MAILBOX_SIZE - CHECKSUM_BYTES],
CHECKSUM_BYTES)
pChk = struct.pack('<l', chk) # Assumes CHECKSUM_BYTES equals 4!!!
self._outgoingMail = \
self._outgoingMail[0:MAILBOX_SIZE - CHECKSUM_BYTES] + pChk
_mboxExecCmd(self._outCmd, data=self._outgoingMail)
def _handleMessage(self, message):
# TODO: add support for multiple mailboxes
freeSlot = False
for i in range(0, MESSAGES_PER_MAILBOX):
if self._used_slots_array[i] == 0:
if not freeSlot:
freeSlot = i
continue
duplicate = True
for j in range(0, MESSAGE_SIZE):
if message[j] != self._activeMessages[i][j]:
duplicate = False
break
if duplicate:
self.log.debug("HSM_MailMonitor - ignoring duplicate message "
"%s" % (repr(message)))
return
if not freeSlot:
raise RuntimeError("HSM_MailMonitor - Active messages list full, "
"cannot add new message")
self._msgCounter += 1
self._used_slots_array[freeSlot] = 1
self._activeMessages[freeSlot] = message
start = freeSlot * MESSAGE_SIZE
end = start + MESSAGE_SIZE
self._outgoingMail = self._outgoingMail[0:start] + message.payload + \
self._outgoingMail[end:]
self.log.debug("HSM_MailMonitor - start: %s, end: %s, len: %s, "
"message(%s/%s): %s" %
(start, end, len(self._outgoingMail), self._msgCounter,
MESSAGES_PER_MAILBOX,
repr(self._outgoingMail[start:end])))
def run(self):
try:
failures = 0
# Do not start processing requests before incoming mailbox is
# initialized
while not self._init and not self._stop:
try:
time.sleep(2)
self._initMailbox() # Read initial mailbox state
except:
pass
while not self._stop:
try:
message = None
sendMail = False
# If no message is pending, block_wait until a new message
# or stop command arrives
while not self._stop and not message and \
not self._activeMessages:
try:
# self.log.debug("No requests in queue, going to "
# "sleep until new requests arrive")
# Check if a new message is waiting to be sent
message = self._queue.get(
block=True, timeout=self._monitorInterval)
self._handleMessage(message)
message = None
sendMail = True
except queue.Empty:
pass
if self._stop:
break
# If pending messages available, check if there are new
# messages waiting in queue as well
empty = False
while (not empty) and \
(len(self._activeMessages) < MESSAGES_PER_MAILBOX):
# TODO: Remove single mailbox limitation
try:
message = self._queue.get(block=False)
self._handleMessage(message)
message = None
sendMail = True
except queue.Empty:
empty = True
if self._flush:
self._flush = False
sendMail = True
try:
sendMail |= self._checkForMail()
failures = 0
except:
self.log.error("HSM_MailboxMonitor - Exception caught "
"while checking for mail",
exc_info=True)
failures += 1
if sendMail:
self._sendMail()
# If there are active messages waiting for SPM reply, wait
# a few seconds before performing another IO op
if self._activeMessages and not self._stop:
# If recurring failures then sleep for one minute
# before retrying
if (failures > 9):
time.sleep(60)
else:
time.sleep(self._monitorInterval)
except:
self.log.error("HSM_MailboxMonitor - Incoming mail"
"monitoring thread caught exception; "
"will try to recover", exc_info=True)
finally:
self.log.info("HSM_MailboxMonitor - Incoming mail monitoring "
"thread stopped, clearing outgoing mail")
self._outgoingMail = EMPTYMAILBOX
self._sendMail() # Clear outgoing mailbox
class SPM_MailMonitor:
log = logging.getLogger('storage.MailBox.SpmMailMonitor')
def registerMessageType(self, messageType, callback):
self._messageTypes[messageType] = callback
def unregisterMessageType(self, messageType):
del self._messageTypes[messageType]
def __init__(self, poolID, maxHostID, inbox, outbox, monitorInterval=2):
"""
        Note: the inbox parameter here should point to the HSM's outbox
mailbox file, and vice versa.
"""
self._messageTypes = {}
# Save arguments
self._stop = False
self._stopped = False
self._poolID = poolID
tpSize = config.getint('irs', 'thread_pool_size') / 2
waitTimeout = wait_timeout(monitorInterval)
maxTasks = config.getint('irs', 'max_tasks')
self.tp = ThreadPool("mailbox-spm", tpSize, waitTimeout, maxTasks)
self._inbox = inbox
if not os.path.exists(self._inbox):
self.log.error("SPM_MailMonitor create failed - inbox %s does not "
"exist" % repr(self._inbox))
raise RuntimeError("SPM_MailMonitor create failed - inbox %s does "
"not exist" % repr(self._inbox))
self._outbox = outbox
if not os.path.exists(self._outbox):
self.log.error("SPM_MailMonitor create failed - outbox %s does "
"not exist" % repr(self._outbox))
raise RuntimeError("SPM_MailMonitor create failed - outbox %s "
"does not exist" % repr(self._outbox))
self._numHosts = int(maxHostID)
self._outMailLen = MAILBOX_SIZE * self._numHosts
self._monitorInterval = monitorInterval
# TODO: add support for multiple paths (multiple mailboxes)
self._outgoingMail = self._outMailLen * "\0"
self._incomingMail = self._outgoingMail
self._inCmd = ['dd',
'if=' + str(self._inbox),
'iflag=direct,fullblock',
'count=1'
]
self._outCmd = ['dd',
'of=' + str(self._outbox),
'oflag=direct',
'iflag=fullblock',
'conv=notrunc',
'count=1'
]
self._outLock = threading.Lock()
self._inLock = threading.Lock()
# Clear outgoing mail
self.log.debug("SPM_MailMonitor - clearing outgoing mail, command is: "
"%s", self._outCmd)
cmd = self._outCmd + ['bs=' + str(self._outMailLen)]
(rc, out, err) = _mboxExecCmd(cmd, data=self._outgoingMail)
if rc:
self.log.warning("SPM_MailMonitor couldn't clear outgoing mail, "
"dd failed")
self._thread = concurrent.thread(
self.run, name="mailbox-spm", log=self.log)
self._thread.start()
self.log.debug('SPM_MailMonitor created for pool %s' % self._poolID)
def wait(self, timeout=None):
self._thread.join(timeout=timeout)
return not self._thread.is_alive()
def stop(self):
self._stop = True
def isStopped(self):
return self._stopped
def getMaxHostID(self):
return self._numHosts
def setMaxHostID(self, newMaxId):
with self._inLock:
with self._outLock:
diff = newMaxId - self._numHosts
if diff > 0:
delta = MAILBOX_SIZE * diff * "\0"
self._outgoingMail += delta
self._incomingMail += delta
elif diff < 0:
delta = MAILBOX_SIZE * diff
self._outgoingMail = self._outgoingMail[:-delta]
self._incomingMail = self._incomingMail[:-delta]
self._numHosts = newMaxId
self._outMailLen = MAILBOX_SIZE * self._numHosts
@classmethod
    def validateMailbox(cls, mailbox, mailboxIndex):
"""
Return True if mailbox has a valid checksum, and is not an empty
mailbox, False otherwise.
"""
assert len(mailbox) == MAILBOX_SIZE
data = mailbox[:-CHECKSUM_BYTES]
checksum = mailbox[-CHECKSUM_BYTES:]
n = misc.checksum(data, CHECKSUM_BYTES)
expected = struct.pack('<l', n) # Assumes CHECKSUM_BYTES equals 4!!!
if checksum != expected:
            cls.log.error(
                "mailbox %s checksum failed, not clearing mailbox, clearing "
                "new mail (data=%r, checksum=%r, expected=%r)",
                mailboxIndex, data, checksum, expected)
return False
elif expected == pZeroChecksum:
return False # Ignore messages of empty mailbox
return True
def _handleRequests(self, newMail):
send = False
# run through all messages and check if new messages have arrived
# (since last read)
for host in range(0, self._numHosts):
# Check mailbox checksum
mailboxStart = host * MAILBOX_SIZE
isMailboxValidated = False
for i in range(0, MESSAGES_PER_MAILBOX):
msgId = host * SLOTS_PER_MAILBOX + i
msgStart = msgId * MESSAGE_SIZE
# First byte of message is message version. Check message
# version, if 0 then message is empty and can be skipped
if newMail[msgStart] in ['\0', '0']:
continue
                # Most mailboxes are probably empty, so it costs less to
                # check that all messages start with 0 than to validate the
                # mailbox; therefore this is done only after we find a
                # non-empty message in the mailbox
if not isMailboxValidated:
if not self.validateMailbox(
newMail[mailboxStart:mailboxStart + MAILBOX_SIZE],
host):
# Cleaning invalid mbx in newMail
newMail = newMail[:mailboxStart] + EMPTYMAILBOX + \
newMail[mailboxStart + MAILBOX_SIZE:]
break
self.log.debug("SPM_MailMonitor: Mailbox %s validated, "
"checking mail", host)
isMailboxValidated = True
newMsg = newMail[msgStart:msgStart + MESSAGE_SIZE]
msgOffset = msgId * MESSAGE_SIZE
if newMsg == CLEAN_MESSAGE:
# Should probably put a setter on outgoingMail which would
# take the lock
self._outLock.acquire()
try:
self._outgoingMail = \
self._outgoingMail[0:msgOffset] + CLEAN_MESSAGE + \
self._outgoingMail[msgOffset + MESSAGE_SIZE:
self._outMailLen]
finally:
self._outLock.release()
send = True
continue
# Message isn't empty, check if its new
isMessageNew = False
for j in range(msgStart, msgStart + MESSAGE_SIZE):
if newMail[j] != self._incomingMail[j]:
isMessageNew = True
break
# If search exhausted, i.e. message hasn't changed since last
# read, it can be skipped
if not isMessageNew:
continue
# We only get here if there is a novel request
try:
msgType = newMail[msgStart + 1:msgStart + 5]
if msgType in self._messageTypes:
# Use message class to process request according to
# message specific logic
id = str(uuid.uuid4())
self.log.debug("SPM_MailMonitor: processing request: "
"%s" % repr(newMail[
msgStart:msgStart + MESSAGE_SIZE]))
res = self.tp.queueTask(
id, runTask, (self._messageTypes[msgType], msgId,
newMail[msgStart:
msgStart + MESSAGE_SIZE])
)
if not res:
raise Exception()
else:
self.log.error("SPM_MailMonitor: unknown message type "
"encountered: %s", msgType)
except RuntimeError as e:
self.log.error("SPM_MailMonitor: exception: %s caught "
"while handling message: %s", str(e),
newMail[msgStart:msgStart + MESSAGE_SIZE])
except:
self.log.error("SPM_MailMonitor: exception caught while "
"handling message: %s",
newMail[msgStart:msgStart + MESSAGE_SIZE],
exc_info=True)
self._incomingMail = newMail
return send
def _checkForMail(self):
# Lock is acquired in order to make sure that neither _numHosts nor
# incomingMail are changed during checkForMail
self._inLock.acquire()
try:
# self.log.debug("SPM_MailMonitor -_checking for mail")
cmd = self._inCmd + ['bs=' + str(self._outMailLen)]
# self.log.debug("SPM_MailMonitor - reading incoming mail, "
# "command: " + str(cmd))
(rc, in_mail, err) = misc.execCmd(cmd, raw=True)
if rc:
raise IOError(errno.EIO, "_handleRequests._checkForMail - "
"Could not read mailbox: %s" % self._inbox)
if (len(in_mail) != (self._outMailLen)):
self.log.error('SPM_MailMonitor: _checkForMail - dd succeeded '
'but read %d bytes instead of %d, cannot check '
'mail. Read mail contains: %s', len(in_mail),
self._outMailLen, repr(in_mail[:80]))
raise RuntimeError("_handleRequests._checkForMail - Could not "
"read mailbox")
# self.log.debug("Parsing inbox content: %s", in_mail)
if self._handleRequests(in_mail):
self._outLock.acquire()
try:
cmd = self._outCmd + ['bs=' + str(self._outMailLen)]
(rc, out, err) = _mboxExecCmd(cmd,
data=self._outgoingMail)
if rc:
self.log.warning("SPM_MailMonitor couldn't write "
"outgoing mail, dd failed")
finally:
self._outLock.release()
finally:
self._inLock.release()
def sendReply(self, msgID, msg):
# Lock is acquired in order to make sure that neither _numHosts nor
# outgoingMail are changed while used
self._outLock.acquire()
try:
msgOffset = msgID * MESSAGE_SIZE
self._outgoingMail = \
self._outgoingMail[0:msgOffset] + msg.payload + \
self._outgoingMail[msgOffset + MESSAGE_SIZE:self._outMailLen]
mailboxOffset = (msgID / SLOTS_PER_MAILBOX) * MAILBOX_SIZE
mailbox = self._outgoingMail[mailboxOffset:
mailboxOffset + MAILBOX_SIZE]
cmd = self._outCmd + ['bs=' + str(MAILBOX_SIZE),
'seek=' + str(mailboxOffset / MAILBOX_SIZE)]
# self.log.debug("Running command: %s, for message id: %s",
# str(cmd), str(msgID))
(rc, out, err) = _mboxExecCmd(cmd, data=mailbox)
if rc:
self.log.error("SPM_MailMonitor: sendReply - couldn't send "
"reply, dd failed")
finally:
self._outLock.release()
def run(self):
try:
while not self._stop:
try:
self._checkForMail()
except:
self.log.error("Error checking for mail", exc_info=True)
time.sleep(self._monitorInterval)
finally:
self._stopped = True
self.tp.joinAll(waitForTasks=False)
self.log.info("SPM_MailMonitor - Incoming mail monitoring thread "
"stopped")
def wait_timeout(monitor_interval):
"""
    Designed to return a 3 second wait timeout for a monitor interval of 2
    seconds, keeping the runtime behavior the same as it has been for the
    last 8 years, while allowing shorter times for testing.
"""
return monitor_interval * 3 / 2
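# Editor's note: a quick hedged check of the helper above, not in the
# original file. Under Python 2 integer division, wait_timeout(2) == 3 and
# wait_timeout(1) == 1, i.e. 1.5x the monitor interval, truncated to an int.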
|
gpl-2.0
| 5,307,111,074,492,991,000
| 40.688344
| 79
| 0.515629
| false
| 4.490022
| false
| false
| false
|
butala/pyrsss
|
pyrsss/mag/fgm2iaga.py
|
1
|
5144
|
import sys
import logging
import os
from datetime import datetime
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import numpy as NP
import pandas as PD
logger = logging.getLogger('pyrsss.mag.fgm2iaga')
HEADER_TEMPLATE = """\
Format IAGA-2002 |
Source of Data CARISMA |
IAGA CODE {stn} |
Geodetic Latitude {lat:<8.3f} |
Geodetic Longitude {lon:<8.3f} |
Elevation {el:<8.3f} |
Reported XYZF |
DATE TIME DOY {stn}X {stn}Y {stn}Z {stn}F |
"""
def parse(fname):
"""
Parse FGM format data *fname*. Return :class:`DataFrame`
containing all information found in the file.
The FGM file format is used by CARISMA to store data and is
described here:
http://www.carisma.ca/carisma-data/fgm-data-format.
"""
with open(fname) as fid:
siteid, lat, lon, date, pos_format, units, sample_rate = fid.next().split()
dt = []
x = []
y = []
z = []
flag = []
for line in fid:
cols = line.split()
dt.append(datetime.strptime(cols[0], '%Y%m%d%H%M%S'))
x.append(float(cols[1]))
y.append(float(cols[2]))
z.append(float(cols[3]))
if cols[4] == '.':
flag.append(False)
elif cols[4] == 'x':
flag.append(True)
else:
raise ValueError('unknown flag value {} encountered in {}'.format(cols[4], fname))
f = NP.hypot(x, NP.hypot(y, z))
df = PD.DataFrame(data={'x': x, 'y': y, 'z': z, 'f': f, 'flag': flag},
index=dt)
df.siteid = siteid
df.lat = float(lat)
df.lon = float(lon)
df.date = datetime.strptime(date, '%Y%m%d')
df.pos_format = pos_format
df.units = units
df.sample_rate = sample_rate
return df
def fgm2iaga(path,
fgm_fname,
ftype='v',
output_template='{stn}{date:%Y%m%d}{ftype}{interval}.{interval}'):
"""
    Parse FGM format file *fgm_fname*, reformat it to IAGA2002, and save it
    at *path* (using *output_template* to form the file name). Return the
    output file name. The *ftype* denotes the file type: p - provisional,
    d - definitive, q - quasi-definitive, or v - variation.
"""
df = parse(fgm_fname)
delta = (df.index[1] - df.index[0]).total_seconds()
if delta == 1.0:
interval = 'sec'
elif delta == 60.0:
interval = 'min'
else:
raise ValueError('unknown data interval found in {}'.format(fgm_fname))
stn = df.siteid[:3].upper()
out_fname = os.path.join(path,
output_template.format(stn=stn.lower(),
date=df.date,
ftype=ftype,
interval=interval))
with open(out_fname, 'w') as fid:
fid.write(HEADER_TEMPLATE.format(stn=stn.upper(),
lat=df.lat,
lon=df.lon,
el=0))
for row in df.itertuples():
dt = row.Index
if row.flag:
X = Y = Z = F = 99999
else:
X = row.x
Y = row.y
Z = row.z
F = NP.linalg.norm([X, Y, Z])
fid.write('{date:%Y-%m-%d %H:%M:%S.000} {date:%j}'
' {X:>9.2f} {Y:>9.2f} {Z:>9.2f} {F:>9.2f}\n'.format(date=dt,
X=X,
Y=Y,
Z=Z,
F=F))
return out_fname
def main(argv=None):
if argv is None:
argv = sys.argv
parser = ArgumentParser('Convert FGM format data (CARISMA) to IAGA2002 format.',
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('output_path',
type=str,
help='path to store daily IAGA2002 format files')
parser.add_argument('fgm_fnames',
type=str,
nargs='+',
metavar='fgm_fname',
help='FGM format file')
args = parser.parse_args(argv[1:])
for fgm_fname in args.fgm_fnames:
iaga_fname = fgm2iaga(args.output_path, fgm_fname)
logger.info('{} -> {}'.format(fgm_fname, iaga_fname))
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
sys.exit(main())
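# Editor's note: a hedged CLI sketch; the input file name is hypothetical and
# not from the original source. Converting one CARISMA FGM file into a daily
# IAGA2002 file under ./iaga/ would look like:
#
#     python fgm2iaga.py ./iaga 20140101GILL.fgm
#
# which, for 1 Hz data, writes something like ./iaga/gil20140101vsec.sec.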
|
mit
| -107,705,631,235,652,930
| 35.48227
| 98
| 0.437597
| false
| 3.969136
| false
| false
| false
|
yoneken/train_tf
|
compressImg2TrainData.py
|
1
|
4562
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Directory structure
TRAIN_DIR:
label0:
img0001.png
img0002.png
img0003.png
label1:
img0001.png
img0002.png
.
.
.
label9:
img0001.png
'''
import cv2, os, gzip, random
import numpy as np
from itertools import chain
class MakeMnistData:
'''This class makes a train data set and a test data set for MNIST'''
def __init__(self):
self.LABEL_MAGIC_NUMBER = 2049
self.IMAGE_MAGIC_NUMBER = 2051
    self.data_label = [] # length equals the total number of samples
    self.img_data = []   # length equals the total number of samples
    self.data_size = []  # length equals the number of classes
    self.label_name = [] # length equals the number of classes
def _make32(self, val):
# Big endian
return [val >> i & 0xff for i in [24,16,8,0]]
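  # Editor's note: a hedged worked example, not in the original file.
  # _make32 splits a value into four big-endian bytes, e.g.
  # _make32(2051) -> [0, 0, 8, 3], the IDX image-file magic number.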
def load(self, dirname):
for i,dname in enumerate(sorted(next(os.walk(dirname))[1])):
files = next(os.walk(dirname + "/" + dname))[2]
self.data_label.append([i]*len(files))
self.data_size.append(len(files))
self.label_name.append(dname)
for filename in files:
img_file = dirname + "/" + dname + "/" + filename
img = cv2.imread(img_file)
img = cv2.resize(img, (28, 28))
imgg = cv2.cvtColor(img, cv2.cv.CV_BGR2GRAY)
self.img_data.append(imgg[:,:].reshape(imgg.size))
self.data_label = list(chain.from_iterable(self.data_label))
# Shuffle the data
tmp_dl = list(self.data_label)
tmp_id = list(self.img_data)
indices = np.arange(len(self.data_label))
np.random.shuffle(indices)
for i in range(len(self.data_label)):
self.data_label[i] = tmp_dl[indices[i]]
self.img_data[i] = tmp_id[indices[i]]
def write(self, dirname, valid_size=0):
if valid_size == 0:
valid_size = int(len(self.data_label) * 0.05)
# make test data
test_data_label = self.data_label[:valid_size]
test_img_data = self.img_data[:valid_size]
self.data_label = self.data_label[valid_size:]
self.img_data = self.img_data[valid_size:]
test_data_size = [0]*len(self.data_size)
for i in range(valid_size):
ind = test_data_label[i]
self.data_size[ind] = self.data_size[ind] - 1
test_data_size[ind] = test_data_size[ind] + 1
# make a train label data
# make header
ldata = self._make32(self.LABEL_MAGIC_NUMBER)
ldata = np.r_[ldata, self._make32(sum(self.data_size))]
ldata = np.r_[ldata, self.data_label]
with gzip.open(dirname + "/train-labels-idx1-ubyte.gz",'wb') as f:
f.write(np.array(ldata, dtype=np.uint8))
# make a test label data
# make header
tldata = self._make32(self.LABEL_MAGIC_NUMBER)
tldata = np.r_[tldata, self._make32(sum(test_data_size))]
tldata = np.r_[tldata, test_data_label]
with gzip.open(dirname + "/t10k-labels-idx1-ubyte.gz",'wb') as f:
f.write(np.array(tldata, dtype=np.uint8))
# make a train image data
# make header
idata = self._make32(self.IMAGE_MAGIC_NUMBER)
idata = np.r_[idata, self._make32(sum(self.data_size))]
idata = np.r_[idata, self._make32(28)]
idata = np.r_[idata, self._make32(28)]
idata = np.r_[idata, list(chain.from_iterable(self.img_data))]
# write value
with gzip.open(dirname + "/train-images-idx3-ubyte.gz",'wb') as f:
f.write(np.array(idata, dtype=np.uint8))
# make a test image data
# make header
tidata = self._make32(self.IMAGE_MAGIC_NUMBER)
tidata = np.r_[tidata, self._make32(sum(test_data_size))]
tidata = np.r_[tidata, self._make32(28)]
tidata = np.r_[tidata, self._make32(28)]
tidata = np.r_[tidata, list(chain.from_iterable(test_img_data))]
# write value
with gzip.open(dirname + "/t10k-images-idx3-ubyte.gz",'wb') as f:
f.write(np.array(tidata, dtype=np.uint8))
s = ",".join(["\"" + x + "\"" for x in self.label_name])
print(s)
with open(dirname + "/label_name.txt", 'w') as f:
f.write(s)
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser(description="This script makes a train and a validation dataset")
parser.add_argument("--in_dir", dest="indir", type=str, default="data")
parser.add_argument("--out_dir", dest="outdir", type=str, default="data")
parser.add_argument("--valid_size", dest="valsize", type=int, default=0, help="Default size is 5% of all data")
args = parser.parse_args()
mmd = MakeMnistData()
mmd.load(args.indir)
mmd.write(args.outdir, args.valsize)
|
apache-2.0
| 1,497,630,365,127,481,300
| 31.126761
| 113
| 0.629329
| false
| 2.979752
| true
| false
| false
|
Thylossus/tud-movie-character-insights
|
Server/Tools/Parser/show_stats.py
|
1
|
1183
|
#!/usr/bin/python3
stats_version="0.11"
# Include custom libs
import sys
sys.path.append( '../../include/python' )
import serverutils.config as config
import serverutils.mongohelper as mongohelper
import re
from pymongo import MongoClient
print("Word stats v.", stats_version)
print("================================")
print()
mongoClient, mongoDb = mongohelper.getMongoClient()
characterWords = []
movies = mongoDb.rawMovies.find({})
for movie in movies:
if len(sys.argv) > 1 and not movie['_id'] in sys.argv:
continue
print("Scanning " + movie['_id'])
counters = {}
for quote in mongoDb.rawQuotes.find({'_id.movie': movie['_id']}):
c = len(re.findall(r'\w+',quote['text']))
if not quote['character'] in counters:
counters[quote['character']] = 0
counters[quote['character']] = counters[quote['character']] + c
for character, count in counters.items():
characterWords += [(movie['_id'], character, count)]
characterWords = sorted(characterWords, key=lambda a: -a[2])
for i in range(200 if len(characterWords) > 200 else len(characterWords)):
print(str(characterWords[i][2]) + " words: " + characterWords[i][1] + " (" + characterWords[i][0] + ")")
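# Editor's note: a hedged usage sketch, not part of the original file. Run
# without arguments to scan every movie in rawMovies:
#
#     ./show_stats.py
#
# or restrict the scan by passing movie _id values as arguments (the id
# below is hypothetical):
#
#     ./show_stats.py some_movie_id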
|
apache-2.0
| 7,304,283,256,309,693,000
| 26.511628
| 105
| 0.668639
| false
| 3.295265
| false
| false
| false
|
little-dude/pyroute2
|
pyroute2/ipdb/route.py
|
1
|
15960
|
import logging
import threading
from socket import AF_UNSPEC
from pyroute2.common import basestring
from pyroute2.netlink import nlmsg
from pyroute2.netlink.rtnl.rtmsg import rtmsg
from pyroute2.netlink.rtnl.req import IPRouteRequest
from pyroute2.ipdb.transactional import Transactional
from pyroute2.ipdb.linkedset import LinkedSet
class Metrics(Transactional):
_fields = [rtmsg.metrics.nla2name(i[0]) for i in rtmsg.metrics.nla_map]
class NextHopSet(LinkedSet):
def __init__(self, prime=None):
super(NextHopSet, self).__init__()
prime = prime or []
for v in prime:
self.add(v)
def __sub__(self, vs):
ret = type(self)()
sub = set(self.raw.keys()) - set(vs.raw.keys())
for v in sub:
ret.add(self[v], raw=self.raw[v])
return ret
def __make_nh(self, prime):
return (prime.get('flags', 0),
prime.get('hops', 0),
prime.get('ifindex', 0),
prime.get('gateway'))
def __getitem__(self, key):
return dict(zip(('flags', 'hops', 'ifindex', 'gateway'), key))
def __iter__(self):
def NHIterator():
for x in tuple(self.raw.keys()):
yield self[x]
return NHIterator()
def add(self, prime, raw=None):
return super(NextHopSet, self).add(self.__make_nh(prime))
def remove(self, prime, raw=None):
hit = False
for nh in self:
for key in prime:
if prime[key] != nh.get(key):
break
else:
hit = True
super(NextHopSet, self).remove(self.__make_nh(nh))
if not hit:
raise KeyError('nexthop not found')
class WatchdogKey(dict):
'''
Construct from a route a dictionary that could be used as
a match for IPDB watchdogs.
'''
def __init__(self, route):
dict.__init__(self, [x for x in IPRouteRequest(route).items()
if x[0] in ('dst', 'dst_len', 'oif',
'iif', 'table')])
def RouteKey(msg):
'''
Construct from a netlink message a key that can be used
to locate the route in the table
'''
if isinstance(msg, nlmsg):
src = None
# calculate dst
if msg.get_attr('RTA_DST', None) is not None:
dst = '%s/%s' % (msg.get_attr('RTA_DST'),
msg['dst_len'])
else:
dst = 'default'
# use output | input interfaces as key also
iif = msg.get_attr(msg.name2nla('iif'))
oif = msg.get_attr(msg.name2nla('oif'))
elif isinstance(msg, Transactional):
src = None
dst = msg.get('dst')
iif = msg.get('iif')
oif = msg.get('oif')
else:
raise TypeError('prime not supported')
# key: src, dst, iif, oif
return (src, dst, iif, oif)
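# Editor's note: a hedged illustration, not from the original source. For a
# netlink message describing the default route out of interface index 2
# (no RTA_DST attribute present), RouteKey(msg) yields the tuple
# (None, 'default', None, 2), i.e. (src, dst, iif, oif).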
class Route(Transactional):
'''
Persistent transactional route object
'''
_fields = [rtmsg.nla2name(i[0]) for i in rtmsg.nla_map]
_fields.append('flags')
_fields.append('src_len')
_fields.append('dst_len')
_fields.append('table')
_fields.append('removal')
_virtual_fields = ['ipdb_scope', 'ipdb_priority']
_fields.extend(_virtual_fields)
_linked_sets = ['multipath', ]
cleanup = ('attrs',
'header',
'event',
'cacheinfo')
def __init__(self, ipdb, mode=None, parent=None, uid=None):
Transactional.__init__(self, ipdb, mode, parent, uid)
self._load_event = threading.Event()
with self._direct_state:
for i in self._fields:
self[i] = None
self['metrics'] = Metrics(parent=self)
self['multipath'] = NextHopSet()
self['ipdb_priority'] = 0
def add_nh(self, prime):
with self._write_lock:
tx = self.get_tx()
with tx._direct_state:
tx['multipath'].add(prime)
def del_nh(self, prime):
with self._write_lock:
tx = self.get_tx()
with tx._direct_state:
tx['multipath'].remove(prime)
def load_netlink(self, msg):
with self._direct_state:
if self['ipdb_scope'] == 'locked':
# do not touch locked interfaces
return
self['ipdb_scope'] = 'system'
self.update(msg)
# re-init metrics
metrics = self.get('metrics', Metrics(parent=self))
with metrics._direct_state:
for metric in tuple(metrics.keys()):
del metrics[metric]
self['metrics'] = metrics
# merge key
for (name, value) in msg['attrs']:
norm = rtmsg.nla2name(name)
# normalize RTAX
if norm == 'metrics':
with self['metrics']._direct_state:
for (rtax, rtax_value) in value['attrs']:
rtax_norm = rtmsg.metrics.nla2name(rtax)
self['metrics'][rtax_norm] = rtax_value
elif norm == 'multipath':
self['multipath'] = NextHopSet()
for v in value:
nh = {}
for name in [x[0] for x in rtmsg.nh.fields]:
nh[name] = v[name]
for (rta, rta_value) in v['attrs']:
rta_norm = rtmsg.nla2name(rta)
nh[rta_norm] = rta_value
self['multipath'].add(nh)
else:
self[norm] = value
if msg.get_attr('RTA_DST', None) is not None:
dst = '%s/%s' % (msg.get_attr('RTA_DST'),
msg['dst_len'])
else:
dst = 'default'
self['dst'] = dst
# finally, cleanup all not needed
for item in self.cleanup:
if item in self:
del self[item]
self.sync()
def sync(self):
self._load_event.set()
def reload(self):
        # do NOT call get_routes() here, it can cause a race condition
# self._load_event.wait()
return self
def commit(self, tid=None, transaction=None, rollback=False):
self._load_event.clear()
error = None
drop = True
if tid:
transaction = self._transactions[tid]
else:
if transaction:
drop = False
else:
transaction = self.last()
# create a new route
if self['ipdb_scope'] != 'system':
try:
self.ipdb.update_routes(
self.nl.route('add', **IPRouteRequest(transaction)))
except Exception:
self.nl = None
self.ipdb.routes.remove(self)
raise
# work on existing route
snapshot = self.pick()
try:
# route set
request = IPRouteRequest(transaction - snapshot)
if any([request[x] not in (None, {'attrs': []}) for x in request]):
self.ipdb.update_routes(
self.nl.route('set', **IPRouteRequest(transaction)))
# route removal
if (transaction['ipdb_scope'] in ('shadow', 'remove')) or\
((transaction['ipdb_scope'] == 'create') and rollback):
if transaction['ipdb_scope'] == 'shadow':
self.set_item('ipdb_scope', 'locked')
self.ipdb.update_routes(
self.nl.route('delete', **IPRouteRequest(snapshot)))
if transaction['ipdb_scope'] == 'shadow':
self.set_item('ipdb_scope', 'shadow')
except Exception as e:
if not rollback:
ret = self.commit(transaction=snapshot, rollback=True)
if isinstance(ret, Exception):
error = ret
else:
error = e
else:
if drop:
self.drop()
x = RuntimeError()
x.cause = e
raise x
if drop and not rollback:
self.drop()
if error is not None:
error.transaction = transaction
raise error
if not rollback:
with self._direct_state:
self['multipath'] = transaction['multipath']
self.reload()
return self
def remove(self):
self['ipdb_scope'] = 'remove'
return self
def shadow(self):
self['ipdb_scope'] = 'shadow'
return self
class RoutingTable(object):
def __init__(self, ipdb, prime=None):
self.ipdb = ipdb
self.lock = threading.Lock()
self.idx = {}
self.kdx = {}
def __repr__(self):
return repr([x['route'] for x in self.idx.values()])
def __len__(self):
return len(self.keys())
def __iter__(self):
for record in tuple(self.idx.values()):
yield record['route']
def keys(self, key='dst'):
with self.lock:
return [x['route'][key] for x in self.idx.values()]
def describe(self, target, forward=True):
        # match the route by index -- a bit meaningless,
        # but kept for compatibility
if isinstance(target, int):
keys = tuple(self.idx.keys())
return self.idx[keys[target]]
# match the route by key
if isinstance(target, (tuple, list)):
try:
# full match
return self.idx[target]
except KeyError:
# match w/o iif/oif
return self.idx[target[:2] + (None, None)]
# match the route by string
if isinstance(target, basestring):
target = {'dst': target}
# match the route by dict spec
if not isinstance(target, dict):
raise TypeError('unsupported key type')
for record in self.idx.values():
for key in target:
# skip non-existing keys
#
# it's a hack, but newly-created routes
# don't contain all the fields that are
# in the netlink message
if record['route'].get(key) is None:
continue
# if any key doesn't match
if target[key] != record['route'][key]:
break
else:
# if all keys match
return record
if not forward:
raise KeyError('route not found')
# split masks
if target.get('dst', '').find('/') >= 0:
dst = target['dst'].split('/')
target['dst'] = dst[0]
target['dst_len'] = int(dst[1])
if target.get('src', '').find('/') >= 0:
src = target['src'].split('/')
target['src'] = src[0]
target['src_len'] = int(src[1])
# load and return the route, if exists
route = Route(self.ipdb)
route.load_netlink(self.ipdb.nl.get_routes(**target)[0])
return {'route': route,
'key': None}
def __delitem__(self, key):
with self.lock:
item = self.describe(key, forward=False)
del self.idx[RouteKey(item['route'])]
def __setitem__(self, key, value):
with self.lock:
try:
record = self.describe(key, forward=False)
except KeyError:
record = {'route': Route(self.ipdb),
'key': None}
if isinstance(value, nlmsg):
record['route'].load_netlink(value)
elif isinstance(value, Route):
record['route'] = value
elif isinstance(value, dict):
with record['route']._direct_state:
record['route'].update(value)
key = RouteKey(record['route'])
if record['key'] is None:
self.idx[key] = {'route': record['route'],
'key': key}
else:
self.idx[key] = record
if record['key'] != key:
del self.idx[record['key']]
record['key'] = key
def __getitem__(self, key):
with self.lock:
return self.describe(key, forward=True)['route']
def __contains__(self, key):
try:
with self.lock:
self.describe(key, forward=False)
return True
except KeyError:
return False
class RoutingTableSet(object):
def __init__(self, ipdb, ignore_rtables=None):
self.ipdb = ipdb
self.ignore_rtables = ignore_rtables or []
self.tables = {254: RoutingTable(self.ipdb)}
def add(self, spec=None, **kwarg):
'''
Create a route from a dictionary
'''
spec = spec or kwarg
table = spec.get('table', 254)
assert 'dst' in spec
if table not in self.tables:
self.tables[table] = RoutingTable(self.ipdb)
route = Route(self.ipdb)
metrics = spec.pop('metrics', {})
multipath = spec.pop('multipath', [])
route.update(spec)
route.metrics.update(metrics)
route.set_item('ipdb_scope', 'create')
self.tables[table][route['dst']] = route
route.begin()
for nh in multipath:
route.add_nh(nh)
return route
def load_netlink(self, msg):
'''
Loads an existing route from a rtmsg
'''
table = msg.get('table', 254)
if table in self.ignore_rtables:
return
if not isinstance(msg, rtmsg):
return
# construct a key
# FIXME: temporary solution
# FIXME: can `Route()` be used as a key?
key = RouteKey(msg)
# RTM_DELROUTE
if msg['event'] == 'RTM_DELROUTE':
try:
# locate the record
record = self.tables[table][key]
# delete the record
if record['ipdb_scope'] not in ('locked', 'shadow'):
del self.tables[table][key]
record.set_item('ipdb_scope', 'detached')
# sync ???
record.sync()
except Exception as e:
logging.debug(e)
logging.debug(msg)
return
# RTM_NEWROUTE
if table not in self.tables:
self.tables[table] = RoutingTable(self.ipdb)
self.tables[table][key] = msg
return self.tables[table][key]
def remove(self, route, table=None):
if isinstance(route, Route):
table = route.get('table', 254) or 254
route = route.get('dst', 'default')
else:
table = table or 254
del self.tables[table][route]
def describe(self, spec, table=254):
return self.tables[table].describe(spec)
def get(self, dst, table=None):
table = table or 254
return self.tables[table][dst]
def keys(self, table=254, family=AF_UNSPEC):
return [x['dst'] for x in self.tables[table]
if (x.get('family') == family)
or (family == AF_UNSPEC)]
def has_key(self, key, table=254):
return key in self.tables[table]
def __contains__(self, key):
return key in self.tables[254]
def __getitem__(self, key):
return self.get(key)
def __setitem__(self, key, value):
assert key == value['dst']
return self.add(value)
def __delitem__(self, key):
return self.remove(key)
def __repr__(self):
return repr(self.tables[254])
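# --- Illustrative usage (added for clarity; not part of the original module) ---
# A minimal sketch of how the Route / RoutingTable machinery above is
# normally driven through pyroute2's IPDB.  The spec keys mirror what
# RoutingTableSet.add() expects; addresses and table number are placeholders.
#
#     from pyroute2 import IPDB
#
#     ip = IPDB()
#     # add() creates a Route in the 'create' scope; commit() pushes it
#     # to the kernel via netlink (see Route.commit above)
#     ip.routes.add({'dst': '10.0.0.0/24',
#                    'gateway': '192.168.122.1',
#                    'table': 254}).commit()
#     # RoutingTable.describe() resolves str/tuple/dict keys, so a plain
#     # destination string is enough for a lookup
#     with ip.routes['10.0.0.0/24'] as r:
#         r.remove()            # scope -> 'remove'; commit happens on exit
#     ip.release()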
|
apache-2.0
| 165,606,072,297,621,380
| 30.417323
| 79
| 0.497368
| false
| 4.125097
| false
| false
| false
|
mcublocks/embeddecy
|
Embeddecy-project/docs/embcdocumentation/conf.py
|
1
|
10287
|
# -*- coding: utf-8 -*-
#
# Язык Embeddecy documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 29 10:45:50 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Язык Embeddecy'
copyright = u'2017, ООО "НПП САТЭК плюс"'
author = u'ООО "НПП САТЭК плюс"'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'ru'
# There are two options for replacing |today|: either you set today to some
# non-false value, and then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'Язык Embeddecy v1.0.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Embeddecydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Embeddecy.tex', u'Язык Embeddecy Documentation',
u'ООО "НПП САТЭК плюс"', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'embeddecy', u'Язык Embeddecy Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Embeddecy', u'Язык Embeddecy Documentation',
author, 'Embeddecy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
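# --- Illustrative usage (added for clarity; not part of the generated file) ---
# With this conf.py in place the documentation is typically built with
# sphinx-build; the source and output paths below are placeholders:
#
#     sphinx-build -b html . _build/html
#
# Note that sphinx_rtd_theme must be importable (pip install sphinx_rtd_theme),
# since it is imported unconditionally at the top of this file.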
|
apache-2.0
| -8,252,560,133,260,538,000
| 28.141593
| 80
| 0.669211
| false
| 3.387931
| true
| false
| false
|
choeminjun/rasberryPI
|
MailSender/SendMail_ras.py
|
1
|
6156
|
import smtplib, logging, datetime, imaplib
import email
AUTH_EMAIL_SENDER = 'choeminjun@naver.com'
class MailSenderAPP(object):
def __init__(self, my_email, my_password):
self.myEmail = my_email
self.myPassword = my_password
self.mailSever = smtplib.SMTP("smtp.gmail.com", 587)
# Emailsuq = email
# self.Subject = subject
@staticmethod
def __loggerSetup__():
logging.basicConfig(filename='logging.txt', level=logging.DEBUG)
    def connect_to_server(self):
        try:
            logging.info('Trying to connect to gmail server...')
            # ehlo/starttls must come before login on port 587; Gmail
            # rejects authentication on a still-unencrypted channel
            self.mailSever.ehlo()
            self.mailSever.starttls()
            self.mailSever.ehlo()
            self.mailSever.login(self.myEmail, self.myPassword)
            logging.info('connect to server: success')
        except Exception as Error:
            logging.error('Cannot connect to gmail server. Error message: ' + str(Error))
            return 'Server connect Error: ' + str(Error)
    def EmailSender(self, Emailsuq, subject):
        logging.info('--------Program Starting at:%s.-------------' % (datetime.datetime.now()))
        if type(Emailsuq).__name__ != 'list':
            logging.error('Emailsuq has to be a list, like this: ["blah@blah.com"]')
            return 'Emailsuq has to be a list Error' + str(type(Emailsuq))
        try:
            logging.info('Trying to login with Email and password...')
            logging.info('logging in to gmail server: success')
        except Exception as Error:
            logging.error('Cannot login to gmail server. Error message: ' + str(Error))
            return 'Login Error: ' + str(Error)
        try:
            logging.info('Sending mail to %s...' % (Emailsuq))
            Email_number = 0
            # use 'receiver' as the loop variable so it does not shadow
            # the 'email' module imported at the top of this file
            for receiver in Emailsuq:
                self.mailSever.sendmail(self.myEmail, receiver, subject)
                Email_number += 1
            logging.info('Sending mail to %s: success' % (Emailsuq))
        except Exception as Error:
            logging.error('Cannot send mail to %s. Error message: %s' % (Emailsuq, str(Error)))
            return 'Mail sending Error: ' + str(Error)
        return True
def end_connection(self):
self.mailSever.close()
logging.info('-----------Program Exited-------------')
#
#
# def main(my_email, my_password, email=[], subject='Subject:SE\nTyto alab'):
# MailSenderAPP.__loggerSetup__()
# status = MailSenderAPP(my_email, my_password, email, subject).EmailSender()
#
# return status
class MailReaderAPP(object):
def __init__(self, my_email, my_password):
self.myEmail = my_email
self.myPassword = my_password
        # Gmail's IMAP endpoint is imap.gmail.com; smtp.gmail.com only speaks SMTP
        self.mailSever = imaplib.IMAP4_SSL("imap.gmail.com", 993)
@staticmethod
def __loggerSetup__():
logging.basicConfig(filename='logging.txt', level=logging.DEBUG)
logging.debug('--------------MailReaderAPP----------------')
    def connect_to_server(self):
        try:
            logging.info('Trying to connect to gmail server...')
            # self.mailSever.starttls()
            self.mailSever.login(self.myEmail, self.myPassword)
            logging.info('connect to server: success')
        except Exception as Error:
            logging.error('Cannot connect to gmail server. Error message: ' + str(Error))
            return 'Server connect Error: ' + str(Error)
def read_latest_mail_and_command(self):
try:
            logging.info('Reading mail from gmail server...')
logging.info('selecting inbox...')
self.mailSever.list()
self.mailSever.select('inbox')
unread_emails = []
logging.info('getting unseen emails...')
result, data = self.mailSever.uid('search', None, "UNSEEN") # (ALL/UNSEEN)
i = len(data[0].split())
for x in range(i):
logging.info('Decoding unseen email' + str(x))
latest_email_uid = data[0].split()[x]
result, email_data = self.mailSever.uid('fetch', latest_email_uid, '(RFC822)')
# result, email_data = conn.store(num,'-FLAGS','\\Seen')
# this might work to set flag to seen, if it doesn't already
raw_email = email_data[0][1]
raw_email_string = raw_email.decode('utf-8')
email_message = email.message_from_string(raw_email_string)
# Header Details
date_tuple = email.utils.parsedate_tz(email_message['Date'])
if date_tuple:
local_date = datetime.datetime.fromtimestamp(email.utils.mktime_tz(date_tuple))
local_message_date = "%s" % (str(local_date.strftime("%a, %d %b %Y %H:%M:%S")))
email_from = str(email.header.make_header(email.header.decode_header(email_message['From'])))
email_to = str(email.header.make_header(email.header.decode_header(email_message['To'])))
subject = str(email.header.make_header(email.header.decode_header(email_message['Subject'])))
# Body details
logging.info('getting body details...')
for part in email_message.walk():
if part.get_content_type() == "text/plain":
logging.info('getting body details of '+ str(part))
body = part.get_payload(decode=True)
unread_emails.append({'Body': body.decode('utf-8'), 'sender': email_from})
else:
continue
try:
                logging.info('returning results...')
unread_email_ = []
for i in unread_emails:
if i['sender'] == '최민준 <choeminjun@naver.com>':
unread_email_.append(i)
return unread_email_
except:
return None
except Exception as E:
logging.error('Error while finding latest email' + str(E))
return 'Sever email read error:' + str(E)
def end_connection(self):
self.mailSever.close()
logging.info('-----------Program Exited-------------')
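# --- Illustrative usage (added for clarity; not part of the original file) ---
# A minimal sketch of how the two classes are meant to be combined; the
# credentials and addresses are placeholders, and Gmail typically requires
# an app password for this kind of login.
#
#     sender = MailSenderAPP('me@gmail.com', 'app-password')
#     sender.__loggerSetup__()
#     sender.connect_to_server()
#     # the 'subject' argument is the raw message, so it may carry headers:
#     sender.EmailSender(['friend@example.com'], 'Subject:SE\nTyto alab')
#     sender.end_connection()
#
#     reader = MailReaderAPP('me@gmail.com', 'app-password')
#     reader.connect_to_server()
#     commands = reader.read_latest_mail_and_command()
#     reader.end_connection()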
|
apache-2.0
| 941,600,670,764,131,200
| 39.728477
| 109
| 0.559024
| false
| 4.003906
| false
| false
| false
|
Julian24816/lHelper
|
__main__.py
|
1
|
1440
|
# coding=utf-8
#
# Copyright (C) 2016 Julian Mueller
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
lHelper project: python application for helping me learn Latin
"""
import argparse
from sys import path
from os import chdir
from os.path import dirname, abspath
ENABLE_DATA_COMMANDS = False
__version__ = "1.4.7"
__author__ = "Julian Mueller"
path_to_main = abspath(__file__)
path.append(dirname(path_to_main))
chdir(dirname(path_to_main))
parser = argparse.ArgumentParser()
parser.add_argument("-g", "--gui", action="store_const", const="gui", default="cli", dest="ui",
help="Run the program with a GUI instead of a CLI.")
ui_choice = parser.parse_args().ui
if ui_choice == "cli":
import cli
cli.main(__version__, enable_data_commands=ENABLE_DATA_COMMANDS)
elif ui_choice == "gui":
import main
main.main()
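# --- Illustrative usage (added for clarity; not part of the original file) ---
# Since this is a package-level __main__.py, the program is typically started
# by pointing the interpreter at the package directory (name is a placeholder):
#
#     python lHelper            # CLI front end (default)
#     python lHelper --gui      # GUI front end, per the -g/--gui flag above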
|
gpl-3.0
| 7,290,843,507,566,273,000
| 31
| 95
| 0.717361
| false
| 3.664122
| false
| false
| false
|
frank-u/elibrarian
|
migrations/versions/27c24cd72c1_user_personal_bookshelf.py
|
1
|
1307
|
"""user personal bookshelf
Revision ID: 27c24cd72c1
Revises: 4a8674c1b8a
Create Date: 2015-04-20 20:46:20.702185
"""
# revision identifiers, used by Alembic.
revision = '27c24cd72c1'
down_revision = '4a8674c1b8a'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('users_personal_library',
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('literary_work_id', sa.Integer(), nullable=False),
sa.Column('plan_to_read', sa.Boolean(), nullable=False),
sa.Column('read_flag', sa.Boolean(), nullable=False),
sa.Column('read_progress', sa.Integer(), nullable=True),
sa.Column('read_date', sa.Date(), nullable=True),
sa.Column('rating', sa.Integer(), nullable=True),
sa.Column('comment', sa.Text(), nullable=True),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['literary_work_id'], ['literary_works.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['auth_users.id'], ),
sa.PrimaryKeyConstraint('user_id', 'literary_work_id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('users_personal_library')
### end Alembic commands ###
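# --- Illustrative usage (added for clarity; not part of the original file) ---
# Alembic applies this migration by its revision id; assuming a configured
# alembic.ini at the project root:
#
#     alembic upgrade 27c24cd72c1     # creates users_personal_library
#     alembic downgrade 4a8674c1b8a   # drops it again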
|
gpl-3.0
| 5,012,969,641,236,505,000
| 32.512821
| 75
| 0.674063
| false
| 3.334184
| false
| false
| false
|
nansencenter/DAPPER
|
dapper/mods/KS/demo.py
|
1
|
2208
|
"""Demonstrate the Kuramoto-Sivashinsky (KS) system."""
# The Kuramoto-Sivashinsky (KS) system:
# u_t = -u*u_x - u_xx - u_xxxx,
# where x ∈ [0, L], periodic BCs,
# is the simplest (?) PDE that admits chaos (requires L>=12?):
#
# Its numerical solution is best undertaken
# with Fourier decomposition for the spatial variable.
# According to kassam2005fourth:
# - The equation is stiff, due to higher-order linear terms:
# - the diffusion term acts as an energy source,
# causing instability of high-order (large-scale Fourier) modes.
# - the hyper-diffusion term yields stability of the low-order modes.
# - The nonlinear term induces mixing of the (Fourier) modes.
#
# bocquet2019consistency use it with DA because:
# "it is characterised by sharp density gradients
# so that it may be expected that local EnKFs are prone to imbalance"
#
# hickmann2017multiresolution use it with DA because:
# "[The mixing allows us to] investigate the effect of
# propagating scale-dependent information through the EnKF."
#
# www.encyclopediaofmath.org/index.php/Kuramoto-Sivashinsky_equation:
# Number of unstable modes almost directly proportional to L?
#
# Applications:
# - modeling hydrodynamic stability of laminar flame fronts
# - instabilities in thin films and the flow of a viscous fluid down a vertical plane
# - etc
#
# It can be observed in the plots that sharpness from the ICs
# remains in the system for a long time (forever?).
import numpy as np
from matplotlib import pyplot as plt
from dapper.mods.KS import Model
from dapper.tools.viz import amplitude_animation
model = Model()
# Time settings
T = 150
dt = model.dt
K = round(T/dt)
# IC
N = 3
tt = np.zeros((K+1,))
EE = np.zeros((K+1, N, model.Nx))
# x0 = x0_Kassam
EE[0] = model.x0 + 1e-3*np.random.randn(N, model.Nx)
# Integrate
for k in range(1, K+1):
EE[k] = model.step(EE[k-1], np.nan, dt)
tt[k] = k*dt
# Animate
ani = amplitude_animation(EE, dt, interval=20)
# Plot
plt.figure()
n = 0
plt.contourf(model.grid, tt, EE[:, n, :], 60)
plt.colorbar()
plt.set_cmap('seismic')
plt.axis('tight')
plt.title('Hovmoller for KS system, member %d' % n)
plt.ylabel('Time (t)')
plt.xlabel('Space (x)')
plt.show()
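# --- Illustrative extension (added for clarity; not part of the original demo) ---
# amplitude_animation() presumably returns a matplotlib animation object, so
# the run could also be written to disk (filename is a placeholder; saving
# to mp4 requires ffmpeg to be installed):
#
#     ani.save('KS_demo.mp4', fps=30)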
|
mit
| 7,997,371,697,744,379,000
| 28.413333
| 85
| 0.707616
| false
| 2.850129
| false
| false
| false
|
projectcuracao/projectcuracao
|
actions/useCamera.py
|
1
|
2300
|
# Takes a single picture on the system
# filename: useCamera.py
# Version 1.0 10/31/13
#
# takes a picture using the camera
#
#
import sys
import time
import RPi.GPIO as GPIO
import serial
#import picamera
import subprocess
import MySQLdb as mdb
sys.path.append('/home/pi/ProjectCuracao/main/hardware')
sys.path.append('/home/pi/ProjectCuracao/main/pclogging')
sys.path.append('/home/pi/ProjectCuracao/main/util')
import pclogging
import util
import hardwareactions
def sweepShutter(source, delay):
print("sweepShutter source:%s" % source)
GPIO.setmode(GPIO.BOARD)
time.sleep(delay)
# blink GPIO LED when it's run
GPIO.setup(22, GPIO.OUT)
GPIO.output(22, False)
time.sleep(0.5)
GPIO.output(22, True)
time.sleep(0.5)
hardwareactions.sweepshutter()
time.sleep(3.0)
pclogging.log(pclogging.INFO, __name__, "Sweep Shutter")
def takePicture(source):
try:
f = open("/home/pi/ProjectCuracao/main/state/exposure.txt", "r")
tempString = f.read()
f.close()
lowername = tempString
except IOError as e:
lowername = "auto"
exposuremode = lowername
# take picture
print "taking picture"
cameracommand = "raspistill -o /home/pi/RasPiConnectServer/static/picameraraw.jpg -rot 180 -t 750 -ex " + exposuremode
print cameracommand
output = subprocess.check_output (cameracommand,shell=True, stderr=subprocess.STDOUT )
output = subprocess.check_output("convert '/home/pi/RasPiConnectServer/static/picameraraw.jpg' -pointsize 72 -fill white -gravity SouthWest -annotate +50+100 'ProjectCuracao %[exif:DateTimeOriginal]' '/home/pi/RasPiConnectServer/static/picamera.jpg'", shell=True, stderr=subprocess.STDOUT)
pclogging.log(pclogging.INFO, __name__, source )
print "finished taking picture"
return
def takeSinglePicture(source, delay):
print("takeSinglePicture source:%s" % source)
GPIO.setmode(GPIO.BOARD)
time.sleep(delay)
# blink GPIO LED when it's run
GPIO.setup(22, GPIO.OUT)
GPIO.output(22, False)
time.sleep(0.5)
GPIO.output(22, True)
time.sleep(0.5)
print GPIO.VERSION
hardwareactions.openshutter()
time.sleep(3.0)
takePicture("Single Picture Taken With Shutter")
hardwareactions.closeshutter()
return
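# --- Illustrative usage (added for clarity; not part of the original file) ---
# These functions are meant to be driven by the ProjectCuracao scheduler;
# a minimal manual test might look like this (the second argument is the
# delay in seconds before the GPIO/shutter sequence starts):
#
#     takeSinglePicture("manual test", 0)
#     sweepShutter("manual test", 0)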
|
gpl-3.0
| -7,709,868,329,935,272,000
| 22.232323
| 290
| 0.706957
| false
| 3.129252
| false
| false
| false
|
hman523/QuickStocks
|
QuickStocks.py
|
1
|
5274
|
#Quick Stocks v.2
#Author: Hunter Barbella (aka hman523)
#Use: to have a command line interface for checking stocks
#This code uses the GPL license while the API uses the MIT license
#This code is provided AS IS and comes with no warranty
#Imports the libraries requests for making the GET request and
#JSON for parsing the request
#sys and argparse for command line options
import requests
import json
import sys
import argparse
#This is the API URL that is used
emptyUrl = "http://dev.markitondemand.com/MODApis/Api/v2/Quote/jsonp?symbol="
#A nice welcome screen to print
def welcomePrint():
print(" /$$$$$$ /$$ /$$ \n" +
" /$$__ $$ |__/ | $$ \n" +
"| $$ \ $$ /$$ /$$ /$$ /$$$$$$$| $$ /$$ \n" +
"| $$ | $$| $$ | $$| $$ /$$_____/| $$ /$$/ \n" +
"| $$ | $$| $$ | $$| $$| $$ | $$$$$$/ \n" +
"| $$/$$ $$| $$ | $$| $$| $$ | $$_ $$ \n" +
"| $$$$$$/| $$$$$$/| $$| $$$$$$$| $$ \ $$ \n" +
" \____ $$$ \______/ |__/ \_______/|__/ \__/ \n" +
" \__/ \n" +
" \n" +
" \n" +
" /$$$$$$ /$$ /$$ \n" +
" /$$__ $$ | $$ | $$ \n" +
"| $$ \__//$$$$$$ /$$$$$$ /$$$$$$$| $$ /$$ /$$$$$$$\n" +
"| $$$$$$|_ $$_/ /$$__ $$ /$$_____/| $$ /$$/ /$$_____/\n" +
" \____ $$ | $$ | $$ \ $$| $$ | $$$$$$/ | $$$$$$ \n" +
" /$$ \ $$ | $$ /$$| $$ | $$| $$ | $$_ $$ \____ $$\n" +
"| $$$$$$/ | $$$$/| $$$$$$/| $$$$$$$| $$ \ $$ /$$$$$$$/\n" +
" \______/ \___/ \______/ \_______/|__/ \__/|_______/ \n" +
"\n\nVersion: 2.0 Author: Hunter Barbella (AKA hman523)\n\n"
)
#Informs user how to leave program
print("To quit type quit or Control + \"c\"")
#calls the api and returns a nice string of the json
def callApi(stockSymbol):
    # upper() returns a new string, so the result must be reassigned
    stockSymbol = stockSymbol.upper()
apiCall = requests.get(emptyUrl + stockSymbol)
apiCall = str(apiCall.content)
    #Clean up the junk by getting rid of the unneeded data
indexOfStatus = apiCall.find('\"Status\"')
length = len(apiCall)
apiCall = apiCall[(indexOfStatus-1):length-2]
return apiCall
#converts the string to a json file if it can, if not it returns none
def apiCallToJson(call):
if(len(call) > 0):
jsonOfCall = json.loads(call)
return jsonOfCall
else:
return None
#prints all metadata from a given json
def printAllInfo(jsonOfCall):
if(jsonOfCall is not None and jsonOfCall['Timestamp'] is not None):
print("Firm- " + jsonOfCall['Name'])
print("Symbol- " + jsonOfCall['Symbol'])
print("Last Price- " + str(jsonOfCall['LastPrice']))
print("Change- " + str(jsonOfCall['Change']))
print("Percent Change- " + str(jsonOfCall['ChangePercent']) + "%")
print("Time- " + str(jsonOfCall['Timestamp']))
print("Market Cap- " + str(jsonOfCall['MarketCap']))
print("Volume- " + str(jsonOfCall['Volume']))
print("High- " + str(jsonOfCall['High']))
print("Low- " + str(jsonOfCall['Low']))
print("Open- " + str(jsonOfCall['Open']))
print("Year To Date Change- " + str(jsonOfCall['ChangeYTD']))
print("Year To Date Percent Change- " + str(jsonOfCall['ChangePercentYTD']) + "%")
print("")
    else:
        error = "unknown error occurred"
        if(jsonOfCall is None):
            error = "stock doesn't exist"
        else:
            # compare with == rather than 'is': identity checks against
            # numbers only work by accident for small CPython ints
            if(jsonOfCall['LastPrice'] == 0 and jsonOfCall['MarketCap'] == 0):
                error = ("server error with stock " + jsonOfCall['Symbol'])
        print("Error occurred: " + error + "\n")
#gets the user input and returns it, also checks if user quits program
def getUserInput():
print("Enter a ticket symbol for a firm or load file:")
userInput = input()
if(userInput.lower() == 'quit'):
quit()
userInput = userInput.replace(" ", "")
return userInput
#using a filename, this opens and returns stock info
def getStocksFromFile(stockFile):
with open(stockFile) as f:
listOfNames = f.readlines()
listOfNames = [i.strip() for i in listOfNames]
return listOfNames
#Main loop in the program
#Asks the user for a stock symbol and searches info based on that
def main():
welcomePrint()
descriptionString = """Arguments: -f -q
f: file name
q: quick lookup option
"""
parser = argparse.ArgumentParser(description=descriptionString)
parser.add_argument('-sparams', nargs=1, dest='sparams', required=False,
help="Use the argument -q or -f")
while(True):
#It gets the user inout, calls the api with it,
# converts it to a JSON then it prints the data.
userIn = getUserInput()
if (userIn.startswith('load')):
names = getStocksFromFile(userIn[4:])
print("Reading from " + userIn[4:])
for n in names:
print("Reading...")
printAllInfo(apiCallToJson(callApi(n)))
else:
printAllInfo(apiCallToJson(callApi(userIn)))
if __name__ == '__main__':
main()
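# --- Illustrative usage (added for clarity; not part of the original file) ---
# The 'load' command expects a plain text file with one ticker symbol per
# line, e.g. a hypothetical stocks.txt containing:
#
#     AAPL
#     MSFT
#     GOOG
#
# which is then read at the prompt with:  load stocks.txt
# (getUserInput() strips spaces, so 'loadstocks.txt' works identically and
# everything after the first four characters is taken as the file name)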
|
gpl-3.0
| 71,577,834,545,399,464
| 31.392405
| 84
| 0.502465
| false
| 2.957936
| false
| false
| false
|
sa2ajj/DistroTracker
|
pts/mail/tests/tests_mail_news.py
|
1
|
5377
|
# -*- coding: utf-8 -*-
# Copyright 2013 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at http://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.
"""
Tests for the :mod:`pts.mail.mail_news` app.
"""
from __future__ import unicode_literals
from django.test import TestCase, SimpleTestCase
from django.utils import six
from django.utils.six.moves import mock
from django.utils.encoding import force_bytes
from pts.core.models import SourcePackageName, SourcePackage
from pts.core.models import News
from pts.core.tests.common import temporary_media_dir
from pts.mail.mail_news import process
from pts.mail.management.commands.pts_receive_news import (
Command as MailNewsCommand)
from email.message import Message
class BasicNewsGeneration(TestCase):
def setUp(self):
self.package_name = SourcePackageName.objects.create(name='dummy-package')
self.package = SourcePackage.objects.create(
source_package_name=self.package_name,
version='1.0.0')
self.message = Message()
def set_subject(self, subject):
if 'Subject' in self.message:
del self.message['Subject']
self.message['Subject'] = subject
def add_header(self, header_name, header_value):
self.message[header_name] = header_value
def set_message_content(self, content):
self.message.set_payload(content)
def process_mail(self):
process(force_bytes(self.message.as_string(), 'utf-8'))
@temporary_media_dir
def test_creates_news_from_email(self):
"""
Tets that a news is created from an email with the correct header
information.
"""
subject = 'Some message'
content = 'Some message content'
self.set_subject(subject)
self.add_header('X-PTS-Package', self.package.name)
self.set_message_content(content)
self.process_mail()
# A news item is created
self.assertEqual(1, News.objects.count())
news = News.objects.all()[0]
# The title of the news is set correctly.
self.assertEqual(subject, news.title)
self.assertIn(content, news.content)
# The content type is set to render email messages
self.assertEqual(news.content_type, 'message/rfc822')
@temporary_media_dir
def test_create_news_url_from_email(self):
"""
Tests that when an X-PTS-Url header is given the news content is the
URL, not the email message.
"""
subject = 'Some message'
content = 'Some message content'
self.set_subject(subject)
self.add_header('X-PTS-Package', self.package.name)
url = 'http://some-url.com'
self.add_header('X-PTS-Url', url)
self.set_message_content(content)
self.process_mail()
# A news item is created
self.assertEqual(1, News.objects.count())
news = News.objects.all()[0]
# The title of the news is set correctly.
self.assertEqual(url, news.title)
self.assertIn(url, news.content.strip())
@temporary_media_dir
def test_create_news_package_does_not_exist(self):
"""
Tests that when the package given in X-PTS-Package does not exist, no
news items are created.
"""
subject = 'Some message'
content = 'Some message content'
self.set_subject(subject)
self.add_header('X-PTS-Package', 'no-exist')
self.set_message_content(content)
# Sanity check - there are no news at the beginning
self.assertEqual(0, News.objects.count())
self.process_mail()
# There are still no news
self.assertEqual(0, News.objects.count())
@mock.patch('pts.mail.mail_news.vendor.call')
@temporary_media_dir
def test_create_news_calls_vendor_function(self, mock_vendor_call):
"""
Tests that the vendor-provided function is called during the processing
of the news.
"""
subject = 'Some message'
content = 'Some message content'
# Do not add any headers.
self.set_subject(subject)
self.set_message_content(content)
# Make it look like the vendor does not implement the function
mock_vendor_call.return_value = (None, False)
self.process_mail()
# The function was called?
self.assertTrue(mock_vendor_call.called)
# The correct vendor function was asked for?
self.assertEqual(mock_vendor_call.call_args[0][0], 'create_news_from_email_message')
class MailNewsManagementCommandTest(SimpleTestCase):
"""
Tests that the :mod:`pts.mail.management.commands.pts_receive_news`
management command calls the correct function.
"""
@mock.patch('pts.mail.management.commands.pts_receive_news.process')
def test_calls_process(self, mock_process):
cmd = MailNewsCommand()
cmd.input_file = mock.create_autospec(six.BytesIO)
mock_process.assert_called()
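# --- Illustrative usage (added for clarity; not part of the original file) ---
# These TestCases run under Django's test runner; assuming the usual
# manage.py at the project root, something like:
#
#     ./manage.py test pts.mail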
|
gpl-2.0
| -2,727,537,387,692,635,000
| 34.609272
| 92
| 0.660963
| false
| 3.933431
| true
| false
| false
|
royharoush/rtools
|
dnmaps.py
|
1
|
25943
|
#! /usr/bin/env python
#
# DNmap Server - Edited by Justin Warner (@sixdub). Originally written by Sebastian Garcia
# Original Copyright and license (included below) apply.
#
# This is the server code to be used in conjunction with Minions, a collaborative distributed
# scanning solution.
#
#
# DNmap Version Modified: .6
# Copyright (C) 2009 Sebastian Garcia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
# Author:
# Sebastian Garcia eldraco@gmail.com
#
# Based on code from Twisted examples.
# Copyright (c) Twisted Matrix Laboratories.
#
import logging
import logging.handlers
import datetime
import sqlite3
import os
import xml.etree.ElementTree as ET
try:
from twisted.internet.protocol import Factory, Protocol
from twisted.internet import ssl, reactor, task
from twisted.python import log
from twisted.python.logfile import DailyLogFile
except:
print 'You need twisted library. apt-get install python-twisted-bin python-twisted-core'
exit(-1)
import getopt, sys, time, os
try:
from OpenSSL import SSL
except:
print 'You need python openssl library. apt-get install python-openssl'
exit(-1)
# Global variables
vernum='0.6'
nmap_commands_file = ''
nmap_command = []
nmap_commands_sent = []
trace_file = ''
nmap_output_coming_back = False
XML_file= ''
GNmap_file=''
outputswitch=''
file_position = 0
clients = {}
port=8001
clientes = {}
base_dir = os.path.dirname(os.path.abspath(__file__))
output_file=os.path.join(base_dir,"current_output")
log_file=os.path.join(base_dir, "log")
log_level='info'
sql_conn=''
sql_file=''
verbose_level = 2
# 0: quiet
# 1: info, normal
# 2: Statistical table
# 3: debug
# 4: ?
# 5: ?
# This is to assure that the first time we run, something is shown
temp = datetime.datetime.now()
delta = datetime.timedelta(seconds=5)
last_show_time = temp - delta
# defaults to 4 hours (14400 seconds)
client_timeout = 14400
sort_type = 'Status'
# By default in the same directory
pemfile = os.path.join(base_dir,'server.pem')
cafile = os.path.join(base_dir,'ca.pem')
# End of global variables
# Print version information and exit
def version():
print "+----------------------------------------------------------------------+"
print "| dnmap_server Version "+ vernum +" |"
print "| This program is free software; you can redistribute it and/or modify |"
print "| it under the terms of the GNU General Public License as published by |"
print "| the Free Software Foundation; either version 2 of the License, or |"
print "| (at your option) any later version. |"
print "| |"
print "| Author: Garcia Sebastian, eldraco@gmail.com |"
print "| www.mateslab.com.ar |"
print "+----------------------------------------------------------------------+"
print
# Print help information and exit:
def usage():
print "+----------------------------------------------------------------------+"
print "| dnmap_server Version "+ vernum +" |"
print "| This program is free software; you can redistribute it and/or modify |"
print "| it under the terms of the GNU General Public License as published by |"
print "| the Free Software Foundation; either version 2 of the License, or |"
print "| (at your option) any later version. |"
print "| |"
print "| Author: Garcia Sebastian, eldraco@gmail.com |"
print "| www.mateslab.com.ar |"
print "+----------------------------------------------------------------------+"
print "\nusage: %s <options>" % sys.argv[0]
print "options:"
print " -f, --nmap-commands Nmap commands file"
print " -p, --port TCP port where we listen for connections."
print " -L, --log-file Log file. Defaults to /var/log/dnmap_server.conf."
print " -l, --log-level Log level. Defaults to info."
print " -v, --verbose_level Verbose level. Give a number between 1 and 5. Defaults to 1. Level 0 means be quiet."
print " -t, --client-timeout How many time should we wait before marking a client Offline. We still remember its values just in case it cames back."
print " -s, --sort Field to sort the statical value. You can choose from: Alias, #Commands, UpTime, RunCmdXMin, AvrCmdXMin, Status"
print " -P, --pemfile pem file to use for TLS connection. By default we use the server.pem file provided with the server in the current directory."
print
print "dnmap_server uses a \'<nmap-commands-file-name>.dnmaptrace\' file to know where it must continue reading the nmap commands file. If you want to start over again,"
print "just delete the \'<nmap-commands-file-name>.dnmaptrace\' file"
print
sys.exit(1)
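# Illustrative note (added for clarity): the commands file passed with -f is
# expected to hold one complete nmap invocation per line, handed to clients
# verbatim; the targets below are placeholders:
#
#     nmap -sS -p 1-1024 192.168.0.10
#     nmap -sV --top-ports 100 192.168.0.0/24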
def timeout_idle_clients():
"""
This function search for idle clients and mark them as offline, so we do not display them
"""
global mlog
global verbose_level
global clients
global client_timeout
try:
for client_id in clients:
now = datetime.datetime.now()
time_diff = now - clients[client_id]['LastTime']
if time_diff.seconds >= client_timeout:
clients[client_id]['Status']='Offline'
except Exception as inst:
if verbose_level > 2:
            msgline = 'Problem in timeout_idle_clients function'
mlog.error(msgline)
print msgline
msgline = type(inst)
mlog.error(msgline)
print msgline
msgline = inst.args
mlog.error(msgline)
print msgline
msgline = inst
mlog.error(msgline)
print msgline
def read_file_and_fill_nmap_variable():
""" Here we fill the nmap_command with the lines of the txt file. Only the first time. Later this file should be filled automatically"""
global nmap_commands_file
global nmap_command
global trace_file
global file_position
global mlog
global verbose_level
global sql_conn
global sql_file
with open(nmap_commands_file,'r') as f:
jobs = f.readlines()
#make sure all jobs in file are in queue
for job in jobs:
if not job in nmap_command:
nmap_command.insert(0,job)
mlog.debug('New Job: {0}'.format(job))
    #clear queue of things not in jobs file; iterate over a copy,
    #since removing items while iterating the same list skips entries
    for job in nmap_command[:]:
        if job not in jobs:
            nmap_command.remove(job)
return
def verifyCallback(connection, x509, errnum, errdepth, ok):
if not ok:
print "Invalid cert from subject: ",x509.get_subject()
return False
else:
return True
class ServerContextFactory:
global mlog
global verbose_level
global pemfile
global cafile
""" Only to set up SSL"""
def getContext(self):
"""
Create an SSL context.
"""
try:
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_certificate_file(pemfile)
ctx.use_privatekey_file(pemfile)
except:
print "Unexpected error:", sys.exc_info()[0]
print 'You need to have a server.pem file for the server to work'
print pemfile
exit(-1)
# try:
# ctx.set_verify(SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback)
# ctx.load_verify_locations(cafile)
# except:
# print "Unexpected error:", sys.exc_info()[0]
# print 'You need to have a ca.pem file for the server to work'
# print cafile
# exit(-1)
return ctx
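# Illustrative note (added for clarity): a self-signed server.pem suitable for
# testing can be produced with openssl, combining key and certificate in the
# single file this factory expects:
#
#     openssl req -new -x509 -days 365 -nodes -out server.pem -keyout server.pem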
def show_info():
global verbose_level
global mlog
global clients
global last_show_time
global start_time
global sort_type
global output_file
of = open(output_file, "w")
try:
now = datetime.datetime.now()
diff_time = now - start_time
amount = 0
for j in clients:
if clients[j]['Status'] != 'Offline':
amount += 1
if verbose_level > 0:
line = '=| MET:{0} | Amount of Online clients: {1} |='.format(diff_time, amount)
print line
mlog.debug(line)
of.write(line+"\n")
if clients != {}:
if verbose_level > 1:
line = 'Clients connected'
print line
mlog.debug(line)
of.write(line+"\n")
line = '-----------------'
print line
mlog.debug(line)
of.write(line+"\n")
#line = 'Alias\t#Commands\tLast Time Seen\t\t\tVersion\tIsRoot\tStatus'
line = '{0:15}\t{1}\t{2}\t{3}\t{4}\t\t{5}\t{6}\t{7}\t{8}\t{9}'.format('Alias','#Commands','Last Time Seen', '(time ago)', 'UpTime', 'Version', 'IsRoot', 'RunCmdXMin', 'AvrCmdXMin', 'Status')
print line
mlog.debug(line)
of.write(line+"\n")
for i in clients:
if clients[i]['Status'] != 'Offline':
# Strip the name of the day and the year
temp = clients[i]['LastTime'].ctime().split(' ')[1:-1]
lasttime = ''
for j in temp:
lasttime = lasttime + str(j) + ' '
time_diff = datetime.datetime.now() - clients[i]['LastTime']
#time_diff_secs = int(time_diff.total_seconds() % 60)
#time_diff_secs = int(time_diff.seconds % 60)
time_diff_secs = int( (time_diff.seconds + (time_diff.microseconds / 1000000.0) ) % 60)
#time_diff_mins = int(time_diff.total_seconds() / 60)
#time_diff_mins = int(time_diff.seconds / 60)
time_diff_mins = int( (time_diff.seconds + (time_diff.microseconds / 1000000.0) ) / 60)
uptime_diff = datetime.datetime.now() - clients[i]['FirstTime']
#uptime_diff_hours = int(uptime_diff.total_seconds() / 3600)
#uptime_diff_hours = int(uptime_diff.seconds / 3600)
uptime_diff_hours = int( (uptime_diff.seconds + (uptime_diff.microseconds / 1000000.0)) / 3600)
#uptime_diff_mins = int(uptime_diff.total_seconds() % 3600 / 60)
#uptime_diff_mins = int(uptime_diff.seconds % 3600 / 60)
uptime_diff_mins = int( ((uptime_diff.seconds % 3600) + (uptime_diff.microseconds / 1000000.0)) / 60)
line = '{0:15}\t{1}\t\t{2}({3:2d}\'{4:2d}\")\t{5:2d}h{6:2d}m\t\t{7}\t{8}\t{9:10.1f}\t{10:9.1f}\t{11}'.format(clients[i]['Alias'], clients[i]['NbrCommands'], lasttime, time_diff_mins, time_diff_secs, uptime_diff_hours, uptime_diff_mins , clients[i]['Version'], clients[i]['IsRoot'], clients[i]['RunCmdsxMin'], clients[i]['AvrCmdsxMin'], clients[i]['Status'])
print line
mlog.debug(line)
of.write(line+"\n")
print
last_show_time = datetime.datetime.now()
of.close()
except Exception as inst:
if verbose_level > 2:
msgline = 'Problem in show_info function'
mlog.error(msgline)
print msgline
msgline = type(inst)
mlog.error(msgline)
print msgline
msgline = inst.args
mlog.error(msgline)
print msgline
msgline = inst
mlog.error(msgline)
print msgline
def send_one_more_command(ourtransport,client_id):
# Extract the next command to send.
global nmap_command
global verbose_level
global mlog
global clients
global nmap_commands_file
global trace_file
try:
alias = clients[client_id]['Alias']
command_to_send = nmap_command.pop()
line = 'Data sent to client ID '+client_id+' ('+alias+')'
log.msg(line, logLevel=logging.INFO)
if verbose_level > 2:
print line
line= '\t'+command_to_send.strip('\n')
log.msg(line, logLevel=logging.INFO)
if verbose_level > 2:
print line
ourtransport.transport.write(command_to_send)
#remove the cmd from the pending job file and add to trace file
with open(nmap_commands_file, "r") as f:
jobs = f.readlines()
jobs.remove(command_to_send)
with open(nmap_commands_file, "w") as f:
f.writelines(jobs)
#add to tracefile
with open(trace_file, "a+") as f:
f.writelines(command_to_send)
clients[client_id]['NbrCommands'] += 1
clients[client_id]['LastCommand'] = command_to_send
clients[client_id]['Status'] = 'Executing'
except IndexError:
# If the list of commands is empty, look for new commands
line = 'No more commands in queue.'
log.msg(line, logLevel=logging.DEBUG)
if verbose_level > 2:
print line
line = '\tMaking the client '+str(client_id)+' ('+str(alias)+')'+' wait 10 secs for new commands to arrive...'
log.msg(line, logLevel=logging.DEBUG)
if verbose_level > 2:
print line
ourtransport.transport.write('Wait:10')
except Exception as inst:
print 'Problem in Send More Commands'
print type(inst)
print inst.args
print inst
def process_input_line(data,ourtransport,client_id):
global mlog
global verbose_level
global clients
global trace_file
global nmap_command
global nmap_output_coming_back
global nmap_output_file
global xml_output_file
global gnmap_output_file
global outputswitch
try:
# What to do. Send another command or store the nmap output?
if 'Starts the Client ID:' in data:
# No more nmap lines coming back
if nmap_output_coming_back:
nmap_output_coming_back = False
alias = data.split(':')[3].strip('\n').strip('\r').strip(' ')
try:
                client_version = data.split(':')[5].strip('\n').strip('\r').strip(' ')
                # the parsed field is a string, so compare against '0';
                # comparing against the int 0 always evaluated to 'True'
                client_isroot = 'False' if data.split(':')[7].strip('\n').strip('\r').strip(' ') == '0' else 'True'
except IndexError:
# It is an old version and it is not sending these data
client_version = '0.1?'
client_isroot = '?'
try:
# Do we have it yet?
value = clients[client_id]['Alias']
# Yes
except KeyError:
# No
clients[client_id] = {}
clients[client_id]['Alias'] = alias
clients[client_id]['FirstTime'] = datetime.datetime.now()
clients[client_id]['LastTime'] = datetime.datetime.now()
clients[client_id]['NbrCommands'] = 0
clients[client_id]['Status'] = 'Online'
clients[client_id]['LastCommand'] = ''
clients[client_id]['Version'] = client_version
clients[client_id]['IsRoot'] = client_isroot
clients[client_id]['RunCmdsxMin'] = 0
clients[client_id]['AvrCmdsxMin'] = 0
msgline = 'Client ID connected: {0} ({1})'.format(str(client_id),str(alias))
log.msg(msgline, logLevel=logging.INFO)
if verbose_level > 1:
print '+ '+msgline
elif 'Send more commands' in data:
alias = clients[client_id]['Alias']
clients[client_id]['Status'] = 'Online'
#nowtime = datetime.datetime.now().ctime()
nowtime = datetime.datetime.now()
clients[client_id]['LastTime'] = nowtime
# No more nmap lines coming back
if nmap_output_coming_back:
nmap_output_coming_back = False
send_one_more_command(ourtransport,client_id)
elif 'Nmap Output File' in data and not nmap_output_coming_back:
# Nmap output start to come back...
nmap_output_coming_back = True
outputswitch=0
alias = clients[client_id]['Alias']
clients[client_id]['Status'] = 'Online'
# compute the commands per hour
# 1 more command. Time is between lasttimeseen and now
time_since_cmd_start = datetime.datetime.now() - clients[client_id]['LastTime']
            # Cumulative average
prev_ca = clients[client_id]['AvrCmdsxMin']
#commandsXsec = ( time_since_cmd_start.total_seconds() + (clients[client_id]['NbrCommands'] * prev_ca) ) / ( clients[client_id]['NbrCommands'] + 1 )
#clients[client_id]['RunCmdsxMin'] = cmds_per_min = 60 / time_since_cmd_start.total_seconds()
clients[client_id]['RunCmdsxMin'] = 60 / ( time_since_cmd_start.seconds + ( time_since_cmd_start.microseconds / 1000000.0))
clients[client_id]['AvrCmdsxMin'] = ( clients[client_id]['RunCmdsxMin'] + (clients[client_id]['NbrCommands'] * prev_ca) ) / ( clients[client_id]['NbrCommands'] + 1 )
# update the lasttime
nowtime = datetime.datetime.now()
clients[client_id]['LastTime'] = nowtime
# Create the dir
os.system('mkdir %s/nmap_results > /dev/null 2>&1'%base_dir)
# Get the output file from the data
# We strip \n.
filename = data.split(':')[1].strip('\n')
xml_output_file = "%s/nmap_results/%s.xml"%(base_dir, filename)
nmap_output_file = "%s/nmap_results/%s.nmap"%(base_dir, filename)
gnmap_output_file = "%s/nmap_results/%s.gnmap"%(base_dir, filename)
if verbose_level > 2:
log.msg('\tNmap output file is: {0}'.format(nmap_output_file), logLevel=logging.DEBUG)
clientline = 'Client ID:'+client_id+':Alias:'+alias+"\n"
with open(nmap_output_file, 'a+') as f:
f.writelines(clientline)
with open(xml_output_file, 'a+') as f:
f.writelines(clientline)
with open(gnmap_output_file, 'a+') as f:
f.writelines(clientline)
elif nmap_output_coming_back and 'Nmap Output Finished' not in data:
# Store the output to a file.
alias = clients[client_id]['Alias']
clients[client_id]['Status'] = 'Storing'
#nowtime = datetime.datetime.now().ctime()
nowtime = datetime.datetime.now()
clients[client_id]['LastTime'] = nowtime
#print data
if "#XMLOUTPUT#" in data:
outputswitch=1
elif "#GNMAPOUTPUT#" in data:
outputswitch=2
else:
if outputswitch==0:
with open(nmap_output_file, 'a+') as f:
f.writelines(data+'\n')
elif outputswitch==1:
with open(xml_output_file, 'a+') as f:
f.writelines(data+'\n')
elif outputswitch==2:
with open(gnmap_output_file, 'a+') as f:
f.writelines(data+'\n')
log.msg('\tStoring nmap output for client {0} ({1}).'.format(client_id, alias), logLevel=logging.DEBUG)
elif 'Nmap Output Finished' in data and nmap_output_coming_back:
# Nmap output finished
nmap_output_coming_back = False
alias = clients[client_id]['Alias']
clients[client_id]['Status'] = 'Online'
#nowtime = datetime.datetime.now().ctime()
nowtime = datetime.datetime.now()
clients[client_id]['LastTime'] = nowtime
# Store the finished nmap command in the file, so we can retrieve it if we need...
finished_nmap_command = clients[client_id]['LastCommand']
clients[client_id]['LastCommand'] = ''
#clear out the trace file
with open(trace_file, 'r') as f:
running_jobs = f.readlines()
running_jobs.remove(finished_nmap_command)
with open(trace_file, 'w') as f:
f.writelines(running_jobs)
if verbose_level > 2:
print '+ Storing command {0} in trace file.'.format(finished_nmap_command.strip('\n').strip('\r'))
outputswitch=0
except Exception as inst:
print 'Problem in process input lines'
print type(inst)
print inst.args
print inst
class NmapServerProtocol(Protocol):
""" This is the function that communicates with the client """
global mlog
global verbose_level
global clients
global nmap_command
global mlog
def connectionMade(self):
if verbose_level > 0:
pass
def connectionLost(self, reason):
peerHost = self.transport.getPeer().host
peerPort = str(self.transport.getPeer().port)
client_id = peerHost+':'+peerPort
try:
alias = clients[client_id]['Alias']
except:
msgline = 'No client found in list with id {0}. Moving on...'.format(client_id)
log.msg(msgline, logLevel=logging.INFO)
return 0
clients[client_id]['Status'] = 'Offline'
command_to_redo = clients[client_id]['LastCommand']
if command_to_redo != '':
#readd to job file and queue
nmap_command.append(command_to_redo)
with open(nmap_commands_file, "a+") as f:
f.writelines(command_to_redo)
#clear out the trace file
with open(trace_file, 'r') as f:
running_jobs = f.readlines()
running_jobs.remove(command_to_redo)
with open(trace_file, 'w') as f:
f.writelines(running_jobs)
if verbose_level > 1:
msgline = 'Connection lost in the protocol. Reason:{0}'.format(reason)
msgline2 = '+ Connection lost for {0} ({1}).'.format(alias, client_id)
log.msg(msgline, logLevel=logging.DEBUG)
print msgline2
if verbose_level > 2:
                print 'Re-inserting command: {0}'.format(command_to_redo)
def dataReceived(self, newdata):
#global client_id
data = newdata.strip('\r').strip('\n').split('\r\n')
peerHost = self.transport.getPeer().host
peerPort = str(self.transport.getPeer().port)
client_id = peerHost+':'+peerPort
# If you need to debug
if verbose_level > 2:
            log.msg('Data received', logLevel=logging.DEBUG)
log.msg(data, logLevel=logging.DEBUG)
print '+ Data received: {0}'.format(data)
for line in data:
process_input_line(line,self,client_id)
def sql_import_loop():
global sql_file
global sql_conn
global mlog
tree=""
#Process all files in the nmap_results directory
path = "%s/nmap_results/"%base_dir
newpath="%s/nmap_results/processed/"%base_dir
    # create each directory only if it is missing; a single try/except
    # failed to create newpath when path already existed
    if not os.path.isdir(path):
        os.mkdir(path)
    if not os.path.isdir(newpath):
        os.mkdir(newpath)
output_files = os.listdir("%s/nmap_results/"%base_dir)
scan_id=""
for ofile in output_files:
complete=path+ofile
if os.path.isfile(complete):
if ofile.endswith(".xml"):
try:
scan_id=ofile.split(".xml")[0]
log.msg("XML File Found: %s"%scan_id, logLevel=logging.INFO)
#take off the first line first, then pass to parser
xmlf = open(complete, "r")
data = xmlf.read()
xmlf.close()
lines = data.split("\n")
log.msg("Importing %s.xml from: %s"%(scan_id,lines[0]), logLevel=logging.INFO)
xmldata = "".join(lines[1:])
tree = ET.fromstring(xmldata)
except:
log.msg(sys.exc_info()[0], logLevel=logging.DEBUG)
raise
os.rename(complete, newpath+ofile)
#connect the DB
sql_conn=sqlite3.connect(sql_file)
c = sql_conn.cursor()
if len(tree)>0:
#get info about the scan
s_version = tree.get("version")
s_summary=""
if not tree.find("runstats").find("finished") == None:
s_summary = tree.find("runstats").find("finished").get("summary")
i=(s_version, s_summary,True,scan_id,)
c.execute('UPDATE scans_scan SET version=?, summary=?, finished=? WHERE id=?', i)
sql_conn.commit()
sql_conn.close()
def process_nmap_commands(logger_name):
""" Main function. Here we set up the environment, factory and port """
global nmap_commands_file
global nmap_command
global port
global mlog
global verbose_level
global client_timeout
observer = log.PythonLoggingObserver(logger_name)
observer.start()
# Create the factory
factory = Factory()
factory.protocol = NmapServerProtocol
# Create the time based print
loop = task.LoopingCall(show_info)
loop.start(5)
# Create the time based file read
loop2 = task.LoopingCall(read_file_and_fill_nmap_variable)
loop2.start(1)
# To mark idle clients as on hold
loop3 = task.LoopingCall(timeout_idle_clients)
loop3.start(client_timeout) # call every client_timeout seconds
if sql_file != "":
loop4 = task.LoopingCall(sql_import_loop)
loop4.start(5)
# Create the reactor
reactor.listenSSL(port, factory, ServerContextFactory())
reactor.run()
def main():
global nmap_commands_file
global port
global log_file
global log_level
global mlog
global verbose_level
global start_time
global client_timeout
global sort_type
global pemfile
global cafile
global sql_file
global output_file
global trace_file
start_time = datetime.datetime.now()
try:
opts, args = getopt.getopt(sys.argv[1:], "f:l:L:p:P:c:s:t:v:S:o:", ["nmap-commands=","log-level=","log-file=","port=","pemfile=", "ca-file=","sort-type=","client-timeout=","verbose-level=", "sqlite-file=", "output-file="])
except getopt.GetoptError: usage()
for opt, arg in opts:
if opt in ("-f", "--nmap-commands"): nmap_commands_file=str(arg)
if opt in ("-p", "--port"): port=int(arg)
if opt in ("-l", "--log-level"): log_level=arg
if opt in ("-L", "--log-file"): log_file=arg
if opt in ("-v", "--verbose-level"): verbose_level=int(arg)
if opt in ("-t", "--client-timeout"): client_timeout=int(arg)
if opt in ("-s", "--sort-type"): sort_type=str(arg)
if opt in ("-P", "--pemfile"): pemfile=str(arg)
if opt in ("-c", "--ca-file"): cafile=str(arg)
if opt in ("-S", "--sqlite-file"): sql_file=str(arg)
if opt in ("-o", "--output-file"): output_file=str(arg)
print "Base Dir: %s"%base_dir
try:
# Verify that we have a pem file
try:
temp = os.stat(pemfile)
temp2 = os.stat(cafile)
except OSError:
print 'No pem or cert file given. Use -P or -c'
exit(-1)
if nmap_commands_file != '':
if verbose_level > 0:
version()
# Set up logger
# Set up a specific logger with our desired output level
logger_name = 'MyLogger'
mlog = logging.getLogger(logger_name)
# Set up the log level
numeric_level = getattr(logging, log_level.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % log_level)
mlog.setLevel(numeric_level)
# Add the log message handler to the logger
handler = logging.handlers.RotatingFileHandler(log_file, backupCount=5)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
mlog.addHandler(handler)
# End logger
# Get any leftover jobs and repopulate the job file and queue
trace_file = nmap_commands_file + '.dnmaptrace'
try:
with open(trace_file, 'r') as f:
leftover = f.readlines()
except IOError:
# No trace file yet (first run)
leftover = []
with open(nmap_commands_file, 'r') as f:
curjobs=f.readlines()
for ljob in leftover:
if ljob not in curjobs:
with open(nmap_commands_file, 'a+') as f:
f.writelines(ljob)
#clear trace file
with open(trace_file,'w') as f:
f.write("")
# First fill the variable from the file
read_file_and_fill_nmap_variable()
# Start processing clients
process_nmap_commands(logger_name)
else:
usage()
except KeyboardInterrupt:
# CTRL-C pretty handling.
print "Keyboard Interruption!. Exiting."
sys.exit(1)
if __name__ == '__main__':
main()
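# ---------------------------------------------------------------------------
# Illustrative usage sketch (assumptions, not part of the original script):
# the server hands out one nmap command per line from the -f file to each
# connected client, and re-queues commands whose client disconnects. The
# file names, port and nmap arguments below are invented for the example;
# only the flag names come from the getopt spec in main().
#
#   $ cat commands.txt
#   nmap -sS -p 1-1024 -oA scan1 192.168.1.0/24
#   nmap -sV -p 80,443 -oA scan2 10.0.0.0/24
#
#   $ python dnmap_server.py -f commands.txt -P server.pem -c ca.crt \
#         -p 46001 -l info -L server.log
# ---------------------------------------------------------------------------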
|
gpl-2.0
| 8,240,667,989,885,825,000
| 29.995221
| 363
| 0.653394
| false
| 3.108435
| false
| false
| false
|
versusvoid/ugly-nlp-magic
|
learning.py
|
1
|
1270
|
#!/usr/bin/env python
parts = {}
partition_types = {}
max_len = 4
import sys
filename = 'partitions'
if len(sys.argv) > 1:
filename = sys.argv[1]
α = 1.0
partitions_count = α
f = open(filename)
for l in f:
partition_parts = l.strip().split()
for i, part in enumerate(partition_parts):
if '_' not in part:
print(l, part)
exit(1)
type, value = part.split('_')
value = value.replace('ь', '').replace('ъ', '')
if value == '': continue
parts.setdefault(value, set()).add(type)
if type != 'корень':
max_len = max(len(value), max_len)
else:
partition_parts[i] = type + '_'
partition_parts = tuple(partition_parts)
partition_types[partition_parts] = partition_types.get(partition_parts, 0.0) + 1.0
partitions_count += 1
f.close()
partitions_count += (len(partition_types) + 1)*α
for partition, count in partition_types.items():
partition_types[partition] = (count + α) / partitions_count
partition_types['__unknown__'] = α / partitions_count
import pickle
f = open('morfology.pick', 'wb')
pickle.dump((parts, partition_types, max_len), f)
f.close()
if '' in parts:
print('error: an empty part survived filtering')
exit(100500)
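# Consumer sketch (an assumption about downstream use, not part of this
# script): whatever loads 'morfology.pick' gets back the same triple that
# is dumped above -- the part->types mapping, the smoothed partition-type
# probabilities and the maximum non-root part length.
#
#   import pickle
#   with open('morfology.pick', 'rb') as f:
#       parts, partition_types, max_len = pickle.load(f)
#   print(partition_types['__unknown__'])  # mass reserved for unseen partitions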
|
apache-2.0
| -7,207,247,101,435,700,000
| 24.14
| 86
| 0.603023
| false
| 3.273438
| false
| false
| false
|
projectshift/shift-schema
|
shiftschema/validators/multichoice.py
|
1
|
1718
|
from shiftschema.validators.abstract_validator import AbstractValidator
from shiftschema.result import Error
from shiftschema.exceptions import InvalidOption
class MultiChoice(AbstractValidator):
"""
MultiChoice validator
Accepts a list of values and checks if every item is a valid choice.
"""
invalid_multichoice = '%invalid_multichoice%'
def __init__(self, valid_choices=None, message=None):
"""
Initialize validator
Accepts an iterable of valid choices to check against.
:param valid_choices: iterable, valid choices to check against
:param message: str, custom error message
:return: None
"""
if message is not None:
self.invalid_multichoice = message
try:
iter(valid_choices)
except TypeError:
raise InvalidOption('Choices must be an iterable')
self.choices = valid_choices
def validate(self, value, model=None, context=None):
"""
Validate
Perform value validation against validation settings and return
error object.
:param value: list, value to check
:param model: parent model being validated
:param context: object or None, validation context
:return: shiftschema.result.Error
"""
invalid = [item for item in value if item not in self.choices]
if invalid:
return Error(
self.invalid_multichoice,
dict(items=', '.join(invalid))
)
# success otherwise
return Error()
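# Usage sketch (illustrative only; the choices and values are made up, and
# it assumes shiftschema's Error evaluates falsy when it carries no error):
#
#   validator = MultiChoice(valid_choices=['red', 'green', 'blue'])
#   error = validator.validate(['red', 'pink', 'teal'])
#   if error:
#       # the error kwargs carry the offending items as 'pink, teal'
#       handle(error)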
|
mit
| -8,644,545,313,967,999,000
| 30.236364
| 72
| 0.592549
| false
| 5.038123
| false
| false
| false
|
hackers-terabit/portage
|
pym/portage/tests/resolver/ResolverPlayground.py
|
1
|
27366
|
# Copyright 2010-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from itertools import permutations
import fnmatch
import sys
import tempfile
import portage
from portage import os
from portage import shutil
from portage.const import (GLOBAL_CONFIG_PATH, PORTAGE_BASE_PATH,
USER_CONFIG_PATH)
from portage.dep import Atom, _repo_separator
from portage.package.ebuild.config import config
from portage.package.ebuild.digestgen import digestgen
from portage._sets import load_default_config
from portage._sets.base import InternalPackageSet
from portage.tests import cnf_path
from portage.util import ensure_dirs, normalize_path
from portage.versions import catsplit
import _emerge
from _emerge.actions import calc_depclean
from _emerge.Blocker import Blocker
from _emerge.create_depgraph_params import create_depgraph_params
from _emerge.depgraph import backtrack_depgraph
from _emerge.RootConfig import RootConfig
if sys.hexversion >= 0x3000000:
# pylint: disable=W0622
basestring = str
class ResolverPlayground(object):
"""
This class helps to create the necessary files on disk and
the needed settings instances, etc. for the resolver to do
its work.
"""
config_files = frozenset(("eapi", "layout.conf", "make.conf", "package.accept_keywords",
"package.keywords", "package.license", "package.mask", "package.properties",
"package.provided", "packages",
"package.unmask", "package.use", "package.use.aliases", "package.use.stable.mask",
"soname.provided",
"unpack_dependencies", "use.aliases", "use.force", "use.mask", "layout.conf"))
metadata_xml_template = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
<pkgmetadata>
<maintainer type="person">
<email>maintainer-needed@gentoo.org</email>
<description>Description of the maintainership</description>
</maintainer>
<longdescription>Long description of the package</longdescription>
<use>
%(flags)s
</use>
</pkgmetadata>
"""
def __init__(self, ebuilds={}, binpkgs={}, installed={}, profile={}, repo_configs={}, \
user_config={}, sets={}, world=[], world_sets=[], distfiles={},
eprefix=None, targetroot=False, debug=False):
"""
ebuilds: cpv -> metadata mapping simulating available ebuilds.
installed: cpv -> metadata mapping simulating installed packages.
If a metadata key is missing, it gets a default value.
profile: settings defined by the profile.
"""
self.debug = debug
if eprefix is None:
self.eprefix = normalize_path(tempfile.mkdtemp())
else:
self.eprefix = normalize_path(eprefix)
# Tests may override portage.const.EPREFIX in order to
# simulate a prefix installation. It's reasonable to do
# this because tests should be self-contained such that
# the "real" value of portage.const.EPREFIX is entirely
# irrelevant (see bug #492932).
portage.const.EPREFIX = self.eprefix.rstrip(os.sep)
self.eroot = self.eprefix + os.sep
if targetroot:
self.target_root = os.path.join(self.eroot, 'target_root')
else:
self.target_root = os.sep
self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
self.pkgdir = os.path.join(self.eprefix, "pkgdir")
self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
os.makedirs(self.vdbdir)
if not debug:
portage.util.noiselimit = -2
self._repositories = {}
#Make sure the main repo is always created
self._get_repo_dir("test_repo")
self._create_distfiles(distfiles)
self._create_ebuilds(ebuilds)
self._create_binpkgs(binpkgs)
self._create_installed(installed)
self._create_profile(ebuilds, installed, profile, repo_configs, user_config, sets)
self._create_world(world, world_sets)
self.settings, self.trees = self._load_config()
self._create_ebuild_manifests(ebuilds)
portage.util.noiselimit = 0
def reload_config(self):
"""
Reload configuration from disk, which is useful if it has
been modified after the constructor has been called.
"""
for eroot in self.trees:
portdb = self.trees[eroot]["porttree"].dbapi
portdb.close_caches()
self.settings, self.trees = self._load_config()
def _get_repo_dir(self, repo):
"""
Create the repo directory if needed.
"""
if repo not in self._repositories:
if repo == "test_repo":
self._repositories["DEFAULT"] = {"main-repo": repo}
repo_path = os.path.join(self.eroot, "var", "repositories", repo)
self._repositories[repo] = {"location": repo_path}
profile_path = os.path.join(repo_path, "profiles")
try:
os.makedirs(profile_path)
except os.error:
pass
repo_name_file = os.path.join(profile_path, "repo_name")
with open(repo_name_file, "w") as f:
f.write("%s\n" % repo)
return self._repositories[repo]["location"]
def _create_distfiles(self, distfiles):
os.makedirs(self.distdir)
for k, v in distfiles.items():
with open(os.path.join(self.distdir, k), 'wb') as f:
f.write(v)
def _create_ebuilds(self, ebuilds):
for cpv in ebuilds:
a = Atom("=" + cpv, allow_repo=True)
repo = a.repo
if repo is None:
repo = "test_repo"
metadata = ebuilds[cpv].copy()
copyright_header = metadata.pop("COPYRIGHT_HEADER", None)
eapi = metadata.pop("EAPI", "0")
misc_content = metadata.pop("MISC_CONTENT", None)
metadata.setdefault("DEPEND", "")
metadata.setdefault("SLOT", "0")
metadata.setdefault("KEYWORDS", "x86")
metadata.setdefault("IUSE", "")
unknown_keys = set(metadata).difference(
portage.dbapi.dbapi._known_keys)
if unknown_keys:
raise ValueError("metadata of ebuild '%s' contains unknown keys: %s" %
(cpv, sorted(unknown_keys)))
repo_dir = self._get_repo_dir(repo)
ebuild_dir = os.path.join(repo_dir, a.cp)
ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
try:
os.makedirs(ebuild_dir)
except os.error:
pass
with open(ebuild_path, "w") as f:
if copyright_header is not None:
f.write(copyright_header)
f.write('EAPI="%s"\n' % eapi)
for k, v in metadata.items():
f.write('%s="%s"\n' % (k, v))
if misc_content is not None:
f.write(misc_content)
def _create_ebuild_manifests(self, ebuilds):
tmpsettings = config(clone=self.settings)
tmpsettings['PORTAGE_QUIET'] = '1'
for cpv in ebuilds:
a = Atom("=" + cpv, allow_repo=True)
repo = a.repo
if repo is None:
repo = "test_repo"
repo_dir = self._get_repo_dir(repo)
ebuild_dir = os.path.join(repo_dir, a.cp)
ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
portdb = self.trees[self.eroot]["porttree"].dbapi
tmpsettings['O'] = ebuild_dir
if not digestgen(mysettings=tmpsettings, myportdb=portdb):
raise AssertionError('digest creation failed for %s' % ebuild_path)
def _create_binpkgs(self, binpkgs):
# When using BUILD_ID, there can be multiple instances for the
# same cpv. Therefore, binpkgs may be an iterable instead of
# a dict.
items = getattr(binpkgs, 'items', None)
items = items() if items is not None else binpkgs
for cpv, metadata in items:
a = Atom("=" + cpv, allow_repo=True)
repo = a.repo
if repo is None:
repo = "test_repo"
pn = catsplit(a.cp)[1]
cat, pf = catsplit(a.cpv)
metadata = metadata.copy()
metadata.setdefault("SLOT", "0")
metadata.setdefault("KEYWORDS", "x86")
metadata.setdefault("BUILD_TIME", "0")
metadata["repository"] = repo
metadata["CATEGORY"] = cat
metadata["PF"] = pf
repo_dir = self.pkgdir
category_dir = os.path.join(repo_dir, cat)
if "BUILD_ID" in metadata:
binpkg_path = os.path.join(category_dir, pn,
"%s-%s.xpak"% (pf, metadata["BUILD_ID"]))
else:
binpkg_path = os.path.join(category_dir, pf + ".tbz2")
ensure_dirs(os.path.dirname(binpkg_path))
t = portage.xpak.tbz2(binpkg_path)
t.recompose_mem(portage.xpak.xpak_mem(metadata))
def _create_installed(self, installed):
for cpv in installed:
a = Atom("=" + cpv, allow_repo=True)
repo = a.repo
if repo is None:
repo = "test_repo"
vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
try:
os.makedirs(vdb_pkg_dir)
except os.error:
pass
metadata = installed[cpv].copy()
metadata.setdefault("SLOT", "0")
metadata.setdefault("BUILD_TIME", "0")
metadata.setdefault("COUNTER", "0")
metadata.setdefault("KEYWORDS", "~x86")
unknown_keys = set(metadata).difference(
portage.dbapi.dbapi._known_keys)
unknown_keys.discard("BUILD_TIME")
unknown_keys.discard("BUILD_ID")
unknown_keys.discard("COUNTER")
unknown_keys.discard("repository")
unknown_keys.discard("USE")
unknown_keys.discard("PROVIDES")
unknown_keys.discard("REQUIRES")
if unknown_keys:
raise ValueError("metadata of installed '%s' contains unknown keys: %s" %
(cpv, sorted(unknown_keys)))
metadata["repository"] = repo
for k, v in metadata.items():
with open(os.path.join(vdb_pkg_dir, k), "w") as f:
f.write("%s\n" % v)
def _create_profile(self, ebuilds, installed, profile, repo_configs, user_config, sets):
user_config_dir = os.path.join(self.eroot, USER_CONFIG_PATH)
try:
os.makedirs(user_config_dir)
except os.error:
pass
for repo in self._repositories:
if repo == "DEFAULT":
continue
repo_dir = self._get_repo_dir(repo)
profile_dir = os.path.join(repo_dir, "profiles")
metadata_dir = os.path.join(repo_dir, "metadata")
os.makedirs(metadata_dir)
#Create $REPO/profiles/categories
categories = set()
for cpv in ebuilds:
ebuilds_repo = Atom("="+cpv, allow_repo=True).repo
if ebuilds_repo is None:
ebuilds_repo = "test_repo"
if ebuilds_repo == repo:
categories.add(catsplit(cpv)[0])
categories_file = os.path.join(profile_dir, "categories")
with open(categories_file, "w") as f:
for cat in categories:
f.write(cat + "\n")
#Create $REPO/profiles/license_groups
license_file = os.path.join(profile_dir, "license_groups")
with open(license_file, "w") as f:
f.write("EULA TEST\n")
repo_config = repo_configs.get(repo)
if repo_config:
for config_file, lines in repo_config.items():
if config_file not in self.config_files and not any(fnmatch.fnmatch(config_file, os.path.join(x, "*")) for x in self.config_files):
raise ValueError("Unknown config file: '%s'" % config_file)
if config_file in ("layout.conf",):
file_name = os.path.join(repo_dir, "metadata", config_file)
else:
file_name = os.path.join(profile_dir, config_file)
if "/" in config_file and not os.path.isdir(os.path.dirname(file_name)):
os.makedirs(os.path.dirname(file_name))
with open(file_name, "w") as f:
for line in lines:
f.write("%s\n" % line)
# Temporarily write empty value of masters until it becomes default.
# TODO: Delete all references to "# use implicit masters" when empty value becomes default.
if config_file == "layout.conf" and not any(line.startswith(("masters =", "# use implicit masters")) for line in lines):
f.write("masters =\n")
#Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there)
os.makedirs(os.path.join(repo_dir, "eclass"))
# Temporarily write empty value of masters until it becomes default.
if not repo_config or "layout.conf" not in repo_config:
layout_conf_path = os.path.join(repo_dir, "metadata", "layout.conf")
with open(layout_conf_path, "w") as f:
f.write("masters =\n")
if repo == "test_repo":
#Create a minimal profile in /usr/portage
sub_profile_dir = os.path.join(profile_dir, "default", "linux", "x86", "test_profile")
os.makedirs(sub_profile_dir)
if not (profile and "eapi" in profile):
eapi_file = os.path.join(sub_profile_dir, "eapi")
with open(eapi_file, "w") as f:
f.write("0\n")
make_defaults_file = os.path.join(sub_profile_dir, "make.defaults")
with open(make_defaults_file, "w") as f:
f.write("ARCH=\"x86\"\n")
f.write("ACCEPT_KEYWORDS=\"x86\"\n")
use_force_file = os.path.join(sub_profile_dir, "use.force")
with open(use_force_file, "w") as f:
f.write("x86\n")
parent_file = os.path.join(sub_profile_dir, "parent")
with open(parent_file, "w") as f:
f.write("..\n")
if profile:
for config_file, lines in profile.items():
if config_file not in self.config_files:
raise ValueError("Unknown config file: '%s'" % config_file)
file_name = os.path.join(sub_profile_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
f.write("%s\n" % line)
#Create profile symlink
os.symlink(sub_profile_dir, os.path.join(user_config_dir, "make.profile"))
make_conf = {
"ACCEPT_KEYWORDS": "x86",
"CLEAN_DELAY": "0",
"DISTDIR" : self.distdir,
"EMERGE_WARNING_DELAY": "0",
"PKGDIR": self.pkgdir,
"PORTAGE_INST_GID": str(portage.data.portage_gid),
"PORTAGE_INST_UID": str(portage.data.portage_uid),
"PORTAGE_TMPDIR": os.path.join(self.eroot, 'var/tmp'),
}
if os.environ.get("NOCOLOR"):
make_conf["NOCOLOR"] = os.environ["NOCOLOR"]
# Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
# need to be inherited by ebuild subprocesses.
if 'PORTAGE_USERNAME' in os.environ:
make_conf['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME']
if 'PORTAGE_GRPNAME' in os.environ:
make_conf['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME']
make_conf_lines = []
for k_v in make_conf.items():
make_conf_lines.append('%s="%s"' % k_v)
if "make.conf" in user_config:
make_conf_lines.extend(user_config["make.conf"])
if not portage.process.sandbox_capable or \
os.environ.get("SANDBOX_ON") == "1":
# avoid problems from nested sandbox instances
make_conf_lines.append('FEATURES="${FEATURES} -sandbox -usersandbox"')
configs = user_config.copy()
configs["make.conf"] = make_conf_lines
for config_file, lines in configs.items():
if config_file not in self.config_files:
raise ValueError("Unknown config file: '%s'" % config_file)
file_name = os.path.join(user_config_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
f.write("%s\n" % line)
#Create /usr/share/portage/config/make.globals
make_globals_path = os.path.join(self.eroot,
GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals")
ensure_dirs(os.path.dirname(make_globals_path))
os.symlink(os.path.join(cnf_path, "make.globals"),
make_globals_path)
#Create /usr/share/portage/config/sets/portage.conf
default_sets_conf_dir = os.path.join(self.eroot, "usr/share/portage/config/sets")
try:
os.makedirs(default_sets_conf_dir)
except os.error:
pass
provided_sets_portage_conf = (
os.path.join(cnf_path, "sets", "portage.conf"))
os.symlink(provided_sets_portage_conf, os.path.join(default_sets_conf_dir, "portage.conf"))
set_config_dir = os.path.join(user_config_dir, "sets")
try:
os.makedirs(set_config_dir)
except os.error:
pass
for sets_file, lines in sets.items():
file_name = os.path.join(set_config_dir, sets_file)
with open(file_name, "w") as f:
for line in lines:
f.write("%s\n" % line)
def _create_world(self, world, world_sets):
#Create /var/lib/portage/world
var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage")
os.makedirs(var_lib_portage)
world_file = os.path.join(var_lib_portage, "world")
world_set_file = os.path.join(var_lib_portage, "world_sets")
with open(world_file, "w") as f:
for atom in world:
f.write("%s\n" % atom)
with open(world_set_file, "w") as f:
for atom in world_sets:
f.write("%s\n" % atom)
def _load_config(self):
create_trees_kwargs = {}
if self.target_root != os.sep:
create_trees_kwargs["target_root"] = self.target_root
env = {
"PORTAGE_REPOSITORIES": "\n".join("[%s]\n%s" % (repo_name, "\n".join("%s = %s" % (k, v) for k, v in repo_config.items())) for repo_name, repo_config in self._repositories.items())
}
trees = portage.create_trees(env=env, eprefix=self.eprefix,
**create_trees_kwargs)
for root, root_trees in trees.items():
settings = root_trees["vartree"].settings
settings._init_dirs()
setconfig = load_default_config(settings, root_trees)
root_trees["root_config"] = RootConfig(settings, root_trees, setconfig)
return trees[trees._target_eroot]["vartree"].settings, trees
def run(self, atoms, options={}, action=None):
options = options.copy()
options["--pretend"] = True
if self.debug:
options["--debug"] = True
if action is None:
if options.get("--depclean"):
action = "depclean"
elif options.get("--prune"):
action = "prune"
if "--usepkgonly" in options:
options["--usepkg"] = True
global_noiselimit = portage.util.noiselimit
global_emergelog_disable = _emerge.emergelog._disable
try:
if not self.debug:
portage.util.noiselimit = -2
_emerge.emergelog._disable = True
if action in ("depclean", "prune"):
rval, cleanlist, ordered, req_pkg_count = \
calc_depclean(self.settings, self.trees, None,
options, action, InternalPackageSet(initial_atoms=atoms, allow_wildcard=True), None)
result = ResolverPlaygroundDepcleanResult(
atoms, rval, cleanlist, ordered, req_pkg_count)
else:
params = create_depgraph_params(options, action)
success, depgraph, favorites = backtrack_depgraph(
self.settings, self.trees, options, params, action, atoms, None)
depgraph._show_merge_list()
depgraph.display_problems()
result = ResolverPlaygroundResult(atoms, success, depgraph, favorites)
finally:
portage.util.noiselimit = global_noiselimit
_emerge.emergelog._disable = global_emergelog_disable
return result
def run_TestCase(self, test_case):
if not isinstance(test_case, ResolverPlaygroundTestCase):
raise TypeError("ResolverPlayground needs a ResolverPlaygroundTestCase")
for atoms in test_case.requests:
result = self.run(atoms, test_case.options, test_case.action)
if not test_case.compare_with_result(result):
return
def cleanup(self):
for eroot in self.trees:
portdb = self.trees[eroot]["porttree"].dbapi
portdb.close_caches()
if self.debug:
print("\nEROOT=%s" % self.eroot)
else:
shutil.rmtree(self.eroot)
class ResolverPlaygroundTestCase(object):
def __init__(self, request, **kwargs):
self.all_permutations = kwargs.pop("all_permutations", False)
self.ignore_mergelist_order = kwargs.pop("ignore_mergelist_order", False)
self.ambiguous_merge_order = kwargs.pop("ambiguous_merge_order", False)
self.ambiguous_slot_collision_solutions = kwargs.pop("ambiguous_slot_collision_solutions", False)
self.check_repo_names = kwargs.pop("check_repo_names", False)
self.merge_order_assertions = kwargs.pop("merge_order_assertions", False)
if self.all_permutations:
self.requests = list(permutations(request))
else:
self.requests = [request]
self.options = kwargs.pop("options", {})
self.action = kwargs.pop("action", None)
self.test_success = True
self.fail_msg = None
self._checks = kwargs.copy()
def compare_with_result(self, result):
checks = dict.fromkeys(result.checks)
for key, value in self._checks.items():
if key not in checks:
raise KeyError("Not an available check: '%s'" % key)
checks[key] = value
fail_msgs = []
for key, value in checks.items():
got = getattr(result, key)
expected = value
if key in result.optional_checks and expected is None:
continue
if key == "mergelist":
if not self.check_repo_names:
#Strip repo names if we don't check them
if got:
new_got = []
for cpv in got:
if cpv[:1] == "!":
new_got.append(cpv)
continue
new_got.append(cpv.split(_repo_separator)[0])
got = new_got
if expected:
new_expected = []
for obj in expected:
if isinstance(obj, basestring):
if obj[:1] == "!":
new_expected.append(obj)
continue
new_expected.append(
obj.split(_repo_separator)[0])
continue
new_expected.append(set())
for cpv in obj:
if cpv[:1] != "!":
cpv = cpv.split(_repo_separator)[0]
new_expected[-1].add(cpv)
expected = new_expected
if self.ignore_mergelist_order and got is not None:
got = set(got)
expected = set(expected)
if self.ambiguous_merge_order and got:
expected_stack = list(reversed(expected))
got_stack = list(reversed(got))
new_expected = []
match = True
while got_stack and expected_stack:
got_token = got_stack.pop()
expected_obj = expected_stack.pop()
if isinstance(expected_obj, basestring):
new_expected.append(expected_obj)
if got_token == expected_obj:
continue
# result doesn't match, so stop early
match = False
break
expected_obj = set(expected_obj)
try:
expected_obj.remove(got_token)
except KeyError:
# result doesn't match, so stop early
match = False
break
new_expected.append(got_token)
while got_stack and expected_obj:
got_token = got_stack.pop()
try:
expected_obj.remove(got_token)
except KeyError:
match = False
break
new_expected.append(got_token)
if not match:
# result doesn't match, so stop early
break
if expected_obj:
# result does not match, so stop early
match = False
new_expected.append(tuple(expected_obj))
break
if expected_stack:
# result does not match, add leftovers to new_expected
match = False
expected_stack.reverse()
new_expected.extend(expected_stack)
expected = new_expected
if match and self.merge_order_assertions:
for node1, node2 in self.merge_order_assertions:
if not (got.index(node1) < got.index(node2)):
fail_msgs.append("atoms: (" + \
", ".join(result.atoms) + "), key: " + \
("merge_order_assertions, expected: %s" % \
str((node1, node2))) + \
", got: " + str(got))
elif key == "slot_collision_solutions" and \
self.ambiguous_slot_collision_solutions:
# Tests that use all_permutations can have multiple
# outcomes here.
for x in expected:
if x == got:
expected = x
break
elif key in ("unstable_keywords", "needed_p_mask_changes",
"unsatisfied_deps", "required_use_unsatisfied") and \
expected is not None:
expected = set(expected)
elif key == "forced_rebuilds" and expected is not None:
expected = dict((k, set(v)) for k, v in expected.items())
if got != expected:
fail_msgs.append("atoms: (" + ", ".join(result.atoms) + "), key: " + \
key + ", expected: " + str(expected) + ", got: " + str(got))
if fail_msgs:
self.test_success = False
self.fail_msg = "\n".join(fail_msgs)
return False
return True
class ResolverPlaygroundResult(object):
checks = (
"success", "mergelist", "use_changes", "license_changes",
"unstable_keywords", "slot_collision_solutions",
"circular_dependency_solutions", "needed_p_mask_changes",
"unsatisfied_deps", "forced_rebuilds", "required_use_unsatisfied"
)
optional_checks = (
"forced_rebuilds",
"required_use_unsatisfied",
"unsatisfied_deps"
)
def __init__(self, atoms, success, mydepgraph, favorites):
self.atoms = atoms
self.success = success
self.depgraph = mydepgraph
self.favorites = favorites
self.mergelist = None
self.use_changes = None
self.license_changes = None
self.unstable_keywords = None
self.needed_p_mask_changes = None
self.slot_collision_solutions = None
self.circular_dependency_solutions = None
self.unsatisfied_deps = frozenset()
self.forced_rebuilds = None
self.required_use_unsatisfied = None
if self.depgraph._dynamic_config._serialized_tasks_cache is not None:
self.mergelist = []
host_root = self.depgraph._frozen_config._running_root.root
for x in self.depgraph._dynamic_config._serialized_tasks_cache:
if isinstance(x, Blocker):
self.mergelist.append(x.atom)
else:
repo_str = ""
if x.repo != "test_repo":
repo_str = _repo_separator + x.repo
build_id_str = ""
if (x.type_name == "binary" and
x.cpv.build_id is not None):
build_id_str = "-%s" % x.cpv.build_id
mergelist_str = x.cpv + build_id_str + repo_str
if x.built:
if x.operation == "merge":
desc = x.type_name
else:
desc = x.operation
mergelist_str = "[%s]%s" % (desc, mergelist_str)
if x.root != host_root:
mergelist_str += "{targetroot}"
self.mergelist.append(mergelist_str)
if self.depgraph._dynamic_config._needed_use_config_changes:
self.use_changes = {}
for pkg, needed_use_config_changes in \
self.depgraph._dynamic_config._needed_use_config_changes.items():
new_use, changes = needed_use_config_changes
self.use_changes[pkg.cpv] = changes
if self.depgraph._dynamic_config._needed_unstable_keywords:
self.unstable_keywords = set()
for pkg in self.depgraph._dynamic_config._needed_unstable_keywords:
self.unstable_keywords.add(pkg.cpv)
if self.depgraph._dynamic_config._needed_p_mask_changes:
self.needed_p_mask_changes = set()
for pkg in self.depgraph._dynamic_config._needed_p_mask_changes:
self.needed_p_mask_changes.add(pkg.cpv)
if self.depgraph._dynamic_config._needed_license_changes:
self.license_changes = {}
for pkg, missing_licenses in self.depgraph._dynamic_config._needed_license_changes.items():
self.license_changes[pkg.cpv] = missing_licenses
if self.depgraph._dynamic_config._slot_conflict_handler is not None:
self.slot_collision_solutions = []
handler = self.depgraph._dynamic_config._slot_conflict_handler
for change in handler.changes:
new_change = {}
for pkg in change:
new_change[pkg.cpv] = change[pkg]
self.slot_collision_solutions.append(new_change)
if self.depgraph._dynamic_config._circular_dependency_handler is not None:
handler = self.depgraph._dynamic_config._circular_dependency_handler
sol = handler.solutions
self.circular_dependency_solutions = dict(zip([x.cpv for x in sol.keys()], sol.values()))
if self.depgraph._dynamic_config._unsatisfied_deps_for_display:
self.unsatisfied_deps = set(dep_info[0][1]
for dep_info in self.depgraph._dynamic_config._unsatisfied_deps_for_display)
if self.depgraph._forced_rebuilds:
self.forced_rebuilds = dict(
(child.cpv, set(parent.cpv for parent in parents))
for child_dict in self.depgraph._forced_rebuilds.values()
for child, parents in child_dict.items())
required_use_unsatisfied = []
for pargs, kwargs in \
self.depgraph._dynamic_config._unsatisfied_deps_for_display:
if "show_req_use" in kwargs:
required_use_unsatisfied.append(pargs[1])
if required_use_unsatisfied:
self.required_use_unsatisfied = set(required_use_unsatisfied)
class ResolverPlaygroundDepcleanResult(object):
checks = (
"success", "cleanlist", "ordered", "req_pkg_count",
)
optional_checks = (
"ordered", "req_pkg_count",
)
def __init__(self, atoms, rval, cleanlist, ordered, req_pkg_count):
self.atoms = atoms
self.success = rval == 0
self.cleanlist = cleanlist
self.ordered = ordered
self.req_pkg_count = req_pkg_count
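# Typical usage sketch, mirroring how the portage test suite drives these
# classes (the package names below are invented for the example):
#
#   playground = ResolverPlayground(
#       ebuilds={"dev-libs/A-1": {"EAPI": "5"}})
#   try:
#       test_case = ResolverPlaygroundTestCase(
#           ["dev-libs/A"],
#           success=True,
#           mergelist=["dev-libs/A-1"])
#       playground.run_TestCase(test_case)
#       assert test_case.test_success, test_case.fail_msg
#   finally:
#       playground.cleanup()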
|
gpl-2.0
| 1,110,860,997,439,947,800
| 32.090689
| 182
| 0.670906
| false
| 3.034597
| true
| false
| false
|
awamper/draobpilc
|
draobpilc/processors/merger.py
|
1
|
8353
|
#!/usr/bin/env python3
# Copyright 2015 Ivan awamper@gmail.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
from gi.repository import Gtk
from gi.repository import GObject
from draobpilc import common
from draobpilc.processors.processor_textwindow import TextWindow
from draobpilc.widgets.items_processor_base import (
ItemsProcessorBase,
ItemsProcessorPriority
)
COUNTER_LABEL_TPL = (
'<span size="xx-large">%s</span>' % _('Merge <b>%i</b> items.')
)
COMBOBOX_NONE_STRING = 'Draobpilc.Merger.ComboBoxText.Id == None'
class Merger(ItemsProcessorBase):
__gsignals__ = {
'merge': (GObject.SIGNAL_RUN_FIRST, None, (object, bool))
}
def __init__(self):
super().__init__(_('Merge'), ItemsProcessorPriority.HIGHEST)
self._counter_label = Gtk.Label()
self._counter_label.set_markup(COUNTER_LABEL_TPL % 0)
self._counter_label.set_hexpand(True)
self._counter_label.set_vexpand(False)
self._counter_label.set_valign(Gtk.Align.CENTER)
self._counter_label.set_halign(Gtk.Align.CENTER)
self._decorator_label = Gtk.Label()
self._decorator_label.props.margin = ItemsProcessorBase.MARGIN
self._decorator_label.set_label(_('Decorator'))
self._decorator_combo = Gtk.ComboBoxText.new_with_entry()
self._decorator_combo.connect('changed', lambda c: self.update())
self._decorator_combo.props.margin = ItemsProcessorBase.MARGIN
self._separator_label = Gtk.Label()
self._separator_label.props.margin = ItemsProcessorBase.MARGIN
self._separator_label.set_label(_('Separator'))
self._separator_combo = Gtk.ComboBoxText.new_with_entry()
self._separator_combo.connect('changed', lambda c: self.update())
self._separator_combo.props.margin = ItemsProcessorBase.MARGIN
self._text_window = TextWindow()
self._text_window.textview.set_name('MergerTextView')
self._merge_btn = Gtk.Button()
self._merge_btn.set_label(_('Merge'))
self._merge_btn.connect(
'clicked',
lambda b: self.emit('merge', self.items, False)
)
self._merge_del_btn = Gtk.Button()
self._merge_del_btn.set_label(_('Merge & Delete'))
self._merge_del_btn.set_tooltip_text(
_('Merge and delete merged items')
)
self._merge_del_btn.connect(
'clicked',
lambda b: self.emit('merge', self.items, True)
)
self._reverse_order_btn = Gtk.CheckButton(_('Reverse order'))
self._reverse_order_btn.props.margin = ItemsProcessorBase.MARGIN
self._reverse_order_btn.set_active(False)
self._reverse_order_btn.connect('toggled', lambda b: self.update())
buttons_box = Gtk.ButtonBox()
buttons_box.set_layout(Gtk.ButtonBoxStyle.EXPAND)
buttons_box.props.margin = ItemsProcessorBase.MARGIN
buttons_box.add(self._merge_del_btn)
buttons_box.add(self._merge_btn)
self.grid.set_name('MergerBox')
self.grid.attach(self._counter_label, 0, 1, 2, 1)
self.grid.attach(self._decorator_label, 0, 2, 1, 1)
self.grid.attach(self._decorator_combo, 0, 3, 1, 1)
self.grid.attach(self._separator_label, 1, 2, 1, 1)
self.grid.attach(self._separator_combo, 1, 3, 1, 1)
self.grid.attach(self._text_window, 0, 4, 2, 1)
self.grid.attach(self._reverse_order_btn, 0, 5, 2, 1)
self.grid.attach(buttons_box, 0, 6, 2, 1)
common.SETTINGS.connect(
'changed::' + common.MERGE_DEFAULT_DECORATOR,
self._on_settings_changed
)
common.SETTINGS.connect(
'changed::' + common.MERGE_DEFAULT_SEPARATOR,
self._on_settings_changed
)
common.SETTINGS.connect(
'changed::' + common.MERGE_DECORATORS,
lambda s, k: self._update_merge_data()
)
common.SETTINGS.connect(
'changed::' + common.MERGE_SEPARATORS,
lambda s, k: self._update_merge_data()
)
self._update_merge_data()
def _on_settings_changed(self, settings, key):
if key == common.MERGE_DEFAULT_DECORATOR:
combo = self._decorator_combo
else:
combo = self._separator_combo
if not settings[key]:
combo.set_active_id(COMBOBOX_NONE_STRING)
else:
combo.set_active_id(settings[key])
def _update_merge_data(self):
self._decorator_combo.remove_all()
self._separator_combo.remove_all()
decorators = json.loads(common.SETTINGS[common.MERGE_DECORATORS])
decorators.append([_('None'), COMBOBOX_NONE_STRING])
for decorator in decorators:
self._decorator_combo.append(decorator[1], decorator[0])
default_decorator = common.SETTINGS[common.MERGE_DEFAULT_DECORATOR]
if not default_decorator:
self._decorator_combo.set_active_id(COMBOBOX_NONE_STRING)
else:
self._decorator_combo.set_active_id(default_decorator)
separators = json.loads(common.SETTINGS[common.MERGE_SEPARATORS])
separators.append([_('None'), COMBOBOX_NONE_STRING])
for separator in separators:
self._separator_combo.append(separator[1], separator[0])
default_separator = common.SETTINGS[common.MERGE_DEFAULT_SEPARATOR]
if not default_separator:
self._separator_combo.set_active_id(COMBOBOX_NONE_STRING)
else:
self._separator_combo.set_active_id(default_separator)
def _get_merged_text(self):
def get_decorator():
decorator = self._decorator_combo.get_active_id()
if decorator == COMBOBOX_NONE_STRING:
decorator = ''
elif not decorator:
decorator = self._decorator_combo.get_active_text()
try:
decorator = decorator.encode('utf8').decode('unicode-escape')
except UnicodeDecodeError:
pass
return decorator
def get_separator():
separator = self._separator_combo.get_active_id()
if separator == COMBOBOX_NONE_STRING:
separator = ''
elif not separator:
separator = self._separator_combo.get_active_text()
try:
separator = separator.encode('utf8').decode('unicode-escape')
except UnicodeDecodeError:
pass
return separator
result = ''
merge_items = self.items
if self._reverse_order_btn.get_active():
merge_items = list(reversed(merge_items))
for i, item in enumerate(merge_items):
decorator = get_decorator()
separator = get_separator()
result += decorator + item.raw + decorator
if i < len(merge_items) - 1: result += separator
return result
def update(self):
self._counter_label.set_markup(
COUNTER_LABEL_TPL % len(self.items)
)
if len(self.items) < 2:
self.buffer.set_text('')
else:
preview = self._get_merged_text()
self.buffer.set_text(preview)
def set_items(self, items):
super().set_items(items)
self.update()
def clear(self):
super().clear()
self._reverse_order_btn.set_active(False)
self._update_merge_data()
self.update()
def can_process(self, items):
if len(items) > 1:
return True
else:
return False
@property
def buffer(self):
return self._text_window.textview.props.buffer
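# Wiring sketch (the handler name is hypothetical): the widget emits 'merge'
# with the current items and a flag telling the receiver whether to delete
# the merged originals; the merged text itself is previewed via self.buffer.
#
#   def on_merge(merger, items, delete_after):
#       ...
#
#   merger = Merger()
#   merger.connect('merge', on_merge)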
|
gpl-3.0
| -8,984,910,460,628,982,000
| 33.804167
| 81
| 0.611517
| false
| 3.888734
| false
| false
| false
|
L0st1nC0d3/TextAdventure
|
room.py
|
1
|
2058
|
import json
import sqlite3
from tkinter import *
def get_room(i2d, dbname):
ret = None
con = sqlite3.connect(dbname)
for row in con.execute("select description from rooms where id=?", (i2d,)):
jsontext = row[0]
d = json.loads(jsontext)
d['id'] = i2d
ret = Room(**d)
break
con.close()
return ret
class Room:
def __init__(self, id=0, name='A Room', description='An empty room', neighbors=None, items=None, npc=None, npcis=None):
self.id = id
self.name = name
self.description = description
# Use None defaults to avoid sharing mutable default arguments
self.neighbors = neighbors if neighbors is not None else {}
self.items = items if items is not None else {}
self.npc = npc if npc is not None else {}
self.npcis = npcis if npcis is not None else {}
def _neighbor(self, direction):
if direction in self.neighbors:
return self.neighbors[direction]
else:
return None
def north(self):
return self._neighbor('n')
def south(self):
return self._neighbor('s')
def east(self):
return self._neighbor('e')
def west(self):
return self._neighbor('w')
def up(self):
return self._neighbor('up')
def dw(self):
return self._neighbor('dw')
def show_item(self, character, txar):
txar.insert(END, "\n")
for i in range(len(self.items)):
if (self.items[i] not in character.items) and (self.items[i] not in character.used):
txar.insert(END, "\t* %s" % self.items[i], 'color5')
txar.insert(END, "\n")
def show_keyitems(self, txar):
txar.insert(END, "\n")
for i in range(len(self.npc)):
txar.insert(END, "\t* %s" % self.npc[i], 'color5')
txar.insert(END, "\n")
def show_mechanics(self, txar):
txar.insert(END, "\n")
for i in range(len(self.npcis)):
txar.insert(END, "\t* %s" % self.npcis[i], 'color5')
txar.insert(END, "\n")
def give_item(self, item):
if item in self.items:
return item
else:
return None
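# Usage sketch (the database name and the room layout are invented): rooms
# are stored as JSON blobs in the 'rooms' table and rebuilt via get_room().
#
#   room = get_room(1, 'adventure.db')
#   if room is not None:
#       next_id = room.north()  # neighbor id for 'n', or None
#
# A matching row's JSON might look like:
#   {"name": "Hall", "description": "A dusty hall",
#    "neighbors": {"n": 2, "s": 3}, "items": [], "npc": [], "npcis": []}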
|
epl-1.0
| -5,393,833,993,856,727,000
| 20.4375
| 115
| 0.535471
| false
| 3.482234
| false
| false
| false
|
jsfan/requests-oauthlib
|
requests_oauthlib/oauth2_session.py
|
1
|
17215
|
from __future__ import unicode_literals
import logging
from oauthlib.common import generate_token, urldecode
from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError
from oauthlib.oauth2 import TokenExpiredError, is_secure_transport
import requests
log = logging.getLogger(__name__)
class TokenUpdated(Warning):
def __init__(self, token):
super(TokenUpdated, self).__init__()
self.token = token
class OAuth2Session(requests.Session):
"""Versatile OAuth 2 extension to :class:`requests.Session`.
Supports any grant type adhering to :class:`oauthlib.oauth2.Client` spec
including the four core OAuth 2 grants.
Can be used to create authorization urls, fetch tokens and access protected
resources using the :class:`requests.Session` interface you are used to.
- :class:`oauthlib.oauth2.WebApplicationClient` (default): Authorization Code Grant
- :class:`oauthlib.oauth2.MobileApplicationClient`: Implicit Grant
- :class:`oauthlib.oauth2.LegacyApplicationClient`: Password Credentials Grant
- :class:`oauthlib.oauth2.BackendApplicationClient`: Client Credentials Grant
Note that the only time you will be using Implicit Grant from python is if
you are driving a user agent able to obtain URL fragments.
"""
def __init__(self, client_id=None, client=None, auto_refresh_url=None,
auto_refresh_kwargs=None, scope=None, redirect_uri=None, token=None,
state=None, token_updater=None, **kwargs):
"""Construct a new OAuth 2 client session.
:param client_id: Client id obtained during registration
:param client: :class:`oauthlib.oauth2.Client` to be used. Default is
WebApplicationClient which is useful for any
hosted application but not mobile or desktop.
:param scope: List of scopes you wish to request access to
:param redirect_uri: Redirect URI you registered as callback
:param token: Token dictionary, must include access_token
and token_type.
:param state: State string used to prevent CSRF. This will be given
when creating the authorization url and must be supplied
when parsing the authorization response.
Can be either a string or a no argument callable.
:param auto_refresh_url: Refresh token endpoint URL, must be HTTPS. Supply
this if you wish the client to automatically refresh
your access tokens.
:param auto_refresh_kwargs: Extra arguments to pass to the refresh token
endpoint.
:param token_updater: Method with one argument, token, to be used to update
your token database on automatic token refresh. If not
set a TokenUpdated warning will be raised when a token
has been refreshed. This warning will carry the token
in its token argument.
:param kwargs: Arguments to pass to the Session constructor.
"""
super(OAuth2Session, self).__init__(**kwargs)
self._client = client or WebApplicationClient(client_id, token=token)
self.token = token or {}
self.scope = scope
self.redirect_uri = redirect_uri
self.state = state or generate_token
self._state = state
self.auto_refresh_url = auto_refresh_url
self.auto_refresh_kwargs = auto_refresh_kwargs or {}
self.token_updater = token_updater
# Allow customizations for non compliant providers through various
# hooks to adjust requests and responses.
self.compliance_hook = {
'access_token_response': set(),
'refresh_token_response': set(),
'protected_request': set(),
}
def new_state(self):
"""Generates a state string to be used in authorizations."""
try:
self._state = self.state()
log.debug('Generated new state %s.', self._state)
except TypeError:
self._state = self.state
log.debug('Re-using previously supplied state %s.', self._state)
return self._state
@property
def client_id(self):
return getattr(self._client, "client_id", None)
@client_id.setter
def client_id(self, value):
self._client.client_id = value
@client_id.deleter
def client_id(self):
del self._client.client_id
@property
def token(self):
return getattr(self._client, "token", None)
@token.setter
def token(self, value):
self._client.token = value
self._client.populate_token_attributes(value)
@property
def access_token(self):
return getattr(self._client, "access_token", None)
@access_token.setter
def access_token(self, value):
self._client.access_token = value
@access_token.deleter
def access_token(self):
del self._client.access_token
@property
def authorized(self):
"""Boolean that indicates whether this session has an OAuth token
or not. If `self.authorized` is True, you can reasonably expect
OAuth-protected requests to the resource to succeed. If
`self.authorized` is False, you need the user to go through the OAuth
authentication dance before OAuth-protected requests to the resource
will succeed.
"""
return bool(self.access_token)
def authorization_url(self, url, state=None, **kwargs):
"""Form an authorization URL.
:param url: Authorization endpoint url, must be HTTPS.
:param state: An optional state string for CSRF protection. If not
given it will be generated for you.
:param kwargs: Extra parameters to include.
:return: authorization_url, state
"""
state = state or self.new_state()
return self._client.prepare_request_uri(url,
redirect_uri=self.redirect_uri,
scope=self.scope,
state=state,
**kwargs), state
def fetch_token(self, token_url, code=None, authorization_response=None,
body='', auth=None, username=None, password=None, method='POST',
timeout=None, headers=None, verify=True, proxies=None, **kwargs):
"""Generic method for fetching an access token from the token endpoint.
If you are using the MobileApplicationClient you will want to use
token_from_fragment instead of fetch_token.
:param token_url: Token endpoint URL, must use HTTPS.
:param code: Authorization code (used by WebApplicationClients).
:param authorization_response: Authorization response URL, the callback
URL of the request back to you. Used by
WebApplicationClients instead of code.
:param body: Optional application/x-www-form-urlencoded body to
include in the token request. Prefer kwargs over body.
:param auth: An auth tuple or method as accepted by requests.
:param username: Username used by LegacyApplicationClients.
:param password: Password used by LegacyApplicationClients.
:param method: The HTTP method used to make the request. Defaults
to POST, but may also be GET. Other methods should
be added as needed.
:param headers: Dict to default request headers with.
:param timeout: Timeout of the request in seconds.
:param verify: Verify SSL certificate.
:param kwargs: Extra parameters to include in the token request.
:return: A token dict
"""
if not is_secure_transport(token_url):
raise InsecureTransportError()
if not code and authorization_response:
self._client.parse_request_uri_response(authorization_response,
state=self._state)
code = self._client.code
elif not code and isinstance(self._client, WebApplicationClient):
code = self._client.code
if not code:
raise ValueError('Please supply either code or '
'authorization_response parameters.')
body = self._client.prepare_request_body(code=code, body=body,
redirect_uri=self.redirect_uri, username=username,
password=password, **kwargs)
client_id = kwargs.get('client_id', '')
if auth is None:
if client_id:
log.debug('Encoding client_id "%s" with client_secret as Basic auth credentials.', client_id)
client_secret = kwargs.get('client_secret', '')
client_secret = client_secret if client_secret is not None else ''
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
elif username:
if password is None:
raise ValueError('Username was supplied, but not password.')
log.debug('Encoding username, password as Basic auth credentials.')
auth = requests.auth.HTTPBasicAuth(username, password)
headers = headers or {
'Accept': 'application/json',
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
}
self.token = {}
if method.upper() == 'POST':
r = self.post(token_url, data=dict(urldecode(body)),
timeout=timeout, headers=headers, auth=auth,
verify=verify, proxies=proxies)
log.debug('Prepared fetch token request body %s', body)
elif method.upper() == 'GET':
# if method is not 'POST', switch body to querystring and GET
r = self.get(token_url, params=dict(urldecode(body)),
timeout=timeout, headers=headers, auth=auth,
verify=verify, proxies=proxies)
log.debug('Prepared fetch token request querystring %s', body)
else:
raise ValueError('The method kwarg must be POST or GET.')
log.debug('Request to fetch token completed with status %s.',
r.status_code)
log.debug('Request headers were %s', r.request.headers)
log.debug('Request body was %s', r.request.body)
log.debug('Response headers were %s and content %s.',
r.headers, r.text)
log.debug('Invoking %d token response hooks.',
len(self.compliance_hook['access_token_response']))
for hook in self.compliance_hook['access_token_response']:
log.debug('Invoking hook %s.', hook)
r = hook(r)
self._client.parse_request_body_response(r.text, scope=self.scope)
self.token = self._client.token
log.debug('Obtained token %s.', self.token)
return self.token
def token_from_fragment(self, authorization_response):
"""Parse token from the URI fragment, used by MobileApplicationClients.
:param authorization_response: The full URL of the redirect back to you
:return: A token dict
"""
self._client.parse_request_uri_response(authorization_response,
state=self._state)
self.token = self._client.token
return self.token
def refresh_token(self, token_url, refresh_token=None, body='', auth=None,
timeout=None, headers=None, verify=True, proxies=None, **kwargs):
"""Fetch a new access token using a refresh token.
:param token_url: The token endpoint, must be HTTPS.
:param refresh_token: The refresh_token to use.
:param body: Optional application/x-www-form-urlencoded body to
include in the token request. Prefer kwargs over body.
:param auth: An auth tuple or method as accepted by requests.
:param timeout: Timeout of the request in seconds.
:param verify: Verify SSL certificate.
:param kwargs: Extra parameters to include in the token request.
:return: A token dict
"""
if not token_url:
raise ValueError('No token endpoint set for auto_refresh.')
if not is_secure_transport(token_url):
raise InsecureTransportError()
refresh_token = refresh_token or self.token.get('refresh_token')
log.debug('Adding auto refresh key word arguments %s.',
self.auto_refresh_kwargs)
kwargs.update(self.auto_refresh_kwargs)
body = self._client.prepare_refresh_body(body=body,
refresh_token=refresh_token, scope=self.scope, **kwargs)
log.debug('Prepared refresh token request body %s', body)
if headers is None:
headers = {
'Accept': 'application/json',
'Content-Type': (
'application/x-www-form-urlencoded;charset=UTF-8'
),
}
r = self.post(token_url, data=dict(urldecode(body)), auth=auth,
timeout=timeout, headers=headers, verify=verify, withhold_token=True, proxies=proxies)
log.debug('Request to refresh token completed with status %s.',
r.status_code)
log.debug('Response headers were %s and content %s.',
r.headers, r.text)
log.debug('Invoking %d token response hooks.',
len(self.compliance_hook['refresh_token_response']))
for hook in self.compliance_hook['refresh_token_response']:
log.debug('Invoking hook %s.', hook)
r = hook(r)
self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
if 'refresh_token' not in self.token:
log.debug('No new refresh token given. Re-using old.')
self.token['refresh_token'] = refresh_token
return self.token
def request(self, method, url, data=None, headers=None, withhold_token=False,
client_id=None, client_secret=None, **kwargs):
"""Intercept all requests and add the OAuth 2 token if present."""
if not is_secure_transport(url):
raise InsecureTransportError()
if self.token and not withhold_token:
log.debug('Invoking %d protected resource request hooks.',
len(self.compliance_hook['protected_request']))
for hook in self.compliance_hook['protected_request']:
log.debug('Invoking hook %s.', hook)
url, headers, data = hook(url, headers, data)
log.debug('Adding token %s to request.', self.token)
try:
url, headers, data = self._client.add_token(url,
http_method=method, body=data, headers=headers)
# Attempt to retrieve and save new access token if expired
except TokenExpiredError:
if self.auto_refresh_url:
log.debug('Auto refresh is set, attempting to refresh at %s.',
self.auto_refresh_url)
# We mustn't pass auth twice.
auth = kwargs.pop('auth', None)
if client_id and client_secret and (auth is None):
log.debug('Encoding client_id "%s" with client_secret as Basic auth credentials.', client_id)
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
token = self.refresh_token(
self.auto_refresh_url, auth=auth, **kwargs
)
if self.token_updater:
log.debug('Updating token to %s using %s.',
token, self.token_updater)
self.token_updater(token)
url, headers, data = self._client.add_token(url,
http_method=method, body=data, headers=headers)
else:
raise TokenUpdated(token)
else:
raise
log.debug('Requesting url %s using method %s.', url, method)
log.debug('Supplying headers %s and data %s', headers, data)
log.debug('Passing through key word arguments %s.', kwargs)
return super(OAuth2Session, self).request(method, url,
headers=headers, data=data, **kwargs)
def register_compliance_hook(self, hook_type, hook):
"""Register a hook for request/response tweaking.
Available hooks are:
access_token_response invoked before token parsing.
refresh_token_response invoked before refresh token parsing.
protected_request invoked before making a request.
If you find a new hook is needed please send a GitHub PR request
or open an issue.
"""
if hook_type not in self.compliance_hook:
raise ValueError('Hook type %s is not in %s.' %
(hook_type, self.compliance_hook))
self.compliance_hook[hook_type].add(hook)
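# Typical Authorization Code Grant flow using only methods defined above
# (client id, secret and endpoint URLs are placeholders, not real values):
#
#   oauth = OAuth2Session('my_client_id',
#                         redirect_uri='https://example.com/callback',
#                         scope=['profile'])
#   authorization_url, state = oauth.authorization_url(
#       'https://provider.example.com/oauth/authorize')
#   # ...the user authorizes and is redirected back with code and state...
#   token = oauth.fetch_token(
#       'https://provider.example.com/oauth/token',
#       client_secret='my_client_secret',
#       authorization_response=redirect_response_url)
#   resp = oauth.get('https://provider.example.com/api/resource')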
|
isc
| 8,882,957,335,126,040,000
| 44.784574
| 117
| 0.60668
| false
| 4.565102
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-keys/azure/keyvault/keys/crypto/_internal/algorithms/rsa_signing.py
|
1
|
2339
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, utils
from ..algorithm import SignatureAlgorithm
from ..transform import SignatureTransform
from ..._enums import SignatureAlgorithm as KeyVaultSignatureAlgorithm
class RsaSignatureTransform(SignatureTransform):
def __init__(self, key, padding_function, hash_algorithm):
super(RsaSignatureTransform, self).__init__()
self._key = key
self._padding_function = padding_function
self._hash_algorithm = hash_algorithm
def sign(self, digest):
return self._key.sign(digest, self._padding_function(digest), utils.Prehashed(self._hash_algorithm))
def verify(self, digest, signature):
self._key.verify(signature, digest, self._padding_function(digest), utils.Prehashed(self._hash_algorithm))
class RsaSsaPkcs1v15(SignatureAlgorithm):
def create_signature_transform(self, key):
return RsaSignatureTransform(key, lambda _: padding.PKCS1v15(), self._default_hash_algorithm)
class RsaSsaPss(SignatureAlgorithm):
def create_signature_transform(self, key):
return RsaSignatureTransform(key, self._get_padding, self._default_hash_algorithm)
def _get_padding(self, digest):
return padding.PSS(mgf=padding.MGF1(self._default_hash_algorithm), salt_length=len(digest))
class Ps256(RsaSsaPss):
_name = KeyVaultSignatureAlgorithm.ps256
_default_hash_algorithm = hashes.SHA256()
class Ps384(RsaSsaPss):
_name = KeyVaultSignatureAlgorithm.ps384
_default_hash_algorithm = hashes.SHA384()
class Ps512(RsaSsaPss):
_name = KeyVaultSignatureAlgorithm.ps512
_default_hash_algorithm = hashes.SHA512()
class Rs256(RsaSsaPkcs1v15):
_name = KeyVaultSignatureAlgorithm.rs256
_default_hash_algorithm = hashes.SHA256()
class Rs384(RsaSsaPkcs1v15):
_name = KeyVaultSignatureAlgorithm.rs384
_default_hash_algorithm = hashes.SHA384()
class Rs512(RsaSsaPkcs1v15):
_name = KeyVaultSignatureAlgorithm.rs512
_default_hash_algorithm = hashes.SHA512()
Ps256.register()
Ps384.register()
Ps512.register()
Rs256.register()
Rs384.register()
Rs512.register()
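# A short usage sketch (not part of the SDK; everything below is illustrative
# and assumes cryptography >= 3.1 so hashes.Hash needs no explicit backend).
# It is left commented out so importing this module stays side-effect free:
#
# from cryptography.hazmat.primitives.asymmetric import rsa
#
# key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
# digest_ctx = hashes.Hash(hashes.SHA256())
# digest_ctx.update(b"payload")
# digest = digest_ctx.finalize()
# transform = RsaSignatureTransform(key, lambda _: padding.PKCS1v15(),
#                                   hashes.SHA256())
# signature = transform.sign(digest)
# transform.verify(digest, signature)  # raises InvalidSignature on mismatch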
|
mit
| 355,841,851,340,012,100
| 30.186667
| 114
| 0.719111
| false
| 3.637636
| false
| false
| false
|
Esri/ops-server-config
|
Publish/Portal/UpdatePortalGUIDs.py
|
1
|
8962
|
#!/usr/bin/env python
#------------------------------------------------------------------------------
# Copyright 2015 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==============================================================================
#Name: UpdatePortalGUIDs.py
#
#Purpose:      Performs search and replace on portal items based on the
#              "search" and "replace" keys in the specified JSON file
#              (the keys the script actually reads; see search_key and
#              replace_key below). File must have the JSON shape:
#              [{"search": "GUID", "replace": "GUID"}]
#
#==============================================================================
import sys
import os
import time
import traceback
import json
from portalpy import Portal
from portalpy import TEXT_BASED_ITEM_TYPES
# Add "Root folder"\SupportFiles to sys path inorder to import
# modules in subfolder
sys.path.append(os.path.join(os.path.dirname(
os.path.dirname(os.path.dirname(sys.argv[0]))), "SupportFiles"))
from Utilities import validate_user_repsonse_yesno
import logging
logging.basicConfig()
search_key = 'search'
replace_key = 'replace'
def format_item_info(item):
itemID = item.get('id')
itemTitle = item.get('title')
itemOwner = item.get('owner')
itemType = item.get('type')
return "Id: {:<34}Owner: {:<25}Type: {:25}Title: {:<40}".format(
itemID, itemOwner, itemType, itemTitle)
def print_args():
""" Print script arguments """
if len(sys.argv) < 5:
print '\n' + os.path.basename(sys.argv[0]) + \
' <PortalURL>' \
' <AdminUser>' \
' <AdminUserPassword>' \
' <IdMappingFile>' \
' {SearchQuery}'
print '\nWhere:'
print '\n\t<PortalURL> (required): URL of Portal ' \
'(i.e. https://fully_qualified_domain_name/arcgis)'
print '\n\t<AdminUser> (required): Primary portal administrator user.'
print '\n\t<AdminUserPassword> (required): Password for AdminUser.'
print '\n\t<IdMappingFile> (required): file containing the item id ' \
'mapping information (i.e. output file from FindOrphanedHostedServices.py script)'
print '\n\t{SearchQuery} (optional): Portal search query.'
return None
else:
# Set variables from parameter values
portal_address = sys.argv[1]
adminuser = sys.argv[2]
password = sys.argv[3]
id_mapping_file_path = None
search_query = None
if len(sys.argv) >= 5:
id_mapping_file_path = sys.argv[4].strip()
if len(sys.argv) >= 6:
search_query = sys.argv[5].strip()
return portal_address, adminuser, password, id_mapping_file_path, search_query
def update_item_properties(portal, item, search, replace):
''' Search/replace values in the item json properties '''
if item is not None:
try:
jsonPropsToUpdate = ['description', 'snippet', 'accessInformation', 'licenseInfo', 'url']
for jsonProp in jsonPropsToUpdate:
is_updated = False
propertyValue = item.get(jsonProp)
if propertyValue:
search_str_list = [search, search.lower(), search.upper()]
for search_str in search_str_list:
if propertyValue.find(search_str) > -1:
propertyValue = propertyValue.replace(search_str, replace)
is_updated = True
if is_updated:
portal.update_item(item['id'], {jsonProp: propertyValue})
except Exception as err:
            print('ERROR: Exception: error occurred while executing update_item_properties for item: "{}"'.format(str(item.get('id'))))
def update_item_data(portal, item, search, replace):
''' Search/replace values in the item data '''
if item is not None:
if item['type'] in TEXT_BASED_ITEM_TYPES:
try:
itemdata = portal.item_data(item['id'])
except Exception as err:
print('ERROR: Exception: update_item_data function could not get item data for item: "{}"'.format(str(item.get('id'))))
itemdata = None
if itemdata:
is_updated = False
search_str_list = [search, search.lower(), search.upper()]
for search_str in search_str_list:
try:
if itemdata.find(search_str) > -1:
itemdata = itemdata.replace(search_str, replace)
is_updated = True
except Exception as err:
print('ERROR: Exception: update_item_data function encountered error during search/replace for item: "{}"'.format(str(item.get('id'))))
if is_updated:
try:
portal.update_item(item['id'], {'text': itemdata})
except Exception as err:
print('ERROR: Exception: update_item_data function encountered error during update of item: "{}"'.format(str(item.get('id'))))
def main():
exit_err_code = 1
starting_cwd = os.getcwd()
# Print/get script arguments
results = print_args()
if not results:
sys.exit(exit_err_code)
portal_address, adminuser, password, id_mapping_file_path, search_query = results
total_success = True
title_break_count = 100
section_break_count = 75
print '=' * title_break_count
print 'Update Portal GUIDs'
print '=' * title_break_count
if not os.path.exists(id_mapping_file_path):
print '\nFile {} does not exist. Exiting.'.format(id_mapping_file_path)
sys.exit(0)
try:
portal = Portal(portal_address, adminuser, password)
print '\n{}'.format('-' * section_break_count)
print '- Searching for portal items...\n'
items_temp = portal.search(q=search_query, sort_field='owner')
items = []
for item in items_temp:
if not item['owner'].startswith('esri_'):
items.append(item)
for item in items:
print format_item_info(item)
print '\nFound {} items.'.format(len(items))
if len(items) > 0:
user_response = raw_input("\nDo you want to continue with the update? Enter 'y' to continue: ")
if validate_user_repsonse_yesno(user_response):
# Open id mapping file
file_dir = os.path.dirname(id_mapping_file_path)
file_name = os.path.basename(id_mapping_file_path)
if len(file_dir) == 0:
file_dir = os.getcwd()
os.chdir(file_dir)
id_mapping = json.load(open(file_name))
print '\n{}'.format('-' * section_break_count)
print '- Updating item and item data...\n'
for item in items:
print format_item_info(item)
for id_map in id_mapping:
search = id_map.get(search_key)
replace = id_map.get(replace_key)
update_item_properties(portal, item, search, replace)
update_item_data(portal, item, search, replace)
except:
total_success = False
# Get the traceback object
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
# Concatenate information together concerning the error
# into a message string
pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + \
"\nError Info:\n" + str(sys.exc_info()[1])
# Print Python error messages for use in Python / Python Window
print
print "***** ERROR ENCOUNTERED *****"
print pymsg + "\n"
finally:
os.chdir(starting_cwd)
print '\nDone.'
if total_success:
sys.exit(0)
else:
sys.exit(exit_err_code)
if __name__ == "__main__":
main()
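# Example (illustrative) contents for <IdMappingFile>; the GUID values are
# hypothetical and only show the "search"/"replace" keys the script reads:
# [
#     {"search": "0123456789abcdef0123456789abcdef",
#      "replace": "fedcba9876543210fedcba9876543210"}
# ]
#
# Example (illustrative) invocation:
# python UpdatePortalGUIDs.py https://portal.domain.com/arcgis admin secret \
#     id_mapping.json "owner:publisher1"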
|
apache-2.0
| -1,482,835,201,219,073,800
| 36.814346
| 159
| 0.535706
| false
| 4.312801
| false
| false
| false
|
ooici/coi-services
|
ion/processes/bootstrap/plugins/bootstrap_ingestion.py
|
1
|
1318
|
#!/usr/bin/env python
from ion.core.bootstrap_process import BootstrapPlugin
from pyon.public import Container
from interface.objects import IngestionQueue
from interface.services.dm.iingestion_management_service import IngestionManagementServiceProcessClient
class BootstrapIngestion(BootstrapPlugin):
"""
Bootstrap process for ingestion management.
"""
def on_initial_bootstrap(self, process, config, **kwargs):
"""
Defining the ingestion worker process is done in post_process_dispatcher.
Creating transform workers happens here...
"""
ing_ms_client = IngestionManagementServiceProcessClient(process=process)
self.container = Container.instance
exchange_point = config.get_safe('ingestion.exchange_point','science_data')
queues = config.get_safe('ingestion.queues',None)
        if queues is None:
            queues = [dict(name='science_granule_ingestion', type='SCIDATA')]
        for i in xrange(len(queues)):
            item = queues[i]
            # use .get() so the default queue above (which carries no
            # datastore_name) does not raise a KeyError
            queues[i] = IngestionQueue(name=item['name'], type=item['type'],
                                       datastore_name=item.get('datastore_name'))
ing_ms_client.create_ingestion_configuration(name='standard ingestion config',
exchange_point_id=exchange_point,
queues=queues)
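# Illustrative config fragment this plugin reads (shape inferred from the
# config.get_safe() calls above; the datastore_name value is hypothetical):
#
# ingestion:
#   exchange_point: science_data
#   queues:
#     - name: science_granule_ingestion
#       type: SCIDATA
#       datastore_name: datasets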
|
bsd-2-clause
| -4,062,449,384,635,165,700
| 37.764706
| 115
| 0.69044
| false
| 4.030581
| false
| false
| false
|
ngr/fe_sm_00
|
fe_sm_00/settings.py
|
1
|
2959
|
"""
Django settings for fe_sm_00 project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'j#$z)^1b__ju_-3-lwl=77a1l)oo$5@s7c9f5%465r(-buptql'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'fe_sm_00.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fe_sm_00.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'fe_sm_00',
'HOST': 'rds-sm-00.cjlhgo3mq7ui.us-west-2.rds.amazonaws.com',
'USER': 'dj_dbuser',
'PASSWORD': 'P@ssw0rd',
'PORT': '3306',
},
'sqlite': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
},
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
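# A minimal sketch of sourcing the secrets above from the environment instead
# of hard-coding them; the DJANGO_SECRET_KEY and DJANGO_DB_PASSWORD variable
# names are illustrative, not part of this project:
#
# SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
# DATABASES['default']['PASSWORD'] = os.environ.get('DJANGO_DB_PASSWORD', '')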
|
mit
| -7,847,905,305,224,313,000
| 25.9
| 71
| 0.669483
| false
| 3.335964
| false
| false
| false
|
arximboldi/pigeoncide
|
src/game/pigeon.py
|
1
|
13252
|
#
# Copyright (C) 2009 Juan Pedro Bolivar Puente, Alberto Villegas Erce
#
# This file is part of Pigeoncide.
#
# Pigeoncide is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Pigeoncide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from base.signal import weak_slot
from base.log import get_log
from base.util import *
from ent.observer import ObservableSpatialEntity
from ent.panda import ActorEntity, ModelEntity
from ent.physical import (DynamicPhysicalEntity, StandingPhysicalEntity,
OnFloorEntity)
from core.state import StateManager, State
from core.util import *
from core import task
import phys.geom as geom
import phys.mass as mass
from flock import BoidParams, BoidEntity, FlockEntity
from kill import KillableEntity
from crawler import CrawlerEntityDecorator
from weapon import Hittable
from physics import pigeon_category
import random
import weakref
from pandac.PandaModules import Vec3
from functools import partial
_log = get_log (__name__)
class PigeonFood (ModelEntity,
DynamicPhysicalEntity,
ObservableSpatialEntity):
food_model = 'obj/food.egg'
amount = 1.0
food_scale = 0.5
def __init__ (self, *a, **k):
super (PigeonFood, self).__init__ (
model = self.food_model,
geometry = geom.box (2, 2, 0.5),
*a, **k)
self.model.setTexture (loader.loadTexture ('obj/food.png'))
self.model_scale = self.food_scale
# A bit hackish, I guess
self.entities.game.pigeon_food.append (self)
def eat (self, cnt):
self.amount -= cnt
if self.amount <= 0.0:
self.dispose ()
else:
s = self.amount * self.food_scale
self.model_scale = Vec3 (s, s, s)
def dispose (self):
self.entities.game.pigeon_food.remove (self)
super (PigeonFood, self).dispose ()
class Pigeon (BoidEntity,
ActorEntity,
KillableEntity,
StateManager,
OnFloorEntity,
Hittable):
"""
    TODO: Actually a StateManager has too much unnecessary overhead. We
could try to make a lightweight version of it for this kind of
use.
"""
pigeon_model = 'char/pigeon-anims.egg'
pigeon_anims = { 'walk' : 'char/pigeon-walk.egg',
'fly' : 'char/pigeon-fly.egg',
'takeoff' : 'char/pigeon-takeoff.egg',
'land' : 'char/pigeon-land.egg',
'idle' : 'char/pigeon-idle.egg' }
pigeon_sweeping = True
pigeon_min_eat_distance = 100.
pigeon_z_limit = -50.
def __init__ (self,
model = pigeon_model,
anims = pigeon_anims,
boys = [],
*a, **k):
super (Pigeon, self).__init__ (
geometry = geom.capsule (2, 1),
#mass = mass.sphere (1, 2),
model = model,
anims = anims,
category = pigeon_category,
*a, **k)
self.on_is_on_floor_change += self.on_pigeon_is_on_floor
self.on_hit += self.on_pigeon_hit
self.on_death += self.on_pigeon_death
for boy in boys:
boy.on_boy_noise += self.on_boy_noise
boy.on_entity_set_position += self.on_boy_move
self.physical_hpr = Vec3 (90, 0, 0)
self.params = BoidParams ()
self.model_position = Vec3 (0, 0, -2)
self.model_scale = Vec3 (0.08, 0.08, 0.08)
self.model_hpr = Vec3 (180, 0, 0)
self.add_state ('fly', FlyState)
self.add_state ('walk', WalkState)
self.add_state ('follow', FollowState)
self.add_state ('fear', FearState)
self.add_state ('eat', EatState)
self.add_state ('hit', HitState)
self.add_state ('land', LandState)
self.add_state ('return', ReturnState)
self.add_state ('attack', AttackState)
self.model.loop ('fly')
self.curr_animation = 'fly'
self.anim_speed = 50
# Hack: 3D audio seems very slow, so only some pigeons emit
# some kinds of sounds.
if random.uniform (0, 10) < 2.:
self._coo_sounds = map (self.load_sound,
map (lambda x: "snd/pigeon-coo-%i.wav" % x,
range (1, 5)))
self.tasks.add (task.sequence (
task.wait (random.uniform (0, 20)),
task.loop (
task.func_wait (partial (random.uniform, 10, 30)),
task.run (lambda: random.choice (self._coo_sounds).play ()
))))
else:
self._coo_sounds = []
if random.uniform (0, 10) < 2.:
self._fly_sound = self.load_sound ('snd/pigeon-start.wav')
else:
self._fly_sound = None
if random.uniform (0, 10) < 2.:
self._fear_sound = self.load_sound ('snd/pigeon-flap.wav')
else:
self._fear_sound = None
self.start ('land')
def play_sound (self, sound):
#x, y, z = self.position
#u, v, w = self.linear_velocity
#sound.set3dAttributes (x, y, z, u, v, w)
if sound:
sound.play ()
def do_update (self, timer):
"""
Hack to avoid the tunneling effect. We manually sweep the
collision sphere using a cylinder.
"""
super (Pigeon, self).do_update (timer)
vlen = self.linear_velocity.length ()
if self.pigeon_sweeping:
self.geom.setParams (2., vlen * timer.delta)
self.model.setPlayRate (vlen / self.anim_speed, self.curr_animation)
self.check_limits ()
@weak_slot
def on_pigeon_is_on_floor (self, who, val):
if val and self.current and self.current.state_name == 'land':
self.change_state ('walk')
if val and self.curr_animation != 'walk':
self.model.loop ('walk')
self.curr_animation = 'walk'
elif not val and self.curr_animation != 'fly':
self.model.loop ('fly')
self.curr_animation = 'fly'
@weak_slot
def on_pigeon_hit (self, x):
self.enter_state ('hit')
@weak_slot
def on_boy_noise (self, boy, rad):
if distance_sq (boy.position, self.position) < rad ** 2:
if self.depth == 1:
self.enter_state ('fear', boy)
elif self.current and self.current.state_name == 'fear':
self.current.restart ()
@weak_slot
def on_pigeon_death (self):
self.force_finish ()
@weak_slot
def on_boy_move (self, boy, pos):
if distance_sq (pos, self.params.boid_center) > 500. ** 2:
if self.current and self.current.state_name != 'attack':
self.enter_state ('attack', boy)
elif self.current and self.current.state_name == 'attack':
self.current.restart ()
def find_food (self):
food = self.entities.game.pigeon_food
best = None
bestdist = 1000000.
pos = self.position
for f in food:
dist = distance_sq (f.position, pos)
if dist < self.pigeon_min_eat_distance ** 2 and dist < bestdist:
bestdist = dist
best = f
return best
def check_food (self, change = False):
best = self.find_food ()
if best:
self.enter_state ('eat', best)
def check_limits (self):
pos = self.position
if pos.getZ () < self.pigeon_z_limit:
_log.debug ("Pigeon needs repositioning. %s, %s" %
(str (pos), str (self)))
if self.depth == 1:
self.enter_state ('return')
class PigeonState (State, BoidParams):
anim_speed = 50.
def do_setup (self, *a, **k):
self.manager.change_params (self)
self.manager.anim_speed = self.anim_speed
self.do_pigeon_setup (*a, **k)
def do_sink (self):
self.pause ()
def do_unsink (self, *a, **k):
self.manager.change_params (self)
self.manager.anim_speed = self.anim_speed
self.resume ()
self.do_pigeon_unsink (self)
def do_update (self, timer):
super (PigeonState, self).do_update (timer)
def do_release (self):
self.do_pigeon_release ()
do_pigeon_release = nop
do_pigeon_setup = nop
do_pigeon_unsink = nop
class FollowState (PigeonState):
@weak_slot
def on_target_set_position (self, target, pos):
self.boid_target = pos
def do_pigeon_setup (self, target):
target.on_entity_set_position += self.on_target_set_position
self.boid_target = target.position
class FearState (FollowState, task.WaitTask):
anim_speed = 50.
duration = 3.
boid_f_target = 1.
boid_target_inv = True
boid_speed = 150
boid_power = 1000
def do_pigeon_setup (self, *a, **k):
super (FearState, self).do_pigeon_setup (*a, **k)
self.manager.play_sound (self.manager._fear_sound)
def do_pigeon_release (self):
super (FearState, self).do_pigeon_release ()
self.manager.change_state ('fly')
class AttackState (FollowState, task.WaitTask):
anim_speed = 50.
duration = 5.
boid_f_target = 1.
boid_speed = 200
boid_power = 100
def do_pigeon_setup (self, *a, **k):
super (AttackState, self).do_pigeon_setup (*a, **k)
self.manager.play_sound (self.manager._fear_sound)
def do_pigeon_release (self):
super (AttackState, self).do_pigeon_release ()
self.manager.change_state ('fly')
class EatState (FollowState):
boid_flying = False
boid_speed = 20
boid_f_target = 0.1
boid_f_randomness = 0.
boid_power = 100.
glutony = 0.3
eat_distance = 7
anim_speed = 10.
def do_pigeon_setup (self, target):
super (EatState, self).do_pigeon_setup (target)
self.happy_meal = target
def do_update (self, timer):
        super (EatState, self).do_update (timer)
best = self.manager.find_food ()
if best != self.happy_meal and best:
self.manager.change_state ('eat', best)
elif self.happy_meal:
if distance_sq (self.happy_meal.position, self.manager.position) < \
self.eat_distance ** 2:
self.boid_speed = 0.001
self.boid_power = 0.001
self.happy_meal.eat (timer.delta * self.glutony)
else:
self.manager.leave_state ()
class PatrolState (PigeonState):
def do_pigeon_setup (self):
super (PatrolState, self).do_pigeon_setup ()
self.tasks.add (task.sequence (
task.wait (random.uniform (15, 30)),
task.run (self.next_state)))
def do_update (self, timer):
super (PatrolState, self).do_update (timer)
self.manager.check_food ()
def next_state (self):
self.manager.change_state (
'land' if self.state_name == 'fly' else 'fly')
class WalkState (PatrolState):
anim_speed = 7.
boid_flying = False
boid_speed = 10
boid_max_far = 500
boid_f_bounds = 0.001
boid_power = 100.
boid_f_randomness = 0.
class ReturnState (PigeonState):
anim_speed = 50
boid_max_far = 100
boid_f_bounds = 0.1
boid_center = Vec3 (0, 0, 200)
def do_update (self, timer):
super (ReturnState, self).do_update (timer)
if distance_sq (self.manager.position, self.boid_center) < \
self.boid_max_far ** 2:
self.manager.leave_state ()
class LandState (PigeonState):
anim_speed = 50
boid_speed = 60
boid_max_far = 500
boid_f_bounds = 0.001
boid_flying = False
class FlyState (PatrolState):
anim_speed = 50.
boid_speed = 80.
    boid_f_flight = 0.01
def do_pigeon_setup (self, *a, **k):
super (FlyState, self).do_pigeon_setup (*a, **k)
self.manager.play_sound (self.manager._fly_sound)
class HitState (PigeonState):
boid_flocking = False
boid_flying = False
boid_speed = 1000
def do_pigeon_setup (self):
self.tasks.add (task.sequence (task.wait (2.), task.run (self.kill)))
self.manager.pigeon_sweeping = False
def do_pigeon_release (self):
self.manager.pigeon_sweeping = True
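# A minimal sketch of how a new behaviour would slot into the state pattern
# above; the class and its parameter values are illustrative, not part of the
# game. It would be wired up with self.add_state ('sleep', SleepState) in
# Pigeon.__init__ alongside the other states.
class SleepState (PigeonState):
    anim_speed = 1.
    boid_flying = False
    boid_speed = 0.001
    boid_power = 0.001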
|
gpl-3.0
| -5,173,898,428,870,489,000
| 29.394495
| 80
| 0.561425
| false
| 3.388392
| false
| false
| false
|
turon/openthread
|
tools/harness-automation/cases/router_8_2_5.py
|
1
|
1878
|
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class Router_8_2_5(HarnessCase):
role = HarnessCase.ROLE_ROUTER
case = '8 2 5'
golden_devices_required = 2
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
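# Illustrative sketch of a sibling case following the same pattern; the case
# string below is hypothetical, not a real test plan entry:
#
# class Router_8_2_6(HarnessCase):
#     role = HarnessCase.ROLE_ROUTER
#     case = '8 2 6'
#     golden_devices_required = 2
#
#     def on_dialog(self, dialog, title):
#         pass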
|
bsd-3-clause
| -986,593,235,084,160,300
| 39.826087
| 77
| 0.761448
| false
| 4.418824
| false
| false
| false
|
DataDog/integrations-extras
|
aws_pricing/datadog_checks/aws_pricing/aws_pricing.py
|
1
|
4071
|
import json
from collections import defaultdict
import boto3
from botocore.exceptions import ClientError
from six import iteritems
from six.moves import filter, map
from datadog_checks.base import AgentCheck
from datadog_checks.base.errors import CheckException
class AwsPricingCheck(AgentCheck):
def check(self, instance):
try:
region_name = instance.get('region_name')
if not region_name:
region_name = 'us-east-1'
pricing_client = boto3.client('pricing', region_name=region_name)
service_codes = get_aws_service_codes(pricing_client)
rate_codes_dict = get_rate_codes_dict_from_instance(service_codes, instance)
# Python dictionaries evaluate to false when empty
if not rate_codes_dict:
message = 'No rate codes for existing AWS services were defined, please fix conf.yaml'
self.service_check('aws_pricing.status', self.CRITICAL, message=message)
raise CheckException(message)
missing_rate_codes = defaultdict(list)
for service_code, rate_codes in iteritems(rate_codes_dict):
for rate_code in rate_codes:
price_dimensions = get_aws_prices(pricing_client, service_code, rate_code)
if price_dimensions is None:
missing_rate_codes[service_code].append(rate_code)
continue
name = 'aws.pricing.{}'.format(service_code.lower())
price = get_price_from_price_dimensions(price_dimensions)
tags = get_tags_from_price_dimensions(price_dimensions)
self.gauge(name, price, tags)
            # an empty dict here means prices were found for every configured rate code
if not missing_rate_codes:
self.service_check('aws_pricing.status', self.OK)
else:
message = 'Pricing data not found for these service rate codes: {}'.format(dict(missing_rate_codes))
self.service_check('aws_pricing.status', self.WARNING, message=message)
except ClientError as client_error:
self.service_check('aws_pricing.status', self.CRITICAL, message=str(client_error))
raise CheckException('Pricing Service client error: {}'.format(str(client_error)))
def get_rate_codes_dict_from_instance(service_codes, instance):
rate_codes_dict = {}
for service_code in service_codes:
instance_rate_codes = instance.get(service_code)
if instance_rate_codes is not None:
rate_codes_dict[service_code] = instance_rate_codes
return rate_codes_dict
def get_aws_service_codes(pricing_client):
response = pricing_client.describe_services(FormatVersion='aws_v1')
service_codes = map(lambda service: service['ServiceCode'], response['Services'])
return service_codes
def get_aws_prices(pricing_client, service_code, rate_code):
response = pricing_client.get_products(
FormatVersion='aws_v1',
ServiceCode=service_code,
Filters=[{'Type': 'TERM_MATCH', 'Field': 'RateCode', 'Value': rate_code}],
MaxResults=1,
)
price_dimensions = None
if len(response['PriceList']) > 0:
response_obj = json.loads(response['PriceList'][0])
terms = response_obj['terms'].values()
price_dimensions = find_price_dimensions_by_rate_code(rate_code, terms)
return price_dimensions
def find_price_dimensions_by_rate_code(rate_code, terms):
rate_code_parts = rate_code.split('.')
term_code = '.'.join(rate_code_parts[:2])
term = next(filter(lambda term: term_code in term, terms))
price_dimensions = term[term_code]['priceDimensions'][rate_code]
return price_dimensions
def get_tags_from_price_dimensions(price_dimensions):
    # datadog expects tags as a list of 'key:value' strings, not a dict
    return [
        'rate_code:{}'.format(price_dimensions['rateCode']),
        'unit:{}'.format(price_dimensions['unit']),
    ]
def get_price_from_price_dimensions(price_dimensions):
return float(price_dimensions['pricePerUnit']['USD'])
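# Illustrative conf.yaml instance for this check (shape inferred from the
# instance.get() calls above; the service entry and rate code are hypothetical,
# though real rate codes follow the three-part SKU.OFFERTERM.RATE form parsed
# in find_price_dimensions_by_rate_code):
#
# instances:
#   - region_name: us-east-1
#     AmazonEC2:
#       - ABCDEFGHIJ.JRTCKXETXX.6YS6EN2CT7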
|
bsd-3-clause
| 5,405,679,731,763,715,000
| 36.009091
| 116
| 0.647998
| false
| 4.00689
| false
| false
| false
|
nicfit/vexmpp
|
vexmpp/protocols/disco.py
|
1
|
4899
|
# -*- coding: utf-8 -*-
"""XEP-0030 (Service Discovery)"""
from .. import stream
from ..jid import Jid
from ..stanzas import Iq, ElementWrapper
from ..errors import XmppError
from ..utils import xpathFilter
from .. import getLogger
log = getLogger(__name__)
NS_URI_BASE = "http://jabber.org/protocol/disco"
NS_URI_INFO = "{}#info".format(NS_URI_BASE)
NS_URI_ITEMS = "{}#items".format(NS_URI_BASE)
async def getInfo(stream, to, node=None, timeout=None):
iq = Iq(to=to, request=("query", NS_URI_INFO), attrs={"node": node},
id_prefix="disco#info_get")
return (await stream.sendAndWait(iq, raise_on_error=True, timeout=timeout))
async def getItems(stream, to, node=None, timeout=None):
iq = Iq(to=to, request=("query", NS_URI_ITEMS), id_prefix="disco#items_get")
if node:
iq.set("node", node)
iq = await stream.sendAndWait(iq, raise_on_error=True, timeout=timeout)
return iq
class Identity:
def __init__(self, category, type, name=None, lang=None):
self.category = category
self.type = type
self.name = name
self.lang = lang
def __str__(self):
return ("Identity [category=%s type=%s name=%s lang=%s]"
% (self.category, self.type, self.name, self.lang))
def __hash__(self):
return self.__str__().__hash__()
def __eq__(self, o):
return (type(o) == type(self) and
o.category == self.category and
o.type == self.type and
o.name == self.name and
o.lang == self.lang)
class Info:
def __init__(self):
self.disco_jid = None
self.identities = set()
self.features = set()
self.items = set()
self.node = None
class DiscoCache:
def __init__(self):
self.cache = {}
def clear(self):
self.cache.clear()
def getJidsForFeature(self, feature):
jids = []
for disco_jid, disco_info in self.cache.items():
if feature in disco_info.features:
jids.append(disco_jid)
return jids
class DiscoCacheMixin(stream.Mixin):
def __init__(self):
self._cache = DiscoCache()
super().__init__([('disco_cache', self._cache)])
async def postSession(self, stream):
await self.update(stream, stream.jid.host)
async def update(self, stream, disco_jid):
self._cache.clear()
# Fetch all disco info for the server
disco_info = await self._disco(stream, disco_jid, True)
# Fetch details about all the server's items (but not info about each
# item)
if disco_info and disco_info.items is not None:
for jid in disco_info.items:
try:
await self._disco(stream, jid, False)
except XmppError as ex:
log.warn("Stanza error while disco'ing item '{}': {}"
.format(jid.full, ex))
async def _disco(self, stream, jid, fetch_items):
if not isinstance(jid, Jid):
jid = Jid(jid)
disco_info = Info()
disco_info.disco_jid = jid
info = await getInfo(stream, to=jid, timeout=stream.default_timeout)
for child in info.query:
if child.tag == "{%s}identity" % NS_URI_INFO:
ident = Identity(category=child.attrib['category'],
type=child.attrib['type'],
name=child.attrib.get('name', None))
disco_info.identities.add(ident)
elif child.tag == "{%s}feature" % NS_URI_INFO:
disco_info.features.add(child.attrib['var'])
if fetch_items:
items = await getItems(stream, jid,
timeout=stream.default_timeout)
for child in items.query:
if child.tag == "{%s}item" % NS_URI_ITEMS:
disco_info.items.add(Jid(child.attrib['jid']))
self._cache.cache[jid] = disco_info
return disco_info
class DiscoInfoMixin(stream.Mixin):
def __init__(self):
self._features = []
super().__init__([('disco_info_features', self._features)])
@xpathFilter([("/iq[@type='get']/ns:query", {"ns": NS_URI_INFO}),
("/iq[@type='get']/ns:query", {"ns": NS_URI_ITEMS})])
async def onStanza(self, stream, stanza):
log.debug("disco#info request")
if stanza.query.tag.startswith("{%s}" % NS_URI_INFO):
# disco#info
query = ElementWrapper(stanza.query)
ident = query.appendChild("identity")
ident.set("category", "client")
ident.set("name", "Botch")
ident.set("type", "bot")
query.appendChild("feature").set("var", NS_URI_INFO)
else:
# disco#items
pass
stream.send(stanza.resultResponse())
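# A minimal usage sketch (assumes an already-connected vexmpp stream object;
# the JID is illustrative):
#
# async def probe_server(stream):
#     info = await getInfo(stream, to="example.com")
#     items = await getItems(stream, to="example.com")
#     return info, items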
|
mit
| 4,763,337,293,667,032,000
| 30.606452
| 80
| 0.551949
| false
| 3.664174
| false
| false
| false
|
wavefrontHQ/python-client
|
wavefront_api_client/models/response_container_monitored_service_dto.py
|
1
|
4497
|
# coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ResponseContainerMonitoredServiceDTO(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'response': 'MonitoredServiceDTO',
'status': 'ResponseStatus'
}
attribute_map = {
'response': 'response',
'status': 'status'
}
def __init__(self, response=None, status=None): # noqa: E501
"""ResponseContainerMonitoredServiceDTO - a model defined in Swagger""" # noqa: E501
self._response = None
self._status = None
self.discriminator = None
if response is not None:
self.response = response
self.status = status
@property
def response(self):
"""Gets the response of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:return: The response of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:rtype: MonitoredServiceDTO
"""
return self._response
@response.setter
def response(self, response):
"""Sets the response of this ResponseContainerMonitoredServiceDTO.
:param response: The response of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:type: MonitoredServiceDTO
"""
self._response = response
@property
def status(self):
"""Gets the status of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:return: The status of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:rtype: ResponseStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ResponseContainerMonitoredServiceDTO.
:param status: The status of this ResponseContainerMonitoredServiceDTO. # noqa: E501
:type: ResponseStatus
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseContainerMonitoredServiceDTO, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseContainerMonitoredServiceDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
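# Brief usage sketch (illustrative; ResponseStatus is another generated model
# in this package and its required fields are elided here):
#
# from wavefront_api_client.models.response_status import ResponseStatus
#
# container = ResponseContainerMonitoredServiceDTO(status=ResponseStatus())
# print(container.to_dict())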
|
apache-2.0
| 1,715,293,696,044,203,000
| 30.669014
| 409
| 0.599956
| false
| 4.33237
| false
| false
| false
|
hiryou/MLPractice
|
neural_net/by_numpy.py
|
1
|
3555
|
from builtins import classmethod
import numpy as np
from datetime import datetime as dt
"""
Inspired by https://repl.it/repls/OrganicVainDoom#main.py
"""
class NeuralNet(object):
train_cnt = 0
epoch = 0
eta = 0.5
    X = None
    Y = None
    X_size = 0  # neuron count
    Y_size = 0  # neuron count
def __init__(self, X, Y, epoch):
self.X, self.Y = self.__scaled(X, Y)
self.train_cnt = len(self.X)
self.X_size = len(self.X[0])
self.Y_size = len(self.Y[0])
        self.epoch = epoch
        # keep these mutable containers per-instance: as class attributes they
        # would be shared (and appended to) by every NeuralNet created
        self.h_layers = [3]  # TODO make constructor-only param
        self.h_layers.append(self.Y_size)
        self.W = list()  # hidden-layer & output-layer weight matrices
        self.H = list()  # layer activations
        left_neuron_cnt = self.X_size
for neuron_cnt in self.h_layers:
ww = np.random.randn(left_neuron_cnt, neuron_cnt)
hh = np.full((self.train_cnt, neuron_cnt), -0.0001)
self.W.append(ww)
self.H.append(hh)
left_neuron_cnt = neuron_cnt
pass
@staticmethod
def sigmoid(s):
return 1 / (1 + np.exp(-s))
@staticmethod
def sigmoid_prime(sig):
return sig * (1 - sig)
def get_train_loss(self):
Y = self.__scaled_back(self.Y)
H_last = self.__scaled_back(self.H[-1])
return np.mean(
np.square(Y - H_last)
)
pass
def do_train(self):
for i in range(self.epoch):
self.__forward(self.X)
self.__backward()
#print("epoch = {}: loss = {}".format( i, str(self.get_train_loss()) ))
def __scaled(self, X, Y):
# normalize
# max 24h a day
# max score = 100
return X/24, Y/100
def __scaled_back(self, Y):
# max score = 100
return Y*100
def __forward(self, X):
left_mt = X
for idx in range(len(self.h_layers)):
net_H_idx = np.dot(left_mt, self.W[idx])
self.H[idx] = self.sigmoid(net_H_idx)
left_mt = self.H[idx]
return self.H[-1]
def __backward(self):
# delta: start initially from layer H2 (output)
delta_H = [None for idx in range(len(self.h_layers))]
delta_H[-1] = (self.Y - self.H[-1]) * self.sigmoid_prime(self.H[-1])
# then delta: reversed loop from semi-last element -> beginning
for idx in range(len(self.h_layers)-2, -1, -1):
delta_H[idx] = delta_H[idx+1].dot(self.W[idx+1].T) * self.sigmoid_prime(self.H[idx])
pass
# update weights: start from right most layer
for idx in range(len(self.h_layers) - 1, 0, -1):
self.W[idx] += (1 / self.train_cnt) * self.eta * self.H[idx-1].T.dot(delta_H[idx])
pass
# update weights: at layer W0 back to input
self.W[0] += (1 / self.train_cnt) * self.eta * self.X.T.dot(delta_H[0])
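# The loader below expects study-sleep-grade.txt lines shaped like
# "<grade>, <hours-study> <hours-sleep>" -- a format inferred from the
# split() calls that follow; the sample values are illustrative:
#
# 91, 8 7
# 75, 3 6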
f = open('study-sleep-grade.txt')
lines = f.readlines()
f.close()
# print(lines)
x_all = []
y_all = []
for line in lines:
p = line.strip().split(", ")
y = p[0].strip().split(' ')
x = p[1].strip().split(' ')
x_all.append(x)
y_all.append(y)
INP = np.array((x_all[:-1]), dtype=float)
Y = np.array((y_all[:-1]), dtype=float)
nn = NeuralNet(INP, Y, epoch=1000)
print("-------------------------")
print("training ...")
tic = dt.now().microsecond
nn.do_train()
toc = dt.now().microsecond
print("-------------------------")
print("train loss = {}".format( str(nn.get_train_loss()) ))
print("Train taken {} micro-secs".format('{:,}'.format(toc - tic)))
|
mit
| 7,135,865,273,220,866,000
| 26.55814
| 96
| 0.536428
| false
| 3.11296
| false
| false
| false
|
lgarren/spack
|
var/spack/repos/builtin/packages/r-simpleaffy/package.py
|
1
|
2156
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RSimpleaffy(RPackage):
"""Provides high level functions for reading Affy .CEL files,
phenotypic data, and then computing simple things with it, such as
t-tests, fold changes and the like. Makes heavy use of the affy
library. Also has some basic scatter plot functions and mechanisms
for generating high resolution journal figures..."""
homepage = "http://bioconductor.org/packages/simpleaffy/"
url = "https://bioconductor.org/packages/3.5/bioc/src/contrib/simpleaffy_2.52.0.tar.gz"
version('2.52.0', 'aa305099a57b3d868be53dc8c539b74e')
depends_on('r-biocgenerics', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
depends_on('r-affy', type=('build', 'run'))
depends_on('r-genefilter', type=('build', 'run'))
depends_on('r-gcrma', type=('build', 'run'))
depends_on('r@3.4.0:3.4.9', when='@2.52.0')
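# Illustrative install command for this package with the standard Spack CLI:
#
#     spack install r-simpleaffy@2.52.0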
|
lgpl-2.1
| 4,242,531,252,514,886,000
| 46.911111
| 96
| 0.668831
| false
| 3.749565
| false
| false
| false
|
fedebell/Laboratorio3
|
relazione6/calcoli.py
|
1
|
1559
|
import math
import numpy
import pylab
import scipy.stats
import uncertainties
from uncertainties import ufloat, unumpy
from scipy.optimize import curve_fit
Vmeno = ufloat(-15.00, 15.00*0.005)
Vpiu = ufloat(14.99, 14.99*0.005)
R1 = ufloat(2.18, 2.18*0.008)*1000
R2 = ufloat(21.5, 21.5*0.008)*1000
print("Vmeno = ", Vmeno)
print("Vpiu = ", Vpiu)
print("R1 = ", R1)
print("R2 = ", R2)
A = -R2/R1
print("A_atteso = ", A)
# Input resistances
V1 = ufloat(9.68, 0.08)
V2 = ufloat(4.88, 0.04)
RS = ufloat(2.19, ((2.19*0.008)**2+0.01**2)**0.5)
R_in_att = RS*1/(V1/V2 -1)
print("V1 = ", V1)
print("V2 = ", V2)
print("RS = ", RS)
print("R_in_attesa = ", R_in_att)
deltaV = ufloat(1.00, 0.04)
deltat = ufloat(90, 1)*10**(-3)
s = deltaV/deltat
print("s = ", s)
# Inverting amplifier
R1 = ufloat(218, ((0.008*218)**2+1)**0.5)
P1 = ufloat(97.8, ((0.008*97.8)**2+0.1**2)**0.5)*1000
VIN = ufloat(0.340, 0.002)
print("R1 = ", R1)
print("P1 = ", P1)
print("VIN = ", VIN)
# Integrator
R1 = ufloat(984, 984*0.008)
R2 = ufloat(11.77, 11.77*0.008)*1000
C = ufloat(45.2, 45.2*0.04)
print("R1 = ", R1)
print("R2 = ", R2)
print("C = ", C)
# Differentiator
deltaV = ufloat(1.00, 0.03)
deltat = ufloat(0.088, 0.004)
slew = deltaV/deltat
print("slew rate", slew)
V_I = ufloat(4.68, 0.02)
V_OUT = ufloat(1.02, 0.02)
A = V_OUT/V_I
print("Amplificazione derivatore a 100\,Hz:", A)
f = 0.100
f_t = 3.42515
Amax = 11.597690
A = Amax/(1+(f_t/f)**2)**0.5
print("Amplificazione derivatore a 100\,Hz attesa:", A)
|
gpl-3.0
| 1,469,787,664,021,489,200
| 20.356164
| 55
| 0.630532
| false
| 2.003856
| false
| true
| false
|
emc-openstack/storops
|
storops_test/vnx/resource/test_mirror_view.py
|
1
|
23552
|
# coding=utf-8
# Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from unittest import TestCase
from hamcrest import assert_that, instance_of, raises, none, not_none
from hamcrest import equal_to, has_length
from storops.exception import VNXMirrorLunNotAvailableError, \
VNXMirrorNameInUseError, VNXMirrorAlreadyMirroredError, \
VNXMirrorImageNotFoundError, VNXMirrorFractureImageError, \
VNXMirrorSyncImageError, VNXMirrorPromoteNonLocalImageError, \
VNXMirrorPromotePrimaryError, VNXMirrorFeatureNotAvailableError, \
VNXMirrorNotFoundError, VNXDeleteMirrorWithSecondaryError, \
VNXMirrorRemoveSynchronizingError, VNXMirrorGroupAlreadyMemberError, \
VNXMirrorGroupMirrorNotMemberError, VNXMirrorGroupAlreadyPromotedError, \
VNXMirrorGroupNameInUseError, VNXMirrorException
from storops_test.vnx.cli_mock import patch_cli
from storops_test.vnx.cli_mock import t_cli
from storops.vnx.enums import VNXMirrorViewRecoveryPolicy, \
VNXMirrorViewSyncRate, VNXSPEnum, VNXMirrorImageState, \
VNXMirrorGroupRecoveryPolicy
from storops.vnx.resource.mirror_view import VNXMirrorView, \
VNXMirrorViewImage, VNXMirrorGroup, VNXMirrorGroupList, \
VNXMirrorViewAsync, VNXMirrorGroupAsync, VNXMirrorGroupAsyncList
__author__ = 'Cedric Zhuang'
class VNXMirrorViewTest(TestCase):
@patch_cli
def test_get_all(self):
mv_list = VNXMirrorView.get(t_cli())
assert_that(len(mv_list), equal_to(4))
@patch_cli(output='mirror_not_installed.txt')
def test_mirror_view_not_installed(self):
mv_list = VNXMirrorView.get(t_cli())
assert_that(len(mv_list), equal_to(0))
mv = VNXMirrorView.get(t_cli(), 'mv_sync_2')
assert_that(mv.existed, equal_to(False))
@patch_cli
def test_get(self):
mv = VNXMirrorView.get(t_cli(), 'mv_sync_2')
assert_that(mv.uid, equal_to(
'50:06:01:60:88:60:05:FE:04:00:00:00:00:00:00:00'))
assert_that(mv.name, equal_to('mv_sync_2'))
assert_that(mv.description, equal_to(''))
        assert_that(mv.logical_unit_numbers, equal_to(30))
assert_that(mv.quiesce_threshold, equal_to(60))
assert_that(mv.recovery_policy,
equal_to(VNXMirrorViewRecoveryPolicy.MANUAL))
assert_that(len(mv.images), equal_to(2))
assert_that(mv.images[0], instance_of(VNXMirrorViewImage))
assert_that(mv.synchronization_rate,
equal_to(VNXMirrorViewSyncRate.MEDIUM))
assert_that(mv.existed, equal_to(True))
assert_that(mv.state, equal_to('Active'))
assert_that(mv.image_transitioning, equal_to(False))
assert_that(mv.image_size, equal_to(2097152))
assert_that(mv.image_count, equal_to(2))
assert_that(mv.image_faulted, equal_to(False))
assert_that(mv.minimum_number_of_images_required, equal_to(0))
assert_that(mv.write_intent_log_used, equal_to(True))
assert_that(mv.synchronizing_progress, equal_to(100))
assert_that(mv.remote_mirror_status, equal_to('Secondary Copy'))
assert_that(mv.faulted, equal_to(False))
assert_that(mv.transitioning, equal_to(False))
assert_that(mv.is_primary, equal_to(False))
@patch_cli
def test_image_properties(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
assert_that(mv.is_primary, equal_to(True))
assert_that(mv.primary_image.is_primary, equal_to(True))
assert_that(mv.secondary_image.is_primary, equal_to(False))
@patch_cli
def test_create_success(self):
mv = VNXMirrorView.create(t_cli(), 'mv0', 245)
assert_that(mv.name, equal_to('mv0'))
@patch_cli
def test_create_lun_not_available_for_mirror(self):
def f():
VNXMirrorView.create(t_cli(), 'mv0', 244)
assert_that(f, raises(VNXMirrorLunNotAvailableError, 'not available'))
@patch_cli
def test_create_name_in_use(self):
def f():
VNXMirrorView.create(t_cli(), 'mv0', 246)
assert_that(f, raises(VNXMirrorNameInUseError, 'in use'))
@patch_cli
def test_add_image_success(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.add_image('192.168.1.94', 71)
assert_that(len(mv.images), equal_to(2))
@patch_cli
def test_add_image_already_mirrored(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.add_image('192.168.1.94', 72)
assert_that(f, raises(VNXMirrorAlreadyMirroredError, 'exists'))
@patch_cli
def test_get_image_found(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
image = mv.get_image('50:06:01:60:88:60:05:FE')
assert_that(image.state, equal_to(VNXMirrorImageState.SYNCHRONIZED))
@patch_cli
def test_get_image_not_found(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.get_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError, 'not found'))
@patch_cli
def test_remove_image_not_found(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.remove_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError, 'not found'))
@patch_cli
def test_remove_image_success(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
# no error raised
mv.remove_image()
@patch_cli
def test_remove_image_no_secondary_image(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv1')
mv.remove_image()
assert_that(f,
raises(VNXMirrorImageNotFoundError, 'no secondary'))
@patch_cli
def test_fracture_primary_image(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.fracture_image('50:06:01:60:B6:E0:1C:F4')
assert_that(f, raises(VNXMirrorFractureImageError, 'Cannot'))
@patch_cli
def test_fracture_image_success(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
# no error raised
mv.fracture_image()
@patch_cli
def test_fracture_image_not_found(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.fracture_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError))
@patch_cli
def test_sync_image_not_found(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.sync_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError))
@patch_cli
def test_sync_image_failed(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.sync_image()
assert_that(f, raises(VNXMirrorSyncImageError, 'failed'))
@patch_cli
def test_promote_image_not_found(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.promote_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError))
@patch_cli
def test_promote_non_local_image(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.promote_image()
assert_that(f, raises(VNXMirrorPromoteNonLocalImageError,
'not local'))
@patch_cli
def test_promote_already_promoted(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv0')
mv.promote_image('50:06:01:60:88:60:05:F0')
assert_that(f, raises(VNXMirrorPromotePrimaryError, 'primary image'))
@patch_cli
def test_mirror_view_feature_not_installed(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv9')
mv.delete()
assert_that(f, raises(VNXMirrorFeatureNotAvailableError,
'not installed'))
@patch_cli
def test_delete_mirror_not_found_error(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv8')
mv.delete()
assert_that(f, raises(VNXMirrorNotFoundError, 'not found'))
@patch_cli
def test_delete_mirror_has_secondary(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv7')
mv.delete()
assert_that(f, raises(VNXDeleteMirrorWithSecondaryError,
'at least one secondary'))
@patch_cli
def test_remove_mirror_image_is_synchronizing(self):
def f():
mv = VNXMirrorView.get(t_cli(), 'mv2')
mv.remove_image()
assert_that(f, raises(VNXMirrorRemoveSynchronizingError,
'is being synchronized'))
@patch_cli
def test_force_delete_mirror_has_secondary(self):
mv = VNXMirrorView.get(t_cli(), 'mv0')
# no error raised
mv.delete(force=True)
class VNXMirrorViewImageTest(TestCase):
@patch_cli
def test_properties(self):
mv = VNXMirrorView.get(t_cli(), 'mv_sync_2')
image = mv.get_image('50:06:01:60:88:60:05:FE')
assert_that(image.uid, equal_to('50:06:01:60:88:60:05:FE'))
assert_that(image.existed, equal_to(True))
assert_that(image.is_primary, equal_to(True))
assert_that(image.logical_unit_uid, equal_to(
'60:06:01:60:41:C4:3D:00:6E:1C:50:9D:05:95:E5:11'))
assert_that(image.condition, equal_to('Primary Image'))
assert_that(image.state, none())
assert_that(image.preferred_sp, equal_to(VNXSPEnum.SP_A))
class VNXMirrorGroupTest(TestCase):
@patch_cli
def test_create(self):
mg = VNXMirrorGroup.create(t_cli(), name='test_group')
assert_that(mg, instance_of(VNXMirrorGroup))
@patch_cli
def test_create_name_in_use(self):
def _inner():
VNXMirrorGroup.create(t_cli(), name='test_group_in_use')
assert_that(_inner, raises(VNXMirrorGroupNameInUseError))
@patch_cli
def test_create_and_add(self):
mirror = VNXMirrorView.get(t_cli(), name='mv_sync_2')
mg = VNXMirrorGroup.create(t_cli(), name='petermg1', mirror=mirror)
assert_that(mg, instance_of(VNXMirrorGroup))
@patch_cli
def test_get_single(self):
mg = VNXMirrorGroup.get(t_cli(), name='petermg')
assert_that(mg, instance_of(VNXMirrorGroup))
assert_that(mg.name, equal_to('petermg'))
assert_that(mg.gid, equal_to('50:06:01:60:B6:60:25:22:00:00:00:00'))
assert_that(mg.description, equal_to(''))
assert_that(mg.state, equal_to('Synchronized'))
assert_that(mg.role, equal_to('Primary'))
assert_that(mg.condition, equal_to('Active'))
assert_that(mg.policy, equal_to(VNXMirrorGroupRecoveryPolicy.MANUAL))
assert_that(mg.mirrors, has_length(2))
assert_that(mg.group_mirrors, has_length(2))
for m in mg.mirrors:
assert_that(m, instance_of(VNXMirrorView))
        for gm in mg.group_mirrors:
            assert_that(
                gm.mirror_name,
                not_none())
            assert_that(gm.src_lun_id, instance_of(int))
@patch_cli
def test_get_all(self):
mg_list = VNXMirrorGroup.get(t_cli())
assert_that(len(mg_list), equal_to(2))
assert_that(mg_list, instance_of(VNXMirrorGroupList))
@patch_cli
def test_promote_group(self):
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg1')
mg1.promote_group()
@patch_cli
def test_fracture_group(self):
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg1')
mg1.fracture_group()
@patch_cli
def test_add_to_group(self):
mirror = VNXMirrorView.get(t_cli(), name='mv_sync_2')
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg1')
mg1.add_mirror(mirror)
@patch_cli
def test_add_to_group_existed(self):
mirror = VNXMirrorView.get(t_cli(), name='mv0')
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg')
def _inner():
mg1.add_mirror(mirror)
assert_that(_inner, raises(VNXMirrorGroupAlreadyMemberError))
@patch_cli
def test_remove_from_group(self):
        mirror = VNXMirrorView.get(t_cli(), name='mv_sync_2')
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg')
mg1.remove_mirror(mirror)
@patch_cli
def test_remove_from_group_already_removed(self):
        mirror = VNXMirrorView.get(t_cli(), name='not_in_group')
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg')
def _inner():
mg1.remove_mirror(mirror)
assert_that(_inner, raises(VNXMirrorGroupMirrorNotMemberError))
@patch_cli
def test_sync_group(self):
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg')
mg1.sync_group()
@patch_cli
def test_sync_group_already_promoted(self):
mg1 = VNXMirrorGroup.get(t_cli(), name='mg_promote_on_primary')
assert_that(mg1.sync_group, raises(VNXMirrorGroupAlreadyPromotedError))
@patch_cli
def test_delete_group(self):
mg1 = VNXMirrorGroup.get(t_cli(), name='petermg')
mg1.delete()
class VNXMirrorViewAsyncTest(TestCase):
@patch_cli
def test_get_all(self):
mv_list = VNXMirrorViewAsync.get(t_cli())
assert_that(len(mv_list), equal_to(2))
@patch_cli
def test_get(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_001')
assert_that(mv.uid, equal_to(
'8F:23:60:B6:60:01:06:50:08:00:00:00:00:00:00:00'))
assert_that(mv.name, equal_to('testdr_001'))
assert_that(mv.description, equal_to(''))
        assert_that(mv.logical_unit_numbers, equal_to(55))
assert_that(mv.recovery_policy,
equal_to(VNXMirrorViewRecoveryPolicy.AUTO))
assert_that(len(mv.images), equal_to(2))
assert_that(mv.images[0], instance_of(VNXMirrorViewImage))
assert_that(mv.synchronization_rate,
equal_to(VNXMirrorViewSyncRate.MEDIUM))
assert_that(mv.existed, equal_to(True))
assert_that(mv.state, equal_to('Active'))
assert_that(mv.image_transitioning, equal_to(False))
assert_that(mv.image_size, equal_to(104857600))
assert_that(mv.image_count, equal_to(2))
assert_that(mv.image_faulted, equal_to(False))
assert_that(mv.minimum_number_of_images_required, equal_to(0))
assert_that(mv.synchronizing_progress, equal_to(100))
assert_that(mv.remote_mirror_status, equal_to('Mirrored'))
assert_that(mv.faulted, equal_to(False))
assert_that(mv.transitioning, equal_to(False))
assert_that(mv.is_primary, equal_to(True))
@patch_cli
def test_image_properties(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_001')
assert_that(mv.is_primary, equal_to(True))
assert_that(mv.primary_image.is_primary, equal_to(True))
assert_that(mv.secondary_image.is_primary, equal_to(False))
@patch_cli
def test_create_success(self):
mv = VNXMirrorViewAsync.create(t_cli(), 'testdr_003', 71)
assert_that(mv.name, equal_to('testdr_003'))
@patch_cli
def test_create_lun_not_available_for_mirror(self):
def f():
VNXMirrorViewAsync.create(t_cli(), 'mv0', 244)
assert_that(f, raises(VNXMirrorException, 'LUN does not exist'))
@patch_cli
def test_create_name_in_use(self):
def f():
VNXMirrorViewAsync.create(t_cli(), 'testdr_003', 72)
assert_that(f, raises(VNXMirrorNameInUseError, 'in use'))
@patch_cli
def test_add_image_success(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.add_image('192.168.1.94', 71)
assert_that(len(mv.images), equal_to(2))
@patch_cli
def test_add_image_already_mirrored(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.add_image('192.168.1.94', 72)
assert_that(f, raises(VNXMirrorAlreadyMirroredError, 'exists'))
@patch_cli
def test_get_image_found(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_004')
image = mv.get_image('50:06:01:60:B6:60:23:7E')
assert_that(image.state, equal_to(VNXMirrorImageState.SYNCHRONIZED))
@patch_cli
def test_get_image_not_found(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_004')
mv.get_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorImageNotFoundError, 'not found'))
@patch_cli
def test_remove_image_not_found(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_004')
mv.remove_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorException, 'image does not exist'))
@patch_cli
def test_remove_image_success(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_004')
# no error raised
mv.remove_image()
@patch_cli
def test_remove_image_no_secondary_image(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_003')
mv.remove_image()
assert_that(f,
raises(VNXMirrorImageNotFoundError, 'no secondary'))
@patch_cli
def test_fracture_primary_image(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.fracture_image('50:06:01:60:B6:60:23:8F')
assert_that(f, raises(VNXMirrorException, 'does not exist'))
@patch_cli
def test_fracture_image_success(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
# no error raised
mv.fracture_image()
@patch_cli
def test_sync_image_not_found(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.sync_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorException, 'does not exist'))
@patch_cli
def test_sync_image_failed(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.sync_image()
assert_that(f, raises(VNXMirrorException, 'already synchronized'))
@patch_cli
def test_promote_image_not_found(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.promote_image('50:06:01:60:88:60:05:FF')
assert_that(f, raises(VNXMirrorException, 'does not exist'))
@patch_cli
def test_promote_non_local_image(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.promote_image()
assert_that(f, raises(VNXMirrorException,
'promotion wasn\'t local'))
@patch_cli
def test_delete_mirror_not_found_error(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'mv8')
mv.delete()
assert_that(f, raises(VNXMirrorException, 'mirror does not exist'))
@patch_cli
def test_delete_mirror_has_secondary(self):
def f():
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_005')
mv.delete()
assert_that(f, raises(VNXMirrorException,
'mirror with secondary images'))
@patch_cli
def test_force_delete_mirror_has_secondary(self):
mv = VNXMirrorViewAsync.get(t_cli(), 'testdr_006')
# no error raised
mv.delete(force=True)
class VNXMirrorGroupAsyncTest(TestCase):
@patch_cli
def test_create(self):
mg = VNXMirrorGroupAsync.create(t_cli(), name='test_group')
assert_that(mg, instance_of(VNXMirrorGroupAsync))
@patch_cli
def test_create_name_in_use(self):
def _inner():
VNXMirrorGroupAsync.create(t_cli(), name='test_group_in_use')
assert_that(_inner, raises(VNXMirrorException, 'same name as'))
@patch_cli
def test_create_and_add(self):
mirror = VNXMirrorViewAsync.get(t_cli(), name='testdr_004')
mg = VNXMirrorGroupAsync.create(t_cli(), name='petermg1',
mirror=mirror)
assert_that(mg, instance_of(VNXMirrorGroupAsync))
@patch_cli
def test_get_single(self):
mg = VNXMirrorGroupAsync.get(t_cli(), name='petermg')
assert_that(mg, instance_of(VNXMirrorGroupAsync))
assert_that(mg.name, equal_to('petermg'))
assert_that(mg.gid, equal_to('50:06:01:60:B6:60:23:8F:03:00:00:00'))
assert_that(mg.description, equal_to(''))
assert_that(mg.state, equal_to('Synchronized'))
assert_that(mg.role, equal_to('Primary'))
assert_that(mg.condition, equal_to('Normal'))
assert_that(mg.policy, equal_to(VNXMirrorGroupRecoveryPolicy.AUTO))
assert_that(len(mg.group_mirrors), equal_to(1))
@patch_cli
def test_get_all(self):
mg_list = VNXMirrorGroupAsync.get(t_cli())
assert_that(len(mg_list), equal_to(2))
assert_that(mg_list, instance_of(VNXMirrorGroupAsyncList))
@patch_cli
def test_promote_group(self):
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg2')
mg1.promote_group()
@patch_cli
def test_fracture_group(self):
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg1')
mg1.fracture_group()
@patch_cli
def test_add_to_group(self):
mirror = VNXMirrorViewAsync.get(t_cli(), name='testdr_004')
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg1')
mg1.add_mirror(mirror)
@patch_cli(output='mirror_-async_-addtogroup_-name_petermg1_'
'-mirrorname_testdr_004_ALREADYMEMBER.txt')
def test_add_to_group_existed(self):
mirror = VNXMirrorViewAsync.get(t_cli(), name='testdr_004')
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg1')
def _inner():
mg1.add_mirror(mirror)
assert_that(_inner, raises(VNXMirrorGroupAlreadyMemberError))
@patch_cli
def test_remove_from_group(self):
mirror = VNXMirrorGroupAsync.get(t_cli(), name='testdr_004')
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg1')
mg1.remove_mirror(mirror)
@patch_cli
def test_remove_from_group_already_removed(self):
mirror = VNXMirrorGroupAsync.get(t_cli(), name='testdr_003')
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg1')
def _inner():
mg1.remove_mirror(mirror)
assert_that(_inner, raises(VNXMirrorGroupMirrorNotMemberError))
@patch_cli
def test_sync_group(self):
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg')
mg1.sync_group()
@patch_cli
def test_delete_non_empty_group(self):
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='petermg')
def _inner():
mg1.delete()
assert_that(_inner, raises(VNXMirrorException, 'still has members'))
@patch_cli
def test_delete_group(self):
mg1 = VNXMirrorGroupAsync.get(t_cli(), name='test_group')
mg1.delete()
|
apache-2.0
| 2,035,904,516,088,921,300
| 34.047619
| 79
| 0.619268
| false
| 3.126925
| true
| false
| false
|
crawfordsm/zSALT
|
zsalt/extract.py
|
1
|
5452
|
#!/usr/bin/env python
# Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. See LICENSE file for more detail. #
"""
SPECEXTRACT extracts a 1-D spectrum from a 2-D data file.
Author Version Date
-----------------------------------------------
S. M. Crawford (SAAO) 1.0 15 Nov 2010
TODO
----
1. The task still needs to be written
LIMITATIONS
-----------
"""
# Ensure python 2.5 compatibility
from __future__ import with_statement
import os
import sys
import time
import numpy as np
from astropy.io import fits
from PySpectrograph.Spectra import Spectrum
from PySpectrograph.Spectra import apext
from PySpectrograph.Spectra import findobj
debug = True
def extract(hdu, ext=1, method='normal', section=None,
            minsize=3.0, thresh=3.0, convert=True):
"""For a given image, extract a 1D spectra from the image
and write the spectra to the output file
"""
ap_list = []
i = ext
if hdu[i].name == 'SCI':
# set up the data, variance, and bad pixel frames
# first step is to find the region to extract
data_arr = hdu[i].data
        try:
            var_arr = hdu[hdu[i].header['VAREXT']].data
        except KeyError:
            var_arr = None
        try:
            bpm_arr = hdu[hdu[i].header['BPMEXT']].data
        except KeyError:
            bpm_arr = None
xarr = np.arange(len(data_arr[0]))
# convert using the WCS information
try:
w0 = hdu[i].header['CRVAL1']
dw = hdu[i].header['CD1_1']
except Exception as e:
msg = 'Error on Ext %i: %s' % (i, e)
raise Exception(msg)
warr = w0 + dw * xarr
# convert from air to vacuum
if convert:
warr = Spectrum.air2vac(warr)
# set up the sections in case of findobj
if section is None:
section = findobj.findObjects(
data_arr,
method='median',
specaxis=1,
minsize=minsize,
thresh=thresh,
niter=5)
# extract all of the regions
for sec in section:
ap = apext.apext(warr, data_arr, ivar=var_arr)
y1, y2 = sec
ap.flatten(y1, y2)
ap_list.append(ap)
return ap_list
def write_extract(ofile, ap_list, outformat='ascii', fvar=None, clobber=False):
"""Write out to either a txt file or fits file depending on the extension
of ofile
"""
if outformat == 'FITS':
write_extract_fits(ofile, ap_list, clobber)
elif outformat == 'ascii':
write_extract_text(ofile, ap_list, clobber)
else:
msg = '%s is not a supported output format' % outformat
raise Exception(msg)
return
def write_extract_text(ofile, ap_list, clobber=False):
"""Write out the extracted spectrum to a text file. If the file already
exists, this will not overwrite it. The first
For each spectrum in ap_list, it will add a columns onto the output file
so that the first column is always wavelength, the second column is
flux, and the third column is sigma, and then repeat the flux and sigma
columns
ofile: Output file to write
ap_list: List of extracted spectrum
clobber: delete ofile if it already exists
"""
if os.path.isfile(ofile) and not clobber:
return
    # open the output text file (builtin open; the saltio helper used in
    # the original pipeline is not imported in this module)
    dout = open(ofile, 'w')
# first extract warr, assume it is the same for all frames
warr = ap_list[0].wave
# write out the spectrum
for i in range(len(warr)):
outstr = '%7.3f ' % warr[i]
for ap in ap_list:
flux = ap.ldata[i]
            try:
                fvar = abs(ap.lvar[i]) ** 0.5
            except (TypeError, IndexError):
                fvar = 1
outstr += "%7.3f %7.3f " % (flux, fvar)
outstr += '\n'
dout.write(outstr)
dout.close()
return
def write_extract_fits(ofile, ap_list, clobber=False):
"""Write out the extracted spectrum to a FITS table. If the file already
exists, this will not overwrite it.
For each spectrum in ap_list, it will add another extension to the
fits file. Each extension will have the first column as wavelength,
the second column as counts, and the third column as sigma on the
counts.
ofile: Output file to write
ap_list: List of extracted spectrum
clobber: delete ofile if it already exists
"""
    # delete the file if clobbering is requested
    if os.path.isfile(ofile) and clobber:
        os.remove(ofile)
# create the primary array
    hdu = fits.PrimaryHDU()
    hdulist = fits.HDUList([hdu])
# create the columns and the
for ap in ap_list:
fvar = abs(ap.lvar) ** 0.5
# create the columns
        col1 = fits.Column(
            name='wavelength',
            format='D',
            unit='Angstroms',
            array=ap.wave)
        col2 = fits.Column(
            name='counts',
            format='D',
            unit='Counts',
            array=ap.ldata)
        col3 = fits.Column(name='counts_err', format='D', array=fvar)
        # add the columns to a new binary table extension
        tbhdu = fits.BinTableHDU.from_columns([col1, col2, col3])
hdulist.append(tbhdu)
# write it out
hdulist.writeto(ofile)
return
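# Hedged usage sketch (file names are illustrative; assumes a reduced 2-D
# spectral image with a 'SCI' extension, as the functions above expect):
#
#   hdu = fits.open('reduced_spectrum_2d.fits')
#   ap_list = extract(hdu, ext=1, section=None, convert=True)
#   write_extract('spectrum.txt', ap_list, outformat='ascii', clobber=True)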
|
bsd-3-clause
| 6,011,496,125,247,817
| 26.26
| 79
| 0.570983
| false
| 3.656606
| false
| false
| false
|
atkvo/masters-bot
|
src/autobot/src/pidControl.py
|
1
|
1392
|
#!/usr/bin/env python
import rospy
from autobot.msg import drive_param
from autobot.msg import pid_input
from std_msgs.msg import String
import math
kp = 14.0 * 3
kd = 0.09 * 10  # derivative gain: damps rapid changes in the error
servo_offset = 18.5
prev_error = 0.0
vel_input = 0.0
mode = 'wall'
motorPub = rospy.Publisher('drive_parameters', drive_param, queue_size=10)
def control(data):
global kp
global kd
global servo_offset
global prev_error
global vel_input
global mode
driveParam = drive_param()
driveParam.velocity = data.pid_vel
if mode == 'wall':
pid_error = data.pid_error
error = pid_error * kp
errordot = kd * (pid_error - prev_error)
angle = error + errordot
if angle > 100:
angle = 100
elif angle < -100:
angle = -100
prev_error = pid_error
print 'pid_error {}\nangle {}'.format(pid_error, angle)
driveParam.angle = angle
elif mode == 'corner':
print 'corner mode, angle 100'
driveParam.angle = 100
motorPub.publish(driveParam)
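# Worked PD example (values from the gains above, for illustration only):
# with kp = 42 and kd = 0.9, an error of 0.5 following a previous error of
# 0.4 gives angle = 42*0.5 + 0.9*(0.5 - 0.4) = 21.09, well inside the
# [-100, 100] clamp applied above.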
def update_mode(_mode):
global mode
mode = _mode.data
if __name__ == '__main__':
print("Listening to error for PID")
rospy.init_node('pid_controller', anonymous=True)
rospy.Subscriber("error", pid_input, control)
rospy.Subscriber("mode", String, update_mode)
rospy.spin()
|
mit
| -3,342,215,794,730,230,300
| 19.776119
| 74
| 0.617816
| false
| 3.532995
| false
| false
| false
|
Telestream/telestream-cloud-python-sdk
|
telestream_cloud_qc_sdk/telestream_cloud_qc/models/hdr_standard_type.py
|
1
|
2817
|
# coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: cloudsupport@telestream.net
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class HdrStandardType(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
allowed enum values
"""
GENERICHDR = "GenericHdr"
HDR10 = "Hdr10"
ARIBB67 = "AribB67"
allowable_values = [GENERICHDR, HDR10, ARIBB67] # noqa: E501
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
}
attribute_map = {
}
def __init__(self, local_vars_configuration=None): # noqa: E501
"""HdrStandardType - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, HdrStandardType):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, HdrStandardType):
return True
return self.to_dict() != other.to_dict()
|
mit
| 5,876,936,412,095,734,000
| 26.617647
| 74
| 0.556621
| false
| 4.053237
| true
| false
| false
|
ppyordanov/HCI_4_Future_Cities
|
Server/src/virtualenv/Lib/site-packages/setuptools/command/install_egg_info.py
|
1
|
3830
|
from setuptools import Command
from setuptools.archive_util import unpack_archive
from distutils import log, dir_util
import os, pkg_resources
class install_egg_info(Command):
"""Install an .egg-info directory for the package"""
description = "Install an .egg-info directory for the package"
user_options = [
('install-dir=', 'd', "directory to install to"),
]
def initialize_options(self):
self.install_dir = None
def finalize_options(self):
self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
ei_cmd = self.get_finalized_command("egg_info")
basename = pkg_resources.Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version
).egg_name() + '.egg-info'
self.source = ei_cmd.egg_info
self.target = os.path.join(self.install_dir, basename)
self.outputs = [self.target]
def run(self):
self.run_command('egg_info')
target = self.target
if os.path.isdir(self.target) and not os.path.islink(self.target):
dir_util.remove_tree(self.target, dry_run=self.dry_run)
elif os.path.exists(self.target):
self.execute(os.unlink, (self.target,), "Removing " + self.target)
if not self.dry_run:
pkg_resources.ensure_directory(self.target)
self.execute(self.copytree, (),
"Copying %s to %s" % (self.source, self.target)
)
self.install_namespaces()
def get_outputs(self):
return self.outputs
def copytree(self):
# Copy the .egg-info tree to site-packages
def skimmer(src, dst):
# filter out source-control directories; note that 'src' is always
# a '/'-separated path, regardless of platform. 'dst' is a
# platform-specific path.
for skip in '.svn/', 'CVS/':
if src.startswith(skip) or '/' + skip in src:
return None
self.outputs.append(dst)
log.debug("Copying %s to %s", src, dst)
return dst
unpack_archive(self.source, self.target, skimmer)
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp: return
filename, ext = os.path.splitext(self.target)
        filename += '-nspkg.pth'
self.outputs.append(filename)
log.info("Installing %s", filename)
if not self.dry_run:
f = open(filename, 'wt')
for pkg in nsp:
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
trailer = '\n'
if '.' in pkg:
trailer = (
"; m and setattr(sys.modules[%r], %r, m)\n"
% ('.'.join(pth[:-1]), pth[-1])
)
f.write(
"import sys,types,os; "
"p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
"*%(pth)r); "
"ie = os.path.exists(os.path.join(p,'__init__.py')); "
"m = not ie and "
"sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); "
"mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
"(p not in mp) and mp.append(p)%(trailer)s"
% locals()
)
f.close()
def _get_all_ns_packages(self):
nsp = {}
for pkg in self.distribution.namespace_packages or []:
pkg = pkg.split('.')
while pkg:
nsp['.'.join(pkg)] = 1
pkg.pop()
nsp = list(nsp)
nsp.sort() # set up shorter names first
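        # e.g. namespace_packages = ['a.b.c'] expands to the sorted list
        # ['a', 'a.b', 'a.b.c'], so parent packages come before children.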
return nsp
|
mit
| 105,027,369,872,871,100
| 35.47619
| 81
| 0.518799
| false
| 3.920164
| false
| false
| false
|
wysiwyng/sr13
|
configReader.py
|
1
|
4083
|
# Copyright (C) 2015 wysiwyng
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os, ConfigParser
class ConfigReader(object):
def __init__(self, path, debug = None):
self.debug = debug
self.debugMsg("opening config file and creating configParser")
self.parser = ConfigParser.ConfigParser()
cFile = open(os.path.join(path, "mai-bot.cfg"))
self.debugMsg("config file open, checking config file")
self.parser.readfp(cFile)
if not self.parser.has_section("mai-bot-cfg"):
raise ValueError("invalid config file")
self.debugMsg("config file is valid, ready to read values")
cFile.close()
def getKey(self, key):
return self.parser.get("mai-bot-cfg", key)
def getMaxSpeed(self):
if self.parser.has_option("mai-bot-cfg", "max-speed"):
return self.parser.getint("mai-bot-cfg", "max-speed")
else:
return 255
def getDistModifier(self):
if self.parser.has_option("mai-bot-cfg", "dist-modifier"):
return self.parser.getint("mai-bot-cfg", "dist-modifier")
else:
return 10
def getDistModifierBegin(self):
if self.parser.has_option("mai-bot-cfg", "dist-mod-begin"):
return self.parser.getint("mai-bot-cfg", "dist-mod-begin")
else:
return 80
def getCamResX(self):
if self.parser.has_option("mai-bot-cfg", "cam-res-x"):
return self.parser.getint("mai-bot-cfg", "cam-res-x")
else:
return 800
def getCamResY(self):
if self.parser.has_option("mai-bot-cfg", "cam-res-y"):
return self.parser.getint("mai-bot-cfg", "cam-res-y")
else:
return 600
def getMaxTries(self):
if self.parser.has_option("mai-bot-cfg", "max-tries"):
return self.parser.getint("mai-bot-cfg", "max-tries")
else:
return 2
def getDirection(self):
if self.parser.has_option("mai-bot-cfg", "direction"):
return self.parser.get("mai-bot-cfg", "direction")
else:
return "left"
def getCommands(self):
if self.parser.has_option("mai-bot-cfg", "commands"):
return str(self.parser.get("mai-bot-cfg", "commands")).split(",")
else:
return ["nearest", "middle-left", "far-left"]
def getTokenOrder(self):
if self.parser.has_option("mai-bot-cfg", "token-order"):
return str(self.parser.get("mai-bot-cfg", "token-order")).split(",")
else:
return ["0","1","2","3","4","5"]
def getDebug(self):
if self.parser.has_option("mai-bot-cfg", "debug"):
return self.parser.getboolean("mai-bot-cfg", "debug")
else:
return False
#def getStart(self):
# if self.parser.has_option("mai-bot-cfg", "start"):
# return self.parser.get("mai-bot-cfg", "start")
# else:
# return "nearest"
#def getMaxHeight(self):
# if self.parser.has_option("mai-bot-cfg", "max-height"):
# return self.parser.getint("mai-bot-cfg", "max-height")
# else:
# return 4
def debugMsg(self, message):
if self.debug != None:
self.debug.printMsg(message, self)
def __str__(self):
return "ConfigReader"
|
gpl-3.0
| -65,932,045,673,835,944
| 35.783784
| 80
| 0.582905
| false
| 3.652057
| true
| false
| false
|
nefarioustim/parker
|
test/test_crawlpage.py
|
1
|
2207
|
# -*- coding: utf-8 -*-
"""Test the CrawlPage object."""
import pytest
from parker import parser, crawlpage, parsedpage
from test_client import client_fixture_crawl, client_fixture
from test_page import page_fixture_crawl, page_fixture
import utils
TEST_URI = "http://www.staples.co.uk/"
TEST_CONSUME_SELECTOR = "#PageInner .skuPage"
EXPECTED_URI_COUNT = 300
EXPECTED_URIS = set(utils.load_stub_as_json('expecteduris.json'))
@pytest.fixture(scope="function")
def crawlpage_fixture(page_fixture_crawl):
"""Test fixture to ensure correct mocking for crawlpage."""
return crawlpage.get_instance(
page_fixture_crawl
)
def test_get_instance_creates_crawlpage_object(page_fixture_crawl):
"""Test crawlpage.get_instance creates a CrawlPage object."""
test_crawlpage = crawlpage.get_instance(
page_fixture_crawl
)
expected_repr = "<class 'parker.crawlpage.CrawlPage'>(%s)" % (
TEST_URI
)
assert isinstance(test_crawlpage, crawlpage.CrawlPage)
assert isinstance(test_crawlpage.parsedpage, parsedpage.ParsedPage)
assert test_crawlpage.__repr__() == expected_repr
def test_get_instance_raises_typeerror_unexpected_parameter_type():
"""Test crawlpage.get_instance throws TypeError on unexpected param."""
with pytest.raises(TypeError):
test_crawlpage = crawlpage.get_instance(None)
def test_get_uris_returns_list_of_internal_uris(crawlpage_fixture):
"""Test crawlpage.get_uris returns a set of internal URIs."""
test_crawlpage = crawlpage_fixture
uris = test_crawlpage.get_uris(TEST_URI)
assert isinstance(uris, set)
assert len(uris) == EXPECTED_URI_COUNT
assert uris == EXPECTED_URIS
def test_has_selector_returns_false_if_not(crawlpage_fixture):
"""Test crawlpage.has_selector returns false if selector not in page."""
test_crawlpage = crawlpage_fixture
assert not test_crawlpage.has_selector(TEST_CONSUME_SELECTOR)
def test_has_selector_returns_true_if_has(page_fixture):
"""Test crawlpage.has_selector returns true if selector in page."""
test_crawlpage = crawlpage.get_instance(
page_fixture
)
assert test_crawlpage.has_selector(TEST_CONSUME_SELECTOR)
|
gpl-3.0
| -7,499,028,635,234,379,000
| 31.940299
| 76
| 0.7227
| false
| 3.349014
| true
| false
| false
|
panjia1983/channel_backward
|
exp/test.py
|
1
|
2770
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--interactive", action="store_true")
args = parser.parse_args()
import openravepy
import trajoptpy
import json
env = openravepy.Environment()
env.StopSimulation()
env.Load('test.xml')
trajoptpy.SetInteractive(args.interactive) # pause every iteration, until you press 'p'. Press escape to disable further plotting
robot = env.GetRobots()[0]
robot.SetDOFValues([0, 0.7, 0, 0, 0, 0, 0], robot.GetManipulator('leftarm').GetArmIndices())
robot.SetDOFValues([0.5], [22])
robot.SetDOFValues([-1, 1.18, -0.44, 0, 0, 0, 0], robot.GetManipulator('rightarm').GetArmIndices())
joint_target = [-0.21, -0.075, 0, 0, 0, 0, 0]
dynamic_objects = ["mug-shelf", "mug-shelf1", "mug-shelf2", "PR2"]
static_objects = ["bookshelf"]
cost_params = []
for name in dynamic_objects:
cost_params.append({
"name" : name,
"coeffs" : [20],
"dist_pen" : [0.025],
})
for name in static_objects:
cost_params.append({
"name" : name,
"coeffs" : [20],
"dist_pen" : [0.025],
})
request = {
"basic_info": {
"n_steps": 20,
"manip": "leftarm",
"start_fixed": True,
},
"costs": [
{
"type" : "joint_vel",
"params": {"coeffs" : [1]},
},
{
"type": "continuous_collision",
"name": "cont_collision",
"params": {
"object_costs": cost_params,
}
}
],
"constraints": [
{
"type": "joint",
"params": {"vals": joint_target},
},
],
"init_info": {
"type": "straight_line",
"endpoint": joint_target,
}
}
#robot.SetDOFValues(
# [
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0.7, 0,
# 0, 0, 0,
# 0, 0.5, 0,
# 0, 0, 0,
# -1, 1.18, -0.44,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0
# ]
#)
#robot.SetDOFValues(
# [0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0,
# -0.21, -0.075, 0,
# 0, 0.0, 0,
# 0, 0.5, 0,
# 0, 0, 0,
# -1, 1.18, -0.44,
# 0, 0, 0,
# 0, 0, 0,
# 0, 0, 0]
#)
s = json.dumps(request) # convert dictionary into json-formatted string
prob = trajoptpy.ConstructProblem(s, env) # create object that stores optimization problem
result = trajoptpy.OptimizeProblem(prob) # do optimization
print result
from trajoptpy.check_traj import traj_is_safe
prob.SetRobotActiveDOFs() # set robot DOFs to DOFs in optimization problem
assert traj_is_safe(result.GetTraj(), robot) # Check that trajectory is collision free
|
bsd-2-clause
| 5,083,429,379,926,947,000
| 22.87931
| 129
| 0.513718
| false
| 2.864529
| false
| false
| false
|
cdeil/naima
|
examples/CrabNebula_proton.py
|
1
|
2205
|
#!/usr/bin/env python
import numpy as np
import naima
from astropy import units as u
from astropy.io import ascii
## Read data
data=ascii.read('CrabNebula_HESS_2006.dat')
## Set initial parameters
p0=np.array((474,2.34,np.log10(80.),))
labels=['norm','index','log10(cutoff)']
## Model definition
ph_energy = u.Quantity(data['energy'])
# Peak gamma-ray production happens at ~0.1*Ep, so the geometric mean of the
# photon energies below corresponds to Ep = 10x that energy.
# If a cutoff is present, this should be reduced to reduce parameter correlation
e_0 = 5.*np.sqrt(ph_energy[0]*ph_energy[-1])
from naima.models import PionDecay, ExponentialCutoffPowerLaw
ECPL = ExponentialCutoffPowerLaw(1 / u.TeV, e_0, 2, 60. * u.TeV)
PP = PionDecay(ECPL)
distance = 2.0 * u.kpc
Epmin = ph_energy[0]*1e-2
Epmax = ph_energy[-1]*1e3
proton_energy = np.logspace(np.log10(Epmin.value),
np.log10(Epmax.value),50)*ph_energy.unit
def ppgamma(pars,data):
PP.particle_distribution.amplitude = pars[0] / u.TeV
PP.particle_distribution.alpha = pars[1]
PP.particle_distribution.e_cutoff = (10**pars[2])*u.TeV
# convert to same units as observed differential spectrum
model = PP.flux(data,distance).to('1/(s cm2 TeV)')
# Save a realization of the particle distribution to the metadata blob
proton_dist= PP.particle_distribution(proton_energy)
return model, model, (proton_energy, proton_dist)
## Prior definition
def lnprior(pars):
"""
Return probability of parameter values according to prior knowledge.
Parameter limits should be done here through uniform prior ditributions
"""
logprob = naima.uniform_prior(pars[0],0.,np.inf) \
+ naima.uniform_prior(pars[1],-1,5)
return logprob
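# e.g. lnprior([474, 2.34, 1.9]) evaluates to 0.0, while a negative norm such
# as lnprior([-1, 2.34, 1.9]) returns -inf and that proposal is rejected.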
if __name__=='__main__':
## Run sampler
sampler,pos = naima.run_sampler(data_table=data, p0=p0, labels=labels,
model=ppgamma, prior=lnprior, nwalkers=16, nburn=50, nrun=10,
threads=4)
## Save sampler
    from astropy.extern.six.moves import cPickle
sampler.pool=None
cPickle.dump(sampler,open('CrabNebula_proton_sampler.pickle','wb'))
## Diagnostic plots
naima.generate_diagnostic_plots('CrabNebula_proton',sampler,sed=True)
|
bsd-3-clause
| -4,077,216,500,975,527,400
| 26.222222
| 83
| 0.696599
| false
| 3.008186
| false
| false
| false
|
wiredfool/fmod
|
fmod/controllers/moderate.py
|
1
|
2153
|
import logging
from pylons import request, response, session, url, tmpl_context as c
from pylons.controllers.util import abort, redirect
from fmod.lib.base import BaseController, render
from fmod import model
log = logging.getLogger(__name__)
class ModerateController(BaseController):
requires_auth=True
def __before__(self):
BaseController.__before__(self)
# logged in...
c.username = session['user']
if not session['mod']:
redirect(url('/ping/index'))
#if not request.method=='GET': #UNDONE POST
# throw("Error - must GET")
def _get_decision(self, id, flag):
        if id is None:
            raise ValueError("Error - Need an image id")
d = model.Decision()
setattr(d, flag, True)
d.image = id
d.username = c.username
d.save()
d.update_pings()
d.commit()
return d
def _remove(self, d, tag, rule=None):
img = d.getImage()
if img.in_pool():
tags = [tag, 'removed-from-strobist-pool']
if rule:
tags.append('see-rule-%s'%rule)
img.tag(tags, session['token'])
ret = img.remove_from_group(session['token'])
if ret:
return "Success"
else:
return "Could not remove from pool"
else:
return "Not in pool"
def defer(self, id=None):
#this is a noop.
return "Success"
def ok(self, id=None):
try:
self._get_decision(id, 'fl_ok')
return "Success"
except Exception, msg:
return msg
def ns(self, id=None):
try:
d = self._get_decision(id, 'fl_ns')
return self._remove(d, 'no-off-camera-flash',1)
except Exception, msg:
return msg
def nsi(self, id=None):
try:
d = self._get_decision(id, 'fl_nsi')
return self._remove(d, 'no-strobist-info',2)
except Exception, msg:
return msg
def isi(self, id=None):
try:
d = self._get_decision(id, 'fl_isi')
return self._remove(d, 'incomplete-strobist-info',2)
except Exception, msg:
return msg
def nsfw(self, id=None):
try:
d = self._get_decision(id, 'fl_nsfw')
return self._remove(d, 'NSFW',3)
except Exception, msg:
return msg
def bump(self, id=None):
try:
d = self._get_decision(id, 'fl_bump')
return self._remove(d, 'no-bumping')
except Exception, msg:
return msg
|
gpl-2.0
| -7,267,325,370,894,829,000
| 20.747475
| 69
| 0.645611
| false
| 2.78886
| false
| false
| false
|
schleichdi2/OpenNfr_E2_Gui-6.0
|
lib/python/Screens/InputBox.py
|
1
|
5801
|
from enigma import eRCInput, getPrevAsciiCode
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Components.ActionMap import NumberActionMap
from Components.Label import Label
from Components.Input import Input
from Components.Pixmap import Pixmap
from Tools.BoundFunction import boundFunction
from Tools.Notifications import AddPopup
from time import time
from Components.config import config
class InputBox(Screen):
def __init__(self, session, title = "", windowTitle = _("Input"), useableChars = None, **kwargs):
Screen.__init__(self, session)
self["text"] = Label(title)
self["input"] = Input(**kwargs)
self["VKeyIcon"] = Pixmap()
self["help_text"] = Label(_("use virtual keyboard for text input"))
self.onShown.append(boundFunction(self.setTitle, windowTitle))
if useableChars is not None:
self["input"].setUseableChars(useableChars)
self["actions"] = NumberActionMap(["WizardActions", "InputBoxActions", "InputAsciiActions", "KeyboardInputActions"],
{
"gotAsciiCode": self.gotAsciiCode,
"ok": self.go,
"back": self.cancel,
"left": self.keyLeft,
"right": self.keyRight,
"home": self.keyHome,
"end": self.keyEnd,
"deleteForward": self.keyDelete,
"deleteBackward": self.keyBackspace,
"tab": self.keyTab,
"toggleOverwrite": self.keyInsert,
"showVirtualKeyboard": self.virtualKeyBoard,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal
}, -1)
if self["input"].type == Input.TEXT:
if config.misc.remotecontrol_text_support.value:
self.onExecBegin.append(self.setKeyboardModeNone)
else:
self.onExecBegin.append(self.setKeyboardModeAscii)
else:
self.onExecBegin.append(self.setKeyboardModeNone)
def virtualKeyBoard(self):
self.input_text = self["input"].getText()
input_title = self["text"].getText()
self.session.openWithCallback(self.virtualKeyBoardCB, VirtualKeyBoard, title = input_title, text = self.input_text)
def virtualKeyBoardCB(self, res):
if res:
self.input_text = res
self["input"].setText(self.input_text)
self["input"].end()
def gotAsciiCode(self):
self["input"].handleAscii(getPrevAsciiCode())
def keyLeft(self):
self["input"].left()
def keyRight(self):
self["input"].right()
def keyNumberGlobal(self, number):
self["input"].number(number)
def keyDelete(self):
self["input"].delete()
def go(self):
self.close(self["input"].getText())
def cancel(self):
self.close(None)
def keyHome(self):
self["input"].home()
def keyEnd(self):
self["input"].end()
def keyBackspace(self):
self["input"].deleteBackward()
def keyTab(self):
self["input"].tab()
def keyInsert(self):
self["input"].toggleOverwrite()
class PinInput(InputBox):
def __init__(self, session, service = "", triesEntry = None, pinList = [], popup = False, simple=True, *args, **kwargs):
InputBox.__init__(self, session = session, text = " ", maxSize = True, type = Input.PIN, *args, **kwargs)
self.waitTime = 15
self.triesEntry = triesEntry
self.pinList = pinList
self["service"] = Label(service)
if service and simple:
self.skinName = "PinInputPopup"
if self.getTries() == 0:
if (self.triesEntry.time.value + (self.waitTime * 60)) > time():
remaining = (self.triesEntry.time.value + (self.waitTime * 60)) - time()
remainingMinutes = int(remaining / 60)
remainingSeconds = int(remaining % 60)
messageText = _("You have to wait %s!") % (str(remainingMinutes) + " " + _("minutes") + ", " + str(remainingSeconds) + " " + _("seconds"))
if service and simple:
AddPopup(messageText, type = MessageBox.TYPE_ERROR, timeout = 3)
self.closePinCancel()
else:
self.onFirstExecBegin.append(boundFunction(self.session.openWithCallback, self.closePinCancel, MessageBox, messageText, MessageBox.TYPE_ERROR, timeout = 3))
else:
self.setTries(3)
self["tries"] = Label("")
self.onShown.append(self.showTries)
def gotAsciiCode(self):
if self["input"].currPos == len(self["input"]) - 1:
InputBox.gotAsciiCode(self)
self.go()
else:
InputBox.gotAsciiCode(self)
def keyNumberGlobal(self, number):
if self["input"].currPos == len(self["input"]) - 1:
InputBox.keyNumberGlobal(self, number)
self.go()
else:
InputBox.keyNumberGlobal(self, number)
def checkPin(self, pin):
if pin is not None and " " not in pin and int(pin) in self.pinList:
return True
return False
def go(self):
if self.pinList:
self.triesEntry.time.value = int(time())
self.triesEntry.time.save()
if self.checkPin(self["input"].getText()):
self.setTries(3)
self.closePinCorrect()
else:
self.keyHome()
self.decTries()
if self.getTries() == 0:
self.closePinWrong()
else:
pin = self["input"].getText()
if pin and pin.isdigit():
self.close(int(pin))
else:
self.close(None)
def closePinWrong(self, *args):
print "args:", args
self.close(False)
def closePinCorrect(self, *args):
self.setTries(3)
self.close(True)
def closePinCancel(self, *args):
self.close(None)
def cancel(self):
self.closePinCancel()
def getTries(self):
return self.triesEntry and self.triesEntry.tries.value
def decTries(self):
self.setTries(self.triesEntry.tries.value - 1)
self.showTries()
def setTries(self, tries):
self.triesEntry.tries.value = tries
self.triesEntry.tries.save()
def showTries(self):
self["tries"].setText(self.triesEntry and _("Tries left:") + " " + str(self.getTries() or ""))
def keyRight(self):
pass
|
gpl-2.0
| 7,238,666,065,602,169,000
| 27.576355
| 161
| 0.693156
| false
| 3.085638
| false
| false
| false
|
praekelt/molo
|
molo/core/backends.py
|
1
|
2724
|
from django.contrib.auth.models import Group
from django_cas_ng.backends import CASBackend
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.core.exceptions import PermissionDenied
from molo.profiles.models import UserProfile
UserModel = get_user_model()
class MoloModelBackend(ModelBackend):
def authenticate(
self, request, username=None, password=None, *args, **kwargs):
if username is None:
username = kwargs.get(UserModel.USERNAME_FIELD)
if request is not None:
try:
user = UserModel._default_manager.get_by_natural_key(username)
if user.is_superuser:
UserProfile.objects.get(user=user)
else:
UserProfile.objects.get(user=user, site=request.site)
except UserProfile.DoesNotExist:
raise PermissionDenied
except UserModel.DoesNotExist:
UserModel().set_password(password)
return super(MoloModelBackend, self).authenticate(
request=request, username=username, password=password, **kwargs)
class MoloCASBackend(CASBackend):
def authenticate(self, request, ticket, service):
user = super(
MoloCASBackend, self).authenticate(request, ticket, service)
if user is None:
return None
if 'attributes' in request.session \
and 'has_perm' in request.session['attributes']\
and request.session['attributes']['has_perm'] == 'True':
if request.session['attributes']['is_admin'] == 'True':
user.email = request.session['attributes']['email']
user.is_staff = True
user.is_superuser = True
user.save()
else:
wagtail_login_only_group = Group.objects.filter(
name='Wagtail Login Only').first()
if wagtail_login_only_group and not user.groups.exists():
user.groups.add(wagtail_login_only_group)
elif not user.profile.admin_sites.filter(
pk=request.site.pk).exists():
return None
"""
TODO: Handle case where Moderator group does not exist.
We need to log this or find ways of notifying users that
the moderator group was removed or renamed.
There isn't much we can do about this case though.
"""
else:
user.is_staff = False
user.is_superuser = False
user.save()
return None
return user
|
bsd-2-clause
| -3,061,280,361,301,114,400
| 36.315068
| 78
| 0.585536
| false
| 4.762238
| false
| false
| false
|
lem9/weblate
|
weblate/gitexport/views.py
|
1
|
5514
|
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2017 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from base64 import b64decode
from email import message_from_string
import os.path
import subprocess
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.http.response import HttpResponseServerError, HttpResponse
from django.shortcuts import redirect
from django.utils.encoding import force_text
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
from weblate.trans.views.helper import get_subproject
from weblate.permissions.helpers import can_access_vcs
GIT_PATHS = [
'/usr/lib/git',
'/usr/lib/git-core',
]
def find_git_http_backend():
"""Find git http backend"""
if hasattr(find_git_http_backend, 'result'):
return find_git_http_backend.result
try:
        path = subprocess.check_output(
            ['git', '--exec-path']).decode('utf-8').strip()
if path:
GIT_PATHS.insert(0, path)
except OSError:
pass
for path in GIT_PATHS:
name = os.path.join(path, 'git-http-backend')
if os.path.exists(name):
find_git_http_backend.result = name
return name
def response_authenticate():
"""Return 401 response with authenticate header."""
response = HttpResponse(status=401)
response['WWW-Authenticate'] = 'Basic realm="Weblate Git access"'
return response
def authenticate(request, auth):
"""Perform authentication with HTTP Basic auth"""
auth = force_text(auth, encoding='iso-8859-1')
try:
method, data = auth.split(None, 1)
if method.lower() == 'basic':
username, code = b64decode(data).decode('iso-8859-1').split(':', 1)
try:
user = User.objects.get(
username=username,
auth_token__key=code
)
except User.DoesNotExist:
return False
if not user.is_active:
return False
request.user = user
return True
else:
return False
except (ValueError, TypeError):
return False
@never_cache
@csrf_exempt
def git_export(request, project, subproject, path):
"""Git HTTP server view.
Wrapper around git-http-backend to provide Git repositories export over
HTTP. Performs permission checks and hands over execution to the wrapper.
"""
# Probably browser access
if path == '':
return redirect(
'subproject',
project=project,
subproject=subproject,
permanent=False
)
# HTTP authentication
auth = request.META.get('HTTP_AUTHORIZATION', b'')
if auth and not authenticate(request, auth):
return response_authenticate()
# Permissions
try:
obj = get_subproject(request, project, subproject)
except Http404:
if not request.user.is_authenticated():
return response_authenticate()
raise
if not can_access_vcs(request.user, obj.project):
raise PermissionDenied('No VCS permissions')
return run_git_http(request, obj, path)
def run_git_http(request, obj, path):
"""Git HTTP backend execution wrapper."""
# Find Git HTTP backend
git_http_backend = find_git_http_backend()
if git_http_backend is None:
return HttpResponseServerError('git-http-backend not found')
# Invoke Git HTTP backend
process = subprocess.Popen(
[git_http_backend],
env={
'REQUEST_METHOD': request.method,
'PATH_TRANSLATED': os.path.join(obj.get_path(), path),
'GIT_HTTP_EXPORT_ALL': '1',
'CONTENT_TYPE': request.META.get('CONTENT_TYPE', ''),
'QUERY_STRING': request.META.get('QUERY_STRING', ''),
'HTTP_CONTENT_ENCODING': request.META.get(
'HTTP_CONTENT_ENCODING', ''
),
},
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
output, output_err = process.communicate(request.body)
retcode = process.poll()
# Log error
if output_err:
obj.log_error('git: {0}'.format(force_text(output_err)))
# Handle failure
if retcode:
return HttpResponseServerError(output_err)
headers, content = output.split(b'\r\n\r\n', 1)
message = message_from_string(headers.decode('utf-8'))
# Handle status in response
if 'status' in message:
return HttpResponse(
status=int(message['status'].split()[0])
)
# Send content
response = HttpResponse(
content_type=message['content-type']
)
response.write(content)
return response
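# For reference, git-http-backend speaks CGI: its stdout begins with header
# lines terminated by a blank line, e.g. (illustrative):
#
#   Status: 200 OK
#   Content-Type: application/x-git-upload-pack-advertisement
#
#   <binary payload>
#
# which is why run_git_http() splits the backend output on b'\r\n\r\n' above.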
|
gpl-3.0
| -5,599,721,127,871,912,000
| 29.114754
| 79
| 0.640537
| false
| 4.112687
| false
| false
| false
|
mahandra/recipes_video_conv
|
local_scripts/ipaddreslogfilenginx.py
|
1
|
2172
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
__author__ = 'mah'
__email__ = 'andrew.makhotin@gmail.com'
import os
import logging.handlers
import re
log = logging.getLogger('Logging for nginx ip filter')
log.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler = logging.handlers.RotatingFileHandler(os.path.splitext(os.path.basename(__file__))[0] + '.log',
maxBytes=(1048576 * 5),
backupCount=5)
handler.setFormatter(formatter)
consolehandler = logging.StreamHandler() # for stdout
consolehandler.setFormatter(formatter)
log.addHandler(consolehandler)
log.addHandler(handler)
espsip = [
'75.118.34.94',
'172.12.84.222',
'64.53.234.20',
'73.16.137.91',
'104.51.192.248',
'68.83.215.73',
'73.178.242.57',
'71.202.71.229',
'65.128.78.35',
'73.44.14.111',
'24.10.65.66',
'100.14.48.172',
'73.183.241.96',
'69.115.90.83',
'117.197.129.168',
'47.20.70.84',
'208.59.187.242',
'70.176.209.44',
'76.16.113.22',
'50.47.132.68',
'98.223.121.76',
'35.128.24.125',
'75.118.149.204',
'67.170.175.139',
'162.213.78.32',
'73.27.55.238',
'67.4.213.95',
'108.16.239.210',
'73.110.27.155',
'71.228.23.63',
'47.34.210.9',
'73.211.202.139',
'47.187.106.177',
'50.167.154.182',
'107.3.129.14',
'12.185.249.139',
'24.187.19.54',
'67.184.85.60',
'173.22.125.78',
'63.225.196.19',
'68.82.249.67',
'104.186.108.65',
'98.176.171.206',
'47.198.141.184',
'100.14.88.98',
'108.223.7.64',
'68.173.247.131',
'208.104.48.61',
'131.103.138.15',
'180.188.233.82',
'174.113.130.205',
'76.187.199.85',
]
def main():
with open('nginx_access_pf.log', 'r') as fd:
c = 1
        exist = []
for i in fd:
# print ':', i
for ip in espsip:
m = re.search(ip, i)
if m:
print i
exist.append(ip)
c += 1
# if c == 10:
# break
print 'exist:', exist
with open('existip', 'w') as f:
for i in exist:
f.write(i + '\n')
if __name__ == '__main__':
log.info('Start main')
main()
|
gpl-2.0
| 5,657,711,219,305,236,000
| 19.884615
| 104
| 0.546961
| false
| 2.510983
| false
| false
| false
|
ulikoehler/ODBPy
|
ODBPy/DrillTools.py
|
1
|
2203
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Read the structured text ODB++ drill tools file
"""
import gzip
from collections import namedtuple, defaultdict
import os.path
from enum import Enum
from .Utils import readFileLines
from .StructuredTextParser import read_structured_text
from .Structures import HolePlating
__all__ = ["DrillToolSet", "DrillTool", "DrillToolType", "parse_drill_tools", "read_drill_tools"]
DrillToolSet = namedtuple("DrillToolSet", ["metadata", "tools"])
DrillTool = namedtuple("DrillTool", ["num", "type", "tooltype", "size", "info"]) # size in mil
_drill_plating_map = {
"VIA": HolePlating.Via,
"NON_PLATED": HolePlating.NonPlated,
"PLATED": HolePlating.Plated
}
class DrillToolType(Enum):
"""Drill tool type, i.e the TYPE2 field of the tools file"""
Standard = 1
Photo = 2
Laser = 3
PressFit = 4
_drill_tool_type_map = {
"STANDARD": DrillToolType.Standard,
"PHOTO": DrillToolType.Photo,
"LASER": DrillToolType.Laser,
"PRESS_FIT": DrillToolType.PressFit
}
def structured_array_to_drill_tool(array):
if array.name not in ["TOOL", "TOOLS"]:
raise ValueError("Array {} does not have TOOLS name but {}".format(
array, array.name))
info = {
k: v for k, v in array.attributes.items()
# Remove keys which are used in the tool directly
if k not in ["NUM", "TYPE", "DRILL_SIZE", "TYPE2"]
}
return DrillTool(array.attributes["NUM"],
_drill_plating_map[array.attributes["TYPE"]],
_drill_tool_type_map[array.attributes["TYPE2"]],
array.attributes["DRILL_SIZE"], info)
def parse_drill_tools(structured_text):
"""Parse a DrillToolSet from a StructuredText set"""
metadata, arrays = structured_text
tools = (structured_array_to_drill_tool(array) for array in arrays)
toolmap = {
tool.num: tool for tool in tools
}
return DrillToolSet(metadata, toolmap)
def read_drill_tools(odbpath):
"Read the drill tools from a given ODB++ directory"
stext = read_structured_text(os.path.join(odbpath, "steps/pcb/layers/through_drill/tools"))
return parse_drill_tools(stext)
|
apache-2.0
| -3,222,016,735,727,269,400
| 32.892308
| 97
| 0.660463
| false
| 3.21137
| false
| false
| false
|
bdcht/crysp
|
crysp/mode.py
|
1
|
7354
|
# -*- coding: utf-8 -*-
# This code is part of crysp
# Copyright (C) 2013 Axel Tillequin (bdcht3@gmail.com)
# published under GPLv2 license
from crysp.padding import nopadding,pkcs7
from io import BytesIO
from crysp.bits import pack, unpack, Bits
# -----------------------------------------------------------------------------
# Mode of Operation Core class, default padding is nopadding.
class Mode(object):
def __init__(self,cipher,pad=nopadding):
self._cipher = cipher
self.pad = pad(l=cipher.blocksize)
@property
def len(self):
return self._cipher.blocksize//8
def iterblocks(self,M,**kargs):
for B in self.pad.iterblocks(M,**kargs):
yield B
# mandatory API:
def enc(self,M):
raise NotImplementedError
def dec(self,C):
raise NotImplementedError
# xor input byte strings (over min length):
def xorstr(self,a,b):
a = bytes(a)
b = bytes(b)
return bytes([x^y for (x,y) in zip(a,b)])
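    # e.g. xorstr(b'\x0f\x0f', b'\xff\x00') == b'\xf0\x0f'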
# -----------------------------------------------------------------------------
# Electronic Code Book, default padding is pkcs7
class ECB(Mode):
def __init__(self,cipher,pad=pkcs7):
super().__init__(cipher,pad)
# encryption mode
def enc(self,M):
C = []
for b in self.iterblocks(M):
C.append(self._cipher.enc(b))
return b''.join(C)
# decryption mode
def dec(self,C):
n,p = divmod(len(C),self.len)
assert p==0
P = BytesIO(C)
M = []
for b in range(n):
M.append(self._cipher.dec(P.read(self.len)))
return self.pad.remove(b''.join(M))
# -----------------------------------------------------------------------------
# Electronic Code Book with Cypher Text Stealing (nopadding)
class CTS_ECB(Mode):
def __init__(self,cipher,pad=nopadding):
super().__init__(cipher,pad)
# encryption mode
def enc(self,M):
n,p = divmod(len(M),self.len)
C = []
for b in self.iterblocks(M[:n*self.len]):
C.append(self._cipher.enc(b))
if p>0:
clast = C.pop()
            b = next(self.iterblocks(M[n*self.len:]))
C.append(self._cipher.enc(b+clast[p:]))
C.append(clast[0:p])
return b''.join(C)
# decryption mode
def dec(self,C):
n,p = divmod(len(C),self.len)
P = BytesIO(C)
M = []
for b in range(n):
M.append(self._cipher.dec(P.read(self.len)))
if p>0:
mlast = M.pop()
            M.append(self._cipher.dec(P.read(p) + mlast[p:]))
M.append(mlast[:p])
return b''.join(M)
# -----------------------------------------------------------------------------
# Cipher Block Chaining, default padding is pkcs7
class CBC(Mode):
def __init__(self,cipher,IV,pad=pkcs7):
super().__init__(cipher,pad)
assert len(IV)==self.len
self.IV = IV
# encryption mode
def enc(self,M):
C = [self.IV]
for b in self.iterblocks(M):
x = self.xorstr(b,C[-1])
C.append(self._cipher.enc(x))
return b''.join(C)
# decryption mode
def dec(self,C):
l = self.len
n,p = divmod(len(C),l)
assert p==0
M = []
while len(C)>l:
c = C[-l:]
C = C[:-l]
M.insert(0,self.xorstr(C[-l:],self._cipher.dec(c)))
return self.pad.remove(b''.join(M))
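# Hedged round-trip sketch (any object exposing blocksize/enc/dec works here;
# the Identity cipher below is a toy stand-in, not a real cipher):
#
#   class Identity:
#       blocksize = 64
#       def enc(self, b): return b
#       def dec(self, b): return b
#
#   cbc = CBC(Identity(), IV=b'\x00' * 8)
#   assert cbc.dec(cbc.enc(b'attack at dawn')) == b'attack at dawn'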
# -----------------------------------------------------------------------------
# Cipher Block Chaining with Cipher Text Stealing (nopadding)
class CTS_CBC(Mode):
def __init__(self,cipher,IV,pad=nopadding):
super().__init__(cipher,pad)
assert len(IV)==self.len
self.IV = IV
# encryption mode
def enc(self,M):
n,p = divmod(len(M),self.len)
C = [self.IV]
for b in self.iterblocks(M[:n*self.len]):
x = self.xorstr(b,C[-1])
C.append(self._cipher.enc(x))
if p>0:
clast = C.pop()
            b = next(self.iterblocks(M[n*self.len:])).ljust(self.len, b'\0')
x = self.xorstr(b,clast)
C.append(self._cipher.enc(x))
C.append(clast[:p])
return b''.join(C)
# decryption mode
def dec(self,C):
l = self.len
n,p = divmod(len(C),l)
M = []
if p>0:
clast = C[-p:]
C = C[:-p]
cend = C[-l:]
C = C[:-l]
mend = self._cipher.dec(cend)
mprev = self._cipher.dec(clast+mend[p:])
M.insert(0,self.xorstr(clast,mend[:p]))
M.insert(0,self.xorstr(C[-l:],mprev))
C = self.IV+C
while len(C)>l:
c = C[-l:]
C = C[:-l]
M.insert(0,self.xorstr(C[-l:],self._cipher.dec(c)))
return b''.join(M)
# -----------------------------------------------------------------------------
# Counter mode with provided iterable counter (no padding)
class DefaultCounter:
def __init__(self,bytesize,iv=None):
self.bytesize = bytesize
if iv is not None:
x = bytesize//2
assert len(iv)==bytesize
self.setup(iv[0:x],iv[x:])
def setup(self,nonce=None,count=None):
l = self.bytesize
if nonce is None:
nonce = b'\0'*(l//2)
if count is None:
count = b'\0'*(l//2)
self.nonce = nonce
self.count0 = count
return self
def reset(self):
self.count = Bits(*unpack(self.count0,'>L'))
def __call__(self):
try:
res = pack(self.count,'>L')
self.count += 1
return self.nonce+res
except AttributeError:
print("setup and reset counter is needed")
class CTR(Mode):
def __init__(self,cipher,counter=None):
super().__init__(cipher)
if counter is None:
counter = DefaultCounter(self.len)
elif isinstance(counter,bytes):
counter = DefaultCounter(self.len,counter)
self.counter = counter
# encryption mode
def enc(self,M):
self.counter.reset()
self.pad.reset()
C = []
for b in self.iterblocks(M):
c = self.counter()
k = self._cipher.enc(c)
x = self.xorstr(b,k)
C.append(x)
return b''.join(C)
# decryption mode
def dec(self,C):
self.counter.reset()
self.pad.reset()
P = self.enc(C)
n,p = divmod(len(C),self.len)
if p>0:
assert len(P)==n+1
res = P[:-p]
else:
assert len(P)==n
res = P
return res
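# In CTR mode the keystream XOR is its own inverse: for a keystream block k,
# dec(enc(m)) applies (m ^ k) ^ k == m, which is why dec() above simply
# re-runs enc() with the counter reset.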
# -----------------------------------------------------------------------------
# Chain mode of Operation Core class for Digest algorithms, nopadding default
class Chain(object):
def __init__(self,cipherclass,pad=nopadding):
self._cipherclass = cipherclass
self.pad = pad
def iterblocks(self,M,**kargs):
for b in self.pad.iterblocks(M,**kargs):
yield b
# mandatory API:
def __call__(self,M):
raise NotImplementedError
# xor input byte strings (over min length):
def xorstr(self,a,b):
a = bytes(a)
b = bytes(b)
return bytes([x^y for (x,y) in zip(a,b)])
|
gpl-2.0
| 8,923,609,616,704,921,000
| 29.263374
| 79
| 0.483682
| false
| 3.580331
| false
| false
| false
|
OpenMined/PySyft
|
packages/syft/tests/syft/core/pointer/pointer_test.py
|
1
|
6660
|
# stdlib
from io import StringIO
import sys
from typing import Any
from typing import List
# third party
import pytest
import torch as th
# syft absolute
import syft as sy
from syft.core.node.common.client import AbstractNodeClient
from syft.core.pointer.pointer import Pointer
def validate_output(data: Any, data_ptr: Pointer) -> None:
old_stdout = sys.stdout
sys.stdout = newstdout = StringIO()
data_ptr.print()
sys.stdout = old_stdout
assert newstdout.getvalue().strip("\n") == str(repr(data))
def validate_permission_error(data_ptr: Pointer) -> None:
old_stdout = sys.stdout
sys.stdout = newstdout = StringIO()
data_ptr.print()
sys.stdout = old_stdout
assert newstdout.getvalue().startswith("No permission to print")
@pytest.mark.slow
@pytest.mark.parametrize("with_verify_key", [True, False])
def test_make_pointable(
with_verify_key: bool,
node: sy.VirtualMachine,
client: sy.VirtualMachineClient,
root_client: sy.VirtualMachineClient,
) -> None:
ten = th.tensor([1, 2])
ptr = ten.send(root_client, pointable=False)
assert len(client.store) == 0
if with_verify_key:
ptr.update_searchability(target_verify_key=client.verify_key)
else:
ptr.update_searchability()
assert len(client.store) == 1
@pytest.mark.slow
@pytest.mark.parametrize("with_verify_key", [True, False])
def test_make_unpointable(
with_verify_key: bool,
node: sy.VirtualMachine,
client: sy.VirtualMachineClient,
root_client: sy.VirtualMachineClient,
) -> None:
ten = th.tensor([1, 2])
ptr = ten.send(root_client, pointable=False)
if with_verify_key:
ptr.update_searchability(target_verify_key=client.verify_key)
else:
ptr.update_searchability()
assert len(client.store) == 1
if with_verify_key:
ptr.update_searchability(pointable=False, target_verify_key=client.verify_key)
else:
ptr.update_searchability(pointable=False)
assert len(client.store) == 0
@pytest.mark.slow
def test_pointable_property(
client: sy.VirtualMachineClient, root_client: sy.VirtualMachineClient
) -> None:
ten = th.tensor([1, 2])
ptr = ten.send(root_client, pointable=False)
assert len(client.store) == 0
ptr.pointable = False
assert len(client.store) == 0
ptr.pointable = True
assert len(client.store) == 1
ptr.pointable = True
assert len(client.store) == 1
ptr.pointable = False
assert len(client.store) == 0
@pytest.mark.slow
@pytest.mark.xfail
def test_tags(root_client: sy.VirtualMachineClient) -> None:
ten = th.tensor([1, 2])
ten = ten.tag("tag1", "tag1", "other")
assert ten.tags == ["tag1", "other"]
# .send without `tags` passed in
ptr = ten.send(root_client)
assert ptr.tags == ["tag1", "other"]
# .send with `tags` passed in
ptr = ten.send(root_client, tags=["tag2", "tag2", "other"])
assert ten.tags == ["tag2", "other"]
assert ptr.tags == ["tag2", "other"]
th.Tensor([1, 2, 3]).send(root_client, pointable=True, tags=["a"])
th.Tensor([1, 2, 3]).send(root_client, pointable=True, tags=["b"])
th.Tensor([1, 2, 3]).send(root_client, pointable=True, tags=["c"])
th.Tensor([1, 2, 3]).send(root_client, pointable=True, tags=["d"])
sy.lib.python.Int(2).send(root_client, pointable=True, tags=["e"])
sy.lib.python.List([1, 2, 3]).send(root_client, pointable=True, tags=["f"])
a = root_client.store["a"]
b = root_client.store["b"]
c = root_client.store["c"]
d = root_client.store["d"]
e = root_client.store["e"]
result_ptr = a.requires_grad
assert result_ptr.tags == ["a", "requires_grad"]
result_ptr = b.pow(e)
assert result_ptr.tags == ["b", "e", "pow"]
result_ptr = c.pow(exponent=e)
assert result_ptr.tags == ["c", "e", "pow"]
result_ptr = root_client.torch.pow(d, e)
assert result_ptr.tags == ["d", "e", "pow"]
result_ptr = root_client.torch.pow(d, 3)
assert result_ptr.tags == ["d", "pow"]
# __len__ auto gets if you have permission
f_root = root_client.store["f"]
assert len(f_root) == 3
def test_auto_approve_length_request(client: sy.VirtualMachineClient) -> None:
remote_list = sy.lib.python.List([1, 2, 3]).send(client)
result_len_ptr = remote_list.len()
assert result_len_ptr is not None
assert result_len_ptr.get() == 3
remote_list = client.syft.lib.python.List([1, 2, 3])
result_len_ptr = remote_list.len()
assert result_len_ptr is not None
assert result_len_ptr.get() == 3
def test_description(root_client: sy.VirtualMachineClient) -> None:
ten = th.tensor([1, 2])
ten = ten.describe("description 1")
assert ten.description == "description 1"
# .send without `description` passed in
ptr = ten.send(root_client)
assert ptr.description == "description 1"
# .send with `description` passed in
ptr = ten.send(root_client, description="description 2")
assert ten.description == "description 2"
assert ptr.description == "description 2"
def test_printing(
client: sy.VirtualMachineClient, root_client: sy.VirtualMachineClient
) -> None:
data_types = [
sy.lib.python.Int(1),
sy.lib.python.Float(1.5),
sy.lib.python.Bool(True),
sy.lib.python.List([1, 2, 3]),
sy.lib.python.Tuple((1, 2, 3)),
th.tensor([1, 2, 3]),
]
for data in data_types:
validate_output(data, data.send(root_client))
for data in data_types:
validate_permission_error(data.send(client))
@pytest.mark.slow
def test_printing_remote_creation(
client: sy.VirtualMachineClient, root_client: sy.VirtualMachineClient
) -> None:
def create_data_types(client: AbstractNodeClient) -> List[Pointer]:
return [
client.syft.lib.python.Int(1),
client.syft.lib.python.Float(1.5),
client.syft.lib.python.Bool(True),
client.syft.lib.python.List([1, 2, 3]),
client.syft.lib.python.Tuple((1, 2, 3)),
client.torch.Tensor([1, 2, 3]),
]
for elem in create_data_types(root_client):
out = elem.get(delete_obj=False)
validate_output(out, elem)
for idx, elem in enumerate(create_data_types(client)):
validate_permission_error(elem)
def test_exhausted(root_client: sy.VirtualMachineClient) -> None:
int_ptr = root_client.syft.lib.python.Int(0)
int_ptr.get() # ptr gets exhausted after this call
with pytest.raises(ReferenceError) as e:
int_ptr.get()
assert str(e.value) == "Object has already been deleted. This pointer is exhausted"
|
apache-2.0
| 7,374,268,223,427,589,000
| 27.583691
| 87
| 0.644745
| false
| 3.290514
| true
| false
| false
|
mikeshardmind/SinbadCogs
|
scheduler/converters.py
|
1
|
4607
|
from __future__ import annotations
import argparse
import dataclasses
from datetime import datetime, timedelta, timezone
from typing import NamedTuple, Optional, Tuple
from redbot.core.commands import BadArgument, Context
from .time_utils import parse_time, parse_timedelta
class NonNumeric(NamedTuple):
parsed: str
@classmethod
async def convert(cls, context: Context, argument: str):
if argument.isdigit():
raise BadArgument("Event names must contain at least 1 non-numeric value")
return cls(argument)
class NoExitParser(argparse.ArgumentParser):
def error(self, message):
raise BadArgument()
@dataclasses.dataclass()
class Schedule:
start: datetime
command: str
recur: Optional[timedelta] = None
quiet: bool = False
def to_tuple(self) -> Tuple[str, datetime, Optional[timedelta]]:
return self.command, self.start, self.recur
@classmethod
async def convert(cls, ctx: Context, argument: str):
start: datetime
command: Optional[str] = None
recur: Optional[timedelta] = None
command, *arguments = argument.split(" -- ")
if arguments:
argument = " -- ".join(arguments)
else:
command = None
parser = NoExitParser(description="Scheduler event parsing", add_help=False)
parser.add_argument(
"-q", "--quiet", action="store_true", dest="quiet", default=False
)
parser.add_argument("--every", nargs="*", dest="every", default=[])
if not command:
parser.add_argument("command", nargs="*")
at_or_in = parser.add_mutually_exclusive_group()
at_or_in.add_argument("--start-at", nargs="*", dest="at", default=[])
at_or_in.add_argument("--start-in", nargs="*", dest="in", default=[])
try:
vals = vars(parser.parse_args(argument.split(" ")))
except Exception as exc:
raise BadArgument() from exc
if not (vals["at"] or vals["in"]):
raise BadArgument("You must provide one of `--start-in` or `--start-at`")
if not command and not vals["command"]:
raise BadArgument("You have to provide a command to run")
command = command or " ".join(vals["command"])
for delta in ("in", "every"):
if vals[delta]:
parsed = parse_timedelta(" ".join(vals[delta]))
if not parsed:
raise BadArgument("I couldn't understand that time interval")
if delta == "in":
start = datetime.now(timezone.utc) + parsed
else:
recur = parsed
if recur.total_seconds() < 60:
raise BadArgument(
"You can't schedule something to happen that frequently, "
"I'll get ratelimited."
)
if vals["at"]:
try:
start = parse_time(" ".join(vals["at"]))
except Exception:
raise BadArgument("I couldn't understand that starting time.") from None
return cls(command=command, start=start, recur=recur, quiet=vals["quiet"])
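# A minimal usage sketch (hypothetical argument string, shown only to
# illustrate the parsing above):
#
#     "say hello -- --start-in 2 hours --every 1 day"
#
# splits into command="say hello" plus flags, yielding
# Schedule(command="say hello", start=<now + 2h>, recur=timedelta(days=1)).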
class TempMute(NamedTuple):
reason: Optional[str]
start: datetime
@classmethod
async def convert(cls, ctx: Context, argument: str):
start: datetime
reason: str
parser = NoExitParser(description="Scheduler event parsing", add_help=False)
parser.add_argument("reason", nargs="*")
at_or_in = parser.add_mutually_exclusive_group()
at_or_in.add_argument("--until", nargs="*", dest="until", default=[])
at_or_in.add_argument("--for", nargs="*", dest="for", default=[])
try:
vals = vars(parser.parse_args(argument.split()))
except Exception as exc:
raise BadArgument() from exc
if not (vals["until"] or vals["for"]):
raise BadArgument("You must provide one of `--until` or `--for`")
reason = " ".join(vals["reason"])
if vals["for"]:
parsed = parse_timedelta(" ".join(vals["for"]))
if not parsed:
raise BadArgument("I couldn't understand that time interval")
start = datetime.now(timezone.utc) + parsed
if vals["until"]:
try:
                start = parse_time(" ".join(vals["until"]))
except Exception:
raise BadArgument("I couldn't understand that unmute time.") from None
return cls(reason, start)
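# Similarly, a minimal sketch for TempMute (hypothetical string):
# "spamming --for 1 hour" parses into reason="spamming" and
# start=<now + 1 hour>.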
|
mit
| -8,876,680,537,058,773,000
| 32.384058
| 88
| 0.573692
| false
| 4.442623
| false
| false
| false
|
luoxsbupt/ibus
|
ui/gtk/languagebar.py
|
1
|
8475
|
# vim:set et sts=4 sw=4:
#
# ibus - The Input Bus
#
# Copyright(c) 2007-2008 Huang Peng <shawn.p.huang@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or(at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
import gtk
import gtk.gdk as gdk
import gobject
import ibus
import icon
from handle import Handle
from menu import menu_position
from engineabout import EngineAbout
from toolitem import ToolButton,\
ToggleToolButton, \
SeparatorToolItem, \
MenuToolButton
from gettext import dgettext
_ = lambda a : dgettext("ibus", a)
N_ = lambda a : a
ICON_SIZE = gtk.ICON_SIZE_MENU
class LanguageBar(gtk.Toolbar):
__gtype_name__ = "IBusLanguagePanel"
__gsignals__ = {
"property-activate" : (
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_INT)),
"get-im-menu" : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_PYOBJECT,
()),
"show-engine-about" : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_PYOBJECT,
()),
}
def __init__ (self):
super(LanguageBar, self).__init__()
self.__show = 1
self.__enabled = False
self.__has_focus = False
self.__show_im_name = False
self.__im_name = None
self.set_style(gtk.TOOLBAR_BOTH_HORIZ)
self.set_show_arrow(False)
self.set_property("icon-size", ICON_SIZE)
self.__create_ui()
self.__properties = []
self.__toplevel = gtk.Window(gtk.WINDOW_POPUP)
self.__toplevel.connect("size-allocate", self.__toplevel_size_allocate_cb)
self.__toplevel.add(self)
root = gdk.get_default_root_window()
try:
self.__work_area = root.property_get("_NET_WORKAREA")[2]
except:
w, h = root.get_size()
self.__work_area = 0, 0, w, h
self.__position = self.__work_area[0] + self.__work_area[2] - 20, self.__work_area[1] + self.__work_area[3] - 20
self.__toplevel.move(*self.__position)
def __create_ui(self):
# create move handle
self.__handle = gtk.ToolItem()
handle = Handle()
self.__handle.add(handle)
self.insert(self.__handle, -1)
handle.connect("move-end", self.__handle_move_end_cb)
# create input methods menu
# prop = ibus.Property(key = "", type = ibus.PROP_TYPE_TOGGLE, icon = "ibus", tooltip = _("Switch input method"))
self.__im_menu = gtk.ToggleToolButton()
self.__im_menu.set_homogeneous(False)
self.__im_menu.connect("toggled", self.__im_menu_toggled_cb)
self.insert(self.__im_menu, -1)
self.__about_button = gtk.ToolButton(gtk.STOCK_ABOUT)
self.__about_button.set_no_show_all(True)
self.__about_button.set_tooltip_text(_("About the Input Method"))
self.__about_button.connect("clicked", self.__about_button_clicked_cb)
self.insert(self.__about_button, -1)
def __im_menu_toggled_cb(self, widget):
if self.__im_menu.get_active():
menu = self.emit("get-im-menu")
menu.connect("deactivate", self.__im_menu_deactivate_cb)
menu.popup(None, None,
menu_position,
0,
gtk.get_current_event_time(),
widget)
def __about_button_clicked_cb(self, widget):
if self.__enabled:
self.emit("show-engine-about")
def __im_menu_deactivate_cb(self, menu):
self.__im_menu.set_active(False)
def __handle_move_end_cb(self, handle):
x, y = self.__toplevel.get_position()
w, h = self.__toplevel.get_size()
self.__position = x + w, y + h
def __toplevel_size_allocate_cb(self, toplevel, allocation):
x, y = self.__position
if x - self.__work_area[0] >= self.__work_area[2] - 80:
self.__toplevel.move(x - allocation.width, y - allocation.height)
def __remove_properties(self):
# reset all properties
map(lambda i: i.destroy(), self.__properties)
self.__properties = []
def __set_opacity(self, opacity):
if self.__toplevel.window == None:
self.__toplevel.realize()
self.__toplevel.window.set_opacity(opacity)
def do_show(self):
gtk.Toolbar.do_show(self)
def do_size_request(self, requisition):
gtk.Toolbar.do_size_request(self, requisition)
self.__toplevel.resize(1, 1)
def set_im_icon(self, icon_name):
widget = icon.IconWidget(icon_name, 18)
self.__im_menu.set_icon_widget(widget)
def set_show_im_name(self, show):
self.__show_im_name = show
self.set_im_name(self.__im_name)
self.__im_menu.set_is_important(show)
def set_im_name(self, text):
self.__im_name = text
if text:
self.__im_menu.set_tooltip_text(text)
self.__im_menu.set_label(text)
else:
self.__im_menu.set_tooltip_text(_("Switch input method"))
self.__im_menu.set_label("")
def reset(self):
self.__remove_properties()
def set_enabled(self, enabled):
self.__enabled = enabled
if self.__enabled:
self.__about_button.show()
self.__set_opacity(1.0)
if self.__has_focus:
if self.__show in (1, 2):
self.show_all()
else:
self.__about_button.hide()
self.__set_opacity(0.5)
if self.__show in (1, 0):
self.hide_all()
def is_enabled(self):
return self.__enabled
def set_show(self, show):
if show not in (0, 1, 2):
show = 1
self.__show = show
if self.__has_focus:
self.focus_in()
else:
self.focus_out()
def get_show(self):
return self.__show
def register_properties(self, props):
self.__remove_properties()
# create new properties
for i, prop in enumerate(props):
if prop.type == ibus.PROP_TYPE_NORMAL:
item = ToolButton(prop = prop)
elif prop.type == ibus.PROP_TYPE_TOGGLE:
item = ToggleToolButton(prop = prop)
elif prop.type == ibus.PROP_TYPE_MENU:
item = MenuToolButton(prop = prop)
            elif prop.type == ibus.PROP_TYPE_SEPARATOR:
                item = SeparatorToolItem()
            else:
                raise ibus.IBusException("Unknown property type = %d" % prop.type)
item.connect("property-activate",
lambda w, n, s: self.emit("property-activate", n, s))
item.set_sensitive(prop.sensitive)
item.set_no_show_all(True)
if prop.visible:
item.show()
else:
item.hide()
self.__properties.append(item)
self.insert(item, i + 2)
def update_property(self, prop):
map(lambda x: x.update_property(prop), self.__properties)
def show_all(self):
self.__toplevel.show_all()
self.__toplevel.window.raise_()
gtk.Toolbar.show_all(self)
def hide_all(self):
x, y = self.__toplevel.get_position()
self.__toplevel.hide_all()
gtk.Toolbar.hide_all(self)
# save bar position
self.__toplevel.move(x, y)
def focus_in(self):
self.__has_focus = True
self.__im_menu.set_sensitive(True)
if self.__enabled:
if self.__show in (1, 2):
self.show_all()
else:
self.hide_all()
def focus_out(self):
self.__has_focus = False
self.__im_menu.set_sensitive(False)
if self.__show in (0, 1):
self.hide_all()
else:
self.show_all()
|
lgpl-2.1
| 1,944,587,740,358,431,500
| 31.224335
| 121
| 0.56649
| false
| 3.712221
| false
| false
| false
|
DaveBuckingham/robosoft
|
record_mode.py
|
1
|
8314
|
"""
Provides functions for
1) recording outputs to file
2) replaying outputs from files
"""
import global_data
import mctransmitter
import datetime
import os
import errno
import time
import threading
import ui_display
playback_file_tag = None
save_filename_prefix = 'botwurst_command_record_'
default_save_directory = 'botwurst_command_recordings'
save_file_extension = '.dat'
# TODO: set recording limit
def make_directory(directory_name):
try:
os.makedirs(directory_name + '/')
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def set_default_save_directory(directory_name):
    global default_save_directory
    default_save_directory = directory_name
# HELPER FUNCTION FOR LOOKING AT GLOBAL VARIABLES
def print_global_record_variables():
print "RECORDING VARIABLE SETTINGS"
print "===================="
print "Recording: ", global_data.record
print "Will store in file numbered: ", global_data.record_file_number, " in directory: ", default_save_directory
print "Initial time: ", global_data.record_start_time
print "Recording array is empty: ", (len(global_data.record_array) == 0)
print "===================="
# RECORDING FUNCTIONS
def initialize_record_mode(file_number):
"""
Sets all the global_data variables to reflect that we are now recording
Creates the specified directory in which the recording file is to be saved, if directory does not exist
:param file_number: Tag for file where recording will be stored
"""
# if record_array is not empty back it up to file
if global_data.record_array:
        file_tag = str(global_data.record_file_number) + "_backup"
create_record_file(file_tag)
global_data.record = True
global_data.record_file_number = file_number
global_data.record_start_time = datetime.datetime.now()
# if save_directory already exists as subdirectory, nothing will happen
make_directory(default_save_directory)
def append_instruction(instruction):
"""
Appends the instruction to record array in global data with time step from 0
:param instruction: triple (PIN TYPE, PIN INDEX, VAL)
"""
time_stamp = datetime.datetime.now()
# TODO: look into note about datetime subtraction (is exact but may overflow)
time_diff = time_stamp - global_data.record_start_time
pin_type = instruction[0]
pin_index = instruction[1]
value = instruction[2]
record_instruction = (pin_type, pin_index, value, time_diff.total_seconds())
global_data.record_array.append(record_instruction)
# 2) CREATE A FILE FROM RECORD ARRAY
def create_record_file(file_tag=None, save_directory=None):
"""
Creates a file with the list of instructions in record_array
:param file_tag: defaults to file_number in global data
"""
if file_tag is None:
file_tag = global_data.record_file_number
if save_directory is None:
save_directory = default_save_directory
record_filename = save_directory + '/' + save_filename_prefix + str(file_tag) + save_file_extension
# Create new file, or overwrite file if it exists
with open(record_filename, 'w') as recording_file:
# Copy all commands to the file
for command in global_data.record_array:
recording_file.write(str(command) + '\n')
# Reinitialize all record variables
global_data.record = False
global_data.record_file_number = None
global_data.record_start_time = None
global_data.record_array = []
# 2) PLAYBACK FUNCTIONS
def clear_playback_array():
global_data.playback_array = []
def populate_playback_array_from_file(filename, is_file_tag=False, save_directory=None):
"""
Appends instructions from current file to playback array
:param filename: name of file containing recording information
:param is_file_tag: True if only using number to identify file (default False)
:param save_directory: default directory specified in global data
"""
if save_directory is None:
save_directory = default_save_directory
if is_file_tag:
filename = save_filename_prefix + str(filename)
playback_file = open(save_directory + '/' + str(filename) + save_file_extension, 'r')
playback_file_lines = playback_file.readlines()
for line in playback_file_lines:
global_data.playback_array.append((eval(line.rstrip())))
def playback_instruction(pin_type, pin_index, value):
if pin_type == 'd':
# print "DIGITAL, PIN_INDEX: ", pin_index, "VALUE: ", value
mctransmitter.tx_digital(pin_index, value)
elif pin_type == 'a':
# print "ANALOG, PIN_INDEX: ", pin_index, "VALUE: ", value
mctransmitter.tx_analog(pin_index, value)
class Playback_From_Array(threading.Thread):
def __init__(self, parent, queue):
threading.Thread.__init__(self)
self._queue = queue
self._parent = parent
self.start()
def run(self):
curr_time_stamp = 0
for instruction in self._queue:
while global_data.playback_paused:
if global_data.playback_cancel:
break
time.sleep(.1)
if global_data.playback_cancel:
break
temp_time_stamp = instruction[3]
time_diff = (temp_time_stamp - curr_time_stamp)
time.sleep(time_diff)
playback_instruction(instruction[0], instruction[1], instruction[2])
curr_time_stamp = temp_time_stamp
ui_display.update()
clear_playback_array()
global_data.playback = False
global_data.playback_file_number = None
ui_display.update()
def playback_from_file(filename, is_file_tag=False, save_directory=None):
clear_playback_array()
global_data.playback = True
global_data.playback_file_number = filename
populate_playback_array_from_file(filename, is_file_tag, save_directory)
playback_thread = Playback_From_Array(None, global_data.playback_array)
return playback_thread
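# A minimal usage sketch (hypothetical file tag 7; assumes mctransmitter is
# initialized and the save directory is writable):
#
#     initialize_record_mode(7)
#     append_instruction(('d', 0, True))   # digital pin 0 high
#     append_instruction(('a', 1, 128))    # analog pin 1 to 128
#     create_record_file()
#     playback_from_file(7, is_file_tag=True).join()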
# TESTING FUNCTIONS: TO REMOVE
# class Print_Hello_Every_Sec(threading.Thread):
# def __init__(self, parent, queue):
# threading.Thread.__init__(self)
# self._queue = queue
# self._parent = parent
# self.start()
#
# def run(self):
# for i in range(15):
# print "**********HELLO THERE**************"
# time.sleep(1)
#
# class Pause_Unpause(threading.Thread):
# def __init__(self, parent, queue):
# threading.Thread.__init__(self)
# self._queue = queue
# self._parent = parent
# self.start()
#
# def run(self):
# time.sleep(2)
# global_data.playback_paused = True
# print "PAUSING"
# time.sleep(5)
# global_data.playback_cancel = True
# print "CANCELLING"
# time.sleep(5)
# print "UNPAUSING"
# global_data.playback_paused = False
#
#
# def create_dummy_instruction_file(file_tag):
# short_delay = 0.1
# long_delay = 1
#
# initialize_record_mode(file_tag)
# print_global_record_variables()
#
# i = 1
# j = 0
#
# for iterator in range(10):
# i_is_even = (1 == i%2)
#
# digital_instruction = ('d', 0, i_is_even)
# append_instruction(digital_instruction)
#
# time.sleep(short_delay)
#
# digital_instruction = ('d', 1, not i_is_even)
# append_instruction(digital_instruction)
#
# time.sleep(short_delay)
#
# val = abs((j % 510) - 255)
#
# analog_instruction = ('a', 0, val)
# append_instruction(analog_instruction)
#
# time.sleep(short_delay)
#
# analog_instruction = ('a', 1, 255 - val)
# append_instruction(analog_instruction)
#
# time.sleep(long_delay)
#
# i = i + 1
# j = j + 20
#
# create_record_file()
#
# def main():
# test_file_tag = 5
# # create_dummy_instruction_file(test_file_tag)
#
# pause_thread = Pause_Unpause(None, None)
# playback_thread = playback_from_file(test_file_tag, True)
# print_hello_thread = Print_Hello_Every_Sec(None, None)
#
# print_hello_thread.join()
# playback_thread.join()
# pause_thread.join()
#
# print_global_record_variables()
#
#
# main()
|
mit
| 2,830,740,662,388,459,500
| 28.799283
| 116
| 0.636998
| false
| 3.540886
| false
| false
| false
|
Kumapapa2012/Learning-Machine-Learning
|
Reversi/agent_lrelu_0_1.py
|
1
|
15736
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import copy
import numpy as np
np.random.seed(0)
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import cuda
from chainer import optimizers
from rlglue.agent.Agent import Agent
from rlglue.agent import AgentLoader as AgentLoader
from rlglue.types import Action
from rlglue.types import Observation
from rlglue.utils import TaskSpecVRLGLUE3
# QNet
# Neural network class
class QNet(chainer.Chain):
    # __init__(n_in, n_units, n_out)
    #   n_in: input layer size
    #   n_units: hidden layer size
    #   n_out: output layer size
def __init__(self, n_in, n_units, n_out):
super(QNet, self).__init__(
l1=L.Linear(n_in, n_units),
l20=L.Linear(n_units, n_units),
l21=L.Linear(n_units, n_units),
l22=L.Linear(n_units, n_units),
l23=L.Linear(n_units, n_units),
l24=L.Linear(n_units, n_units),
l25=L.Linear(n_units, n_units),
l26=L.Linear(n_units, n_units),
l27=L.Linear(n_units, n_units),
l3=L.Linear(n_units, n_out),
)
    # value(x)
    #   x: input layer values
    # Forward computation through the network.
    # Return: output layer result
def value(self, x):
h = F.leaky_relu(self.l1(x),slope=0.1) #slope=0.2(default)
h = F.leaky_relu(self.l20(h),slope=0.1)
h = F.leaky_relu(self.l21(h),slope=0.1)
h = F.leaky_relu(self.l22(h),slope=0.1)
h = F.leaky_relu(self.l23(h),slope=0.1)
h = F.leaky_relu(self.l24(h),slope=0.1)
h = F.leaky_relu(self.l25(h),slope=0.1)
h = F.leaky_relu(self.l26(h),slope=0.1)
h = F.leaky_relu(self.l27(h),slope=0.1)
return self.l3(h)
    # __call__(s_data, a_data, y_data)
    #   s_data: states
    #   a_data: actions
    #   y_data: teacher data (max Q value of the next action)
    # Training callback:
    # 1. Forward-propagate s_data (Q, Q_data)
    # 2. Copy Q_data into t_data
    # 3. Replace entry a_data[i] of t_data with the Q value y_data[i]
    #    to build the teacher data (t)
    # 4. Compute the mean squared error between Q and t
    #
    # Return: the mean squared error
def __call__(self, s_data, a_data, y_data):
self.loss = None
s = chainer.Variable(self.xp.asarray(s_data))
Q = self.value(s)
Q_data = copy.deepcopy(Q.data)
if type(Q_data).__module__ != np.__name__:
Q_data = self.xp.asnumpy(Q_data)
t_data = copy.deepcopy(Q_data)
for i in range(len(y_data)):
t_data[i, a_data[i]] = y_data[i]
t = chainer.Variable(self.xp.asarray(t_data))
self.loss = F.mean_squared_error(Q, t)
print('Loss:', self.loss.data)
return self.loss
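    # A worked example (hypothetical numbers): for a batch entry with
    # a_data=[3] and y_data=[0.7], t equals Q everywhere except column 3,
    # which becomes 0.7 - so the squared error only penalises the Q value
    # of the action actually taken.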
# Agent class
class KmoriReversiAgent(Agent):
    # __init__(gpu, size)
    #   gpu: GPU number (0 or above; -1 to use the CPU)
    #   size: length of one side of the square board (an even number >= 6)
    # Initialize the agent and define the learning setup.
    def __init__(self, gpu, size):
        # The board size must be an even number, 6 or above.
        if size < 6 or size % 2 != 0:
            print("size must be even number and 6 or above!")
            exit()
        # Board information (8 for Othello)
        self.n_rows = int(size)
        self.n_cols = self.n_rows
        # Input size for learning
        self.dim = self.n_rows * self.n_cols  # board size = output layer size
        self.bdim = self.dim * 4  # size of one frame of training data
        self.gpu = gpu
        # Number of steps before learning starts
        self.learn_start = 5 * 10**3
        # Number of transitions to keep (changed)
        self.capacity = 2 * 10**4
        # eps = probability of choosing a move at random
        self.eps_start = 1.0
        self.eps_end = 0.001
        self.eps = self.eps_start
        # Number of past actions (frames) to look back on when learning
        self.n_frames = 9
        # Batch size used in one learning step
        self.batch_size = 128
        self.replay_mem = []
        self.last_state = None
        self.last_action = None
        self.reward = None
        self.state = np.zeros((1, self.n_frames, self.bdim)).astype(np.float32)
        self.step_counter = 0
        self.update_freq = 1 * 10**4
        self.r_win = 1.0
        self.r_draw = -0.5
        self.r_lose = -1.0
        self.frozen = False
        self.win_or_draw = 0
        self.stop_learning = 200
    # agent_init(task_spec_str)
    #   task_spec_str: task information passed in from RL_Glue
    # Initialize the game information.
    def agent_init(self, task_spec_str):
        task_spec = TaskSpecVRLGLUE3.TaskSpecParser(task_spec_str)
        if not task_spec.valid:
            raise ValueError(
                'Task spec could not be parsed: {}'.format(task_spec_str))
        self.gamma = task_spec.getDiscountFactor()  # discount factor
        # Build the DQN.
        #   Arg1: input layer size
        #   Arg2: number of hidden-layer nodes
        #   Arg3: output layer size
self.Q = QNet(self.bdim*self.n_frames, self.bdim*self.n_frames, self.dim)
if self.gpu >= 0:
cuda.get_device(self.gpu).use()
self.Q.to_gpu()
self.xp = np if self.gpu < 0 else cuda.cupy
self.targetQ = copy.deepcopy(self.Q)
self.optimizer = optimizers.RMSpropGraves(lr=0.00025, alpha=0.95,
momentum=0.0)
self.optimizer.setup(self.Q)
    # agent_start(observation)
    #   observation: game state (board state, etc.)
    # Called right after env_start in environment.py.
    # Decides and executes the first move, then hands the executed
    # action back to RL_Glue.
    def agent_start(self, observation):
        # Increment the step counter
        self.step_counter += 1
        # kmori: update the state using our custom observation.
        # Partly follows the sample; the rest is built differently.
        self.update_state(observation)
        self.update_targetQ()
        # Decide which move to play.
        int_action = self.select_int_action()
        action = Action()
        action.intArray = [int_action]
        # Update eps, the probability of playing a random move.
        self.update_eps()
        # Stash state (the board state) and action (where we placed a piece).
        self.last_state = copy.deepcopy(self.state)
        self.last_action = copy.deepcopy(int_action)
return action
    # agent_step(reward, observation)
    #   reward: reward
    #   observation: game state (board state, etc.)
    # Called from the agent's second move onward, until the game ends.
    # (In Reversi the intermediate reward is always zero.)
    def agent_step(self, reward, observation):
        # Increment the step counter
        self.step_counter += 1
        self.update_state(observation)
        self.update_targetQ()
        # Decide which move to play.
        int_action = self.select_int_action()  # a return value of -1 means pass.
        action = Action()
        action.intArray = [int_action]
        self.reward = reward
        # Update eps
        self.update_eps()
        # Store the transition (state, action, reward, result)
        self.store_transition(terminal=False)
        if not self.frozen:
            # Run a learning step
            if self.step_counter > self.learn_start:
                self.replay_experience()
        self.last_state = copy.deepcopy(self.state)
        self.last_action = copy.deepcopy(int_action)
        # Hand the chosen position back to the environment
return action
    # agent_end(reward)
    #   reward: reward
    # Called once the game has finished.
    def agent_end(self, reward):
        # Reward received from the environment
        self.reward = reward
if not self.frozen:
if self.reward >= self.r_draw:
self.win_or_draw += 1
else:
self.win_or_draw = 0
if self.win_or_draw == self.stop_learning:
self.frozen = True
f = open('result.txt', 'a')
f.writelines('Agent frozen\n')
f.close()
        # Store the transition (state, action, reward, result)
        self.store_transition(terminal=True)
        if not self.frozen:
            # Run a learning step
            if self.step_counter > self.learn_start:
                self.replay_experience()
def agent_cleanup(self):
        # (to be implemented)
        # Should be called by RL_Cleanup.
        # Saving the model here would be a good place to do it.
pass
def agent_message(self, message):
pass
    # update_state(observation=None)
    #   observation: game state (board state, etc.)
    # Stores the game state into self.state.
    def update_state(self, observation=None):
        # Save the state for learning.
        if observation is None:
            frame = np.zeros((1, 1, self.bdim)).astype(np.float32)
        else:
            # Build the training data from the observation.
            observation_binArray = []
            pageSize = self.n_rows * self.n_cols
            # Piece positions
            for i in range(0, pageSize):
                observation_binArray.append(int(observation.intArray[i]))
                observation_binArray.append(int(observation.intArray[pageSize + i]))
            # Squares where a piece can be placed
            for i in range(0, pageSize):
                observation_binArray.append(int(observation.intArray[2 * pageSize + i]))
                observation_binArray.append(int(observation.intArray[3 * pageSize + i]))
            frame = (np.asarray(observation_binArray).astype(np.float32)
                     .reshape(1, 1, -1))
self.state = np.hstack((self.state[:, 1:], frame))
    # update_eps()
    # Update epsilon of the ε-greedy policy based on the total number of steps.
def update_eps(self):
if self.step_counter > self.learn_start:
if len(self.replay_mem) < self.capacity:
self.eps -= ((self.eps_start - self.eps_end) /
(self.capacity - self.learn_start + 1))
    # update_targetQ()
    # Every update_freq steps, copy the current Q network into targetQ
    # (the network used for Q-value estimation).
def update_targetQ(self):
if self.step_counter % self.update_freq == 0:
self.targetQ = copy.deepcopy(self.Q)
    # select_int_action()
    # Uses the DQN to choose a move from the current board state,
    # i.e. decides where to place a piece.
def select_int_action(self):
        bits = self.state[0, -1]  # the last frame of state, i.e. the current board info.
        # Collect the empty squares. In this Othello the squares where a piece
        # can be placed are included in the observation, so use them directly.
        free = []
        freeInBoard = bits[(2 * self.n_rows * self.n_cols):]
        for i in range(0, len(freeInBoard), 2):
            if int(freeInBoard[i]) == 1:
                free.append(i // 2)
        # If there is nowhere to play, pass here.
        if len(free) == 0:
            # pass...
            return -1
        # Compute the Q values
        s = chainer.Variable(self.xp.asarray(self.state))
        Q = self.Q.value(s)
        # Follow the epsilon greedy strategy
        if np.random.rand() < self.eps:
            int_action = free[np.random.randint(len(free))]
        else:
            # Q values of the first (and only) sample in the batch
            Qdata = Q.data[0]
            if type(Qdata).__module__ != np.__name__:
                Qdata = self.xp.asnumpy(Qdata)
            # Decide the action: among the squares where a stone can be
            # placed, select the one with the highest Q value.
            for i in np.argsort(-Qdata):
                if i in free:
                    int_action = i
                    break
return int_action
def store_transition(self, terminal=False):
if len(self.replay_mem) < self.capacity:
self.replay_mem.append(
(self.last_state, self.last_action, self.reward,
self.state, terminal))
else:
            # self.replay_mem[1:] drops the first (oldest) element; appending
            # the new transition to it rotates the list FIFO-style.
self.replay_mem = (self.replay_mem[1:] +
[(self.last_state, self.last_action, self.reward,
self.state, terminal)])
def replay_experience(self):
        # Randomly sample batch_size transitions from the replay memory.
indices = np.random.randint(0, len(self.replay_mem), self.batch_size)
samples = np.asarray(self.replay_mem)[indices]
s, a, r, s2, t = [], [], [], [], []
for sample in samples:
s.append(sample[0])
a.append(sample[1])
r.append(sample[2])
s2.append(sample[3])
t.append(sample[4])
s = np.asarray(s).astype(np.float32)
a = np.asarray(a).astype(np.int32)
r = np.asarray(r).astype(np.float32)
s2 = np.asarray(s2).astype(np.float32)
t = np.asarray(t).astype(np.float32)
        # Use the target network targetQ to compute the Q values of s2
s2 = chainer.Variable(self.xp.asarray(s2))
Q = self.targetQ.value(s2)
Q_data = Q.data
if type(Q_data).__module__ == np.__name__:
max_Q_data = np.max(Q_data, axis=1)
else:
max_Q_data = np.max(self.xp.asnumpy(Q_data).astype(np.float32), axis=1)
        # Build the teacher data t from the Q values estimated by targetQ
t = np.sign(r) + (1 - t)*self.gamma*max_Q_data
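        # e.g.: a terminal win (r=1, terminal flag 1) yields target sign(1)=1.0,
        # while a non-terminal step (r=0, flag 0) yields gamma * max_a Q(s2, a)
        # - the standard Q-learning backup.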
self.optimizer.update(self.Q, s, a, t)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Deep Q-Learning')
parser.add_argument('--gpu', '-g', default=-1, type=int,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--size', '-s', default=6, type=int,
help='Reversi board size')
args = parser.parse_args()
AgentLoader.loadAgent(KmoriReversiAgent(args.gpu,args.size))
|
mit
| 504,017,417,974,920,600
| 30.820388
| 94
| 0.513312
| false
| 2.568281
| false
| false
| false
|
openforis/sepal
|
modules/google-earth-engine/docker/src/sepalinternal/mosaic/cloud_score.py
|
1
|
1357
|
import ee
# Based on scripts by Ian Hausman, which in turn is based on script by Matt Hancher
# https://groups.google.com/d/msg/google-earth-engine-developers/i63DS-Dg8Sg/_hgCBEYeBwAJ
def cloud_score(image):
def rescale(image, exp, thresholds):
return image.expression(exp, {'i': image}) \
.subtract(thresholds[0]).divide(thresholds[1] - thresholds[0])
# Compute several indicators of cloudyness and take the minimum of them.
score = ee.Image(1)
blueCirrusScore = ee.Image(0)
# Clouds are reasonably bright in the blue or cirrus bands.
# Use .max as a pseudo OR conditional
blueCirrusScore = blueCirrusScore.max(rescale(image, 'i.blue', [0.1, 0.5]))
blueCirrusScore = blueCirrusScore.max(rescale(image, 'i.aerosol', [0.1, 0.5]))
blueCirrusScore = blueCirrusScore.max(rescale(image, 'i.cirrus', [0.1, 0.3]))
score = score.min(blueCirrusScore)
# Clouds are reasonably bright in all visible bands.
score = score.min(rescale(image, 'i.red + i.green + i.blue', [0.2, 0.8]))
# Clouds are reasonably bright in all infrared bands.
score = score.min(
rescale(image, 'i.nir + i.swir1 + i.swir2', [0.3, 0.8]))
# However, clouds are not snow.
ndsi = image.normalizedDifference(['green', 'swir1'])
score = score.min(rescale(ndsi, 'i', [0.8, 0.6]))
return score
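# A minimal usage sketch (assumes an ee.ImageCollection whose images have
# bands named 'blue', 'aerosol', 'cirrus', 'red', 'green', 'nir', 'swir1'
# and 'swir2', matching the expressions above):
#
#     scored = collection.map(
#         lambda image: image.addBands(cloud_score(image).rename('cloud')))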
|
mit
| 2,958,949,883,621,575,000
| 38.911765
| 89
| 0.66986
| false
| 2.89339
| false
| false
| false
|
pyrocko/pyrocko
|
src/model/event.py
|
1
|
14506
|
# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
from __future__ import absolute_import, division
import logging
from functools import cmp_to_key
import numpy as num
import hashlib
import base64
from pyrocko import util, moment_tensor
from pyrocko.guts import Float, String, Timestamp, Unicode, \
StringPattern, List, Dict, Any
from .location import Location
logger = logging.getLogger('pyrocko.model.event')
guts_prefix = 'pf'
d2r = num.pi / 180.
def cmp(a, b):
return (a > b) - (a < b)
def ehash(s):
return str(base64.urlsafe_b64encode(
hashlib.sha1(s.encode('utf8')).digest()).decode('ascii'))
def float_or_none_to_str(x, prec=9):
return 'None' if x is None else '{:.{prec}e}'.format(x, prec=prec)
class FileParseError(Exception):
pass
class EventExtrasDumpError(Exception):
pass
class EOF(Exception):
pass
class EmptyEvent(Exception):
pass
class Tag(StringPattern):
pattern = r'^[A-Za-z][A-Za-z0-9._]{0,128}(:[A-Za-z0-9._-]*)?$'
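    # e.g. 'reviewed' and 'quality:good' match this pattern, while '6m' does
    # not, since a tag must start with a letter (hypothetical examples).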
class Event(Location):
'''Seismic event representation
:param lat: latitude of hypocenter (default 0.0)
:param lon: longitude of hypocenter (default 0.0)
:param time: origin time system timestamp
:param name: event identifier as string (optional)
:param depth: source depth (optional)
:param magnitude: magnitude of event (optional)
:param region: source region (optional)
:param catalog: name of catalog that lists this event (optional)
:param moment_tensor: moment tensor as
:py:class:`moment_tensor.MomentTensor` instance (optional)
:param duration: source duration as float (optional)
:param tags: list of tags describing event (optional)
:param extras: dictionary for user defined event attributes (optional).
Keys must be strings, values must be YAML serializable.
'''
time = Timestamp.T(default=Timestamp.D('1970-01-01 00:00:00'))
depth = Float.T(optional=True)
name = String.T(default='', optional=True, yamlstyle="'")
magnitude = Float.T(optional=True)
magnitude_type = String.T(optional=True, yamlstyle="'")
region = Unicode.T(optional=True, yamlstyle="'")
catalog = String.T(optional=True, yamlstyle="'")
moment_tensor = moment_tensor.MomentTensor.T(optional=True)
duration = Float.T(optional=True)
tags = List.T(Tag.T(), default=[])
extras = Dict.T(String.T(), Any.T(), default={})
def __init__(
self, lat=0., lon=0., north_shift=0., east_shift=0., time=0.,
name='', depth=None, elevation=None,
magnitude=None, magnitude_type=None, region=None, load=None,
loadf=None, catalog=None, moment_tensor=None, duration=None,
tags=None, extras=None):
if tags is None:
tags = []
if extras is None:
extras = {}
vals = None
if load is not None:
vals = Event.oldload(load)
elif loadf is not None:
vals = Event.oldloadf(loadf)
if vals:
lat, lon, north_shift, east_shift, time, name, depth, magnitude, \
magnitude_type, region, catalog, moment_tensor, duration, \
tags = vals
Location.__init__(
self, lat=lat, lon=lon,
north_shift=north_shift, east_shift=east_shift,
time=time, name=name, depth=depth,
elevation=elevation,
magnitude=magnitude, magnitude_type=magnitude_type,
region=region, catalog=catalog,
moment_tensor=moment_tensor, duration=duration, tags=tags,
extras=extras)
def time_as_string(self):
return util.time_to_str(self.time)
def set_name(self, name):
self.name = name
def olddump(self, filename):
file = open(filename, 'w')
self.olddumpf(file)
file.close()
def olddumpf(self, file):
if self.extras:
raise EventExtrasDumpError(
'Event user-defined extras attributes cannot be dumped in the '
'"basic" event file format. Use '
'dump_events(..., format="yaml").')
file.write('name = %s\n' % self.name)
file.write('time = %s\n' % util.time_to_str(self.time))
if self.lat != 0.0:
file.write('latitude = %.12g\n' % self.lat)
if self.lon != 0.0:
file.write('longitude = %.12g\n' % self.lon)
if self.north_shift != 0.0:
file.write('north_shift = %.12g\n' % self.north_shift)
if self.east_shift != 0.0:
file.write('east_shift = %.12g\n' % self.east_shift)
if self.magnitude is not None:
file.write('magnitude = %g\n' % self.magnitude)
file.write('moment = %g\n' %
moment_tensor.magnitude_to_moment(self.magnitude))
if self.magnitude_type is not None:
file.write('magnitude_type = %s\n' % self.magnitude_type)
if self.depth is not None:
file.write('depth = %.10g\n' % self.depth)
if self.region is not None:
file.write('region = %s\n' % self.region)
if self.catalog is not None:
file.write('catalog = %s\n' % self.catalog)
if self.moment_tensor is not None:
m = self.moment_tensor.m()
sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
file.write((
'mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
'strike2 = %g\ndip2 = %g\nrake2 = %g\n') % (
(m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2]) +
sdr1 + sdr2))
if self.duration is not None:
file.write('duration = %g\n' % self.duration)
if self.tags:
file.write('tags = %s\n' % ', '.join(self.tags))
@staticmethod
def unique(events, deltat=10., group_cmp=(lambda a, b:
cmp(a.catalog, b.catalog))):
groups = Event.grouped(events, deltat)
events = []
for group in groups:
if group:
                group.sort(key=cmp_to_key(group_cmp))
events.append(group[-1])
return events
@staticmethod
def grouped(events, deltat=10.):
events = list(events)
groups = []
for ia, a in enumerate(events):
groups.append([])
haveit = False
for ib, b in enumerate(events[:ia]):
if abs(b.time - a.time) < deltat:
groups[ib].append(a)
haveit = True
break
if not haveit:
groups[ia].append(a)
groups = [g for g in groups if g]
groups.sort(key=lambda g: sum(e.time for e in g) // len(g))
return groups
@staticmethod
def dump_catalog(events, filename=None, stream=None):
if filename is not None:
file = open(filename, 'w')
else:
file = stream
try:
i = 0
for ev in events:
ev.olddumpf(file)
file.write('--------------------------------------------\n')
i += 1
finally:
if filename is not None:
file.close()
@staticmethod
def oldload(filename):
with open(filename, 'r') as file:
return Event.oldloadf(file)
@staticmethod
def oldloadf(file):
d = {}
try:
for line in file:
if line.lstrip().startswith('#'):
continue
toks = line.split(' = ', 1)
if len(toks) == 2:
k, v = toks[0].strip(), toks[1].strip()
if k in ('name', 'region', 'catalog', 'magnitude_type'):
d[k] = v
if k in (('latitude longitude magnitude depth duration '
'north_shift east_shift '
'mnn mee mdd mne mnd med strike1 dip1 rake1 '
'strike2 dip2 rake2 duration').split()):
d[k] = float(v)
if k == 'time':
d[k] = util.str_to_time(v)
if k == 'tags':
d[k] = [x.strip() for x in v.split(',')]
if line.startswith('---'):
d['have_separator'] = True
break
except Exception as e:
raise FileParseError(e)
if not d:
raise EOF()
if 'have_separator' in d and len(d) == 1:
raise EmptyEvent()
mt = None
m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
if len(m6) == 6:
mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
else:
sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
if len(sdr) == 3:
moment = 1.0
if 'moment' in d:
moment = d['moment']
elif 'magnitude' in d:
moment = moment_tensor.magnitude_to_moment(d['magnitude'])
mt = moment_tensor.MomentTensor(
strike=sdr[0], dip=sdr[1], rake=sdr[2],
scalar_moment=moment)
return (
d.get('latitude', 0.0),
d.get('longitude', 0.0),
d.get('north_shift', 0.0),
d.get('east_shift', 0.0),
d.get('time', 0.0),
d.get('name', ''),
d.get('depth', None),
d.get('magnitude', None),
d.get('magnitude_type', None),
d.get('region', None),
d.get('catalog', None),
mt,
d.get('duration', None),
d.get('tags', []))
@staticmethod
def load_catalog(filename):
file = open(filename, 'r')
try:
while True:
try:
ev = Event(loadf=file)
yield ev
except EmptyEvent:
pass
except EOF:
pass
file.close()
def get_hash(self):
e = self
if isinstance(e.time, float):
stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.3FRAC')
else:
stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.6FRAC')
s = float_or_none_to_str
to_hash = ', '.join((
stime,
s(e.lat), s(e.lon), s(e.depth),
float_or_none_to_str(e.magnitude, 5),
str(e.catalog), str(e.name or ''),
str(e.region)))
return ehash(to_hash)
def human_str(self):
s = [
'Latitude [deg]: %g' % self.lat,
'Longitude [deg]: %g' % self.lon,
'Time [UTC]: %s' % util.time_to_str(self.time)]
if self.name:
s.append('Name: %s' % self.name)
if self.depth is not None:
s.append('Depth [km]: %g' % (self.depth / 1000.))
if self.magnitude is not None:
s.append('Magnitude [%s]: %3.1f' % (
self.magnitude_type or 'M?', self.magnitude))
if self.region:
s.append('Region: %s' % self.region)
if self.catalog:
s.append('Catalog: %s' % self.catalog)
if self.moment_tensor:
s.append(str(self.moment_tensor))
return '\n'.join(s)
def detect_format(filename):
with open(filename, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#') or line.startswith('%'):
continue
if line.startswith('--- !pf.Event'):
return 'yaml'
else:
return 'basic'
return 'basic'
def load_events(filename, format='detect'):
'''Read events file.
:param filename: name of file as str
:param format: file format: ``'detect'``, ``'basic'``, or ``'yaml'``
:returns: list of :py:class:`Event` objects
'''
if format == 'detect':
format = detect_format(filename)
if format == 'yaml':
from pyrocko import guts
events = [
ev for ev in guts.load_all(filename=filename)
if isinstance(ev, Event)]
return events
elif format == 'basic':
return list(Event.load_catalog(filename))
else:
from pyrocko.io.io_common import FileLoadError
raise FileLoadError('unknown event file format: %s' % format)
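# A minimal usage sketch (hypothetical file names):
#
#     events = load_events('events.txt')                 # format auto-detected
#     dump_events(events, 'events.yaml', format='yaml')  # re-export as YAML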
class OneEventRequired(Exception):
pass
def load_one_event(filename, format='detect'):
events = load_events(filename)
if len(events) != 1:
raise OneEventRequired(
'exactly one event is required in "%s"' % filename)
return events[0]
def dump_events(events, filename=None, stream=None, format='basic'):
'''Write events file.
:param events: list of :py:class:`Event` objects
:param filename: name of file as str
:param format: file format: ``'basic'``, or ``'yaml'``
'''
if format == 'basic':
Event.dump_catalog(events, filename=filename, stream=stream)
elif format == 'yaml':
from pyrocko import guts
events = [ev for ev in events if isinstance(ev, Event)]
        guts.dump_all(object=events, filename=filename, stream=stream)
else:
from pyrocko.io.io_common import FileSaveError
raise FileSaveError('unknown event file format: %s' % format)
def load_kps_event_list(filename):
elist = []
f = open(filename, 'r')
for line in f:
toks = line.split()
if len(toks) < 7:
continue
tim = util.to_time_float(util.ctimegm(toks[0]+' '+toks[1]))
lat, lon, depth, magnitude = [float(x) for x in toks[2:6]]
duration = float(toks[10])
region = toks[-1]
name = util.gmctime_fn(tim)
e = Event(
lat, lon, tim,
name=name,
depth=depth,
magnitude=magnitude,
duration=duration,
region=region)
elist.append(e)
f.close()
return elist
def load_gfz_event_list(filename):
from pyrocko import catalog
cat = catalog.Geofon()
elist = []
f = open(filename, 'r')
for line in f:
e = cat.get_event(line.strip())
elist.append(e)
f.close()
return elist
|
gpl-3.0
| 7,530,054,320,324,479,000
| 29.033126
| 79
| 0.52268
| false
| 3.679858
| false
| false
| false
|
mikebryant/rapid-router
|
game/views/api.py
|
1
|
9289
|
# -*- coding: utf-8 -*-
# Code for Life
#
# Copyright (C) 2015, Ocado Innovation Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ADDITIONAL TERMS – Section 7 GNU General Public Licence
#
# This licence does not grant any right, title or interest in any “Ocado” logos,
# trade names or the trademark “Ocado” or any other trademarks or domain names
# owned by Ocado Innovation Limited or the Ocado group of companies or any other
# distinctive brand features of “Ocado” as may be secured from time to time. You
# must not distribute any modification of this program using the trademark
# “Ocado” or claim any affiliation or association with Ocado or its employees.
#
# You are not authorised to use the name Ocado (or any of its trade names) or
# the names of any author or contributor in advertising or for publicity purposes
# pertaining to the distribution of this program, without the prior written
# authorisation of Ocado.
#
# Any propagation, distribution or conveyance of this program must include this
# copyright notice and these terms. You must not misrepresent the origins of this
# program; modified versions of the program must be marked as such and not
# identified as the original program.
from django.http import HttpResponse
from game.models import Level, Episode, LevelBlock, Block, Character, LevelDecor
from game.serializers import LevelListSerializer, EpisodeListSerializer, LevelDetailSerializer, EpisodeDetailSerializer, \
LevelBlockSerializer, BlockSerializer, CharacterSerializer, LevelMapDetailSerializer, \
LevelDecorSerializer, LevelModeSerializer, LevelMapListSerializer
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import viewsets
from game.decor import get_all_decor, get_decor_element_by_pk, get_decors_url
from game.theme import get_all_themes, get_theme_by_pk, get_themes_url
@api_view(('GET',))
def api_root(request, format=None):
return Response({
'blocks': reverse('block-list', request=request, format=format),
'characters': reverse('character-list', request=request, format=format),
'decors': reverse('decor-list', request=request, format=format),
'episodes': reverse('episode-list', request=request, format=format),
'levels': reverse('level-list', request=request, format=format),
'maps': reverse('map-list', request=request, format=format),
'themes': reverse('theme-list', request=request, format=format),
})
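# A minimal usage sketch (hypothetical host and URL prefix; the actual routes
# depend on the project's urlconf):
#
#     curl https://example.com/api/levels/
#
# would be handled by level_list below and return the serialized level list.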
@api_view(('GET',))
def decor_list(request, format=None):
decors = get_all_decor()
data = [{get_decors_url(i.pk, request)} for i in decors]
return Response(data)
@api_view(('GET',))
def decor_detail(request, pk, format=None):
try:
decor = get_decor_element_by_pk(pk=pk)
except KeyError:
return HttpResponse(status=404)
data = decor.__dict__.copy()
data['theme'] = get_themes_url(data['theme'].pk, request)
return Response(data)
@api_view(('GET',))
def level_list(request, format=None):
levels = Level.objects.sorted_levels()
serializer = LevelListSerializer(levels, many=True, context={'request': request})
return Response(serializer.data)
# pk is the episode id
@api_view(('GET',))
def level_for_episode(request, pk, format=None):
levels = Level.objects.filter(episode__id=pk)
serializer = LevelListSerializer(levels, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def level_detail(request, pk, format=None):
try:
level = Level.objects.get(pk=pk)
except Level.DoesNotExist:
return HttpResponse(status=404)
serializer = LevelDetailSerializer(level, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def map_list(request, format=None):
levels = Level.objects.sorted_levels()
serializer = LevelMapListSerializer(levels, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def map_for_level(request, pk, format=None):
try:
level = Level.objects.get(pk=pk)
except Level.DoesNotExist:
return HttpResponse(status=404)
serializer = LevelMapDetailSerializer(level, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def mode_for_level(request, pk, format=None):
try:
level = Level.objects.get(pk=pk)
except Level.DoesNotExist:
return HttpResponse(status=404)
serializer = LevelModeSerializer(level, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def episode_list(request, format=None):
episodes = Episode.objects.all()
serializer = EpisodeListSerializer(episodes, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def episode_detail(request, pk, format=None):
try:
episode = Episode.objects.get(pk=pk)
except Episode.DoesNotExist:
return HttpResponse(status=404)
serializer = EpisodeDetailSerializer(episode, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def levelblock_list(request, level, format=None):
blocks = LevelBlock.objects.filter(level__id=level)
serializer = LevelBlockSerializer(blocks, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def levelblock_for_level(request, pk, format=None):
levelblocks = LevelBlock.objects.filter(level__id=pk)
serializer = LevelBlockSerializer(levelblocks, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def levelblock_detail(request, pk, format=None):
try:
levelblock = LevelBlock.objects.get(pk=pk)
except LevelBlock.DoesNotExist:
return HttpResponse(status=404)
serializer = LevelBlockSerializer(levelblock, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def leveldecor_list(request, level, format=None):
leveldecors = LevelDecor.objects.filter(level__id=level)
serializer = LevelDecorSerializer(leveldecors, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def leveldecor_for_level(request, pk, format=None):
leveldecors = LevelDecor.objects.filter(level__id=pk)
serializer = LevelDecorSerializer(leveldecors, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def leveldecor_detail(request, pk, format=None):
try:
leveldecor = LevelDecor.objects.get(pk=pk)
except LevelDecor.DoesNotExist:
return HttpResponse(status=404)
serializer = LevelDecorSerializer(leveldecor, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def block_list(request, format=None):
block = Block.objects.all()
serializer = BlockSerializer(block, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def block_detail(request, pk, format=None):
try:
block = Block.objects.get(pk=pk)
except Block.DoesNotExist:
return HttpResponse(status=404)
serializer = BlockSerializer(block, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def theme_list(request, format=None):
themes = get_all_themes()
data = [{get_themes_url(i.pk, request)} for i in themes]
return Response(data)
@api_view(('GET',))
def theme_detail(request, pk, format=None):
try:
theme = get_theme_by_pk(pk)
except KeyError:
return HttpResponse(status=404)
return Response(theme.__dict__)
@api_view(('GET',))
def character_list(request, format=None):
characters = Character.objects.all()
serializer = CharacterSerializer(characters, many=True, context={'request': request})
return Response(serializer.data)
@api_view(('GET',))
def character_detail(request, pk, format=None):
try:
character = Character.objects.get(pk=pk)
except Character.DoesNotExist:
return HttpResponse(status=404)
serializer = CharacterSerializer(character, context={'request': request})
return Response(serializer.data)
# Maybe used later for when we use a viewset which requires multiple serializer
class MultiSerializerViewSet(viewsets.ModelViewSet):
serializers = {
'default': None,
}
def get_serializer_class(self):
return self.serializers.get(self.action,
self.serializers['default'])
|
agpl-3.0
| 7,866,183,302,795,193,000
| 33.722846
| 122
| 0.715349
| false
| 3.850083
| false
| false
| false
|
magfest/ubersystem
|
uber/models/mits.py
|
1
|
11679
|
import os
from functools import wraps
from PIL import Image
from residue import CoerceUTF8 as UnicodeText, UTCDateTime, UUID
from sqlalchemy import and_
from sideboard.lib import on_startup
from sqlalchemy.schema import ForeignKey
from sqlalchemy.types import Boolean, Integer
from sqlalchemy.ext.hybrid import hybrid_property
from uber.config import c
from uber.models import MagModel
from uber.models.types import default_relationship as relationship, utcnow, Choice, DefaultColumn as Column, MultiChoice
__all__ = ['MITSTeam', 'MITSApplicant', 'MITSGame', 'MITSPicture', 'MITSDocument', 'MITSTimes']
class MITSTeam(MagModel):
name = Column(UnicodeText)
days_available = Column(Integer, nullable=True)
hours_available = Column(Integer, nullable=True)
concurrent_attendees = Column(Integer, default=0)
panel_interest = Column(Boolean, nullable=True, admin_only=True)
showcase_interest = Column(Boolean, nullable=True, admin_only=True)
want_to_sell = Column(Boolean, default=False)
address = Column(UnicodeText)
submitted = Column(UTCDateTime, nullable=True)
waiver_signature = Column(UnicodeText)
waiver_signed = Column(UTCDateTime, nullable=True)
applied = Column(UTCDateTime, server_default=utcnow())
status = Column(Choice(c.MITS_APP_STATUS), default=c.PENDING, admin_only=True)
applicants = relationship('MITSApplicant', backref='team')
games = relationship('MITSGame', backref='team')
schedule = relationship('MITSTimes', uselist=False, backref='team')
panel_app = relationship('MITSPanelApplication', uselist=False, backref='team')
duplicate_of = Column(UUID, nullable=True)
deleted = Column(Boolean, default=False)
# We've found that a lot of people start filling out an application and
# then instead of continuing their application just start over fresh and
# fill out a new one. In these cases we mark the application as
# soft-deleted and then set the duplicate_of field so that when an
# applicant tries to log into the original application, we can redirect
# them to the correct application.
email_model_name = 'team'
@property
def accepted(self):
return self.status == c.ACCEPTED
@property
def email(self):
return [applicant.email for applicant in self.primary_contacts]
@property
def primary_contacts(self):
return [a for a in self.applicants if a.primary_contact]
@property
def salutation(self):
return ' and '.join(applicant.first_name for applicant in self.primary_contacts)
@property
def comped_badge_count(self):
return len([
a for a in self.applicants
if a.attendee_id and a.attendee.paid in [c.NEED_NOT_PAY, c.REFUNDED]])
@property
def total_badge_count(self):
return len([a for a in self.applicants if a.attendee_id])
@property
def can_add_badges(self):
uncomped_badge_count = len([
a for a in self.applicants
if a.attendee_id and a.attendee.paid not in [c.NEED_NOT_PAY, c.REFUNDED]])
claimed_badges = len(self.applicants) - uncomped_badge_count
return claimed_badges < c.MITS_BADGES_PER_TEAM
@property
def can_save(self):
return c.HAS_MITS_ADMIN_ACCESS or self.status in [c.ACCEPTED, c.WAITLISTED] or (
self.is_new
and c.BEFORE_MITS_SUBMISSION_DEADLINE
or c.BEFORE_MITS_EDITING_DEADLINE)
@property
def completed_panel_request(self):
return self.panel_interest is not None
@property
def completed_showcase_request(self):
return self.showcase_interest is not None
@property
def completed_hotel_form(self):
"""
This is "any" rather than "all" because teams are allowed to
add and remove members even after their application has been
submitted. Rather than suddenly downgrade their completion
percentage, it makes more sense to send such teams an
automated email indicating that they need to provide their
remaining hotel info.
"""
return any(a.declined_hotel_space or a.requested_room_nights for a in self.applicants)
@property
def no_hotel_space(self):
return all(a.declined_hotel_space for a in self.applicants)
@property
def steps_completed(self):
if not self.days_available:
return 1
elif not self.games:
return 2
elif not self.submitted:
return 3
else:
return 4
@property
def completion_percentage(self):
return 100 * self.steps_completed // c.MITS_APPLICATION_STEPS
class MITSApplicant(MagModel):
team_id = Column(ForeignKey('mits_team.id'))
attendee_id = Column(ForeignKey('attendee.id'), nullable=True)
primary_contact = Column(Boolean, default=False)
first_name = Column(UnicodeText)
last_name = Column(UnicodeText)
email = Column(UnicodeText)
cellphone = Column(UnicodeText)
contact_method = Column(Choice(c.MITS_CONTACT_OPTS), default=c.TEXTING)
declined_hotel_space = Column(Boolean, default=False)
requested_room_nights = Column(MultiChoice(c.MITS_ROOM_NIGHT_OPTS), default='')
email_model_name = 'applicant'
@property
def email_to_address(self):
if self.attendee:
return self.attendee.email
return self.email
@property
def full_name(self):
return self.first_name + ' ' + self.last_name
def has_requested(self, night):
return night in self.requested_room_nights_ints
class MITSGame(MagModel):
team_id = Column(ForeignKey('mits_team.id'))
name = Column(UnicodeText)
promo_blurb = Column(UnicodeText)
description = Column(UnicodeText)
genre = Column(UnicodeText)
phase = Column(Choice(c.MITS_PHASE_OPTS), default=c.DEVELOPMENT)
min_age = Column(Choice(c.MITS_AGE_OPTS), default=c.CHILD)
age_explanation = Column(UnicodeText)
min_players = Column(Integer, default=2)
max_players = Column(Integer, default=4)
copyrighted = Column(Choice(c.MITS_COPYRIGHT_OPTS), nullable=True)
personally_own = Column(Boolean, default=False)
unlicensed = Column(Boolean, default=False)
professional = Column(Boolean, default=False)
pictures = relationship('MITSPicture', backref='team')
documents = relationship('MITSDocument', backref='team')
@hybrid_property
def has_been_accepted(self):
return self.team.status == c.ACCEPTED
@has_been_accepted.expression
def has_been_accepted(cls):
return and_(MITSTeam.id == cls.team_id, MITSTeam.status == c.ACCEPTED)
@property
def guidebook_name(self):
return self.team.name
@property
def guidebook_subtitle(self):
return self.name
@property
def guidebook_desc(self):
return self.description
@property
def guidebook_location(self):
return ''
@property
def guidebook_image(self):
if not self.pictures:
return ''
for image in self.pictures:
if image.is_header:
return image.filename
return self.pictures[0].filename
@property
def guidebook_thumbnail(self):
if not self.pictures:
return ''
for image in self.pictures:
if image.is_thumbnail:
return image.filename
return self.pictures[1].filename if len(self.pictures) > 1 else self.pictures[0].filename
@property
def guidebook_images(self):
if not self.pictures:
return ['', '']
header = None
thumbnail = None
for image in self.pictures:
if image.is_header and not header:
header = image
if image.is_thumbnail and not thumbnail:
thumbnail = image
if not header:
header = self.pictures[0]
if not thumbnail:
thumbnail = self.pictures[1] if len(self.pictures) > 1 else self.pictures[0]
if header == thumbnail:
return [header.filename], [header]
else:
return [header.filename, thumbnail.filename], [header, thumbnail]
class MITSPicture(MagModel):
game_id = Column(UUID, ForeignKey('mits_game.id'))
filename = Column(UnicodeText)
content_type = Column(UnicodeText)
extension = Column(UnicodeText)
description = Column(UnicodeText)
@property
def url(self):
return '../mits/view_picture?id={}'.format(self.id)
@property
def filepath(self):
return os.path.join(c.MITS_PICTURE_DIR, str(self.id))
@property
def is_header(self):
try:
return Image.open(self.filepath).size == tuple(map(int, c.MITS_HEADER_SIZE))
except OSError:
# This probably isn't an image, so it's not a header image
return
@property
def is_thumbnail(self):
try:
return Image.open(self.filepath).size == tuple(map(int, c.MITS_THUMBNAIL_SIZE))
except OSError:
# This probably isn't an image, so it's not a thumbnail image
return
class MITSDocument(MagModel):
game_id = Column(UUID, ForeignKey('mits_game.id'))
filename = Column(UnicodeText)
description = Column(UnicodeText)
@property
def url(self):
return '../mits/download_doc?id={}'.format(self.id)
@property
def filepath(self):
return os.path.join(c.MITS_PICTURE_DIR, str(self.id))
class MITSTimes(MagModel):
team_id = Column(ForeignKey('mits_team.id'))
showcase_availability = Column(MultiChoice(c.MITS_SHOWCASE_SCHEDULE_OPTS))
availability = Column(MultiChoice(c.MITS_SCHEDULE_OPTS))
class MITSPanelApplication(MagModel):
team_id = Column(ForeignKey('mits_team.id'))
name = Column(UnicodeText)
description = Column(UnicodeText)
length = Column(Choice(c.PANEL_STRICT_LENGTH_OPTS), default=c.SIXTY_MIN)
participation_interest = Column(Boolean, default=False)
@on_startup
def add_applicant_restriction():
"""
We use convenience functions for our form handling, e.g. to
instantiate an attendee from an id or from form data we use the
session.attendee() method. This method runs on startup and overrides
the methods which are used for the game application forms to add a
new "applicant" parameter. If truthy, this triggers three
additional behaviors:
1) We check that there is currently a logged in team, and redirect
to the initial application form if there is not.
2) We check that the item being edited belongs to the
currently-logged-in team and raise an exception if it does not.
This check is bypassed for new things which have not yet been
saved to the database.
3) We set the "team" relationship on the model to the
logged-in team.
"""
from uber.models import Session
def override_getter(method_name):
orig_getter = getattr(Session.SessionMixin, method_name)
@wraps(orig_getter)
def with_applicant(self, *args, **kwargs):
applicant = kwargs.pop('applicant', False)
instance = orig_getter(self, *args, **kwargs)
if applicant:
team = self.logged_in_mits_team()
assert instance.is_new or team == instance.team
instance.team = team
return instance
setattr(Session.SessionMixin, method_name, with_applicant)
for name in [
'mits_applicant', 'mits_game', 'mits_times', 'mits_picture', 'mits_document', 'mits_panel_application'
]:
override_getter(name)
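# A hedged usage sketch of the "applicant" behavior described in the
# docstring above; the handler context and variable names are hypothetical:
#
#     with Session() as session:
#         game = session.mits_game(game_id, applicant=True)
#         # redirects to the initial application form if no team is
#         # logged in, asserts the game belongs to the logged-in team
#         # (unless it is new), and sets game.team before we edit it
#         game.name = new_name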
|
agpl-3.0
| 212,782,243,609,174,400
| 32.950581
| 120
| 0.662557
| false
| 3.831693
| false
| false
| false
|
nuagenetworks/tempest
|
tempest/api/compute/servers/test_virtual_interfaces.py
|
1
|
2373
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from tempest.api.compute import base
from tempest import config
from tempest.lib import decorators
from tempest import test
CONF = config.CONF
class VirtualInterfacesTestJSON(base.BaseV2ComputeTest):
@classmethod
def setup_credentials(cls):
# This test needs a network and a subnet
cls.set_network_resources(network=True, subnet=True)
super(VirtualInterfacesTestJSON, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(VirtualInterfacesTestJSON, cls).setup_clients()
cls.client = cls.servers_client
@classmethod
def resource_setup(cls):
super(VirtualInterfacesTestJSON, cls).resource_setup()
server = cls.create_test_server(wait_until='ACTIVE')
cls.server_id = server['id']
@decorators.skip_because(bug="1183436",
condition=CONF.service_available.neutron)
@test.idempotent_id('96c4e2ef-5e4d-4d7f-87f5-fed6dca18016')
@test.services('network')
def test_list_virtual_interfaces(self):
        # Positive test: should be able to GET the virtual interfaces list
# for a given server_id
output = self.client.list_virtual_interfaces(self.server_id)
self.assertIsNotNone(output)
virt_ifaces = output
self.assertNotEqual(0, len(virt_ifaces['virtual_interfaces']),
'Expected virtual interfaces, got 0 interfaces.')
for virt_iface in virt_ifaces['virtual_interfaces']:
mac_address = virt_iface['mac_address']
self.assertTrue(netaddr.valid_mac(mac_address),
"Invalid mac address detected. mac address: %s"
% mac_address)
|
apache-2.0
| 5,632,622,960,499,097,000
| 37.901639
| 78
| 0.677202
| false
| 4.155867
| true
| false
| false
|
dunkhong/grr
|
grr/server/grr_response_server/gui/api_plugins/timeline.py
|
1
|
2641
|
#!/usr/bin/env python
"""A module with API handlers related to the timeline colllection."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from typing import Optional
from typing import Text
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import body
from grr_response_proto.api import timeline_pb2
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.flows.general import timeline
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui.api_plugins import client as api_client
from grr_response_server.gui.api_plugins import flow as api_flow
class ApiGetCollectedTimelineArgs(rdf_structs.RDFProtoStruct):
"""An RDF wrapper class for the arguments of timeline exporter arguments."""
protobuf = timeline_pb2.ApiGetCollectedTimelineArgs
rdf_deps = [
api_client.ApiClientId,
api_flow.ApiFlowId,
]
class ApiGetCollectedTimelineHandler(api_call_handler_base.ApiCallHandler):
"""An API handler for the timeline exporter."""
args_type = ApiGetCollectedTimelineArgs
def Handle(
self,
args,
token = None,
):
"""Handles requests for the timeline export API call."""
client_id = str(args.client_id)
flow_id = str(args.flow_id)
flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
if flow_obj.flow_class_name != timeline.TimelineFlow.__name__:
message = "Flow '{}' is not a timeline flow".format(flow_id)
raise ValueError(message)
if args.format == ApiGetCollectedTimelineArgs.Format.BODY: # pytype: disable=attribute-error
return self._StreamBody(client_id=client_id, flow_id=flow_id)
if args.format == ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED: # pytype: disable=attribute-error
return self._StreamRawGzchunked(client_id=client_id, flow_id=flow_id)
message = "Incorrect timeline export format: {}".format(args.format)
raise ValueError(message)
def _StreamBody(
self,
client_id,
flow_id,
):
entries = timeline.Entries(client_id=client_id, flow_id=flow_id)
content = body.Stream(entries)
filename = "timeline_{}.body".format(flow_id)
return api_call_handler_base.ApiBinaryStream(filename, content)
def _StreamRawGzchunked(
self,
client_id,
flow_id,
):
content = timeline.Blobs(client_id=client_id, flow_id=flow_id)
filename = "timeline_{}.gzchunked".format(flow_id)
return api_call_handler_base.ApiBinaryStream(filename, content)
|
apache-2.0
| 8,599,119,469,534,929,000
| 32.858974
| 106
| 0.728891
| false
| 3.603001
| false
| false
| false
|
barct/odoo-coop
|
ersep_regulations/__openerp__.py
|
1
|
1907
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Fernando Hidalgo (http://www.hidalgofernando.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'ERSeP Regulations',
'version': '9.0.0.0.1',
'category': 'Tools',
'sequence': 1,
'summary': '',
'description': """
ERSeP Regulations
=================
This module is a regionalization of odoo-coop for the province of Córdoba,
based on the experience of the "Cooperativa Anisacate" cooperative.
It uses the Argentine tax & legal regulations, particularly those of the
province of Córdoba, as applied through the regulator ERSeP.
""",
'author': 'Fernando Hidalgo',
'website': 'www.hidalgofernando.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'electric_utility',
'base',
'l10n_ar_chart',
],
'external_dependencies': {
# 'python': ['dbfread', 'hashlib'],
},
'data': [
'data/account_chart.xml',
'data/account_tax.xml',
'data/service_category.xml',
],
'demo': [
],
'test': [
],
}
|
gpl-3.0
| 8,221,518,737,955,704,000
| 30.75
| 94
| 0.579528
| false
| 3.706226
| false
| false
| false
|
arsenalstriker14/imagetraccloud
|
imagetrac_docker/taskmanager/migrations/0002_auto_20170122_1808.py
|
1
|
3274
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-22 18:08
from __future__ import unicode_literals
import django.core.files.storage
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('taskmanager', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='inboxentry',
name='attachment',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment10',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment2',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment3',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment4',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment5',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment6',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment7',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment8',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
migrations.AlterField(
model_name='inboxentry',
name='attachment9',
field=models.FileField(blank=True, null=True, storage=django.core.files.storage.FileSystemStorage(base_url='/uploads', location='/app/uploads'), upload_to='attachments/'),
),
]
|
mit
| 2,795,914,541,022,039,600
| 48.606061
| 183
| 0.63989
| false
| 4.330688
| false
| false
| false
|
dbrattli/python-gearshift
|
gearshift/visit/api.py
|
1
|
10475
|
import logging
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
import threading
import time
from random import random
from datetime import timedelta, datetime
import cherrypy
from cherrypy import request
from gearshift import config
from gearshift.util import load_class
from gearshift.identity.base import verify_identity_status
log = logging.getLogger("gearshift.visit")
# Global VisitManager
_manager = None
# Global list of plugins for the Visit Tracking framework
_plugins = list()
# Accessor functions for getting and setting the current visit information.
def current():
"""Retrieve the current visit record from the cherrypy request."""
return getattr(cherrypy.request, "tg_visit", None)
def set_current(visit):
"""Set the current visit record on the cherrypy request being processed."""
cherrypy.request.tg_visit = visit
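# e.g. a request handler could read the active visit with "visit = current()"
# and branch on visit.is_new (illustrative usage)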
def _create_visit_manager(timeout):
"""Create a VisitManager based on the plugin specified in the config file."""
plugin_name = config.get("tools.visit.manager",
"gearshift.visit.sovisit.SqlObjectVisitManager")
try:
plugin = load_class(plugin_name)
except Exception, e:
log.error("Error loading visit plugin '%s': %s", plugin_name, e)
raise RuntimeError("VisitManager plugin missing: %s" % plugin_name)
log.debug("Loading visit manager from plugin: %s", plugin_name)
return plugin(timeout)
# Interface for the TurboGears extension
def shutdown_extension():
# Bail out if this extension is not running.
global _manager
if not _manager:
return
log.info("Visit Tracking shutting down")
_manager.shutdown()
_manager = None
def create_extension_model():
"""Create the data model of the VisitManager if one exists."""
if _manager:
_manager.create_model()
def enable_visit_plugin(plugin):
"""Register a visit tracking plugin.
These plugins will be called for each request.
"""
_plugins.append(plugin)
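# A hedged plugin sketch; the class is hypothetical. The only contract this
# module relies on is a record_request(visit) method (see before_handler):
#
#     class ClickPathPlugin(object):
#         def record_request(self, visit):
#             log.debug("visit %s made a request", visit.key)
#
#     enable_visit_plugin(ClickPathPlugin())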
class Visit(object):
"""Basic container for visit related data."""
def __init__(self, key, is_new):
self.key = key
self.is_new = is_new
class VisitTool(cherrypy.Tool):
"""A tool that automatically tracks visitors."""
def __init__(self):
log.debug("Visit tool initialised")
## start_extension()
# Raise priority since we need the VisitTool to run as early as
# possible
return super(VisitTool, self).__init__(point="before_handler",
callable=self.before_handler,
priority=20)
def start_extension(self):
# Bail out if the application hasn't enabled this extension
if not config.get("tools.visit.on", False):
return
# Bail out if this extension is already running
global _manager
if _manager:
return
log.info("Visit Tracking starting")
# How long may the visit be idle before a new visit ID is assigned?
# The default is 20 minutes.
timeout = timedelta(minutes=config.get("tools.visit.timeout", 20))
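        # e.g. "tools.visit.timeout = 40" in the deployment config would
        # double the idle window (illustrative value)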
# Create the thread that manages updating the visits
_manager = _create_visit_manager(timeout)
def before_handler(self, **kw):
"""Check whether submitted request belongs to an existing visit."""
get = kw.get
# Where to look for the session key in the request and in which order
source = [s.strip().lower() for s in
kw.get('source', 'cookie').split(',')]
if set(source).difference(('cookie', 'form')):
log.warning("Unsupported 'tools.visit.source' '%s' in configuration.")
# Get the name to use for the identity cookie.
self.cookie_name = get("cookie.name", "tg-visit")
# and the name of the request param. MUST NOT contain dashes or dots,
        # otherwise the NestedVariablesFilter will choke on it.
visit_key_param = get("form.name", "tg_visit")
# TODO: The path should probably default to whatever
# the root is masquerading as in the event of a
# virtual path filter.
self.cookie_path = get("cookie.path", "/")
# The secure bit should be set for HTTPS only sites
self.cookie_secure = get("cookie.secure", False)
# By default, I don't specify the cookie domain.
self.cookie_domain = get("cookie.domain", None)
assert self.cookie_domain != "localhost", "localhost" \
" is not a valid value for visit.cookie.domain. Try None instead."
# Use max age only if the cookie shall explicitly be permanent
self.cookie_max_age = get("cookie.permanent",
False) and int(get("timeout", "20")) * 60 or None
cpreq = cherrypy.request
visit = current()
if not visit:
visit_key = None
for source in source:
if source == 'cookie':
visit_key = cpreq.cookie.get(self.cookie_name)
if visit_key:
visit_key = visit_key.value
log.debug("Retrieved visit key '%s' from cookie '%s'.",
visit_key, self.cookie_name)
elif source == 'form':
visit_key = cpreq.params.pop(visit_key_param, None)
log.debug(
"Retrieved visit key '%s' from request param '%s'.",
visit_key, visit_key_param)
if visit_key:
visit = _manager.visit_for_key(visit_key)
break
if visit:
log.debug("Using visit from request with key: %s", visit_key)
else:
visit_key = self._generate_key()
visit = _manager.new_visit_with_key(visit_key)
log.debug("Created new visit with key: %s", visit_key)
self.send_cookie(visit_key)
set_current(visit)
# Inform all the plugins that a request has been made for the current
# visit. This gives plugins the opportunity to track click-path or
# retrieve the visitor's identity.
try:
for plugin in _plugins:
plugin.record_request(visit)
except cherrypy.InternalRedirect, e:
            # Can't allow an InternalRedirect here because CherryPy is dumb;
            # instead, change cherrypy.request.path_info to the desired url.
cherrypy.request.path_info = e.path
def _generate_key():
"""Return a (pseudo)random hash based on seed."""
# Adding remote.ip and remote.port doesn't make this any more secure,
# but it makes people feel secure... It's not like I check to make
# certain you're actually making requests from that host and port. So
# it's basically more noise.
key_string = '%s%s%s%s' % (random(), datetime.now(),
cherrypy.request.remote.ip, cherrypy.request.remote.port)
return sha1(key_string).hexdigest()
_generate_key = staticmethod(_generate_key)
def clear_cookie(self):
"""Clear any existing visit ID cookie."""
cookies = cherrypy.response.cookie
# clear the cookie
log.debug("Clearing visit ID cookie")
cookies[self.cookie_name] = ''
cookies[self.cookie_name]['path'] = self.cookie_path
cookies[self.cookie_name]['expires'] = ''
cookies[self.cookie_name]['max-age'] = 0
def send_cookie(self, visit_key):
"""Send an visit ID cookie back to the browser."""
cookies = cherrypy.response.cookie
cookies[self.cookie_name] = visit_key
cookies[self.cookie_name]['path'] = self.cookie_path
if self.cookie_secure:
cookies[self.cookie_name]['secure'] = True
if self.cookie_domain:
cookies[self.cookie_name]['domain'] = self.cookie_domain
max_age = self.cookie_max_age
if max_age:
# use 'expires' because MSIE ignores 'max-age'
cookies[self.cookie_name]['expires'] = '"%s"' % time.strftime(
"%a, %d-%b-%Y %H:%M:%S GMT",
time.gmtime(time.time() + max_age))
# 'max-age' takes precedence on standard conformant browsers
            # (this is better because there are no time sync issues here)
cookies[self.cookie_name]['max-age'] = max_age
log.debug("Sending visit ID cookie: %s",
cookies[self.cookie_name].output())
class BaseVisitManager(threading.Thread):
def __init__(self, timeout):
super(BaseVisitManager, self).__init__(name="VisitManager")
self.timeout = timeout
self.queue = dict()
self.lock = threading.Lock()
self._shutdown = threading.Event()
self.interval = 30
self.setDaemon(True)
# We need to create the visit model before the manager thread is
# started.
self.create_model()
self.start()
def create_model(self):
pass
def new_visit_with_key(self, visit_key):
"""Return a new Visit object with the given key."""
raise NotImplementedError
def visit_for_key(self, visit_key):
"""Return the visit for this key.
Return None if the visit doesn't exist or has expired.
"""
raise NotImplementedError
def update_queued_visits(self, queue):
"""Extend the expiration of the queued visits."""
raise NotImplementedError
def update_visit(self, visit_key, expiry):
try:
self.lock.acquire()
self.queue[visit_key] = expiry
finally:
self.lock.release()
def shutdown(self, timeout=None):
self._shutdown.set()
self.join(timeout)
if self.isAlive():
log.error("Visit Manager thread failed to shutdown.")
def run(self):
while not self._shutdown.isSet():
self.lock.acquire()
queue = None
try:
# make a copy of the queue and empty the original
if self.queue:
queue = self.queue.copy()
self.queue.clear()
finally:
self.lock.release()
if queue is not None:
self.update_queued_visits(queue)
self._shutdown.wait(self.interval)
|
mit
| -8,914,088,577,078,914,000
| 36.27758
| 82
| 0.599427
| false
| 4.3107
| true
| false
| false
|
elebihan/python-ptraceplus
|
ptraceplus/tracer.py
|
1
|
5688
|
# -*- coding: utf-8 -*-
#
# python-ptraceplus - Ptrace bindings + extra stuff
#
# Copyright (c) 2013 Eric Le Bihan <eric.le.bihan.dev@free.fr>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Process tracing helper
"""
import os
import signal
import ptraceminus as ptrace
from collections import OrderedDict
from gettext import gettext as _
from .process import TracedProcess, create_process_event, SignalEvent
from .utils import spawn_child
from .common import debug
class TracerError(Exception):
"""Error raised when a tracing operation failed"""
class Tracer(object):
"""Trace a process"""
def __init__(self):
self._procs = OrderedDict()
self._fork_enabled = False
self._exec_enabled = False
self._sysgood_enabled = False
self._options = 0
def __getitem__(self, key):
return self._procs[key]
def __iter__(self):
return self._procs.itervalues()
def __contains__(self, key):
return key in self._procs
@property
def has_processes(self):
return (len(self._procs) != 0)
def _set_fork_enabled(self, value):
mask = ptrace.O_TRACEFORK | ptrace.O_TRACEVFORK
if value:
self._options |= mask
else:
self._options &= ~mask
self._fork_enabled = value
def _get_fork_enabled(self):
return self._fork_enabled
fork_enabled = property(_get_fork_enabled, _set_fork_enabled,
None,
"Enable fork tracing")
def _set_exec_enabled(self, value):
mask = ptrace.O_TRACEEXEC | ptrace.O_TRACEEXIT
if value:
self._options |= mask
else:
self._options &= ~mask
self._exec_enabled = value
def _get_exec_enabled(self):
return self._exec_enabled
exec_enabled = property(_get_exec_enabled, _set_exec_enabled,
None,
"Enable exec tracing")
def _set_sysgood_enabled(self, value):
mask = ptrace.O_TRACESYSGOOD
if value:
self._options |= mask
else:
self._options &= ~mask
self._sysgood_enabled = value
def _get_sysgood_enabled(self):
return self._sysgood_enabled
sysgood_enabled = property(_get_sysgood_enabled, _set_sysgood_enabled,
None,
"""Enable sysgood: ask the kernel to set bit
#7 of the signal number if the signal comes
from kernel space. It is unset if it comes
from user space""")
def spawn_process(self, args, env=None, quiet=True):
flags = 0
pid = spawn_child(args, env, quiet)
pid, status = os.waitpid(pid, flags)
proc = self.add_process(pid)
proc.syscall()
return proc
def add_process(self, pid, is_attached=True, parent=None):
if pid in self._procs:
raise TracerError(_('Process {} already registered').format(pid))
debug(_("Adding process {}").format(pid))
proc = self.keep_process(pid, parent)
if not is_attached:
proc.attach()
proc.options = self._options
return proc
def keep_process(self, pid, parent=None):
if pid in self._procs:
debug(_("Remembering process {}").format(pid))
return self._procs[pid]
if parent:
details = "({})".format(parent.pid)
else:
details = ''
debug(_("Keeping process {} {}").format(pid, details))
proc = TracedProcess(pid, parent)
self._procs[pid] = proc
return proc
def remove_process(self, pid):
debug(_("Removing process {}").format(pid))
try:
proc = self._procs.pop(pid)
except KeyError:
raise TracerError(_('Process not found'))
proc.terminate()
proc.detach()
debug(_("{} processes still traced").format(len(self._procs)))
def wait_for_event(self, wanted_pid=None, blocking=True):
flags = 0
if not blocking:
flags |= os.WNOHANG
if wanted_pid and wanted_pid not in self._procs:
raise TracerError(_("Unknown PID ({})").format(wanted_pid))
pid = wanted_pid or -1
pid, status = os.waitpid(pid, flags)
return create_process_event(pid, status)
def wait_for_signal(self, *signals, **kwargs):
pid = kwargs.get('pid', None)
while True:
event = self.wait_for_event(pid)
if isinstance(event, SignalEvent):
if event.signum in signals or not signals:
return event
def wait_for_syscall(self, pid=None):
return self.wait_for_signal(signal.SIGTRAP, pid)
def quit(self):
while self._procs:
pid, proc = self._procs.popitem()
debug(_("Removing process {}").format(pid))
proc.terminate()
proc.detach()
# vim: ts=4 sts=4 sw=4 sta et ai
|
gpl-3.0
| 987,896,519,618,176,600
| 30.776536
| 77
| 0.581927
| false
| 4.109827
| false
| false
| false
|
videntity/tweatwell
|
apps/twitbot/views.py
|
1
|
2939
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
from django.conf import settings
from django.http import HttpResponse, Http404,HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.models import User
from ..accounts.models import UserProfile
from models import TwitBot
from utils import twitbotsearch, convert_twitter_date
from ..checkin.models import Freggie
from ..checkin.freggies import fruit_tuple, veg_tuple
from operator import itemgetter, attrgetter
import json, sys, StringIO, pycurl
from django.forms.models import model_to_dict
def executetwitsearchbot(request, cron_key):
if cron_key != settings.CRON_KEY:
return HttpResponse("Forbidden", status=401)
freggielist=[]
    # get the most recent since_id from the db
tb=TwitBot.objects.get(pk=1)
d=twitbotsearch(settings.TWITTERHASH, tb.since_id)
latest_since_id=tb.since_id
if d.has_key('results'):
for i in reversed(d['results']):
jsonstr=json.dumps(i, indent = 4,)
x=dict(json.loads(jsonstr))
            # if the from_user is in our DB, then create a Freggie
if int(tb.since_id) <= int(x['id']):
latest_since_id=x['id']
try:
freggie=None
up=UserProfile.objects.get(twitter=x['from_user'])
print "process", x['text'], x['id']
for i in fruit_tuple:
if str(x['text']).lower().__contains__(i):
freggie = i
for i in veg_tuple:
if str(x['text']).lower().__contains__(i):
freggie = i
if freggie:
mydate = convert_twitter_date(str(x['created_at']))
f=Freggie.objects.create(user=up.user, freggie=freggie,
text=x['text'], sinceid=x['id'],
evdt=mydate)
freggiedict=model_to_dict(f, exclude=['evdt','photo',
'since_id'])
freggiedict['created_at']=x['created_at']
freggiedict['twitter_id']=x['id']
freggielist.append(freggiedict)
except(UserProfile.DoesNotExist):
print "A tweat was found but no matching user profile"
except:
print str(sys.exc_info())
#return HttpResponse(str(sys.exc_info()), status=500)
tb.since_id=int(latest_since_id)+1
tb.save()
jsonstr=json.dumps(freggielist, indent = 4,)
return HttpResponse(jsonstr, mimetype="text/plain")
|
gpl-2.0
| 316,786,970,059,591,800
| 41
| 79
| 0.532494
| false
| 4.042641
| false
| false
| false
|
gwax/nikola
|
tests/test_rst_compiler.py
|
1
|
8674
|
# coding: utf8
# Author: Rodrigo Bistolfi
# Date: 03/2013
""" Test cases for Nikola ReST extensions.
A base class ReSTExtensionTestCase provides the tests' basic behaviour.
Subclasses must override the "sample" class attribute with the ReST markup.
The sample will be rendered as HTML using publish_parts() by setUp().
One method is provided for checking the resulting HTML:
* assertHTMLContains(element, attributes=None, text=None)
The HTML is parsed with lxml for checking against the data you provide. The
method takes an element argument, a string representing the *name* of an HTML
tag, like "script" or "iframe". We will try to find this tag in the document
and perform the tests on it. You can pass a dictionary to the attributes kwarg
representing the name and the value of the tag attributes. The text kwarg takes
a string argument, which will be tested against the contents of the HTML
element.
One last caveat: you need to URL-unquote your URLs if you are going to test
attributes like "src" or "link", since the HTML rendered by docutils will
always be unquoted.
"""
import os
import io
try:
from io import StringIO
except ImportError:
from StringIO import StringIO # NOQA
import tempfile
import docutils
from lxml import html
import unittest
import nikola.plugins.compile.rest
from nikola.plugins.compile.rest import vimeo
import nikola.plugins.compile.rest.listing
from nikola.plugins.compile.rest.doc import Plugin as DocPlugin
from nikola.utils import _reload
from .base import BaseTestCase, FakeSite, FakePost
class ReSTExtensionTestCase(BaseTestCase):
""" Base class for testing ReST extensions """
sample = 'foo'
deps = None
def setUp(self):
self.compiler = nikola.plugins.compile.rest.CompileRest()
self.compiler.set_site(FakeSite())
return super(ReSTExtensionTestCase, self).setUp()
def basic_test(self):
""" Parse cls.sample into a HTML document tree """
self.setHtmlFromRst(self.sample)
def setHtmlFromRst(self, rst):
""" Create html output from rst string """
tmpdir = tempfile.mkdtemp()
inf = os.path.join(tmpdir, 'inf')
outf = os.path.join(tmpdir, 'outf')
with io.open(inf, 'w+', encoding='utf8') as f:
f.write(rst)
p = FakePost('', '')
p._depfile[outf] = []
self.compiler.site.post_per_input_file[inf] = p
self.html = self.compiler.compile(inf, outf)
with io.open(outf, 'r', encoding='utf8') as f:
self.html = f.read()
os.unlink(inf)
os.unlink(outf)
depfile = [p for p in p._depfile[outf] if p != outf]
depfile = '\n'.join(depfile)
if depfile:
self.assertEqual(self.deps.strip(), depfile)
os.rmdir(tmpdir)
self.html_doc = html.parse(StringIO(self.html))
def assertHTMLContains(self, element, attributes=None, text=None):
""" Test if HTML document includes an element with the given
attributes and text content
"""
try:
tag = next(self.html_doc.iter(element))
except StopIteration:
raise Exception("<{0}> not in {1}".format(element, self.html))
else:
if attributes:
arg_attrs = set(attributes.items())
tag_attrs = set(tag.items())
self.assertTrue(arg_attrs.issubset(tag_attrs))
if text:
self.assertIn(text, tag.text)
class ReSTExtensionTestCaseTestCase(ReSTExtensionTestCase):
""" Simple test for our base class :) """
sample = '.. raw:: html\n\n <iframe src="foo" height="bar">spam</iframe>'
def test_test(self):
self.basic_test()
self.assertHTMLContains("iframe", attributes={"src": "foo"},
text="spam")
self.assertRaises(Exception, self.assertHTMLContains, "eggs", {})
class MathTestCase(ReSTExtensionTestCase):
sample = ':math:`e^{ix} = \cos x + i\sin x`'
def test_math(self):
""" Test that math is outputting TeX code."""
self.basic_test()
self.assertHTMLContains("span", attributes={"class": "math"},
text="\(e^{ix} = \cos x + i\sin x\)")
class SlidesTestCase(ReSTExtensionTestCase):
""" Slides test case """
sample = '.. slides:: IMG.jpg\n'
def test_slides(self):
""" Test the slides js generation and img tag creation """
self.basic_test()
self.assertHTMLContains("img", attributes={"src": "IMG.jpg"})
class SoundCloudTestCase(ReSTExtensionTestCase):
""" SoundCloud test case """
sample = '.. soundcloud:: SID\n :height: 400\n :width: 600'
def test_soundcloud(self):
""" Test SoundCloud iframe tag generation """
self.basic_test()
self.assertHTMLContains("iframe",
attributes={"src": ("https://w.soundcloud.com"
"/player/?url=http://"
"api.soundcloud.com/"
"tracks/SID"),
"height": "400", "width": "600"})
class VimeoTestCase(ReSTExtensionTestCase):
"""Vimeo test.
Set Vimeo.request_size to False for avoiding querying the Vimeo api
over the network
"""
sample = '.. vimeo:: VID\n :height: 400\n :width: 600'
def setUp(self):
""" Disable query of the vimeo api over the wire """
vimeo.Vimeo.request_size = False
super(VimeoTestCase, self).setUp()
_reload(nikola.plugins.compile.rest)
def test_vimeo(self):
""" Test Vimeo iframe tag generation """
self.basic_test()
self.assertHTMLContains("iframe",
attributes={"src": ("https://player.vimeo.com/"
"video/VID"),
"height": "400", "width": "600"})
class YoutubeTestCase(ReSTExtensionTestCase):
""" Youtube test case """
sample = '.. youtube:: YID\n :height: 400\n :width: 600'
def test_youtube(self):
""" Test Youtube iframe tag generation """
self.basic_test()
self.assertHTMLContains("iframe",
attributes={"src": ("https://www.youtube.com/"
"embed/YID?rel=0&hd=1&"
"wmode=transparent"),
"height": "400", "width": "600"})
class ListingTestCase(ReSTExtensionTestCase):
""" Listing test case and CodeBlock alias tests """
deps = None
sample1 = '.. listing:: nikola.py python\n\n'
sample2 = '.. code-block:: python\n\n import antigravity'
sample3 = '.. sourcecode:: python\n\n import antigravity'
# def test_listing(self):
# """ Test that we can render a file object contents without errors """
# with cd(os.path.dirname(__file__)):
# self.deps = 'listings/nikola.py'
# self.setHtmlFromRst(self.sample1)
def test_codeblock_alias(self):
""" Test CodeBlock aliases """
self.deps = None
self.setHtmlFromRst(self.sample2)
self.setHtmlFromRst(self.sample3)
class DocTestCase(ReSTExtensionTestCase):
""" Ref role test case """
sample = 'Sample for testing my :doc:`doesnt-exist-post`'
sample1 = 'Sample for testing my :doc:`fake-post`'
sample2 = 'Sample for testing my :doc:`titled post <fake-post>`'
def setUp(self):
# Initialize plugin, register role
self.plugin = DocPlugin()
self.plugin.set_site(FakeSite())
# Hack to fix leaked state from integration tests
try:
f = docutils.parsers.rst.roles.role('doc', None, None, None)[0]
f.site = FakeSite()
except AttributeError:
pass
return super(DocTestCase, self).setUp()
def test_doc_doesnt_exist(self):
self.assertRaises(Exception, self.assertHTMLContains, 'anything', {})
def test_doc(self):
self.setHtmlFromRst(self.sample1)
self.assertHTMLContains('a',
text='Fake post',
attributes={'href': '/posts/fake-post'})
def test_doc_titled(self):
self.setHtmlFromRst(self.sample2)
self.assertHTMLContains('a',
text='titled post',
attributes={'href': '/posts/fake-post'})
if __name__ == "__main__":
unittest.main()
|
mit
| 2,583,950,506,363,282,000
| 34.117409
| 79
| 0.588771
| false
| 4.043823
| true
| false
| false
|
czcorpus/kontext
|
lib/plugins/abstract/user_items.py
|
1
|
4225
|
# Copyright (c) 2015 Charles University in Prague, Faculty of Arts,
# Institute of the Czech National Corpus
# Copyright (c) 2015 Tomas Machalek <tomas.machalek@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
A plug-in template for managing items (corpora, subcorpora, aligned corpora)
user can access via fast access widget. This is a generalization of
user corpus list.
Expected factory method signature: create_instance(config, db)
"""
import abc
import hashlib
import l10n
from controller.errors import UserActionException
class UserItemException(UserActionException):
"""
General error related to
the plug-in
"""
pass
class FavoriteItem(object):
"""
A reference to a corpus in user's list
"""
def __init__(self, data=None):
if data is None:
data = {}
self.name = data.get('name', 'New item')
self.corpora = data.get('corpora', [])
self.size = data.get('size', None)
self.size_info = l10n.simplify_num(self.size) if self.size else None
self.subcorpus_id = data.get('subcorpus_id', None)
self.subcorpus_orig_id = data.get('subcorpus_orig_id', self.subcorpus_id)
self.ident = data.get('id', hashlib.md5(self.sort_key.encode()).hexdigest())
@property
def is_single_corpus(self):
return not self.subcorpus_id and len(self.corpora) == 1
@property
def main_corpus_id(self):
return self.corpora[0]['id']
@property
def sort_key(self):
return '{0} {1}'.format(' '.join(x['name'] for x in self.corpora), self.subcorpus_id)
def to_dict(self):
return dict(
id=self.ident,
name=self.name,
size=self.size,
size_info=self.size_info,
corpora=self.corpora,
subcorpus_id=self.subcorpus_id,
subcorpus_orig_id=self.subcorpus_orig_id
)
class AbstractUserItems(abc.ABC):
"""
A 'user_items' (= favorite corpora, subcorpora, aligned corpora)
plug-in interface.
Please note that to initiate the plug-in with request-specific
data the 'setup(controller)' method must be implemented. The controller
detects it automatically and calls it for all active plug-ins implementing
it.
"""
def from_dict(self, data):
"""
        According to the provided data, returns a proper
        implementation of GeneralItem. OPTIONAL implementation.
        arguments:
data -- a dict
"""
raise NotImplementedError()
@abc.abstractmethod
def serialize(self, obj):
"""
Exports a GeneralItem instance or a list of GeneralItem instances (both variants
must be supported) to JSON used for internal storage (i.e. no client-side stuff)
"""
@abc.abstractmethod
def get_user_items(self, plugin_ctx):
"""
Returns a list of user items (GeneralItem implementations)
arguments:
plugin_ctx --
return:
a list or a compatible structure containing GeneralItem objects
"""
@abc.abstractmethod
def add_user_item(self, plugin_ctx, item):
"""
Adds (persistently) an item to user's list.
arguments:
plugin_ctx --
item -- an instance of GeneralItem implementation
"""
@abc.abstractmethod
def delete_user_item(self, plugin_ctx, item_id):
"""
Removes (in a persistent way) an item from user's list.
arguments:
plugin_ctx --
item_id -- an ID of GeneralItem instance
"""
|
gpl-2.0
| 8,109,087,419,113,496,000
| 29.178571
| 93
| 0.645917
| false
| 3.982092
| false
| false
| false
|
goodwillcoding/trac-subtickettypes
|
subtickettypes/web_ui.py
|
1
|
9663
|
#
# Copyright 2009, Niels Sascha Reedijk <niels.reedijk@gmail.com>
# All rights reserved. Distributed under the terms of the MIT License.
#
# debug
from pprint import pprint
from pkg_resources import resource_filename
from genshi import HTML
from genshi.builder import tag
from genshi.filters.transform import Transformer
from trac.core import *
from trac.ticket import model
from trac.util.text import unicode_quote_plus
from trac.web.api import IRequestFilter
from trac.web.chrome import ITemplateProvider
from trac.web.chrome import ITemplateStreamFilter
from trac.web.chrome import add_notice
from trac.web.chrome import add_script
from trac.ticket.roadmap import TicketGroupStats
from trac.util.translation import _
# --------------------------------------------------------------------------- #
class SubTicketTypesModule(Component):
"""Implements subtickettypes in Trac's interface."""
implements(IRequestFilter, ITemplateProvider, ITemplateStreamFilter)
# ....................................................................... #
# IRequestFilter method
def pre_process_request(self, req, handler):
# handle the admin panel
if req.path_info.startswith("/admin/ticket/type/"):
# handle cancel submit by redirecting back to the list page
# TODO: patch subcomponents with "cancel" check
if req.method == "POST" and req.args.get('cancel'):
req.redirect(req.href.admin('ticket', 'type'))
if req.method == "POST" and 'rename_children' in req.args:
# if we are not renaming the children for a ticket type that
                # has children, it's a regular update, so let trac handle it.
if req.args.get('rename_children') != 'on':
return handler
                # otherwise, let's rename the parent type first
# get the original name (before renaming)
# 19 is the length of "/admin/ticket/type/" string
parent_ticket_type_name = req.path_info[19:]
parent_ticket_type = model.Type(self.env, parent_ticket_type_name)
parent_ticket_type.name = req.args.get('name')
try:
parent_ticket_type.update()
except self.env.db_exc.IntegrityError:
raise TracError(_('The ticket type "%(name)s" already '
'exists.', name=parent_ticket_type_name))
                # Now update the names in the child ticket types
child_ticket_types = self._get_tickettype_children(parent_ticket_type_name)
for ticket_type in child_ticket_types:
ticket_type.name = ticket_type.name.replace(parent_ticket_type_name, req.args.get('name'), 1)
ticket_type.update()
add_notice(req, _('Your changes have been saved.'))
req.redirect(req.href.admin('ticket', 'type'))
return handler
# ....................................................................... #
# IRequestFilter method
def post_process_request(self, req, template, data, content_type):
# The /query paths are handled in filter_stream()
if req.path_info.startswith('/ticket/') or \
req.path_info.startswith('/newticket'):
add_script(req, 'subtickettypes/tickettypeselect.js')
if template == "query.html":
# Allow users to query for parent ticket types and include all
# sub ticket types as well
# check if the entry already exists (might be added by another
# plugin)
begins_with_select_item = {'name': _("begins with"), 'value': ""}
if begins_with_select_item not in data['modes']['select']:
data['modes']['select'].insert(0, begins_with_select_item)
if template == "milestone_view.html":
            # Group tickets in the milestone view by base ticket type.
if data['grouped_by'] == "type":
ticket_type_name = ''
new_groups = []
new_ticket_types = []
for ticket_type in data['groups']:
ticket_type_name = ticket_type['name'].split("/")[0]
if ticket_type_name not in new_ticket_types:
# This ticket type is not yet in the new list of ticket
# types, add it.
new_ticket_types.append(ticket_type_name)
                        # Fix the query URLs (we use unicode_quote_plus to
                        # replace the '/' with something URL-safe, like the
                        # hrefs are)
new_hrefs = []
for interval_href in ticket_type['interval_hrefs']:
new_hrefs.append(interval_href.replace(unicode_quote_plus(ticket_type['name']), '^' + ticket_type_name))
ticket_type['stats_href'] = ticket_type['stats_href'].replace(unicode_quote_plus(ticket_type['name']), '^' + ticket_type_name)
ticket_type['interval_hrefs'] = new_hrefs
# Set the name to the base name (in case this originally
                        # is a sub ticket type).
ticket_type['name'] = ticket_type_name
new_groups.append(ticket_type)
else:
# This is a sub ticket type. Add the stats to the main ticket type.
                        # Note that two lists are created above. Whenever an
# item is added to one, an analogous one is added to
# the other. This code uses that logic.
core_ticket_type = new_groups[new_ticket_types.index(ticket_type_name)]
merged_stats = core_ticket_type['stats'] #TicketGroupStats from trac.ticket.roadmap
new_stats = ticket_type['stats']
                        # Bear with me as we go through this mess that is the group stats
                        # (or of course this hack, depending on whose viewpoint).
# First merge the totals
merged_stats.count += new_stats.count
# The stats are divided in intervals, merge these.
i = 0
for interval in merged_stats.intervals:
new_interval = new_stats.intervals[i]
interval['count'] += new_interval['count']
i += 1
merged_stats.refresh_calcs()
                # Now store the new milestone ticket type groups
data['groups'] = new_groups
return template, data, content_type
# ....................................................................... #
# ITemplateProvider methods
def get_htdocs_dirs(self):
"""Return the absolute path of a directory containing additional
static resources (such as images, style sheets, etc).
"""
return [('subtickettypes', resource_filename(__name__, 'htdocs'))]
# ....................................................................... #
# ITemplateProvider methods
def get_templates_dirs(self):
"""Return the absolute path of the directory containing the provided
ClearSilver templates.
"""
return ""
# ....................................................................... #
# ITemplateStreamFilter method
def filter_stream(self, req, method, filename, stream, data):
# alternate matching possibilities
# if req.path_info.startswith('/admin/ticket/type'):
        # Match the detail editing panel of the ticket type admin page
if filename == "admin_enums.html" \
and data['active_cat'] == u'ticket' \
and data['active_panel'] == u'type' \
and data['view'] == 'detail':
# If ticket type has children, then add a checkbox to rename those
if len(self._get_tickettype_children(data['enum'].name)) > 0:
stream |= Transformer("//div[@class='field'][1]").after(self._build_renamechildren_field())
elif req.path_info.startswith('/query'):
# We need to load our script after the initializeFilters() call done by Trac
html = HTML('<script type="text/javascript" charset="utf-8" src="' +
req.href.base +
'/chrome/subtickettypes/tickettypeselect.js"></script>')
stream |= Transformer('//head').append(html)
return stream
# ....................................................................... #
# Helper function
def _get_tickettype_children(self, name):
tickettypes = model.Type.select(self.env)
result = []
for tickettype in tickettypes:
if tickettype.name.startswith(name + "/") and tickettype.name != name:
result.append(tickettype)
return result
# ....................................................................... #
# Helper function
def _build_renamechildren_field(self):
return tag.div(tag.label(
tag.input(_("Also rename children"), \
type='checkbox',
id='rename_children', \
name='rename_children',
checked='checked') \
), \
class_='field')
|
bsd-3-clause
| -1,324,141,922,772,074,800
| 45.456731
| 150
| 0.526338
| false
| 4.729809
| false
| false
| false
|
corpnewt/CorpBot.py
|
Cogs/Telephone.py
|
1
|
22909
|
import asyncio, discord, re, os, random
from discord.ext import commands
from collections import OrderedDict
from Cogs import Utils, DisplayName, Nullify, FuzzySearch, PickList, Message
def setup(bot):
# Add the bot and deps
settings = bot.get_cog("Settings")
bot.add_cog(Telephone(bot, settings))
class Telephone(commands.Cog):
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot, settings):
self.bot = bot
self.settings = settings
self.switchboard = []
# Regex for extracting urls from strings
self.regex = re.compile(r"(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?")
self.dregex = re.compile(r"(?i)(discord(\.gg|app\.com)\/)([^\s]+)")
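        # e.g. self.dregex is meant to catch invite links such as
        # "discord.gg/AbC123" or "discordapp.com/invite/AbC123" (illustrative)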
global Utils, DisplayName
Utils = self.bot.get_cog("Utils")
DisplayName = self.bot.get_cog("DisplayName")
# Proof-of-concept placeholders
'''@commands.Cog.listener()
async def on_message_context(self, ctx, message):
return'''
async def killcheck(self, message):
ignore = False
for cog in self.bot.cogs:
real_cog = self.bot.get_cog(cog)
if real_cog == self:
# Don't check ourself
continue
try:
check = await real_cog.message(message)
except AttributeError:
continue
try:
if check['Ignore']:
ignore = True
except KeyError:
pass
return ignore
async def ontyping(self, channel, user, when):
# Check if the channel is typing, and send typing to receiving
# channels if in call
# Don't listen to bots
if user.bot:
return
call = self._incall(channel.guild)
if not call:
return
if not call["Connected"]:
# Don't forward typing until they pick up
return
for caller in call['Members']:
if caller is channel.guild:
continue
# Get the tele channel
call_channel = self._gettelechannel(caller)
if not call_channel:
continue
await call_channel.trigger_typing()
def _gettelechannel(self, server):
teleChannel = self.settings.getServerStat(server, "TeleChannel")
if teleChannel:
teleChannel = DisplayName.channelForName(str(teleChannel), server, "text")
if teleChannel == "":
return None
return teleChannel
def _getsafenumber(self, number, server):
numeric = "0123456789"
found = False
for guild in self.bot.guilds:
if guild.id == server.id:
continue
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if teleNum == number:
found = True
break
if not found:
return number
while True:
found = False
newNum = "".join(random.choice(numeric) for i in range(7))
for guild in self.bot.guilds:
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if teleNum == newNum:
found = True
break
if not found:
return newNum
def _incall(self, server):
for call in self.switchboard:
if server in call["Members"]:
return call
return None
def _getothernumber(self, call, server):
# Returns the other caller's number
if not server in call["Members"]:
# We're uh.. not in this call
return None
for member in call["Members"]:
if not member is server:
# HA! GOTEM
return self.settings.getServerStat(member, "TeleNumber")
def _hangup(self, caller):
# Hangs up all calls the caller is in
for call in self.switchboard:
if caller in call["Members"]:
self.switchboard.remove(call)
@commands.command(pass_context=True)
async def teleblocklinks(self, ctx, *, yes_no = None):
"""Enables/Disables removing links sent over telephone calls (bot-admin only)."""
if not await Utils.is_bot_admin_reply(ctx): return
await ctx.send(Utils.yes_no_setting(ctx,"Block telephone links","TeleBlockLinks",yes_no,default=True))
@commands.command(pass_context=True)
async def phonebook(self, ctx, *, look_up = None):
"""Lets you page through the phonebook - or optionally lets you search for a server name or number."""
# Build our phone list
entries = []
for guild in self.bot.guilds:
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if teleNum:
entries.append({ "name": guild.name, "value": teleNum[:3] + "-" + teleNum[3:] })
if not len(entries):
await ctx.send(":telephone: The phonebook is *empty!*")
return
# Sort alphabetically
entries = sorted(entries, key = lambda x: x["name"])
if look_up == None:
await PickList.PagePicker(title=":telephone: Phonebook",list=entries,ctx=ctx).pick()
return
# Search time!
look_up_num = re.sub(r'\W+', '', look_up)
id_ratio = 0
if len(look_up_num):
look_up_num = look_up_num if len(look_up_num) < 7 else look_up_num[:3]+"-"+look_up_num[3:]
idMatch = FuzzySearch.search(look_up_num, entries, 'value', 3)
id_ratio = idMatch[0]['Ratio']
if id_ratio == 1:
# Found it!
return await Message.Embed(title=":telephone: Phonebook",fields=[idMatch[0]["Item"]],color=ctx.author).send(ctx)
# Look up by name now
nameMatch = FuzzySearch.search(look_up, entries, 'name', 3)
if nameMatch[0]['Ratio'] == 1:
# Exact name
# Found it!
return await Message.Embed(title=":telephone: Phonebook",fields=[nameMatch[0]["Item"]],color=ctx.author).send(ctx)
# now we need to find which is better
matchCheck = []
if nameMatch[0]['Ratio'] > id_ratio:
matchCheck = nameMatch
else:
matchCheck = idMatch
fields = [m["Item"] for m in matchCheck]
return await Message.Embed(title=":telephone: Phonebook - Closest Matches",fields=fields,color=ctx.author).send(ctx)
@commands.command(pass_context=True)
async def telenumber(self, ctx):
"""Prints your telephone number."""
teleNum = self.settings.getServerStat(ctx.guild, "TeleNumber")
if not teleNum:
await ctx.send(":telephone: is currently *disabled*.")
return
teleNumFormat = teleNum[:3] + "-" + teleNum[3:]
await ctx.send("Your :telephone: number is: *{}*".format(teleNumFormat))
@commands.command(pass_context=True)
async def callerid(self, ctx):
"""Reveals the last number to call regardless of *67 settings (bot-admin only)."""
isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(ctx.message.guild, "AdminArray")
for role in ctx.message.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
target = self.settings.getServerStat(ctx.guild, "LastCall")
if target == None:
await ctx.send(":telephone: No prior calls recorded.")
else:
if self.settings.getServerStat(ctx.guild, "LastCallHidden") and not isAdmin:
target = "UNKNOWN CALLER (bot-admins and admins can reveal this)"
await ctx.send(":telephone: Last number recorded: {}".format(target[:3] + "-" + target[3:]))
@commands.command(pass_context=True)
async def settelechannel(self, ctx, *, channel = None):
"""Sets the channel for telephone commands - or disables that if nothing is passed (admin only)."""
isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
if channel == None:
self.settings.setServerStat(ctx.message.guild, "TeleChannel", "")
self.settings.setServerStat(ctx.guild, "TeleNumber", None)
msg = ':telephone: *disabled*.'
await ctx.channel.send(msg)
return
channel = DisplayName.channelForName(channel, ctx.guild, "text")
if channel == None:
await ctx.send("I couldn't find that channel :(")
return
self.settings.setServerStat(ctx.message.guild, "TeleChannel", channel.id)
teleNumber = self._getsafenumber(str(channel.id)[len(str(channel.id))-7:], ctx.guild)
self.settings.setServerStat(ctx.guild, "TeleNumber", teleNumber)
msg = ':telephone: channel set to {}'.format(channel.mention)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def telechannel(self, ctx):
"""Prints the current channel for telephone commands."""
teleChan = self.settings.getServerStat(ctx.guild, "TeleChannel")
if not teleChan:
await ctx.send(":telephone: is currently *disabled*.")
return
channel = DisplayName.channelForName(str(teleChan), ctx.guild, "text")
if channel:
await ctx.send("The current :telephone: channel is {}".format(channel.mention))
return
await ctx.send("Channel id: *{}* no longer exists on this server. Consider updating this setting!".format(teleChan))
@commands.command(pass_context=True)
async def teleblock(self, ctx, *, guild_name = None):
"""Blocks all tele-numbers associated with the passed guild (bot-admin only)."""
isAdmin = ctx.author.permissions_in(ctx.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(ctx.guild, "AdminArray")
for role in ctx.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.send('You do not have sufficient privileges to access this command.')
return
if guild_name == None:
await ctx.send("Usage: `{}teleblock [guild_name]`".format(ctx.prefix))
return
# Verify our guild
found = False
target = None
for guild in self.bot.guilds:
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if not teleNum:
continue
if guild.name.lower() == guild_name.lower():
if guild.id == ctx.guild.id:
# We're uh... blocking ourselves.
await ctx.send("You can't block your own number...")
return
found = True
target = guild
break
if not found:
await ctx.send("I couldn't find that guild to block. Maybe they're not setup for :telephone: yet?")
return
# Here, we should have a guild to block
block_list = self.settings.getServerStat(ctx.guild, "TeleBlock")
if block_list == None:
block_list = []
block_list.append(target.id)
self.settings.setServerStat(ctx.guild, "TeleBlock", block_list)
msg = "You are now blocking *{}!*".format(Nullify.escape_all(target.name))
await ctx.send(msg)
@commands.command(pass_context=True)
async def teleunblock(self, ctx, *, guild_name = None):
"""Unblocks all tele-numbers associated with the passed guild (bot-admin only)."""
isAdmin = ctx.author.permissions_in(ctx.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(ctx.guild, "AdminArray")
for role in ctx.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
# Only allow admins to change server stats
if not isAdmin:
await ctx.send('You do not have sufficient privileges to access this command.')
return
if guild_name == None:
await ctx.send("Usage: `{}teleunblock [guild_name]`".format(ctx.prefix))
return
block_list = self.settings.getServerStat(ctx.guild, "TeleBlock")
if block_list == None:
block_list = []
if not len(block_list):
await ctx.send("No blocked numbers - nothing to unblock!")
return
# Verify our guild
found = False
target = None
for guild in self.bot.guilds:
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if guild.name.lower() == guild_name.lower():
found = True
target = guild
break
if not found:
await ctx.send("I couldn't find that guild...")
return
if not target.id in block_list:
msg = "*{}* is not currently blocked.".format(Nullify.escape_all(target.name))
await ctx.send(msg)
return
# Here, we should have a guild to unblock
block_list.remove(target.id)
self.settings.setServerStat(ctx.guild, "TeleBlock", block_list)
msg = "You have unblocked *{}!*".format(Nullify.escape_all(target.name))
await ctx.send(msg)
@commands.command(pass_context=True)
async def teleblocks(self, ctx):
"""Lists guilds with blocked tele-numbers."""
block_list = self.settings.getServerStat(ctx.guild, "TeleBlock")
if block_list == None:
block_list = []
if not len(block_list):
await ctx.send("No blocked numbers!")
return
block_names = []
        for block in block_list[:]: # iterate over a copy so removing dead guild ids doesn't skip entries
server = self.bot.get_guild(block)
if not server:
block_list.remove(block)
continue
block_names.append("*" + server.name + "*")
self.settings.setServerStat(ctx.guild, "TeleBlock", block_list)
msg = "__Tele-Blocked Servers:__\n\n"
#msg += ", ".join(str(x) for x in block_list)
msg += ", ".join(Nullify.escape_all(block_names))
await ctx.send(msg)
@commands.command(pass_context=True)
async def call(self, ctx, *, number = None):
"""Calls the passed number. Can use *67 to hide your identity - or *69 to connect to the last incoming call (ignored if another number is present)."""
teleChan = self._gettelechannel(ctx.guild)
if not teleChan:
await ctx.send(":telephone: is currently *disabled*. You can set it up with `{}settelechannel [channel]`".format(ctx.prefix))
return
if not teleChan.id == ctx.channel.id:
await ctx.send(":telephone: calls must be made in {}".format(teleChan.mention))
return
# Check if we're already in a call
incall = self._incall(ctx.guild)
if incall:
if incall["Hidden"]:
call_with = "UNKOWN CALLER"
else:
teleNum = self._getothernumber(incall, ctx.guild)
call_with = teleNum[:3] + "-" + teleNum[3:]
# Busy :(
caller = self._gettelechannel(ctx.guild)
if caller:
await caller.send(":telephone: You're already in a call with: *{}*".format(call_with))
return
hidden = False
target = None
dial_hide = False
if not number == None:
if "*67" in number:
hidden = True
if "*69" in number:
target = self.settings.getServerStat(ctx.guild, "LastCall")
if self.settings.getServerStat(ctx.guild, "LastCallHidden"):
dial_hide = True
number = number.replace("*67", "").replace("*69", "")
number = re.sub(r'\W+', '', number)
if len(number):
dial_hide = False
target = number
await self._dial(ctx.guild, target, hidden, dial_hide)
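    # Usage sketch (assumed prefix "$"; the real prefix comes from the server settings):
    #   $call              -> dials a random guild that has a telephone number set up
    #   $call *67 1234567  -> dials 123-4567 with this guild's number hidden
    #   $call *69          -> redials the last incoming caller (ignored when a number is given)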
async def _dial(self, caller, target, hidden, dial_hide):
if target == None:
# Need a random number
numbers = []
for guild in self.bot.guilds:
if guild.id == caller.id:
continue
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if teleNum:
numbers.append(guild)
if len(numbers):
target = random.choice(numbers)
else:
found = False
for guild in self.bot.guilds:
teleNum = self.settings.getServerStat(guild, "TeleNumber")
if teleNum == target:
if guild.id == caller.id:
# We're uh... calling ourselves.
caller = self._gettelechannel(caller)
if caller:
await caller.send(":telephone: ***Beep beep beep beep!*** *Busy signal...*")
return
found = True
target = guild
break
if not found:
target = None
if target == None:
# We didn't find a server to connect to
caller = self._gettelechannel(caller)
if caller:
await caller.send(":telephone: ***Beep beep beep!*** *We're sorry, the number you've dialed is not in service at this time.*")
return
# Check for a blocked server
block_list = self.settings.getServerStat(caller, "TeleBlock")
if block_list == None:
block_list = []
tblock_list = self.settings.getServerStat(target, "TeleBlock")
if tblock_list == None:
            tblock_list = []
if target.id in block_list or caller.id in tblock_list:
# Blocked! - checks if both parties are blocked by each other
caller = self._gettelechannel(caller)
if caller:
await caller.send(":telephone: ***Beep beep beep!*** *We're sorry, your call cannot be completed as dialed.*")
return
target_channel = self._gettelechannel(target)
if target_channel == None:
# We found a server - but they have no telechannel
caller = self._gettelechannel(caller)
if caller:
await caller.send(":telephone: ***Beep beep beep!*** *We're sorry, the number you've dialed is not in service at this time.*")
return
# Check if the caller is in a call currently
if self._incall(target):
# Busy :(
caller = self._gettelechannel(caller)
if caller:
await caller.send(":telephone: ***Beep beep beep beep!*** *Busy signal...*")
return
# Ring!
try:
await self._ring(caller, target, hidden, dial_hide)
except:
# Something went wrong - hang up and inform both parties that the call was disconnected
self._hangup(caller)
caller = self._gettelechannel(caller)
target = self._gettelechannel(target)
try:
await caller.send(":telephone: The line went dead!")
except:
pass
try:
await target.send(":telephone: The line went dead!")
except:
pass
async def _ring(self, caller, receiver, hidden, dial_hide):
        # This should be called when we have a valid caller, receiver, and no one is busy
receiver_chan = self._gettelechannel(receiver)
caller_chan = self._gettelechannel(caller)
if receiver_chan == None or caller_chan == None:
# No dice
return
# Add both to the call list
self.switchboard.append({ "Members": [caller, receiver], "Hidden": hidden, "Connected": False })
our_call = self.switchboard[len(self.switchboard)-1]
# Let the caller know we're dialing
msg = ":telephone: Dialing... "
teleNum = self.settings.getServerStat(receiver, "TeleNumber")
msg_add = []
if hidden:
msg_add.append("*67 ")
if dial_hide:
msg_add.append("###-")
msg_add.append("####")
else:
msg_add.append(teleNum[:3]+"-")
msg_add.append(teleNum[3:])
# Send dialing
message = await caller_chan.send(msg)
# Dialing edits
for i in msg_add:
msg += i
await message.edit(content=msg)
await asyncio.sleep(0.5)
# Here - we should have "dialed"
# Send a message to the other channel that there's a call incoming
# Save last call
self.settings.setServerStat(receiver, "LastCall", self.settings.getServerStat(caller, "TeleNumber"))
if hidden:
caller_number = "UNKNOWN CALLER"
self.settings.setServerStat(receiver, "LastCallHidden", True)
else:
self.settings.setServerStat(receiver, "LastCallHidden", False)
caller_number = self.settings.getServerStat(caller, "TeleNumber")
caller_number = caller_number[:3] + "-" + caller_number[3:]
await receiver_chan.send(":telephone: Incoming call from: *{}*\nType *pickup* to answer.".format(caller_number))
# Ring for 30 seconds - then report no answer
# Setup the check
def check(ctx, msg):
# This now catches the message and the context
# print(ctx)
if msg.author.bot:
return False
m_cont = msg.content.lower()
if msg.channel == receiver_chan and m_cont == "pickup":
return True
if msg.channel == caller_chan and m_cont == "hangup":
return True
return False
# Wait for a response
try:
talk = await self.bot.wait_for('message_context', check=check, timeout=30)
except Exception:
talk = None
if talk:
talk = talk[1]
if talk == None:
# No answer - hangup
self._hangup(caller)
await caller_chan.send(":telephone: No answer...")
await receiver_chan.send(":telephone: Ringing stops.")
return
elif talk.content.lower() == "hangup":
# You hung up the call
self._hangup(caller)
await caller_chan.send(":telephone: You have hung up.")
await receiver_chan.send(":telephone: Ringing stops.")
return
# Connect the call:
our_call["Connected"] = True
# They answered!
await caller_chan.send(":telephone_receiver: Connected.\nType *hangup* to end the call.")
await receiver_chan.send(":telephone_receiver: Connected.\nType *hangup* to end the call.")
# Wait on the call
while True:
# Setup the check
def check_in_call(msg):
if msg.author.bot:
return False
if msg.channel == receiver_chan or msg.channel == caller_chan:
return True
return False
try:
# 1 minute timeout
talk = await self.bot.wait_for('message', check=check_in_call, timeout=60)
except Exception:
talk = None
if talk == None:
# Timed out
self._hangup(caller)
self._hangup(receiver)
await caller_chan.send(":telephone: Disconnected.")
await receiver_chan.send(":telephone: Disconnected.")
return
elif talk.content.lower() == "hangup":
# One side hung up
self._hangup(caller)
self._hangup(receiver)
if talk.channel == caller_chan:
# The caller disconnected
await receiver_chan.send(":telephone: The other phone was hung up.")
await caller_chan.send(":telephone: You have hung up.")
else:
# The receiver disconnected
await caller_chan.send(":telephone: The other phone was hung up.")
await receiver_chan.send(":telephone: You have hung up.")
return
else:
talk_msg = talk.content
# Let's make sure we strip links out - and nullify discord.gg links to patch a spam loophole
# Create a set of all matches (to avoid duplicates in case of spam)
if self.settings.getServerStat(receiver if talk.channel==caller_chan else caller,"TeleBlockLinks",True):
# Remove links only if the target channel chooses to
matches = [x.group(0) for x in re.finditer(self.regex, talk_msg)]
dmatches = [x.group(0) for x in re.finditer(self.dregex, talk_msg)]
matches.extend(dmatches)
matches = OrderedDict.fromkeys(matches) # Use an OrderedDict to avoid duplicates
# Now we iterate that list and replace all links with `link removed`
for x in matches:
talk_msg = talk_msg.replace(x,"`link removed`")
# Clean out mentions from the message
talk_msg = Utils.suppressed(talk.guild,talk_msg,force=True)
# Must be conversation
if talk.channel == caller_chan:
# Coming from the talking channel
if hidden:
await receiver_chan.send(":telephone_receiver: " + talk_msg)
else:
                        user = DisplayName.name(talk.author).replace("`","").replace("\\","") # strip backticks/backslashes so the name can't break out of the code span
await receiver_chan.send(":telephone_receiver: `{}`: {}".format(user, talk_msg))
else:
                    user = DisplayName.name(talk.author).replace("`","").replace("\\","") # strip backticks/backslashes so the name can't break out of the code span
await caller_chan.send(":telephone_receiver: `{}`: {}".format(user, talk_msg))
|
mit
| 4,624,505,264,969,987,000
| 33.462848
| 153
| 0.659173
| false
| 3.266648
| false
| false
| false
|
TissueMAPS/TmLibrary
|
tmlib/workflow/metaextract/args.py
|
1
|
1432
|
# TmLibrary - TissueMAPS library for distributed image analysis routines.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich and Robin Hafen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from tmlib.workflow.args import Argument
from tmlib.workflow.args import BatchArguments
from tmlib.workflow.args import SubmissionArguments
from tmlib.workflow import register_step_batch_args
from tmlib.workflow import register_step_submission_args
@register_step_batch_args('metaextract')
class MetaextractBatchArguments(BatchArguments):
batch_size = Argument(
type=int, help='number of images that should be processed per job',
default=100, flag='batch-size', short_flag='b'
)
@register_step_submission_args('metaextract')
class MetaextractSubmissionArguments(SubmissionArguments):
pass
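# Usage sketch (assumed CLI shape for a tmlib workflow step; the exact entry point may differ):
#   metaextract ... --batch-size 50   # or the short form -b 50, overriding the default of 100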
|
agpl-3.0
| -114,338,109,586,010,220
| 41.117647
| 78
| 0.776536
| false
| 4.011204
| false
| false
| false
|
hammerhorn/hammerhorn-jive
|
hr/hr.py
|
1
|
2017
|
#!/usr/bin/env python
"""
Produces horizontal lines for use in shell scripts.
usage: hr.py [-h] [-w WIDTH] [-p PATTERN] [-c]
optional arguments:
-h, --help show this help message and exit
-w WIDTH, --width WIDTH
-p PATTERN, --pattern PATTERN
-c, --center
* floats should give screenwidths, ints should give charwidths
"""
__author__ = 'Chris Horn <hammerhorn@gmail.com>'
__license__ = 'GPL'
import argparse
from cjh.cli import Cli
Cli()
#arg_dic = {}
def _parse_args():
"""
Parse all args and return 'args' namespace.
"""
parser = argparse.ArgumentParser(
description='Produces horizontal lines for use in shell scripts.')
parser.add_argument(
'-w', '--width', type=float, help='width in columns or width in ' +
'screenwidths')
parser.add_argument(
'-p', '--pattern', type=str, help='symbol or sequence of symbols')
parser.add_argument('-c', '--center', action='store_true', help='centered')
#parser.add_argument("-s", "--string", action='store_true')
return parser.parse_args()
if __name__ == '__main__':
ARGS = _parse_args()
else: ARGS = None
# If ARGS.width is an integer, convert it to be of type int.
# An int for this value means width in columns.
# A decimal < 1 means a percentage of the width of the terminal.
if ARGS is not None and ARGS.width is not None and\
(ARGS.width == int(ARGS.width)):
ARGS.width = int(ARGS.width)
# possible to do this automatically?
def populate_args():
"""
Convert args namespace to a dictionary, for use in the Cli.hrule()
method.
"""
kw_dict = {}
if ARGS is not None:
if ARGS.width is not None:
kw_dict.update({'width': ARGS.width})
if ARGS.pattern is not None:
kw_dict.update({'symbols': ARGS.pattern})
if ARGS.center is True:
kw_dict.update({'centered': ARGS.center})
return kw_dict
# print arg_dic
ARG_DICT = populate_args()
if __name__ == '__main__':
Cli.hrule(**ARG_DICT)
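# Usage sketch (assumed invocations, matching the argparse flags defined above):
#   python hr.py -w 40 -p '=' -c   -> a centered 40-column rule of '=' signs
#   python hr.py -w 0.5 -p '-~'    -> a '-~' patterned rule spanning half the terminal width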
|
gpl-2.0
| 8,735,727,038,346,566,000
| 27.408451
| 79
| 0.625682
| false
| 3.395623
| false
| false
| false
|
rwl/puddle
|
puddle/python_editor/python_editor_extension.py
|
1
|
3510
|
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Python editor extensions """
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from os.path import dirname
from enthought.pyface.api import ImageResource
from puddle.resource.editor import Editor
#------------------------------------------------------------------------------
# "PythonEditorExtension" class:
#------------------------------------------------------------------------------
class PythonEditorExtension(Editor):
""" Associates a Python editor with *.py files.
"""
# The object contribution's globally unique identifier.
id = "puddle.python_editor"
# A name that will be used in the UI for this editor
name = "Python Editor"
# An icon that will be used for all resources that match the
# specified extensions
image = ImageResource("python")
# The contributed editor class
editor_class = "puddle.python_editor.python_workbench_editor:" \
"PythonWorkbenchEditor"
# The list of file types understood by the editor
extensions = [".py"]
# If true, this editor will be used as the default editor for the type
default = False
#------------------------------------------------------------------------------
# "TextEditorExtension" class:
#------------------------------------------------------------------------------
class TextEditorExtension(Editor):
""" Associates a text editor with *.py files.
"""
# The object contribution's globally unique identifier.
id = "puddle.python_editor.text_editor_extension"
# A name that will be used in the UI for this editor
name = "Text Editor"
# An icon that will be used for all resources that match the
# specified extensions
image = ImageResource("python")
# The contributed editor class
editor_class = "enthought.plugins.text_editor.editor.text_editor:" \
"TextEditor"
# The list of file types understood by the editor
extensions = [".py"]
# If true, this editor will be used as the default editor for the type
default = True
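# Usage sketch (hypothetical): further associations can be contributed the same
# way, e.g. binding a JSON editor (the module path below is made up) to *.json files.
# class JsonEditorExtension(Editor):
#     id = "puddle.json_editor"
#     name = "JSON Editor"
#     image = ImageResource("json")
#     editor_class = "puddle.json_editor.json_workbench_editor:JsonWorkbenchEditor"
#     extensions = [".json"]
#     default = True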
# EOF -------------------------------------------------------------------------
|
mit
| -6,106,553,099,906,862,000
| 38.438202
| 79
| 0.575783
| false
| 5.072254
| false
| false
| false
|
marcoprado17/flask-bone
|
src/r.py
|
1
|
4665
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ======================================================================================================================
# The MIT License (MIT)
# ======================================================================================================================
# Copyright (c) 2016 [Marco Aurélio Prado - marco.pdsv@gmail.com]
# ======================================================================================================================
class Resources:
def __init__(self):
self.string = self.__Strings()
self.id = self.__Ids()
self.dimen = self.__Dimens()
class __Strings:
def __init__(self):
self.navbar = self.__Navbar()
self.register = self.__Register()
self.email = self.__Email()
self.micro_blog = "Micro Blog"
self.temp_error_html = "Ocorreu um erro inesperado em nossos servidores, nossa equipe técnica resolverá o problema assim que possível. Clique <a href=%(href)s>aqui</a> para voltar à pagina inicial."
self.db_access_error = "Ocorreu um erro ao acessar o banco de dados. Tente novamente."
self.send_email_error = "Ocorreu um erro ao enviar o email. Tente novamente."
self.static = "static"
self.toast = "toast"
self.success = "success"
self.info = "info"
self.warning = "warning"
self.error = "error"
self.category = "%(type)s-%(level)s"
class __Navbar:
def __init__(self):
self.home = "Home"
self.posts = "Posts"
self.add_post = "Adicionar Post"
self.view_posts = "Visualizar Posts"
self.categories = "Categorias"
self.add_category = "Adicionar Categoria"
self.subcategories = "Subcategorias"
self.add_subcategory = "Adicionar Subcategoria"
self.login = "Entrar"
self.register = "Cadastrar"
self.leave = "Sair"
class __Register:
def __init__(self):
self.email_already_registered = \
"Email já cadastrado. Para entrar com este email, clique <a href='%(href)s'>aqui</a>."
self.password_mismatch = "As senhas digitadas não são iguais."
self.password_length = "A senha deve possuir entre %(min_length)d e %(max_length)d caracteres."
self.email_label = "Email"
self.password_label = "Senha"
self.password_confirmation_label = "Confirmação da senha"
self.register = "Cadastrar"
self.already_has_account = "Já possui conta?"
self.title = "Cadastro"
class __Email:
def __init__(self):
self.register = self.__Register()
class __Register:
def __init__(self):
self.welcome = "Olá!"
self.confirm_email = "Deseja utilizar <u>%(email)s</u> como seu email principal na %(title)s?"
self.confirm_button_text = "Sim, desejo utilizar este email"
self.subject = "Confirme seu endereço de e-mail | %(title)s"
class __Ids:
def __init__(self):
self.navbar = self.__Navbar()
self.register = self.__Register()
class __Navbar:
def __init__(self):
self.home = "home"
self.posts = "posts"
self.categories = "categories"
self.subcategories = "subcategories"
self.register = "register"
self.login = "login"
class __Register:
def __init__(self):
self.example = "example"
class __Dimens:
def __init__(self):
self.navbar = self.__Navbar()
self.register = self.__Register()
self.models = self.__Models()
self.default_password_field_max_length = 256
self.default_string_field_max_length = 1024
class __Navbar:
def __init__(self):
self.example = 42
class __Register:
def __init__(self):
self.example = 42
class __Models:
def __init__(self):
self.user = self.__User()
class __User:
def __init__(self):
self.email_max_length = 256
self.password_min_length = 6
self.password_max_length = 32
R = Resources()
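# Illustrative lookups, based on the definitions above: the nested classes give
# dot-notation access to localized strings, element ids and dimensions.
#   R.string.navbar.home                     -> "Home"
#   R.id.register.example                    -> "example"
#   R.dimen.models.user.password_min_length  -> 6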
|
mit
| 4,281,181,409,038,982,000
| 38.760684
| 210
| 0.468186
| false
| 4.105914
| false
| false
| false
|
coala-analyzer/coala-gui
|
source/workspace/WorkspaceWindow.py
|
1
|
5608
|
from gi.repository import Gtk
import os
from collections import OrderedDict
from coalib.settings.ConfigurationGathering import load_configuration
from coalib.output.ConfWriter import ConfWriter
from coalib.output.printers.LogPrinter import LogPrinter
from pyprint.NullPrinter import NullPrinter
from coalib.settings.Section import Section
from coalib.misc.DictUtilities import update_ordered_dict_key
from source.workspace.SectionView import SectionView
class WorkspaceWindow(Gtk.ApplicationWindow):
def __init__(self, application, src):
Gtk.ApplicationWindow.__init__(self, application=application)
self.connect("delete_event", self.on_close)
self._ui = Gtk.Builder()
self._ui.add_from_resource("/org/coala/WorkspaceWindow.ui")
self.section_stack_map = {}
self.sections_dict = OrderedDict()
self.src = src
self._setup_view()
self.read_coafile()
def _setup_view(self):
self.headerbar = self._ui.get_object("headerbar")
self.set_titlebar(self.headerbar)
self.add(self._ui.get_object("container"))
self.stack = self._ui.get_object("main_stack")
self.sections = self._ui.get_object("sections")
self.section_switcher = self._ui.get_object("section_switcher")
self.section_switcher.connect("row-selected",
self.on_row_selection_changed)
self.add_section_button = self._ui.get_object("add_section_button")
self.add_section_button.connect("clicked", self.add_section)
def add_section(self, button=None, name=None):
section_row_template = Gtk.Builder()
section_row_template.add_from_resource('/org/coala/SectionRow.ui')
section_row = Gtk.ListBoxRow()
box = section_row_template.get_object("section_row")
editable_label = section_row_template.get_object("name-edit")
delete_button = section_row_template.get_object("delete_button")
if name is not None:
editable_label.entry.set_text(name)
self.create_section_view(widget=editable_label,
                                 row_object=section_row)
editable_label.connect("edited",
self.update_section_name,
name,
self.section_stack_map[section_row])
else:
editable_label.connect("edited",
self.create_section_view,
section_row)
section_row.add(box)
section_row.set_visible(True)
delete_button.connect("clicked", self.delete_row, section_row)
self.section_switcher.add(section_row)
self.section_switcher.queue_draw()
return section_row
def delete_row(self, button, listboxrow):
del self.sections_dict[self.section_stack_map[listboxrow].get_name()]
self.section_stack_map[listboxrow].destroy()
del self.section_stack_map[listboxrow]
listboxrow.destroy()
conf_writer = ConfWriter(self.src+'/.coafile')
conf_writer.write_sections(self.sections_dict)
conf_writer.close()
def on_close(self, event, widget):
self.get_application().greeter.show()
self.destroy()
    def create_section_view(self, widget=None, arg=None, row_object=None):
section_view = SectionView(self.sections_dict, self.src)
section_view.set_visible(True)
section_view.set_name(widget.get_name())
self.sections.add_named(section_view, widget.get_name())
self.sections.set_visible_child_name(widget.get_name())
if arg is not None:
widget.connect("edited",
self.update_section_name,
widget.get_name(),
section_view)
self.sections_dict[widget.get_name()] = Section(widget.get_name())
section_view.add_setting()
conf_writer = ConfWriter(self.src+'/.coafile')
conf_writer.write_sections(self.sections_dict)
conf_writer.close()
        self.section_stack_map[row_object] = section_view
def on_row_selection_changed(self, listbox, row):
self.sections.set_visible_child(self.section_stack_map[row])
def read_coafile(self):
if os.path.isfile(self.src+'/.coafile'):
self.sections_dict = load_configuration(
["-c", self.src+'/.coafile'], LogPrinter(NullPrinter()))[0]
for section in self.sections_dict:
section_row = self.add_section(name=section)
for setting in self.sections_dict[section].contents:
if "comment" in setting:
continue
self.section_stack_map[section_row].add_setting(
self.sections_dict[section].contents[setting])
self.section_stack_map[section_row].add_setting()
def update_section_name(self, widget, arg, old_name, section_view):
section_view.set_name(widget.get_name())
self.sections_dict[old_name].name = widget.get_name()
self.sections_dict = update_ordered_dict_key(self.sections_dict,
old_name,
widget.get_name())
widget.connect("edited", self.update_section_name, widget.get_name())
conf_writer = ConfWriter(self.src+'/.coafile')
conf_writer.write_sections(self.sections_dict)
conf_writer.close()
|
agpl-3.0
| -2,998,170,187,900,760,000
| 42.8125
| 78
| 0.605742
| false
| 3.974486
| false
| false
| false
|
jamesaud/se1-group4
|
jmatcher/posts/migrations/0001_initial.py
|
1
|
1897
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-26 16:11
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(max_length=1000)),
('image', models.ImageField(null=True, upload_to='')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('likes', models.ManyToManyField(related_name='likes', to=settings.AUTH_USER_MODEL)),
('shares', models.ManyToManyField(related_name='shares', to=settings.AUTH_USER_MODEL)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PostComments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.CharField(max_length=250)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('commenting_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='posts.Post')),
],
),
]
|
mit
| -4,786,760,844,580,201,000
| 43.116279
| 129
| 0.605166
| false
| 4.178414
| false
| false
| false
|
hyunjunbrucelee/2017sejongAI
|
hash/test.py
|
1
|
1327
|
from PIL import Image
import numpy as np
# Declare a function that converts image data to an Average Hash
def average_hash(fname, size = 16): # average_hash(filename, size)
    img = Image.open(fname) # open the image data
    img = img.convert('L') # convert to grayscale
    # passing '1' would binarize instead; modes such as "RGB", "RGBA", "CMYK" can also be given
    img = img.resize((size, size), Image.ANTIALIAS) # resize
    pixel_data = img.getdata() # get the pixel data
    pixels = np.array(pixel_data) # convert to a NumPy array
    pixels = pixels.reshape((size, size)) # reshape into a 2-D array
    avg = pixels.mean() # compute the mean
    diff = 1*(pixels>avg) # 1 where a pixel is above the mean, 0 otherwise
    return diff
# Declare a function that converts the binary hash to a hex string
def np2hash(n):
    bhash = []
    for n1 in n.tolist(): # use the argument rather than the global ahash
        s1 = [str(i) for i in n1]
        s2 = "".join(s1)
        i = int(s2,2) # convert the binary string to an integer
        bhash.append("%04x"%i)
    return "".join(bhash)
# Print the Average Hash
ahash = average_hash('dog.jpg')
print(ahash)
print(np2hash(ahash))
# now ready to be used for training
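# Minimal similarity sketch (assumes a second image file 'dog2.jpg' might exist):
# the Hamming distance between two Average Hashes approximates visual similarity.
import os
if os.path.exists('dog2.jpg'):
    ahash2 = average_hash('dog2.jpg')
    distance = np.count_nonzero(ahash != ahash2) # number of differing hash bits
    print("Hamming distance:", distance) # smaller distance -> more similar images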
|
gpl-3.0
| -888,860,325,065,370,600
| 27.083333
| 80
| 0.539637
| false
| 1.628305
| false
| false
| false
|
stellaf/odoofleet
|
fleet_task_issue/fleet_task_issue.py
|
1
|
7626
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addon by CLEARCORP S.A. <http://clearcorp.co.cr> and AURIUM TECHNOLOGIES <http://auriumtechnologies.com>
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import models, fields, api, _
class FleetVehicle(models.Model):
_inherit = 'fleet.vehicle'
@api.model
def create(self, vals):
acount_obj=self.env['account.analytic.account']
fleet_id = super(FleetVehicle, self).create(vals)
account_id=acount_obj.create({'name':self._vehicle_name_get(fleet_id)})
fleet_id.write({'analytic_account_id':account_id.id,'use_tasks':True,'use_issues':True})
return fleet_id
@api.multi
def _count_vehicle_task(self):
project_obj = self.env['project.project']
self.task_count=len(project_obj.search([('analytic_account_id', '=', self.analytic_account_id.id)]).task_ids)
@api.multi
def _count_vehicle_issue(self):
issue_obj = self.env['project.project']
self.issue_count=len(issue_obj.search([('analytic_account_id', '=', self.analytic_account_id.id)]).issue_ids)
@api.multi
def _count_swhwlogbook_task(self):
domain=[('project_id.analytic_account_id', '=', self.analytic_account_id.id), ('task_type_id.name','ilike','SWHW')]
self.swhw_task_count=self.env['project.task'].search_count(domain)
@api.multi
def _count_wkshop_task(self):
domain=[('project_id.analytic_account_id', '=', self.analytic_account_id.id), ('task_type_id.name','ilike','Workshop')]
self.wkshop_task_count=self.env['project.task'].search_count(domain)
@api.multi
def write(self, vals):
acount_obj=self.env['account.analytic.account']
res = super(FleetVehicle, self).write(vals)
if not self.analytic_account_id:
account_id=acount_obj.create({'name':self._vehicle_name_get(self)})
self.write({'analytic_account_id':account_id.id,'use_tasks':True,'use_issues':True})
self.analytic_account_id.write({'name':self.name,'use_tasks':True,'use_issues':True})
return res
@api.multi
def _vehicle_name_get(self,record):
res = record.model_id.brand_id.name + '/' + record.model_id.name + '/' + record.license_plate
return res
@api.multi
def action_view_alltasks(self):
action = self.env.ref('project.act_project_project_2_project_task_all')
active_id = self.env['project.project'].search([('analytic_account_id', '=', self.analytic_account_id.id)]).id
context = {'group_by': 'stage_id', 'search_default_project_id': [active_id], 'default_project_id': active_id, }
return {
'key2':'tree_but_open',
'name': action.name,
'res_model': 'project.task',
'help': action.help,
'type': action.type,
'view_type': action.view_type,
'view_mode': action.view_mode,
'res_id': active_id,
'views': action.views,
'target': action.target,
'context':context,
'nodestroy': True,
'flags': {'form': {'action_buttons': True}}
}
@api.multi
def action_view_allissues(self):
action = self.env.ref('project_issue.act_project_project_2_project_issue_all')
active_id = self.env['project.project'].search([('analytic_account_id', '=', self.analytic_account_id.id)]).id
context = {'group_by': 'stage_id', 'search_default_project_id': [active_id], 'default_project_id': active_id,}
return {
'name': action.name,
'res_model': 'project.issue',
'help': action.help,
'type': action.type,
'view_type': action.view_type,
'view_mode': action.view_mode,
'views': action.views,
'target': action.target,
'res_id': active_id,
'context':context,
'nodestroy': True,
'flags': {'form': {'action_buttons': True}}
}
    # For this part of the code, define the project task types "SWHW" and "Workshop" using the "task type color" app from the Odoo store.
# @api.multi
# def action_view_SWHWlogbooktasks(self):
# self.ensure_one()
# action = self.env.ref('project.act_project_project_2_project_task_all')
# active_id = self.env['project.project'].search([('analytic_account_id', '=', self.analytic_account_id.id)]).id
# context = {'group_by': 'stage_id', 'search_default_project_id': [active_id], 'default_project_id': active_id, 'task_type_id.name':'SWHW',}
# return {
# 'key2':'tree_but_open',
# 'name': action.name,
# 'res_model': 'project.task',
# 'help': action.help,
# 'type': action.type,
# 'view_type': action.view_type,
# 'view_mode': action.view_mode,
# 'res_id': active_id,
# 'views': action.views,
# 'target': action.target,
# 'context':context,
# 'nodestroy': True,
# 'flags': {'form': {'action_buttons': True}}
# }
#
# @api.multi
# def action_view_Workshoptasks(self):
# self.ensure_one()
# action = self.env.ref('project.act_project_project_2_project_task_all')
# active_id = self.env['project.project'].search([('analytic_account_id', '=', self.analytic_account_id.id)]).id
# context = {'group_by': 'stage_id', 'search_default_project_id': [active_id], 'default_project_id': active_id, 'task_type_id.name':'Workshop',}
# return {
# 'key2':'tree_but_open',
# 'name': action.name,
# 'res_model': 'project.task',
# 'help': action.help,
# 'type': action.type,
# 'view_type': action.view_type,
# 'view_mode': action.view_mode,
# 'res_id': active_id,
# 'views': action.views,
# 'target': action.target,
# 'context':context,
# 'nodestroy': True,
# 'flags': {'form': {'action_buttons': True}}
# }
analytic_account_id = fields.Many2one('account.analytic.account',string='Analytic Account')
task_count = fields.Integer(compute=_count_vehicle_task, string="Vehicle Tasks" , multi=True)
issue_count = fields.Integer(compute=_count_vehicle_issue, string="Vehicle Issues" , multi=True)
# swhw_task_count = fields.Integer(compute=_count_swhwlogbook_task, string="SWHWlogbook Tasks" , multi=True)
# wkshop_task_count = fields.Integer(compute=_count_wkshop_task, string="workshop Tasks" , multi=True)
class fleet_vehicle_log_services(models.Model):
_inherit = 'fleet.vehicle.log.services'
invoice_id = fields.Many2one('account.invoice',string='Facture')
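# Usage sketch (hypothetical values, from an Odoo shell): creating a vehicle
# auto-creates and links an analytic account (see FleetVehicle.create above).
# vehicle = env['fleet.vehicle'].create({'model_id': model.id, 'license_plate': 'ABC-123'})
# vehicle.analytic_account_id  # -> the auto-created account, named after the vehicle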
|
gpl-3.0
| -4,983,628,240,611,340,000
| 44.664671
| 152
| 0.590873
| false
| 3.425876
| false
| false
| false
|