| repo_name (string, 5 to 92 chars) | path (string, 4 to 221 chars) | copies (string, 19 classes) | size (string, 4 to 6 chars) | content (string, 766 to 896k chars) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51 to 99.9) | line_max (int64, 32 to 997) | alpha_frac (float64, 0.25 to 0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5 to 13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| teamosceola/bitbake | lib/bb/ui/knotty.py | 1 | 12691 |
#
# BitBake (No)TTY UI Implementation
#
# Handling output to TTYs or files (no TTY)
#
# Copyright (C) 2006-2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from __future__ import division
import os
import sys
import xmlrpclib
import logging
import progressbar
import bb.msg
from bb.ui import uihelper
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
class BBProgress(progressbar.ProgressBar):
def __init__(self, msg, maxval):
self.msg = msg
widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
progressbar.ETA()]
progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets)
class NonInteractiveProgress(object):
fobj = sys.stdout
def __init__(self, msg, maxval):
self.msg = msg
self.maxval = maxval
def start(self):
self.fobj.write("%s..." % self.msg)
self.fobj.flush()
return self
def update(self, value):
pass
def finish(self):
self.fobj.write("done.\n")
self.fobj.flush()
def new_progress(msg, maxval):
if interactive:
return BBProgress(msg, maxval)
else:
return NonInteractiveProgress(msg, maxval)
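# Example usage (an illustrative sketch): whichever object new_progress()
# returns, it follows the same start()/update()/finish() protocol:
#
#     pbar = new_progress("Fetching sources", 100).start()
#     for i in range(100):
#         pbar.update(i + 1)
#     pbar.finish()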
def pluralise(singular, plural, qty):
if(qty == 1):
return singular % qty
else:
return plural % qty
def main(server, eventHandler):
# Get values of variables which control our output
includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
consolelogfile = server.runCommand(["getVariable", "BB_CONSOLELOG"])
helper = uihelper.BBUIHelper()
console = logging.StreamHandler(sys.stdout)
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
bb.msg.addDefaultlogFilter(console)
console.setFormatter(format)
logger.addHandler(console)
if consolelogfile:
consolelog = logging.FileHandler(consolelogfile)
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(format)
logger.addHandler(consolelog)
try:
cmdline = server.runCommand(["getCmdLineAction"])
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
elif not cmdline['action']:
print(cmdline['msg'])
return 1
ret = server.runCommand(cmdline['action'])
if ret != True:
print("Couldn't get default commandline! %s" % ret)
return 1
except xmlrpclib.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return 1
parseprogress = None
cacheprogress = None
shutdown = 0
interrupted = False
return_value = 0
errors = 0
warnings = 0
taskfailures = []
while True:
try:
event = eventHandler.waitEvent(0.25)
if event is None:
if shutdown > 1:
break
continue
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not shutdown:
shutdown = 1
if shutdown and helper.needUpdate:
activetasks, failedtasks = helper.getTasks()
if activetasks:
print("Waiting for %s active tasks to finish:" % len(activetasks))
for tasknum, task in enumerate(activetasks):
print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task))
if isinstance(event, logging.LogRecord):
if event.levelno >= format.ERROR:
errors = errors + 1
return_value = 1
elif event.levelno == format.WARNING:
warnings = warnings + 1
# For "normal" logging conditions, don't show note logs from tasks
# but do show them if the user has changed the default log level to
# include verbose/debug messages
if event.taskpid != 0 and event.levelno <= format.NOTE:
continue
logger.handle(event)
continue
if isinstance(event, bb.build.TaskFailed):
return_value = 1
logfile = event.logfile
if logfile and os.path.exists(logfile):
print("ERROR: Logfile of failure stored in: %s" % logfile)
if includelogs and not event.errprinted:
print("Log data follows:")
f = open(logfile, "r")
lines = []
while True:
l = f.readline()
if l == '':
break
l = l.rstrip()
if loglines:
lines.append(' | %s' % l)
if len(lines) > int(loglines):
lines.pop(0)
else:
print('| %s' % l)
f.close()
if lines:
for line in lines:
print(line)
if isinstance(event, bb.build.TaskBase):
logger.info(event._message)
continue
if isinstance(event, bb.event.ParseStarted):
if event.total == 0:
continue
parseprogress = new_progress("Parsing recipes", event.total).start()
continue
if isinstance(event, bb.event.ParseProgress):
parseprogress.update(event.current)
continue
if isinstance(event, bb.event.ParseCompleted):
if not parseprogress:
continue
parseprogress.finish()
print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
% ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
continue
if isinstance(event, bb.event.CacheLoadStarted):
cacheprogress = new_progress("Loading cache", event.total).start()
continue
if isinstance(event, bb.event.CacheLoadProgress):
cacheprogress.update(event.current)
continue
if isinstance(event, bb.event.CacheLoadCompleted):
cacheprogress.finish()
print("Loaded %d entries from dependency cache." % event.num_entries)
continue
if isinstance(event, bb.command.CommandFailed):
return_value = event.exitcode
errors = errors + 1
logger.error("Command execution failed: %s", event.error)
shutdown = 2
continue
if isinstance(event, bb.command.CommandExit):
if not return_value:
return_value = event.exitcode
continue
if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
shutdown = 2
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
event._item,
", ".join(event._candidates))
logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
continue
if isinstance(event, bb.event.NoProvider):
return_value = 1
errors = errors + 1
if event._runtime:
r = "R"
else:
r = ""
if event._dependees:
logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)", r, event._item, ", ".join(event._dependees), r)
else:
logger.error("Nothing %sPROVIDES '%s'", r, event._item)
if event._reasons:
for reason in event._reasons:
logger.error("%s", reason)
continue
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring))
continue
if isinstance(event, bb.runqueue.runQueueTaskStarted):
if event.noexec:
tasktype = 'noexec task'
else:
tasktype = 'task'
logger.info("Running %s %s of %s (ID: %s, %s)",
tasktype,
event.stats.completed + event.stats.active +
event.stats.failed + 1,
event.stats.total, event.taskid, event.taskstring)
continue
if isinstance(event, bb.runqueue.runQueueTaskFailed):
taskfailures.append(event.taskstring)
logger.error("Task %s (%s) failed with exit code '%s'",
event.taskid, event.taskstring, event.exitcode)
continue
if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
event.taskid, event.taskstring, event.exitcode)
continue
# ignore
if isinstance(event, (bb.event.BuildBase,
bb.event.StampUpdate,
bb.event.ConfigParsed,
bb.event.RecipeParsed,
bb.event.RecipePreFinalise,
bb.runqueue.runQueueEvent,
bb.runqueue.runQueueExitWait,
bb.event.OperationStarted,
bb.event.OperationCompleted,
bb.event.OperationProgress)):
continue
logger.error("Unknown event: %s", event)
except EnvironmentError as ioerror:
# ignore interrupted io
if ioerror.args[0] == 4:
pass
except KeyboardInterrupt:
if shutdown == 1:
print("\nSecond Keyboard Interrupt, stopping...\n")
server.runCommand(["stateStop"])
if shutdown == 0:
interrupted = True
print("\nKeyboard Interrupt, closing down...\n")
server.runCommand(["stateShutdown"])
shutdown = shutdown + 1
pass
summary = ""
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
"\nSummary: %s tasks failed:", len(taskfailures))
for failure in taskfailures:
summary += "\n %s" % failure
if warnings:
summary += pluralise("\nSummary: There was %s WARNING message shown.",
"\nSummary: There were %s WARNING messages shown.", warnings)
if return_value:
summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
"\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
if summary:
print(summary)
if interrupted:
print("Execution was interrupted, returning a non-zero exit code.")
if return_value == 0:
return_value = 1
return return_value
| gpl-2.0 | -854,224,645,206,136,700 | 38.908805 | 175 | 0.532897 | false | 4.772847 | false | false | false |
| MaxLinCode/tardy-HackIllinois-2017 | alexa/lambda_function.py | 1 | 6803 |
"""
This sample demonstrates a simple skill built with the Amazon Alexa Skills Kit.
The Intent Schema, Built-in Slots, and Sample Utterances for this skill, as well
as testing instructions are located at http://amzn.to/1LzFrj6
For additional samples, visit the Alexa Skills Kit Getting Started guide at
http://amzn.to/1LGWsLG
"""
from __future__ import print_function
from twilio.rest import TwilioRestClient
from loadData import rawToTime, getNumber
from config import *
accountSid = 'ACcf54ef49063aaa784c99aec82d7f16c2'
authToken = '31f817a48ee7cd461c07c57490eac6ce'
fromNumber = '19163183442'
# --------------- Helpers that build all of the responses ----------------------
def build_speechlet_response(title, output, reprompt_text, should_end_session):
return {
'outputSpeech': {
'type': 'PlainText',
'text': output
},
'card': {
'type': 'Simple',
'title': 'SessionSpeechlet - ' + title,
'content': 'SessionSpeechlet - ' + output
},
'reprompt': {
'outputSpeech': {
'type': 'PlainText',
'text': reprompt_text
}
},
'shouldEndSession': should_end_session
}
def build_response(session_attributes, speechlet_response):
return {
'version': '1.0',
'sessionAttributes': session_attributes,
'response': speechlet_response
}
# --------------- Functions that control the skill's behavior ------------------
def get_welcome_response():
session_attributes = {}
card_title = "Welcome"
speech_output = "Hello, welcome to the Tardy skill."
# If the user either does not reply to the welcome message or says something
# that is not understood, they will be prompted again with this text.
reprompt_text = "You can ask me to send a message to your friends."
should_end_session = False
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, reprompt_text, should_end_session))
def sarah_intent_handler(intent):
card_title = "Sarah"
speech_output = "Sarah is the best"
return build_response(None, build_speechlet_response(
card_title, speech_output, None, False))
def formatMessage(userName, targetName, time, place):
return "Hello %s, %s would like to meet at %s at %s." % (targetName.title(), userName.title(), place.title(), time)
def getInfo(userId, target, time, place):
d = {}
time = rawToTime(time)
userName = ""
for x in target:
arr = getNumber(userId, x)
if userName == "":
userName = arr[0]
d[arr[1]] = [arr[2], formatMessage(userName, arr[1], time, place)]
for key in d:
sendText(d[key][0], d[key][1])
def twilio_intent_handler(intent):
card_title = "Twilio"
#print(intent['slots'])
target = intent["slots"]["nameSlot"]["value"]
time = intent["slots"]["timeSlot"]["value"]
place = intent["slots"]["placeSlot"]["value"]
#userID = kijZjJJ5ozPZxfeHYfjh3zd3TUh1
getInfo('kijZjJJ5ozPZxfeHYfjh3zd3TUh1', target, time, place)
#cellNumber = ""
#messageText = ""
#slots = intent['slots']
#cellNumber = slots['numberSlot']['value']
#messageText = slots['msgSlot']['value']
# call the method to send text
speech_output = "Message sent."
# Setting reprompt_text to None signifies that we do not want to reprompt
# the user. If the user does not respond or says something that is not
# understood, the session will end.
return build_response(None, build_speechlet_response(
card_title, speech_output, None, False))
#number,message
def sendText(to_num, msg_text):
try:
client = TwilioRestClient(accountSid, authToken)
client.messages.create(
to=to_num,
from_=fromNumber,
body=msg_text
)
return True
except Exception as e:
print("Failed to send message: ")
print(e.code)
return False
def help_intent_handler(intent):
card_title = "Help"
speech_output = "Ask me to send someone a text."
return build_response(None, build_speechlet_response(
card_title, speech_output, None, False))
def misunderstood_handler(intent):
card_title = "Misunderstood"
speech_output = "Sorry, please try again."
return build_response(None, build_speechlet_response(
card_title, speech_output, None, True))
def handle_session_end_request():
card_title = "Session Ended"
speech_output = "Thank you for trying our Tardy skill. " \
"Have a great time at Hack Illinois! "
# Setting this to true ends the session and exits the skill.
should_end_session = True
return build_response(None, build_speechlet_response(
card_title, speech_output, None, should_end_session))
# --------------- Events ------------------
def on_launch(launch_request):
""" Called when the user launches the skill without specifying what they
want
"""
print("on_launch requestId=" + launch_request['requestId'])
# Dispatch to your skill's launch
return get_welcome_response()
def on_intent(intent_request):
""" Called when the user specifies an intent for this skill """
print("on_intent requestId=" + intent_request['requestId'])
intent = intent_request['intent']
intent_name = intent_request['intent']['name']
# Dispatch to your skill's intent handlers
if intent_name == "SarahIntent":
return sarah_intent_handler(intent)
elif intent_name == "TwilioIntent":
return twilio_intent_handler(intent)
elif intent_name == "HelpIntent":
return help_intent_handler(intent)
elif intent_name == "AMAZON.CancelIntent" or intent_name == "AMAZON.StopIntent":
return handle_session_end_request()
else:
return misunderstood_handler(intent)
# --------------- Main handler ------------------
def lambda_handler(event, context):
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
session_attributes = {}
#applicationId = event['session']['application']['applicationId']
#if applicationId != TWILIO_APPLICATION_ID:
# should_end_session = True
# bad_request_output = "Bad Request"
# print("Bad ApplicationId Received: "+applicationId)
# return build_response(session_attributes, build_speechlet_response("Twilio", bad_request_output, None, should_end_session))
if event['request']['type'] == "LaunchRequest":
return on_launch(event['request'])
elif event['request']['type'] == "IntentRequest":
return on_intent(event['request'])
| mit | 3,407,677,043,786,212,400 | 32.850746 | 132 | 0.641482 | false | 3.68327 | false | false | false |
| quru/qis | src/imageserver/errors.py | 1 | 2593 |
#
# Quru Image Server
#
# Document: errors.py
# Date started: 31 Mar 2011
# By: Matt Fozard
# Purpose: Internal errors and exceptions
# Requires:
# Copyright: Quru Ltd (www.quru.com)
# Licence:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/
#
# Last Changed: $Date$ $Rev$ by $Author$
#
# Notable modifications:
# Date By Details
# ========= ==== ============================================================
#
class ImageError(ValueError):
"""
An error resulting from an invalid or unsupported imaging operation.
"""
pass
class AlreadyExistsError(ValueError):
"""
An error resulting from a duplicate value or an attempt to create an
object that already exists.
"""
pass
class DoesNotExistError(ValueError):
"""
An error resulting from an attempt to use an object that does not exist.
"""
pass
class SecurityError(Exception):
"""
An error resulting from some unauthorised action.
"""
pass
class StartupError(Exception):
"""
An error that should prevent server startup.
"""
pass
class AuthenticationError(Exception):
"""
An error resulting from a failure to authenticate.
"""
pass
class DBError(Exception):
"""
An error resulting from a database operation.
Adds an optional extra 'sql' attribute.
"""
def __init__(self, message, sql=None):
Exception.__init__(self, message)
self.sql = sql if sql is not None else ''
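# Illustrative sketch of how the optional 'sql' attribute can be used
# (the statement shown is made up for the example):
#
#     try:
#         raise DBError("constraint violation", sql="INSERT INTO images ...")
#     except DBError as e:
#         print("%s (sql: %s)" % (e, e.sql))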
class DBDataError(DBError):
"""
An error resulting from incorrect database data.
"""
pass
class ParameterError(ValueError):
"""
An error resulting from an invalid parameter value.
"""
pass
class TimeoutError(RuntimeError):
"""
An error resulting from an operation timeout.
"""
pass
class ServerTooBusyError(RuntimeError):
"""
Raised when the server is too busy to service a request.
"""
pass
| agpl-3.0 | 6,222,362,547,311,885,000 | 22.36036 | 79 | 0.649055 | false | 4.209416 | false | false | false |
| lukas-bednar/python-rrmngmnt | rrmngmnt/ssh.py | 1 | 10209 |
import os
import time
import socket
import paramiko
import contextlib
import subprocess
from rrmngmnt.executor import Executor
AUTHORIZED_KEYS = os.path.join("%s", ".ssh/authorized_keys")
KNOWN_HOSTS = os.path.join("%s", ".ssh/known_hosts")
ID_RSA_PUB = os.path.join("%s", ".ssh/id_rsa.pub")
ID_RSA_PRV = os.path.join("%s", ".ssh/id_rsa")
CONNECTIVITY_TIMEOUT = 600
CONNECTIVITY_SAMPLE_TIME = 20
class RemoteExecutor(Executor):
"""
Any resource which provides SSH service.
This class is meant to replace our current utilities.machine.LinuxMachine
class. It gives you lower-level access to the ssh communication:
live interaction, no more plain True/False results, and no more
stdout mixed in with stderr.
You can still use the 'run_cmd' method if you don't care about that,
but the recommended way to work is like this:
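A minimal usage sketch (illustrative only; 'user' is assumed to be a
User instance and 'address' a reachable host):

    executor = RemoteExecutor(user, address)
    rc, out, err = executor.run_cmd(['ls', '-l'])
    # or keep a single ssh session open for several commands
    with executor.session() as ss:
        rc, out, err = ss.run_cmd(['hostname'])
        rc, out, err = ss.run_cmd(['uptime'])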
"""
TCP_TIMEOUT = 10.0
class LoggerAdapter(Executor.LoggerAdapter):
"""
Makes sure that all logs which are done via this class, has
appropriate prefix. [user@IP/password]
"""
def process(self, msg, kwargs):
return (
"[%s@%s/%s] %s" % (
self.extra['self'].user.name,
self.extra['self'].address,
self.extra['self'].user.password,
msg,
),
kwargs,
)
class Session(Executor.Session):
"""
Represents active ssh connection
"""
def __init__(self, executor, timeout=None, use_pkey=False):
super(RemoteExecutor.Session, self).__init__(executor)
if timeout is None:
timeout = RemoteExecutor.TCP_TIMEOUT
self._timeout = timeout
self._ssh = paramiko.SSHClient()
self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if use_pkey:
self.pkey = paramiko.RSAKey.from_private_key_file(
ID_RSA_PRV % os.path.expanduser('~')
)
self._executor.user.password = None
else:
self.pkey = None
def __exit__(self, type_, value, tb):
if type_ is socket.timeout:
self._update_timeout_exception(value)
try:
self.close()
except Exception as ex:
if type_ is None:
raise
else:
self._executor.logger.debug(
"Can not close ssh session %s", ex,
)
def open(self):
self._ssh.get_host_keys().clear()
try:
self._ssh.connect(
self._executor.address,
username=self._executor.user.name,
password=self._executor.user.password,
timeout=self._timeout,
pkey=self.pkey
)
except (socket.gaierror, socket.herror) as ex:
args = list(ex.args)
message = "%s: %s" % (self._executor.address, args[1])
args[1] = message
ex.strerror = message
ex.args = tuple(args)
raise
except socket.timeout as ex:
self._update_timeout_exception(ex)
raise
def close(self):
self._ssh.close()
def _update_timeout_exception(self, ex, timeout=None):
if getattr(ex, '_updated', False):
return
if timeout is None:
timeout = self._timeout
message = "%s: timeout(%s)" % (
self._executor.address, timeout
)
ex.args = (message,)
ex._updated = True
def command(self, cmd):
return RemoteExecutor.Command(cmd, self)
def run_cmd(self, cmd, input_=None, timeout=None):
cmd = self.command(cmd)
return cmd.run(input_, timeout)
@contextlib.contextmanager
def open_file(self, path, mode='r', bufsize=-1):
with contextlib.closing(self._ssh.open_sftp()) as sftp:
with contextlib.closing(
sftp.file(
path,
mode,
bufsize,
)
) as fh:
yield fh
class Command(Executor.Command):
"""
This class holds all data related to command execution.
- the command itself
- stdout/stderr streams
- out/err string which were produced by command
- returncode the exit status of command
"""
def __init__(self, cmd, session):
super(RemoteExecutor.Command, self).__init__(
subprocess.list2cmdline(cmd),
session,
)
self._in = None
self._out = None
self._err = None
def get_rc(self, wait=False):
if self._rc is None:
if self._out is not None:
if self._out.channel.exit_status_ready() or wait:
self._rc = self._out.channel.recv_exit_status()
return self._rc
@contextlib.contextmanager
def execute(self, bufsize=-1, timeout=None, get_pty=False):
"""
This method allows you to work directly with streams.
with cmd.execute() as in_, out, err:
# where in_, out and err are file-like objects
# where you can read data from these
"""
try:
self.logger.debug("Executing: %s", self.cmd)
self._in, self._out, self._err = self._ss._ssh.exec_command(
self.cmd,
bufsize=bufsize,
timeout=timeout,
get_pty=get_pty,
)
yield self._in, self._out, self._err
self.get_rc(True)
except socket.timeout as ex:
self._ss._update_timeout_exception(ex, timeout)
raise
finally:
if self._in is not None:
self._in.close()
if self._out is not None:
self._out.close()
if self._err is not None:
self._err.close()
self.logger.debug("Results of command: %s", self.cmd)
self.logger.debug(" OUT: %s", self.out)
self.logger.debug(" ERR: %s", self.err)
self.logger.debug(" RC: %s", self.rc)
def run(self, input_, timeout=None, get_pty=False):
with self.execute(
timeout=timeout, get_pty=get_pty
) as (in_, out, err):
if input_:
in_.write(input_)
in_.close()
self.out = out.read()
self.err = err.read()
return self.rc, self.out, self.err
def __init__(self, user, address, use_pkey=False):
"""
:param user: user
:type user: instance of User
:param address: ip / hostname
:type address: str
:param use_pkey: use ssh private key in the connection
:type use_pkey: bool
"""
super(RemoteExecutor, self).__init__(user)
self.address = address
self.use_pkey = use_pkey
def session(self, timeout=None):
"""
:param timeout: tcp timeout
:type timeout: float
:return: the session
:rtype: instance of RemoteExecutor.Session
"""
return RemoteExecutor.Session(self, timeout, self.use_pkey)
def run_cmd(self, cmd, input_=None, tcp_timeout=None, io_timeout=None):
"""
:param cmd: command
:type cmd: list
:param input_: input data
:type input_: str
:param tcp_timeout: tcp timeout
:type tcp_timeout: float
:param io_timeout: timeout for data operation (read/write)
:type io_timeout: float
:return: rc, out, err
:rtype: tuple (int, str, str)
"""
with self.session(tcp_timeout) as session:
return session.run_cmd(cmd, input_, io_timeout)
def is_connective(self, tcp_timeout=20.0):
"""
Check if address is connective via ssh
:param tcp_timeout: time to wait for response
:type tcp_timeout: float
:return: True if address is connective, False otherwise
:rtype: bool
"""
try:
self.logger.info(
"Check if address is connective via ssh in given timeout %s",
tcp_timeout
)
self.run_cmd(['true'], tcp_timeout=tcp_timeout)
return True
except (socket.timeout, socket.error) as e:
self.logger.debug("Socket error: %s", e)
except Exception as e:
self.logger.debug("SSH exception: %s", e)
return False
def wait_for_connectivity_state(
self, positive,
timeout=CONNECTIVITY_TIMEOUT,
sample_time=CONNECTIVITY_SAMPLE_TIME
):
"""
Wait until the address becomes connective, or stops being connective, via ssh
:param positive: wait for the positive or negative connective state
:type positive: bool
:param timeout: wait timeout
:type timeout: int
:param sample_time: sample the ssh each sample_time seconds
:type sample_time: int
:return: True if positive and ssh is connective, or if
negative and ssh is not connective; otherwise False
:rtype: bool
"""
reachable = "unreachable" if positive else "reachable"
timeout_counter = 0
while self.is_connective() != positive:
if timeout_counter > timeout:
self.logger.error(
"Address %s is still %s via ssh, after %s seconds",
self.address, reachable, timeout
)
return False
time.sleep(sample_time)
timeout_counter += sample_time
return True
| gpl-2.0 | -7,273,027,268,052,574,000 | 33.962329 | 77 | 0.515526 | false | 4.467834 | false | false | false |
| lipro-yocto/git-repo | subcmds/cherry_pick.py | 1 | 3421 |
# Copyright (C) 2010 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
from command import Command
from git_command import GitCommand
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')
class CherryPick(Command):
common = True
helpSummary = "Cherry-pick a change."
helpUsage = """
%prog <sha1>
"""
helpDescription = """
'%prog' cherry-picks a change from one branch to another.
The change id will be updated, and a reference to the old
change id will be added.
"""
def _Options(self, p):
pass
def ValidateOptions(self, opt, args):
if len(args) != 1:
self.Usage()
def Execute(self, opt, args):
reference = args[0]
p = GitCommand(None,
['rev-parse', '--verify', reference],
capture_stdout=True,
capture_stderr=True)
if p.Wait() != 0:
print(p.stderr, file=sys.stderr)
sys.exit(1)
sha1 = p.stdout.strip()
p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
if p.Wait() != 0:
print("error: Failed to retrieve old commit message", file=sys.stderr)
sys.exit(1)
old_msg = self._StripHeader(p.stdout)
p = GitCommand(None,
['cherry-pick', sha1],
capture_stdout=True,
capture_stderr=True)
status = p.Wait()
print(p.stdout, file=sys.stdout)
print(p.stderr, file=sys.stderr)
if status == 0:
# The cherry-pick was applied correctly. We just need to edit the
# commit message.
new_msg = self._Reformat(old_msg, sha1)
p = GitCommand(None, ['commit', '--amend', '-F', '-'],
provide_stdin=True,
capture_stdout=True,
capture_stderr=True)
p.stdin.write(new_msg)
p.stdin.close()
if p.Wait() != 0:
print("error: Failed to update commit message", file=sys.stderr)
sys.exit(1)
else:
print('NOTE: When committing (please see above) and editing the commit '
'message, please remove the old Change-Id-line and add:')
print(self._GetReference(sha1), file=sys.stderr)
print(file=sys.stderr)
def _IsChangeId(self, line):
return CHANGE_ID_RE.match(line)
def _GetReference(self, sha1):
return "(cherry picked from commit %s)" % sha1
def _StripHeader(self, commit_msg):
lines = commit_msg.splitlines()
return "\n".join(lines[lines.index("") + 1:])
def _Reformat(self, old_msg, sha1):
new_msg = []
for line in old_msg.splitlines():
if not self._IsChangeId(line):
new_msg.append(line)
# Add a blank line between the message and the change id/reference
try:
if new_msg[-1].strip() != "":
new_msg.append("")
except IndexError:
pass
new_msg.append(self._GetReference(sha1))
return "\n".join(new_msg)
| apache-2.0 | 5,867,796,251,983,264,000 | 28.747826 | 78 | 0.621163 | false | 3.597266 | false | false | false |
| CloudBoltSoftware/cloudbolt-forge | ui_extensions/multilevelapprovals/views.py | 1 | 14185 |
from urllib.parse import urlparse
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.template import loader
from django.urls import reverse
from django.utils.translation import ugettext as _, ungettext
from accounts.models import UserProfile
from accounts.templatetags import account_tags
from cbhooks.exceptions import HookFailureException
from common.views import clear_cached_submenu
from costs.utils import (
is_rates_feature_enabled,
)
from cscv.models import CITConf, can_order_be_tested, CITTest
from orders.forms import DenyOrderForm
from orders.models import Order
from orders.templatetags.order_tags import order_pictograph, order_status_icon
from quota.exceptions import QuotaError
from servicecatalog.models import ServiceBlueprint
from utilities.decorators import json_view, dialog_view
from utilities.exceptions import (
InvalidCartException, InvalidConfigurationException,
CloudBoltException
)
from utilities.cb_http import django_sort_cols_from_datatable_request
from utilities.logger import ThreadLogger
from utilities.templatetags.helper_tags import link_or_label, how_long_ago
from utilities.views import access_denied
from .models import CustomOrder
from extensions.views import admin_extension
#@admin_extension(title='Multilevel Approvals Extension')
logger = ThreadLogger(__name__)
# Intentionally not protected at view level
@admin_extension(title='Multilevel Approvals Extension')
def order_list(request, message=""):
profile = request.get_user_profile()
# NOTE: order info will be sent via AJAX
return render(request, 'multilevelapprovals/templates/list.html', {
'pagetitle': _("Order List"),
'message': message,
'profile': profile,
'enable_rates_feature': is_rates_feature_enabled(),
})
# Intentionally not protected at view level
@json_view
def order_list_json(request, extra_context={}):
profile = request.get_user_profile()
# List of orders the user has permissions to view:
orders = Order.objects_for_profile(profile)
num_total_records = orders.count()
search = request.GET.get('sSearch')
if search:
orders = orders.search(search)
num_filtered_records = orders.count()
# Sorting: client passes column # which must be translated to model field
sort_cols = django_sort_cols_from_datatable_request(request, [
'id',
None,
'status',
'group',
# order by first & last which is how it's presented
['owner__user__first_name', 'owner__user__last_name'],
'create_date',
None, # Actions column is not sortable
])
orders = orders.order_by(*sort_cols)
# Pagination:
start = int(request.GET.get('iDisplayStart', None))
if start is not None:
end = int(start) + int(request.GET.get('iDisplayLength', 0))
orders = orders[start:end]
# Cache links to objects (since generating each requires a database hit):
_group_link_or_label_cache = {}
_owner_link_or_label_cache = {}
profiles_visible_to_this_profile = UserProfile.objects_for_profile(profile)
def cached_group_link_or_label(group):
try:
return _group_link_or_label_cache[group]
except KeyError:
rendered = link_or_label(group, profile)
_group_link_or_label_cache[group] = rendered
return rendered
def cached_owner_link_or_label(owner):
"""
Ensure that the owner avatar and link-or-label are only constructed once
per page view.
"""
if not owner or not owner.user:
return ""
try:
rendered = _owner_link_or_label_cache[owner]
except KeyError:
rendered = account_tags.rich_gravatar(
owner,
size=20,
link=(owner in profiles_visible_to_this_profile),
full_name=True
)
_owner_link_or_label_cache[owner] = rendered
return rendered
actions_template = loader.get_template('multilevelapprovals/templates/actions.html')
rows = []
for order in orders:
# Render the actions column value as HTML:
actions_html = actions_template.render(context={
'order': order,
'profile': profile,
'is_owner': order.owner == profile,
'can_approve': profile.has_permission('order.approve', order),
'can_cancel': order.can_cancel(profile),
'can_save_to_catalog': order.can_save_to_catalog(profile),
}, request=request)
#approval_str = "" #SRM
#for dict in is_multilevel_approval(order):
# for key in dict.keys():
# strng = UserProfile.objects.get(id=dict[key]).user.username
# if not approval_str:
# approval_str = key + ":", strng
# else:
# approval_str += "<BR>" + key + ":", strng
row = [
# We know that the user has access to view this order already,
# so show URL instead of link_or_label:
'<a href="%s">%s</a>' % (order.get_absolute_url(),
order.nickname()),
order_pictograph(order),
order_status_icon(order),
cached_group_link_or_label(order.group),
cached_owner_link_or_label(order.owner),
how_long_ago(order.create_date),
actions_html,
]
rows.append(row)
return {
# unaltered from client-side value, but cast to int to avoid XSS
# http://datatables.net/usage/server-side
"sEcho": int(request.GET.get('sEcho', 1)),
"iTotalRecords": num_total_records,
"iTotalDisplayRecords": num_filtered_records,
"aaData": rows, # Orders for the current page only
}
def modify(request, order_id):
"""
POST requests from the order list and detail views go here.
"""
order = get_object_or_404(Order, pk=order_id)
profile = request.get_user_profile()
# action matches the button values in order_actions templatetag.
action = request.POST.get('action', [''])
logger.info(f'SRM: in modify: action == {action}')
if action in ['approve', 'deny']:
if not profile.has_permission('order.approve', order):
return access_denied(
request, _("You do not have permission to approve this item."))
msg = ""
redirect_url = request.META['HTTP_REFERER']
if action == 'submit':
if not profile.has_permission('order.submit', order):
return access_denied(
request, _("You do not have permission to submit this order."))
try:
order.submit()
msg += order.start_approval_process(request)
messages.info(request, msg)
except QuotaError as e: # could happen if order is auto-approved
messages.error(request, e)
except InvalidConfigurationException as e:
messages.error(request, e)
except HookFailureException as e:
messages.error(request, e)
redirect_url = reverse('order_detail', args=[order.id])
elif action == 'approve':
logger.info('SRM: in modify: action == approve (should work) -- b4 approve_my_grms')
logger.info(f'SRM: order = {order}')
logger.info(f'SRM: profile = {profile}')
if CustomOrder.is_multilevel_approval(order):
logger.info(f'SRM: is multilevel -- approving GRMs')
CustomOrder.approve_my_grms(order, profile)
if all(CustomOrder.is_multilevel_approval(order).values()):
logger.info(f'SRM: all values return true - can approve')
else:
logger.info(f'SRM: not all values return true - cant approve')
messages.info(request, "partial approval processed")
return HttpResponseRedirect(reverse('order_detail', args=[order.id]))
try:
jobs, extramsg = order.approve(profile)
if jobs:
# template tweaks the message based on where we are going next
redirect_parsed = urlparse(redirect_url)
msg = loader.render_to_string('orders/approved_msg.html', {
'order': order,
'autoapproved': False,
'num_jobs': len(jobs),
'extramsg': extramsg,
'request': request,
'redirect_url': redirect_parsed.path,
})
else:
msg = extramsg
messages.info(request, msg)
except QuotaError as e:
messages.error(request, e)
except CloudBoltException as e:
messages.warning(request, e)
except:
raise
elif action == 'cancel':
if not order.can_cancel(profile):
return access_denied(
request, _("You do not have permission to cancel this order."))
order.cancel()
if order.owner:
clear_cached_submenu(order.owner.user_id, 'orders')
msg = _("Order #{order_id} has been canceled.").format(order_id=order.id)
messages.info(request, msg)
elif action == 'clear':
order.group = None
order.blueprint = None
order.save()
for order_item in order.orderitem_set.all():
order_item.delete()
if order.owner:
clear_cached_submenu(order.owner.user_id, 'orders')
messages.success(request, _("Your current order has been cleared."))
elif action == 'remind':
logger.info(_("User requested order approval reminder for order {order_id}").format(order_id=order_id))
try:
msg = order.send_reminder(request)
logger.debug(msg)
messages.info(request, msg)
except InvalidConfigurationException as e:
messages.error(request, e)
elif action == 'duplicate':
# Global Viewers are a special case where objects_for_profile will
# return True since they can view all orders, but we don't want them to
# be able to do anything like duplicate it (unless they have additional
# permissions)
duplicable, reason = order.can_duplicate(profile)
if not duplicable:
if reason == 'permission':
return access_denied(
request, _("You do not have permission to duplicate this order."))
elif reason == 'group':
messages.error(request, _("Orders with no group cannot be duplicated."))
return HttpResponseRedirect(reverse('order_detail', args=[order.id]))
try:
profile = request.get_user_profile()
cart = profile.get_current_order()
cart = order.duplicate(cart)
items_duplicated = cart.items_duplicated
hostnames_updated = cart.hostnames_updated
msg = ungettext("Duplicated {num_items} order item under "
"<a href='{url}'>your current order</a>.",
"Duplicated {num_items} order items under "
"<a href='{url}'>your current order</a>.",
items_duplicated).format(num_items=items_duplicated,
url=cart.get_absolute_url())
if hostnames_updated:
uniq_msg = ungettext("{updated_count} order item was updated to "
"avoid creating identical hostnames.",
"{updated_count} order items were updated to "
"avoid creating identical hostnames.",
hostnames_updated).format(updated_count=hostnames_updated)
msg += uniq_msg
clear_cached_submenu(profile.user_id, 'orders')
messages.success(request, msg)
return HttpResponseRedirect(reverse('current_order'))
except InvalidCartException as e:
messages.error(request, e)
elif action == 'save_as_blueprint':
profile = request.get_user_profile()
if order.group and not profile.has_permission('blueprint.manage', order.group):
return access_denied(
request, _("You need to have blueprint management permission for "
"group '{group}' to create a blueprint from this order.").format(group=order.group))
bp = ServiceBlueprint.from_order(order)
clear_cached_submenu(profile.user_id, 'catalog')
messages.success(
request,
_("Successfully saved the <a href='{order_url}'>order</a> "
"as blueprint <a href='{blueprint_url}'>{blueprint_name}</a>").format(
order_url=order.get_absolute_url(),
blueprint_url=bp.get_absolute_url(),
blueprint_name=bp.name))
redirect_url = bp.get_absolute_url()
elif action == 'add_to_cit':
if can_order_be_tested(order):
cit_test = CITTest.objects.create(
name=order.name,
order=order,
cit_conf=CITConf.objects.first(),
expected_status=order.status,
)
messages.success(
request,
_('Created CIT test "{}". It will be automatically tested during '
'the next test run.'.format(link_or_label(cit_test, profile)))
)
else:
messages.error(request, "This order could not be added to CIT.")
return HttpResponseRedirect(redirect_url)
| apache-2.0 | -6,349,238,309,283,162,000 | 38.298295 | 111 | 0.582023 | false | 4.323377 | true | false | false |
| LeandroRoberto/sapl | sapl/comissoes/views.py | 1 | 3353 |
from django.core.urlresolvers import reverse
from django.db.models import F
from django.views.generic import ListView
from sapl.crud.base import RP_DETAIL, RP_LIST, Crud, CrudAux, MasterDetailCrud
from sapl.materia.models import MateriaLegislativa, Tramitacao
from .models import (CargoComissao, Comissao, Composicao, Participacao,
Periodo, TipoComissao)
def pegar_url_composicao(pk):
participacao = Participacao.objects.get(id=pk)
comp_pk = participacao.composicao.pk
url = reverse('sapl.comissoes:composicao_detail', kwargs={'pk': comp_pk})
return url
CargoCrud = CrudAux.build(CargoComissao, 'cargo_comissao')
PeriodoComposicaoCrud = CrudAux.build(Periodo, 'periodo_composicao_comissao')
TipoComissaoCrud = CrudAux.build(
TipoComissao, 'tipo_comissao', list_field_names=[
'sigla', 'nome', 'natureza', 'dispositivo_regimental'])
class ParticipacaoCrud(MasterDetailCrud):
model = Participacao
parent_field = 'composicao__comissao'
public = [RP_DETAIL, ]
ListView = None
is_m2m = True
link_return_to_parent_field = True
class BaseMixin(MasterDetailCrud.BaseMixin):
list_field_names = ['composicao', 'parlamentar', 'cargo']
class ComposicaoCrud(MasterDetailCrud):
model = Composicao
parent_field = 'comissao'
model_set = 'participacao_set'
public = [RP_LIST, RP_DETAIL, ]
class ListView(MasterDetailCrud.ListView):
template_name = "comissoes/composicao_list.html"
paginate_by = None
def take_composicao_pk(self):
try:
return int(self.request.GET['pk'])
except:
return 0
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['composicao_pk'] = context['composicao_list'].last(
).pk if self.take_composicao_pk(
) == 0 else self.take_composicao_pk()
context['participacao_set'] = Participacao.objects.filter(
composicao__pk=context['composicao_pk']
).order_by('parlamentar')
return context
class ComissaoCrud(Crud):
model = Comissao
help_path = 'modulo_comissoes'
public = [RP_LIST, RP_DETAIL, ]
class BaseMixin(Crud.BaseMixin):
list_field_names = ['nome', 'sigla', 'tipo', 'data_criacao', 'ativa']
ordering = '-ativa', 'sigla'
class MateriasTramitacaoListView(ListView):
template_name = "comissoes/materias_em_tramitacao.html"
paginate_by = 10
def get_queryset(self):
# FIXME: Otimizar consulta
ts = Tramitacao.objects.order_by(
'materia', '-data_tramitacao', '-id').annotate(
comissao=F('unidade_tramitacao_destino__comissao')).distinct(
'materia').values_list('materia', 'comissao')
ts = list(filter(lambda x: x[1] == int(self.kwargs['pk']), ts))
ts = list(zip(*ts))
ts = ts[0] if ts else []
materias = MateriaLegislativa.objects.filter(
pk__in=ts).order_by('tipo', '-ano', '-numero')
return materias
def get_context_data(self, **kwargs):
context = super(
MateriasTramitacaoListView, self).get_context_data(**kwargs)
context['object'] = Comissao.objects.get(id=self.kwargs['pk'])
return context
| gpl-3.0 | 4,696,481,446,960,925,000 | 32.53 | 78 | 0.638831 | false | 3.012579 | false | false | false |
| mdovgialo/steam-vr-wheel | steam_vr_wheel/pyvjoy/_wrapper.py | 1 | 2789 |
import os
import sys
from ctypes import *
dll_filename = "vJoyInterface.dll"
dll_path = os.path.dirname(__file__) + os.sep + dll_filename
try:
_vj = cdll.LoadLibrary(dll_path)
except OSError:
sys.exit("Unable to load vJoy SDK DLL. Ensure that %s is present" % dll_filename)
def vJoyEnabled():
"""Returns True if vJoy is installed and enabled"""
result = _vj.vJoyEnabled()
if result == 0:
raise vJoyNotEnabledException()
else:
return True
def DriverMatch():
"""Check if the version of vJoyInterface.dll and the vJoy Driver match"""
result = _vj.DriverMatch()
if result == 0:
raise vJoyDriverMismatch()
else:
return True
def GetVJDStatus(rID):
"""Get the status of a given vJoy Device"""
return _vj.GetVJDStatus(rID)
def AcquireVJD(rID):
"""Attempt to acquire a vJoy Device"""
result = _vj.AcquireVJD(rID)
if result == 0:
#Check status
status = GetVJDStatus(rID)
if status != VJD_STAT_FREE:
raise vJoyFailedToAcquireException("Cannot acquire vJoy Device because it is not in VJD_STAT_FREE")
else:
raise vJoyFailedToAcquireException()
else:
return True
def RelinquishVJD(rID):
"""Relinquish control of a vJoy Device"""
result = _vj.RelinquishVJD(rID)
if result == 0:
raise vJoyFailedToRelinquishException()
else:
return True
def SetBtn(state,rID,buttonID):
"""Sets the state of vJoy Button to on or off. SetBtn(state,rID,buttonID)"""
result = _vj.SetBtn(state,rID,buttonID)
if result == 0:
raise vJoyButtonError()
else:
return True
def SetDiscPov(PovValue, rID, PovID):
"""Write Value to a given discrete POV defined in the specified VDJ"""
if PovValue < -1 or PovValue > 3:
raise vJoyInvalidPovValueException()
if PovID < 1 or PovID > 4:
raise vJoyInvalidPovIDException
return _vj.SetDiscPov(PovValue,rID,PovID)
def SetContPov(PovValue, rID, PovID):
"""Write Value to a given continuous POV defined in the specified VDJ"""
if PovValue < -1 or PovValue > 35999:
raise vJoyInvalidPovValueException()
if PovID < 1 or PovID > 4:
raise vJoyInvalidPovIDException
return _vj.SetContPov(PovValue,rID,PovID)
def SetBtn(state,rID,buttonID):
"""Sets the state of vJoy Button to on or off. SetBtn(state,rID,buttonID)"""
result = _vj.SetBtn(state,rID,buttonID)
if result == 0:
raise vJoyButtonError()
else:
return True
def ResetVJD(rID):
"""Reset all axes and buttons to default for specified vJoy Device"""
return _vj.ResetVJD(rID)
def ResetButtons(rID):
"""Reset all buttons to default for specified vJoy Device"""
return _vj.ResetButtons(rID)
def ResetPovs(rID):
"""Reset all POV hats to default for specified vJoy Device"""
return _vj.ResetPovs(rID)
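# A typical calling sequence, sketched for illustration (device id 1 is
# assumed to be configured in vJoy):
#
#     if vJoyEnabled():
#         AcquireVJD(1)
#         SetBtn(1, 1, 1)   # press button 1 on device 1
#         SetBtn(0, 1, 1)   # release it
#         ResetVJD(1)
#         RelinquishVJD(1)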
| mit | 8,956,066,866,461,069,000 | 21.241667 | 102 | 0.688777 | false | 2.783433 | false | false | false |
| millken/simple-rtmp-server | trunk/research/community/server.py | 1 | 4633 |
#!/usr/bin/python
'''
The MIT License (MIT)
Copyright (c) 2013-2014 winlin
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
"""
the community is a default demo server for srs
"""
import sys
# reload the sys module to enable the setdefaultencoding method.
reload(sys)
# set the default encoding to utf-8
# using exec to set the encoding, to avoid error in IDE.
exec("sys.setdefaultencoding('utf-8')")
assert sys.getdefaultencoding().lower() == "utf-8"
import os, json, time, datetime, cherrypy, threading
# simple log functions.
def trace(msg):
date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print "[%s][trace] %s"%(date, msg)
# enable crossdomain access for js-clients.
# to let a js client make crossdomain requests, define the following
# method on the handler and invoke enable_crossdomain() from it:
#     def OPTIONS(self, *args, **kwargs):
#         enable_crossdomain()
def enable_crossdomain():
cherrypy.response.headers["Access-Control-Allow-Origin"] = "*"
cherrypy.response.headers["Access-Control-Allow-Methods"] = "GET, POST, HEAD, PUT, DELETE"
# generate allow headers for crossdomain.
allow_headers = ["Cache-Control", "X-Proxy-Authorization", "X-Requested-With", "Content-Type"]
cherrypy.response.headers["Access-Control-Allow-Headers"] = ",".join(allow_headers)
# error codes definition
class Error:
# ok, success, completed.
success = 0
# HTTP RESTful path.
class Root(object):
exposed = True
def __init__(self):
self.api = Api()
def GET(self):
enable_crossdomain();
return json.dumps({"code":Error.success, "urls":{"api":"the api root"}})
def OPTIONS(self, *args, **kwargs):
enable_crossdomain();
# HTTP RESTful path.
class Api(object):
exposed = True
def __init__(self):
self.v1 = V1()
def GET(self):
enable_crossdomain();
return json.dumps({"code":Error.success,
"urls": {
"v1": "the api version 1.0"
}
});
def OPTIONS(self, *args, **kwargs):
enable_crossdomain();
# HTTP RESTful path. to access as:
# http://127.0.0.1:8085/api/v1/clients
class V1(object):
exposed = True
def __init__(self):
pass;
def OPTIONS(self, *args, **kwargs):
enable_crossdomain();
'''
main code start.
'''
# do not support using this module as a library.
if __name__ != "__main__":
raise Exception("embed not support")
# check the user options
if len(sys.argv) <= 1:
print "SRS community server, Copyright (c) 2013-2014 winlin"
print "Usage: python %s <port>"%(sys.argv[0])
print " port: the port to listen at."
print "For example:"
print " python %s 1949"%(sys.argv[0])
print ""
print "See also: https://github.com/winlinvip/simple-rtmp-server"
sys.exit(1)
# parse port from user options.
port = int(sys.argv[1])
static_dir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "static-dir"))
trace("api server listen at port: %s, static_dir: %s"%(port, static_dir))
# cherrypy config.
conf = {
'global': {
'server.shutdown_timeout': 1,
'server.socket_host': '0.0.0.0',
'server.socket_port': port,
'tools.encode.on': True,
'tools.staticdir.on': True,
'tools.encode.encoding': "utf-8",
#'server.thread_pool': 2, # single thread server.
},
'/': {
'tools.staticdir.dir': static_dir,
'tools.staticdir.index': "index.html",
# for cherrypy RESTful api support
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
}
}
# start cherrypy web engine
trace("start cherrypy server")
root = Root()
cherrypy.quickstart(root, '/', conf)
| mit | -3,654,521,204,986,091,500 | 31.398601 | 98 | 0.668465 | false | 3.671157 | false | false | false |
| golya/FuzzLabs | engine/tests/steps/modules.py | 1 | 1961 |
from behave import *
import os
import sys
import inspect
ROOT_DIR = os.path.dirname(
os.path.abspath(
inspect.getfile(inspect.currentframe()
)))
sys.path.append(ROOT_DIR + "/../../classes")
from ConfigurationHandler import ConfigurationHandler
from ModuleHandler import ModuleHandler
@given('we have root and config')
def step_impl(context):
assert os.path.isfile(ROOT_DIR + "/../../etc/engine.config")
context.root = ROOT_DIR + "/../../"
config_file = ROOT_DIR + "/../../etc/engine.config"
context.config_data = ConfigurationHandler(config_file).get()
@when('we load the modules')
def step_impl(context):
context.module_inst = ModuleHandler(context.root, context.config_data)
context.modules_list = context.module_inst.loaded_modules
@then('we get a list of modules')
def step_impl(context):
status = type(context.modules_list) == list
if status:
for module in context.modules_list:
if not module.get('instance') or \
not module.get('name') or \
not module.get('mtime') or \
not module.get('type'):
status = False
break
context.module_inst.unload_modules()
assert status
@given('we have modules loaded')
def step_impl(context):
assert os.path.isfile(ROOT_DIR + "/../../etc/engine.config")
root_dir = ROOT_DIR + "/../../"
config_file = ROOT_DIR + "/../../etc/engine.config"
config_data = ConfigurationHandler(config_file).get()
context.module_inst = ModuleHandler(root_dir, config_data)
context.modules_list = context.module_inst.loaded_modules
status = type(context.modules_list) == list
assert status
@when('we unload the modules')
def step_impl(context):
context.module_inst.unload_modules()
@then('we get an empty list')
def step_impl(context):
assert context.module_inst.loaded_modules == []
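# For reference, a feature file exercising these steps might look like the
# sketch below (illustrative only; the real .feature file lives elsewhere
# in the repository):
#
#   Feature: module handling
#     Scenario: loading modules
#       Given we have root and config
#       When we load the modules
#       Then we get a list of modules
#     Scenario: unloading modules
#       Given we have modules loaded
#       When we unload the modules
#       Then we get an empty list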
| gpl-2.0 | 9,094,677,830,503,925,000 | 29.640625 | 74 | 0.63743 | false | 3.793037 | true | false | false |
| amanzi/ats-dev | tools/utils/transect_data.py | 2 | 7741 |
"""Loads and/or plots 2D, topologlically structured data on quadrilaterals using matplotlib.
"""
import sys,os
import numpy as np
import h5py
import mesh
import colors
def fullname(varname):
fullname = varname
if not '.cell.' in fullname:
fullname = fullname+'.cell.0'
return fullname
def transect_data(varnames, keys='all', directory=".", filename="visdump_data.h5",
mesh_filename="visdump_mesh.h5", coord_order=None, deformable=False, return_map=False):
"""Pulls simulation output into structured 2D arrays for transect-based, (i,j) indexing.
Input:
varnames | A list of variable names to pull, e.g.
| ['saturation_liquid', 'saturation_ice'], or a single variable
| name, e.g. 'saturation_liquid'
keys | Indices of timesteps to pull. Either an int (i.e. 0, -1, etc)
| for the kth timestep, or a list of ints, or 'all'.
directory | Directory of the run. Defaults to '.'
filename | Filename of the run. Defaults to 'visdump_data.h5'
mesh_filename | Filename of the mesh. Defaults to 'visdump_mesh.h5'
coord_order | Order of the transect coordinates. Defaults to ['x','z']. The
| mesh is sorted in this order.
deformable | Is the mesh deforming?
return_map | See return value below.
Output:
Output is an array of shape:
( len(varnames+2), len(keys), n_cells_coord_order[0], n_cells_coord_order[1] )
data[0,0,:,:] is the coord_order[0] centroid
data[1,0,:,:] is the coord_order[1] centroid
data[i+2,k,:,:] is the ith varname data at the kth requested timestep, sorted in
the same way as the centroids.
Note that the data is re-ordered in INCREASING coordinate, i.e. bottom to top in z.
If return_map is True, then returns a tuple, (data, map) where
map is a (NX,NZ) array of integers specifying which global id
corresponds to the (i,j) cell. This is useful for mapping input
data back INTO the unstructured mesh.
Example usage:
Calculate and plot the thaw depth at step 5.
// Pull saturation ice -- TD is where sat ice = 0.
data = transect_data(['saturation_ice'], 5)
// x coordinate for plotting
x = data[0,0,:,0]
// for each column, find highest z where sat_ice > 0.
td_i = np.array([np.where(data[2,0,i,:] > 0.)[0][-1] for i in range(data.shape[2])])
// now that we have an index into the highest cell with ice, determine td as the
// mean of the highest cell with ice and the one above that. Note this assumes
// all columns have some thawing.
td_z = np.array( [ (data[1,0,i,td_i[i]] + data[1,0,i,td_i[i]+1]) / 2.
for i in range(len(td_i)) ] )
plt.plot(x, td_z)
"""
if coord_order is None:
coord_order = ['x','z']
if type(varnames) is str:
varnames = [varnames,]
# get centroids
xyz = mesh.meshElemCentroids(mesh_filename, directory)
# round to avoid issues
xyz = np.round(xyz, decimals=5)
# get ordering of centroids
dtype = [(coord_order[0], float), (coord_order[1], float)]
num_order = []
for i in coord_order:
if i == 'x':
num_order.append(0)
elif i == 'y':
num_order.append(1)
elif i == 'z':
num_order.append(2)
xyz_sort_order = np.array([tuple([xyz[i,x] for x in num_order]) for i in range(len(xyz))], dtype=dtype)
xyz_sorting = xyz_sort_order.argsort(order=coord_order)
with h5py.File(os.path.join(directory,filename),'r') as dat:
keys_avail = dat[fullname(varnames[0])].keys()
keys_avail.sort(lambda a,b: int.__cmp__(int(a),int(b)))
if keys == 'all':
keys = keys_avail
elif type(keys) is str:
keys = [keys,]
elif type(keys) is int:
keys = [keys_avail[keys],]
elif type(keys) is slice:
keys = keys_avail[keys]
elif type(keys) is list:
if all(type(k) is int for k in keys):
keys = [keys_avail[k] for k in keys]
elif all(type(k) is str for k in keys):
pass
else:
raise RuntimeError("Keys requested cannot be processed -- should be 'all', int, or str key, or list of ints or strs.")
# get data
vals = np.zeros((len(varnames)+2, len(keys), len(xyz)), 'd')
for i,key in enumerate(keys):
if deformable:
xyz = mesh.meshElemCentroids(mesh_filename, directory)
vals[0,i,:] = xyz[xyz_sorting,num_order[0]]
vals[1,i,:] = xyz[xyz_sorting,num_order[1]]
for j,varname in enumerate(varnames):
vals[j+2,i,:] = dat[fullname(varname)][key][:,0][xyz_sorting]
# reshape the data
# determine nx
nx = len(set(vals[0,0,:]))
nz = vals.shape[2] / nx
if (nx * nz != vals.shape[2]):
raise RuntimeError("Assumption about first coordinate being cleanly binnable is falling apart -- ask Ethan to rethink this algorithm!")
shp = vals.shape
if not return_map:
return vals.reshape(shp[0], shp[1], nx, nz)
else:
return vals.reshape(shp[0], shp[1], nx, nz), xyz_sorting.reshape(nx, nz)
def plot(dataset, ax, cax=None, vmin=None, vmax=None, cmap="jet",
label=None, mesh_filename="visdump_mesh.h5", directory=".", y_coord=0.0,
linewidths=1):
"""Draws a dataset on an ax."""
import matplotlib.collections
from matplotlib import pyplot as plt
if vmin is None:
vmin = dataset.min()
if vmax is None:
vmax = dataset.max()
# get the mesh and collapse to 2D
etype, coords, conn = mesh.meshElemXYZ(filename=mesh_filename, directory=directory)
if etype != 'HEX':
raise RuntimeError("Only works for Hexs")
coords2 = np.array([[coords[i][0::2] for i in c[1:] if abs(coords[i][1] - y_coord) < 1.e-8] for c in conn])
try:
assert coords2.shape[2] == 2
assert coords2.shape[1] == 4
except AssertionError:
print(coords2.shape)
for c in conn:
if len(c) != 9:
                print(c)
raise RuntimeError("what is a conn?")
coords3 = np.array([coords[i][:] for i in c[1:] if abs(coords[i][1] - y_coord) < 1.e-8])
if coords3.shape[0] != 4:
                print(coords)
raise RuntimeError("Unable to squash to 2D")
# reorder anti-clockwise
for i,c in enumerate(coords2):
centroid = c.mean(axis=0)
def angle(p1,p2):
a1 = np.arctan2((p1[1]-centroid[1]),(p1[0]-centroid[0]))
a2 = np.arctan2((p2[1]-centroid[1]),(p2[0]-centroid[0]))
if a1 < a2:
return -1
elif a2 < a1:
return 1
else:
return 0
c2 = np.array(sorted(c,angle))
coords2[i] = c2
polygons = matplotlib.collections.PolyCollection(coords2, edgecolor='k', cmap=cmap, linewidths=linewidths)
polygons.set_array(dataset)
polygons.set_clim(vmin,vmax)
ax.add_collection(polygons)
xmin = min(c[0] for c in coords.itervalues())
xmax = max(c[0] for c in coords.itervalues())
zmin = min(c[2] for c in coords.itervalues())
zmax = max(c[2] for c in coords.itervalues())
ax.set_xlim(xmin,xmax)
ax.set_ylim(zmin,zmax)
if cax is not None:
cb = plt.colorbar(polygons, cax=cax)
if label is not None:
cb.set_label(label)
return ((xmin,xmax),(zmin,zmax))
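# Illustrative usage sketch (editor's addition): plot() expects a 1D array with
# one value per mesh element; `my_cell_values` below is a placeholder for such
# an array.  mesh_filename and directory default to "visdump_mesh.h5" in the
# current directory.
#
#   from matplotlib import pyplot as plt
#   fig, ax = plt.subplots()
#   (xlim, zlim) = plot(my_cell_values, ax)
#   plt.show()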
|
bsd-3-clause
| -6,002,022,548,617,249,000
| 35.687204
| 143
| 0.575119
| false
| 3.435863
| false
| false
| false
|
gunan/tensorflow
|
tensorflow/python/keras/layers/preprocessing/benchmarks/categorical_encoding_benchmark.py
|
1
|
3177
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark for Keras categorical_encoding preprocessing layer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from absl import flags
import numpy as np
from tensorflow.python import keras
from tensorflow.python.compat import v2_compat
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.keras.layers.preprocessing import categorical_encoding
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
FLAGS = flags.FLAGS
v2_compat.enable_v2_behavior()
class BenchmarkLayer(benchmark.Benchmark):
"""Benchmark the layer forward pass."""
def run_dataset_implementation(self, output_mode, batch_size, sequence_length,
max_tokens):
input_t = keras.Input(shape=(sequence_length,), dtype=dtypes.int32)
layer = categorical_encoding.CategoricalEncoding(
max_tokens=max_tokens, output_mode=output_mode)
_ = layer(input_t)
num_repeats = 5
starts = []
ends = []
for _ in range(num_repeats):
ds = dataset_ops.Dataset.from_tensor_slices(
random_ops.random_uniform([batch_size * 10, sequence_length],
minval=0,
maxval=max_tokens - 1,
dtype=dtypes.int32))
ds = ds.shuffle(batch_size * 100)
ds = ds.batch(batch_size)
num_batches = 5
ds = ds.take(num_batches)
ds = ds.prefetch(num_batches)
starts.append(time.time())
# Benchmarked code begins here.
for i in ds:
_ = layer(i)
# Benchmarked code ends here.
ends.append(time.time())
avg_time = np.mean(np.array(ends) - np.array(starts)) / num_batches
name = "categorical_encoding|batch_%s|seq_length_%s|%s_max_tokens" % (
batch_size, sequence_length, max_tokens)
self.report_benchmark(iters=num_repeats, wall_time=avg_time, name=name)
def benchmark_vocab_size_by_batch(self):
for batch in [32, 256, 2048]:
for sequence_length in [10, 1000]:
for num_tokens in [100, 1000, 20000]:
self.run_dataset_implementation(
output_mode="count",
batch_size=batch,
sequence_length=sequence_length,
max_tokens=num_tokens)
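# Editor's sketch (not part of the original benchmark): the timing helper above
# can also be driven directly for a single configuration, e.g.
#
#   b = BenchmarkLayer()
#   b.run_dataset_implementation(output_mode="count", batch_size=32,
#                                sequence_length=10, max_tokens=100)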
if __name__ == "__main__":
test.main()
|
apache-2.0
| 5,262,048,413,489,094,000
| 35.517241
| 80
| 0.652188
| false
| 4.052296
| false
| false
| false
|
mommermi/callhorizons
|
callhorizons/callhorizons.py
|
1
|
60381
|
"""CALLHORIZONS - a Python interface to access JPL HORIZONS
ephemerides and orbital elements.
This module provides a convenient python interface to the JPL
HORIZONS system by directly accessing and parsing the HORIZONS
website. Ephemerides can be obtained through get_ephemerides,
orbital elements through get_elements. Function
export2pyephem provides an interface to the PyEphem module.
michael.mommert (at) nau.edu, latest version: v1.0.5, 2017-05-05.
This code is inspired by code created by Alex Hagen.
* v1.0.5: 15-epoch limit for set_discreteepochs removed
* v1.0.4: improved asteroid and comet name parsing
* v1.0.3: ObsEclLon and ObsEclLat added to get_ephemerides
* v1.0.2: Python 3.5 compatibility implemented
* v1.0.1: get_ephemerides fixed
* v1.0: bugfixes completed, planets/satellites accessible, too
* v0.9: first release
"""
from __future__ import (print_function, unicode_literals)
import re
import sys
import time
import numpy as np
import warnings
try:
# Python 3
import urllib.request as urllib
except ImportError:
# Python 2
import urllib2 as urllib
warnings.filterwarnings('once', category=DeprecationWarning)
warnings.warn(('CALLHORIZONS is not maintained anymore; please use '
'astroquery.jplhorizons instead (https://github.com/'
'astropy/astroquery)'),
DeprecationWarning)
def _char2int(char):
""" translate characters to integer values (upper and lower case)"""
if char.isdigit():
return int(float(char))
if char.isupper():
return int(char, 36)
else:
return 26 + int(char, 36)
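# Illustrative sketch (editor's addition): _char2int implements the packed
# designation alphabet used by parse_asteroid below, e.g.
#
#   _char2int('7')   # -> 7   (digits map to themselves)
#   _char2int('K')   # -> 20  (upper case: A=10 ... Z=35, so 'K07...' -> 2007)
#   _char2int('a')   # -> 36  (lower case continues at 36)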
class query():
# constructor
def __init__(self, targetname, smallbody=True, cap=True, nofrag=False,
comet=False, asteroid=False):
"""Initialize query to Horizons
:param targetname: HORIZONS-readable target number, name, or designation
:param smallbody: boolean use ``smallbody=False`` if targetname is a
planet or spacecraft (optional, default: `True`);
also use `True` if the targetname is exact and
should be queried as is
:param cap: set to `True` to return the current apparition for
comet targets
:param nofrag: set to `True` to disable HORIZONS's comet
fragment search
:param comet: set to `True` if this is a comet (will override
automatic targetname parsing)
:param asteroid: set to `True` if this is an asteroid (will override
automatic targetname parsing)
:return: None
"""
self.targetname = str(targetname)
self.not_smallbody = not smallbody
self.cap = cap
self.nofrag = nofrag
self.comet = comet # is this object a comet?
self.asteroid = asteroid # is this object an asteroid?
self.start_epoch = None
self.stop_epoch = None
self.step_size = None
self.discreteepochs = None
self.url = None
self.data = None
assert not (
self.comet and self.asteroid), 'Only one of comet or asteroid can be `True`.'
return None
# small body designation parsing
def parse_comet(self):
"""Parse `targetname` as if it were a comet.
:return: (string or None, int or None, string or None);
The designation, number and prefix, and name of the comet as derived
from `self.targetname` are extracted into a tuple; each element that
does not exist is set to `None`. Parenthesis in `self.targetname`
will be ignored.
:example: the following table shows the result of the parsing:
+--------------------------------+--------------------------------+
|targetname |(desig, prefixnumber, name) |
+================================+================================+
|1P/Halley |(None, '1P', 'Halley') |
+--------------------------------+--------------------------------+
|3D/Biela |(None, '3D', 'Biela') |
+--------------------------------+--------------------------------+
|9P/Tempel 1 |(None, '9P', 'Tempel 1') |
+--------------------------------+--------------------------------+
|73P/Schwassmann Wachmann 3 C |(None, '73P', |
| |'Schwassmann Wachmann 3 C') |
+--------------------------------+--------------------------------+
|73P-C/Schwassmann Wachmann 3 C |(None, '73P-C', |
| |'Schwassmann Wachmann 3 C') |
+--------------------------------+--------------------------------+
|73P-BB |(None, '73P-BB', None) |
+--------------------------------+--------------------------------+
|322P |(None, '322P', None) |
+--------------------------------+--------------------------------+
        |X/1106 C1                       |('1106 C1', 'X', None)          |
+--------------------------------+--------------------------------+
|P/1994 N2 (McNaught-Hartley) |('1994 N2', 'P', |
| |'McNaught-Hartley') |
+--------------------------------+--------------------------------+
|P/2001 YX127 (LINEAR) |('2001 YX127', 'P', 'LINEAR') |
+--------------------------------+--------------------------------+
|C/-146 P1 |('-146 P1', 'C', None) |
+--------------------------------+--------------------------------+
|C/2001 A2-A (LINEAR) |('2001 A2-A', 'C', 'LINEAR') |
+--------------------------------+--------------------------------+
|C/2013 US10 |('2013 US10', 'C', None) |
+--------------------------------+--------------------------------+
|C/2015 V2 (Johnson) |('2015 V2', 'C', 'Johnson') |
+--------------------------------+--------------------------------+
|C/2016 KA (Catalina) |('2016 KA', 'C', 'Catalina') |
+--------------------------------+--------------------------------+
"""
import re
pat = ('^(([1-9]+[PDCXAI](-[A-Z]{1,2})?)|[PDCXAI]/)' + # prefix [0,1,2]
'|([-]?[0-9]{3,4}[ _][A-Z]{1,2}([0-9]{1,3})?(-[1-9A-Z]{0,2})?)' +
# designation [3,4]
('|(([A-Z][a-z]?[A-Z]*[a-z]*[ -]?[A-Z]?[1-9]*[a-z]*)' +
'( [1-9A-Z]{1,2})*)') # name [5,6]
)
m = re.findall(pat, self.targetname.strip())
# print(m)
prefixnumber = None
desig = None
name = None
if len(m) > 0:
for el in m:
# prefix/number
if len(el[0]) > 0:
prefixnumber = el[0].replace('/', '')
# designation
if len(el[3]) > 0:
desig = el[3].replace('_', ' ')
# name
if len(el[5]) > 0:
if len(el[5]) > 1:
name = el[5]
return (desig, prefixnumber, name)
def parse_asteroid(self):
"""Parse `targetname` as if it were a asteroid.
:return: (string or None, int or None, string or None);
The designation, number, and name of the asteroid as derived from
`self.targetname` are extracted into a tuple; each element that
does not exist is set to `None`. Parenthesis in `self.targetname`
will be ignored. Packed designations and numbers are unpacked.
:example: the following table shows the result of the parsing:
+--------------------------------+---------------------------------+
|targetname |(desig, number, name) |
+================================+=================================+
|1 |(None, 1, None) |
+--------------------------------+---------------------------------+
|2 Pallas |(None, 2, Pallas) |
+--------------------------------+---------------------------------+
|\(2001\) Einstein |(None, 2001, Einstein) |
+--------------------------------+---------------------------------+
|1714 Sy |(None, 1714, Sy) |
+--------------------------------+---------------------------------+
|2014 MU69 |(2014 MU69, None, None) |
+--------------------------------+---------------------------------+
|(228195) 6675 P-L |(6675 P-L, 228195, None) |
+--------------------------------+---------------------------------+
|4101 T-3 |(4101 T-3, None, None) |
+--------------------------------+---------------------------------+
|4015 Wilson-Harrington (1979 VA)|(1979 VA, 4015, Wilson-Harrington|
+--------------------------------+---------------------------------+
|J95X00A |(1995 XA, None, None) |
+--------------------------------+---------------------------------+
|K07Tf8A |(2007 TA418, None, None) |
+--------------------------------+---------------------------------+
|G3693 |(None, 163693, None) |
+--------------------------------+---------------------------------+
|2017 U1 |(None, None, None) |
+--------------------------------+---------------------------------+
"""
pat = ('(([1-2][0-9]{0,3}[ _][A-Z]{2}[0-9]{0,3})' # designation [0,1]
'|([1-9][0-9]{3}[ _](P-L|T-[1-3])))' # Palomar-Leiden [0,2,3]
'|([IJKL][0-9]{2}[A-Z][0-9a-z][0-9][A-Z])' # packed desig [4]
'|([A-Za-z][0-9]{4})' # packed number [5]
'|([A-Z][A-Z]*[a-z][a-z]*[^0-9]*'
'[ -]?[A-Z]?[a-z]*[^0-9]*)' # name [6]
'|([1-9][0-9]*(\b|$))') # number [7,8]
# regex patterns that will be ignored as they might cause
# confusion
non_pat = ('([1-2][0-9]{0,3}[ _][A-Z][0-9]*(\b|$))') # comet desig
if sys.version_info > (3, 0):
raw = self.targetname.translate(str.maketrans('()', ' ')).strip()
else:
import string
raw = self.targetname.translate(string.maketrans('()',
' ')).strip()
# reject non_pat patterns
non_m = re.findall(non_pat, raw)
# print('reject', raw, non_m)
if len(non_m) > 0:
for ps in non_m:
for p in ps:
if p == '':
continue
raw = raw[:raw.find(p)] + raw[raw.find(p)+len(p):]
# match target patterns
m = re.findall(pat, raw)
# print(raw, m)
desig = None
number = None
name = None
if len(m) > 0:
for el in m:
# designation
if len(el[0]) > 0:
desig = el[0]
# packed designation (unpack here)
elif len(el[4]) > 0:
ident = el[4]
# old designation style, e.g.: 1989AB
if (len(ident.strip()) < 7 and ident[:4].isdigit() and
ident[4:6].isalpha()):
desig = ident[:4]+' '+ident[4:6]
# Palomar Survey
elif ident.find("PLS") == 0:
desig = ident[3:] + " P-L"
# Trojan Surveys
elif ident.find("T1S") == 0:
desig = ident[3:] + " T-1"
elif ident.find("T2S") == 0:
desig = ident[3:] + " T-2"
elif ident.find("T3S") == 0:
desig = ident[3:] + " T-3"
# insert blank in designations
elif (ident[0:4].isdigit() and ident[4:6].isalpha() and
ident[4] != ' '):
desig = ident[:4]+" "+ident[4:]
# MPC packed 7-digit designation
elif (ident[0].isalpha() and ident[1:3].isdigit() and
ident[-1].isalpha() and ident[-2].isdigit()):
yr = str(_char2int(ident[0]))+ident[1:3]
let = ident[3]+ident[-1]
num = str(_char2int(ident[4]))+ident[5]
num = num.lstrip("0")
desig = yr+' '+let+num
# nothing to do
else:
desig = ident
# packed number (unpack here)
elif len(el[5]) > 0:
ident = el[5]
number = ident = int(str(_char2int(ident[0]))+ident[1:])
# number
elif len(el[7]) > 0:
if sys.version_info > (3, 0):
number = int(float(el[7].translate(str.maketrans('()',
' '))))
else:
import string
number = int(float(el[7].translate(string.maketrans('()',
' '))))
# name (strip here)
elif len(el[6]) > 0:
if len(el[6].strip()) > 1:
name = el[6].strip()
return (desig, number, name)
def isorbit_record(self):
"""`True` if `targetname` appears to be a comet orbit record number.
NAIF record numbers are 6 digits, begin with a '9' and can
change at any time.
"""
import re
test = re.match('^9[0-9]{5}$', self.targetname.strip()) is not None
return test
def iscomet(self):
"""`True` if `targetname` appears to be a comet. """
# treat this object as comet if there is a prefix/number
if self.comet is not None:
return self.comet
elif self.asteroid is not None:
return not self.asteroid
else:
return (self.parse_comet()[0] is not None or
self.parse_comet()[1] is not None)
def isasteroid(self):
"""`True` if `targetname` appears to be an asteroid."""
if self.asteroid is not None:
return self.asteroid
elif self.comet is not None:
return not self.comet
else:
            return any([p is not None for p in self.parse_asteroid()])
# set epochs
def set_epochrange(self, start_epoch, stop_epoch, step_size):
"""Set a range of epochs, all times are UT
:param start_epoch: str;
start epoch of the format 'YYYY-MM-DD [HH-MM-SS]'
:param stop_epoch: str;
final epoch of the format 'YYYY-MM-DD [HH-MM-SS]'
:param step_size: str;
epoch step size, e.g., '1d' for 1 day, '10m' for 10 minutes...
:return: None
:example: >>> import callhorizons
>>> ceres = callhorizons.query('Ceres')
>>> ceres.set_epochrange('2016-02-26', '2016-10-25', '1d')
Note that dates are mandatory; if no time is given, midnight is assumed.
"""
self.start_epoch = start_epoch
self.stop_epoch = stop_epoch
self.step_size = step_size
return None
def set_discreteepochs(self, discreteepochs):
"""Set a list of discrete epochs, epochs have to be given as Julian
Dates
:param discreteepochs: array_like
list or 1D array of floats or strings
:return: None
:example: >>> import callhorizons
>>> ceres = callhorizons.query('Ceres')
>>> ceres.set_discreteepochs([2457446.177083, 2457446.182343])
"""
if not isinstance(discreteepochs, (list, np.ndarray)):
discreteepochs = [discreteepochs]
self.discreteepochs = list(discreteepochs)
# data access functions
@property
def fields(self):
"""returns list of available properties for all epochs"""
try:
return self.data.dtype.names
except AttributeError:
return []
def __len__(self):
"""returns total number of epochs that have been queried"""
try:
# Cast to int because a long is returned from shape on Windows.
return int(self.data.shape[0])
except AttributeError:
return 0
@property
def dates(self):
"""returns list of epochs that have been queried (format 'YYYY-MM-DD HH-MM-SS')"""
try:
return self.data['datetime']
except:
return []
@property
def query(self):
"""returns URL that has been used in calling HORIZONS"""
try:
return self.url
except:
return []
@property
def dates_jd(self):
"""returns list of epochs that have been queried (Julian Dates)"""
try:
return self.data['datetime_jd']
except:
return []
def __repr__(self):
"""returns brief query information"""
return "<callhorizons.query object: %s>" % self.targetname
def __str__(self):
"""returns information on the current query as string"""
output = "targetname: %s\n" % self.targetname
if self.discreteepochs is not None:
output += "discrete epochs: %s\n" % \
" ".join([str(epoch) for epoch in self.discreteepochs])
if (self.start_epoch is not None and self.stop_epoch is not None and
self.step_size is not None):
output += "epoch range from %s to %s in steps of %s\n" % \
(self.start_epoch, self.stop_epoch, self.step_size)
output += "%d data sets queried with %d different fields" % \
(len(self), len(self.fields))
return output
def __getitem__(self, key):
"""provides access to query data
:param key: str/int;
epoch index or property key
:return: query data according to key
"""
# check if data exist
if self.data is None or len(self.data) == 0:
print('CALLHORIZONS ERROR: run get_ephemerides or get_elements',
'first')
return None
return self.data[key]
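    # Illustrative sketch (editor's addition): once get_ephemerides or
    # get_elements has populated the internal structured array, data can be
    # indexed either by field name or by epoch index, e.g.
    #
    #   ceres['RA']   # numpy array of RA values, one entry per queried epoch
    #   ceres[0]      # structured row holding all fields of the first epoch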
# call functions
def get_ephemerides(self, observatory_code,
airmass_lessthan=99,
solar_elongation=(0, 180),
skip_daylight=False):
"""Call JPL HORIZONS website to obtain ephemerides based on the
provided targetname, epochs, and observatory_code. For a list
of valid observatory codes, refer to
http://minorplanetcenter.net/iau/lists/ObsCodesF.html
:param observatory_code: str/int;
observer's location code according to Minor Planet Center
:param airmass_lessthan: float;
maximum airmass (optional, default: 99)
:param solar_elongation: tuple;
permissible solar elongation range (optional, deg)
:param skip_daylight: boolean;
crop daylight epoch during query (optional)
:result: int; number of epochs queried
:example: >>> ceres = callhorizons.query('Ceres')
>>> ceres.set_epochrange('2016-02-23 00:00', '2016-02-24 00:00', '1h')
>>> print (ceres.get_ephemerides(568), 'epochs queried')
The queried properties and their definitions are:
+------------------+-----------------------------------------------+
| Property | Definition |
+==================+===============================================+
| targetname | official number, name, designation [string] |
+------------------+-----------------------------------------------+
| H | absolute magnitude in V band (float, mag) |
+------------------+-----------------------------------------------+
| G | photometric slope parameter (float) |
+------------------+-----------------------------------------------+
| datetime | epoch date and time (str, YYYY-MM-DD HH:MM:SS)|
+------------------+-----------------------------------------------+
| datetime_jd | epoch Julian Date (float) |
+------------------+-----------------------------------------------+
| solar_presence | information on Sun's presence (str) |
+------------------+-----------------------------------------------+
| lunar_presence | information on Moon's presence (str) |
+------------------+-----------------------------------------------+
| RA | target RA (float, J2000.0) |
+------------------+-----------------------------------------------+
| DEC | target DEC (float, J2000.0) |
+------------------+-----------------------------------------------+
        | RA_rate          | target rate RA*cos(DEC) (float, arcsec/s)     |
        +------------------+-----------------------------------------------+
        | DEC_rate         | target rate DEC (float, arcsec/s)             |
+------------------+-----------------------------------------------+
| AZ | Azimuth meas East(90) of North(0) (float, deg)|
+------------------+-----------------------------------------------+
| EL | Elevation (float, deg) |
+------------------+-----------------------------------------------+
| airmass | target optical airmass (float) |
+------------------+-----------------------------------------------+
| magextinct | V-mag extinction due airmass (float, mag) |
+------------------+-----------------------------------------------+
| V | V magnitude (comets: total mag) (float, mag) |
+------------------+-----------------------------------------------+
| illumination | fraction of illuminated disk (float) |
+------------------+-----------------------------------------------+
| EclLon | heliocentr. ecl. long. (float, deg, J2000.0) |
+------------------+-----------------------------------------------+
| EclLat | heliocentr. ecl. lat. (float, deg, J2000.0) |
+------------------+-----------------------------------------------+
| ObsEclLon | obscentr. ecl. long. (float, deg, J2000.0) |
+------------------+-----------------------------------------------+
| ObsEclLat | obscentr. ecl. lat. (float, deg, J2000.0) |
+------------------+-----------------------------------------------+
| r | heliocentric distance (float, au) |
+------------------+-----------------------------------------------+
| r_rate | heliocentric radial rate (float, km/s) |
+------------------+-----------------------------------------------+
| delta | distance from the observer (float, au) |
+------------------+-----------------------------------------------+
| delta_rate | obs-centric radial rate (float, km/s) |
+------------------+-----------------------------------------------+
| lighttime | one-way light time (float, s) |
+------------------+-----------------------------------------------+
| elong | solar elongation (float, deg) |
+------------------+-----------------------------------------------+
| elongFlag | app. position relative to Sun (str) |
+------------------+-----------------------------------------------+
| alpha | solar phase angle (float, deg) |
+------------------+-----------------------------------------------+
| sunTargetPA | PA of Sun->target vector (float, deg, EoN) |
+------------------+-----------------------------------------------+
| velocityPA | PA of velocity vector (float, deg, EoN) |
+------------------+-----------------------------------------------+
| GlxLon | galactic longitude (float, deg) |
+------------------+-----------------------------------------------+
| GlxLat | galactic latitude (float, deg) |
+------------------+-----------------------------------------------+
| RA_3sigma | 3sigma pos. unc. in RA (float, arcsec) |
+------------------+-----------------------------------------------+
| DEC_3sigma | 3sigma pos. unc. in DEC (float, arcsec) |
+------------------+-----------------------------------------------+
"""
# queried fields (see HORIZONS website for details)
# if fields are added here, also update the field identification below
quantities = '1,3,4,8,9,10,18,19,20,21,23,24,27,31,33,36'
# encode objectname for use in URL
objectname = urllib.quote(self.targetname.encode("utf8"))
# construct URL for HORIZONS query
url = "https://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l" \
+ "&TABLE_TYPE='OBSERVER'" \
+ "&QUANTITIES='" + str(quantities) + "'" \
+ "&CSV_FORMAT='YES'" \
+ "&ANG_FORMAT='DEG'" \
+ "&CAL_FORMAT='BOTH'" \
+ "&SOLAR_ELONG='" + str(solar_elongation[0]) + "," \
+ str(solar_elongation[1]) + "'" \
+ "&CENTER='"+str(observatory_code)+"'"
if self.not_smallbody:
url += "&COMMAND='" + \
urllib.quote(self.targetname.encode("utf8")) + "'"
elif self.cap and self.comet:
for ident in self.parse_comet():
if ident is not None:
break
if ident is None:
ident = self.targetname
url += "&COMMAND='DES=" + \
urllib.quote(ident.encode("utf8")) + "%3B" + \
("CAP'" if self.cap else "'")
elif self.isorbit_record():
# Comet orbit record. Do not use DES, CAP. This test must
# occur before asteroid test.
url += "&COMMAND='" + \
urllib.quote(self.targetname.encode("utf8")) + "%3B'"
elif self.isasteroid() and not self.comet:
# for asteroids, use 'DES="designation";'
for ident in self.parse_asteroid():
if ident is not None:
break
if ident is None:
ident = self.targetname
url += "&COMMAND='" + \
urllib.quote(str(ident).encode("utf8")) + "%3B'"
elif self.iscomet() and not self.asteroid:
# for comets, potentially append the current apparition
# (CAP) parameter, or the fragmentation flag (NOFRAG)
for ident in self.parse_comet():
if ident is not None:
break
if ident is None:
ident = self.targetname
url += "&COMMAND='DES=" + \
urllib.quote(ident.encode("utf8")) + "%3B" + \
("NOFRAG%3B" if self.nofrag else "") + \
("CAP'" if self.cap else "'")
# elif (not self.targetname.replace(' ', '').isalpha() and not
# self.targetname.isdigit() and not
# self.targetname.islower() and not
# self.targetname.isupper()):
# # lower case + upper case + numbers = pot. case sensitive designation
# url += "&COMMAND='DES=" + \
# urllib.quote(self.targetname.encode("utf8")) + "%3B'"
else:
url += "&COMMAND='" + \
urllib.quote(self.targetname.encode("utf8")) + "%3B'"
if self.discreteepochs is not None:
url += "&TLIST="
for date in self.discreteepochs:
url += "'" + str(date) + "'"
elif (self.start_epoch is not None and self.stop_epoch is not None and
self.step_size is not None):
url += "&START_TIME='" \
+ urllib.quote(self.start_epoch.encode("utf8")) + "'" \
+ "&STOP_TIME='" \
+ urllib.quote(self.stop_epoch.encode("utf8")) + "'" \
+ "&STEP_SIZE='" + str(self.step_size) + "'"
else:
raise IOError('no epoch information given')
if airmass_lessthan < 99:
url += "&AIRMASS='" + str(airmass_lessthan) + "'"
if skip_daylight:
url += "&SKIP_DAYLT='YES'"
else:
url += "&SKIP_DAYLT='NO'"
self.url = url
# print (url)
# call HORIZONS
i = 0 # count number of connection tries
while True:
try:
src = urllib.urlopen(url).readlines()
break
except urllib.URLError:
time.sleep(0.1)
# in case the HORIZONS website is blocked (due to another query)
# wait 0.1 second and try again
i += 1
if i > 50:
return 0 # website could not be reached
# disseminate website source code
# identify header line and extract data block (ephemerides data)
# also extract targetname, absolute mag. (H), and slope parameter (G)
headerline = []
datablock = []
in_datablock = False
H, G = np.nan, np.nan
for idx, line in enumerate(src):
line = line.decode('UTF-8')
if "Date__(UT)__HR:MN" in line:
headerline = line.split(',')
if "$$EOE\n" in line:
in_datablock = False
if in_datablock:
datablock.append(line)
if "$$SOE\n" in line:
in_datablock = True
if "Target body name" in line:
targetname = line[18:50].strip()
if ("rotational period in hours)" in
src[idx].decode('UTF-8')):
HGline = src[idx+2].decode('UTF-8').split('=')
if 'B-V' in HGline[2] and 'G' in HGline[1]:
try:
H = float(HGline[1].rstrip('G'))
except ValueError:
pass
try:
G = float(HGline[2].rstrip('B-V'))
except ValueError:
pass
if ("Multiple major-bodies match string" in
src[idx].decode('UTF-8') or
("Matching small-bodies" in src[idx].decode('UTF-8') and not
"No matches found" in src[idx+1].decode('UTF-8'))):
raise ValueError('Ambiguous target name; check URL: %s' %
url)
if ("Matching small-bodies" in src[idx].decode('UTF-8') and
"No matches found" in src[idx+1].decode('UTF-8')):
raise ValueError('Unknown target; check URL: %s' % url)
# field identification for each line
ephemerides = []
for line in datablock:
line = line.split(',')
# ignore line that don't hold any data
if len(line) < len(quantities.split(',')):
continue
this_eph = []
fieldnames = []
datatypes = []
# create a dictionary for each date (each line)
for idx, item in enumerate(headerline):
if ('Date__(UT)__HR:MN' in item):
this_eph.append(line[idx].strip())
fieldnames.append('datetime')
datatypes.append(object)
if ('Date_________JDUT' in item):
this_eph.append(np.float64(line[idx]))
fieldnames.append('datetime_jd')
datatypes.append(np.float64)
# read out and convert solar presence
try:
this_eph.append({'*': 'daylight', 'C': 'civil twilight',
'N': 'nautical twilight',
'A': 'astronomical twilight',
' ': 'dark',
't': 'transiting'}[line[idx+1]])
except KeyError:
this_eph.append('n.a.')
fieldnames.append('solar_presence')
datatypes.append(object)
# read out and convert lunar presence
try:
this_eph.append({'m': 'moonlight',
' ': 'dark'}[line[idx+2]])
except KeyError:
this_eph.append('n.a.')
fieldnames.append('lunar_presence')
datatypes.append(object)
if (item.find('R.A._(ICRF/J2000.0)') > -1):
this_eph.append(np.float64(line[idx]))
fieldnames.append('RA')
datatypes.append(np.float64)
if (item.find('DEC_(ICRF/J2000.0)') > -1):
this_eph.append(np.float64(line[idx]))
fieldnames.append('DEC')
datatypes.append(np.float64)
if (item.find('dRA*cosD') > -1):
try:
this_eph.append(np.float64(line[idx])/3600.) # "/s
except ValueError:
this_eph.append(np.nan)
fieldnames.append('RA_rate')
datatypes.append(np.float64)
if (item.find('d(DEC)/dt') > -1):
try:
this_eph.append(np.float64(line[idx])/3600.) # "/s
except ValueError:
this_eph.append(np.nan)
fieldnames.append('DEC_rate')
datatypes.append(np.float64)
if (item.find('Azi_(a-app)') > -1):
try: # if AZ not given, e.g. for space telescopes
this_eph.append(np.float64(line[idx]))
fieldnames.append('AZ')
datatypes.append(np.float64)
except ValueError:
pass
if (item.find('Elev_(a-app)') > -1):
try: # if EL not given, e.g. for space telescopes
this_eph.append(np.float64(line[idx]))
fieldnames.append('EL')
datatypes.append(np.float64)
except ValueError:
pass
if (item.find('a-mass') > -1):
try: # if airmass not given, e.g. for space telescopes
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('airmass')
datatypes.append(np.float64)
if (item.find('mag_ex') > -1):
try: # if mag_ex not given, e.g. for space telescopes
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('magextinct')
datatypes.append(np.float64)
if (item.find('APmag') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('V')
datatypes.append(np.float64)
if (item.find('Illu%') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('illumination')
datatypes.append(np.float64)
if (item.find('hEcl-Lon') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('EclLon')
datatypes.append(np.float64)
if (item.find('hEcl-Lat') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('EclLat')
datatypes.append(np.float64)
if (item.find('ObsEcLon') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('ObsEclLon')
datatypes.append(np.float64)
if (item.find('ObsEcLat') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('ObsEclLat')
datatypes.append(np.float64)
if (item.find(' r') > -1) and \
(headerline[idx+1].find("rdot") > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('r')
datatypes.append(np.float64)
if (item.find('rdot') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('r_rate')
datatypes.append(np.float64)
if (item.find('delta') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('delta')
datatypes.append(np.float64)
if (item.find('deldot') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('delta_rate')
datatypes.append(np.float64)
if (item.find('1-way_LT') > -1):
try:
this_eph.append(np.float64(line[idx])*60.) # seconds
except ValueError:
this_eph.append(np.nan)
fieldnames.append('lighttime')
datatypes.append(np.float64)
if (item.find('S-O-T') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('elong')
datatypes.append(np.float64)
# in the case of space telescopes, '/r S-T-O' is used;
# ground-based telescopes have both parameters in separate
# columns
if (item.find('/r S-T-O') > -1):
this_eph.append({'/L': 'leading', '/T': 'trailing'}
[line[idx].split()[0]])
fieldnames.append('elongFlag')
datatypes.append(object)
try:
this_eph.append(np.float64(line[idx].split()[1]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('alpha')
datatypes.append(np.float64)
elif (item.find('S-T-O') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('alpha')
datatypes.append(np.float64)
elif (item.find('/r') > -1):
this_eph.append({'/L': 'leading', '/T': 'trailing',
'/?': 'not defined'}
[line[idx]])
fieldnames.append('elongFlag')
datatypes.append(object)
if (item.find('PsAng') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('sunTargetPA')
datatypes.append(np.float64)
if (item.find('PsAMV') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('velocityPA')
datatypes.append(np.float64)
if (item.find('GlxLon') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('GlxLon')
datatypes.append(np.float64)
if (item.find('GlxLat') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('GlxLat')
datatypes.append(np.float64)
if (item.find('RA_3sigma') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('RA_3sigma')
datatypes.append(np.float64)
if (item.find('DEC_3sigma') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('DEC_3sigma')
datatypes.append(np.float64)
# in the case of a comet, use total mag for V
if (item.find('T-mag') > -1):
try:
this_eph.append(np.float64(line[idx]))
except ValueError:
this_eph.append(np.nan)
fieldnames.append('V')
datatypes.append(np.float64)
# append target name
this_eph.append(targetname)
fieldnames.append('targetname')
datatypes.append(object)
# append H
this_eph.append(H)
fieldnames.append('H')
datatypes.append(np.float64)
# append G
this_eph.append(G)
fieldnames.append('G')
datatypes.append(np.float64)
if len(this_eph) > 0:
ephemerides.append(tuple(this_eph))
if len(ephemerides) == 0:
return 0
# combine ephemerides with column names and data types into ndarray
assert len(ephemerides[0]) == len(fieldnames) == len(datatypes)
self.data = np.array(ephemerides,
dtype=[(str(fieldnames[i]), datatypes[i]) for i
in range(len(fieldnames))])
return len(self)
def get_elements(self, center='500@10', asteroid=False, comet=False):
"""Call JPL HORIZONS website to obtain orbital elements based on the
provided targetname, epochs, and center code. For valid center
codes, please refer to http://ssd.jpl.nasa.gov/horizons.cgi
:param center: str;
center body (default: 500@10 = Sun)
:result: int; number of epochs queried
:example: >>> ceres = callhorizons.query('Ceres')
>>> ceres.set_epochrange('2016-02-23 00:00', '2016-02-24 00:00', '1h')
>>> print (ceres.get_elements(), 'epochs queried')
The queried properties and their definitions are:
+------------------+-----------------------------------------------+
| Property | Definition |
+==================+===============================================+
| targetname | official number, name, designation [string] |
+------------------+-----------------------------------------------+
| H | absolute magnitude in V band (float, mag) |
+------------------+-----------------------------------------------+
| G | photometric slope parameter (float) |
+------------------+-----------------------------------------------+
| datetime_jd | epoch Julian Date (float) |
+------------------+-----------------------------------------------+
| e | eccentricity (float) |
+------------------+-----------------------------------------------+
| p | periapsis distance (float, au) |
+------------------+-----------------------------------------------+
| a | semi-major axis (float, au) |
+------------------+-----------------------------------------------+
| incl | inclination (float, deg) |
+------------------+-----------------------------------------------+
| node | longitude of Asc. Node (float, deg) |
+------------------+-----------------------------------------------+
| argper | argument of the perifocus (float, deg) |
+------------------+-----------------------------------------------+
| Tp | time of periapsis (float, Julian Date) |
+------------------+-----------------------------------------------+
| meananomaly | mean anomaly (float, deg) |
+------------------+-----------------------------------------------+
| trueanomaly | true anomaly (float, deg) |
+------------------+-----------------------------------------------+
| period | orbital period (float, Earth yr) |
+------------------+-----------------------------------------------+
| Q | apoapsis distance (float, au) |
+------------------+-----------------------------------------------+
"""
# encode objectname for use in URL
objectname = urllib.quote(self.targetname.encode("utf8"))
# call Horizons website and extract data
url = "https://ssd.jpl.nasa.gov/horizons_batch.cgi?batch=l" \
+ "&TABLE_TYPE='ELEMENTS'" \
+ "&CSV_FORMAT='YES'" \
+ "&CENTER='" + str(center) + "'" \
+ "&OUT_UNITS='AU-D'" \
+ "&REF_PLANE='ECLIPTIC'" \
+ "REF_SYSTEM='J2000'" \
+ "&TP_TYPE='ABSOLUTE'" \
+ "&ELEM_LABELS='YES'" \
+ "CSV_FORMAT='YES'" \
+ "&OBJ_DATA='YES'"
# check if self.targetname is a designation
# lower case + upper case + numbers = pot. case sensitive designation
if self.not_smallbody:
url += "&COMMAND='" + \
urllib.quote(self.targetname.encode("utf8")) + "'"
elif self.isorbit_record():
# Comet orbit record. Do not use DES, CAP. This test must
# occur before asteroid test.
url += "&COMMAND='" + \
urllib.quote(self.targetname.encode("utf8")) + "%3B'"
elif self.isasteroid() and not self.comet:
# for asteroids, use 'DES="designation";'
for ident in self.parse_asteroid():
if ident is not None:
break
if ident is None:
ident = self.targetname
url += "&COMMAND='" + \
urllib.quote(str(ident).encode("utf8")) + "%3B'"
elif self.iscomet() and not self.asteroid:
# for comets, potentially append the current apparition
# (CAP) parameter, or the fragmentation flag (NOFRAG)
for ident in self.parse_comet():
if ident is not None:
break
if ident is None:
ident = self.targetname
url += "&COMMAND='DES=" + \
urllib.quote(ident.encode("utf8")) + "%3B" + \
("NOFRAG%3B" if self.nofrag else "") + \
("CAP'" if self.cap else "'")
# elif (not self.targetname.replace(' ', '').isalpha() and not
# self.targetname.isdigit() and not
# self.targetname.islower() and not
# self.targetname.isupper()):
# url += "&COMMAND='DES=" + str(objectname) + "%3B'"
else:
url += "&COMMAND='" + str(objectname) + "%3B'"
if self.discreteepochs is not None:
url += "&TLIST="
for date in self.discreteepochs:
url += "'" + str(date) + "'"
elif (self.start_epoch is not None and self.stop_epoch is not None and
self.step_size is not None):
url += "&START_TIME='" \
+ urllib.quote(self.start_epoch.encode("utf8")) + "'" \
+ "&STOP_TIME='" \
+ urllib.quote(self.stop_epoch.encode("utf8")) + "'" \
+ "&STEP_SIZE='" + str(self.step_size) + "'"
else:
raise IOError('no epoch information given')
self.url = url
i = 0 # count number of connection tries
while True:
try:
src = urllib.urlopen(url).readlines()
break
except urllib.URLError:
time.sleep(0.1)
# in case the HORIZONS website is blocked (due to another query)
# wait 1 second and try again
i += 1
if i > 50:
return 0 # website could not be reached
# disseminate website source code
# identify header line and extract data block (elements data)
# also extract targetname, abs. magnitude (H), and slope parameter (G)
headerline = []
datablock = []
in_datablock = False
H, G = np.nan, np.nan
for idx, line in enumerate(src):
line = line.decode('UTF-8')
if 'JDTDB,' in line:
headerline = line.split(',')
if "$$EOE\n" in line:
in_datablock = False
if in_datablock:
datablock.append(line)
if "$$SOE\n" in line:
in_datablock = True
if "Target body name" in line:
targetname = line[18:50].strip()
if "rotational period in hours)" in src[idx].decode('UTF-8'):
HGline = src[idx+2].decode('UTF-8').split('=')
if 'B-V' in HGline[2] and 'G' in HGline[1]:
try:
H = float(HGline[1].rstrip('G'))
except ValueError:
pass
try:
G = float(HGline[2].rstrip('B-V'))
except ValueError:
pass
if ("Multiple major-bodies match string" in src[idx].decode('UTF-8') or
("Matching small-bodies" in src[idx].decode('UTF-8') and not
"No matches found" in src[idx+1].decode('UTF-8'))):
raise ValueError('Ambiguous target name; check URL: %s' %
url)
if ("Matching small-bodies" in src[idx].decode('UTF-8') and
"No matches found" in src[idx+1].decode('UTF-8')):
raise ValueError('Unknown target; check URL: %s' % url)
# field identification for each line in
elements = []
for line in datablock:
line = line.split(',')
this_el = []
fieldnames = []
datatypes = []
# create a dictionary for each date (each line)
for idx, item in enumerate(headerline):
if (item.find('JDTDB') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('datetime_jd')
datatypes.append(np.float64)
if (item.find('EC') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('e')
datatypes.append(np.float64)
if (item.find('QR') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('p')
datatypes.append(np.float64)
if (item.find('A') > -1) and len(item.strip()) == 1:
this_el.append(np.float64(line[idx]))
fieldnames.append('a')
datatypes.append(np.float64)
if (item.find('IN') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('incl')
datatypes.append(np.float64)
if (item.find('OM') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('node')
datatypes.append(np.float64)
if (item.find('W') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('argper')
datatypes.append(np.float64)
if (item.find('Tp') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('Tp')
datatypes.append(np.float64)
if (item.find('MA') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('meananomaly')
datatypes.append(np.float64)
if (item.find('TA') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('trueanomaly')
datatypes.append(np.float64)
if (item.find('PR') > -1):
# Earth years
this_el.append(np.float64(line[idx])/(365.256))
fieldnames.append('period')
datatypes.append(np.float64)
if (item.find('AD') > -1):
this_el.append(np.float64(line[idx]))
fieldnames.append('Q')
datatypes.append(np.float64)
# append targetname
this_el.append(targetname)
fieldnames.append('targetname')
datatypes.append(object)
# append H
this_el.append(H)
fieldnames.append('H')
datatypes.append(np.float64)
# append G
this_el.append(G)
fieldnames.append('G')
datatypes.append(np.float64)
if len(this_el) > 0:
elements.append(tuple(this_el))
if len(elements) == 0:
return 0
# combine elements with column names and data types into ndarray
assert len(elements[0]) == len(fieldnames) == len(datatypes)
self.data = np.array(elements,
dtype=[(str(fieldnames[i]), datatypes[i]) for i
in range(len(fieldnames))])
return len(self)
def export2pyephem(self, center='500@10', equinox=2000.):
"""Call JPL HORIZONS website to obtain orbital elements based on the
provided targetname, epochs, and center code and create a
PyEphem (http://rhodesmill.org/pyephem/) object. This function
requires PyEphem to be installed.
:param center: str;
center body (default: 500@10 = Sun)
:param equinox: float;
equinox (default: 2000.0)
:result: list;
list of PyEphem objects, one per epoch
:example: >>> import callhorizons
>>> import numpy
>>> import ephem
>>>
>>> ceres = callhorizons.query('Ceres')
>>> ceres.set_epochrange('2016-02-23 00:00', '2016-02-24 00:00', '1h')
>>> ceres_pyephem = ceres.export2pyephem()
>>>
>>> nau = ephem.Observer() # setup observer site
>>> nau.lon = -111.653152/180.*numpy.pi
>>> nau.lat = 35.184108/180.*numpy.pi
>>> nau.elevation = 2100 # m
>>> nau.date = '2015/10/5 01:23' # UT
>>> print ('next rising: %s' % nau.next_rising(ceres_pyephem[0]))
>>> print ('next transit: %s' % nau.next_transit(ceres_pyephem[0]))
>>> print ('next setting: %s' % nau.next_setting(ceres_pyephem[0]))
"""
try:
import ephem
except ImportError:
raise ImportError(
'export2pyephem requires PyEphem to be installed')
# obtain orbital elements
self.get_elements(center)
objects = []
for el in self.data:
n = 0.9856076686/np.sqrt(el['a']**3) # mean daily motion
epoch_djd = el['datetime_jd']-2415020.0 # Dublin Julian date
epoch = ephem.date(epoch_djd)
epoch_str = "%d/%f/%d" % (epoch.triple()[1], epoch.triple()[2],
epoch.triple()[0])
# export to PyEphem
objects.append(ephem.readdb("%s,e,%f,%f,%f,%f,%f,%f,%f,%s,%i,%f,%f" %
(el['targetname'], el['incl'], el['node'],
el['argper'], el['a'], n, el['e'],
el['meananomaly'], epoch_str, equinox,
el['H'], el['G'])))
return objects
|
mit
| 2,673,770,013,249,938,400
| 44.952055
| 90
| 0.409781
| false
| 4.403194
| false
| false
| false
|
torkelsson/meta-package-manager
|
meta_package_manager/managers/mas.py
|
1
|
5319
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016-2018 Kevin Deldycke <kevin@deldycke.com>
# and contributors.
# All Rights Reserved.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals
)
import re
from boltons.cacheutils import cachedproperty
from ..base import PackageManager
from ..platform import MACOS
class MAS(PackageManager):
platforms = frozenset([MACOS])
# 'mas outdated' output has been changed in 1.3.1: https://github.com
# /mas-cli/mas/commit/ca72ee42b1c5f482513b1d2fbf780b0bf3d9618b
requirement = '>= 1.3.1'
name = "Mac AppStore"
def get_version(self):
""" Fetch version from ``mas version`` output."""
return self.run([self.cli_path, 'version'])
@cachedproperty
def installed(self):
""" Fetch installed packages from ``mas list`` output.
Raw CLI output samples:
.. code-block:: shell-session
$ mas list
408981434 iMovie (10.1.4)
747648890 Telegram (2.30)
"""
installed = {}
output = self.run([self.cli_path] + self.cli_args + ['list'])
if output:
regexp = re.compile(r'(\d+) (.*) \((\S+)\)$')
for package in output.split('\n'):
match = regexp.match(package)
if match:
package_id, package_name, installed_version = \
match.groups()
installed[package_id] = {
'id': package_id,
'name': package_name,
# Normalize unknown version. See:
# https://github.com/mas-cli/mas/commit
# /1859eaedf49f6a1ebefe8c8d71ec653732674341
'installed_version': (
installed_version if installed_version != 'unknown'
else None)}
return installed
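    # Editor's sketch (not part of the original docstring): for the sample
    # output above, the parsed result would look roughly like
    #
    #   {'408981434': {'id': '408981434', 'name': 'iMovie',
    #                  'installed_version': '10.1.4'},
    #    '747648890': {'id': '747648890', 'name': 'Telegram',
    #                  'installed_version': '2.30'}}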
def search(self, query):
""" Fetch matching packages from ``mas search`` output.
Raw CLI output samples:
.. code-block:: shell-session
$ mas search python
689176796 Python Runner
630736088 Learning Python
945397020 Run Python
891162632 Python Lint
1025391371 Tutorial for Python
1164498373 PythonGames
"""
matches = {}
output = self.run([self.cli_path] + self.cli_args + [
'search', query])
if output:
regexp = re.compile(r'(\d+) (.*)$')
for package in output.split('\n'):
match = regexp.match(package)
if match:
package_id, package_name = match.groups()
matches[package_id] = {
'id': package_id,
'name': package_name,
'latest_version': None,
'exact': self.exact_match(query, package_name)}
return matches
@cachedproperty
def outdated(self):
""" Fetch outdated packages from ``mas outdated`` output.
Raw CLI output samples:
.. code-block:: shell-session
$ mas outdated
.. todo
An example of ``mas outdated`` output is missing above.
"""
outdated = {}
output = self.run([self.cli_path] + self.cli_args + ['outdated'])
if output:
regexp = re.compile(r'(\d+) (.*) \((\S+) -> (\S+)\)$')
for package in output.split('\n'):
match = regexp.match(package)
if match:
package_id, package_name, installed_version, \
latest_version = match.groups()
outdated[package_id] = {
'id': package_id,
'name': package_name,
'latest_version': latest_version,
# Normalize unknown version. See:
# https://github.com/mas-cli/mas/commit
# /1859eaedf49f6a1ebefe8c8d71ec653732674341
'installed_version': (
installed_version if installed_version != 'unknown'
else None)}
return outdated
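    # Editor's sketch (output format assumed from the regular expression above,
    # since the docstring notes a sample is missing): an outdated entry is
    # expected to look roughly like
    #
    #   497799835 Xcode (9.0 -> 9.1)
    #
    # which would parse to {'id': '497799835', 'name': 'Xcode',
    # 'installed_version': '9.0', 'latest_version': '9.1'}.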
def upgrade_cli(self, package_id=None):
cmd = [self.cli_path] + self.cli_args + ['upgrade']
if package_id:
cmd.append(package_id)
return cmd
def upgrade_all_cli(self):
return self.upgrade_cli()
|
gpl-2.0
| -8,493,799,302,312,990,000
| 31.432927
| 79
| 0.536943
| false
| 4.275723
| false
| false
| false
|
apuigsech/CryptoAPI
|
CryptoAPI/CryptoAPI.py
|
1
|
8197
|
#!/usr/bin/env python
# CryptoAPI: Python Crypto API implementation
#
# Copyright (c) 2014 - Albert Puigsech Galicia (albert@puigsech.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from CryptsyAPI import CryptsyAPI
from BittrexAPI import BittrexAPI
class CryptoAPI_iface(object):
def balances(self, currency=None, cached=None):
raise NotImplementedError( "Method not implemented" )
def marketstatus(self, market=None, depth_level=None, cached=None):
raise NotImplementedError( "Method not implemented" )
def orders(self, market=None, cached=None):
raise NotImplementedError( "Method not implemented" )
def putorder(self, market, type, pricetype, amount, price=None, simulation=None):
raise NotImplementedError( "Method not implemented" )
def delorder(self, order_id=None, simulation=None):
raise NotImplementedError( "Method not implemented" )
class CryptoAPI_cryptsy(CryptsyAPI, CryptoAPI_iface):
def __init__(self, key, secret, simulation=False, cached=False):
super(CryptoAPI_cryptsy, self).__init__(key, secret, simulation, cached)
CryptoAPI_iface.__init__(self)
def balances(self, currency=None, cached=None):
if cached == None:
cached = self.cached
ret = {
'available': {},
'hold': {},
'total': {},
}
info = self.getinfo(cached)['return']
for i in info['balances_available']:
if i == currency or (currency == None and (float(info['balances_available'][i]) > 0 or info['balances_hold'].has_key(i))):
ret['available'][i] = float(info['balances_available'][i])
ret['hold'][i] = float(info['balances_hold'][i]) if info['balances_hold'].has_key(i) else float(0)
ret['total'][i] = ret['available'][i] + ret['hold'][i]
return ret
def marketstatus(self, market=None, depth_level=None, cached=None):
if cached == None:
cached = self.cached
status = self.getmarkets(cached)['return']
ret = {}
for i in status:
marketname = '{0}-{1}'.format(i['secondary_currency_code'], i['primary_currency_code'])
if marketname == market or i['primary_currency_code'] == market or i['secondary_currency_code'] == market or market == None:
ret[marketname] = {
'id': int(i['marketid']),
'last_price': float(i['last_trade']),
'high_price': float(i['high_trade']),
'low_price': float(i['low_trade']),
'volume': float(i['current_volume']),
'depth': None
}
if depth_level != None and depth_level > 0:
depth = self.depth(i['marketid'], cached)['return']
ret[marketname]['depth'] = {
'buy': [],
'sell': [],
}
for j in depth['buy'][0:depth_level]:
ret[marketname]['depth']['buy'].append([float(j[0]),float(j[1])])
for j in depth['sell'][0:depth_level]:
ret[marketname]['depth']['sell'].append([float(j[0]),float(j[1])])
return ret
def orders(self, market=None, cached=None):
if cached == None:
cached = self.cached
orders = self.allmyorders(cached)['return']
ret = []
for i in orders:
marketname = self._getmarketfromid(i['marketid'])
ret.append({
'id': int(i['orderid']),
'market': 'TBD',
'price': i['price'],
'amount': i['orig_quantity'],
'remaining_amount': i['quantity'],
})
return ret
def putorder(self, market, type, pricetype, amount, price=None, simulation=None):
if simulation == None:
simulation = self.simulation
status = self.marketstatus(market, 1)
print status
if pricetype == 'market':
price = 4294967296
elif pricetype == 'best':
if type == 'buy':
price = status[market]['depth']['sell'][0][0]
elif type == 'sell':
price = status[market]['depth']['buy'][0][0]
elif pricetype == 'border' or pricetype == 'overboder':
if type == 'buy':
price = status[market]['depth']['buy'][0][0]
elif type == 'sell':
price = status[market]['depth']['sell'][0][0]
if pricetype == 'overboder':
if type == 'buy':
price += 0.00000001
elif type == 'sell':
price -= 0.00000001
return self.createorder(status[market]['id'], type, amount, price)
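    # Illustrative sketch (editor's addition; 'BTC-LTC' is a placeholder for the
    # secondary-primary market names built by marketstatus above):
    #
    #   api.putorder('BTC-LTC', 'buy', 'market', 0.5)       # buy 0.5 at any ask
    #   api.putorder('BTC-LTC', 'sell', 'overboder', 0.5)   # undercut best ask by one satoshi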
def delorder(self, order_id=None, simulation=None):
return None
def _getmarketfromid(self, id):
markets = self.marketstatus(cached=True)
for marketname in markets:
if markets[marketname]['id'] == id:
return marketname
return None
def _getidfrommarket(self, market):
markets = self.marketstatus(cached=True)
if markets.has_key(market):
return markets[market]['id']
else:
return None
class CryptoAPI_bittrex(BittrexAPI, CryptoAPI_iface):
def __init__(self, key, secret, simulation=False, cached=False):
super(CryptoAPI_bittrex, self).__init__(key, secret, simulation, cached)
def balances(self, currency=None, cached=None):
if cached == None:
cached = self.cached
ret = {
'available': {},
'hold': {},
'total': {},
}
if currency==None:
info = self.getbalances(cached)['result']
else:
pass
info = [self.getbalance(currency, cached)['result']]
for i in info:
ret['available'][i['Currency']] = float(i['Available'])
ret['hold'][i['Currency']] = float(i['Pending'])
ret['total'][i['Currency']] = float(i['Balance'])
return ret
def marketstatus(self, market=None, depth_level=None, cached=None):
if cached == None:
cached = self.cached
ret = {}
status = self.getmarkets(cached)['result']
status = self.getmarketsummaries(cached)['result']
for i in status:
marketname = i['MarketName']
#if marketname == market or market == i['BaseCurrency'] or market == i['MarketCurrency'] or market == None:
if marketname == market or market in marketname or market == None:
if i['Volume'] == None:
i['Volume'] = 0
ret[marketname] = {
'id': marketname,
'last_price': float(i['Last']),
'high_price': float(str(i['High'])), # FIX a bug on Bittrex data returned
'low_price': float(i['Low']),
'volume': float(i['Volume']),
'depth': None
}
if depth_level != None and depth_level > 0:
depth = self.getorderbook(marketname, 'both', depth_level, cached)['result']
ret[marketname]['depth'] = {
'buy': [],
'sell': [],
}
for j in depth['buy'][0:depth_level]:
ret[marketname]['depth']['buy'].append([float(j['Rate']),float(j['Quantity'])])
for j in depth['sell'][0:depth_level]:
ret[marketname]['depth']['sell'].append([float(j['Rate']),float(j['Quantity'])])
return ret
def orders(self, market=None, cached=None):
if cached == None:
cached = self.cached
ret = []
orders = self.getopenorders(market, cached)['return']
for i in orders:
marketname = self._getmarketfromid(i['marketid'])
ret.append({
'id': int(i['orderid']),
                'market': marketname,
'price': i['price'],
'amount': i['orig_quantity'],
'remaining_amount': i['quantity'],
})
return ret
def putorder(self, market, type, pricetype, amount, price=None, simulation=None):
pass
def delorder(self, order_id=None, simulation=None):
pass
def CryptoAPI(type, key, secret, simulation=False, cached=False):
# TODO Security: type validation
code = 'CryptoAPI_{0}(key, secret, simulation, cached)'.format(type)
api = eval(code)
return api
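# A minimal sketch (an assumption, not part of the original module) of how the
# TODO above could be handled without eval(): resolve the wrapper class by name
# and fail loudly on unknown types.
#
#   def CryptoAPI(type, key, secret, simulation=False, cached=False):
#       try:
#           cls = globals()['CryptoAPI_{0}'.format(type)]
#       except KeyError:
#           raise ValueError('unsupported API type: {0}'.format(type))
#       return cls(key, secret, simulation, cached)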
|
gpl-3.0
| 8,747,078,000,745,754,000
| 27.866197
| 127
| 0.654508
| false
| 3.276179
| false
| false
| false
|
jbq/ufw
|
src/backend.py
|
1
|
24402
|
'''backend.py: interface for ufw backends'''
#
# Copyright 2008-2011 Canonical Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import errno
import os
import re
import stat
import sys
import ufw.util
from ufw.util import warn, debug
from ufw.common import UFWError, config_dir, iptables_dir, UFWRule
import ufw.applications
class UFWBackend:
'''Interface for backends'''
def __init__(self, name, dryrun, extra_files=None):
self.defaults = None
self.name = name
self.dryrun = dryrun
self.rules = []
self.rules6 = []
self.files = {'defaults': os.path.join(config_dir, 'default/ufw'),
'conf': os.path.join(config_dir, 'ufw/ufw.conf'),
'apps': os.path.join(config_dir, 'ufw/applications.d') }
if extra_files != None:
self.files.update(extra_files)
self.loglevels = {'off': 0,
'low': 100,
'medium': 200,
'high': 300,
'full': 400 }
self.do_checks = True
try:
self._do_checks()
self._get_defaults()
self._read_rules()
except Exception:
raise
self.profiles = ufw.applications.get_profiles(self.files['apps'])
self.iptables = os.path.join(iptables_dir, "iptables")
self.iptables_restore = os.path.join(iptables_dir, "iptables-restore")
self.ip6tables = os.path.join(iptables_dir, "ip6tables")
self.ip6tables_restore = os.path.join(iptables_dir, \
"ip6tables-restore")
self.iptables_version = ufw.util.get_iptables_version(self.iptables)
def is_enabled(self):
'''Is firewall configured as enabled'''
if self.defaults.has_key('enabled') and \
self.defaults['enabled'] == 'yes':
return True
return False
def use_ipv6(self):
'''Is firewall configured to use IPv6'''
if self.defaults.has_key('ipv6') and \
self.defaults['ipv6'] == 'yes' and \
os.path.exists("/proc/sys/net/ipv6"):
return True
return False
def _get_default_policy(self, primary="input"):
'''Get default policy for specified primary chain'''
policy = "default_" + primary + "_policy"
rstr = ""
if self.defaults[policy] == "accept":
rstr = "allow"
elif self.defaults[policy] == "accept_no_track":
rstr = "allow-without-tracking"
elif self.defaults[policy] == "reject":
rstr = "reject"
else:
rstr = "deny"
return rstr
def _do_checks(self):
'''Perform basic security checks:
is setuid or setgid (for non-Linux systems)
checks that script is owned by root
checks that every component in absolute path are owned by root
warn if script is group writable
warn if part of script path is group writable
Doing this at the beginning causes a race condition with later
operations that don't do these checks. However, if the user running
        this script is root, then one would need to be root to exploit the race
condition (and you are hosed anyway...)
'''
if not self.do_checks:
err_msg = _("Checks disabled")
warn(err_msg)
return True
# Not needed on Linux, but who knows the places we will go...
if os.getuid() != os.geteuid():
err_msg = _("ERROR: this script should not be SUID")
raise UFWError(err_msg)
if os.getgid() != os.getegid():
err_msg = _("ERROR: this script should not be SGID")
raise UFWError(err_msg)
uid = os.getuid()
if uid != 0:
err_msg = _("You need to be root to run this script")
raise UFWError(err_msg)
# Use these so we only warn once
warned_world_write = {}
warned_group_write = {}
warned_owner = {}
profiles = []
if not os.path.isdir(self.files['apps']):
warn_msg = _("'%s' does not exist") % (self.files['apps'])
warn(warn_msg)
else:
pat = re.compile(r'^\.')
for profile in os.listdir(self.files['apps']):
if not pat.search(profile):
profiles.append(os.path.join(self.files['apps'], profile))
for path in self.files.values() + [ os.path.abspath(sys.argv[0]) ] + \
profiles:
while True:
debug("Checking " + path)
if path == self.files['apps'] and \
not os.path.isdir(self.files['apps']):
break
try:
statinfo = os.stat(path)
mode = statinfo[stat.ST_MODE]
except OSError:
err_msg = _("Couldn't stat '%s'") % (path)
raise UFWError(err_msg)
except Exception:
raise
if statinfo.st_uid != 0 and not warned_owner.has_key(path):
warn_msg = _("uid is %(uid)s but '%(path)s' is owned by " \
"%(st_uid)s") % ({'uid': str(uid), \
'path': path, \
'st_uid': str(statinfo.st_uid)})
warn(warn_msg)
warned_owner[path] = True
if mode & stat.S_IWOTH and not warned_world_write.has_key(path):
warn_msg = _("%s is world writable!") % (path)
warn(warn_msg)
warned_world_write[path] = True
if mode & stat.S_IWGRP and not warned_group_write.has_key(path):
warn_msg = _("%s is group writable!") % (path)
warn(warn_msg)
warned_group_write[path] = True
if path == "/":
break
path = os.path.dirname(path)
if not path:
raise OSError(errno.ENOENT, "Could not find '%s'" % (path))
for f in self.files:
if f != 'apps' and not os.path.isfile(self.files[f]):
err_msg = _("'%(f)s' file '%(name)s' does not exist") % \
({'f': f, 'name': self.files[f]})
raise UFWError(err_msg)
def _get_defaults(self):
'''Get all settings from defaults file'''
self.defaults = {}
for f in [self.files['defaults'], self.files['conf']]:
try:
orig = ufw.util.open_file_read(f)
except Exception:
err_msg = _("Couldn't open '%s' for reading") % (f)
raise UFWError(err_msg)
pat = re.compile(r'^\w+="?\w+"?')
for line in orig:
if pat.search(line):
tmp = re.split(r'=', line.strip())
self.defaults[tmp[0].lower()] = tmp[1].lower().strip('"\'')
orig.close()
# do some default policy sanity checking
policies = ['accept', 'accept_no_track', 'drop', 'reject']
for c in [ 'input', 'output', 'forward' ]:
if not self.defaults.has_key('default_%s_policy' % (c)):
err_msg = _("Missing policy for '%s'" % (c))
raise UFWError(err_msg)
p = self.defaults['default_%s_policy' % (c)]
if p not in policies or \
(p == 'accept_no_track' and c == 'forward'):
err_msg = _("Invalid policy '%(policy)s' for '%(chain)s'" % \
({'policy': p, 'chain': c}))
raise UFWError(err_msg)
def set_default(self, fn, opt, value):
'''Sets option in defaults file'''
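        # Illustrative call, mirroring how set_loglevel() below uses this
        # method:
        #   self.set_default(self.files['conf'], "LOGLEVEL", "low")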
if not re.match(r'^[\w_]+$', opt):
err_msg = _("Invalid option")
raise UFWError(err_msg)
# Perform this here so we can present a nice error to the user rather
# than a traceback
if not os.access(fn, os.W_OK):
err_msg = _("'%s' is not writable" % (fn))
raise UFWError(err_msg)
try:
fns = ufw.util.open_files(fn)
except Exception:
raise
fd = fns['tmp']
found = False
pat = re.compile(r'^' + opt + '=')
for line in fns['orig']:
if pat.search(line):
ufw.util.write_to_file(fd, opt + "=" + value + "\n")
found = True
else:
ufw.util.write_to_file(fd, line)
# Add the entry if not found
if not found:
ufw.util.write_to_file(fd, opt + "=" + value + "\n")
try:
ufw.util.close_files(fns)
except Exception:
raise
# Now that the files are written out, update value in memory
self.defaults[opt.lower()] = value.lower().strip('"\'')
def set_default_application_policy(self, policy):
'''Sets default application policy of firewall'''
if not self.dryrun:
if policy == "allow":
try:
self.set_default(self.files['defaults'], \
"DEFAULT_APPLICATION_POLICY", \
"\"ACCEPT\"")
except Exception:
raise
elif policy == "deny":
try:
self.set_default(self.files['defaults'], \
"DEFAULT_APPLICATION_POLICY", \
"\"DROP\"")
except Exception:
raise
elif policy == "reject":
try:
self.set_default(self.files['defaults'], \
"DEFAULT_APPLICATION_POLICY", \
"\"REJECT\"")
except Exception:
raise
elif policy == "skip":
try:
self.set_default(self.files['defaults'], \
"DEFAULT_APPLICATION_POLICY", \
"\"SKIP\"")
except Exception:
raise
else:
err_msg = _("Unsupported policy '%s'") % (policy)
raise UFWError(err_msg)
rstr = _("Default application policy changed to '%s'") % (policy)
return rstr
def get_app_rules_from_template(self, template):
'''Return a list of UFWRules based on the template rule'''
rules = []
profile_names = self.profiles.keys()
if template.dport in profile_names and template.sport in profile_names:
dports = ufw.applications.get_ports(self.profiles[template.dport])
sports = ufw.applications.get_ports(self.profiles[template.sport])
for i in dports:
tmp = template.dup_rule()
tmp.dapp = ""
tmp.set_port("any", "src")
try:
(port, proto) = ufw.util.parse_port_proto(i)
tmp.set_protocol(proto)
tmp.set_port(port, "dst")
except Exception:
raise
tmp.dapp = template.dapp
if template.dport == template.sport:
# Just use the same ports as dst for src when they are the
# same to avoid duplicate rules
tmp.sapp = ""
try:
(port, proto) = ufw.util.parse_port_proto(i)
tmp.set_protocol(proto)
tmp.set_port(port, "src")
except Exception:
raise
tmp.sapp = template.sapp
rules.append(tmp)
else:
for j in sports:
rule = tmp.dup_rule()
rule.sapp = ""
try:
(port, proto) = ufw.util.parse_port_proto(j)
rule.set_protocol(proto)
rule.set_port(port, "src")
except Exception:
raise
if rule.protocol == "any":
rule.set_protocol(tmp.protocol)
rule.sapp = template.sapp
rules.append(rule)
elif template.sport in profile_names:
for p in ufw.applications.get_ports(self.profiles[template.sport]):
rule = template.dup_rule()
rule.sapp = ""
try:
(port, proto) = ufw.util.parse_port_proto(p)
rule.set_protocol(proto)
rule.set_port(port, "src")
except Exception:
raise
rule.sapp = template.sapp
rules.append(rule)
elif template.dport in profile_names:
for p in ufw.applications.get_ports(self.profiles[template.dport]):
rule = template.dup_rule()
rule.dapp = ""
try:
(port, proto) = ufw.util.parse_port_proto(p)
rule.set_protocol(proto)
rule.set_port(port, "dst")
except Exception:
raise
rule.dapp = template.dapp
rules.append(rule)
if len(rules) < 1:
err_msg = _("No rules found for application profile")
raise UFWError(err_msg)
return rules
def update_app_rule(self, profile):
'''Update rule for profile in place. Returns result string and bool
on whether or not the profile is used in the current ruleset.
'''
updated_rules = []
updated_rules6 = []
last_tuple = ""
rstr = ""
updated_profile = False
# Remember, self.rules is from user[6].rules, and not the running
# firewall.
for r in self.rules + self.rules6:
if r.dapp == profile or r.sapp == profile:
# We assume that the rules are in app rule order. Specifically,
# if app rule has multiple rules, they are one after the other.
# If the rule ordering changes, the below will have to change.
tupl = r.get_app_tuple()
if tupl == last_tuple:
# Skip the rule if seen this tuple already (ie, it is part
# of a known tuple).
continue
else:
# Have a new tuple, so find and insert new app rules here
template = r.dup_rule()
template.set_protocol("any")
if template.dapp != "":
template.set_port(template.dapp, "dst")
if template.sapp != "":
template.set_port(template.sapp, "src")
try:
new_app_rules = self.get_app_rules_from_template(\
template)
except Exception:
raise
for new_r in new_app_rules:
new_r.normalize()
if new_r.v6:
updated_rules6.append(new_r)
else:
updated_rules.append(new_r)
last_tuple = tupl
updated_profile = True
else:
if r.v6:
updated_rules6.append(r)
else:
updated_rules.append(r)
if updated_profile:
self.rules = updated_rules
self.rules6 = updated_rules6
rstr += _("Rules updated for profile '%s'") % (profile)
try:
self._write_rules(False) # ipv4
self._write_rules(True) # ipv6
except Exception:
err_msg = _("Couldn't update application rules")
raise UFWError(err_msg)
return (rstr, updated_profile)
def find_application_name(self, profile_name):
'''Find the application profile name for profile_name'''
if self.profiles.has_key(profile_name):
return profile_name
match = ""
matches = 0
for n in self.profiles.keys():
if n.lower() == profile_name.lower():
match = n
matches += 1
debug_msg = "'%d' matches for '%s'" % (matches, profile_name)
debug(debug_msg)
if matches == 1:
return match
elif matches > 1:
            err_msg = _("Found multiple matches for '%s'. Please use exact profile name") % \
                      (profile_name)
            raise UFWError(err_msg)
err_msg = _("Could not find a profile matching '%s'") % (profile_name)
raise UFWError(err_msg)
def find_other_position(self, position, v6):
'''Return the absolute position in the other list of the rule with the
user position of the given list. For example, find_other_position(4,
True) will return the absolute position of the rule in the ipv4 list
matching the user specified '4' rule in the ipv6 list.
'''
# Invalid search (v6 rule with too low position)
if v6 and position > len(self.rules6):
raise ValueError()
# Invalid search (v4 rule with too high position)
if not v6 and position > len(self.rules):
raise ValueError()
if position < 1:
raise ValueError()
rules = []
if v6:
rules = self.rules6
else:
rules = self.rules
# self.rules[6] is a list of tuples. Some application rules have
# multiple tuples but the user specifies by ufw rule, not application
# tuple, so we need to find how many tuples there are leading up to
# the specified position, which we can then use as an offset for
# getting the proper match_rule.
app_rules = {}
tuple_offset = 0
for i, r in enumerate(rules):
if i >= position:
break
tupl = ""
if r.dapp != "" or r.sapp != "":
tupl = r.get_app_tuple()
if app_rules.has_key(tupl):
tuple_offset += 1
else:
app_rules[tupl] = True
rules = []
if v6:
rules = self.rules
match_rule = self.rules6[position - 1 + tuple_offset].dup_rule()
match_rule.set_v6(False)
else:
rules = self.rules6
match_rule = self.rules[position - 1 + tuple_offset].dup_rule()
match_rule.set_v6(True)
count = 1
for r in rules:
if UFWRule.match(r, match_rule) == 0:
return count
count += 1
return 0
def get_loglevel(self):
'''Gets current log level of firewall'''
level = 0
rstr = _("Logging: ")
if not self.defaults.has_key('loglevel') or \
self.defaults['loglevel'] not in self.loglevels.keys():
level = -1
rstr += _("unknown")
else:
level = self.loglevels[self.defaults['loglevel']]
if level == 0:
rstr += "off"
else:
rstr += "on (%s)" % (self.defaults['loglevel'])
return (level, rstr)
def set_loglevel(self, level):
'''Sets log level of firewall'''
if level not in self.loglevels.keys() + ['on']:
err_msg = _("Invalid log level '%s'") % (level)
raise UFWError(err_msg)
new_level = level
if level == "on":
if not self.defaults.has_key('loglevel') or \
self.defaults['loglevel'] == "off":
new_level = "low"
else:
new_level = self.defaults['loglevel']
try:
self.set_default(self.files['conf'], "LOGLEVEL", new_level)
self.update_logging(new_level)
except Exception:
raise
if new_level == "off":
return _("Logging disabled")
else:
return _("Logging enabled")
def get_rules(self):
'''Return list of all rules'''
return self.rules + self.rules6
def get_rules_count(self, v6):
'''Return number of ufw rules (not iptables rules)'''
rules = []
if v6:
rules = self.rules6
else:
rules = self.rules
count = 0
app_rules = {}
for r in rules:
tupl = ""
if r.dapp != "" or r.sapp != "":
tupl = r.get_app_tuple()
if app_rules.has_key(tupl):
debug("Skipping found tuple '%s'" % (tupl))
continue
else:
app_rules[tupl] = True
count += 1
return count
def get_rule_by_number(self, num):
'''Return rule specified by number seen via "status numbered"'''
rules = self.get_rules()
count = 1
app_rules = {}
for r in rules:
tupl = ""
if r.dapp != "" or r.sapp != "":
tupl = r.get_app_tuple()
if app_rules.has_key(tupl):
debug("Skipping found tuple '%s'" % (tupl))
continue
else:
app_rules[tupl] = True
if count == int(num):
return r
count += 1
return None
def get_matching(self, rule):
'''See if there is a matching rule in the existing ruleset. Note this
does not group rules by tuples.'''
matched = []
count = 0
for r in self.get_rules():
count += 1
ret = rule.fuzzy_dst_match(r)
if ret < 1:
matched.append(count)
return matched
# API overrides
def set_default_policy(self, policy, direction):
'''Set default policy for specified direction'''
raise UFWError("UFWBackend.set_default_policy: need to override")
def get_running_raw(self, rules_type):
'''Get status of running firewall'''
raise UFWError("UFWBackend.get_running_raw: need to override")
def get_status(self, verbose, show_count):
'''Get managed rules'''
raise UFWError("UFWBackend.get_status: need to override")
def set_rule(self, rule, allow_reload):
'''Update firewall with rule'''
raise UFWError("UFWBackend.set_rule: need to override")
def start_firewall(self):
'''Start the firewall'''
raise UFWError("UFWBackend.start_firewall: need to override")
def stop_firewall(self):
'''Stop the firewall'''
raise UFWError("UFWBackend.stop_firewall: need to override")
def get_app_rules_from_system(self, template, v6):
        '''Get a list of rules based on template'''
raise UFWError("UFWBackend.get_app_rules_from_system: need to " + \
"override")
def update_logging(self, level):
'''Update loglevel of running firewall'''
raise UFWError("UFWBackend.update_logging: need to override")
def reset(self):
'''Reset the firewall'''
raise UFWError("UFWBackend.reset: need to override")
|
gpl-3.0
| 7,539,033,067,947,464,000
| 35.475336
| 93
| 0.489017
| false
| 4.356722
| false
| false
| false
|
craigderington/studentloan5
|
tests/engine.py
|
1
|
5088
|
from subprocess import call
from os import path
import hitchpostgres
import hitchselenium
import hitchpython
import hitchserve
import hitchredis
import hitchtest
import hitchsmtp
# Get directory above this file
PROJECT_DIRECTORY = path.abspath(path.join(path.dirname(__file__), '..'))
class ExecutionEngine(hitchtest.ExecutionEngine):
"""Engine for orchestating and interacting with the app."""
def set_up(self):
"""Ensure virtualenv present, then run all services."""
python_package = hitchpython.PythonPackage(
python_version=self.preconditions['python_version']
)
python_package.build()
python_package.verify()
call([
python_package.pip, "install", "-r",
path.join(PROJECT_DIRECTORY, "requirements/local.txt")
])
postgres_package = hitchpostgres.PostgresPackage(
version=self.settings["postgres_version"],
)
postgres_package.build()
postgres_package.verify()
redis_package = hitchredis.RedisPackage(version="2.8.4")
redis_package.build()
redis_package.verify()
self.services = hitchserve.ServiceBundle(
project_directory=PROJECT_DIRECTORY,
startup_timeout=float(self.settings["startup_timeout"]),
shutdown_timeout=5.0,
)
postgres_user = hitchpostgres.PostgresUser("studentloan5", "password")
self.services['Postgres'] = hitchpostgres.PostgresService(
postgres_package=postgres_package,
users=[postgres_user, ],
databases=[hitchpostgres.PostgresDatabase("studentloan5", postgres_user), ]
)
self.services['HitchSMTP'] = hitchsmtp.HitchSMTPService(port=1025)
self.services['Django'] = hitchpython.DjangoService(
python=python_package.python,
port=8000,
version=str(self.settings.get("django_version")),
settings="config.settings.local",
needs=[self.services['Postgres'], ],
env_vars=self.settings['environment_variables'],
)
self.services['Redis'] = hitchredis.RedisService(
redis_package=redis_package,
port=16379,
)
self.services['Firefox'] = hitchselenium.SeleniumService(
xvfb=self.settings.get("quiet", False),
no_libfaketime=True,
)
# import hitchcron
# self.services['Cron'] = hitchcron.CronService(
# run=self.services['Django'].manage("trigger").command,
# every=1,
# needs=[ self.services['Django'], ],
# )
self.services.startup(interactive=False)
# Configure selenium driver
self.driver = self.services['Firefox'].driver
self.driver.set_window_size(self.settings['window_size']['height'], self.settings['window_size']['width'])
self.driver.set_window_position(0, 0)
self.driver.implicitly_wait(2.0)
self.driver.accept_next_alert = True
def pause(self, message=None):
"""Stop. IPython time."""
if hasattr(self, 'services'):
self.services.start_interactive_mode()
self.ipython(message)
if hasattr(self, 'services'):
self.services.stop_interactive_mode()
def load_website(self):
"""Navigate to website in Firefox."""
self.driver.get(self.services['Django'].url())
def click(self, on):
"""Click on HTML id."""
self.driver.find_element_by_id(on).click()
def fill_form(self, **kwargs):
"""Fill in a form with id=value."""
for element, text in kwargs.items():
self.driver.find_element_by_id(element).send_keys(text)
def click_submit(self):
"""Click on a submit button if it exists."""
self.driver.find_element_by_css_selector("button[type=\"submit\"]").click()
def confirm_emails_sent(self, number):
"""Count number of emails sent by app."""
assert len(self.services['HitchSMTP'].logs.json()) == int(number)
def wait_for_email(self, containing=None):
"""Wait for, and return email."""
self.services['HitchSMTP'].logs.out.tail.until_json(
lambda email: containing in email['payload'] or containing in email['subject'],
timeout=25,
lines_back=1,
)
def time_travel(self, days=""):
"""Make all services think that time has skipped forward."""
self.services.time_travel(days=int(days))
def on_failure(self):
"""Stop and IPython."""
if not self.settings['quiet']:
if self.settings.get("pause_on_failure", False):
self.pause(message=self.stacktrace.to_template())
def on_success(self):
"""Pause on success if enabled."""
if self.settings.get("pause_on_success", False):
self.pause(message="SUCCESS")
def tear_down(self):
"""Shut down services required to run your test."""
if hasattr(self, 'services'):
self.services.shutdown()
|
bsd-3-clause
| -3,658,331,897,922,075,600
| 33.378378
| 114
| 0.612225
| false
| 4.076923
| false
| false
| false
|
opnsense/core
|
src/opnsense/scripts/netflow/lib/flowparser.py
|
1
|
8475
|
"""
Copyright (c) 2019 Ad Schellevis <ad@opnsense.org>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
flowd log parser
"""
import struct
import syslog
from socket import inet_ntop, AF_INET, AF_INET6, ntohl
class FlowParser:
# fields in order of appearance, use bitmask compare
field_definition_order = [
'tag',
'recv_time',
'proto_flags_tos',
'agent_addr4',
'agent_addr6',
'src_addr4',
'src_addr6',
'dst_addr4',
'dst_addr6',
'gateway_addr4',
'gateway_addr6',
'srcdst_port',
'packets',
'octets',
'if_indices',
'agent_info',
'flow_times',
'as_info',
'flow_engine_info'
]
# extract definition, integer values are read as rawdata (not parsed)
field_definition = {
'tag': 'I',
'recv_time': '>II',
'proto_flags_tos': 'BBBB',
'agent_addr4': 4,
'agent_addr6': 16,
'src_addr4': 4,
'src_addr6': 16,
'dst_addr4': 4,
'dst_addr6': 16,
'gateway_addr4': 4,
'gateway_addr6': 16,
'srcdst_port': '>HH',
'packets': '>Q',
'octets': '>Q',
'if_indices': '>II',
'agent_info': '>IIIHH',
'flow_times': '>II',
'as_info': 'IIBBH',
'flow_engine_info': 'HHII'
}
def __init__(self, filename, recv_stamp=None):
self._filename = filename
self._recv_stamp = recv_stamp
# cache formatter vs byte length
self._fmt_cache = dict()
# pre-calculate powers of 2
self._pow = dict()
for idx in range(len(self.field_definition_order)):
self._pow[idx] = pow(2, idx)
def calculate_size(self, fmt):
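        # Byte length of a struct format string, ignoring byte-order
        # characters, e.g. calculate_size('>IIIHH') == 4 + 4 + 4 + 2 + 2 == 16.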
if fmt not in self._fmt_cache:
fmts = {'B': 1, 'H': 2, 'I': 4, 'Q': 8}
self._fmt_cache[fmt] = 0
for key in fmt:
if key in fmts:
self._fmt_cache[fmt] += fmts[key]
return self._fmt_cache[fmt]
def _parse_binary(self, raw_data, data_fields):
""" parse binary record
:param raw_data: binary data record
:param data_fields: field bitmask, provided by header
:return: dict
"""
raw_data_idx = 0
raw_record = dict()
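        # Example: with data_fields == 0b11 only bits 0 and 1 are set, so only
        # 'tag' and 'recv_time' (the first two entries of
        # field_definition_order) are read from raw_data; everything else is
        # skipped.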
for idx in range(len(self.field_definition_order)):
if self._pow[idx] & data_fields:
fieldname = self.field_definition_order[idx]
if fieldname in self.field_definition:
if type(self.field_definition[fieldname]) is int:
fsize = self.field_definition[fieldname]
raw_record[fieldname] = raw_data[raw_data_idx:raw_data_idx + fsize]
else:
fsize = self.calculate_size(self.field_definition[fieldname])
try:
content = struct.unpack(
self.field_definition[fieldname],
raw_data[raw_data_idx:raw_data_idx + fsize]
)
raw_record[fieldname] = content[0] if len(content) == 1 else content
except struct.error as e:
# the flowd record doesn't appear to be as expected, log for now.
syslog.syslog(syslog.LOG_NOTICE, "flowparser failed to unpack %s (%s)" % (fieldname, e))
raw_data_idx += fsize
return raw_record
def __iter__(self):
""" iterate flowd log file
:return:
"""
# pre-compile address formatters to save time
with open(self._filename, 'rb') as flowh:
while True:
# header [version, len_words, reserved, fields]
hdata = flowh.read(8)
if hdata == b'':
break
header = struct.unpack('BBHI', hdata)
record = self._parse_binary(
raw_data=flowh.read(header[1] * 4),
data_fields=ntohl(header[3])
)
if 'recv_time' not in record or 'agent_info' not in record:
# XXX invalid (empty?) flow record.
continue
record['recv_sec'] = record['recv_time'][0]
if self._recv_stamp is not None and record['recv_sec'] < self._recv_stamp:
# self._recv_stamp can contain the last received timestamp, in which case
# we should not return older data. The exact timestamp will be returned, so the
# consumer knows it doesn't have to read other, older, flowd log files
continue
record['sys_uptime_ms'] = record['agent_info'][0]
record['netflow_ver'] = record['agent_info'][3]
record['recv'] = record['recv_sec']
record['recv_usec'] = record['recv_time'][1]
record['if_ndx_in'] = -1
record['if_ndx_out'] = -1
record['src_port'] = 0
record['dst_port'] = 0
record['protocol'] = 0
if 'proto_flags_tos' in record:
record['tcp_flags'] = record['proto_flags_tos'][0]
record['protocol'] = record['proto_flags_tos'][1]
record['tos'] = record['proto_flags_tos'][2]
if 'flow_times' in record:
record['flow_start'] = record['flow_times'][0]
record['flow_finish'] = record['flow_times'][1]
else:
record['flow_start'] = record['sys_uptime_ms']
record['flow_finish'] = record['sys_uptime_ms']
if 'if_indices' in record:
record['if_ndx_in'] = record['if_indices'][0]
record['if_ndx_out'] = record['if_indices'][1]
if 'srcdst_port' in record:
record['src_port'] = record['srcdst_port'][0]
record['dst_port'] = record['srcdst_port'][1]
# concat ipv4/v6 fields into field without [4,6]
for key in self.field_definition_order:
if key in record:
if key[-1] == '4':
record[key[:-1]] = inet_ntop(AF_INET, record[key])
elif key[-1] == '6':
record[key[:-1]] = inet_ntop(AF_INET6, record[key])
# calculated values
record['flow_end'] = record['recv_sec'] - (record['sys_uptime_ms'] - record['flow_finish']) / 1000.0
record['duration_ms'] = (record['flow_finish'] - record['flow_start'])
record['flow_start'] = record['flow_end'] - record['duration_ms'] / 1000.0
if 'packets' not in record or 'octets' not in record or 'src_addr' not in record or 'dst_addr' not in record:
# this can't be useful data, skip record
continue
yield record
|
bsd-2-clause
| 991,364,815,688,320,300
| 41.80303
| 125
| 0.520354
| false
| 4.2375
| false
| false
| false
|
paulbersch/django-locus
|
locus/utils/location.py
|
1
|
1360
|
import math
# add back later
# import GeoIP
nauticalMilePerLat = 60.00721
nauticalMilePerLongitude = 60.10793
rad = math.pi / 180.0
milesPerNauticalMile = 1.15078
def calcDistance(lat1, lon1, lat2, lon2):
"""
    Calculate the distance between two lat/lon points; computed in nautical miles, returned in statute miles
"""
lat1 = float(lat1)
lat2 = float(lat2)
lon1 = float(lon1)
lon2 = float(lon2)
yDistance = (lat2 - lat1) * nauticalMilePerLat
xDistance = (math.cos(lat1 * rad) + math.cos(lat2 * rad)) * (lon2 - lon1) * (nauticalMilePerLongitude / 2)
distance = math.sqrt( yDistance**2 + xDistance**2 )
return distance * milesPerNauticalMile
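# Rough sanity check for calcDistance above: one degree of latitude at a fixed
# longitude is 60.00721 NM * 1.15078 mi/NM, so calcDistance(40, -87, 41, -87)
# returns roughly 69 miles (coordinates chosen arbitrarily).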
def milesBox( lat, lon, radius ):
"""
    Returns two lat/lon pairs as (lat1, lon1, lat2, lon2) which define a box that exactly surrounds
a circle of radius of the given amount in miles.
"""
# this gives us a tuple of values that can easily be used to get a list of "possibly close"
# dealers. then we use the calcDistance function to check if it's ACTUALLY within the radius.
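    # Example (values approximate): milesBox(40.0, -87.0, 10) spans about
    # +/- 0.145 degrees of latitude and +/- 0.189 degrees of longitude around
    # the given point.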
latRange = radius / ( milesPerNauticalMile * 60.0 )
lonRange = radius / ( math.cos(lat * rad) * milesPerNauticalMile * 60.0)
return ( lat - latRange, lon - lonRange, lat + latRange, lon + lonRange )
def revLookup(ip):
return False
"""
gi = GeoIP.open("/usr/local/share/GeoIP/GeoLiteCity.dat",GeoIP.GEOIP_STANDARD)
return gi.record_by_addr(ip)
"""
|
mit
| 4,176,555,150,693,318,000
| 28.565217
| 107
| 0.696324
| false
| 2.792608
| false
| false
| false
|
MinoMino/minqlx
|
python/minqlx/_handlers.py
|
1
|
18535
|
# minqlx - Extends Quake Live's dedicated server with extra functionality and scripting.
# Copyright (C) 2015 Mino <mino@minomino.org>
# This file is part of minqlx.
# minqlx is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# minqlx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with minqlx. If not, see <http://www.gnu.org/licenses/>.
import minqlx
import collections
import sched
import re
# ====================================================================
# REGULAR EXPRESSIONS
# ====================================================================
_re_say = re.compile(r"^say +\"?(?P<msg>.+)\"?$", flags=re.IGNORECASE)
_re_say_team = re.compile(r"^say_team +\"?(?P<msg>.+)\"?$", flags=re.IGNORECASE)
_re_callvote = re.compile(r"^(?:cv|callvote) +(?P<cmd>[^ ]+)(?: \"?(?P<args>.+?)\"?)?$", flags=re.IGNORECASE)
_re_vote = re.compile(r"^vote +(?P<arg>.)", flags=re.IGNORECASE)
_re_team = re.compile(r"^team +(?P<arg>.)", flags=re.IGNORECASE)
_re_vote_ended = re.compile(r"^print \"Vote (?P<result>passed|failed).\n\"$")
_re_userinfo = re.compile(r"^userinfo \"(?P<vars>.+)\"$")
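# For illustration: a client command such as 'callvote map campgrounds' is
# matched by _re_callvote with cmd == 'map' and args == 'campgrounds', and
# 'vote y' is matched by _re_vote with arg == 'y'.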
# ====================================================================
# LOW-LEVEL HANDLERS
# These are all called by the C code, not within Python.
# ====================================================================
def handle_rcon(cmd):
"""Console commands that are to be processed as regular pyminqlx
    commands as if the owner had executed them. This allows the owner to
interact with the Python part of minqlx without having to connect.
"""
try:
minqlx.COMMANDS.handle_input(minqlx.RconDummyPlayer(), cmd, minqlx.CONSOLE_CHANNEL)
except:
minqlx.log_exception()
return True
def handle_client_command(client_id, cmd):
"""Client commands are commands such as "say", "say_team", "scores",
"disconnect" and so on. This function parses those and passes it
on to the event dispatcher.
:param client_id: The client identifier.
:type client_id: int
:param cmd: The command being ran by the client.
:type cmd: str
"""
try:
# Dispatch the "client_command" event before further processing.
player = minqlx.Player(client_id)
retval = minqlx.EVENT_DISPATCHERS["client_command"].dispatch(player, cmd)
if retval is False:
return False
elif isinstance(retval, str):
# Allow plugins to modify the command before passing it on.
cmd = retval
res = _re_say.match(cmd)
if res:
msg = res.group("msg").replace("\"", "")
channel = minqlx.CHAT_CHANNEL
if minqlx.EVENT_DISPATCHERS["chat"].dispatch(player, msg, channel) is False:
return False
return cmd
res = _re_say_team.match(cmd)
if res:
msg = res.group("msg").replace("\"", "")
if player.team == "free": # I haven't tried this, but I don't think it's even possible.
channel = minqlx.FREE_CHAT_CHANNEL
elif player.team == "red":
channel = minqlx.RED_TEAM_CHAT_CHANNEL
elif player.team == "blue":
channel = minqlx.BLUE_TEAM_CHAT_CHANNEL
else:
channel = minqlx.SPECTATOR_CHAT_CHANNEL
if minqlx.EVENT_DISPATCHERS["chat"].dispatch(player, msg, channel) is False:
return False
return cmd
res = _re_callvote.match(cmd)
if res and not minqlx.Plugin.is_vote_active():
vote = res.group("cmd")
args = res.group("args") if res.group("args") else ""
# Set the caller for vote_started in case the vote goes through.
minqlx.EVENT_DISPATCHERS["vote_started"].caller(player)
if minqlx.EVENT_DISPATCHERS["vote_called"].dispatch(player, vote, args) is False:
return False
return cmd
res = _re_vote.match(cmd)
if res and minqlx.Plugin.is_vote_active():
arg = res.group("arg").lower()
if arg == "y" or arg == "1":
if minqlx.EVENT_DISPATCHERS["vote"].dispatch(player, True) is False:
return False
elif arg == "n" or arg == "2":
if minqlx.EVENT_DISPATCHERS["vote"].dispatch(player, False) is False:
return False
return cmd
res = _re_team.match(cmd)
if res:
arg = res.group("arg").lower()
target_team = ""
if arg == player.team[0]:
# Don't trigger if player is joining the same team.
return cmd
elif arg == "f":
target_team = "free"
elif arg == "r":
target_team = "red"
elif arg == "b":
target_team = "blue"
elif arg == "s":
target_team = "spectator"
elif arg == "a":
target_team = "any"
if target_team:
if minqlx.EVENT_DISPATCHERS["team_switch_attempt"].dispatch(player, player.team, target_team) is False:
return False
return cmd
res = _re_userinfo.match(cmd)
if res:
new_info = minqlx.parse_variables(res.group("vars"), ordered=True)
old_info = player.cvars
changed = {}
for key in new_info:
if key not in old_info or (key in old_info and new_info[key] != old_info[key]):
changed[key] = new_info[key]
if changed:
ret = minqlx.EVENT_DISPATCHERS["userinfo"].dispatch(player, changed)
if ret is False:
return False
elif isinstance(ret, dict):
for key in ret:
new_info[key] = ret[key]
cmd = "userinfo \"{}\"".format("".join(["\\{}\\{}".format(key, new_info[key]) for key in new_info]))
return cmd
except:
minqlx.log_exception()
return True
def handle_server_command(client_id, cmd):
try:
# Dispatch the "server_command" event before further processing.
try:
player = minqlx.Player(client_id) if client_id >= 0 else None
except minqlx.NonexistentPlayerError:
return True
retval = minqlx.EVENT_DISPATCHERS["server_command"].dispatch(player, cmd)
if retval is False:
return False
elif isinstance(retval, str):
cmd = retval
res = _re_vote_ended.match(cmd)
if res:
if res.group("result") == "passed":
minqlx.EVENT_DISPATCHERS["vote_ended"].dispatch(True)
else:
minqlx.EVENT_DISPATCHERS["vote_ended"].dispatch(False)
return cmd
except:
minqlx.log_exception()
return True
# Executing tasks right before a frame, by the main thread, will often be desirable to avoid
# weird behavior if you were to use threading. This list will act as a task queue.
# Tasks can be added by simply adding the @minqlx.next_frame decorator to functions.
frame_tasks = sched.scheduler()
next_frame_tasks = collections.deque()
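# Illustrative use of the decorator mentioned above (the queued call is drained
# by handle_frame below); console_command is used the same way elsewhere in
# this module:
#   @minqlx.next_frame
#   def delayed_echo(msg):
#       minqlx.console_command("echo {}".format(msg))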
def handle_frame():
"""This will be called every frame. To allow threads to call stuff from the
main thread, tasks can be scheduled using the :func:`minqlx.next_frame` decorator
and have it be executed here.
"""
while True:
# This will run all tasks that are currently scheduled.
# If one of the tasks throw an exception, it'll log it
# and continue execution of the next tasks if any.
try:
frame_tasks.run(blocking=False)
break
except:
minqlx.log_exception()
continue
try:
minqlx.EVENT_DISPATCHERS["frame"].dispatch()
except:
minqlx.log_exception()
return True
try:
while True:
func, args, kwargs = next_frame_tasks.popleft()
frame_tasks.enter(0, 0, func, args, kwargs)
except IndexError:
pass
_zmq_warning_issued = False
_first_game = True
_ad_round_number = 0
def handle_new_game(is_restart):
# This is called early in the launch process, so it's a good place to initialize
# minqlx stuff that needs QLDS to be initialized.
global _first_game
if _first_game:
minqlx.late_init()
_first_game = False
# A good place to warn the owner if ZMQ stats are disabled.
global _zmq_warning_issued
if not bool(int(minqlx.get_cvar("zmq_stats_enable"))) and not _zmq_warning_issued:
logger = minqlx.get_logger()
logger.warning("Some events will not work because ZMQ stats is not enabled. "
"Launch the server with \"zmq_stats_enable 1\"")
_zmq_warning_issued = True
minqlx.set_map_subtitles()
if not is_restart:
try:
minqlx.EVENT_DISPATCHERS["map"].dispatch(
minqlx.get_cvar("mapname"),
minqlx.get_cvar("g_factory"))
except:
minqlx.log_exception()
return True
try:
minqlx.EVENT_DISPATCHERS["new_game"].dispatch()
except:
minqlx.log_exception()
return True
def handle_set_configstring(index, value):
"""Called whenever the server tries to set a configstring. Can return
False to stop the event.
"""
global _ad_round_number
try:
res = minqlx.EVENT_DISPATCHERS["set_configstring"].dispatch(index, value)
if res is False:
return False
elif isinstance(res, str):
value = res
# VOTES
if index == 9 and value:
cmd = value.split()
vote = cmd[0] if cmd else ""
args = " ".join(cmd[1:]) if len(cmd) > 1 else ""
minqlx.EVENT_DISPATCHERS["vote_started"].dispatch(vote, args)
return
# GAME STATE CHANGES
elif index == 0:
old_cs = minqlx.parse_variables(minqlx.get_configstring(index))
if not old_cs:
return
new_cs = minqlx.parse_variables(value)
old_state = old_cs["g_gameState"]
new_state = new_cs["g_gameState"]
if old_state != new_state:
if old_state == "PRE_GAME" and new_state == "IN_PROGRESS":
pass
elif old_state == "PRE_GAME" and new_state == "COUNT_DOWN":
_ad_round_number = 1
minqlx.EVENT_DISPATCHERS["game_countdown"].dispatch()
elif old_state == "COUNT_DOWN" and new_state == "IN_PROGRESS":
pass
#minqlx.EVENT_DISPATCHERS["game_start"].dispatch()
elif old_state == "IN_PROGRESS" and new_state == "PRE_GAME":
pass
elif old_state == "COUNT_DOWN" and new_state == "PRE_GAME":
pass
else:
logger = minqlx.get_logger()
logger.warning("UNKNOWN GAME STATES: {} - {}".format(old_state, new_state))
# ROUND COUNTDOWN AND START
elif index == 661:
cvars = minqlx.parse_variables(value)
if cvars:
if "turn" in cvars:
# it is A&D
if int(cvars["state"]) == 0:
return
# round cvar appears only on round countdown
# and first round is 0, not 1
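                    # e.g. round=0/turn=0 -> 1, round=0/turn=1 -> 2,
                    # round=1/turn=0 -> 3 with the formula below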
try:
round_number = int(cvars["round"]) * 2 + 1 + int(cvars["turn"])
_ad_round_number = round_number
except KeyError:
round_number = _ad_round_number
else:
# it is CA
round_number = int(cvars["round"])
if round_number and "time" in cvars:
minqlx.EVENT_DISPATCHERS["round_countdown"].dispatch(round_number)
return
elif round_number:
minqlx.EVENT_DISPATCHERS["round_start"].dispatch(round_number)
return
return res
except:
minqlx.log_exception()
return True
def handle_player_connect(client_id, is_bot):
"""This will be called whenever a player tries to connect. If the dispatcher
returns False, it will not allow the player to connect and instead show them
a message explaining why. The default message is "You are banned from this
server.", but it can be set with :func:`minqlx.set_ban_message`.
:param client_id: The client identifier.
:type client_id: int
:param is_bot: Whether or not the player is a bot.
:type is_bot: bool
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["player_connect"].dispatch(player)
except:
minqlx.log_exception()
return True
def handle_player_loaded(client_id):
"""This will be called whenever a player has connected and finished loading,
meaning it'll go off a bit later than the usual "X connected" messages.
This will not trigger on bots.
:param client_id: The client identifier.
:type client_id: int
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["player_loaded"].dispatch(player)
except:
minqlx.log_exception()
return True
def handle_player_disconnect(client_id, reason):
"""This will be called whenever a player disconnects.
:param client_id: The client identifier.
:type client_id: int
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["player_disconnect"].dispatch(player, reason)
except:
minqlx.log_exception()
return True
def handle_player_spawn(client_id):
    """Called when a player spawns. Note that a spectator going into free spectate mode
makes the client spawn, so you'll want to check for that if you only want "actual"
spawns.
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["player_spawn"].dispatch(player)
except:
minqlx.log_exception()
return True
def handle_kamikaze_use(client_id):
    """This will be called whenever a player uses the kamikaze item.
:param client_id: The client identifier.
:type client_id: int
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["kamikaze_use"].dispatch(player)
except:
minqlx.log_exception()
return True
def handle_kamikaze_explode(client_id, is_used_on_demand):
    """This will be called whenever a kamikaze explodes.
:param client_id: The client identifier.
:type client_id: int
:param is_used_on_demand: Non-zero if kamikaze is used on demand.
:type is_used_on_demand: int
"""
try:
player = minqlx.Player(client_id)
return minqlx.EVENT_DISPATCHERS["kamikaze_explode"].dispatch(player, True if is_used_on_demand else False)
except:
minqlx.log_exception()
return True
def handle_console_print(text):
"""Called whenever the server prints something to the console and when rcon is used."""
try:
if not text:
return
# Log console output. Removes the need to have stdout logs in addition to minqlx.log.
minqlx.get_logger().debug(text.rstrip("\n"))
res = minqlx.EVENT_DISPATCHERS["console_print"].dispatch(text)
if res is False:
return False
if _print_redirection:
global _print_buffer
_print_buffer += text
if isinstance(res, str):
return res
return text
except:
minqlx.log_exception()
return True
_print_redirection = None
_print_buffer = ""
def redirect_print(channel):
"""Redirects print output to a channel. Useful for commands that execute console commands
and want to redirect the output to the channel instead of letting it go to the console.
To use it, use the return value with the "with" statement.
.. code-block:: python
def cmd_echo(self, player, msg, channel):
with minqlx.redirect_print(channel):
minqlx.console_command("echo {}".format(" ".join(msg)))
"""
class PrintRedirector:
def __init__(self, channel):
if not isinstance(channel, minqlx.AbstractChannel):
raise ValueError("The redirection channel must be an instance of minqlx.AbstractChannel.")
self.channel = channel
def __enter__(self):
global _print_redirection
_print_redirection = self.channel
def __exit__(self, exc_type, exc_val, exc_tb):
global _print_redirection
self.flush()
_print_redirection = None
def flush(self):
global _print_buffer
self.channel.reply(_print_buffer)
_print_buffer = ""
return PrintRedirector(channel)
def register_handlers():
minqlx.register_handler("rcon", handle_rcon)
minqlx.register_handler("client_command", handle_client_command)
minqlx.register_handler("server_command", handle_server_command)
minqlx.register_handler("frame", handle_frame)
minqlx.register_handler("new_game", handle_new_game)
minqlx.register_handler("set_configstring", handle_set_configstring)
minqlx.register_handler("player_connect", handle_player_connect)
minqlx.register_handler("player_loaded", handle_player_loaded)
minqlx.register_handler("player_disconnect", handle_player_disconnect)
minqlx.register_handler("player_spawn", handle_player_spawn)
minqlx.register_handler("console_print", handle_console_print)
minqlx.register_handler("kamikaze_use", handle_kamikaze_use)
minqlx.register_handler("kamikaze_explode", handle_kamikaze_explode)
|
gpl-3.0
| 3,269,673,672,761,068,500
| 35.201172
| 120
| 0.580523
| false
| 3.98003
| true
| false
| false
|
mediatum/mediatum
|
utils/hash.py
|
1
|
1599
|
"""
mediatum - a multimedia content repository
Copyright (C) 2007 Arne Seifert <seiferta@in.tum.de>
Copyright (C) 2007 Matthias Kramm <kramm@in.tum.de>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import os
import hashlib
from core import db
q = db.query
logg = logging.getLogger(__name__)
def calcChecksum(filename, method):
if os.path.exists(filename):
        f = open(filename, 'rb')
if method == "SHA-1":
h = hashlib.sha1()
else:
h = hashlib.new('ripemd160')
h.update(f.read())
f.close()
return h.hexdigest()
else:
return ""
def calcChecksumFromMetadata(node):
h = hashlib.sha1()
h.update(str(node.id)) # h.update requires string or buffer as argument
h.update(node.getName())
def attributesToString(node):
string = ""
for item in node.attrs.items():
string += item[0] + item[1]
return string
h.update(attributesToString(node))
return h.hexdigest()
|
gpl-3.0
| -1,159,093,611,323,354,600
| 27.052632
| 76
| 0.676673
| false
| 3.853012
| false
| false
| false
|
baseclue/django-rest-test
|
tests/test_compare.py
|
1
|
15578
|
import unittest
from rest_test import compare
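# These tests exercise compare(data, expected). As the cases below show,
# Ellipsis (...) acts as a wildcard: as a dict value it matches any value for
# that key, an {...: ...} entry matches any remaining items, inside a list it
# matches any (possibly empty) run of elements, and a bare type such as dict
# or list matches any instance of that type.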
class DictTestCase(unittest.TestCase):
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b='2',
a=1
)
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=1
)
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=dict(
a='test'
),
c=''
)
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=dict(
b=1
),
c=''
)
)
self.assertFalse(compare(data, expected_data))
class ItemEllipsisTestCase(unittest.TestCase):
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b='2',
a=...
)
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=...
)
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=...,
c=''
)
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=3,
b=...,
c=''
)
)
self.assertFalse(compare(data, expected_data))
def test_missing_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
a=...
)
self.assertFalse(compare(data, expected_data))
def test_moreover_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = dict(
b=2,
a=...,
c='test'
)
self.assertFalse(compare(data, expected_data))
def test_missing_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=2,
b=...,
)
)
self.assertFalse(compare(data, expected_data))
def test_moreover_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b=dict(
a=3,
b=...,
c='',
d='test'
)
)
self.assertFalse(compare(data, expected_data))
class DictEllipsisTestCase(unittest.TestCase):
def test_empty(self):
data = dict(
)
expected_data = {
...: ...
}
assert compare(data, expected_data)
def test_basic(self):
data = dict(
a=1,
b='2'
)
expected_data = {
...: ...
}
assert compare(data, expected_data)
def test_basic_more(self):
data = {
'a': 1,
'b': '2',
'c': 3
}
expected_data = {
...: ...,
'b': '2'
}
assert compare(data, expected_data)
def test_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = {
'b': 2,
...: ...
}
self.assertFalse(compare(data, expected_data))
def test_deep(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 2,
...: ...,
'c': ''
}
)
assert compare(data, expected_data)
def test_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 3,
...: ...,
'c': ''
}
)
self.assertFalse(compare(data, expected_data))
def test_moreover_basic_false(self):
data = dict(
a=1,
b='2'
)
expected_data = {
'b': 2,
...: ...,
'c': 'test'
}
self.assertFalse(compare(data, expected_data))
def test_missing_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 2,
...: ...
}
)
assert compare(data, expected_data)
def test_moreover_deep_false(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = dict(
a=1,
b={
'a': 3,
...: ...,
'c': '',
'd': 'test'
}
)
self.assertFalse(compare(data, expected_data))
def test_bad_usage(self):
data = dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
expected_data = {
'a': 1,
...: dict(
b=dict(
a='test'
),
a=2,
c=''
)
}
with self.assertRaises(TypeError):
compare(data, expected_data)
class ListTestCase(unittest.TestCase):
def test_basic(self):
data = [
1,
'2'
]
expected_data = [
1,
'2'
]
assert compare(data, expected_data)
def test_basic_false(self):
data = [
1,
2
]
expected_data = [
2,
1
]
self.assertFalse(compare(data, expected_data))
def test_combination(self):
data = [
dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
),
dict(
a=2,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
]
expected_data = [
dict(
a=1,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
),
dict(
a=2,
b=dict(
b=dict(
a='test'
),
a=2,
c=''
)
)
]
assert compare(data, expected_data)
class ListEllipsisTestCase(unittest.TestCase):
def test_empty(self):
data = [
'1',
{},
3
]
expected_data = [
...
]
assert compare(data, expected_data)
def test_start(self):
data = [
'1',
{},
3
]
expected_data = [
...,
3
]
assert compare(data, expected_data)
def test_multiple(self):
data = [
'1',
2,
3,
'4',
5
]
expected_data = [
...,
2,
...
]
assert compare(data, expected_data)
def test_end(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
1,
...
]
assert compare(data, expected_data)
def test_multiple_in(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
2,
...,
5,
...
]
assert compare(data, expected_data)
def test_start_false(self):
data = [
1,
2,
3
]
expected_data = [
...,
4
]
self.assertFalse(compare(data, expected_data))
def test_multiple_false(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
...,
6,
...
]
self.assertFalse(compare(data, expected_data))
def test_end_false(self):
data = [
1,
2,
3,
4,
5
]
expected_data = [
2,
...
]
self.assertFalse(compare(data, expected_data))
def test_multiple_in_optional(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
2,
...,
3,
...
]
assert compare(data, expected_data)
def test_multiple_in_optional_between(self):
data = [
2,
3,
]
expected_data = [
...,
2,
...,
3,
...
]
assert compare(data, expected_data)
def test_bad_usage(self):
data = [
1,
2,
3,
4,
5,
6,
7
]
expected_data = [
...,
...,
7
]
with self.assertRaises(TypeError):
compare(data, expected_data)
def test_one(self):
data = [1]
expected_data = [..., 1, ...]
assert compare(data, expected_data)
class CombinationEllipsisTestCase(unittest.TestCase):
def test_combination(self):
data = [
{
'foo': 1,
'bar': 2,
'zoo': 3,
}
]
expected_data = [
...,
{
...: ...,
'bar': 2
},
...
]
assert compare(data, expected_data)
def test_combination_empty(self):
data = [
{
}
]
expected_data = [
...,
{
...: ...,
},
...
]
assert compare(data, expected_data)
class TypeTestCase(unittest.TestCase):
def test_list(self):
data = [
'1',
{},
3
]
expected_data = list
assert compare(data, expected_data)
def test_dict(self):
data = {
'1': 2,
2: 3,
3: 2
}
expected_data = dict
assert compare(data, expected_data)
def test_list_with_dict(self):
data = [
'1',
{'test': 'test_value'},
3
]
expected_data = [
'1',
dict,
3
]
assert compare(data, expected_data)
def test_dict_with_list(self):
data = {
'1': 2,
'test_key': [1, 2, 'u'],
3: 2
}
expected_data = {
'1': 2,
'test_key': list,
3: 2
}
assert compare(data, expected_data)
def test_different_types_in_list(self):
data = [
'1',
{},
3
]
expected_data = [
str,
dict,
int
]
assert compare(data, expected_data)
def test_different_types_in_dict(self):
data = {
'1': 2,
2: 'test',
3: [1, 2, 3]
}
expected_data = {
'1': int,
2: str,
3: list
}
assert compare(data, expected_data)
def test_different_types_in_dict_in_deep(self):
data = [
'1',
{
'1': 2,
2: 'test',
3: [1, 2, 3]
},
3
]
expected_data = [
'1',
{
'1': int,
2: str,
3: list
},
3
]
assert compare(data, expected_data)
class CombinationTypeEllipsisTestCase(unittest.TestCase):
def test_combination(self):
data = [
{
'foo': 1,
'bar': 2,
'zoo': 3,
},
{
'test_foo': '1',
'test_bar': 2,
'test_zoo': [1, 2, 3],
},
]
expected_data = [
...,
{
...: ...,
'bar': int
},
...,
{
'test_foo': str,
'test_bar': 2,
'test_zoo': list,
}
]
assert compare(data, expected_data)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 7,426,249,069,602,658,000
| 18.399751
| 57
| 0.29991
| false
| 4.53376
| true
| false
| false
|
apeyrard/sjtu-work
|
DIP/exercises/ex3/ex3.py
|
1
|
3652
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import sys
from PIL import Image
import numpy as np
import math
import argparse
def getMatrix(image):
data = list(image.getdata())
width, height = image.size
matrix = np.array(data).reshape(height,width)
return matrix
def getData(matrix):
data = list(matrix.reshape(matrix.shape[0]*matrix.shape[1]))
return data
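# Multiplying f(x, y) by (-1)**(x + y) before the forward FFT (and again after
# the inverse FFT, via postprocessing) centres the zero-frequency component of
# the spectrum, which the distance-from-centre filters below rely on.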
def preprocessing(matrix):
newMat = matrix.copy()
for y in range(newMat.shape[1]):
for x in range(newMat.shape[0]):
newMat[x][y] = newMat[x][y]*(-1)**(x+y)
return newMat
def postprocessing(matrix):
return preprocessing(matrix)
def ideal(matrix, cutoff, function):
newMat = matrix.copy()
center = (math.floor(newMat.shape[0]/2), math.floor(newMat.shape[1]/2))
for y in range(newMat.shape[1]):
for x in range(newMat.shape[0]):
dist = math.sqrt((x-center[0])**2+(y-center[1])**2)
if function == 'low':
if dist > cutoff:
newMat[x][y] = 0+0j
if function == 'high':
if dist < cutoff:
newMat[x][y] = 0+0j
return newMat
def butter(matrix, order, cutoff, function):
if order is None:
print("Order must be specified for butterworth filter")
sys.exit(1)
newMat = matrix.copy()
center = (math.floor(newMat.shape[0]/2), math.floor(newMat.shape[1]/2))
for y in range(newMat.shape[1]):
for x in range(newMat.shape[0]):
dist = math.sqrt((x-center[0])**2+(y-center[1])**2)
if function == 'low':
newMat[x][y] = newMat[x][y] * (1/(1+(dist/cutoff)**(2*order)))
if function == 'high':
newMat[x][y] = newMat[x][y] * (1-(1/(1+(dist/cutoff)**(2*order))))
return newMat
def gauss(matrix, cutoff, function):
newMat = matrix.copy()
center = (math.floor(newMat.shape[0]/2), math.floor(newMat.shape[1]/2))
for y in range(newMat.shape[1]):
for x in range(newMat.shape[0]):
dist = math.sqrt((x-center[0])**2+(y-center[1])**2)
if function == 'low':
newMat[x][y] = newMat[x][y] * (math.exp(-(dist**2)/(2*(cutoff**2))))
if function == 'high':
newMat[x][y] = newMat[x][y] * (1- (math.exp(-(dist**2)/(2*(cutoff**2)))))
return newMat
parser = argparse.ArgumentParser(description='Filtering in frequency domain')
parser.add_argument('--ideal', action='store_true')
parser.add_argument('--butterworth', action='store_true')
parser.add_argument('--gaussian', action='store_true')
parser.add_argument('--highpass', action='store_true')
parser.add_argument('--lowpass', action='store_true')
parser.add_argument('cutoff', type=float)
parser.add_argument('--order', type=float)
parser.add_argument('image')
args = parser.parse_args()
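# Example invocation (hypothetical file name; expects a greyscale input image):
#   python3 ex3.py --gaussian --lowpass 30 lena.tif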
try:
with Image.open(args.image) as im:
if args.lowpass:
filtering = 'low'
else:
filtering = 'high'
imNew = Image.new(im.mode, im.size)
matrix = getMatrix(im)
prepMat = preprocessing(matrix)
fourierMat = np.fft.fft2(prepMat)
if args.ideal:
imageF = ideal(fourierMat, args.cutoff, filtering)
elif args.butterworth:
imageF = butter(fourierMat, args.order, args.cutoff, filtering)
else:
imageF = gauss(fourierMat, args.cutoff, filtering)
newImage = np.fft.ifft2(imageF)
postNew = postprocessing(newImage)
imNew.putdata(getData(postNew))
imNew.show()
except FileNotFoundError as e:
sys.exit("Error : file not found")
|
mit
| 3,300,094,426,064,420,000
| 31.035088
| 89
| 0.589266
| false
| 3.275336
| false
| false
| false
|
luk156/minimo
|
minimo/documento/migrations/0004_auto__add_unitamisura__add_field_riga_unita.py
|
1
|
10331
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UnitaMisura'
db.create_table(u'documento_unitamisura', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('nome', self.gf('django.db.models.fields.CharField')(default='Numero', max_length=30)),
('sigla', self.gf('django.db.models.fields.CharField')(default='N', max_length=4)),
('stato', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.send_create_signal(u'documento', ['UnitaMisura'])
# Adding field 'Riga.unita'
db.add_column(u'documento_riga', 'unita',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['documento.UnitaMisura'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting model 'UnitaMisura'
db.delete_table(u'documento_unitamisura')
# Deleting field 'Riga.unita'
db.delete_column(u'documento_riga', 'unita_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'documento.documento': {
'Meta': {'ordering': "['data']", 'object_name': 'Documento'},
'bollo': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'cap': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'citta': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
'cod_fiscale': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.DateField', [], {}),
'data_consegna': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'descrizione_ritenuta': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importo_residuo': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'numero': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'p_iva': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'pagamento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['documento.Pagamento']", 'null': 'True', 'blank': 'True'}),
'provincia': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'ragione_sociale': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
'riferimento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['documento.Documento']", 'null': 'True', 'blank': 'True'}),
'ritenuta': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'sconto': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'stato': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documento_template'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['template.TemplateDocumento']"}),
'tipo': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'valore_bollo': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'via': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'})
},
u'documento.pagamento': {
'Meta': {'object_name': 'Pagamento'},
'giorni': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'iban': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intestazione': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
'istituto': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
'nome': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'stato': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'documento.riga': {
'Meta': {'object_name': 'Riga'},
'codice': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '70', 'null': 'True', 'blank': 'True'}),
'descrizione': ('django.db.models.fields.TextField', [], {}),
'descrizione_imposta': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '70', 'null': 'True', 'blank': 'True'}),
'documento': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['documento.Documento']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'importo_unitario': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'imposta': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'quantita': ('django.db.models.fields.FloatField', [], {}),
'unita': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['documento.UnitaMisura']", 'null': 'True', 'blank': 'True'})
},
u'documento.unitamisura': {
'Meta': {'object_name': 'UnitaMisura'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nome': ('django.db.models.fields.CharField', [], {'default': "'Numero'", 'max_length': '30'}),
'sigla': ('django.db.models.fields.CharField', [], {'default': "'N'", 'max_length': '4'}),
'stato': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'template.templatedocumento': {
'Meta': {'object_name': 'TemplateDocumento'},
'descrizione': ('django.db.models.fields.CharField', [], {'max_length': '70', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nome': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '70'}),
'template': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['documento']
|
gpl-2.0
| 823,570,124,755,032,000
| 74.416058
| 209
| 0.55106
| false
| 3.467942
| false
| false
| false
|
fzimmermann89/pyload
|
module/plugins/hoster/FastixRu.py
|
1
|
1366
|
# -*- coding: utf-8 -*-
import re
import urllib
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
from module.plugins.internal.utils import json
class FastixRu(MultiHoster):
__name__ = "FastixRu"
__type__ = "hoster"
__version__ = "0.17"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?fastix\.(ru|it)/file/\w{24}'
__config__ = [("activated", "bool", "Activated", True),
("use_premium" , "bool", "Use premium account if available" , True),
("revertfailed", "bool", "Revert to standard download if fails", True)]
__description__ = """Fastix multi-hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Massimo Rosamilia", "max@spiritix.eu")]
def setup(self):
self.chunk_limit = 3
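    # handle_premium() resolves pyfile.url through the Fastix "getdirectlink" API
    # call; the JSON answer either flags an error (file offline) or carries the
    # direct 'downloadlink' used for the actual download.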
def handle_premium(self, pyfile):
self.html = self.load("http://fastix.ru/api_v2/",
get={'apikey': self.account.get_data('apikey'),
'sub' : "getdirectlink",
'link' : pyfile.url})
data = json.loads(self.html)
self.log_debug("Json data", data)
if "error\":true" in self.html:
self.offline()
else:
self.link = data['downloadlink']
getInfo = create_getInfo(FastixRu)
|
gpl-3.0
| 3,599,834,605,401,120,300
| 29.355556
| 90
| 0.533675
| false
| 3.520619
| false
| false
| false
|
Purg/kwiver
|
vital/bindings/python/vital/types/landmark_map.py
|
1
|
4829
|
"""
ckwg +31
Copyright 2016 by Kitware, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither name of Kitware, Inc. nor the names of any contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
==============================================================================
vital::landmark_map interface
"""
import ctypes
from vital.types import Landmark
from vital.util import VitalObject, free_void_ptr
class LandmarkMap (VitalObject):
@classmethod
def from_dict(cls, id_lm_d):
"""
Create a new instance of LandmarkMap using the given dictionary mapping
integer IDs to Landmark instances.
:param id_lm_d: dictionary mapping integer IDs to Landmark instances
:type id_lm_d: dict[int|long, vital.types.Landmark]
:return: New landmark map instance containing a copy of the input map.
:rtype: LandmarkMap
"""
s = len(id_lm_d)
t_lm_ids = (ctypes.c_int64 * s)
t_lm_landmarks = (Landmark.c_ptr_type() * s)
lm_ids = t_lm_ids()
lm_landmarks = t_lm_landmarks()
i = 0
for k, l in id_lm_d.iteritems():
lm_ids[i] = k
lm_landmarks[i] = l.c_pointer
i += 1
lm_cptr = cls._call_cfunc(
'vital_landmark_map_new',
[t_lm_landmarks, t_lm_ids, ctypes.c_size_t],
[lm_landmarks, lm_ids, s],
cls.c_ptr_type()
)
return cls(lm_cptr)
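    # Illustrative round trip (assumes two Landmark instances lm_a and lm_b):
    #   lm_map = LandmarkMap.from_dict({0: lm_a, 7: lm_b})
    #   assert len(lm_map) == 2 and sorted(lm_map.as_dict().keys()) == [0, 7]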
def __init__(self, from_cptr=None):
"""
        Create an empty map, or initialize from an existing C instance pointer
:param from_cptr: Optional existing landmark map C pointer
"""
super(LandmarkMap, self).__init__(from_cptr)
def _new(self):
return self._call_cfunc(
'vital_landmark_map_new_empty',
restype=self.C_TYPE_PTR
)
def _destroy(self):
self._call_cfunc(
'vital_landmark_map_destroy', [self.C_TYPE_PTR], [self]
)
def __eq__(self, other):
return (
isinstance(other, LandmarkMap) and
self.as_dict() == other.as_dict()
)
def __ne__(self, other):
return not (self == other)
def __len__(self):
return self.size
@property
def size(self):
"""
Get the size of this map
:return: the size of this map
:rtype: int
"""
return self._call_cfunc(
'vital_landmark_map_size',
[self.C_TYPE_PTR], [self],
ctypes.c_size_t
)
def as_dict(self):
"""
Get a copy of this map as a python dictionary
:return: Dictionary mapping landmark IDs to Landmark instances
:rtype: dict[int|long, vital.types.Landmark]
"""
t_lm_ids = ctypes.POINTER(ctypes.c_int64)
t_lm_landmarks = ctypes.POINTER(Landmark.c_ptr_type())
lm_ids = t_lm_ids()
lm_landmarks = t_lm_landmarks()
self._call_cfunc(
'vital_landmark_map_landmarks',
[self.C_TYPE_PTR, ctypes.POINTER(t_lm_ids), ctypes.POINTER(t_lm_landmarks)],
[self, ctypes.byref(lm_ids), ctypes.byref(lm_landmarks)]
)
d = {}
s = self.size
for i in xrange(s):
# Need to copy ctypes pointer object
l_cptr = Landmark.c_ptr_type()(lm_landmarks[i].contents)
d[lm_ids[i]] = Landmark(from_cptr=l_cptr)
free_void_ptr(lm_ids)
free_void_ptr(lm_landmarks)
return d
|
bsd-3-clause
| -1,191,061,869,346,105,300
| 30.562092
| 88
| 0.61607
| false
| 3.820411
| false
| false
| false
|
tectronics/openmalaria-git
|
util/compareOutput.py
|
1
|
7178
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of OpenMalaria.
#
# Copyright (C) 2005-2010 Swiss Tropical Institute and Liverpool School Of Tropical Medicine
#
# OpenMalaria is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
import math
from optparse import OptionParser
from approxEqual import ApproxSame
from readOutput import readEntries
REL_PRECISION=1e-6
ABS_PRECISION=1e-6
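# charEqual streams both files in 10 KiB chunks and reports whether they are
# byte-for-byte identical, so large outputs can be compared without loading
# them fully into memory.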
def charEqual (fn1,fn2):
MAX=10*1024
f1 = open(fn1,'r')
f2 = open(fn2,'r')
while True:
s1 = f1.read(MAX)
s2 = f2.read(MAX)
if (len(s1)==0) or (len(s2)==0):
# end of one or both files; equal if it's the end of both
return len(s1) == len(s2)
if s1 != s2:
return False
def main(fn1,fn2,maxDiffsToPrint=6):
"""Takes names of the two files to compare and optionally an argument describing
the maximum number of differences to print directly (note: order is not intuitive).
Returns a tuple ret,ident; ret is 0 if test passes (output considered near-enough equal),
    ident is True if files are binary-equal."""
ret=0
opt=""
if REL_PRECISION!=1e-6:
opt+=" --rel-prescision="+str(REL_PRECISION)
if ABS_PRECISION!=1e-6:
opt+=" --abs-prescision="+str(ABS_PRECISION)
print "\033[1;34m compareOutput.py"+opt+" "+fn1+" "+fn2+" "+str(maxDiffsToPrint)+"\033[0;0m"
# Read both files and combine into a map of key to pairs (v1, v2)
try:
if charEqual (fn1,fn2):
print "output.txt files are identical"
return 0,True
print "output.txt files aren't binary-equal"
values1=readEntries(fn1)
values2=readEntries(fn2)
# python 3000 syntax is "except IOError as e", backported to 2.6 but not always supported. Old syntax:
except IOError, e:
print str(e)
return 1,False
values=dict()
for (k,v1) in values1.iteritems():
v2=None
if (k in values2):
v2=values2[k]
del values2[k]
values[k] = (v1,v2)
for (k,v2) in values2.iteritems():
values[k] = (None,v2)
# Go through all values:
numPrinted=0
numDiffs=0
numMissing1=0
numMissing2=0
perMeasureNum = dict()
perMeasureTotal1 = dict()
perMeasureTotal2 = dict()
perMeasureNumDiff = dict()
perMeasureDiffSum = dict()
perMeasureDiffAbsSum = dict()
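    # Per-measure accumulators, keyed by the measure id (k.a): entry counts,
    # per-file totals, number of differing entries, and the signed/absolute
    # sums of the differences.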
approxSame = ApproxSame(REL_PRECISION, ABS_PRECISION)
for (k,(v1,v2)) in values.iteritems():
if v1==None:
numMissing1 += 1
elif v2==None:
numMissing2 += 1
else:
perMeasureNum[k.a] = perMeasureNum.get(k.a, 0) + 1
perMeasureTotal1[k.a] = perMeasureTotal1.get(k.a, 0.0) + v1
perMeasureTotal2[k.a] = perMeasureTotal2.get(k.a, 0.0) + v2
# Compare with relative precision
if approxSame (v1, v2):
continue
numDiffs += 1
# Sum up total difference per measure
perMeasureDiffSum[k.a] = perMeasureDiffSum.get(k.a,0.0) + v2 - v1
perMeasureDiffAbsSum[k.a] = perMeasureDiffAbsSum.get(k.a,0.0) + math.fabs(v2-v1)
numPrinted += 1
perMeasureNumDiff[k.a] = perMeasureNumDiff.get(k.a,0) + 1;
if (numPrinted <= maxDiffsToPrint):
print "survey "+str(k.b)+", group "+str(k.c)+", measure "+str(k.a)+": "+str(v1)+" -> "+str(v2)
if (numPrinted == maxDiffsToPrint):
print "[won't print any more line-by-line diffs]"
if (numMissing1 > 0) or (numMissing2 > 0):
print str(numMissing1) + " entries missing from first file, " + str(numMissing2) +" from second"
ret = 3
maxDiffSum=0.0
maxAbsDiffSum=0.0
for (k.a,absDiff) in perMeasureDiffAbsSum.iteritems():
if not (absDiff <= 1e-6): # handle NANs
# standard division throws on divide-by-zero, which I don't want
def div(x,y):
try:
return x/y
except ZeroDivisionError:
return 1e400 * 0 # nan
diff=perMeasureDiffSum[k.a]
sum1=perMeasureTotal1[k.a]
sum2=perMeasureTotal2[k.a]
diffSum=div(diff,sum1)
maxDiffSum=max(maxDiffSum,math.fabs(diffSum))
absDiffSum=div(absDiff,sum1)
maxAbsDiffSum=max(maxAbsDiffSum,absDiffSum)
print "for measure "+str(k.a)+":\tsum(1st file):"+str(sum1)+"\tsum(2nd file):"+str(sum2)+"\tdiff/sum: "+str(diffSum)+"\t(abs diff)/sum: "+str(absDiffSum)
if maxDiffSum>0 or maxAbsDiffSum>0:
print "Max diff/sum:",maxDiffSum,"max (abs diff)/sum:",maxAbsDiffSum
if numDiffs == 0:
print "No significant differences (total relative diff: "+str(approxSame.getTotalRelDiff())+"), ok."
return ret,False
else:
print "\033[1;31m"+str(numDiffs)+" significant differences (total relative diff: "+str(approxSame.getTotalRelDiff())+ ")!\033[0;0m"
return 1,False
# Test for options
def evalOptions (args):
parser = OptionParser(usage="Usage: %prog [options] logfile1 logfile2 [max different lines to print]",
# damn reformatting into a single paragraph: this doesn't get printed very nicely when --help is invoked
description="""Compare logfile1 and logfile2 for differences, returning a measure of difference.
See http://code.google.com/p/openmalaria/wiki/UtilsRunScripts#compareOutput.py for details on output.""")
parser.add_option("-R","--rel-precision",
action="store", dest="rel_precision", type="float",
help="Set relative precision (default: 1.0e-6)")
parser.add_option("-A","--abs-precision",
action="store", dest="abs_precision", type="float",
help="Set absolute precision (default: 1.0e-6)")
(options, others) = parser.parse_args(args=args)
return options,others
if __name__ == '__main__':
(options,others) = evalOptions (sys.argv[1:])
if options.rel_precision:
REL_PRECISION=options.rel_precision
if options.abs_precision:
ABS_PRECISION=options.abs_precision
if (len(others) == 3):
ret,ident = main (others[0],others[1],int(others[2]))
elif (len(others) == 2):
ret,ident = main (others[0],others[1])
else:
print "Usage: "+sys.argv[0]+" logfile1 logfile2 [max different lines to print]"
ret=-1
sys.exit(ret)
|
gpl-2.0
| -189,319,169,589,652,500
| 38.224044
| 165
| 0.617721
| false
| 3.43445
| false
| false
| false
|
linyc74/CaMNIST
|
view.py
|
1
|
12377
|
import numpy as np
import cv2, time, sys, threading, json, os
from PyQt4 import QtCore, QtGui
from controller import *
class CamnistGUI(QtGui.QMainWindow):
def __init__(self, controller_obj):
super(CamnistGUI, self).__init__()
self.controller = controller_obj
pkg_dir = os.path.dirname(__file__)
path = os.path.join(pkg_dir, 'parameters/gui.json')
gui_parms = json.loads(open(path, 'r').read())
w = gui_parms['monitor_width']
h = gui_parms['monitor_height']
self.setWindowTitle('CaMNIST')
self.setWindowIcon(QtGui.QIcon('icons/cool.png'))
self.setGeometry(100, 100, w, h)
self.setFixedSize(w, h)
self.setMouseTracking(True)
self.monitor = QtGui.QLabel(self)
self.monitor.setGeometry(0, 0, w, h)
self.monitor.setAlignment(QtCore.Qt.AlignCenter)
self.toolbar = QtGui.QToolBar('Tool Bar')
self.toolbar.setMovable(True)
self.toolbar.setStyleSheet("QToolBar { background:white; }")
self.toolbar.setIconSize(QtCore.QSize(30, 45))
self.addToolBar(QtCore.Qt.LeftToolBarArea, self.toolbar)
self.info_window = TextWindow()
self.camera_tuner_window = CameraTunerWindow( controller_obj = self.controller )
self.__init__toolbtns()
def __init__toolbtns(self):
# Each action has a unique key and a name
# key = icon filename = method name
# name = text of the action/button
# ( keys , names )
K = [('snapshot' , 'Snapshot' ),
('toggle_recording' , 'Record Video' ),
('open_info' , 'Show Real-time Info' ),
('open_camera_tuner', 'Adjust Camera Parameters' )]
self.actions = {}
self.toolbtns = {}
# Create actions and tool buttons
for key, name in K:
pkg_dir = os.path.dirname(__file__)
path = os.path.join(pkg_dir, 'icons/' + key + '.png')
icon = QtGui.QIcon(path)
self.actions[key] = QtGui.QAction(icon, name, self)
self.toolbtns[key] = self.toolbar.addAction(self.actions[key])
# For actions that needs to be connected to the core object,
K = ['snapshot', 'toggle_recording']
        # This loop defines a standard way of connecting each action
        # to a method in the core object via the controller object.
for key in K:
            # Get an argument-less method from the controller object.
# Note that the method_name = key.
method = self.controller.get_method( method_name = key )
# The get_method() returns None
# if a particular method is not found in the core object.
if not method is None:
# Connect the action to the method in the controller object
self.actions[key].triggered.connect(method)
# For actions that needs to be connected to the self gui object,
keys = ['open_info', 'open_camera_tuner']
for key in keys:
try:
method = getattr(self, key)
self.actions[key].triggered.connect(method)
except Exception as exception_inst:
print(exception_inst)
def open_info(self):
if not self.info_window.isVisible():
self.info_window.show()
def open_camera_tuner(self):
self.camera_tuner_window.show()
def wheelEvent(self, event):
if event.delta() > 0:
self.controller.call_method('zoom_in')
else:
self.controller.call_method('zoom_out')
def closeEvent(self, event):
reply = QtGui.QMessageBox.question(self,
'CaMNIST',
'Are you sure you want to quit CaMNIST?',
QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.controller.call_method('close')
self.info_window.close()
self.camera_tuner_window.close()
event.accept()
else:
event.ignore()
# Methods for incoming signals
def connect_signals(self, thread, signal_name):
'Called by an external object to connect signals.'
# The suffix '(PyQt_PyObject)' means the argument to be transferred
# could be any type of python objects,
# not limited to Qt objects.
signal = signal_name + '(PyQt_PyObject)'
# The method name to be called upon signal arrival = the signal name
try:
method = getattr(self, signal_name)
self.connect(thread, QtCore.SIGNAL(signal), method)
except Exception as exception_inst:
print("Try to connect PyQt signal '{}'".format(signal_name))
print(exception_inst + '\n')
def progress_update(self, text_value):
self.progress_bar.progress_update(text_value)
def display_image(self, image):
# convert from BGR to RGB for latter QImage
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
height, width, bytesPerComponent = image.shape
bytesPerLine = bytesPerComponent * width
# convert cv2 image to QImage
Q_img = QtGui.QImage(image,
width, height, bytesPerLine,
QtGui.QImage.Format_RGB888)
# Convert QImage to QPixmap
Q_pixmap = QtGui.QPixmap.fromImage(Q_img)
# Set the QLabel to display the QPixmap
self.monitor.setPixmap(Q_pixmap)
def recording_starts(self):
self.actions['toggle_recording'].setIcon(QtGui.QIcon('icons/stop_recording.png'))
self.actions['toggle_recording'].setText('Stop')
def recording_ends(self):
self.actions['toggle_recording'].setIcon(QtGui.QIcon('icons/toggle_recording.png'))
self.actions['toggle_recording'].setText('Record Video')
def set_info_text(self, text):
self.info_window.setText(text)
def display_topography(self, vertices):
self.gl_window.gl_widget.updateObject(vertices)
class SliderWidget(QtGui.QWidget):
'''
This widget wraps a single parameter in the TunerWindow.
Name, value, min, max, interval are stored in this object.
Three gui elements are included to display the information of the parameter:
1) QLabel showing name
2) QLabel showing value
3) QSlider
'''
def __init__(self, parent, name, min, max, value, interval):
super(SliderWidget, self).__init__(parent)
self.name = name
self.min = min
self.max = max
self.value = value
self.interval = interval
self.hbox = QtGui.QHBoxLayout()
self.QLabel_name = QtGui.QLabel(self)
self.QLabel_value = QtGui.QLabel(self)
self.QSlider = QtGui.QSlider(QtCore.Qt.Horizontal, self)
self.setLayout(self.hbox)
self.hbox.addWidget(self.QLabel_name)
self.hbox.addWidget(self.QLabel_value)
self.hbox.addWidget(self.QSlider)
self.QLabel_name.setText(name)
self.QLabel_value.setText(str(value))
self.QSlider.setMinimum(min)
self.QSlider.setMaximum(max)
self.QSlider.setValue(value)
self.QSlider.setSingleStep(interval)
self.QSlider.setTickInterval(interval)
self.QSlider.setTickPosition(QtGui.QSlider.TicksBelow)
self.QSlider.valueChanged.connect(self.setValue)
def setValue(self, value):
# Round the value to fit the interval
value = value - self.min
value = round( value / float(self.interval) ) * self.interval
value = int( value + self.min )
self.value = value
self.QSlider.setValue(value)
self.QLabel_value.setText(str(value))
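# Illustrative use of SliderWidget (values assumed): SliderWidget(parent, 'gain',
# min=0, max=255, value=50, interval=5) shows "gain", "50" and a slider whose
# value always snaps to the nearest multiple of the interval above the minimum.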
class TextWindow(QtGui.QWidget):
def __init__(self):
super(TextWindow, self).__init__()
self.setWindowTitle('Info')
self.setWindowIcon(QtGui.QIcon('icons/cool.png'))
self.setGeometry(150, 150, 512, 256)
self.setFixedSize(512, 256)
self.font = QtGui.QFont()
self.font.setFamily('Segoe UI')
self.font.setBold(False)
self.font.setPixelSize(14)
self.textbox = QtGui.QLabel(self)
self.textbox.setGeometry(0, 0, 512, 256)
self.textbox.setAlignment(QtCore.Qt.AlignLeft)
self.textbox.setFont(self.font)
def setText(self, text):
self.textbox.setText(text)
class TunerWindow(QtGui.QWidget):
'''
A gui template window for tuning parameters.
This class does not contain any business logic.
All it does is to provide an interface to adjust parameters through gui.
Each parameter is wrapped in a 'block' of SliderWidget object.
Properties (name, min, max, value, interval)
of each parameter is stored in the SliderWidget object.
'''
def __init__(self):
super(TunerWindow, self).__init__()
# self.setMinimumWidth(600)
# self.setMaximumWidth(600)
self.main_vbox = QtGui.QVBoxLayout()
self.setLayout(self.main_vbox)
self.btn_hbox = QtGui.QHBoxLayout()
self.main_vbox.addLayout(self.btn_hbox)
K = [('ok' ,'OK' ),
('cancel','Cancel'),
('apply' ,'Apply' )]
self.btn = {}
for key, name in K:
self.btn[key] = QtGui.QPushButton(name, self)
self.btn[key].clicked.connect(getattr(self, key))
self.btn_hbox.addWidget( self.btn[key] )
self.parameters = []
def apply_parameter(self):
'''
Supposed to be overridden.
Defines what to do when ok() or apply() are called.
'''
pass
def ok(self):
self.apply_parameter()
self.hide()
def cancel(self):
self.hide()
def apply(self):
self.apply_parameter()
def add_parameter(self, name, min, max, value, interval):
'''
Add a new SliderWidget object holding all information of the new parameter.
'''
widget = SliderWidget(parent = self,
name = name,
min = min,
max = max,
value = value,
interval = interval)
self.parameters.append(widget)
self.main_vbox.insertWidget(len(self.main_vbox)-1, widget)
class CameraTunerWindow(TunerWindow):
'''
Inherits from the TunerWindow class.
    The business logic for the camera imaging parameters
    is specified in this class.
This class also manages the transfer of camera parameters
to the core object.
'''
def __init__(self, controller_obj):
super(CameraTunerWindow, self).__init__()
self.controller = controller_obj
self.setWindowIcon(QtGui.QIcon('icons/cool.png'))
        self.setWindowTitle('Camera Parameters')
self.setMinimumWidth(600)
self.add_parameter(name='brightness' , min=0 , max=255 , value=150 , interval=5 )
self.add_parameter(name='contrast' , min=0 , max=255 , value=64 , interval=5 )
self.add_parameter(name='saturation' , min=0 , max=255 , value=80 , interval=5 )
self.add_parameter(name='gain' , min=0 , max=255 , value=50 , interval=5 )
self.add_parameter(name='exposure' , min=-7 , max=-1 , value=-4 , interval=1 )
self.add_parameter(name='white_balance' , min=3000, max=6500, value=5000, interval=100)
self.add_parameter(name='focus' , min=0 , max=255 , value=0 , interval=5 )
def apply_parameter(self):
'''
Transfers parameters to the core object via the controller.
'''
parms = {}
for p in self.parameters:
parms[p.name] = p.value
self.controller.call_method( method_name = 'apply_camera_parameters',
arg = parms )
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
gui = CamnistGUI( controller_obj = MockController() )
gui.show()
sys.exit(app.exec_())
|
mit
| 6,695,720,950,841,457,000
| 32.271505
| 95
| 0.586329
| false
| 3.997739
| false
| false
| false
|
google-research/google-research
|
poem/core/keypoint_profiles.py
|
1
|
48752
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Keypoint profile class and utility functions."""
import abc
import enum
import six
import tensorflow as tf
from poem.core import keypoint_utils
class LeftRightType(enum.Enum):
"""Keypoint/segment left/right type."""
UNKNOWN = 0
CENTRAL = 1
LEFT = 2
RIGHT = 3
def infer_keypoint_left_right_type(left_right_types, indices):
"""Infers keypoint left/right type.
The inferred left/right type is decided as follows:
1. If either type is UNKNOWN, returns UNKNOWN.
2. If both types are the same, returns this type.
3. If one type is CENTRAL, and the other type is LEFT or RIGHT, returns the
other type.
4. If one type is LEFT and the other type is RIGHT, returns CENTRAL.
Args:
left_right_types: A list of LeftRightType enum values for all keypoints.
indices: A list of integers for keypoint indices.
Returns:
A LeftRightType enum value for inferred type.
Raises:
ValueError: If any index is out of range.
"""
if not indices:
return LeftRightType.UNKNOWN
def lookup(i):
if i < 0 or i >= len(left_right_types):
raise ValueError('Left/right type index is out of range: %d.' % i)
return left_right_types[i]
if len(indices) == 1:
return lookup(indices[0])
output_type = LeftRightType.CENTRAL
for i in indices:
current_type = lookup(i)
if current_type == LeftRightType.UNKNOWN:
return LeftRightType.UNKNOWN
if output_type == LeftRightType.CENTRAL:
output_type = current_type
elif current_type != LeftRightType.CENTRAL and current_type != output_type:
output_type = LeftRightType.CENTRAL
return output_type
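# For example, with left_right_types = [CENTRAL, LEFT, RIGHT]:
# indices [1] -> LEFT, [0, 1] -> LEFT, and [1, 2] -> CENTRAL.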
def infer_segment_left_right_type(left_right_types, start_indices, end_indices):
"""Infers segment left/right type.
The inferred left/right type is decided as follows:
1. If either type is UNKNOWN, returns UNKNOWN.
2. If both types are the same, returns this type.
3. If one type is CENTRAL, and the other type is LEFT or RIGHT, returns the
other type.
4. If one type is LEFT and the other type is RIGHT, returns CENTRAL.
Args:
left_right_types: A list of LeftRightType enum values for all keypoints.
start_indices: A list of integers for LHS keypoint indices.
end_indices: A list of integers for RHS keypoint indices.
Returns:
A LeftRightType enum value for inferred type.
"""
lhs_type = infer_keypoint_left_right_type(left_right_types, start_indices)
rhs_type = infer_keypoint_left_right_type(left_right_types, end_indices)
if lhs_type == LeftRightType.UNKNOWN or rhs_type == LeftRightType.UNKNOWN:
return LeftRightType.UNKNOWN
if lhs_type == LeftRightType.CENTRAL:
return rhs_type
if rhs_type == LeftRightType.CENTRAL:
return lhs_type
return lhs_type if lhs_type == rhs_type else LeftRightType.CENTRAL
class KeypointProfile(six.with_metaclass(abc.ABCMeta, object)):
"""Keypoint profile base class."""
def __init__(self,
name,
keypoint_names,
offset_keypoint_names,
scale_keypoint_name_pairs,
scale_distance_reduction_fn,
scale_unit,
segment_name_pairs,
head_keypoint_name=None,
neck_keypoint_name=None,
left_shoulder_keypoint_name=None,
right_shoulder_keypoint_name=None,
left_elbow_keypoint_name=None,
right_elbow_keypoint_name=None,
left_wrist_keypoint_name=None,
right_wrist_keypoint_name=None,
spine_keypoint_name=None,
pelvis_keypoint_name=None,
left_hip_keypoint_name=None,
right_hip_keypoint_name=None,
left_knee_keypoint_name=None,
right_knee_keypoint_name=None,
left_ankle_keypoint_name=None,
right_ankle_keypoint_name=None):
"""Initializer."""
self._name = name
self._keypoint_names = [name for name, _ in keypoint_names]
self._keypoint_left_right_types = [
left_right_type for _, left_right_type in keypoint_names
]
self._offset_keypoint_index = [
self._keypoint_names.index(keypoint_name)
for keypoint_name in offset_keypoint_names
]
self._scale_keypoint_index_pairs = []
for start_names, end_names in scale_keypoint_name_pairs:
self._scale_keypoint_index_pairs.append(
([self._keypoint_names.index(name) for name in start_names],
[self._keypoint_names.index(name) for name in end_names]))
self._scale_distance_reduction_fn = scale_distance_reduction_fn
self._scale_unit = scale_unit
self._segment_index_pairs = []
for start_names, end_names in segment_name_pairs:
self._segment_index_pairs.append(
([self._keypoint_names.index(name) for name in start_names],
[self._keypoint_names.index(name) for name in end_names]))
self._head_keypoint_name = head_keypoint_name
self._neck_keypoint_name = neck_keypoint_name
self._left_shoulder_keypoint_name = left_shoulder_keypoint_name
self._right_shoulder_keypoint_name = right_shoulder_keypoint_name
self._left_elbow_keypoint_name = left_elbow_keypoint_name
self._right_elbow_keypoint_name = right_elbow_keypoint_name
self._left_wrist_keypoint_name = left_wrist_keypoint_name
self._right_wrist_keypoint_name = right_wrist_keypoint_name
self._spine_keypoint_name = spine_keypoint_name
self._pelvis_keypoint_name = pelvis_keypoint_name
self._left_hip_keypoint_name = left_hip_keypoint_name
self._right_hip_keypoint_name = right_hip_keypoint_name
self._left_knee_keypoint_name = left_knee_keypoint_name
self._right_knee_keypoint_name = right_knee_keypoint_name
self._left_ankle_keypoint_name = left_ankle_keypoint_name
self._right_ankle_keypoint_name = right_ankle_keypoint_name
@property
def name(self):
"""Gets keypoint profile name."""
return self._name
@property
def keypoint_names(self):
"""Gets keypoint names."""
return self._keypoint_names
@property
@abc.abstractmethod
def keypoint_dim(self):
"""Gets keypoint dimensionality."""
raise NotImplementedError
@property
def keypoint_num(self):
"""Gets number of keypoints."""
return len(self._keypoint_names)
def keypoint_left_right_type(self, keypoint_index):
"""Gets keypoint left/right type given index."""
if isinstance(keypoint_index, int):
keypoint_index = [keypoint_index]
return infer_keypoint_left_right_type(self._keypoint_left_right_types,
keypoint_index)
def segment_left_right_type(self, start_index, end_index):
"""Gets segment left/right type given index."""
if isinstance(start_index, int):
start_index = [start_index]
if isinstance(end_index, int):
end_index = [end_index]
return infer_segment_left_right_type(self._keypoint_left_right_types,
start_index, end_index)
@property
def offset_keypoint_index(self):
"""Gets offset keypoint index."""
return self._offset_keypoint_index
@property
def scale_keypoint_index_pairs(self):
"""Gets scale keypoint index pairs."""
return self._scale_keypoint_index_pairs
@property
def scale_unit(self):
"""Gets scale unit."""
return self._scale_unit
@property
def segment_index_pairs(self):
"""Gets segment index pairs."""
return self._segment_index_pairs
@property
def keypoint_affinity_matrix(self):
"""Gets keypoint affinity matrix.
    If a segment has a multi-point end, all pairs of the relevant points are
    considered to be in affinity.
Returns:
matrix: A double list of floats for the keypoint affinity matrix.
Raises:
ValueError: If affinity matrix has any isolated node.
"""
matrix = [[0.0
for _ in range(self.keypoint_num)]
for _ in range(self.keypoint_num)]
# Self-affinity.
for i in range(self.keypoint_num):
matrix[i][i] = 1.0
for lhs_index, rhs_index in self._segment_index_pairs:
for i in lhs_index:
for j in lhs_index:
matrix[i][j] = 1.0
matrix[j][i] = 1.0
for i in rhs_index:
for j in rhs_index:
matrix[i][j] = 1.0
matrix[j][i] = 1.0
for i in lhs_index:
for j in rhs_index:
matrix[i][j] = 1.0
matrix[j][i] = 1.0
# Check if the affinity matrix is valid, i.e., each node must have degree
# greater than 1 (no isolated node).
for row in matrix:
if sum(row) <= 1.0:
raise ValueError(
'Affinity matrix has a node with degree less than 2: %s.' %
str(matrix))
return matrix
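  # For instance, a single segment pair ([0], [1, 2]) yields affinity between
  # keypoints 0-1, 0-2 and 1-2 (plus the diagonal self-affinity entries).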
def keypoint_index(self, keypoint_name, raise_error_if_not_found=False):
"""Gets keypoint index given name.
If `raise_error_if_not_found` is True, raises ValueError if keypoint does
not exist. Otherwise, returns -1 if keypoint does not exist.
Args:
keypoint_name: A string for keypoint name to find index of.
raise_error_if_not_found: A boolean for whether to raise ValueError if
keypoint does not exist.
Returns:
An integer for keypoint index.
Raises:
ValueError: If keypoint does not exist and `raise_error_if_not_found` is
True.
"""
if keypoint_name in self._keypoint_names:
return self._keypoint_names.index(keypoint_name)
if raise_error_if_not_found:
raise ValueError('Failed to find keypoint: `%s`.' % str(keypoint_name))
return -1
@property
def head_keypoint_index(self):
"""Gets head keypoint index."""
if not self._head_keypoint_name:
raise ValueError('Head keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._head_keypoint_name
]
@property
def neck_keypoint_index(self):
"""Gets neck keypoint index."""
if not self._neck_keypoint_name:
raise ValueError('Neck keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._neck_keypoint_name
]
@property
def left_shoulder_keypoint_index(self):
"""Gets left shoulder keypoint index."""
if not self._left_shoulder_keypoint_name:
raise ValueError('Left shoulder keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_shoulder_keypoint_name
]
@property
def right_shoulder_keypoint_index(self):
"""Gets right shoulder keypoint index."""
if not self._right_shoulder_keypoint_name:
raise ValueError('Right shoulder keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_shoulder_keypoint_name
]
@property
def left_elbow_keypoint_index(self):
"""Gets left elbow keypoint index."""
if not self._left_elbow_keypoint_name:
raise ValueError('Left elbow keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_elbow_keypoint_name
]
@property
def right_elbow_keypoint_index(self):
"""Gets right elbow keypoint index."""
if not self._right_elbow_keypoint_name:
raise ValueError('Right elbow keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_elbow_keypoint_name
]
@property
def left_wrist_keypoint_index(self):
"""Gets left wrist keypoint index."""
if not self._left_wrist_keypoint_name:
raise ValueError('Left wrist keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_wrist_keypoint_name
]
@property
def right_wrist_keypoint_index(self):
"""Gets right wrist keypoint index."""
if not self._right_wrist_keypoint_name:
raise ValueError('Right wrist keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_wrist_keypoint_name
]
@property
def spine_keypoint_index(self):
"""Gets spine keypoint index."""
if not self._spine_keypoint_name:
raise ValueError('Spine keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._spine_keypoint_name
]
@property
def pelvis_keypoint_index(self):
"""Gets pelvis keypoint index."""
if not self._pelvis_keypoint_name:
raise ValueError('Pelvis keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._pelvis_keypoint_name
]
@property
def left_hip_keypoint_index(self):
"""Gets left hip keypoint index."""
if not self._left_hip_keypoint_name:
raise ValueError('Left hip keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_hip_keypoint_name
]
@property
def right_hip_keypoint_index(self):
"""Gets right hip keypoint index."""
if not self._right_hip_keypoint_name:
raise ValueError('Right hip keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_hip_keypoint_name
]
@property
def left_knee_keypoint_index(self):
"""Gets left knee keypoint index."""
if not self._left_knee_keypoint_name:
raise ValueError('Left knee keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_knee_keypoint_name
]
@property
def right_knee_keypoint_index(self):
"""Gets right knee keypoint index."""
if not self._right_knee_keypoint_name:
raise ValueError('Right knee keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_knee_keypoint_name
]
@property
def left_ankle_keypoint_index(self):
"""Gets left ankle keypoint index."""
if not self._left_ankle_keypoint_name:
raise ValueError('Left ankle keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._left_ankle_keypoint_name
]
@property
def right_ankle_keypoint_index(self):
"""Gets right ankle keypoint index."""
if not self._right_ankle_keypoint_name:
raise ValueError('Right ankle keypoint is not specified.')
return [
self.keypoint_index(name, raise_error_if_not_found=True)
for name in self._right_ankle_keypoint_name
]
@property
def standard_part_names(self):
"""Gets all standard part names."""
return [
'HEAD', 'NECK', 'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_ELBOW',
'RIGHT_ELBOW', 'LEFT_WRIST', 'RIGHT_WRIST', 'SPINE', 'PELVIS',
'LEFT_HIP', 'RIGHT_HIP', 'LEFT_KNEE', 'RIGHT_KNEE', 'LEFT_ANKLE',
'RIGHT_ANKLE'
]
def get_standard_part_index(self, part_name):
"""Gets part index by standardized name."""
if part_name.upper() == 'HEAD':
return self.head_keypoint_index
if part_name.upper() == 'NECK':
return self.neck_keypoint_index
if part_name.upper() == 'LEFT_SHOULDER':
return self.left_shoulder_keypoint_index
if part_name.upper() == 'RIGHT_SHOULDER':
return self.right_shoulder_keypoint_index
if part_name.upper() == 'LEFT_ELBOW':
return self.left_elbow_keypoint_index
if part_name.upper() == 'RIGHT_ELBOW':
return self.right_elbow_keypoint_index
if part_name.upper() == 'LEFT_WRIST':
return self.left_wrist_keypoint_index
if part_name.upper() == 'RIGHT_WRIST':
return self.right_wrist_keypoint_index
if part_name.upper() == 'SPINE':
return self.spine_keypoint_index
if part_name.upper() == 'PELVIS':
return self.pelvis_keypoint_index
if part_name.upper() == 'LEFT_HIP':
return self.left_hip_keypoint_index
if part_name.upper() == 'RIGHT_HIP':
return self.right_hip_keypoint_index
if part_name.upper() == 'LEFT_KNEE':
return self.left_knee_keypoint_index
if part_name.upper() == 'RIGHT_KNEE':
return self.right_knee_keypoint_index
if part_name.upper() == 'LEFT_ANKLE':
return self.left_ankle_keypoint_index
if part_name.upper() == 'RIGHT_ANKLE':
return self.right_ankle_keypoint_index
raise ValueError('Unsupported part name: `%s`.' % part_name)
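  # Part names are matched case-insensitively, e.g. get_standard_part_index('head')
  # and get_standard_part_index('HEAD') both return head_keypoint_index.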
def normalize(self, keypoints, keypoint_masks=None):
"""Normalizes keypoints."""
del keypoint_masks
return keypoint_utils.normalize_points(
keypoints,
offset_point_indices=self._offset_keypoint_index,
scale_distance_point_index_pairs=self._scale_keypoint_index_pairs,
scale_distance_reduction_fn=self._scale_distance_reduction_fn,
scale_unit=self._scale_unit)
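  # denormalize() below inverts this mapping: it rescales by the original scale
  # distances (divided by scale_unit) and adds back the offset points.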
def denormalize(self,
normalized_keypoints,
offset_points,
scale_distances,
keypoint_masks=None):
"""Denormalizes keypoints."""
del keypoint_masks
return (normalized_keypoints / self._scale_unit * scale_distances +
offset_points)
class KeypointProfile3D(KeypointProfile):
"""3D keypoint profile base class."""
def __init__(self,
name,
keypoint_names,
offset_keypoint_names,
scale_keypoint_name_pairs,
segment_name_pairs,
scale_distance_reduction_fn=tf.math.reduce_sum,
scale_unit=1.0,
head_keypoint_name=None,
neck_keypoint_name=None,
left_shoulder_keypoint_name=None,
right_shoulder_keypoint_name=None,
left_elbow_keypoint_name=None,
right_elbow_keypoint_name=None,
left_wrist_keypoint_name=None,
right_wrist_keypoint_name=None,
spine_keypoint_name=None,
pelvis_keypoint_name=None,
left_hip_keypoint_name=None,
right_hip_keypoint_name=None,
left_knee_keypoint_name=None,
right_knee_keypoint_name=None,
left_ankle_keypoint_name=None,
right_ankle_keypoint_name=None):
"""Initializer."""
super(KeypointProfile3D, self).__init__(
name=name,
keypoint_names=keypoint_names,
offset_keypoint_names=offset_keypoint_names,
scale_keypoint_name_pairs=scale_keypoint_name_pairs,
scale_distance_reduction_fn=scale_distance_reduction_fn,
scale_unit=scale_unit,
segment_name_pairs=segment_name_pairs,
head_keypoint_name=head_keypoint_name,
neck_keypoint_name=neck_keypoint_name,
left_shoulder_keypoint_name=left_shoulder_keypoint_name,
right_shoulder_keypoint_name=right_shoulder_keypoint_name,
left_elbow_keypoint_name=left_elbow_keypoint_name,
right_elbow_keypoint_name=right_elbow_keypoint_name,
left_wrist_keypoint_name=left_wrist_keypoint_name,
right_wrist_keypoint_name=right_wrist_keypoint_name,
spine_keypoint_name=spine_keypoint_name,
pelvis_keypoint_name=pelvis_keypoint_name,
left_hip_keypoint_name=left_hip_keypoint_name,
right_hip_keypoint_name=right_hip_keypoint_name,
left_knee_keypoint_name=left_knee_keypoint_name,
right_knee_keypoint_name=right_knee_keypoint_name,
left_ankle_keypoint_name=left_ankle_keypoint_name,
right_ankle_keypoint_name=right_ankle_keypoint_name)
@property
def keypoint_dim(self):
"""Gets keypoint dimensionality."""
return 3
class KeypointProfile2D(KeypointProfile):
"""2D keypoint profile base class."""
def __init__(self,
name,
keypoint_names,
offset_keypoint_names,
scale_keypoint_name_pairs,
segment_name_pairs,
compatible_keypoint_name_dict=None,
scale_distance_reduction_fn=tf.math.reduce_max,
scale_unit=0.5,
head_keypoint_name=None,
neck_keypoint_name=None,
left_shoulder_keypoint_name=None,
right_shoulder_keypoint_name=None,
left_elbow_keypoint_name=None,
right_elbow_keypoint_name=None,
left_wrist_keypoint_name=None,
right_wrist_keypoint_name=None,
spine_keypoint_name=None,
pelvis_keypoint_name=None,
left_hip_keypoint_name=None,
right_hip_keypoint_name=None,
left_knee_keypoint_name=None,
right_knee_keypoint_name=None,
left_ankle_keypoint_name=None,
right_ankle_keypoint_name=None):
"""Initializer."""
super(KeypointProfile2D, self).__init__(
name=name,
keypoint_names=keypoint_names,
offset_keypoint_names=offset_keypoint_names,
scale_keypoint_name_pairs=scale_keypoint_name_pairs,
scale_distance_reduction_fn=scale_distance_reduction_fn,
scale_unit=scale_unit,
segment_name_pairs=segment_name_pairs,
head_keypoint_name=head_keypoint_name,
neck_keypoint_name=neck_keypoint_name,
left_shoulder_keypoint_name=left_shoulder_keypoint_name,
right_shoulder_keypoint_name=right_shoulder_keypoint_name,
left_elbow_keypoint_name=left_elbow_keypoint_name,
right_elbow_keypoint_name=right_elbow_keypoint_name,
left_wrist_keypoint_name=left_wrist_keypoint_name,
right_wrist_keypoint_name=right_wrist_keypoint_name,
spine_keypoint_name=spine_keypoint_name,
pelvis_keypoint_name=pelvis_keypoint_name,
left_hip_keypoint_name=left_hip_keypoint_name,
right_hip_keypoint_name=right_hip_keypoint_name,
left_knee_keypoint_name=left_knee_keypoint_name,
right_knee_keypoint_name=right_knee_keypoint_name,
left_ankle_keypoint_name=left_ankle_keypoint_name,
right_ankle_keypoint_name=right_ankle_keypoint_name)
self._compatible_keypoint_name_dict = {}
if compatible_keypoint_name_dict is not None:
for _, compatible_keypoint_names in compatible_keypoint_name_dict.items():
if len(compatible_keypoint_names) != len(self._keypoint_names):
raise ValueError('Compatible keypoint names must be of the same size '
'as keypoint names.')
self._compatible_keypoint_name_dict = compatible_keypoint_name_dict
@property
def keypoint_dim(self):
"""Gets keypoint dimensionality."""
return 2
@property
def compatible_keypoint_name_dict(self):
"""Gets compatible keypoint name dictionary."""
return self._compatible_keypoint_name_dict
class Std16KeypointProfile3D(KeypointProfile3D):
"""Standard 3D 16-keypoint profile."""
def __init__(self):
"""Initializer."""
super(Std16KeypointProfile3D,
self).__init__(
name='3DSTD16',
keypoint_names=[('HEAD', LeftRightType.CENTRAL),
('NECK', LeftRightType.CENTRAL),
('LEFT_SHOULDER', LeftRightType.LEFT),
('RIGHT_SHOULDER', LeftRightType.RIGHT),
('LEFT_ELBOW', LeftRightType.LEFT),
('RIGHT_ELBOW', LeftRightType.RIGHT),
('LEFT_WRIST', LeftRightType.LEFT),
('RIGHT_WRIST', LeftRightType.RIGHT),
('SPINE', LeftRightType.CENTRAL),
('PELVIS', LeftRightType.CENTRAL),
('LEFT_HIP', LeftRightType.LEFT),
('RIGHT_HIP', LeftRightType.RIGHT),
('LEFT_KNEE', LeftRightType.LEFT),
('RIGHT_KNEE', LeftRightType.RIGHT),
('LEFT_ANKLE', LeftRightType.LEFT),
('RIGHT_ANKLE', LeftRightType.RIGHT)],
offset_keypoint_names=['PELVIS'],
scale_keypoint_name_pairs=[(['NECK'], ['SPINE']),
(['SPINE'], ['PELVIS'])],
segment_name_pairs=[(['HEAD'], ['NECK']),
(['NECK'], ['LEFT_SHOULDER']),
(['NECK'], ['RIGHT_SHOULDER']),
(['NECK'], ['SPINE']),
(['LEFT_SHOULDER'], ['LEFT_ELBOW']),
(['RIGHT_SHOULDER'], ['RIGHT_ELBOW']),
(['LEFT_ELBOW'], ['LEFT_WRIST']),
(['RIGHT_ELBOW'], ['RIGHT_WRIST']),
(['SPINE'], ['PELVIS']),
(['PELVIS'], ['LEFT_HIP']),
(['PELVIS'], ['RIGHT_HIP']),
(['LEFT_HIP'], ['LEFT_KNEE']),
(['RIGHT_HIP'], ['RIGHT_KNEE']),
(['LEFT_KNEE'], ['LEFT_ANKLE']),
(['RIGHT_KNEE'], ['RIGHT_ANKLE'])],
head_keypoint_name=['HEAD'],
neck_keypoint_name=['NECK'],
left_shoulder_keypoint_name=['LEFT_SHOULDER'],
right_shoulder_keypoint_name=['RIGHT_SHOULDER'],
left_elbow_keypoint_name=['LEFT_ELBOW'],
right_elbow_keypoint_name=['RIGHT_ELBOW'],
left_wrist_keypoint_name=['LEFT_WRIST'],
right_wrist_keypoint_name=['RIGHT_WRIST'],
spine_keypoint_name=['SPINE'],
pelvis_keypoint_name=['PELVIS'],
left_hip_keypoint_name=['LEFT_HIP'],
right_hip_keypoint_name=['RIGHT_HIP'],
left_knee_keypoint_name=['LEFT_KNEE'],
right_knee_keypoint_name=['RIGHT_KNEE'],
left_ankle_keypoint_name=['LEFT_ANKLE'],
right_ankle_keypoint_name=['RIGHT_ANKLE'])
class Std13KeypointProfile3D(KeypointProfile3D):
"""Standard 3D 13-keypoint profile."""
def __init__(self):
"""Initializer."""
super(Std13KeypointProfile3D, self).__init__(
name='3DSTD13',
keypoint_names=[('HEAD', LeftRightType.CENTRAL),
('LEFT_SHOULDER', LeftRightType.LEFT),
('RIGHT_SHOULDER', LeftRightType.RIGHT),
('LEFT_ELBOW', LeftRightType.LEFT),
('RIGHT_ELBOW', LeftRightType.RIGHT),
('LEFT_WRIST', LeftRightType.LEFT),
('RIGHT_WRIST', LeftRightType.RIGHT),
('LEFT_HIP', LeftRightType.LEFT),
('RIGHT_HIP', LeftRightType.RIGHT),
('LEFT_KNEE', LeftRightType.LEFT),
('RIGHT_KNEE', LeftRightType.RIGHT),
('LEFT_ANKLE', LeftRightType.LEFT),
('RIGHT_ANKLE', LeftRightType.RIGHT)],
offset_keypoint_names=['LEFT_HIP', 'RIGHT_HIP'],
scale_keypoint_name_pairs=[(['LEFT_SHOULDER', 'RIGHT_SHOULDER'],
['LEFT_HIP', 'RIGHT_HIP'])],
segment_name_pairs=[
(['HEAD'], ['LEFT_SHOULDER', 'RIGHT_SHOULDER']),
(['LEFT_SHOULDER', 'RIGHT_SHOULDER'], ['LEFT_SHOULDER']),
(['LEFT_SHOULDER', 'RIGHT_SHOULDER'], ['RIGHT_SHOULDER']),
(['LEFT_SHOULDER', 'RIGHT_SHOULDER'],
['LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_HIP', 'RIGHT_HIP']),
(['LEFT_SHOULDER'], ['LEFT_ELBOW']),
(['RIGHT_SHOULDER'], ['RIGHT_ELBOW']),
(['LEFT_ELBOW'], ['LEFT_WRIST']),
(['RIGHT_ELBOW'], ['RIGHT_WRIST']),
(['LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_HIP',
'RIGHT_HIP'], ['LEFT_HIP', 'RIGHT_HIP']),
(['LEFT_HIP', 'RIGHT_HIP'], ['LEFT_HIP']),
(['LEFT_HIP', 'RIGHT_HIP'], ['RIGHT_HIP']),
(['LEFT_HIP'], ['LEFT_KNEE']), (['RIGHT_HIP'], ['RIGHT_KNEE']),
(['LEFT_KNEE'], ['LEFT_ANKLE']), (['RIGHT_KNEE'], ['RIGHT_ANKLE'])
],
head_keypoint_name=['HEAD'],
neck_keypoint_name=['LEFT_SHOULDER', 'RIGHT_SHOULDER'],
left_shoulder_keypoint_name=['LEFT_SHOULDER'],
right_shoulder_keypoint_name=['RIGHT_SHOULDER'],
left_elbow_keypoint_name=['LEFT_ELBOW'],
right_elbow_keypoint_name=['RIGHT_ELBOW'],
left_wrist_keypoint_name=['LEFT_WRIST'],
right_wrist_keypoint_name=['RIGHT_WRIST'],
spine_keypoint_name=[
'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_HIP', 'RIGHT_HIP'
],
pelvis_keypoint_name=['LEFT_HIP', 'RIGHT_HIP'],
left_hip_keypoint_name=['LEFT_HIP'],
right_hip_keypoint_name=['RIGHT_HIP'],
left_knee_keypoint_name=['LEFT_KNEE'],
right_knee_keypoint_name=['RIGHT_KNEE'],
left_ankle_keypoint_name=['LEFT_ANKLE'],
right_ankle_keypoint_name=['RIGHT_ANKLE'])
class LegacyH36m17KeypointProfile3D(KeypointProfile3D):
"""Legacy Human3.6M 3D 17-keypoint profile."""
def __init__(self):
"""Initializer."""
super(LegacyH36m17KeypointProfile3D, self).__init__(
name='LEGACY_3DH36M17',
keypoint_names=[('Hip', LeftRightType.CENTRAL),
('Head', LeftRightType.CENTRAL),
('Neck/Nose', LeftRightType.CENTRAL),
('Thorax', LeftRightType.CENTRAL),
('LShoulder', LeftRightType.LEFT),
('RShoulder', LeftRightType.RIGHT),
('LElbow', LeftRightType.LEFT),
('RElbow', LeftRightType.RIGHT),
('LWrist', LeftRightType.LEFT),
('RWrist', LeftRightType.RIGHT),
('Spine', LeftRightType.CENTRAL),
('LHip', LeftRightType.LEFT),
('RHip', LeftRightType.RIGHT),
('LKnee', LeftRightType.LEFT),
('RKnee', LeftRightType.RIGHT),
('LFoot', LeftRightType.LEFT),
('RFoot', LeftRightType.RIGHT)],
offset_keypoint_names=['Hip'],
scale_keypoint_name_pairs=[(['Hip'], ['Spine']),
(['Spine'], ['Thorax'])],
segment_name_pairs=[(['Hip'], ['Spine']), (['Hip'], ['LHip']),
(['Hip'], ['RHip']), (['Spine'], ['Thorax']),
(['LHip'], ['LKnee']), (['RHip'], ['RKnee']),
(['LKnee'], ['LFoot']), (['RKnee'], ['RFoot']),
(['Thorax'], ['Neck/Nose']),
(['Thorax'], ['LShoulder']),
(['Thorax'], ['RShoulder']),
(['Neck/Nose'], ['Head']),
(['LShoulder'], ['LElbow']),
(['RShoulder'], ['RElbow']),
(['LElbow'], ['LWrist']), (['RElbow'], ['RWrist'])],
head_keypoint_name=['Head'],
neck_keypoint_name=['Thorax'],
left_shoulder_keypoint_name=['LShoulder'],
right_shoulder_keypoint_name=['RShoulder'],
left_elbow_keypoint_name=['LElbow'],
right_elbow_keypoint_name=['RElbow'],
left_wrist_keypoint_name=['LWrist'],
right_wrist_keypoint_name=['RWrist'],
spine_keypoint_name=['Spine'],
pelvis_keypoint_name=['Hip'],
left_hip_keypoint_name=['LHip'],
right_hip_keypoint_name=['RHip'],
left_knee_keypoint_name=['LKnee'],
right_knee_keypoint_name=['RKnee'],
left_ankle_keypoint_name=['LFoot'],
right_ankle_keypoint_name=['RFoot'])
class LegacyH36m13KeypointProfile3D(KeypointProfile3D):
"""Legacy Human3.6M 3D 13-keypoint profile."""
def __init__(self):
"""Initializer."""
super(LegacyH36m13KeypointProfile3D, self).__init__(
name='LEGACY_3DH36M13',
keypoint_names=[('Head', LeftRightType.CENTRAL),
('LShoulder', LeftRightType.LEFT),
('RShoulder', LeftRightType.RIGHT),
('LElbow', LeftRightType.LEFT),
('RElbow', LeftRightType.RIGHT),
('LWrist', LeftRightType.LEFT),
('RWrist', LeftRightType.RIGHT),
('LHip', LeftRightType.LEFT),
('RHip', LeftRightType.RIGHT),
('LKnee', LeftRightType.LEFT),
('RKnee', LeftRightType.RIGHT),
('LFoot', LeftRightType.LEFT),
('RFoot', LeftRightType.RIGHT)],
offset_keypoint_names=['LHip'],
scale_keypoint_name_pairs=[
(['LHip', 'RHip'], ['LShoulder', 'RShoulder']),
],
segment_name_pairs=[(['LHip', 'RHip'], ['LShoulder', 'RShoulder']),
(['LHip', 'RHip'], ['LHip']),
(['LHip', 'RHip'], ['RHip']), (['LHip'], ['LKnee']),
(['RHip'], ['RKnee']), (['LKnee'], ['LFoot']),
(['RKnee'], ['RFoot']),
(['LShoulder', 'RShoulder'], ['Head']),
(['LShoulder', 'RShoulder'], ['LShoulder']),
(['LShoulder', 'RShoulder'], ['RShoulder']),
(['LShoulder'], ['LElbow']),
(['RShoulder'], ['RElbow']),
(['LElbow'], ['LWrist']), (['RElbow'], ['RWrist'])],
head_keypoint_name=['Head'],
neck_keypoint_name=['LShoulder', 'RShoulder'],
left_shoulder_keypoint_name=['LShoulder'],
right_shoulder_keypoint_name=['RShoulder'],
left_elbow_keypoint_name=['LElbow'],
right_elbow_keypoint_name=['RElbow'],
left_wrist_keypoint_name=['LWrist'],
right_wrist_keypoint_name=['RWrist'],
spine_keypoint_name=['LShoulder', 'RShoulder', 'LHip', 'RHip'],
pelvis_keypoint_name=['LHip', 'RHip'],
left_hip_keypoint_name=['LHip'],
right_hip_keypoint_name=['RHip'],
left_knee_keypoint_name=['LKnee'],
right_knee_keypoint_name=['RKnee'],
left_ankle_keypoint_name=['LFoot'],
right_ankle_keypoint_name=['RFoot'])
class LegacyMpii3dhp17KeypointProfile3D(KeypointProfile3D):
"""Legacy MPII-3DHP 3D 17-keypoint profile."""
def __init__(self):
"""Initializer."""
super(LegacyMpii3dhp17KeypointProfile3D, self).__init__(
name='LEGACY_3DMPII3DHP17',
keypoint_names=[('pelvis', LeftRightType.CENTRAL),
('head', LeftRightType.CENTRAL),
('neck', LeftRightType.CENTRAL),
('head_top', LeftRightType.CENTRAL),
('left_shoulder', LeftRightType.LEFT),
('right_shoulder', LeftRightType.RIGHT),
('left_elbow', LeftRightType.LEFT),
('right_elbow', LeftRightType.RIGHT),
('left_wrist', LeftRightType.LEFT),
('right_wrist', LeftRightType.RIGHT),
('spine', LeftRightType.CENTRAL),
('left_hip', LeftRightType.LEFT),
('right_hip', LeftRightType.RIGHT),
('left_knee', LeftRightType.LEFT),
('right_knee', LeftRightType.RIGHT),
('left_ankle', LeftRightType.LEFT),
('right_ankle', LeftRightType.RIGHT)],
offset_keypoint_names=['pelvis'],
scale_keypoint_name_pairs=[(['pelvis'], ['spine']),
(['spine'], ['neck'])],
segment_name_pairs=[(['pelvis'], ['spine']), (['pelvis'], ['left_hip']),
(['pelvis'], ['right_hip']), (['spine'], ['neck']),
(['left_hip'], ['left_knee']),
(['right_hip'], ['right_knee']),
(['left_knee'], ['left_ankle']),
(['right_knee'], ['right_ankle']),
(['neck'], ['head']), (['neck'], ['left_shoulder']),
(['neck'], ['right_shoulder']),
(['head'], ['head_top']),
(['left_shoulder'], ['left_elbow']),
(['right_shoulder'], ['right_elbow']),
(['left_elbow'], ['left_wrist']),
(['right_elbow'], ['right_wrist'])],
head_keypoint_name=['head'],
neck_keypoint_name=['neck'],
left_shoulder_keypoint_name=['left_shoulder'],
right_shoulder_keypoint_name=['right_shoulder'],
left_elbow_keypoint_name=['left_elbow'],
right_elbow_keypoint_name=['right_elbow'],
left_wrist_keypoint_name=['left_wrist'],
right_wrist_keypoint_name=['right_wrist'],
spine_keypoint_name=['spine'],
pelvis_keypoint_name=['pelvis'],
left_hip_keypoint_name=['left_hip'],
right_hip_keypoint_name=['right_hip'],
left_knee_keypoint_name=['left_knee'],
right_knee_keypoint_name=['right_knee'],
left_ankle_keypoint_name=['left_ankle'],
right_ankle_keypoint_name=['right_ankle'])
class Std13KeypointProfile2D(KeypointProfile2D):
"""Standard 2D 13-keypoint profile."""
def __init__(self):
"""Initializer."""
super(Std13KeypointProfile2D, self).__init__(
name='2DSTD13',
keypoint_names=[('NOSE_TIP', LeftRightType.CENTRAL),
('LEFT_SHOULDER', LeftRightType.LEFT),
('RIGHT_SHOULDER', LeftRightType.RIGHT),
('LEFT_ELBOW', LeftRightType.LEFT),
('RIGHT_ELBOW', LeftRightType.RIGHT),
('LEFT_WRIST', LeftRightType.LEFT),
('RIGHT_WRIST', LeftRightType.RIGHT),
('LEFT_HIP', LeftRightType.LEFT),
('RIGHT_HIP', LeftRightType.RIGHT),
('LEFT_KNEE', LeftRightType.LEFT),
('RIGHT_KNEE', LeftRightType.RIGHT),
('LEFT_ANKLE', LeftRightType.LEFT),
('RIGHT_ANKLE', LeftRightType.RIGHT)],
offset_keypoint_names=['LEFT_HIP', 'RIGHT_HIP'],
scale_keypoint_name_pairs=[(['LEFT_SHOULDER'], ['RIGHT_SHOULDER']),
(['LEFT_SHOULDER'], ['LEFT_HIP']),
(['LEFT_SHOULDER'], ['RIGHT_HIP']),
(['RIGHT_SHOULDER'], ['LEFT_HIP']),
(['RIGHT_SHOULDER'], ['RIGHT_HIP']),
(['LEFT_HIP'], ['RIGHT_HIP'])],
segment_name_pairs=[(['NOSE_TIP'], ['LEFT_SHOULDER']),
(['NOSE_TIP'], ['RIGHT_SHOULDER']),
(['LEFT_SHOULDER'], ['RIGHT_SHOULDER']),
(['LEFT_SHOULDER'], ['LEFT_ELBOW']),
(['RIGHT_SHOULDER'], ['RIGHT_ELBOW']),
(['LEFT_ELBOW'], ['LEFT_WRIST']),
(['RIGHT_ELBOW'], ['RIGHT_WRIST']),
(['LEFT_SHOULDER'], ['LEFT_HIP']),
(['RIGHT_SHOULDER'], ['RIGHT_HIP']),
(['LEFT_HIP'], ['RIGHT_HIP']),
(['LEFT_HIP'], ['LEFT_KNEE']),
(['RIGHT_HIP'], ['RIGHT_KNEE']),
(['LEFT_KNEE'], ['LEFT_ANKLE']),
(['RIGHT_KNEE'], ['RIGHT_ANKLE'])],
compatible_keypoint_name_dict={
'3DSTD16': [
'HEAD', 'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_ELBOW',
'RIGHT_ELBOW', 'LEFT_WRIST', 'RIGHT_WRIST', 'LEFT_HIP',
'RIGHT_HIP', 'LEFT_KNEE', 'RIGHT_KNEE', 'LEFT_ANKLE',
'RIGHT_ANKLE'
],
'3DSTD13': [
'HEAD', 'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_ELBOW',
'RIGHT_ELBOW', 'LEFT_WRIST', 'RIGHT_WRIST', 'LEFT_HIP',
'RIGHT_HIP', 'LEFT_KNEE', 'RIGHT_KNEE', 'LEFT_ANKLE',
'RIGHT_ANKLE'
],
'LEGACY_3DH36M17': [
'Head', 'LShoulder', 'RShoulder', 'LElbow', 'RElbow', 'LWrist',
'RWrist', 'LHip', 'RHip', 'LKnee', 'RKnee', 'LFoot', 'RFoot'
],
'LEGACY_3DMPII3DHP17': [
'head', 'left_shoulder', 'right_shoulder', 'left_elbow',
'right_elbow', 'left_wrist', 'right_wrist', 'left_hip',
'right_hip', 'left_knee', 'right_knee', 'left_ankle',
'right_ankle'
],
},
head_keypoint_name=['NOSE_TIP'],
neck_keypoint_name=['LEFT_SHOULDER', 'RIGHT_SHOULDER'],
left_shoulder_keypoint_name=['LEFT_SHOULDER'],
right_shoulder_keypoint_name=['RIGHT_SHOULDER'],
left_elbow_keypoint_name=['LEFT_ELBOW'],
right_elbow_keypoint_name=['RIGHT_ELBOW'],
left_wrist_keypoint_name=['LEFT_WRIST'],
right_wrist_keypoint_name=['RIGHT_WRIST'],
spine_keypoint_name=[
'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_HIP', 'RIGHT_HIP'
],
pelvis_keypoint_name=['LEFT_HIP', 'RIGHT_HIP'],
left_hip_keypoint_name=['LEFT_HIP'],
right_hip_keypoint_name=['RIGHT_HIP'],
left_knee_keypoint_name=['LEFT_KNEE'],
right_knee_keypoint_name=['RIGHT_KNEE'],
left_ankle_keypoint_name=['LEFT_ANKLE'],
right_ankle_keypoint_name=['RIGHT_ANKLE'])
class LegacyCoco13KeypointProfile2D(Std13KeypointProfile2D):
"""Legacy COCO 2D 13-keypoint profile.
  This profile is the same as the `2DSTD13` profile, except for the name.
"""
def __init__(self):
"""Initializer."""
super(LegacyCoco13KeypointProfile2D, self).__init__()
self._name = 'LEGACY_2DCOCO13'
class LegacyH36m13KeypointProfile2D(KeypointProfile2D):
"""Legacy Human3.6M 2D 13-keypoint profile."""
def __init__(self):
"""Initializer."""
super(LegacyH36m13KeypointProfile2D,
self).__init__(
name='LEGACY_2DH36M13',
keypoint_names=[('Head', LeftRightType.CENTRAL),
('LShoulder', LeftRightType.LEFT),
('RShoulder', LeftRightType.RIGHT),
('LElbow', LeftRightType.LEFT),
('RElbow', LeftRightType.RIGHT),
('LWrist', LeftRightType.LEFT),
('RWrist', LeftRightType.RIGHT),
('LHip', LeftRightType.LEFT),
('RHip', LeftRightType.RIGHT),
('LKnee', LeftRightType.LEFT),
('RKnee', LeftRightType.RIGHT),
('LFoot', LeftRightType.LEFT),
('RFoot', LeftRightType.RIGHT)],
offset_keypoint_names=['LHip', 'RHip'],
scale_keypoint_name_pairs=[(['LShoulder'], ['RShoulder']),
(['LShoulder'], ['LHip']),
(['LShoulder'], ['RHip']),
(['RShoulder'], ['LHip']),
(['RShoulder'], ['RHip']),
(['LHip'], ['RHip'])],
segment_name_pairs=[(['Head'], ['LShoulder']),
(['Head'], ['RShoulder']),
(['LShoulder'], ['LElbow']),
(['LElbow'], ['LWrist']),
(['RShoulder'], ['RElbow']),
(['RElbow'], ['RWrist']),
(['LShoulder'], ['LHip']),
(['RShoulder'], ['RHip']),
(['LHip'], ['LKnee']), (['LKnee'], ['LFoot']),
(['RHip'], ['RKnee']), (['RKnee'], ['RFoot']),
(['LShoulder'], ['RShoulder']),
(['LHip'], ['RHip'])],
compatible_keypoint_name_dict={
'3DSTD16': [
'HEAD', 'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_ELBOW',
'RIGHT_ELBOW', 'LEFT_WRIST', 'RIGHT_WRIST', 'LEFT_HIP',
'RIGHT_HIP', 'LEFT_KNEE', 'RIGHT_KNEE', 'LEFT_ANKLE',
'RIGHT_ANKLE'
],
'3DSTD13': [
'HEAD', 'LEFT_SHOULDER', 'RIGHT_SHOULDER', 'LEFT_ELBOW',
'RIGHT_ELBOW', 'LEFT_WRIST', 'RIGHT_WRIST', 'LEFT_HIP',
'RIGHT_HIP', 'LEFT_KNEE', 'RIGHT_KNEE', 'LEFT_ANKLE',
'RIGHT_ANKLE'
],
'LEGACY_3DH36M17': [
'Head', 'LShoulder', 'RShoulder', 'LElbow', 'RElbow',
'LWrist', 'RWrist', 'LHip', 'RHip', 'LKnee', 'RKnee',
'LFoot', 'RFoot'
],
'LEGACY_3DMPII3DHP17': [
'head', 'left_shoulder', 'right_shoulder', 'left_elbow',
'right_elbow', 'left_wrist', 'right_wrist', 'left_hip',
'right_hip', 'left_knee', 'right_knee', 'left_ankle',
'right_ankle'
],
},
head_keypoint_name=['Head'],
neck_keypoint_name=['LShoulder', 'RShoulder'],
left_shoulder_keypoint_name=['LShoulder'],
right_shoulder_keypoint_name=['RShoulder'],
left_elbow_keypoint_name=['LElbow'],
right_elbow_keypoint_name=['RElbow'],
left_wrist_keypoint_name=['LWrist'],
right_wrist_keypoint_name=['RWrist'],
spine_keypoint_name=['LShoulder', 'RShoulder', 'LHip', 'RHip'],
pelvis_keypoint_name=['LHip', 'RHip'],
left_hip_keypoint_name=['LHip'],
right_hip_keypoint_name=['RHip'],
left_knee_keypoint_name=['LKnee'],
right_knee_keypoint_name=['RKnee'],
left_ankle_keypoint_name=['LFoot'],
right_ankle_keypoint_name=['RFoot'])
def create_keypoint_profile_or_die(keypoint_profile_name):
"""Creates keypoint profile based on name.
Args:
keypoint_profile_name: A string for keypoint profile name.
Returns:
    A keypoint profile class object.
Raises:
ValueError: If keypoint profile name is unsupported.
"""
if keypoint_profile_name == '3DSTD16':
return Std16KeypointProfile3D()
if keypoint_profile_name == '3DSTD13':
return Std13KeypointProfile3D()
if keypoint_profile_name == 'LEGACY_3DH36M17':
return LegacyH36m17KeypointProfile3D()
if keypoint_profile_name == 'LEGACY_3DH36M13':
return LegacyH36m13KeypointProfile3D()
if keypoint_profile_name == 'LEGACY_3DMPII3DHP17':
return LegacyMpii3dhp17KeypointProfile3D()
if keypoint_profile_name == '2DSTD13':
return Std13KeypointProfile2D()
if keypoint_profile_name == 'LEGACY_2DCOCO13':
return LegacyCoco13KeypointProfile2D()
if keypoint_profile_name == 'LEGACY_2DH36M13':
return LegacyH36m13KeypointProfile2D()
raise ValueError('Unsupported keypoint profile name: `%s`.' %
str(keypoint_profile_name))
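# Minimal usage sketch (illustrative; the profile name below is one of the values
# handled above, nothing outside this module is assumed):
#   profile = create_keypoint_profile_or_die('LEGACY_3DH36M17')
#   # profile.name == 'LEGACY_3DH36M17'; an unknown name raises ValueError.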
|
apache-2.0
| 7,232,078,692,394,039,000
| 41.027586
| 80
| 0.556059
| false
| 3.58708
| false
| false
| false
|
stilobique/UE4-Tools
|
controllers/data_buffer.py
|
1
|
3448
|
import bpy
import pyperclip
from math import degrees
class DataBuffer(bpy.types.Operator):
"""Export data Position, Rotation and Scale of all selected element"""
bl_idname = "object.data_buffer"
bl_label = "Paste information buffer"
def execute(self, context):
objs = context.selected_objects
string_data_prefixe = 'Begin Map \n'' Begin Level \n'
string_data_suffixe = ' End Level\n''Begin Surface\n''End ' \
'Surface\n''End Map'
string_data = ""
for element in objs:
if element is not None:
position_x = str(round(element.location.x * 100, 2))
position_y = str(round(element.location.y * -100, 2))
position_z = str(round(element.location.z * 100, 2))
rotation_pitch = str(round(degrees(element.rotation_euler.y), 2))
rotation_yaw = str(round(degrees(element.rotation_euler.z),
2)* -1)
rotation_roll = str(round(degrees(element.rotation_euler.x), 2))
string_data = string_data + \
' Begin Actor '\
'Class=StaticMeshActor '\
'Name=' + element.name + ' ' \
'Archetype=StaticMeshActor'\
'\'/Script/Engine.Default__StaticMeshActor\'\n'\
' Begin Object Class=StaticMeshComponent '\
'Name=StaticMeshComponent0 '\
'ObjName=StaticMeshComponent0 ' \
'Archetype=StaticMeshComponent'\
'\'/Script/Engine.Default__StaticMeshActor:StaticMeshComponent0' \
'\'\n'\
' End Object\n'\
' Begin Object '\
'Name=StaticMeshComponent0\n'\
' StaticMesh=StaticMesh\'/Engine/EditorMeshes/EditorCube' \
'.EditorCube\' \n'\
' RelativeLocation=(X=' + position_x + ',Y=' + \
position_y + ',Z=' + position_z + ')\n'\
' RelativeScale3D=(X=' + str(round(element.scale.x, 2)) + ',' \
'Y=' + str(round(element.scale.y, 2)) + ',' \
'Z=' + \
str(round(element.scale.z, 2)) + ')\n'\
' RelativeRotation=(Pitch=' + rotation_pitch + ',Yaw=' + \
rotation_yaw + ',' \
'Roll=' + \
rotation_roll + ')\n'\
' CustomProperties\n' \
' End Object\n' \
' StaticMeshComponent=StaticMeshComponent0\n' \
' Components(0)=StaticMeshComponent0\n' \
' RootComponent=StaticMeshComponent0\n' \
' ActorLabel="' + element.name + '"\n' \
                              ' End Actor\n'
else:
                self.report({'WARNING'}, "Select at least one object.")
return {'CANCELLED'}
string_complete = string_data_prefixe + string_data \
+ string_data_suffixe
# copyBuffer(objs[0].name)
pyperclip.copy(string_complete)
print(string_complete)
        self.report({'INFO'}, "Data copied to your buffer.")
return {'FINISHED'}
def register():
bpy.utils.register_class(DataBuffer)
def unregister():
bpy.utils.unregister_class(DataBuffer)
if __name__ == "__main__":
register()
|
gpl-3.0
| -6,769,097,919,469,354,000
| 37.322222
| 114
| 0.49739
| false
| 4.099881
| false
| false
| false
|
posquit0/dotfiles
|
vim/.vim/ycm_extra_conf.py
|
1
|
6657
|
# This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
'-Werror',
#'-Wc++98-compat',
'-Wno-long-long',
'-Wno-variadic-macros',
'-fexceptions',
'-DNDEBUG',
# You 100% do NOT need -DUSE_CLANG_COMPLETER in your flags; only the YCM
# source code needs it.
'-DUSE_CLANG_COMPLETER',
# THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know which
# language to use when compiling headers. So it will guess. Badly. So C++
# headers will be compiled as C headers. You don't want that so ALWAYS specify
# a "-std=<something>".
# For a C project, you would set this to something like 'c99' instead of
# 'c++11'.
'-std=c++11',
# ...and the same thing goes for the magic -x option which specifies the
# language that the files to be compiled are written in. This is mostly
# relevant for c++ headers.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x',
'c++',
'-isystem',
'../BoostParts',
'-isystem',
# This path will only work on OS X, but extra paths that don't exist are not
# harmful
'/System/Library/Frameworks/Python.framework/Headers',
'-isystem',
'../llvm/include',
'-isystem',
'../llvm/tools/clang/include',
'-I',
'.',
'-I',
'./ClangCompleter',
'-isystem',
'./tests/gmock/gtest',
'-isystem',
'./tests/gmock/gtest/include',
'-isystem',
'./tests/gmock',
'-isystem',
'./tests/gmock/include',
'-isystem',
'/usr/include',
'-isystem',
'/usr/local/include',
'-isystem',
'/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/../include/c++/v1',
'-isystem',
'/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
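# For example (hypothetical path; only use this if the folder really contains a
# compile_commands.json generated as described above):
#   compilation_database_folder = '/home/me/myproject/build'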
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
  # The compile_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
# NOTE: This is just for YouCompleteMe; it's highly likely that your project
# does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR
# ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.
try:
final_flags.remove( '-stdlib=libc++' )
except ValueError:
pass
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
|
mit
| -7,185,472,345,298,154,000
| 32.964286
| 107
| 0.705122
| false
| 3.55609
| true
| false
| false
|
AceSrc/datagon
|
datagon/generator/translator.py
|
1
|
3802
|
import parser
import random
result = ''
symbol = {}
cnt = 0
def Translator(ast):
def PrintError(x):
print(x)
exit(1)
def PrintMsg(x):
print(x)
def Output(x):
global result
result += str(x) + ' '
def GetRandomInt(interval):
if isinstance(interval, str):
PrintError('Error: ' + interval)
if isinstance(interval, int):
return interval
if interval[0] > interval[1]:
            print('!!! Invalid Interval')
exit(1)
rt = random.randint(interval[0], interval[1])
return rt
def AddPermutation(interval):
n = GetRandomInt(interval)
p = [i for i in range(1, n + 1)]
random.shuffle(p)
global result
for i in p:
result += str(i) + ' '
return None
def Add(a, b):
return GetRandomInt(a) + GetRandomInt(b)
    def Mul(a, b):
        return GetRandomInt(a) * GetRandomInt(b)
    def Sub(a, b):
        return GetRandomInt(a) - GetRandomInt(b)
def AddWeight(n, interval):
n = GetRandomInt(n)
for i in range(0, n):
Output(GetRandomInt(interval))
def RepeatOutput(node):
times = TranslateNode(node.params[0], node)
for i in range(0, times):
TranslateArray(node)
AddNewLine()
def HandleFunction(node):
print('handling function: ' + node.type)
print(node.params)
cases = {
'print': lambda x: Output(GetRandomInt(TranslateNode(node.params[0], x))),
'add': lambda x: Add(TranslateNode(x.params[0], x), TranslateNode(x.params[1], x)),
'sub': lambda x: Sub(TranslateNode(x.params[0], x), TranslateNode(x.params[1], x)),
'mul': lambda x: Mul(TranslateNode(x.params[0], x), TranslateNode(x.params[1], x)),
'permutation': lambda x: AddPermutation(TranslateNode(x.params[0], x)),
'weight': lambda x: AddWeight(TranslateNode(x.params[0], x), TranslateNode(x.params[1], x)),
'repeat': lambda x: RepeatOutput(x),
'set': lambda x: SetVariableValue(x.params[0].name, TranslateNode(x.params[1], x))
}
return cases.get(node.type, lambda x: None)(node)
def AddNewLine():
global cnt
cnt += 1
if cnt <= 0:
return
cnt -= 1
global result
result += '\n'
def CleanLine():
print("Clean")
global cnt
cnt -= 1
def HandleFormat(node):
print("Handling Format: " + node.value)
cases = {
'newline': lambda x: AddNewLine(),
'clearline': lambda x: CleanLine(),
}
return cases.get(node.value, lambda x: None)(node)
def GetVariableValue(name):
return symbol.get(name, name)
def SetVariableValue(name, value):
value = GetRandomInt(value)
symbol[name] = value
print('Set variable: ' + str(name) + ' = ' + str(symbol[name]))
return symbol[name]
def TranslateArray(node):
for x in node.params:
TranslateNode(x, node)
def TranslateNode(node, parent):
cases = {
parser.Function: lambda x: HandleFunction(x),
parser.Number: lambda x: x.value,
parser.Interval:
lambda x: [TranslateNode(x.left, x) + x.leftoffset, TranslateNode(x.right, x) + x.rightoffset],
parser.String: lambda x: GetVariableValue(x.name),
parser.Setvar: lambda x: SetVariableValue(x),
parser.Program: lambda x: TranslateArray(x),
parser.Format: lambda x: HandleFormat(x),
}
return cases.get(node.__class__, lambda x: None)(node)
TranslateArray(ast)
return result
|
mit
| 9,119,672,105,469,806,000
| 29.66129
| 111
| 0.553919
| false
| 3.779324
| false
| false
| false
|
sesh/djver
|
djver/djver.py
|
1
|
6137
|
#!/usr/bin/env python
"""
djver.
Usage:
djver.py [<url>] [--static-path=<static-path>] [--find-diffs] [--verbose]
Options:
--static-path=<static-path> URL path to the site's static files [default: /static/].
--find-diffs Attempt to find differences between the known versions of Django
--verbose Turn on verbose logging
"""
import os
import sys
import subprocess
import shutil
import difflib
import requests
from docopt import docopt
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
try:
from packaging.version import parse
except ImportError:
def parse(version):
return None
RESPONSE_CACHE = {}
THIRD_PARTY_CSS = [
# Third party apps, might disguise version numbers
('django-flat-theme, or Django 1.9', 'fonts.css', 'Roboto'),
('django-suit', 'forms.css', 'Django Suit'),
]
ADMIN_CHANGES = [
('2.1.2-2.1', 'css/base.css', 'background: url(../img/icon-viewlink.svg) 0 1px no-repeat;'),
('2.0.9-2.0', 'css/base.css', 'textarea:focus, select:focus, .vTextField:focus {'),
('1.11.16-1.11', 'css/base.css', 'background-position: right 7px center;'),
('1.10.8-1.10', 'css/base.css', 'color: #000;'),
('1.9.13-1.9', 'css/widgets.css', 'margin-left: 7px;'),
('1.8.19-1.8.2', 'css/forms.css', 'clear: left;'),
('1.8.1', 'css/widgets.css', '.related-widget-wrapper {'),
('1.8', 'css/widgets.css', 'opacity: 1;'),
('1.7.11-1.7', 'css/base.css', '#branding a:hover {'),
('1.6.11-1.6', 'css/widgets.css', 'width: 360px;'),
('1.5.12-1.5', 'css/widgets.css', '.url a {'),
('1.4.22-1.4.1', 'css/widgets.css', '.inline-group .aligned .selector .selector-filter label {'),
]
def check_str(url, search_str, verbose=False):
if url in RESPONSE_CACHE.keys():
content = RESPONSE_CACHE[url]
status_code = 200
else:
response = requests.get(url)
content = response.content.decode().replace(' ', '')
status_code = response.status_code
if verbose:
print('[{}] {}'.format(status_code, url))
if status_code == 200:
RESPONSE_CACHE[url] = content
return search_str.replace(' ', '') in content
def check_version(base_url, static_path, verbose=False):
if not base_url.startswith('http'):
base_url = 'http://{}'.format(base_url)
for version, path, string in ADMIN_CHANGES:
url = '{}{}admin/{}'.format(base_url, static_path, path)
if check_str(url, string, verbose):
return version
def find_diffs():
response = requests.get('https://pypi.org/pypi/Django/json')
versions = [parse(v) for v in response.json()['releases'].keys()]
versions = sorted(versions, reverse=True)
print(versions)
versions = [str(v) for v in versions if v.is_prerelease == False and v > parse("1.3.99")]
# we only care about 1.4 and above
# favour files _not_ found in django-flat-theme
files = [
# admin js
# "js/SelectBox.js",
# "js/actions.js",
# "js/actions.min.js",
# "js/calendar.js",
# "js/collapse.js",
# "js/collapse.min.js",
# "js/core.js",
# "js/inlines.js",
# "js/inlines.min.js",
# "js/jquery.init.js",
# "js/prepopulate.js",
# "js/prepopulate.min.js",
# "js/timeparse.js",
# "js/urlify.js",
# admin css
'css/widgets.css', 'css/base.css', 'css/forms.css', 'css/login.css', 'css/dashboard.css',
# 'css/ie.css', # removed in 1.9.x
]
for v in versions:
os.makedirs('files/{}/css/'.format(v), exist_ok=True)
os.makedirs('files/{}/js/'.format(v), exist_ok=True)
for fn in files:
full_path = 'files/{}/{}'.format(v, fn)
if not os.path.exists(full_path):
repo = 'https://raw.githubusercontent.com/django/django/'
url = '{}{}/django/contrib/admin/static/admin/{}'.format(repo, v, fn)
if v.startswith('1.3'):
url = '{}{}/django/contrib/admin/media/{}'.format(repo, v, fn)
response = requests.get(url)
print('[{}] {}'.format(response.status_code, url))
with open(full_path, 'wb') as f:
f.write(response.content)
matched_versions = []
for i, v1 in enumerate(versions[:-1]):
matched_versions.append(v1)
v2 = versions[i + 1]
new_line = None
for f in files:
f1 = open('files/{}/{}'.format(v1, f)).read()
f2 = open('files/{}/{}'.format(v2, f)).read()
# compare f2 to f1 so that we see _added_ lines
diff = difflib.ndiff(f2.splitlines(), f1.splitlines())
for line in diff:
if line.startswith('+ ') and '/*' not in line:
line = line[2:]
# ensure this line is unique within the file
if f1.count(line) == 1:
# we also want to make sure that it doesn't appear in any _older_ versions
for v in versions[i + 1:]:
f3 = open('files/{}/{}'.format(v, f)).read()
if line in f3:
break
new_line = line
if new_line:
if len(matched_versions) > 1:
print("('{}', '{}', '{}'),".format('-'.join([matched_versions[0], matched_versions[-1]]), f, new_line.strip()))
else:
print("('{}', '{}', '{}'),".format(matched_versions[0], f, new_line.strip()))
matched_versions = []
break
def djver():
arguments = docopt(__doc__, version='djver 2.0.0')
if arguments['--find-diffs']:
find_diffs()
elif arguments['<url>']:
version = check_version(arguments['<url>'], arguments['--static-path'], arguments['--verbose'])
if version:
print(version)
else:
print('Unable to detect version.')
if __name__ == '__main__':
djver()
|
mit
| 6,160,630,217,541,258,000
| 31.994624
| 131
| 0.536093
| false
| 3.455518
| false
| false
| false
|
pythonbyexample/PBE
|
dbe/classviews/edit_custom.py
|
1
|
5732
|
from django.forms import formsets
from django.contrib import messages
from django.db.models import Q
from detail import *
from edit import *
from dbe.shared.utils import *
class SearchFormViewMixin(BaseFormView):
ignore_get_keys = ["page"]
def get_form_kwargs(self):
""" Returns the keyword arguments for instanciating the form. """
r = self.request
kwargs = dict(initial=self.get_initial())
if r.method in ("POST", "PUT"):
kwargs.update(dict(data=r.POST, files=r.FILES))
elif r.GET:
# do get form processing if there's get data that's not in ignore list
if [k for k in r.GET.keys() if k not in self.ignore_get_keys]:
kwargs.update(dict(data=r.GET))
return kwargs
def get(self, request):
form = self.get_form()
if self.request.GET:
if form.is_valid():
self.process_form(form)
else:
return self.form_invalid(form)
return self.render_to_response(self.get_context_data(form=form))
class SearchFormView(FormView, SearchFormViewMixin):
"""FormView for search pages."""
class UpdateView2(UpdateView):
def get_success_url(self):
return self.object.get_absolute_url()
def get_context_data(self, **kwargs):
c = super(UpdateView2, self).get_context_data(**kwargs)
c.update(self.add_context())
return c
def add_context(self):
return {}
class UserUpdateView(UpdateView2):
def get_form_kwargs(self):
d = super(UpdateView2, self).get_form_kwargs()
d.update(dict(user=self.request.user))
return d
class CreateView2(CreateView):
def get_context_data(self, **kwargs):
c = super(CreateView2, self).get_context_data(**kwargs)
if hasattr(self, "add_context"):
c.update(self.add_context())
return c
def get_form_kwargs(self):
d = super(CreateView2, self).get_form_kwargs()
d.update(dict(user=self.request.user))
return d
class OwnObjMixin(SingleObjectMixin):
"""Access object, checking that it belongs to current user."""
item_name = None # used in permissions error message
owner_field = "creator" # object's field to compare to current user to check permission
def perm_error(self):
return HttpResponse("You don't have permissions to access this %s." % self.item_name)
def validate(self, obj):
if getattr(obj, self.owner_field) == self.request.user:
return True
def get_object(self, queryset=None):
obj = super(OwnObjMixin, self).get_object(queryset)
if not self.validate(obj): return None
return obj
class DeleteOwnObjView(OwnObjMixin, DeleteView):
"""Delete object, checking that it belongs to current user."""
class UpdateOwnObjView(OwnObjMixin, UpdateView2):
"""Update object, checking that it belongs to current user."""
class SearchEditFormset(SearchFormView):
"""Search form filtering a formset of items to be updated."""
model = None
formset_class = None
form_class = None
def get_form_class(self):
if self.request.method == "GET": return self.form_class
else: return self.formset_class
def get_queryset(self, form=None):
return self.model.objects.filter(self.get_query(form))
def get_query(self, form):
"""This method should always be overridden, applying search from the `form`."""
return Q()
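    # Example override in a subclass (the field name is hypothetical):
    #   def get_query(self, form):
    #       return Q(name__icontains=form.cleaned_data.get("name", ""))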
def form_valid(self, form):
formset = None
if self.request.method == "GET":
formset = self.formset_class(queryset=self.get_queryset(form))
else:
form.save()
messages.success(self.request, "%s(s) were updated successfully" % self.model.__name__.capitalize())
formset = form
form = self.form_class(self.request.GET)
return self.render_to_response(self.get_context_data(form=form, formset=formset))
def form_invalid(self, form):
formset = form
form = self.form_class(self.request.GET)
return self.render_to_response(self.get_context_data(form=form, formset=formset))
def get(self, request, *args, **kwargs):
form = self.get_form()
if form.is_bound:
if form.is_valid(): return self.form_valid(form)
else: return self.form_invalid(form)
return self.render_to_response(self.get_context_data(form=form))
class CreateWithFormset(FormView):
""" Create multiple objects using a formset.
Passes user as an arg to each form init function.
"""
model = None
form_class = None
extra = 5
def get_form(self, form_class=None):
Formset = formsets.formset_factory(self.form_class, extra=self.extra)
Formset.form = staticmethod(curry(self.form_class, user=self.request.user))
return Formset(**self.get_form_kwargs())
def post(self, request, *args, **kwargs):
self.object = None
formset = self.get_form()
if formset.is_valid():
return self.form_valid(formset)
else:
return self.form_invalid(formset)
def form_valid(self, formset):
for form in formset:
if form.has_changed():
form.save()
return HttpResponseRedirect(reverse(self.success_url_name))
def form_invalid(self, form):
return self.render_to_response(self.get_context_data(form=form))
def get_context_data(self, **kwargs):
context = super(CreateWithFormset, self).get_context_data(**kwargs)
return updated( context, dict(formset=self.get_form()) )
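# Minimal usage sketch (model, form and URL names below are hypothetical):
# class AddNotesView(CreateWithFormset):
#     model = Note
#     form_class = NoteForm
#     extra = 3
#     success_url_name = "note_list"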
|
bsd-3-clause
| -6,290,611,191,496,170,000
| 32.325581
| 112
| 0.629449
| false
| 3.834114
| false
| false
| false
|
CINPLA/expipe-dev
|
expipe-templates-cinpla/get_templates.py
|
1
|
1153
|
import expipe
import os.path as op
import os
import json
overwrite = True
base_dir = op.join(op.abspath(op.dirname(op.expanduser(__file__))), 'templates')
templates = expipe.core.FirebaseBackend("/templates").get()
for template, val in templates.items():
identifier = val.get('identifier')
if identifier is None:
continue
path = template.split('_')[0]
name = identifier.split('_')[1:]
if path == 'person':
continue
if len(name) == 0:
continue
raise ValueError('No grouping on template "' + template + '"')
fbase = '_'.join(name)
fname = op.join(base_dir, path, fbase + '.json')
result = expipe.get_template(template=template)
if op.exists(fname) and not overwrite:
raise FileExistsError('The filename "' + fname +
'" exists, set ovewrite to true.')
os.makedirs(op.dirname(fname), exist_ok=True)
print('Saving template "' + template + '" to "' + fname + '"')
with open(fname, 'w') as outfile:
result = expipe.core.convert_to_firebase(result)
json.dump(result, outfile,
sort_keys=True, indent=4)
|
gpl-3.0
| -6,103,008,432,294,853,000
| 33.939394
| 80
| 0.611448
| false
| 3.743506
| false
| false
| false
|
prontodev/stillwithus
|
stillwithus/clientsites/tests.py
|
1
|
6014
|
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from .models import ClientSite
from servers.models import Server
class ClientSiteTest(TestCase):
def test_create_new_clientsite(self):
clientsite = ClientSite()
clientsite.domain = 'www.atlasperformancechicago.com'
self.assertFalse(clientsite.id)
clientsite.save()
self.assertTrue(clientsite.id)
clientsite = ClientSite.objects.get(id=clientsite.id)
self.assertEqual(clientsite.domain, 'www.atlasperformancechicago.com')
class ClientSiteViewTest(TestCase):
def setUp(self):
self.url = reverse('clientsites')
def test_clientsite_should_be_accessible(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def test_clientsite_should_use_correct_template(self):
response = self.client.get(self.url)
self.assertTemplateUsed(response, 'clientsites.html')
def test_clientsite_should_have_title(self):
response = self.client.get(self.url)
expected = '<title>Still with Us?</title>'
self.assertContains(response, expected, status_code=200)
def test_clientsite_should_render_html_for_clientsites_correctly(self):
response = self.client.get(self.url)
expected = '<h1>Client Sites</h1>'
self.assertContains(response, expected, status_code=200)
expected = '<thead><tr><th>Domain</th><th>Still with Us?'
expected += '</th><th>Note</th></tr></thead>'
self.assertContains(response, expected, status_code=200)
def test_clientsite_should_query_domains_and_check_if_still_with_us(self):
Server.objects.bulk_create([
Server(name='Pronto 1', ip='54.72.3.133'),
Server(name='Pronto 2', ip='54.72.3.103'),
Server(name='Pronto 3', ip='54.252.146.70'),
Server(name='Pronto 4', ip='54.67.50.151'),
Server(name='Pronto 5', ip='52.1.32.33'),
Server(name='Pronto 6', ip='27.254.65.18'),
Server(name='Pronto 7', ip='54.246.93.4'),
Server(name='Pronto 8', ip='54.228.219.35'),
Server(name='Pronto 9', ip='54.72.3.253'),
Server(name='Pronto 10', ip='54.171.171.172'),
Server(name='Pronto 11', ip='46.137.96.191'),
Server(name='Pronto 12', ip='54.194.28.91'),
Server(name='Pronto 13', ip='54.72.53.55'),
])
ClientSite.objects.bulk_create([
ClientSite(domain='www.prontomarketing.com'),
ClientSite(domain='www.atlasperformancechicago.com'),
])
response = self.client.get(self.url)
expected = '<tr><td><a href="http://www.prontomarketing.com" '
expected += 'target="_blank">www.prontomarketing.com</a></td>'
expected += '<td style="color: red;">No</td><td>---</td></tr>'
self.assertContains(response, expected, count=1, status_code=200)
expected = '<td><a href="http://www.prontomarketing.com" '
expected += 'target="_blank">www.prontomarketing.com</a></td>'
self.assertContains(response, expected, count=1, status_code=200)
expected = '<tr><td><a href="http://www.atlasperformancechicago.com" '
expected += 'target="_blank">www.atlasperformancechicago.com</a></td>'
expected += '<td>Yes</td><td>---</td></tr>'
self.assertContains(response, expected, count=1, status_code=200)
expected = '<td><a href="http://www.atlasperformancechicago.com" '
expected += 'target="_blank">www.atlasperformancechicago.com</a></td>'
self.assertContains(response, expected, count=1, status_code=200)
def test_clientsite_should_add_note_if_cannot_get_ip(self):
ClientSite.objects.create(
domain='dayton.kaiafit.com'
)
response = self.client.get(self.url)
expected = '<tr><td><a href="http://dayton.kaiafit.com" '
expected += 'target="_blank">dayton.kaiafit.com</a></td>'
expected += '<td>---</td><td>Cannot get IP</td></tr>'
self.assertContains(response, expected, count=1, status_code=200)
def test_clientsite_should_render_html_for_servers_correctly(self):
response = self.client.get(self.url)
expected = '<h1>Servers</h1>'
self.assertContains(response, expected, status_code=200)
expected = '<thead><tr><th>Name</th><th>IP</th></tr></thead>'
self.assertContains(response, expected, status_code=200)
def test_clientsite_should_query_server_name_and_ip_correctly(self):
Server.objects.create(
name='AWS ELB',
ip='54.72.3.133'
)
Server.objects.create(
name='Bypronto',
ip='54.171.171.172'
)
response = self.client.get(self.url)
expected = '<tr><td>AWS ELB</td><td>54.72.3.133</td></tr>'
self.assertContains(response, expected, status_code=200)
expected = '<tr><td>Bypronto</td><td>54.171.171.172</td></tr>'
self.assertContains(response, expected, status_code=200)
class ClientSiteAdminTest(TestCase):
def setUp(self):
admin = User.objects.create_superuser(
'admin',
'admin@test.com',
'password'
)
self.client.login(
username='admin',
password='password'
)
self.url = '/admin/clientsites/clientsite/'
def test_clientsite_admin_page_should_be_accessible(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def test_clientsite_admin_page_should_name_and_domain_columns(self):
ClientSite.objects.create(
domain='www.prontomarketing.com'
)
response = self.client.get(self.url)
expected = '<div class="text"><a href="?o=1">Domain</a></div>'
self.assertContains(response, expected, status_code=200)
|
mit
| -631,185,773,679,272,400
| 37.305732
| 78
| 0.618557
| false
| 3.490424
| true
| false
| false
|
MMohan1/dwitter
|
dwitter_app/models.py
|
1
|
1342
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
import hashlib
class Dweet(models.Model):
content = models.CharField(max_length=140)
user = models.ForeignKey(User)
creation_date = models.DateTimeField(auto_now=True, blank=True)
class UserProfile(models.Model):
user = models.ForeignKey(User)
# follows = models.ManyToManyField('self', related_name='followed_by', symmetrical=False)
def gravatar_url(self):
return "http://www.gravatar.com/avatar/%s?s=50" % hashlib.md5(self.user.email).hexdigest()
User.profile = property(lambda u: UserProfile.objects.get_or_create(user=u)[0])
class Follow(models.Model):
follower = models.ForeignKey(User, related_name='follower')
following = models.ForeignKey(User, related_name='following')
follow_date = models.DateTimeField(auto_now=True)
class Likes(models.Model):
dwitte = models.ForeignKey(Dweet)
likes = models.ForeignKey(User)
creation_date = models.DateTimeField(auto_now=True)
class Meta:
unique_together = ("dwitte", "likes")
class Comments(models.Model):
dwitte = models.ForeignKey(Dweet)
comment_by = models.ForeignKey(User)
comment = models.TextField()
creation_date = models.DateTimeField(auto_now=True)
|
mit
| 5,100,131,809,558,829,000
| 28.822222
| 98
| 0.716095
| false
| 3.569149
| false
| false
| false
|
opensemanticsearch/open-semantic-etl
|
src/opensemanticetl/enhance_extract_law.py
|
1
|
5028
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import re
import etl_plugin_core
#
# get taxonomy for aggregated facets / filters
#
# example: '§ 153 Abs. 1 Satz 2' -> ['§ 153', '§ 153 Absatz 1', '§ 153 Absatz 1 Satz 2']
# todo:
def get_taxonomy(law_clause, law_code = None):
law_clauses = [law_clause]
return law_clauses
#1.a
#1(2)
#1 (2)
#
# extract law codes
#
class enhance_extract_law(etl_plugin_core.Plugin):
def process(self, parameters=None, data=None):
if parameters is None:
parameters = {}
if data is None:
data = {}
clause_prefixes = [
'§',
'Article',
'Artikel',
'Art',
'Section',
'Sec',
]
clause_subsections = [
'Abschnitt',
'Absatz',
'Abs',
'Sentence',
'Satz',
'S',
'Halbsatz',
'Number',
'Nummer',
'Nr',
'Buchstabe',
]
text = etl_plugin_core.get_text(data)
clauses = []
rule = '(' + '|'.join(clause_prefixes) + ')\W*((\d+\W\w(\W|\b))|(\d+\w?))(\W?(' + '|'.join(clause_subsections) + ')\W*(\d+\w?|\w(\W|\b)))*'
for match in re.finditer(rule, text, re.IGNORECASE):
clause = match.group(0)
clause = clause.strip()
clauses.append(clause)
# if "§123" normalize to "§ 123"
if clause[0] == '§' and not clause[1] == ' ':
clause = '§ ' + clause[1:]
etl_plugin_core.append(data, 'law_clause_ss', clause)
code_matchtexts = etl_plugin_core.get_all_matchtexts(data.get('law_code_ss_matchtext_ss', []))
code_matchtexts_with_clause = []
preflabels = {}
if 'law_code_ss_preflabel_and_uri_ss' in data:
preflabels = etl_plugin_core.get_preflabels(data['law_code_ss_preflabel_and_uri_ss'])
if len(clauses)>0 and len(code_matchtexts)>0:
text = text.replace("\n", " ")
for code_match_id in code_matchtexts:
#get only matchtext (without ID/URI of matching entity)
for code_matchtext in code_matchtexts[code_match_id]:
for clause in clauses:
if clause + " " + code_matchtext in text or code_matchtext + " " + clause in text:
code_matchtexts_with_clause.append(code_matchtext)
# if "§123" normalize to "§ 123"
if clause[0] == '§' and not clause[1] == ' ':
clause = '§ ' + clause[1:]
law_code_preflabel = code_match_id
if code_match_id in preflabels:
law_code_clause_normalized = clause + " " + preflabels[code_match_id]
else:
law_code_clause_normalized = clause + " " + code_match_id
etl_plugin_core.append(data, 'law_code_clause_ss', law_code_clause_normalized)
if len(code_matchtexts)>0:
blacklist = []
listfile = open('/etc/opensemanticsearch/blacklist/enhance_extract_law/blacklist-lawcode-if-no-clause')
for line in listfile:
line = line.strip()
if line and not line.startswith("#"):
blacklist.append(line)
listfile.close()
if not isinstance(data['law_code_ss_matchtext_ss'], list):
data['law_code_ss_matchtext_ss'] = [data['law_code_ss_matchtext_ss']]
blacklisted_code_ids = []
for code_match_id in code_matchtexts:
for code_matchtext in code_matchtexts[code_match_id]:
if code_matchtext in blacklist:
if code_matchtext not in code_matchtexts_with_clause:
blacklisted_code_ids.append(code_match_id)
data['law_code_ss_matchtext_ss'].remove(code_match_id + "\t" + code_matchtext)
code_matchtexts = etl_plugin_core.get_all_matchtexts(data.get('law_code_ss_matchtext_ss', []))
if not isinstance(data['law_code_ss'], list):
data['law_code_ss'] = [data['law_code_ss']]
if not isinstance(data['law_code_ss_preflabel_and_uri_ss'], list):
data['law_code_ss_preflabel_and_uri_ss'] = [data['law_code_ss_preflabel_and_uri_ss']]
for blacklisted_code_id in blacklisted_code_ids:
if blacklisted_code_id not in code_matchtexts:
data['law_code_ss'].remove(preflabels[blacklisted_code_id])
data['law_code_ss_preflabel_and_uri_ss'].remove(preflabels[blacklisted_code_id] + ' <' + blacklisted_code_id + '>')
return parameters, data
|
gpl-3.0
| -8,537,179,740,462,621,000
| 32.657718
| 147
| 0.498504
| false
| 3.701107
| false
| false
| false
|
LeonidasAntoniou/dk-plus
|
test files/beacon.py
|
1
|
1799
|
"""
A simple program that sends and listens for broadcast packets through a UDP socket.
Used to test whether the system is able to send/receive packets.
"""
import time, math, sys, socket, threading, select, uuid
from collections import namedtuple
import cPickle as pickle
from params import Params
MAX_STAY = 5 #seconds until entry is removed from structure
Geo = namedtuple("Geo", "lat lon")
simple_msg = namedtuple("simple_msg", "ID text")
self_params = Params(dummy=True)
# Set the socket parameters
address = ('192.168.1.255', 54545) # host, port
sock_broad = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock_broad.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# Create socket and bind to address
sock_listen = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock_listen.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
sock_listen.bind(address)
def broad():
while True:
#msg = simple_msg(self_id,"I am here")
msg = self_params
assert sock_broad.sendto(pickle.dumps(msg), address), "Message failed to send"
time.sleep(1)
def listen():
print "Waiting for message"
while True:
try:
ready = select.select([sock_listen], [], [], 1.0)
if ready[0]:
d = sock_listen.recvfrom(4096)
raw_msg = d[0]
try:
msg = pickle.loads(raw_msg)
if msg.ID == self_params.ID:
pass
else:
print "From addr: '%s', msg: '%s'" % (d[1], msg)
except Exception, e:
print "Error in receiving: ", e
except socket.timeout:
print "Reached timeout. Closing..."
t_listen.cancel()
sock_listen.close()
t_listen = threading.Thread(target=listen)
t_broad = threading.Thread(target=broad)
t_listen.daemon = True
t_broad.daemon = True
t_listen.start()
t_broad.start()
time.sleep(100) #test for 100s
print "Closing beacon"
|
gpl-3.0
| -9,053,915,557,494,988,000
| 25.850746
| 81
| 0.699277
| false
| 3.101724
| false
| false
| false
|
afb/0install
|
zeroinstall/injector/handler.py
|
1
|
9160
|
"""
Integrates download callbacks with an external mainloop.
While things are being downloaded, Zero Install returns control to your program.
Your mainloop is responsible for monitoring the state of the downloads and notifying
Zero Install when they are complete.
To do this, you supply a L{Handler} to the L{policy}.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from __future__ import print_function
from zeroinstall import _, logger
import sys
if sys.version_info[0] < 3:
import __builtin__ as builtins
else:
import builtins
from zeroinstall import SafeException
from zeroinstall import support
from zeroinstall.support import tasks
from zeroinstall.injector import download
class NoTrustedKeys(SafeException):
"""Thrown by L{Handler.confirm_import_feed} on failure."""
pass
class Handler(object):
"""
A Handler is used to interact with the user (e.g. to confirm keys, display download progress, etc).
@ivar monitored_downloads: set of downloads in progress
@type monitored_downloads: {L{download.Download}}
@ivar n_completed_downloads: number of downloads which have finished for GUIs, etc (can be reset as desired).
@type n_completed_downloads: int
@ivar total_bytes_downloaded: informational counter for GUIs, etc (can be reset as desired). Updated when download finishes.
@type total_bytes_downloaded: int
@ivar dry_run: don't write or execute any files, just print notes about what we would have done to stdout
@type dry_run: bool
"""
__slots__ = ['monitored_downloads', 'dry_run', 'total_bytes_downloaded', 'n_completed_downloads']
def __init__(self, mainloop = None, dry_run = False):
"""@type dry_run: bool"""
self.monitored_downloads = set()
self.dry_run = dry_run
self.n_completed_downloads = 0
self.total_bytes_downloaded = 0
def monitor_download(self, dl):
"""Called when a new L{download} is started.
This is mainly used by the GUI to display the progress bar.
@type dl: L{zeroinstall.injector.download.Download}"""
self.monitored_downloads.add(dl)
self.downloads_changed()
@tasks.async
def download_done_stats():
yield dl.downloaded
# NB: we don't check for exceptions here; someone else should be doing that
try:
self.n_completed_downloads += 1
self.total_bytes_downloaded += dl.get_bytes_downloaded_so_far()
self.monitored_downloads.remove(dl)
self.downloads_changed()
except Exception as ex:
self.report_error(ex)
download_done_stats()
def impl_added_to_store(self, impl):
"""Called by the L{fetch.Fetcher} when adding an implementation.
The GUI uses this to update its display.
@param impl: the implementation which has been added
@type impl: L{model.Implementation}"""
pass
def downloads_changed(self):
"""This is just for the GUI to override to update its display."""
pass
@tasks.async
def confirm_import_feed(self, pending, valid_sigs, retval):
"""Sub-classes should override this method to interact with the user about new feeds.
If multiple feeds need confirmation, L{trust.TrustMgr.confirm_keys} will only invoke one instance of this
method at a time.
@param pending: the new feed to be imported
@type pending: L{PendingFeed}
@param valid_sigs: maps signatures to a list of fetchers collecting information about the key
@type valid_sigs: {L{gpg.ValidSig} : L{fetch.KeyInfoFetcher}}
@since: 0.42"""
from zeroinstall.injector import trust
assert valid_sigs
domain = trust.domain_from_url(pending.url)
# Ask on stderr, because we may be writing XML to stdout
print(_("Feed: %s") % pending.url, file=sys.stderr)
print(_("The feed is correctly signed with the following keys:"), file=sys.stderr)
for x in valid_sigs:
print("-", x, file=sys.stderr)
def text(parent):
text = ""
for node in parent.childNodes:
if node.nodeType == node.TEXT_NODE:
text = text + node.data
return text
shown = set()
key_info_fetchers = valid_sigs.values()
while key_info_fetchers:
old_kfs = key_info_fetchers
key_info_fetchers = []
for kf in old_kfs:
infos = set(kf.info) - shown
if infos:
if len(valid_sigs) > 1:
print("%s: " % kf.fingerprint)
for key_info in infos:
print("-", text(key_info), file=sys.stderr)
shown.add(key_info)
if kf.blocker:
key_info_fetchers.append(kf)
if key_info_fetchers:
for kf in key_info_fetchers: print(kf.status, file=sys.stderr)
stdin = tasks.InputBlocker(0, 'console')
blockers = [kf.blocker for kf in key_info_fetchers] + [stdin]
yield blockers
for b in blockers:
try:
tasks.check(b)
except Exception as ex:
logger.warning(_("Failed to get key info: %s"), ex)
if stdin.happened:
print(_("Skipping remaining key lookups due to input from user"), file=sys.stderr)
break
if not shown:
print(_("Warning: Nothing known about this key!"), file=sys.stderr)
if len(valid_sigs) == 1:
print(_("Do you want to trust this key to sign feeds from '%s'?") % domain, file=sys.stderr)
else:
print(_("Do you want to trust all of these keys to sign feeds from '%s'?") % domain, file=sys.stderr)
while True:
print(_("Trust [Y/N] "), end=' ', file=sys.stderr)
sys.stderr.flush()
i = support.raw_input()
if not i: continue
if i in 'Nn':
raise NoTrustedKeys(_('Not signed with a trusted key'))
if i in 'Yy':
break
trust.trust_db._dry_run = self.dry_run
retval.extend([key.fingerprint for key in valid_sigs])
@tasks.async
def confirm_install(self, msg):
"""We need to check something with the user before continuing with the install.
@raise download.DownloadAborted: if the user cancels"""
yield
print(msg, file=sys.stderr)
while True:
sys.stderr.write(_("Install [Y/N] "))
sys.stderr.flush()
i = support.raw_input()
if not i: continue
if i in 'Nn':
raise download.DownloadAborted()
if i in 'Yy':
break
def report_error(self, exception, tb = None):
"""Report an exception to the user.
@param exception: the exception to report
@type exception: L{SafeException}
@param tb: optional traceback
@since: 0.25"""
import logging
logger.warning("%s", str(exception) or type(exception),
exc_info = (exception, exception, tb) if logger.isEnabledFor(logging.INFO) else None)
class ConsoleHandler(Handler):
"""A Handler that displays progress on stderr (a tty).
(we use stderr because we use stdout to talk to the OCaml process)
@since: 0.44"""
last_msg_len = None
update = None
disable_progress = 0
screen_width = None
# While we are displaying progress, we override builtins.print to clear the display first.
original_print = None
def downloads_changed(self):
if self.monitored_downloads and self.update is None:
if self.screen_width is None:
try:
import curses
curses.setupterm()
self.screen_width = curses.tigetnum('cols') or 80
except Exception as ex:
logger.info("Failed to initialise curses library: %s", ex)
self.screen_width = 80
self.show_progress()
self.original_print = print
builtins.print = self.print
self.update = tasks.get_loop().call_repeatedly(0.2, self.show_progress)
elif len(self.monitored_downloads) == 0:
if self.update:
self.update.cancel()
self.update = None
builtins.print = self.original_print
self.original_print = None
self.clear_display()
def show_progress(self):
if not self.monitored_downloads: return
urls = [(dl.url, dl) for dl in self.monitored_downloads]
if self.disable_progress: return
screen_width = self.screen_width - 2
item_width = max(16, screen_width // len(self.monitored_downloads))
url_width = item_width - 7
msg = ""
for url, dl in sorted(urls):
so_far = dl.get_bytes_downloaded_so_far()
if url.endswith('/latest.xml'):
url = url[:-10] # remove latest.xml from mirror URLs
leaf = url.rsplit('/', 1)[-1]
if len(leaf) >= url_width:
display = leaf[:url_width]
else:
display = url[-url_width:]
if dl.expected_size:
msg += "[%s %d%%] " % (display, int(so_far * 100 / dl.expected_size))
else:
msg += "[%s] " % (display)
msg = msg[:screen_width]
if self.last_msg_len is None:
sys.stderr.write(msg)
else:
sys.stderr.write(chr(13) + msg)
if len(msg) < self.last_msg_len:
sys.stderr.write(" " * (self.last_msg_len - len(msg)))
self.last_msg_len = len(msg)
sys.stderr.flush()
return
def clear_display(self):
if self.last_msg_len != None:
sys.stderr.write(chr(13) + " " * self.last_msg_len + chr(13))
sys.stderr.flush()
self.last_msg_len = None
def report_error(self, exception, tb = None):
self.clear_display()
Handler.report_error(self, exception, tb)
def confirm_import_feed(self, pending, valid_sigs, retval):
self.clear_display()
self.disable_progress += 1
blocker = Handler.confirm_import_feed(self, pending, valid_sigs, retval)
@tasks.async
def enable():
yield blocker
self.disable_progress -= 1
self.show_progress()
enable()
return blocker
def print(self, *args, **kwargs):
self.clear_display()
self.original_print(*args, **kwargs)
|
lgpl-2.1
| 4,241,712,712,980,864,500
| 31.253521
| 125
| 0.69083
| false
| 3.199441
| false
| false
| false
|
OpenDroneMap/WebODM
|
app/api/common.py
|
1
|
1763
|
from django.core.exceptions import ObjectDoesNotExist, SuspiciousFileOperation
from rest_framework import exceptions
import os
from app import models
def get_and_check_project(request, project_pk, perms=('view_project',)):
"""
Django comes with a standard `model level` permission system. You can
check whether users are logged-in and have privileges to act on things
model wise (can a user add a project? can a user view projects?).
Django-guardian adds a `row level` permission system. Now not only can you
decide whether a user can add a project or view projects, you can specify exactly
which projects a user has or has not access to.
    This brings up the reason for the following function: tasks are part of a project,
    and it would add a tremendous headache (and redundancy) to specify row level permissions
    for each task. Instead, we check the row level permissions of the project
    to which a task belongs.
Perhaps this could be added as a django-rest filter?
Retrieves a project and raises an exception if the current user
has no access to it.
"""
try:
project = models.Project.objects.get(pk=project_pk, deleting=False)
for perm in perms:
if not request.user.has_perm(perm, project): raise ObjectDoesNotExist()
except ObjectDoesNotExist:
raise exceptions.NotFound()
return project
def path_traversal_check(unsafe_path, known_safe_path):
known_safe_path = os.path.abspath(known_safe_path)
unsafe_path = os.path.abspath(unsafe_path)
if (os.path.commonprefix([known_safe_path, unsafe_path]) != known_safe_path):
raise SuspiciousFileOperation("{} is not safe".format(unsafe_path))
# Passes the check
return unsafe_path
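# A minimal usage sketch: a hypothetical download view combining the two helpers
# above. The view name, URL keyword and media directory are illustrative
# assumptions, not something defined elsewhere in this module.
def _example_download_asset(request, project_pk=None, filename=""):
    # Row-level check: raises NotFound unless the user may view this project
    project = get_and_check_project(request, project_pk, perms=('view_project',))
    assets_dir = os.path.join("/webodm/app/media/project", str(project.id), "assets")
    # Rejects inputs such as filename="../../settings.py" before touching the disk
    return path_traversal_check(os.path.join(assets_dir, filename), assets_dir)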
|
mpl-2.0
| 4,351,260,022,897,503,700
| 40.023256
| 92
| 0.724334
| false
| 4.197619
| false
| false
| false
|
zlcnup/csmath
|
hw4_lm/lm.py
|
1
|
2784
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
from pylab import *
from numpy import *
from math import *
import numpy as np
def data_generator(N):
    #Generate the coefficient array for the vector function F: ai*exp(bi*x)
zl_mean = [3.4,4.5]
zl_cozl_v = [[1,0],[0,10]]
zl_coff = np.random.multivariate_normal(zl_mean,zl_cozl_v,N)
    #Generate the observation vector y
x = np.random.uniform(1, N, N)
y = [zl_coff[i][0]*exp(-zl_coff[i][1]*x[i]) for i in range(N)]
    #Generate the initial values x0
x0 = [x[i]+np.random.normal(0.0,0.3) for i in range(N)]
return zl_coff, y, x0
def jacobian(zl_coff, x0, N):
J=zeros((N,N),float)
    #Compute the partial derivative of the i-th function with respect to the j-th dimension of X
for i in range(N):
for j in range(N):
#-abexp(-b*xi)
J[i][j] = -(zl_coff[i][0]*zl_coff[i][1])*exp(-(zl_coff[i][1]*x0[j]))
return J
def normG(g):
absg = abs(g)
Normg = absg.argmax()
num = absg[Normg]
return num
def zl_LM(zl_coff, y, x0, N, maxIter):
zl_numIter = 0
zl_v = 2
    zl_miu = 0.05 #damping coefficient
x = x0
zl_Threshold = 1e-5
zl_preszl_fx = 100000
while zl_numIter < maxIter:
zl_numIter += 1
        #Compute the Jacobian matrix
J = jacobian(zl_coff, x, N)
        #Compute the Hessian matrix, Ep and g
A = dot(J.T,J)
zl_fx = zeros((N,N),float)
zl_fx = [zl_coff[i][0]*exp(-zl_coff[i][1]*x[i]) for i in range(N)]
szl_fx = sum(array(zl_fx)*array(zl_fx))
Ep = array(y) - array(zl_fx)
g = array(dot(J.T,Ep))
H = A + zl_miu*np.eye(N)
DTp = solve(H, g)
x = x + DTp
zl_fx2 = zeros(N,float)
for j in range(N):
zl_fx2[j] = zl_coff[j][0]*exp(-zl_coff[j][1])
szl_fx2 = sum(array(zl_fx2)*array(zl_fx2))
if abs(szl_fx - zl_preszl_fx) < zl_Threshold:
print("The zl_vector x is: ")
print(x)
print("The sum is: ")
print(szl_fx2)
break
if szl_fx2 < (szl_fx+0.5*sum(array(g)*array(Ep))):
zl_miu /= zl_v
else :
zl_miu *= 2
if zl_numIter == maxIter:
print("The zl_vector x0 is: ")
print(x0)
print("The zl_vector x is: ")
print(x)
print("The sum is: ")
print(szl_fx2)
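# For reference: each iteration of zl_LM above solves the Levenberg-Marquardt
# normal equations (J^T J + mu*I) * delta = J^T (y - f(x)); the damping term mu
# (zl_miu) is divided by zl_v when the step improves the fit and doubled
# otherwise, blending Gauss-Newton and gradient-descent behaviour.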
if __name__ == "__main__":
    #Input the dimension N of the vector space (here we assume m=n)
print("Please Input the dimension N of zl_vector space and the maxIter (the product of N and maxIter not be too large)")
N = input("Input N (not be too large): ")
N = int(N)
maxIter = input("Input the max number of interation (larger than half of the N): ")
maxIter = int(maxIter)
zl_coff, y, x0 = data_generator(N)
    #Run the zl_LM algorithm
zl_LM(zl_coff, y, x0, N, maxIter)
|
mit
| -685,786,909,710,917,200
| 28.573034
| 124
| 0.518237
| false
| 2.270923
| false
| false
| false
|
atizo/pygobject
|
ltihooks.py
|
1
|
2327
|
# -*- Mode: Python; py-indent-offset: 4 -*-
# ltihooks.py: python import hooks that understand libtool libraries.
# Copyright (C) 2000 James Henstridge.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os, ihooks
class LibtoolHooks(ihooks.Hooks):
def get_suffixes(self):
"""Like normal get_suffixes, but adds .la suffixes to list"""
ret = ihooks.Hooks.get_suffixes(self)
ret.insert(0, ('module.la', 'rb', 3))
ret.insert(0, ('.la', 'rb', 3))
return ret
def load_dynamic(self, name, filename, file=None):
"""Like normal load_dynamic, but treat .la files specially"""
if len(filename) > 3 and filename[-3:] == '.la':
fp = open(filename, 'r')
dlname = ''
installed = 1
line = fp.readline()
while line:
if len(line) > 7 and line[:7] == 'dlname=':
dlname = line[8:-2]
elif len(line) > 10 and line[:10] == 'installed=':
installed = line[10:-1] == 'yes'
line = fp.readline()
fp.close()
if dlname:
if installed:
filename = os.path.join(os.path.dirname(filename),
dlname)
else:
filename = os.path.join(os.path.dirname(filename),
'.libs', dlname)
return ihooks.Hooks.load_dynamic(self, name, filename, file)
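# For reference, the lines of a libtool .la archive that load_dynamic() looks for
# are shell-style assignments such as (values illustrative):
#   dlname='foomodule.so'
#   installed=yes
# For an installed library the hook then loads <dir>/foomodule.so; otherwise it
# falls back to the uninstalled build in <dir>/.libs/foomodule.so.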
importer = ihooks.ModuleImporter()
importer.set_hooks(LibtoolHooks())
def install():
importer.install()
def uninstall():
importer.uninstall()
install()
|
lgpl-2.1
| -552,883,266,822,510,400
| 37.783333
| 75
| 0.593468
| false
| 4.032929
| false
| false
| false
|
eBay/cronus-agent
|
agent/agent/lib/agent_thread/deactivate_manifest.py
|
1
|
5040
|
#pylint: disable=W0703,R0912,R0915,R0904,W0105
'''
Copyright 2014 eBay Software Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
""" Thread to perform deactivation of a manifest """
import os
import shutil
import traceback
from agent.lib.utils import islink
from agent.lib.utils import readlink
from agent.lib.errors import Errors
from agent.lib.errors import AgentException
from agent.controllers.service import ServiceController
import logging
from agent.lib.agent_thread.manifest_control import ManifestControl
from agent.lib import manifestutil
class DeactivateManifest(ManifestControl):
""" This thread will attempt to activate a manifest
This means going throuh each package
call the stop
call the deactivate
delete the active link
call the activate
create the active link
call start
"""
THREAD_NAME = 'deactivate_manifest'
def __init__(self, threadMgr, service):
""" Constructor """
ManifestControl.__init__(self, threadMgr, service, manifest = None, name = 'deactivate_manifest')
self.setName(DeactivateManifest.THREAD_NAME)
self.__LOG = manifestutil.getServiceLogger(self, logging.getLogger(__name__))
def doRun(self):
""" Main body of the thread """
errorMsg = ""
errorCode = None
failed = False
try:
activePath = os.path.join(ServiceController.manifestPath(self._service), 'active')
oldManifest = None
# make sure that if the active path exists, it's a link
# if not log that and delete the link
if (os.path.exists(activePath) and not os.name == 'nt' and not islink(activePath)):
self.__LOG.error('%s is not a link. Attempted to delete' % activePath)
shutil.rmtree(activePath)
if (os.path.exists(activePath)):
oldManifest = os.path.basename(readlink(activePath))
else:
raise AgentException(error = Errors.ACTIVEMANIFEST_MANIFEST_MISSING, errorMsg = 'No active manifest - cannot deactivate service')
self.__deactivateManifest(self._service, oldManifest)
self.__removeSymlink(self._service)
except SystemExit as exc:
failed = True
if (len(exc.args) == 2):
# ok we got {err code, err msg}
errorCode = exc.args[0]
errorMsg = exc.args[1]
raise exc
except AgentException as exc:
failed = True
errorMsg = 'Deactivate Manifest - Agent Exception - %s' % exc.getMsg()
errorCode = exc.getCode()
except Exception as exc:
failed = True
errorMsg = 'Deactivate Manifest - Unknown error - (%s) - %s - %s' \
% (self._service, str(exc), traceback.format_exc(5))
errorCode = Errors.UNKNOWN_ERROR
finally:
if failed:
self.__LOG.warning(errorMsg)
self._updateStatus(httpStatus = 500, error = errorCode,
errorMsg = errorMsg)
            self.__LOG.debug('Done: deactivate manifest for (%s)' % (self._service))
self._updateProgress(100)
    def __deactivateManifest(self, service, manifest):
        """ Deactivate a manifest. This means calling shutdown then deactivate on the manifest
        @param service - service of manifest to deactivate
        @param manifest - manifest to deactivate
        """
self.__LOG.debug("Deactivate Manifest %s-%s" % (service, manifest))
if (manifest == None):
return
self._execPackages('shutdown', service, manifest, 11, 25, activateFlow = False)
self._execPackages('deactivate', service, manifest, 26, 50, activateFlow = False)
manifestutil.processControllerInPackage(service, manifest, activateFlow = False)
def __removeSymlink(self, service):
""" remove symlink """
#remove symlink
activePath = self.__getSymlinkPath(service)
if os.path.exists(activePath):
if (os.path.islink(activePath)): # *nix
os.remove(activePath)
else:
raise AgentException('Running platform seems to be neither win32 nor *nix with any (sym)link support. Can\'t proceed with link deletion')
def __getSymlinkPath(self, service):
""" return symlink path for a service """
return os.path.join(ServiceController.manifestPath(service), 'active')
|
apache-2.0
| 1,921,334,820,953,624,800
| 38.069767
| 153
| 0.639286
| false
| 4.405594
| false
| false
| false
|
cizixs/tftp
|
tftp/tftp_client.py
|
1
|
7600
|
import sys
import struct
import binascii
import argparse
import tftp
from tftp import SocketBase
from tftp import get_opcode
from tftp import default_port
from tftp import make_data_packet
from tftp import make_ack_packet
class State(object):
START, DATA = range(2)
# Make packet functions.
def make_request_packet(opcode, filename, mode='octet'):
values = (opcode, filename, 0, mode, 0)
s = struct.Struct('! H {}s B {}s B'.format(len(filename),len(mode)) )
return s.pack(*values)
def make_rrq_packet(filename):
return make_request_packet(tftp.RRQ, filename)
def make_wrq_packet(filename):
return make_request_packet(tftp.WRQ, filename)
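# For reference: the request packets built above follow the RFC 1350 layout
#   | opcode (2 bytes) | filename | 0 | mode | 0 |
# so, assuming tftp.RRQ == 1 (the standard read-request opcode),
# make_rrq_packet('a.txt') yields '\x00\x01' + 'a.txt' + '\x00' + 'octet' + '\x00'.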
class TftpClient(SocketBase):
def __init__(self, host='127.0.0.1', port='', filename=None, **argv):
self.host = host
self.orig_port = self.port = port or default_port()
self.block_num = 1
self.is_done = False
self.status = State.START
self.action = argv.get('action', 'get')
self.debug = argv.get('debug', False)
self.block_size = argv.get('block_size', tftp.DEFAULT_BLOCK_SIZE)
self.filename = filename
self.setup_file()
self.setup_connect()
def reset(self):
self.block_num = 1
self.is_done = False
self.status = State.START
self.port = self.orig_port or 69
self.setup_file()
self.setup_connect()
@property
def server_addr(self):
return (self.host, self.port)
def setup_file(self):
if self.filename:
if self.action == 'get':
self.fd = open(self.filename, 'wb')
elif self.action == 'put':
self.fd = open(self.filename, 'rb')
else:
raise Exception('unsupport action %s' % self.action)
    def handle_packet(self, packet, addr):
        """Handle packet from remote.
If it's a wrong packet, not from expected host/port, discard it;
If it's a data packet, send ACK packet back;
        If it's an error packet, print error and exit;
        If it's an ACK packet, send Data packet back.
"""
host, port = addr
if host != self.host:
# ignore packet from wrong address.
return
packet_len = len(packet)
opcode = get_opcode(packet)
if opcode == tftp.ERROR:
err_code = struct.unpack('!H', packet[2:4])[0]
err_msg = packet[4:packet_len-1]
print "Error %s: %s" % (err_code, err_msg)
sys.exit(err_code)
elif opcode == tftp.DATA:
# This is a data packet received from server, save data to file.
# update port
if self.port != port:
self.port = port
block_num = struct.unpack('!H', packet[2:4])[0]
if block_num != self.block_num:
# skip unexpected #block data packet
print 'unexpected block num %d' % block_num
return
data = packet[4:]
self.fd.write(data)
if len(packet) < self.block_size + 2:
self.is_done = True
self.fd.close()
file_len = self.block_size * (self.block_num -1) + len(data)
print '%d bytes received.' % file_len
self.block_num += 1
elif opcode == tftp.ACK:
# This is a write request ACK
# Send next block_size data to server
if self.port != port:
self.port = port
block_num = struct.unpack('!H', packet[2:4])[0]
self.verbose('received ack for %d' % block_num)
self.block_num += 1
else:
raise Exception('unrecognized packet: %s', str(opcode))
def get_next_packet(self):
if self.status == State.START:
opcode = tftp.RRQ if self.action == 'get' else tftp.WRQ
self.verbose('about to send packet %d' % opcode)
packet = make_request_packet(opcode, self.filename)
self.status = State.DATA
elif self.status == State.DATA:
if self.action == 'get':
self.verbose('about to send ack for %d' % (self.block_num - 1))
packet = make_ack_packet(self.block_num-1)
elif self.action == 'put':
self.verbose('about to send data for %d' % (self.block_num - 1))
data = self.fd.read(self.block_size)
if len(data) < self.block_size:
self.is_done = True
packet = make_data_packet(self.block_num-1, data)
return packet
def handle(self):
"""Main loop function for tftp.
The main loop works like the following:
1. get next-to-send packet
2. send the packet to server
3. receive packet from server
4. handle packet received, back to step 1.
"""
while not self.is_done:
packet = self.get_next_packet()
if packet:
self.send_packet(packet)
(packet, addr) = self.recv_packet()
self.handle_packet(packet, addr)
def main():
menu = """Tftp client help menu:
Supported commands:
connect connect to a server
get get file
put put file
quit exit
? print this menu
"""
def command_parse(line):
if not line:
return (None, None)
line = line.split()
command = line[0]
options = line[1:]
return command, options
tftp_client = TftpClient()
def connect(*args):
tftp_client.host = args[0]
if len(args) > 1:
tftp_client.port = int(args[1])
def get(*args):
print args[0]
tftp_client.action = 'get'
tftp_client.filename = args[0]
tftp_client.reset()
tftp_client.handle()
def put(*args):
tftp_client.filename = args[0]
tftp_client.action = 'put'
tftp_client.reset()
tftp_client.handle()
def quit(*args):
print 'Bye!'
def print_help(*args):
print menu
command_map = {
'connect': connect,
'get': get,
'put': put,
'quit': quit,
}
print 'Welcome to python tftpclient.'
while True:
line = raw_input('tftp> ').strip().lower()
command, options = command_parse(line)
command_map.get(command, print_help)(*options)
if command == 'quit':
break
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Tftp client in pure python.')
parser.add_argument('--host', '-s', action='store', dest='host',
default='127.0.0.1', help='Server hostname')
parser.add_argument('--port', '-p', action='store', dest='port', type=int,
default=69, help='Server port')
parser.add_argument('--file', '-f', action='store', dest='filename',
help='File to get from server')
parser.add_argument('--debug', '-d', action='store_true',
default=False, help='Debug mode: print more information(debug: False)')
parser.add_argument('action', metavar='action', nargs='*',
help='Action to conduct: put or get(default: get)')
args = parser.parse_args()
print args
if not args.filename or not args.action:
main()
else:
tftp_client = TftpClient(args.host, args.port, args.filename,
action=args.action[0], debug=args.debug)
tftp_client.handle()
|
mit
| -6,937,391,200,844,491,000
| 31.340426
| 83
| 0.550526
| false
| 3.830645
| false
| false
| false
|
mduggan/toumeika
|
shikin/review.py
|
1
|
4611
|
# -*- coding: utf-8 -*-
"""
Shikin review page and associated API
"""
from sqlalchemy import func
import datetime
import random
from flask import render_template, abort, request, jsonify, session
from . import app, ocrfix
from .model import DocSegment, DocSegmentReview, User
from .util import dologin
def get_user_or_abort():
# if request.remote_addr == '127.0.0.1':
# user = 'admin'
# else:
user = session.get('username')
if not user:
abort(403)
u = User.query.filter(User.name == user).first()
if not u:
abort(403)
return u
@app.route('/api/reviewcount/<user>')
def review_count(user):
u = User.query.filter(User.name == user).first()
if not u:
return abort(404)
return jsonify({'user': user, 'count': len(u.reviews)})
@app.route('/api/unreview/<int:segmentid>')
def unreview(segmentid):
user = get_user_or_abort()
revid = request.args.get('revid')
ds = DocSegment.query.filter(DocSegment.id == segmentid).first()
if not ds:
abort(404)
ds.viewcount = max(0, ds.viewcount-1)
app.dbobj.session.add(ds)
if not revid or not revid.isdigit():
app.dbobj.session.commit()
return
revid = int(revid)
old = DocSegmentReview.query.filter(DocSegmentReview.id == revid, DocSegmentReview.user_id == user.id).first()
if not old:
abort(404)
app.dbobj.session.delete(old)
app.dbobj.session.commit()
return jsonify({'status': 'ok', 'id': revid})
@app.route('/api/review/<int:segmentid>')
def review_submit(segmentid):
user = get_user_or_abort()
ds = DocSegment.query.filter(DocSegment.id == segmentid).first()
if not ds:
abort(404)
text = request.args.get('text')
skip = request.args.get('skip')
if text is None and not skip:
abort(404)
timestamp = datetime.datetime.now()
ds.viewcount += 1
app.dbobj.session.add(ds)
if skip:
app.dbobj.session.commit()
return jsonify({'status': 'ok'})
old = DocSegmentReview.query\
.filter(DocSegmentReview.segment_id == ds.id)\
.order_by(DocSegmentReview.rev.desc())\
.first()
if old is not None:
rev = old.rev + 1
else:
rev = 1
newrev = DocSegmentReview(segment=ds, rev=rev, timestamp=timestamp, user=user, text=text)
app.dbobj.session.add(newrev)
app.dbobj.session.commit()
return jsonify({'status': 'ok', 'id': newrev.id})
@app.route('/api/reviewdata', methods=['GET'])
def reviewdata():
# Find a random early page with lots of unreviewed items. This way even
    # with multiple simultaneous users they should get different pages.
minviewcount = app.dbobj.session.query(func.min(DocSegment.viewcount)).one()[0]
q = app.dbobj.session.query(DocSegment.doc_id, DocSegment.page)\
.filter(DocSegment.ocrtext != None)\
.filter(DocSegment.viewcount <= minviewcount)\
.distinct()
pages = list(q.all())
app.logger.debug("%d pages with segments of only %d views" % (len(pages), minviewcount))
# FIXME: this kinda works, but as all the pages get reviewed it will tend
# toward giving all users the same page. not really a problem until I have
# more than 1 user.
docid, page = random.choice(pages)
q = DocSegment.query.filter(DocSegment.doc_id == docid)\
.filter(DocSegment.page == page)\
.filter(DocSegment.viewcount <= minviewcount)
segments = q.all()
if not segments:
abort(404)
segdata = []
for d in segments:
if d.usertext is None:
txt = ocrfix.guess_fix(d.ocrtext)
suggests = ocrfix.suggestions(d)
else:
txt = d.usertext.text
suggests = []
lines = max(len(d.ocrtext.splitlines()), len(txt.splitlines()))
segdata.append(dict(ocrtext=d.ocrtext, text=txt, segment_id=d.id,
x1=d.x1, x2=d.x2, y1=d.y1, y2=d.y2,
textlines=lines, docid=docid, page=page+1, suggests=suggests))
return jsonify(dict(segments=segdata, docid=docid, page=page+1))
@app.route('/review', methods=['GET', 'POST'])
def review():
""" Review page """
error = None
user = None
if request.method == 'POST':
user, error = dologin()
if 'username' in session:
u = get_user_or_abort()
uname = u.name
else:
uname = None
return render_template('review.html', user=uname, error=error)
|
bsd-2-clause
| 2,451,348,942,650,668,500
| 27.81875
| 114
| 0.605942
| false
| 3.402952
| false
| false
| false
|
falkTX/Cadence
|
src/systray.py
|
1
|
23718
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# KDE, App-Indicator or Qt Systray
# Copyright (C) 2011-2018 Filipe Coelho <falktx@falktx.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# For a full copy of the GNU General Public License see the COPYING file
# Imports (Global)
import os, sys
if True:
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QAction, QMainWindow, QMenu, QSystemTrayIcon
else:
from PyQt4.QtCore import QTimer
from PyQt4.QtGui import QIcon
from PyQt4.QtGui import QAction, QMainWindow, QMenu, QSystemTrayIcon
try:
if False and os.getenv("DESKTOP_SESSION") in ("ubuntu", "ubuntu-2d") and not os.path.exists("/var/cadence/no_app_indicators"):
from gi import require_version
require_version('Gtk', '3.0')
from gi.repository import Gtk
require_version('AppIndicator3', '0.1')
from gi.repository import AppIndicator3 as AppIndicator
TrayEngine = "AppIndicator"
#elif os.getenv("KDE_SESSION_VERSION") >= 5:
#TrayEngine = "Qt"
#elif os.getenv("KDE_FULL_SESSION") or os.getenv("DESKTOP_SESSION") == "kde-plasma":
#from PyKDE5.kdeui import KAction, KIcon, KMenu, KStatusNotifierItem
#TrayEngine = "KDE"
else:
TrayEngine = "Qt"
except:
TrayEngine = "Qt"
print("Using Tray Engine '%s'" % TrayEngine)
iActNameId = 0
iActWidget = 1
iActParentMenuId = 2
iActFunc = 3
iSepNameId = 0
iSepWidget = 1
iSepParentMenuId = 2
iMenuNameId = 0
iMenuWidget = 1
iMenuParentMenuId = 2
# Get Icon from user theme, using our own as backup (Oxygen)
def getIcon(icon, size=16):
return QIcon.fromTheme(icon, QIcon(":/%ix%i/%s.png" % (size, size, icon)))
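# For reference, getIcon() prefers the user's icon theme and falls back to the
# bundled resource path ":/<size>x<size>/<name>.png", e.g. getIcon("audacity")
# resolves to ":/16x16/audacity.png" when the theme lacks that icon.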
# Global Systray class
class GlobalSysTray(object):
def __init__(self, parent, name, icon):
object.__init__(self)
self._app = None
self._parent = parent
self._gtk_running = False
self._quit_added = False
self.act_indexes = []
self.sep_indexes = []
self.menu_indexes = []
if TrayEngine == "KDE":
self.menu = KMenu(parent)
self.menu.setTitle(name)
self.tray = KStatusNotifierItem()
self.tray.setAssociatedWidget(parent)
self.tray.setCategory(KStatusNotifierItem.ApplicationStatus)
self.tray.setContextMenu(self.menu)
self.tray.setIconByPixmap(getIcon(icon))
self.tray.setTitle(name)
self.tray.setToolTipTitle(" ")
self.tray.setToolTipIconByPixmap(getIcon(icon))
# Double-click is managed by KDE
elif TrayEngine == "AppIndicator":
self.menu = Gtk.Menu()
self.tray = AppIndicator.Indicator.new(name, icon, AppIndicator.IndicatorCategory.APPLICATION_STATUS)
self.tray.set_menu(self.menu)
# Double-click is not possible with App-Indicators
elif TrayEngine == "Qt":
self.menu = QMenu(parent)
self.tray = QSystemTrayIcon(getIcon(icon))
self.tray.setContextMenu(self.menu)
self.tray.setParent(parent)
self.tray.activated.connect(self.qt_systray_clicked)
# -------------------------------------------------------------------------------------------
def addAction(self, act_name_id, act_name_string, is_check=False):
if TrayEngine == "KDE":
act_widget = KAction(act_name_string, self.menu)
act_widget.setCheckable(is_check)
self.menu.addAction(act_widget)
elif TrayEngine == "AppIndicator":
if is_check:
act_widget = Gtk.CheckMenuItem(act_name_string)
else:
act_widget = Gtk.ImageMenuItem(act_name_string)
act_widget.set_image(None)
act_widget.show()
self.menu.append(act_widget)
elif TrayEngine == "Qt":
act_widget = QAction(act_name_string, self.menu)
act_widget.setCheckable(is_check)
self.menu.addAction(act_widget)
else:
act_widget = None
act_obj = [None, None, None, None]
act_obj[iActNameId] = act_name_id
act_obj[iActWidget] = act_widget
self.act_indexes.append(act_obj)
def addSeparator(self, sep_name_id):
if TrayEngine == "KDE":
sep_widget = self.menu.addSeparator()
elif TrayEngine == "AppIndicator":
sep_widget = Gtk.SeparatorMenuItem()
sep_widget.show()
self.menu.append(sep_widget)
elif TrayEngine == "Qt":
sep_widget = self.menu.addSeparator()
else:
sep_widget = None
sep_obj = [None, None, None]
sep_obj[iSepNameId] = sep_name_id
sep_obj[iSepWidget] = sep_widget
self.sep_indexes.append(sep_obj)
def addMenu(self, menu_name_id, menu_name_string):
if TrayEngine == "KDE":
menu_widget = KMenu(menu_name_string, self.menu)
self.menu.addMenu(menu_widget)
elif TrayEngine == "AppIndicator":
menu_widget = Gtk.MenuItem(menu_name_string)
menu_parent = Gtk.Menu()
menu_widget.set_submenu(menu_parent)
menu_widget.show()
self.menu.append(menu_widget)
elif TrayEngine == "Qt":
menu_widget = QMenu(menu_name_string, self.menu)
self.menu.addMenu(menu_widget)
else:
menu_widget = None
menu_obj = [None, None, None]
menu_obj[iMenuNameId] = menu_name_id
menu_obj[iMenuWidget] = menu_widget
self.menu_indexes.append(menu_obj)
# -------------------------------------------------------------------------------------------
def addMenuAction(self, menu_name_id, act_name_id, act_name_string, is_check=False):
i = self.get_menu_index(menu_name_id)
if i < 0: return
menu_widget = self.menu_indexes[i][iMenuWidget]
if TrayEngine == "KDE":
act_widget = KAction(act_name_string, menu_widget)
act_widget.setCheckable(is_check)
menu_widget.addAction(act_widget)
elif TrayEngine == "AppIndicator":
menu_widget = menu_widget.get_submenu()
if is_check:
act_widget = Gtk.CheckMenuItem(act_name_string)
else:
act_widget = Gtk.ImageMenuItem(act_name_string)
act_widget.set_image(None)
act_widget.show()
menu_widget.append(act_widget)
elif TrayEngine == "Qt":
act_widget = QAction(act_name_string, menu_widget)
act_widget.setCheckable(is_check)
menu_widget.addAction(act_widget)
else:
act_widget = None
act_obj = [None, None, None, None]
act_obj[iActNameId] = act_name_id
act_obj[iActWidget] = act_widget
act_obj[iActParentMenuId] = menu_name_id
self.act_indexes.append(act_obj)
def addMenuSeparator(self, menu_name_id, sep_name_id):
i = self.get_menu_index(menu_name_id)
if i < 0: return
menu_widget = self.menu_indexes[i][iMenuWidget]
if TrayEngine == "KDE":
sep_widget = menu_widget.addSeparator()
elif TrayEngine == "AppIndicator":
menu_widget = menu_widget.get_submenu()
sep_widget = Gtk.SeparatorMenuItem()
sep_widget.show()
menu_widget.append(sep_widget)
elif TrayEngine == "Qt":
sep_widget = menu_widget.addSeparator()
else:
sep_widget = None
sep_obj = [None, None, None]
sep_obj[iSepNameId] = sep_name_id
sep_obj[iSepWidget] = sep_widget
sep_obj[iSepParentMenuId] = menu_name_id
self.sep_indexes.append(sep_obj)
#def addSubMenu(self, menu_name_id, new_menu_name_id, new_menu_name_string):
#menu_index = self.get_menu_index(menu_name_id)
#if menu_index < 0: return
#menu_widget = self.menu_indexes[menu_index][1]
##if TrayEngine == "KDE":
##new_menu_widget = KMenu(new_menu_name_string, self.menu)
##menu_widget.addMenu(new_menu_widget)
##elif TrayEngine == "AppIndicator":
##new_menu_widget = Gtk.MenuItem(new_menu_name_string)
##new_menu_widget.show()
##menu_widget.get_submenu().append(new_menu_widget)
##parent_menu_widget = Gtk.Menu()
##new_menu_widget.set_submenu(parent_menu_widget)
##else:
#if (1):
#new_menu_widget = QMenu(new_menu_name_string, self.menu)
#menu_widget.addMenu(new_menu_widget)
#self.menu_indexes.append([new_menu_name_id, new_menu_widget, menu_name_id])
# -------------------------------------------------------------------------------------------
def connect(self, act_name_id, act_func):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "AppIndicator":
act_widget.connect("activate", self.gtk_call_func, act_name_id)
elif TrayEngine in ("KDE", "Qt"):
act_widget.triggered.connect(act_func)
self.act_indexes[i][iActFunc] = act_func
# -------------------------------------------------------------------------------------------
#def setActionChecked(self, act_name_id, yesno):
#index = self.get_act_index(act_name_id)
#if index < 0: return
#act_widget = self.act_indexes[index][1]
##if TrayEngine == "KDE":
##act_widget.setChecked(yesno)
##elif TrayEngine == "AppIndicator":
##if type(act_widget) != Gtk.CheckMenuItem:
##return # Cannot continue
##act_widget.set_active(yesno)
##else:
#if (1):
#act_widget.setChecked(yesno)
def setActionEnabled(self, act_name_id, yesno):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setEnabled(yesno)
elif TrayEngine == "AppIndicator":
act_widget.set_sensitive(yesno)
elif TrayEngine == "Qt":
act_widget.setEnabled(yesno)
def setActionIcon(self, act_name_id, icon):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setIcon(KIcon(icon))
elif TrayEngine == "AppIndicator":
if not isinstance(act_widget, Gtk.ImageMenuItem):
# Cannot use icons here
return
act_widget.set_image(Gtk.Image.new_from_icon_name(icon, Gtk.IconSize.MENU))
#act_widget.set_always_show_image(True)
elif TrayEngine == "Qt":
act_widget.setIcon(getIcon(icon))
def setActionText(self, act_name_id, text):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setText(text)
elif TrayEngine == "AppIndicator":
if isinstance(act_widget, Gtk.ImageMenuItem):
# Fix icon reset
last_icon = act_widget.get_image()
act_widget.set_label(text)
act_widget.set_image(last_icon)
else:
act_widget.set_label(text)
elif TrayEngine == "Qt":
act_widget.setText(text)
def setIcon(self, icon):
if TrayEngine == "KDE":
self.tray.setIconByPixmap(getIcon(icon))
#self.tray.setToolTipIconByPixmap(getIcon(icon))
elif TrayEngine == "AppIndicator":
self.tray.set_icon(icon)
elif TrayEngine == "Qt":
self.tray.setIcon(getIcon(icon))
def setToolTip(self, text):
if TrayEngine == "KDE":
self.tray.setToolTipSubTitle(text)
elif TrayEngine == "AppIndicator":
# ToolTips are disabled in App-Indicators by design
pass
elif TrayEngine == "Qt":
self.tray.setToolTip(text)
# -------------------------------------------------------------------------------------------
#def removeAction(self, act_name_id):
#index = self.get_act_index(act_name_id)
#if index < 0: return
#act_widget = self.act_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.act_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(act_widget)
##elif TrayEngine == "AppIndicator":
##act_widget.hide()
##parent_menu_widget.remove(act_widget)
##else:
#if (1):
#parent_menu_widget.removeAction(act_widget)
#self.act_indexes.pop(index)
#def removeSeparator(self, sep_name_id):
#index = self.get_sep_index(sep_name_id)
#if index < 0: return
#sep_widget = self.sep_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.sep_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(sep_widget)
##elif TrayEngine == "AppIndicator":
##sep_widget.hide()
##parent_menu_widget.remove(sep_widget)
##else:
#if (1):
#parent_menu_widget.removeAction(sep_widget)
#self.sep_indexes.pop(index)
#def removeMenu(self, menu_name_id):
#index = self.get_menu_index(menu_name_id)
#if index < 0: return
#menu_widget = self.menu_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.menu_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(menu_widget.menuAction())
##elif TrayEngine == "AppIndicator":
##menu_widget.hide()
##parent_menu_widget.remove(menu_widget.get_submenu())
##else:
#if (1):
#parent_menu_widget.removeAction(menu_widget.menuAction())
#self.remove_actions_by_menu_name_id(menu_name_id)
#self.remove_separators_by_menu_name_id(menu_name_id)
#self.remove_submenus_by_menu_name_id(menu_name_id)
# -------------------------------------------------------------------------------------------
#def clearAll(self):
##if TrayEngine == "KDE":
##self.menu.clear()
##elif TrayEngine == "AppIndicator":
##for child in self.menu.get_children():
##self.menu.remove(child)
##else:
#if (1):
#self.menu.clear()
#self.act_indexes = []
#self.sep_indexes = []
#self.menu_indexes = []
#def clearMenu(self, menu_name_id):
#menu_index = self.get_menu_index(menu_name_id)
#if menu_index < 0: return
#menu_widget = self.menu_indexes[menu_index][1]
##if TrayEngine == "KDE":
##menu_widget.clear()
##elif TrayEngine == "AppIndicator":
##for child in menu_widget.get_submenu().get_children():
##menu_widget.get_submenu().remove(child)
##else:
#if (1):
#menu_widget.clear()
#list_of_submenus = [menu_name_id]
#for x in range(0, 10): # 10x level deep, should cover all cases...
#for this_menu_name_id, menu_widget, parent_menu_id in self.menu_indexes:
#if parent_menu_id in list_of_submenus and this_menu_name_id not in list_of_submenus:
#list_of_submenus.append(this_menu_name_id)
#for this_menu_name_id in list_of_submenus:
#self.remove_actions_by_menu_name_id(this_menu_name_id)
#self.remove_separators_by_menu_name_id(this_menu_name_id)
#self.remove_submenus_by_menu_name_id(this_menu_name_id)
# -------------------------------------------------------------------------------------------
def getTrayEngine(self):
return TrayEngine
def isTrayAvailable(self):
if TrayEngine in ("KDE", "Qt"):
# Ask Qt
return QSystemTrayIcon.isSystemTrayAvailable()
if TrayEngine == "AppIndicator":
# Ubuntu/Unity always has a systray
return True
return False
def handleQtCloseEvent(self, event):
if self.isTrayAvailable() and self._parent.isVisible():
event.accept()
self.__hideShowCall()
return
self.close()
QMainWindow.closeEvent(self._parent, event)
# -------------------------------------------------------------------------------------------
def show(self):
if not self._quit_added:
self._quit_added = True
if TrayEngine != "KDE":
self.addSeparator("_quit")
self.addAction("show", self._parent.tr("Minimize"))
self.addAction("quit", self._parent.tr("Quit"))
self.setActionIcon("quit", "application-exit")
self.connect("show", self.__hideShowCall)
self.connect("quit", self.__quitCall)
if TrayEngine == "KDE":
self.tray.setStatus(KStatusNotifierItem.Active)
elif TrayEngine == "AppIndicator":
self.tray.set_status(AppIndicator.IndicatorStatus.ACTIVE)
elif TrayEngine == "Qt":
self.tray.show()
def hide(self):
if TrayEngine == "KDE":
self.tray.setStatus(KStatusNotifierItem.Passive)
elif TrayEngine == "AppIndicator":
self.tray.set_status(AppIndicator.IndicatorStatus.PASSIVE)
elif TrayEngine == "Qt":
self.tray.hide()
def close(self):
if TrayEngine == "KDE":
self.menu.close()
elif TrayEngine == "AppIndicator":
if self._gtk_running:
self._gtk_running = False
Gtk.main_quit()
elif TrayEngine == "Qt":
self.menu.close()
def exec_(self, app):
self._app = app
if TrayEngine == "AppIndicator":
self._gtk_running = True
return Gtk.main()
else:
return app.exec_()
# -------------------------------------------------------------------------------------------
def get_act_index(self, act_name_id):
for i in range(len(self.act_indexes)):
if self.act_indexes[i][iActNameId] == act_name_id:
return i
else:
print("systray.py - Failed to get action index for %s" % act_name_id)
return -1
def get_sep_index(self, sep_name_id):
for i in range(len(self.sep_indexes)):
if self.sep_indexes[i][iSepNameId] == sep_name_id:
return i
else:
print("systray.py - Failed to get separator index for %s" % sep_name_id)
return -1
def get_menu_index(self, menu_name_id):
for i in range(len(self.menu_indexes)):
if self.menu_indexes[i][iMenuNameId] == menu_name_id:
return i
else:
print("systray.py - Failed to get menu index for %s" % menu_name_id)
return -1
#def get_parent_menu_widget(self, parent_menu_id):
#if parent_menu_id != None:
#menu_index = self.get_menu_index(parent_menu_id)
#if menu_index >= 0:
#return self.menu_indexes[menu_index][1]
#else:
#print("systray.py::Failed to get parent Menu widget for", parent_menu_id)
#return None
#else:
#return self.menu
#def remove_actions_by_menu_name_id(self, menu_name_id):
#h = 0
#for i in range(len(self.act_indexes)):
#act_name_id, act_widget, parent_menu_id, act_func = self.act_indexes[i - h]
#if parent_menu_id == menu_name_id:
#self.act_indexes.pop(i - h)
#h += 1
#def remove_separators_by_menu_name_id(self, menu_name_id):
#h = 0
#for i in range(len(self.sep_indexes)):
#sep_name_id, sep_widget, parent_menu_id = self.sep_indexes[i - h]
#if parent_menu_id == menu_name_id:
#self.sep_indexes.pop(i - h)
#h += 1
#def remove_submenus_by_menu_name_id(self, submenu_name_id):
#h = 0
#for i in range(len(self.menu_indexes)):
#menu_name_id, menu_widget, parent_menu_id = self.menu_indexes[i - h]
#if parent_menu_id == submenu_name_id:
#self.menu_indexes.pop(i - h)
#h += 1
# -------------------------------------------------------------------------------------------
def gtk_call_func(self, gtkmenu, act_name_id):
i = self.get_act_index(act_name_id)
if i < 0: return None
return self.act_indexes[i][iActFunc]
def qt_systray_clicked(self, reason):
if reason in (QSystemTrayIcon.DoubleClick, QSystemTrayIcon.Trigger):
self.__hideShowCall()
# -------------------------------------------------------------------------------------------
def __hideShowCall(self):
if self._parent.isVisible():
self.setActionText("show", self._parent.tr("Restore"))
self._parent.hide()
if self._app:
self._app.setQuitOnLastWindowClosed(False)
else:
self.setActionText("show", self._parent.tr("Minimize"))
if self._parent.isMaximized():
self._parent.showMaximized()
else:
self._parent.showNormal()
if self._app:
self._app.setQuitOnLastWindowClosed(True)
QTimer.singleShot(500, self.__raiseWindow)
def __quitCall(self):
if self._app:
self._app.setQuitOnLastWindowClosed(True)
self._parent.hide()
self._parent.close()
if self._app:
self._app.quit()
def __raiseWindow(self):
self._parent.activateWindow()
self._parent.raise_()
#--------------- main ------------------
if __name__ == '__main__':
from PyQt5.QtWidgets import QApplication, QDialog, QMessageBox
class ExampleGUI(QDialog):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self.setWindowIcon(getIcon("audacity"))
self.systray = GlobalSysTray(self, "Claudia", "claudia")
self.systray.addAction("about", self.tr("About"))
self.systray.setIcon("audacity")
self.systray.setToolTip("Demo systray app")
self.systray.connect("about", self.about)
self.systray.show()
def about(self):
QMessageBox.about(self, self.tr("About"), self.tr("Systray Demo"))
def done(self, r):
QDialog.done(self, r)
self.close()
def closeEvent(self, event):
self.systray.close()
QDialog.closeEvent(self, event)
app = QApplication(sys.argv)
gui = ExampleGUI()
gui.show()
sys.exit(gui.systray.exec_(app))
|
gpl-2.0
| 3,553,723,061,951,008,000
| 33.624818
| 130
| 0.548571
| false
| 3.674361
| false
| false
| false
|
Griffiths117/TG-s-IRC
|
client/IRClient.py
|
1
|
4985
|
import socket, _thread, tkinter as tk, tkinter.ttk as ttk
from time import strftime, sleep
from tkinter import messagebox, simpledialog
#===========================================================================#
class BasicInputDialog:
def __init__(self,question,title=None,hideWindow=True):
if title == None:
title = PROGRAM_TITLE
self.master = tk.Tk()
self.string = ''
self.master.title(title)
self.frame = tk.Frame(self.master)
self.frame.pack()
self.acceptInput(question)
self.waitForInput()
try:
self.inputted = self.getText()
except Exception:
quit()
def acceptInput(self,question):
r = self.frame
k = ttk.Label(r,text=question)
k.grid(row=0,column=0)
self.e = ttk.Entry(r,width=30)
self.e.grid(row=1,columnspan=2)
self.e.focus_set()
b = ttk.Button(r,text='Enter',command=self.getText)
self.master.bind("<Return>", self.getText)
b.grid(row=0,column=1,padx=5,pady=5)
def getText(self,event=None):
self.string = self.e.get()
self.master.quit()
return self.string
def get(self):
self.master.destroy()
return self.inputted
def getString(self):
return self.string
def waitForInput(self):
self.master.mainloop()
#Main window application
class MainWindow(tk.Tk):
def __init__(self, *args, **kwargs):
tk.Tk.__init__(self, *args, **kwargs)
self.title(PROGRAM_TITLE)
self.resizable(0,0)
self.displayBox = tk.Text(self, width=100, font=THEME.font, bg=THEME.colors[3], fg=THEME.colors[0])
self.displayBox.pack()
self.displayBox.configure(state='disabled')
self.msgEntry = tk.Entry(self,width=100, font=THEME.font, bg=THEME.colors[3], fg=THEME.colors[1], insertbackground = THEME.colors[2])
self.msgEntry.pack()
self.bind("<Return>", self.sendText)
def sendText(self,event=None):
send(newMessage(self.msgEntry.get()).toString())
self.msgEntry.delete(0, 'end')
class Theme:
def __init__(self, font, colors):
self.colors = colors #Message,input,cursor,background
self.font = font
class Message:
#Static variables for formatting
sep = "§"
pref = "msg="
SUDO_PREF = "server="
#Initiate, if timestamp is not entered it will be current time
def __init__(self, sender, plainText, timestamp = None):
if timestamp == None:
timestamp = strftime("%d-%m-%Y %H:%M:%S")
self.plainText = plainText
self.sender = sender
self.timestamp = timestamp
#Sends to string object to be sent through socket
def toString(self):
return self.pref + self.sender + self.sep + self.timestamp + self.sep + self.plainText
    #Turns received strings into messages: returns None if invalid.
def fromString(text):
if not text.startswith(Message.pref):
return Message("SERVER",text[len(Message.SUDO_PREF):]) if text.startswith(Message.SUDO_PREF) else None
data = text[len(Message.pref):].split(Message.sep,2)
return Message(data[0],data[2],data[1])
#Converts into display string
def toFormattedString(self):
return "["+self.timestamp + "] <" + self.sender + ">: "+self.plainText
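# For reference, with the format above a message from "Alice" stamped
# "18-05-2020 12:00:00" with body "hello" travels over the socket as
#   msg=Alice§18-05-2020 12:00:00§hello
# and Message.fromString() splits it back apart; server notices arrive with the
# "server=" prefix instead and are shown as coming from SERVER.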
#===========================================================================#
def send(msg):
try:
SEND_SOCKET.send(bytes(msg,'UTF-8'))
except:
print("Unable to send message")
def newMessage(msg):
return Message(NICKNAME, msg)
def waitForMessages(s,window):
    #This should be run in a separate thread: constantly receives new messages
sleep(0.5)
while True:
        #Receive message and convert to string
msg = s.recv(1024)
msg = str(msg, "UTF-8")
#Checking if message follows Message class format
m = Message.fromString(msg)
if m == None: continue
msg = m.toFormattedString()
#Show in window
writeTo(window.displayBox,msg)
def writeTo(textBox,msg):
textBox.configure(state='normal')
textBox.insert('end',msg)
textBox.configure(state='disabled')
textBox.see(tk.END)
def shutdownHook():
send("!DISCONNECT")
root.destroy()
quit()
#===========================================================================#
PROGRAM_TITLE = 'TG\'s IRC'
SERVER_IP = BasicInputDialog("Enter IP:").get()
NICKNAME = BasicInputDialog("Enter Nickname:").get()
THEME = Theme(("Consolas", 10), ['aqua', 'cyan', 'white', 'black'])
RECV_SOCKET = socket.socket()
RECV_SOCKET.connect((SERVER_IP, 20075))
SEND_SOCKET = socket.socket()
SEND_SOCKET.connect((SERVER_IP, 20074))
send("!nickname="+NICKNAME)
root = MainWindow()
_thread.start_new_thread(waitForMessages, (RECV_SOCKET,root,))
root.protocol("WM_DELETE_WINDOW", shutdownHook)
root.mainloop()
|
mit
| -6,780,789,818,688,822,000
| 29.956522
| 141
| 0.595907
| false
| 3.632653
| false
| false
| false
|
TraceContext/tracecontext-spec
|
test/server.py
|
1
|
3239
|
from aiohttp import ClientSession, ClientTimeout, ContentTypeError, web
from multidict import MultiDict
class AsyncTestServer(object):
scopes = {}
def __init__(self, host, port, timeout = 5):
self.host = host
self.port = port
self.timeout = ClientTimeout(total = timeout)
self.app = web.Application()
self.app.add_routes([
web.post('/{scope}', self.scope_handler),
])
async def start(self):
self.runner = web.AppRunner(self.app)
await self.runner.setup()
self.site = web.TCPSite(self.runner, self.host, self.port)
await self.site.start()
print('harness listening on http://%s:%s'%(self.host, self.port))
async def stop(self):
await self.runner.cleanup()
async def scope_handler(self, request):
scope_id = request.match_info['scope'].split('.', maxsplit = 1)
callback_id = None if len(scope_id) == 1 else scope_id[1]
scope_id = scope_id[0]
arguments = await request.json()
scope = None
if callback_id:
scope = self.scopes[scope_id]
scope[callback_id] = {
'headers': list(request.headers.items()),
'arguments': arguments,
}
else:
scope = {
'headers': list(request.headers.items()),
'arguments': arguments,
'results': [],
}
self.scopes[scope_id] = scope
if not arguments:
return web.json_response(None)
if not isinstance(arguments, list):
arguments = [arguments]
for action in arguments:
headers = [['Accept', 'application/json']]
if 'headers' in action:
headers += action['headers']
async with ClientSession(headers = headers, timeout = self.timeout) as session:
arguments = []
if 'arguments' in action:
arguments = action['arguments'] or []
result = {}
result['url'] = action['url']
scope['results'].append(result)
try:
async with session.post(action['url'], json = arguments) as response:
result['status'] = response.status
result['headers'] = list(response.headers.items())
result['body'] = await response.json(content_type = 'application/json')
except ContentTypeError as err:
result['body'] = await response.text()
except Exception as err:
result['exception'] = type(err).__name__
result['msg'] = str(err)
if not callback_id:
del self.scopes[scope_id]
return web.json_response(scope)
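# A sketch of the JSON body scope_handler accepts when POSTed to /<scope>: a list
# of actions, each naming a downstream url plus optional headers and arguments to
# forward (the URL and traceparent values below are purely illustrative):
#   [{"url": "http://127.0.0.1:7777/callback.1",
#     "headers": [["traceparent", "00-0123456789abcdef0123456789abcdef-0123456789abcdef-01"]],
#     "arguments": []}]
# The harness replays each action and records status/headers/body under 'results'.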
class TestServer(object):
def __init__(self, host, port, timeout = 5):
import asyncio
from threading import Thread
self.loop = asyncio.get_event_loop()
self.server = AsyncTestServer(host, port, timeout)
self.thread = Thread(target = self.monitor)
self.run = True
def monitor(self):
import asyncio
while self.run:
self.loop.run_until_complete(asyncio.sleep(0.2))
def start(self):
self.loop.run_until_complete(self.server.start())
self.thread.start()
def stop(self):
self.run = False
self.thread.join()
self.loop.run_until_complete(self.server.stop())
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
if __name__ == '__main__':
import sys
host = '127.0.0.1'
port = 7777
if len(sys.argv) >= 2:
host = sys.argv[1]
if len(sys.argv) >= 3:
port = int(sys.argv[2])
with TestServer(host = host, port = port) as server:
input('Press Enter to quit...')
|
apache-2.0
| -567,431,452,355,279,040
| 27.663717
| 82
| 0.661624
| false
| 3.138566
| false
| false
| false
|
kichkasch/pisi
|
pisiconstants.py
|
1
|
4235
|
"""
Module for definition of shared constants between the modules.
This file is part of Pisi.
Pisi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Pisi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Pisi. If not, see <http://www.gnu.org/licenses/>
"""
PISI_NAME = 'PISI'
"""'About'-information for user - program name"""
PISI_COMMENTS = "PISI is synchronizing information"
"""'About'-information for user - comments / explanations"""
PISI_VERSION = '0.5.3' #'-svn-' #
"""'About'-information for user - current version"""
FILEPATH_COPYING = "/opt/pisi/COPYING"
"""'About'-information for user - where to find the 'licence' file"""
PISI_AUTHORS = ["Esben Damgaard","Michael Pilgermann"]
"""'About'-information for user - list of programmers"""
PISI_HOMEPAGE = "http://freshmeat.net/projects/pisiom"
"""'About'-information for user - program home page"""
PISI_TRANSLATOR_CREDITS = None
"""'About'-information for user - list of translators"""
PISI_DOCUMENTERS = ['Michael Pilgermann']
"""'About'-information for user - list of documenters"""
CONSOLE_PROGRESSBAR_WIDTH = 80
"""Length of progress bar in CLI mode"""
MODE_CALENDAR = 0
"""Type of sources to deal with are calendars"""
MODE_CONTACTS = 1
"""Type of sources to deal with are contacts"""
MODE_STRINGS = ['calendar', 'contacts']
"""Names for the types of sources in order"""
MERGEMODE_SKIP = 0
"""Resolve conflicts between two entries from two sources by skipping the entry"""
MERGEMODE_FLUSH_A = 1
"""Resolve conflicts between two entries from two sources by flushing the entire data repository for the first data source"""
MERGEMODE_FLUSH_B = 2
"""Resolve conflicts between two entries from two sources by flushing the entire data repository for the second data source"""
MERGEMODE_OVERWRITE_A = 3
"""Resolve conflicts between two entries from two sources by overwriting the single entry on the first data source"""
MERGEMODE_OVERWRITE_B = 4
"""Resolve conflicts between two entries from two sources by overwriting the single entry on the second data source"""
MERGEMODE_MANUALCONFIRM = 5
"""Resolve conflicts between two entries from two sources by asking the user for decision for every single entry"""
MERGEMODE_STRINGS = ["Skip", "Flush source 1", "Flush source 2", "Overwrite entry in source 1", "Overwrite entry in source 2", "Manual confirmation"]
"""Names of merge modes in order"""
ACTIONID_ADD = 0
"""Entry in the history of activities for synchronization modules - here for ADD"""
ACTIONID_DELETE = 1
"""Entry in the history of activities for synchronization modules - here for DELETE"""
ACTIONID_MODIFY = 2
"""Entry in the history of activities for synchronization modules - here for MODIFY"""
GOOGLE_CONTACTS_APPNAME = "pisi" + PISI_VERSION
"""application name to use for connecting against google contacts services"""
GOOGLE_CONTACTS_MAXRESULTS = 1000
"""upper limit of result set when querying google contacts api"""
GOOGLE_CALENDAR_APPNAME = "pisi" + PISI_VERSION
"""application name to use for connecting against google calendar services"""
GOOGLE_CALENDAR_MAXRESULTS = GOOGLE_CONTACTS_MAXRESULTS
"""upper limit of result set when querying google calendar api"""
FILEDOWNLOAD_TIMEOUT = 10
"""Timeout for socket opeations (e.g. http download) in seconds - None for disable"""
FILEDOWNLOAD_TMPFILE = "/tmp/pisi-remotebuffer.data"
"""Temporary file for buffering information from remote file sources"""
VCF_BYTES_PER_ENTRY = 200
"""For guessing the number of entries inside a VCF file by evaluating its size we need an estimation of the size for a single entry - for the purpose of showing some progress"""
ICS_BYTES_PER_ENTRY = 200
"""For guessing the number of entries inside an ICS file by evaluating its size we need an estimation of the size for a single entry - for the purpose of showing some progress"""
|
gpl-3.0
| -7,278,563,369,972,107,000
| 48.244186
| 178
| 0.756789
| false
| 3.885321
| false
| false
| false
|
HyperloopTeam/FullOpenMDAO
|
lib/python2.7/site-packages/openmdao.main-0.13.0-py2.7.egg/openmdao/main/test/test_scaler_adder_example.py
|
1
|
5419
|
""" Tests the scaler/adder example in our docs. This test was inconvenient to test
in its place in the docs. """
# pylint: disable-msg=C0111,C0103
import unittest
from openmdao.lib.datatypes.api import Float
from openmdao.lib.drivers.api import SLSQPdriver
from openmdao.main.api import Assembly,Component
from openmdao.main.test.simpledriver import SimpleDriver
from openmdao.util.testutil import assert_rel_error
class Paraboloid_scale(Component):
""" Evaluates the equation f(x,y) = (1000*x-3)^2 + (1000*x)*(0.01*y) + (0.01*y+4)^2 - 3 """
# set up interface to the framework
# pylint: disable-msg=E1101
x = Float(0.0, iotype='in', desc='The variable x')
y = Float(0.0, iotype='in', desc='The variable y')
f_xy = Float(iotype='out', desc='F(x,y)')
    def execute(self):
        """f(x,y) = (1000*x-3)^2 + (1000*x)*(0.01*y) + (0.01*y+4)^2 - 3
Optimal solution (minimum): x = 0.0066666666666666671; y = -733.33333333333337
"""
x = self.x
y = self.y
self.f_xy = (1000.*x-3.)**2 + (1000.*x)*(0.01*y) + (0.01*y+4.)**2 - 3.
#print "Executing, %.33f, %.33f, %.33f" % (x, y, self.f_xy)
class OptimizationUnconstrainedScale(Assembly):
"""Unconstrained optimization of the unscaled Paraboloid Component."""
def configure(self):
""" Creates a new Assembly containing an unscaled Paraboloid and an optimizer"""
# Create Optimizer instance
self.add('driver', SLSQPdriver())
# Create Paraboloid component instances
self.add('paraboloid', Paraboloid_scale())
# Driver process definition
self.driver.workflow.add('paraboloid')
        # SLSQP Flags
self.driver.iprint = 0
# Objective
self.driver.add_objective('paraboloid.f_xy')
# Design Variables
self.driver.add_parameter('paraboloid.x', low=-1000., high=1000., scaler=0.001)
self.driver.add_parameter('paraboloid.y', low=-1000., high=1000., scaler=1000.0)
class Paraboloid_shift(Component):
""" Evaluates the equation f(x,y) = (1000*x-3)^2 + (1000*x)*(0.01*(y+1000)) + (0.01*(y+1000)+4)^2 - 3 """
# set up interface to the framework
# pylint: disable-msg=E1101
x = Float(0.0, iotype='in', desc='The variable x')
y = Float(0.0, iotype='in', desc='The variable y')
f_xy = Float(iotype='out', desc='F(x,y)')
def execute(self):
"""f(x,y) = (1000*x-3)^2 + (1000*x)*(0.01*(y+1000)) + (0.01*(y+1000)+4)^2 - 3
Optimal solution (minimum): x = 0.0066666666666666671; y = -1733.33333333333337
"""
x = self.x
y = self.y
self.f_xy = (1000*x-3)**2 + (1000*x)*(0.01*(y+1000)) + (0.01*(y+1000)+4)**2 - 3
class OptimizationUnconstrainedScaleShift(Assembly):
"""Unconstrained optimization of the Paraboloid Component."""
def configure(self):
""" Creates a new Assembly containing a Paraboloid and an optimizer"""
# pylint: disable-msg=E1101
# Create Optimizer instance
self.add('driver', SLSQPdriver())
# Create Paraboloid component instances
self.add('paraboloid', Paraboloid_shift())
# Driver process definition
self.driver.workflow.add('paraboloid')
        # SLSQP Flags
self.driver.iprint = 0
# Objective
self.driver.add_objective('paraboloid.f_xy')
# Design Variables
self.driver.add_parameter('paraboloid.x', low=-1000000., high=1000000.,
scaler=0.001)
self.driver.add_parameter('paraboloid.y', low=-1000000., high=1000000.,
scaler=1000.0, adder=-1000.0)
class ScalerAdderExampleTestCase(unittest.TestCase):
def test_scale(self):
opt_problem = OptimizationUnconstrainedScale()
opt_problem.run()
assert_rel_error(self, opt_problem.paraboloid.x, 0.006667, 0.001)
assert_rel_error(self, opt_problem.paraboloid.y, -733.333313, 0.001)
J = opt_problem.driver.calc_gradient()
Jdict = opt_problem.driver.calc_gradient(return_format='dict')
def test_scale_gradients(self):
opt_problem = OptimizationUnconstrainedScale()
opt_problem.replace('driver', SimpleDriver())
opt_problem.run()
J = opt_problem.driver.calc_gradient()
Jdict = opt_problem.driver.calc_gradient(return_format='dict')
# Make sure untransforming works for dicts too
self.assertTrue(J[0][0] == Jdict['_pseudo_0.out0']['paraboloid.x'])
self.assertTrue(J[0][1] == Jdict['_pseudo_0.out0']['paraboloid.y'])
Jfddict = opt_problem.driver.calc_gradient(mode='fd', return_format='dict')
opt_problem.driver.run_iteration()
Jfd = opt_problem.driver.calc_gradient(mode='fd')
# Make sure untransforming works for dicts too
self.assertTrue(Jfd[0][0] == Jfddict['_pseudo_0.out0']['paraboloid.x'])
self.assertTrue(Jfd[0][1] == Jfddict['_pseudo_0.out0']['paraboloid.y'])
def test_scale_adder(self):
opt_problem = OptimizationUnconstrainedScaleShift()
opt_problem.run()
assert_rel_error(self, opt_problem.paraboloid.x, 0.006667, 0.001)
assert_rel_error(self, opt_problem.paraboloid.y, -1733.333313, 0.001)
if __name__ == "__main__":
unittest.main()
|
gpl-2.0
| -6,789,788,535,626,708,000
| 33.737179
| 114
| 0.60823
| false
| 3.314373
| true
| false
| false
|
ruuk/service.xbmc.tts
|
enabler.py
|
1
|
3477
|
# -*- coding: utf-8 -*-
import os, sys, xbmc, xbmcaddon
DISABLE_PATH = os.path.join(xbmc.translatePath('special://profile').decode('utf-8'), 'addon_data', 'service.xbmc.tts', 'DISABLED')
ENABLE_PATH = os.path.join(xbmc.translatePath('special://profile').decode('utf-8'), 'addon_data', 'service.xbmc.tts', 'ENABLED')
def getXBMCVersion():
import json
resp = xbmc.executeJSONRPC('{ "jsonrpc": "2.0", "method": "Application.GetProperties", "params": {"properties": ["version", "name"]}, "id": 1 }')
data = json.loads(resp)
if not 'result' in data: return None
if not 'version' in data['result']: return None
return data['result']['version']
BASE = '{ "jsonrpc": "2.0", "method": "Addons.SetAddonEnabled", "params": { "addonid": "service.xbmc.tts","enabled":%s}, "id": 1 }'
def enableAddon():
if os.path.exists(DISABLE_PATH):
os.remove(DISABLE_PATH)
markPreOrPost(enable=True)
if isPostInstalled():
if addonIsEnabled():
xbmc.executebuiltin('RunScript(service.xbmc.tts)')
else:
xbmc.executeJSONRPC(BASE % 'true') #So enable it instead
else:
xbmc.executebuiltin('RunScript(service.xbmc.tts)')
def disableAddon():
if os.path.exists(ENABLE_PATH):
os.remove(ENABLE_PATH)
markPreOrPost(disable=True)
if isPostInstalled():
version = getXBMCVersion()
if not version or version['major'] < 13: return #Disabling in this manner crashes on Frodo
xbmc.executeJSONRPC(BASE % 'false') #Try to disable it
#if res and 'error' in res: #If we have an error, it's already disabled
#print res
def markPreOrPost(enable=False, disable=False):
if os.path.exists(ENABLE_PATH) or enable:
with open(ENABLE_PATH, 'w') as f:
f.write(isPostInstalled() and 'POST' or 'PRE')
if os.path.exists(DISABLE_PATH) or disable:
with open(DISABLE_PATH, 'w') as f:
f.write(isPostInstalled() and 'POST' or 'PRE')
def addonIsEnabled():
if os.path.exists(DISABLE_PATH):
return False
if isPostInstalled():
import json
resp = xbmc.executeJSONRPC('{ "jsonrpc": "2.0", "id": 1, "method": "Addons.GetAddonDetails", "params": {"addonid":"service.xbmc.tts","properties": ["name","version","enabled"]}}')
data = json.loads(resp)
if not 'result' in data: return False
if not 'addon' in data['result']: return False
if not 'enabled' in data['result']['addon']: return False
return data['result']['addon']['enabled']
else:
return True
def toggleEnabled():
try:
if not addonIsEnabled(): raise Exception('Addon Disabled')
xbmcaddon.Addon('service.xbmc.tts')
xbmc.log('service.xbmc.tts: DISABLING')
xbmc.executebuiltin('XBMC.RunScript(service.xbmc.tts,key.SHUTDOWN)')
except:
xbmc.log('service.xbmc.tts: ENABLING')
enableAddon()
def reset():
if not addonIsEnabled(): return
disableAddon()
ct=0
while addonIsEnabled() and ct < 11:
xbmc.sleep(500)
ct+=1
enableAddon()
def isPostInstalled():
homePath = xbmc.translatePath('special://home').decode('utf-8')
postInstalledPath = os.path.join(homePath, 'addons', 'service.xbmc.tts')
return os.path.exists(postInstalledPath)
if __name__ == '__main__':
arg = None
if len(sys.argv) > 1: arg = sys.argv[1]
if arg == 'RESET':
reset()
else:
toggleEnabled()
|
gpl-2.0
| 9,017,643,453,152,437,000
| 32.12381
| 187
| 0.625252
| false
| 3.494472
| false
| false
| false
|
uclouvain/osis
|
base/models/session_exam_deadline.py
|
1
|
4136
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
from django.db import models
from base.models.enums import number_session
from base.signals.publisher import compute_student_score_encoding_deadline
from osis_common.models.osis_model_admin import OsisModelAdmin
class SessionExamDeadlineAdmin(OsisModelAdmin):
list_display = ('offer_enrollment', 'deadline', 'deadline_tutor', 'number_session', 'changed')
list_filter = ('number_session',)
raw_id_fields = ('offer_enrollment',)
search_fields = ['offer_enrollment__student__person__first_name', 'offer_enrollment__student__person__last_name',
'offer_enrollment__student__registration_id', 'offer_enrollment__education_group_year__acronym']
class SessionExamDeadline(models.Model):
external_id = models.CharField(max_length=100, blank=True, null=True, db_index=True)
changed = models.DateTimeField(null=True, auto_now=True)
deadline = models.DateField()
deliberation_date = models.DateField(blank=True, null=True)
deadline_tutor = models.IntegerField(null=True, blank=True) # Delta day(s)
number_session = models.IntegerField(choices=number_session.NUMBERS_SESSION)
offer_enrollment = models.ForeignKey('OfferEnrollment', on_delete=models.CASCADE)
__original_deliberation_date = None
def __init__(self, *args, **kwargs):
super(SessionExamDeadline, self).__init__(*args, **kwargs)
self.__original_deliberation_date = self.deliberation_date
def save(self, *args, **kwargs):
super(SessionExamDeadline, self).save(*args, **kwargs)
if self.deliberation_date != self.__original_deliberation_date:
compute_student_score_encoding_deadline.send(sender=self.__class__, session_exam_deadline=self)
@property
def deadline_tutor_computed(self):
return compute_deadline_tutor(self.deadline, self.deadline_tutor)
@property
def is_deadline_reached(self):
return self.deadline < datetime.date.today()
@property
def is_deadline_tutor_reached(self):
if self.deadline_tutor_computed:
return self.deadline_tutor_computed < datetime.date.today()
return self.is_deadline_reached
def __str__(self):
return u"%s-%s" % (self.offer_enrollment, self.number_session)
def compute_deadline_tutor(deadline, deadline_tutor):
if deadline_tutor is not None:
return deadline - datetime.timedelta(days=deadline_tutor)
return None
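# Illustrative example (added): compute_deadline_tutor(datetime.date(2019, 6, 30), 5) == datetime.date(2019, 6, 25).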
def filter_by_nb_session(nb_session):
return SessionExamDeadline.objects.filter(number_session=nb_session)
def get_by_offer_enrollment_nb_session(offer_enrollment, nb_session):
try:
return SessionExamDeadline.objects.get(offer_enrollment=offer_enrollment.id,
number_session=nb_session)
except SessionExamDeadline.DoesNotExist:
return None
|
agpl-3.0
| -3,591,662,348,334,433,300
| 42.072917
| 117
| 0.685127
| false
| 3.897267
| false
| false
| false
|
mabhub/Geotrek
|
geotrek/settings/base.py
|
1
|
17906
|
import os
import sys
from django.contrib.messages import constants as messages
from geotrek import __version__
from . import PROJECT_ROOT_PATH
def gettext_noop(s):
return s
DEBUG = False
TEMPLATE_DEBUG = DEBUG
TEST = 'test' in sys.argv
VERSION = __version__
ADMINS = (
('Makina Corpus', 'geobi@makina-corpus.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'OPTIONS': {},
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
#
# PostgreSQL Schemas for apps and models.
#
# Caution: editing this setting might not be enough.
# Indeed, it won't apply to apps that are not managed by South, nor to database views and functions.
# See all sql/*-schemas.sql files in each Geotrek app.
#
DATABASE_SCHEMAS = {
'default': 'geotrek',
'auth': 'django',
'django': 'django',
'easy_thumbnails': 'django',
'south': 'django',
'feedback': 'gestion',
'infrastructure': 'gestion',
'maintenance': 'gestion',
'tourism': 'tourisme',
'trekking': 'rando',
'zoning': 'zonage',
'land': 'foncier',
}
DATABASES['default']['OPTIONS'] = {
'options': '-c search_path=public,%s' % ','.join(set(DATABASE_SCHEMAS.values()))
}
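# Note (added): this builds a libpq options string such as
# '-c search_path=public,geotrek,django,gestion,tourisme,rando,zonage,foncier'
# (the schema order after "public" is not deterministic because a set() is used).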
#
# Authentication
#
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
AUTH_PROFILE_MODULE = 'authent.UserProfile'
# Settings required for geotrek.authent.backend.DatabaseBackend :
AUTHENT_DATABASE = None
AUTHENT_TABLENAME = None
AUTHENT_GROUPS_MAPPING = {
'PATH_MANAGER': 1,
'TREKKING_MANAGER': 2,
'EDITOR': 3,
'READER': 4,
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Paris'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'fr'
MODELTRANSLATION_DEFAULT_LANGUAGE = LANGUAGE_CODE
LANGUAGES = (
('en', gettext_noop('English')),
('fr', gettext_noop('French')),
('it', gettext_noop('Italian')),
('es', gettext_noop('Spanish')),
)
LOCALE_PATHS = (
os.path.join(PROJECT_ROOT_PATH, 'locale'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
DATE_INPUT_FORMATS = ('%d/%m/%Y',)
ROOT_URL = ''
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
LOGIN_REDIRECT_URL = 'home'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT_PATH, 'media')
UPLOAD_DIR = 'upload' # media root subdir
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
MEDIA_URL_SECURE = '/media_secure/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT_PATH, 'static'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
'compressor.finders.CompressorFinder',
)
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
COMPRESSOR_ENABLED = False
COMPRESS_PARSER = 'compressor.parser.HtmlParser'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'public_key'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'geotrek.authent.middleware.LocaleForcedMiddleware',
'django.middleware.locale.LocaleMiddleware',
'geotrek.common.middleware.APILocaleMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'geotrek.authent.middleware.CorsMiddleware',
'mapentity.middleware.AutoLoginMiddleware'
)
ROOT_URLCONF = 'geotrek.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'geotrek.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
'mapentity.context_processors.settings',
)
#
# /!\ Application names (last levels) must be unique
# (c.f. auth/authent)
# https://code.djangoproject.com/ticket/12288
#
PROJECT_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.gis',
)
# Do not migrate translated fields, they differ per instance, and
# can be added/removed using `update_translation_fields`
if 'schemamigration' not in sys.argv:
PROJECT_APPS += ('modeltranslation',)
PROJECT_APPS += (
'south',
'leaflet',
'floppyforms',
'crispy_forms',
'compressor',
'djgeojson',
'tinymce',
'easy_thumbnails',
'shapes',
'paperclip',
'mapentity',
'rest_framework',
'embed_video',
'djcelery',
)
INSTALLED_APPS = PROJECT_APPS + (
'geotrek.cirkwi',
'geotrek.authent',
'geotrek.common',
'geotrek.altimetry',
'geotrek.core',
'geotrek.infrastructure',
'geotrek.maintenance',
'geotrek.zoning',
'geotrek.land',
'geotrek.trekking',
'geotrek.tourism',
'geotrek.flatpages',
'geotrek.feedback',
)
SERIALIZATION_MODULES = {
'geojson': 'djgeojson.serializers'
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
    # The fat backend is used to store big chunks of data (>1 Mo)
'fat': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'formatters': {
'simple': {
'format': '%(levelname)s %(asctime)s %(name)s %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'logging.NullHandler'
},
'console': {
'level': 'WARNING',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django.db.backends': {
'handlers': ['console', 'mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'django.request': {
'handlers': ['console', 'mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'django': {
'handlers': ['console', 'mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'south': {
'handlers': ['console', 'mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'geotrek': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
'propagate': False,
},
'mapentity': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
'propagate': False,
},
'': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
'propagate': False,
},
}
}
THUMBNAIL_ALIASES = {
'': {
'thumbnail': {'size': (150, 150)},
# Thumbnails for public trek website
'small-square': {'size': (120, 120), 'crop': True},
'medium': {'size': (800, 800)},
# Header image for trek export (keep ratio of TREK_EXPORT_HEADER_IMAGE_SIZE)
'print': {'size': (1000, 500), 'crop': 'smart'},
},
}
PAPERCLIP_CONFIG = {
'ENABLE_VIDEO': True,
'FILETYPE_MODEL': 'common.FileType',
'ATTACHMENT_TABLE_NAME': 'fl_t_fichier',
}
# Data projection
SRID = 3857
# API projection (client-side), can differ from SRID (database). Leaflet requires 4326.
API_SRID = 4326
# Extent in native projection (Toulouse area)
SPATIAL_EXTENT = (144968, 5415668, 175412, 5388753)
MAPENTITY_CONFIG = {
'TITLE': gettext_noop("Geotrek"),
'TEMP_DIR': '/tmp',
'HISTORY_ITEMS_MAX': 7,
'CONVERSION_SERVER': 'http://127.0.0.1:6543',
'CAPTURE_SERVER': 'http://127.0.0.1:8001',
'ROOT_URL': ROOT_URL,
'MAP_BACKGROUND_FOGGED': True,
'GEOJSON_LAYERS_CACHE_BACKEND': 'fat',
'SENDFILE_HTTP_HEADER': 'X-Accel-Redirect',
'DRF_API_URL_PREFIX': r'^api/(?P<lang>\w+)/',
}
DEFAULT_STRUCTURE_NAME = gettext_noop('Default')
VIEWPORT_MARGIN = 0.1 # On list page, around spatial extent from settings.ini
PATHS_LINE_MARKER = 'dotL'
PATH_SNAPPING_DISTANCE = 1 # Distance of path snapping in meters
SNAP_DISTANCE = 30 # Distance of snapping in pixels
ALTIMETRIC_PROFILE_PRECISION = 25 # Sampling precision in meters
ALTIMETRIC_PROFILE_BACKGROUND = 'white'
ALTIMETRIC_PROFILE_COLOR = '#F77E00'
ALTIMETRIC_PROFILE_HEIGHT = 400
ALTIMETRIC_PROFILE_WIDTH = 800
ALTIMETRIC_PROFILE_FONTSIZE = 25
ALTIMETRIC_PROFILE_FONT = 'ubuntu'
ALTIMETRIC_PROFILE_MIN_YSCALE = 1200 # Minimum y scale (in meters)
ALTIMETRIC_AREA_MAX_RESOLUTION = 150 # Maximum number of points (by width/height)
ALTIMETRIC_AREA_MARGIN = 0.15
# Let this be defined at instance-level
LEAFLET_CONFIG = {
'SRID': SRID,
'TILES': [
('Scan', 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',),
('Ortho', 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.jpg'),
],
'TILES_EXTENT': SPATIAL_EXTENT,
# Extent in API projection (Leaflet view default extent)
'SPATIAL_EXTENT': (1.3, 43.7, 1.5, 43.5),
'NO_GLOBALS': False,
'PLUGINS': {
'geotrek': {'js': ['core/leaflet.lineextremities.js',
'core/leaflet.textpath.js',
'trekking/points_reference.js',
'trekking/parking_location.js']},
'topofields': {'js': ['core/geotrek.forms.snap.js',
'core/geotrek.forms.topology.js',
'core/dijkstra.js',
'core/multipath.js',
'core/topology_helper.js']}
}
}
""" This *pool* of colors is used to colorized lands records.
"""
COLORS_POOL = {'land': ['#f37e79', '#7998f3', '#bbf379', '#f379df', '#f3bf79', '#9c79f3', '#7af379'],
'physical': ['#f3799d', '#79c1f3', '#e4f379', '#de79f3', '#79f3ba', '#f39779', '#797ff3'],
'competence': ['#a2f379', '#f379c6', '#79e9f3', '#f3d979', '#b579f3', '#79f392', '#f37984'],
'signagemanagement': ['#79a8f3', '#cbf379', '#f379ee', '#79f3e3', '#79f3d3'],
'workmanagement': ['#79a8f3', '#cbf379', '#f379ee', '#79f3e3', '#79f3d3'],
'restrictedarea': ['plum', 'violet', 'deeppink', 'orchid',
'darkviolet', 'lightcoral', 'palevioletred',
'MediumVioletRed', 'MediumOrchid', 'Magenta',
'LightSalmon', 'HotPink', 'Fuchsia']}
MAP_STYLES = {
'path': {'weight': 2, 'opacity': 1.0, 'color': '#FF4800'},
'city': {'weight': 4, 'color': 'orange', 'opacity': 0.3, 'fillOpacity': 0.0},
'district': {'weight': 6, 'color': 'orange', 'opacity': 0.3, 'fillOpacity': 0.0, 'dashArray': '12, 12'},
'restrictedarea': {'weight': 2, 'color': 'red', 'opacity': 0.5, 'fillOpacity': 0.5},
'land': {'weight': 4, 'color': 'red', 'opacity': 1.0},
'physical': {'weight': 6, 'color': 'red', 'opacity': 1.0},
'competence': {'weight': 4, 'color': 'red', 'opacity': 1.0},
'workmanagement': {'weight': 4, 'color': 'red', 'opacity': 1.0},
'signagemanagement': {'weight': 5, 'color': 'red', 'opacity': 1.0},
'print': {
'path': {'weight': 1},
'trek': {'color': '#FF3300', 'weight': 7, 'opacity': 0.5,
'arrowColor': 'black', 'arrowSize': 10},
}
}
LAYER_PRECISION_LAND = 4 # Number of fraction digit
LAYER_SIMPLIFY_LAND = 10 # Simplification tolerance
LAND_BBOX_CITIES_ENABLED = True
LAND_BBOX_DISTRICTS_ENABLED = True
LAND_BBOX_AREAS_ENABLED = False
PUBLISHED_BY_LANG = True
EXPORT_MAP_IMAGE_SIZE = {
'trek': (14.1, 11),
'poi': (14.1, 11),
'touristiccontent': (14.1, 11),
'touristicevent': (14.1, 11),
}
EXPORT_HEADER_IMAGE_SIZE = {
'trek': (10.7, 5.35), # Keep ratio of THUMBNAIL_ALIASES['print']
'poi': (10.7, 5.35), # Keep ratio of THUMBNAIL_ALIASES['print']
'touristiccontent': (10.7, 5.35), # Keep ratio of THUMBNAIL_ALIASES['print']
'touristicevent': (10.7, 5.35), # Keep ratio of THUMBNAIL_ALIASES['print']
}
COMPLETENESS_FIELDS = {
'trek': ['departure', 'duration', 'difficulty', 'description_teaser']
}
TRAIL_MODEL_ENABLED = True
TREKKING_TOPOLOGY_ENABLED = True
FLATPAGES_ENABLED = False # False because still experimental
TOURISM_ENABLED = False # False because still experimental
TREK_POI_INTERSECTION_MARGIN = 500 # meters (used only if TREKKING_TOPOLOGY_ENABLED = False)
TOURISM_INTERSECTION_MARGIN = 500 # meters (always used)
SIGNAGE_LINE_ENABLED = False
TREK_POINTS_OF_REFERENCE_ENABLED = True
TREK_EXPORT_POI_LIST_LIMIT = 14
TREK_EXPORT_INFORMATION_DESK_LIST_LIMIT = 2
TREK_DAY_DURATION = 10 # Max duration to be done in one day
TREK_ICON_SIZE_POI = 18
TREK_ICON_SIZE_PARKING = 18
TREK_ICON_SIZE_INFORMATION_DESK = 18
# Static offsets in projection units
TOPOLOGY_STATIC_OFFSETS = {'land': -5,
'physical': 0,
'competence': 5,
'signagemanagement': -10,
'workmanagement': 10}
MESSAGE_TAGS = {
messages.SUCCESS: 'alert-success',
messages.INFO: 'alert-info',
messages.DEBUG: 'alert-info',
messages.WARNING: 'alert-error',
messages.ERROR: 'alert-error',
}
CACHE_TIMEOUT_LAND_LAYERS = 60 * 60 * 24
CACHE_TIMEOUT_TOURISM_DATASOURCES = 60 * 60 * 24
TREK_CATEGORY_ORDER = None
TOURISTIC_EVENT_CATEGORY_ORDER = None
SPLIT_TREKS_CATEGORIES_BY_PRACTICE = False
SPLIT_TREKS_CATEGORIES_BY_ACCESSIBILITY = False
HIDE_PUBLISHED_TREKS_IN_TOPOLOGIES = False
ZIP_TOURISTIC_CONTENTS_AS_POI = False
CRISPY_ALLOWED_TEMPLATE_PACKS = ('bootstrap', 'bootstrap3')
CRISPY_TEMPLATE_PACK = 'bootstrap'
# Mobile app_directories
MOBILE_TILES_URL = 'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png'
MOBILE_TILES_RADIUS_LARGE = 0.01 # ~1 km
MOBILE_TILES_RADIUS_SMALL = 0.005 # ~500 m
MOBILE_TILES_GLOBAL_ZOOMS = range(13)
MOBILE_TILES_LOW_ZOOMS = range(13, 15)
MOBILE_TILES_HIGH_ZOOMS = range(15, 17)
import djcelery
djcelery.setup_loader()
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
BROKER_URL = 'redis://127.0.0.1:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_RESULT_EXPIRES = 5
TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner'
|
bsd-2-clause
| -7,784,692,578,393,617,000
| 30.414035
| 108
| 0.631744
| false
| 3.229798
| true
| false
| false
|
sojournexx/python
|
Assignments/TanAndrew_assign6.py
|
1
|
5318
|
#Andrew Tan, 3/25, Section 010
import myfunctions
import random
#Ask user for inputs and check validity
while True:
qns = int(input("How many problems would you like to attempt? "))
if qns <= 0:
print("Invalid number, try again\n")
continue
else:
break
while True:
width = int(input("How wide do you want your digits to be? 5-10: "))
if width < 5 or width > 10:
print("Invalid width, try again\n")
continue
else:
break
while True:
drill = str.lower(input("Would you like to activate 'drill' mode? yes or no: "))
if drill != "yes" and drill != "no":
print("Invalid response, try again\n")
continue
else:
break
print("\nHere we go!")
#Define variables to track score and statistics
tscore = 0
addition = 0
subtraction = 0
multiplication = 0
division = 0
addition_score = 0
subtraction_score = 0
multiplication_score = 0
division_score = 0
#Set number of questions
for i in range(qns):
print("\nWhat is .....\n")
#Define parameters
x = random.randint(0, 9)
op = random.randint(1, 4)
y = random.randint(0, 9)
#Check for valid division equation
if op == 4:
if y == 0:
y = random.randint(1, 9)
while x % y != 0:
x = random.randint(0, 9)
y = random.randint(1, 9)
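    # (Added note) For division problems this guarantees y != 0 and x % y == 0,
    # so every generated quotient is a whole number.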
#Display first number
if x == 0:
myfunctions.number_0(width)
elif x == 1:
myfunctions.number_1(width)
elif x == 2:
myfunctions.number_2(width)
elif x == 3:
myfunctions.number_3(width)
elif x == 4:
myfunctions.number_4(width)
elif x == 5:
myfunctions.number_5(width)
elif x == 6:
myfunctions.number_6(width)
elif x == 7:
myfunctions.number_7(width)
elif x == 8:
myfunctions.number_8(width)
elif x == 9:
myfunctions.number_9(width)
#Display operator
if op == 1:
op = "+"
myfunctions.plus(width)
addition += 1
elif op == 2:
op = "-"
myfunctions.minus(width)
subtraction += 1
elif op == 3:
op = "*"
myfunctions.multiply(width)
multiplication += 1
elif op == 4:
op = "/"
myfunctions.divide(width)
division += 1
#Display second number
if y == 0:
myfunctions.number_0(width)
elif y == 1:
myfunctions.number_1(width)
elif y == 2:
myfunctions.number_2(width)
elif y == 3:
myfunctions.number_3(width)
elif y == 4:
myfunctions.number_4(width)
elif y == 5:
myfunctions.number_5(width)
elif y == 6:
myfunctions.number_6(width)
elif y == 7:
myfunctions.number_7(width)
elif y == 8:
myfunctions.number_8(width)
elif y == 9:
myfunctions.number_9(width)
#Ask user for answer and check answer
if drill == "no":
z = int(input("= "))
if myfunctions.check_answer(x, y, z, op) == True:
print("Correct!")
tscore += 1
if op == "+":
addition_score += 1
if op == "-":
subtraction_score += 1
if op == "*":
multiplication_score += 1
if op == "/":
division_score += 1
else:
print("Sorry, that's not correct.")
elif drill == "yes":
while True:
z = int(input("= "))
if myfunctions.check_answer(x, y, z, op) == False:
print("Sorry, that's not correct.")
if op == "+":
addition_score += 1
if op == "-":
subtraction_score += 1
if op == "*":
multiplication_score += 1
if op == "/":
division_score += 1
continue
else:
print("Correct!")
break
#Display score
if drill == "no":
print("\nYou got %d out of %d correct!" %(tscore, qns))
for operator, count, score in zip(["addition", "subtraction", "multiplication", "division"], [addition, subtraction, multiplication, division], [addition_score, subtraction_score, multiplication_score, division_score]):
if count == 0:
print("\nNo %s problems presented" %(operator))
else:
print("\nTotal %s problems presented: %d" %(operator, count))
print("Correct %s problems: %d (%s)" %(operator, score, format(score/count, ".1%")))
elif drill == "yes":
for operator, count, score in zip(["addition", "subtraction", "multiplication", "division"], [addition, subtraction, multiplication, division], [addition_score, subtraction_score, multiplication_score, division_score]):
if score == 0:
praise = "(perfect!)"
else:
praise = ""
if count == 0:
print("\nNo %s problems presented" %(operator))
else:
print("\nTotal %s problems presented: %d" %(operator, count))
print("# of extra attempts needed: %d %s" %(score, praise))
|
mit
| -6,892,856,213,811,609,000
| 28.045198
| 223
| 0.511847
| false
| 3.924723
| false
| false
| false
|
FEniCS/dolfin
|
demo/undocumented/adaptive-poisson/python/demo_adaptive-poisson.py
|
1
|
2889
|
"""This demo program solves Poisson's equation
- div grad u(x, y) = f(x, y)
on the unit square with source f given by
f(x, y) = exp(-100(x^2 + y^2))
and homogeneous Dirichlet boundary conditions.
Note that we use a simplified error indicator, ignoring
edge (jump) terms and the size of the interpolation constant.
"""
# Copyright (C) 2008 Rolv Erlend Bredesen
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Anders Logg 2008-2011
from __future__ import print_function
from dolfin import *
from numpy import array, sqrt
from math import pow
from six.moves import xrange as range
TOL = 5e-4 # Error tolerance
REFINE_RATIO = 0.50 # Refine 50 % of the cells in each iteration
MAX_ITER = 20 # Maximal number of iterations
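# (Added summary) The simplified per-cell indicator computed in the loop below is
#   gamma_K = h_K * |f(x_K)| * sqrt(|K|)
# with h_K the cell diameter, x_K the cell midpoint and |K| the cell volume;
# the global estimate is E = sqrt(sum_K gamma_K^2).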
# Create initial mesh
mesh = UnitSquareMesh(4, 4)
source_str = "exp(-100.0*(pow(x[0], 2) + pow(x[1], 2)))"
source = eval("lambda x: " + source_str)
# Adaptive algorithm
for level in range(MAX_ITER):
# Define variational problem
V = FunctionSpace(mesh, "CG", 1)
v = TestFunction(V)
u = TrialFunction(V)
f = Expression(source_str, degree=2)
a = dot(grad(v), grad(u))*dx
L = v*f*dx
# Define boundary condition
u0 = Constant(0.0)
bc = DirichletBC(V, u0, DomainBoundary())
# Compute solution
u = Function(V)
solve(a == L, u, bc)
# Compute error indicators
h = array([c.h() for c in cells(mesh)])
K = array([c.volume() for c in cells(mesh)])
R = array([abs(source([c.midpoint().x(), c.midpoint().y()])) for c in cells(mesh)])
gamma = h*R*sqrt(K)
# Compute error estimate
E = sum([g*g for g in gamma])
E = sqrt(MPI.sum(mesh.mpi_comm(), E))
print("Level %d: E = %g (TOL = %g)" % (level, E, TOL))
# Check convergence
if E < TOL:
info("Success, solution converged after %d iterations" % level)
break
# Mark cells for refinement
cell_markers = MeshFunction("bool", mesh, mesh.topology().dim())
gamma_0 = sorted(gamma, reverse=True)[int(len(gamma)*REFINE_RATIO)]
gamma_0 = MPI.max(mesh.mpi_comm(), gamma_0)
for c in cells(mesh):
cell_markers[c] = gamma[c.index()] > gamma_0
# Refine mesh
mesh = refine(mesh, cell_markers)
# Plot mesh
plot(mesh)
|
lgpl-3.0
| -1,866,448,215,433,288,200
| 29.410526
| 87
| 0.661821
| false
| 3.324511
| false
| false
| false
|
theintencity/flash-videoio
|
examples/django-apps/project/experts/models.py
|
1
|
3541
|
import datetime
from google.appengine.api import users
from google.appengine.ext import db
class User(db.Model):
name = db.StringProperty('Full Name')
account = db.UserProperty()
phone_number = db.PhoneNumberProperty('Phone Number')
address = db.PostalAddressProperty('Postal Address')
website = db.StringProperty('Homepage URL')
description = db.TextProperty('Brief Biography')
rating = db.FloatProperty(default=0.0)
rating_count = db.IntegerProperty(default=0)
tags = db.StringListProperty('Expertise, one per line', default=None)
availability = db.TextProperty('Availability', default='Available by appointment on weekdays in PST timezone')
has_chat = db.BooleanProperty('Use Google Chat', default=False)
def email(self):
result = self.account.nickname() if self.account else ''
return (result + '@gmail.com') if result and '@' not in result else result
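        # Example (added): a nickname of 'jdoe' yields 'jdoe@gmail.com', while 'jdoe@uni.edu' is returned unchanged.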
def get_current_user():
account = users.get_current_user()
if account:
user = db.GqlQuery('SELECT * FROM User WHERE account = :1', account).get()
if not user:
user = User(name='', account=account)
user.put()
user.is_active = True
user.is_staff = users.is_current_user_admin()
else:
user = User()
user.is_active = False
return user
class Tag(db.Model):
name = db.StringProperty(required=True)
count = db.IntegerProperty(default=1)
class Event(db.Model):
subject = db.StringProperty()
description = db.TextProperty()
owner = db.StringProperty()
visitor = db.StringProperty()
start_time = db.DateTimeProperty()
end_time = db.DateTimeProperty()
created_on = db.DateTimeProperty(auto_now_add=True)
class Review(db.Model):
event = db.ReferenceProperty(Event, collection_name='event_set') # TODO make required=True
for_user = db.ReferenceProperty(User, required=True, collection_name='for_user_set')
by_user = db.ReferenceProperty(User, required=True, collection_name='by_user_set')
rating = db.IntegerProperty(default=3)
description = db.TextProperty()
modified_on = db.DateTimeProperty(auto_now=True)
class ClientStream(db.Model):
clientId = db.StringProperty(required=True)
visitor = db.StringProperty()
name = db.StringProperty(default='Anonymous')
publish = db.StringProperty(required=True)
play = db.StringProperty()
is_owner = db.BooleanProperty(default=False)
owner = db.StringProperty(required=True)
modified_on = db.DateTimeProperty(auto_now=True)
created_on = db.DateTimeProperty(auto_now_add=True)
def __repr__(self):
return '<ClientStream clientId=%r visitor=%r name=%r is_owner=%r owner=%r />'%(self.clientId, self.visitor, self.name, self.is_owner, self.owner)
def get_object(self, full=True):
if full:
return {'clientId': self.clientId, 'name': self.name, 'url': self.publish}
else:
return {'clientId': self.clientId}
class OfflineMessage(db.Model):
sender = db.StringProperty()
senderName = db.StringProperty()
receiver = db.StringProperty()
text = db.StringProperty(multiline=True)
created_on = db.DateTimeProperty(auto_now_add=True)
def __repr__(self):
return '<OfflineMessage sender=%r senderName=%r receiver=%r text=%r />'%(self.sender, self.senderName, self.receiver, self.text)
def get_object(self):
return {'senderName': self.senderName, 'text': self.text}
|
lgpl-3.0
| -634,891,817,617,899,600
| 36.680851
| 153
| 0.672409
| false
| 3.78312
| false
| false
| false
|
thesilencelies/SonnetConvs
|
InceptionModule.py
|
1
|
1790
|
#implementation of the standard Inception v3 inception module in sonnet
import tensorflow as tf
import sonnet as snt
class InceptionModule(snt.AbstractModule):
def __init__(self, output_channels, name="inception_module"):
super(InceptionModule, self).__init__(name=name)
self._output_channels = output_channels
def _build(self, inputs):
reshapeFlat = lambda x : tf.contrib.layers.flatten(x)
conv1d5 = snt.Conv2D(output_channels=self._output_channels, kernel_shape=1,
stride=1,name="inception5input")
conv1d3 = snt.Conv2D(output_channels=self._output_channels, kernel_shape=1,
stride=1,name="inception3input")
conv1dm = snt.Conv2D(output_channels=self._output_channels, kernel_shape=1,
stride=1,name="inceptionpoolinput")
conv1d1 = snt.Conv2D(output_channels=self._output_channels, kernel_shape=1,
stride=1,name="inception1channel")
conv3d5a = snt.Conv2D(output_channels=self._output_channels, kernel_shape=3,
stride=1,name="inception5stage1")
conv3d5b = snt.Conv2D(output_channels=self._output_channels, kernel_shape=3,
stride=1,name="inception5stage2")
conv3d3 = snt.Conv2D(output_channels=self._output_channels, kernel_shape=3,
stride=1,name="inception3channel")
maxpool = lambda x : tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1], padding='SAME')
return tf.concat([reshapeFlat(conv3d5b(conv3d5a(conv1d5(inputs)))),
reshapeFlat(conv3d3(conv1d3(inputs))),
reshapeFlat(maxpool(conv1dm(inputs))),
reshapeFlat(conv1d1(inputs))],1) # then connect it.
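# Minimal usage sketch (added; the input shape is an assumption, not from the original repo):
# inputs = tf.placeholder(tf.float32, [None, 32, 32, 16])
# module = InceptionModule(output_channels=64)
# features = module(inputs)  # flattened, concatenated outputs of the four branches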
|
apache-2.0
| -6,797,985,732,095,887,000
| 43.75
| 81
| 0.632961
| false
| 3.429119
| false
| false
| false
|
YutingZhang/lmdis-rep
|
exp-ae-aflw-30.py
|
1
|
2231
|
import tensorflow as tf
import os
import sys
from copy import copy
from model.pipeline import Pipeline
from tensorflow.python import debug as tf_debug
if __name__ == "__main__":
num_keypoints = 30
patch_feature_dim = 8
decoding_levels = 5
kp_transform_loss = 1e4
recon_weight = 0.001
learning_rate=0.01
keypoint_separation_bandwidth=0.04
keypoint_separation_loss_weight = 10.0
opt = {
"optimizer": "Adam",
"data_name": "aflw_80x80",
"recon_name": "gaussian_fixedvar_in_01",
"encoder_name": "general_80x80",
"decoder_name": "general_80x80",
"latent_dim": num_keypoints*2+(num_keypoints+1)*patch_feature_dim,
"train_color_jittering": True,
"train_random_mirroring": False,
"train_batch_size": 8,
"train_shuffle_capacity": 1000,
"learning_rate": learning_rate,
"max_epochs": 2000,
"weight_decay": 1e-6,
"test_steps": 5000,
"test_limit": 200,
"recon_weight": recon_weight,
}
opt["encoder_options"] = {
"keypoint_num": num_keypoints,
"patch_feature_dim": patch_feature_dim,
"ae_recon_type": opt["recon_name"],
"keypoint_concentration_loss_weight": 100.,
"keypoint_axis_balancing_loss_weight": 200.,
"keypoint_separation_loss_weight": keypoint_separation_loss_weight,
"keypoint_separation_bandwidth": keypoint_separation_bandwidth,
"keypoint_transform_loss_weight": kp_transform_loss,
"keypoint_decoding_heatmap_levels": decoding_levels,
"keypoint_decoding_heatmap_level_base": 0.5**(1/2),
"image_channels": 3,
}
opt["decoder_options"] = copy(opt["encoder_options"])
# -------------------------------------
model_dir = os.path.join("results/aflw_30")
checkpoint_dir = 'pretrained_results'
checkpoint_filename = 'celeba_30/model/snapshot_step_205317'
vp = Pipeline(None, opt, model_dir=model_dir)
print(vp.opt)
with vp.graph.as_default():
sess = vp.create_session()
vp.run_full_train_from_checkpoint(sess, checkpoint_dir = checkpoint_dir, checkpoint_filename=checkpoint_filename)
vp.run_full_test(sess)
|
apache-2.0
| -6,170,820,400,219,724,000
| 32.298507
| 121
| 0.618557
| false
| 3.390578
| false
| true
| false
|
q14035/pimouse_ros
|
scripts/motors2.py
|
1
|
2178
|
#!/usr/bin/env python
#encoding: utf8
import sys, rospy, math
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
class Motor():
def __init__(self):
if not self.set_power(False): sys.exit(1)
rospy.on_shutdown(self.set_power)
self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
self.srv_on = rospy.Service('motor_on', Trigger, self.callback_on)
self.srv_off = rospy.Service('motor_off', Trigger, self.callback_off)
self.last_time = rospy.Time.now()
self.using_cmd_vel = False
def set_power(self, onoff = False):
en = "/dev/rtmotoren0"
try:
with open(en, 'w') as f:
f.write("1\n" if onoff else "0\n")
self.is_on = onoff
return True
except:
rospy.logerr("cannot write to " + en)
return False
def set_raw_freq(self, left_hz, right_hz):
if not self.is_on:
rospy.logerr("not enpowered")
return
try:
with open("/dev/rtmotor_raw_l0", 'w') as lf, open("/dev/rtmotor_raw_r0", 'w') as rf:
lf.write(str(int(round(left_hz))) + "\n")
rf.write(str(int(round(right_hz))) + "\n")
except:
rospy.logerr("cannot write to rtmotor_raw_*")
def callback_raw_freq(self, message):
self.set_raw_freq(message.left_hz, message.right_hz)
def callback_cmd_vel(self, message):
forward_hz = 80000.0*message.linear.x/(9*math.pi)
rot_hz = 400.0*message.angular.z/math.pi
self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
self.using_cmd_vel = True
self.last_time = rospy.Time.now()
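        # Rough numbers (added): linear.x = 0.1 m/s gives forward_hz = 80000*0.1/(9*pi) ~ 283 Hz,
        # and angular.z = 1.0 rad/s gives rot_hz = 400/pi ~ 127 Hz.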
def onoff_response(self, onoff):
d = TriggerResponse()
d.success = self.set_power(onoff)
d.message = "ON" if self.is_on else "OFF"
return d
def callback_on(self, message): return self.onoff_response(True)
def callback_off(self, message): return self.onoff_response(False)
if __name__ == '__main__':
rospy.init_node('motors')
m = Motor()
rate = rospy.Rate(10)
while not rospy.is_shutdown():
if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
m.set_raw_freq(0, 0)
m.using_cmd_vel = False
rate.sleep()
|
gpl-3.0
| 1,233,239,839,957,850,400
| 29.676056
| 87
| 0.674472
| false
| 2.574468
| false
| false
| false
|
antoinecarme/pyaf
|
tests/perf/test_ozone_debug_perf.py
|
1
|
1566
|
import pandas as pd
import numpy as np
# from memory_profiler import profile
# from memprof import *
import pyaf.ForecastEngine as autof
import pyaf.Bench.TS_datasets as tsds
#get_ipython().magic('matplotlib inline')
# @memprof
def test_ozone_debug_perf():
b1 = tsds.load_ozone()
df = b1.mPastData
# df.tail(10)
# df[:-10].tail()
# df[:-10:-1]
# df.describe()
lEngine = autof.cForecastEngine()
lEngine
H = b1.mHorizon;
lEngine.mOptions.mDebugPerformance = True;
lEngine.mOptions.mEnableCycles = False;
lEngine.mOptions.mEnableTimeBasedTrends = False;
lEngine.mOptions.mEnableARModels = False;
lEngine.train(df , b1.mTimeVar , b1.mSignalVar, H);
lEngine.getModelInfo();
print(lEngine.mSignalDecomposition.mTrPerfDetails.head());
lEngine.mSignalDecomposition.mBestModel.mTimeInfo.mResolution
lEngine.standardPlots("outputs/my_ozone");
dfapp_in = df.copy();
dfapp_in.tail()
dfapp_out = lEngine.forecast(dfapp_in, H);
#dfapp_out.to_csv("outputs/ozone_apply_out.csv")
dfapp_out.tail(2 * H)
print("Forecast Columns " , dfapp_out.columns);
Forecast_DF = dfapp_out[[b1.mTimeVar , b1.mSignalVar, b1.mSignalVar + '_Forecast']]
print(Forecast_DF.info())
print("Forecasts\n" , Forecast_DF.tail(H).values);
print("\n\n<ModelInfo>")
print(lEngine.to_json());
print("</ModelInfo>\n\n")
print("\n\n<Forecast>")
print(Forecast_DF.tail(2*H).to_json(date_format='iso'))
print("</Forecast>\n\n")
test_ozone_debug_perf();
|
bsd-3-clause
| 6,925,707,315,643,373,000
| 25.1
| 87
| 0.664112
| false
| 2.89464
| false
| false
| false
|
assamite/TwatBot
|
tweets/reasoning.py
|
1
|
4674
|
'''
.. py:module:: reasoning
:platform: Unix
Reasoning object for the tweets.
'''
import logging
import traceback
logger = logging.getLogger('tweets.default')
class Reasoning():
'''Reasoning for the tweets.
Class is used to hold information about the tweet's construction, and contains
    a few utility functions for convenience.
After the tweet has been constructed, the class should hold at least
following attributes:
* color_code (str or unicode): color of the tweet in html-format.
    * color_name (str or unicode): name constructed for the color code
* tweet (str or unicode): text of the tweet
    * tweeted (bool): Was the constructed tweet sent to twitter
* retweet (bool): is the tweet a retweet
* retweet_url (str or unicode): URL for the retweet (if any)
* original_tweet (str or unicode): Original tweet if this is a retweet
* muse: class instance of the used Muse
* context: class instance of the used Context
* color_semantics: class instance of the used ColorSemantics.
* values (dict): dictionary of the appreciation values generated during the tweet's construction.
'''
def __init__(self, **kwargs):
self.color_code = ""
self.color_name = ""
self.tweet = ""
self.tweeted = False
self.retweet = False
self.retweet_url = ""
self.original_tweet = ""
self.muse = None
self.context = None
self.color_semantics = None
self.muse_classname = ""
self.color_semantics_classname = ""
self.context_classname = ""
self.values = {}
self.media = None
self.appreciation = 0.0
for k, v in kwargs.items():
setattr(self, k, v)
def __repr__(self):
ret = ""
for k, v in self.__dict__.items():
ret = ret + k + ": " + str(v) + "\n"
return ret
def set_attr(self, name, value):
'''Define new or change old attribute value.
Caller should take care of the possible conflicts when changing existing
attribute values.
:param name: Name of the attribute
:type name: str
:param value: New attribute value
:type value: Object
'''
setattr(self, name, value)
if name == 'muse':
setattr(self, 'muse_classname', value.__class__.__name__)
if name == 'context':
setattr(self, 'context_classname', value.__class__.__name__)
if name == 'color_semantics':
setattr(self, 'color_semantics_classname', value.__class__.__name__)
def set_attrs(self, mappings):
        '''Define new or change old attribute values in a batch.
Caller should take care of the possible conflicts when changing existing
attribute values.
:param mappings: Attribute mappings
:type mappings: dict
'''
for k, v in mappings.items():
self.set_attr(k, v)
def save(self):
'''Save tweet to database.
        :returns: bool -- True if the save was made, False if the save was skipped or an exception occurred
'''
from models import EveryColorBotTweet, Tweet, ReTweet
if self.tweet == "":
logger.info("Saving called for empty tweet. Skipping.")
return False
try:
twinst = Tweet(message = self.tweet, value = self.appreciation,\
muse = self.muse_classname,\
context = self.context_classname,\
color_code = self.color_code,\
color_name = self.color_name)
twinst.save()
if self.retweet:
screen_name = self.screen_name
if screen_name == 'everycolorbot':
inst = EveryColorBotTweet.objects.get_or_none(url = self.retweet_url)
if inst:
inst.tweeted = True
inst.save()
reinst = ReTweet(tweet_url = self.retweet_url,\
screen_name = screen_name, tweet = twinst)
reinst.save()
logger.info("Tweet saved to database: {}".format(self.tweet))
except Exception:
e = traceback.format_exc()
logger.error("Could not save tweet to database, because of error: {}".format(e))
return False
return True
|
mit
| 2,346,785,769,061,129,700
| 34.409091
| 104
| 0.545999
| false
| 4.485605
| false
| false
| false
|
swarna-k/MyDiary
|
app/models.py
|
1
|
1566
|
from app import db
from werkzeug import generate_password_hash, check_password_hash
class User(db.Model):
id = db.Column(db.Integer, primary_key = True)
firstname = db.Column(db.String(100))
lastname = db.Column(db.String(100))
email = db.Column(db.String(120), unique=True)
pwdhash = db.Column(db.String(54))
entries = db.relationship('Entry', backref='author', lazy='dynamic')
reminders = db.relationship('Reminder', backref='author', lazy='dynamic')
def __init__(self, firstname, lastname, email, password):
self.firstname = firstname.title()
self.lastname = lastname.title()
self.email = email.lower()
self.set_password(password)
def set_password(self, password):
self.pwdhash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.pwdhash, password)
def __repr__(self):
return '<User %r>' % (self.firstname)
class Entry(db.Model):
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.String(100))
body = db.Column(db.Text)
timestamp = db.Column(db.DateTime)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
def __repr__(self):
return '<Entry %r>' % (self.body)
class Reminder(db.Model):
id = db.Column(db.Integer, primary_key = True)
when = db.Column(db.DateTime)
body = db.Column(db.Text)
timestamp = db.Column(db.DateTime)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
def __repr__(self):
return '<Reminder %r>' % (self.body)
|
bsd-3-clause
| 1,779,423,689,113,203,200
| 29.705882
| 75
| 0.65645
| false
| 3.303797
| false
| false
| false
|
omarkadry/rsa_algorithm
|
RSA.py
|
1
|
5283
|
#!/usr/bin/python
#Algorithms Project Part 1a
#Omar Kadry
#CMSC 441
#Dr. Marron
#IMPLEMENTATION NOTES
#Python's built-in pow function uses Binary Exponentiation and reduction modulo n to compute modular
#exponentiation. This is the same algorithm as MODULAR-EXPONENTIATION(a,b,n) as used in the text
#For large number multiplication Python uses Karatsuba's method as discussed in class
#Encrypted using modulus of 2048 bits
#Message Encrypted with Private Key =
#549335432742725778252187541104443188156944438806863457411666058499398272260706426139538267238120336092084632198514701950566203930065985324580534295693425367212921830205866755643739579288731322322946366466576799796974416100601383412159359169170613839877922173796152893918170136479717941167924064476336789776106984955596378941959676443995574307557232184168653454435294749983774161045180981596162964832360087083009219442813368249004389009182055455524458934480504555947413171214222377987666294266525295763559510397442092718659910879958017424466509571661222667744582625838716048450963735149873220637697801126262181088272
#n = 2372112898706524098783243835606671423055801883554227254030743710505202283932667011668956139382911768876035660572032080308562219037288900124052316286309512108625859836958747947762092799677854295671866288119481685786760570903533545560435541052326183788082279075073373227880942687435505490994525413101260845901748238215480998501123816262694263026377952163660645333809073068011604416987281948409408692393376191358516220341631487894075618891499412550098438456600441042870219500840853342452184082591601805986792948794525871595912715813197678328912976549353915846570322821639411967156886422360861220109970600152445030560129
#public key e = 1977623957817836883919633554596704012915783900570809149483856078010145425692545878452812725561415102822918517227924598205956910940350062144643427460974258169951841328548095289498955467345087157904399185646775059360160689508306113707875539862799501027047474838298216312008836598256088581250099042957573530717659415412893768343977899980494510094815770699761034869232518446869348437561961594909995056962983992121384916099020899755884457999313029602625570516932900789485878260172195900227111449085645227576679740196755445527867666825244974372425673866849078226602801561771006724501838806746943672716086807419555183315337s
import sys
import os
import random
import math
import argparse
s = 5 #s for miller-rabin test
#Constants to make code more readable
PRIME = 1
COMPOSITE = 2
#Generates random pseudoprimes of 'size' bits
#validates them with the Miller-Rabin test
def generate_rand(size):
n = random.SystemRandom().getrandbits(size)
while(n == 1 or n == 0):
n = random.SystemRandom().getrandbits(size)
while True:
if n % 2 == 0:
n = n + 1
if(miller_rabin(n,s) == PRIME):
return n
n = n + 2
#Miller-Rabin test
def miller_rabin(n,s):
for j in range(0,s):
a = random.SystemRandom().randint(1, n-1)
if(witness(a,n)):
return COMPOSITE
return PRIME
#Witness function for miller-rabin
def witness(a,n):
t,u = calc_t_u(n-1)
x = []
x.append(modular_exponentiation(a,u,n))
for i in range(1,t+1):
x.append(modular_exponentiation(x[i-1],2,n))
if (x[i] == 1) and (x[i-1] != 1) and (x[i-1] != n-1):
return True
if x[t] != 1:
return True
return False
def modular_exponentiation(a,b,n):
a = int(a)
b = int(b)
n = int(n)
return pow(a,b,n)
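# Example (added): modular_exponentiation(7, 560, 561) == 1 even though 561 = 3*11*17 is composite
# (a Carmichael number) -- which is why witness() below also checks for nontrivial square roots of 1.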
#Calculates t and u for the witness function
def calc_t_u(n):
t = 0
u = n
while (u % 2 == 0):
u = u / 2
t+=1
return t,u
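# Example (added): calc_t_u(220) returns (2, 55), since 220 = 2**2 * 55.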
#Gets a value for e
#Generates a random value and checks it's relatively prime to phi_n
def get_e(phi_n):
e = random.SystemRandom().randint(3, phi_n)
while euclid(phi_n,e) != 1:
e = random.SystemRandom().randint(3,phi_n)
return e
#Euclid and extended euclid are iterative due to recursion depth errors
#being found when the modulus size was >= 1024 bits
#Euclids algorithm
def euclid(a,b):
if a < b:
a, b = b, a
while b != 0:
a, b = b, a % b
return a
#Extended Euclid's Algorithm
def extend_euclid(a,b):
x,y, u,v = 0,1, 1,0
while a != 0:
q, r = b//a, b%a
m, n = x-u*q, y-v*q
b,a, x,y, u,v = a,r, u,v, m,n
gcd = b
return gcd, x, y
def get_mult_inverse(e, phi_n):
a,b,n = e,1,phi_n
d,_x,_y = extend_euclid(a,n)
if(d % b == 0):
return _x % n
else:
return -1
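# Example (added): get_mult_inverse(7, 40) == 23, since 7*23 = 161 = 4*40 + 1.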
def msg_to_int(m):
x = 0
for c in m:
x = x << 8
x = x ^ ord(c)
return x
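# Example (added): msg_to_int("Hi") == (ord('H') << 8) ^ ord('i') == 72*256 + 105 == 18537.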
if __name__ == '__main__':
private_key = 0
public_key = 0
parser = argparse.ArgumentParser(description="Generates an RSA public and private key")
parser.add_argument("-s", "--size", type=int,
help="Size in bits of RSA Key to Generate", required=True)
parser.add_argument("-m", "--message", type=str, help="A Message to Encrypt")
args = parser.parse_args()
modulus_size = args.size
p = generate_rand(modulus_size//2)
q = generate_rand(modulus_size//2)
while(p == q):
q = generate_rand(modulus_size//2)
n = p * q
phi_n = (p - 1) * (q - 1)
e = get_e(phi_n)
d = int(get_mult_inverse(e, phi_n))
print "N = \n", n, '\n'
print "Private Key d = \n", int(d), '\n'
print "Public Key e = \n", int(e), '\n'
if(args.message):
m = args.message
"Encrypting: %s" % m
print "\"",m,"\" encrypted with the private key is\n",
m = msg_to_int(m)
p = modular_exponentiation(m,d,n)
print p
|
mit
| 2,305,337,433,324,493,600
| 33.305195
| 633
| 0.769071
| false
| 2.381876
| false
| false
| false
|
mprat/learningjulia
|
nbconvert_config.py
|
1
|
7416
|
from nbconvert.preprocessors import ExecutePreprocessor, Preprocessor
import numpy as np
def jekyllurl(path):
"""
Take the filepath of an image output by the ExportOutputProcessor
and convert it into a URL we can use with Jekyll
"""
return path.replace("../..", "")
def svg_filter(svg_xml):
"""
Remove the DOCTYPE and XML version lines from
the inline XML SVG
"""
svgstr = "".join(svg_xml)
start_index = svgstr.index("<svg")
end_index = svgstr.index("</svg>")
return svgstr[start_index:end_index + 6]
def var_def_to_var_list(var_def):
if 'linspace' in var_def:
v = var_def.replace("linspace(", "")
v = v.replace(")", "")
start, stop, num = v.split(",")
return np.linspace(
float(start.strip()),
float(stop.strip()),
float(num.strip()))
elif '[' in var_def and ']' in var_def:
v = var_def.replace("[", "")
v = v.replace("]", "")
v = v.split(",")
return [x.strip() for x in v]
else:
raise TypeError("not implemented for {}".format(var_def))
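# Examples (added): var_def_to_var_list("linspace(0, 1, 5)") -> array([0., 0.25, 0.5, 0.75, 1.])
# (num is passed to np.linspace as a float, which older NumPy accepts);
# var_def_to_var_list("[1, 2, 3]") -> ['1', '2', '3'] (the elements stay strings).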
class ExecuteWithInteractPreprocessor(ExecutePreprocessor):
def preprocess_cell(self, cell, resources, cell_index):
if cell.cell_type != 'code':
return cell, resources
if "@manipulate" in cell.source:
original_source = cell.source
cell_manipulate = cell.copy()
cell_source = original_source.split("\n")
cell_manipulate.source = "\n".join([cell_source[0], cell_source[-1]])
manipulate_output = self.run_cell(cell_manipulate)
outs = []
outs.extend(manipulate_output)
main_source = "\n".join(cell_source[1:-1])
var_def = cell_source[0].replace("@manipulate", "")
var_def = var_def.replace("for", "").strip().split("=")
var_name, var_list = var_def
# currently this only works for a single for loop
# turn all the variables into a loop
all_vars = var_def_to_var_list(var_list)
for next_var in all_vars:
var_defs = "{}={}".format(var_name, next_var)
cell_copy = cell.copy()
cell_copy.source = "\n".join([var_defs, main_source.strip()])
outputs = self.run_cell(cell_copy)
outs.extend(outputs)
cell.source = original_source
cell.outputs = outs
# fix the outputs
# probably better done at the postprocessing step
# import ipdb; ipdb.set_trace()
# raise TypeError("stopping")
else:
outputs = self.run_cell(cell)
cell.outputs = outputs
return cell, resources
# if 'Interact' in cell.outputs[0]['data']['text/plain']:
# there should be a widget here
class RemoveInteractJsShimPreprocessor(Preprocessor):
def preprocess(self, nb, resources):
"""
make sure the widgets resources get put into the resources
"""
if 'widgets' in nb['metadata'].keys():
resources['metadata']['widgets'] = nb['metadata']['widgets']
return super(RemoveInteractJsShimPreprocessor, self).preprocess(nb, resources)
def preprocess_cell(self, cell, resources, cell_index):
"""
remove any outputs that have interact-js-shim
"""
if 'outputs' in cell:
outputs = cell['outputs']
new_outputs = []
for output in outputs:
new_output = output.copy()
if "data" in output.keys():
data_output = output["data"]
new_data_output = data_output.copy()
if 'text/html' in data_output.keys():
text_html = data_output['text/html']
if text_html.startswith('<div id=\"interact-js-shim\">'):
start_index = text_html.find('<div id=\"interact-js-shim\">')
end_index = text_html.find('</div>')
new_html = ""
if start_index > 0:
new_html += text_html[0:start_index]
if end_index + 6 < len(text_html):
new_html += text_html[end_index+6:]
new_html = new_html.strip()
if len(new_html) > 0:
new_data_output['text/html'] = new_html
else:
del new_data_output['text/html']
else:
new_data_output['text/html'] = text_html
if len(new_data_output.keys()) > 0:
new_output['data'] = new_data_output
else:
del new_output['data']
if 'data' in new_output:
new_outputs.append(new_output)
else:
new_outputs.append(new_output)
cell['outputs'] = new_outputs
return cell, resources
class InsertWidgetsPreprocessor(Preprocessor):
def preprocess_cell(self, cell, resources, cell_index):
"""
if the cell is a cell with @manipulate, add the appropriate
widget script into the output
"""
if cell.cell_type != 'code':
return cell, resources
if "@manipulate" in cell.source:
widget_state = resources['metadata']['widgets']['application/vnd.jupyter.widget-state+json']['state']
interact_options = cell.outputs[0]['data']['text/plain']
start_index = interact_options.find('"')
model_name = interact_options[start_index + 1:]
next_index = model_name.find('"')
model_name = model_name[:next_index]
# match the widget based on the descriptions
matched_model_id = None
for model_id in widget_state.keys():
if widget_state[model_id]['state']['description'] == model_name:
matched_model_id = model_id
break
# construct the script tag
script_tag = '<script type="application/vnd.jupyter.widget-view+json">{"model_id": "' + matched_model_id + '"}</script>'
cell.outputs[0]['data']['text/html'] = script_tag
return cell, resources
c = get_config()
c.NbConvertApp.export_format = 'html'
c.NbConvertApp.output_files_dir = '../../assets/imgs/{notebook_name}'
c.HTMLExporter.preprocessors = [
'nbconvert.preprocessors.ExecutePreprocessor',
# ExecuteWithInteractPreprocessor,
'nbconvert.preprocessors.coalesce_streams',
'nbconvert.preprocessors.ExtractOutputPreprocessor',
RemoveInteractJsShimPreprocessor,
InsertWidgetsPreprocessor]
c.HTMLExporter.template_file = 'notebooks/jekyll.tpl'
c.HTMLExporter.filters = {"jekyllimgurl": jekyllurl, "svg_filter": svg_filter}
# if there's an error in one of the cells let the execution keep going
c.ExecutePreprocessor.allow_errors = True
# disable the timeout
c.ExecutePreprocessor.timeout = -1
c.ExecutePreprocessor.iopub_timeout = 10
# write the final HTML files into the _include/notebooks directory
c.FilesWriter.build_directory = "_includes/notebooks/"
|
mit
| -4,977,311,403,519,195,000
| 38.238095
| 132
| 0.551106
| false
| 4.175676
| false
| false
| false
|
KmolYuan/pyslvs
|
test/test_core.py
|
1
|
3764
|
# -*- coding: utf-8 -*-
"""Pyslvs core module test."""
__author__ = "Yuan Chang"
__copyright__ = "Copyright (C) 2016-2021"
__license__ = "AGPL"
__email__ = "pyslvs@gmail.com"
from math import sqrt, radians
from pyslvs import (
Coord, SolverSystem, pxy, ppp, plap, pllp, plpp, palp, expr_solving,
t_config, parse_vpoints, example_list,
)
from . import TestBase
class CoreTest(TestBase):
def test_pxy(self):
"""Test for pxy function."""
coord = pxy(Coord(80, 90), 40, -20)
self.assertAlmostEqual(120, coord.x)
self.assertAlmostEqual(70, coord.y)
def test_ppp(self):
"""Test for ppp function."""
coord = ppp(Coord(0, 0), Coord(0, 90), Coord(90, 0))
self.assertAlmostEqual(90, coord.x)
self.assertAlmostEqual(90, coord.y)
def test_plap(self):
"""Test for plap function."""
coord = plap(Coord(0, 0), 50 * sqrt(2), radians(45), Coord(50, 0))
self.assertAlmostEqual(50, coord.x)
self.assertAlmostEqual(50, coord.y)
def test_pllp(self):
"""Test for pllp function."""
c1 = Coord(-30, 0)
c2 = Coord(30, 0)
coord = pllp(c1, 50, 50, c2)
self.assertAlmostEqual(0, coord.x)
self.assertAlmostEqual(40, coord.y)
coord = pllp(c1, 30, 30, c2)
self.assertAlmostEqual(coord.x, 0)
self.assertAlmostEqual(coord.y, 0)
coord = pllp(c1, 90, 30, c2)
self.assertAlmostEqual(60, coord.x)
self.assertAlmostEqual(0, coord.y)
def test_plpp(self):
"""Test for plpp function."""
coord = plpp(Coord(0, 0), sqrt(5), Coord(0, -3), Coord(3 / 2, 0))
self.assertAlmostEqual(2, coord.x)
self.assertAlmostEqual(1, coord.y)
def test_palp(self):
"""Test for palp function."""
coord = palp(Coord(0, 0), radians(15), 20, Coord(60, 10))
self.assertAlmostEqual(42.253221, coord.x, 6)
self.assertAlmostEqual(19.222356, coord.y, 6)
def test_solving(self):
"""Test triangular formula solving.
+ Test for PMKS parser.
+ Test data collecting function.
+ Test expression solving function.
"""
def test_case(name: str):
expr, inputs = example_list(name)
vpoints = parse_vpoints(expr)
exprs = t_config(vpoints, inputs)
result = expr_solving(exprs, vpoints, {pair: 0. for pair in inputs})
return result[-1]
x, y = test_case("Jansen's linkage (Single)")
self.assertAlmostEqual(-43.170055, x, 6)
self.assertAlmostEqual(-91.753226, y, 6)
x, y = test_case("Crank slider (RP joint)")
self.assertAlmostEqual(103.801126, x, 6)
self.assertAlmostEqual(78.393173, y, 6)
x, y = test_case("Parallel Linkage")
self.assertAlmostEqual(200, x, 6)
self.assertAlmostEqual(0, y, 6)
# TODO: New test case for Inverted slider
def test_solving_bfgs(self):
"""Test Sketch Solve kernel."""
expr, _ = example_list("Jansen's linkage (Single)")
system = SolverSystem(parse_vpoints(expr), {(0, 1): 0.})
result = system.solve()
x, y = result[7]
self.assertAlmostEqual(-43.170055, x, 6)
self.assertAlmostEqual(-91.753226, y, 6)
# Test if angle value changed
system.set_inputs({(0, 1): 45.})
result = system.solve()
x, y = result[7]
self.assertAlmostEqual(-24.406394, x, 6)
self.assertAlmostEqual(-91.789596, y, 6)
# Test if link length changed
system.set_data({(0, 1): 16.})
result = system.solve()
x, y = result[7]
self.assertAlmostEqual(-24.117994, x, 6)
self.assertAlmostEqual(-91.198072, y, 6)
|
agpl-3.0
| -3,383,056,336,825,432,600
| 33.53211
| 80
| 0.582359
| false
| 3.328028
| true
| false
| false
|
bubbleboy14/cantools
|
cantools/scripts/index.py
|
1
|
8655
|
"""
### Usage: ctindex [--mode=MODE] [--domain=DOMAIN] [--port=PORT] [--skip=SKIP]
### Options:
-h, --help show this help message and exit
-m MODE, --mode=MODE may be: 'refcount' (default - count up all foreignkey
references for sort orders and such); 'index' (assign
each record a sequential integer index); 'urlsafekeys'
(update all key/keylist properties to use urlsafe keys
introduced in ct 0.8); 'cleanup' (delete zero-count
reference counters). Note regarding 'index' mode: it
_must_ happen remotely; it's generally unnecessary
unless you're trying to migrate an unindexed database
away from gae and need an index/key per record; it
should be invoked from _outside_ -- that's right,
outside -- of your project's directory (to avoid
loading up a bunch of google network tools that may be
crappy or cause issues outside of their normal
'dev_appserver' environment)
-d DOMAIN, --domain=DOMAIN
('index' mode only) what's the domain of the target
server? (default: localhost)
-p PORT, --port=PORT ('index' mode only) what's the port of the target
server? (default: 8080)
-s SKIP, --skip=SKIP skip these tables ('index' mode only) - use '|' as
separator, such as 'table1|table2|table3' (default:
none)
-i INDEX, --index=INDEX
start with this index ('index' mode only) (default: 0)
As you can see, this script's behavior changes according to the backend of the target project.
### dez
Run this if your CTRefCount records get messed up for
some reason. It will go through and recount everything
(in the default 'refcount' mode -- the other modes,
'urlsafekeys' and 'cleanup', are for migrating a CT-mediated
database from an older deployment to CT 0.8 or newer).
### gae
Run this in 'index' mode on a database with lots of missing index values.
"""
from getpass import getpass
from optparse import OptionParser
from cantools.util import error, log, batch
from cantools.db import get_schema, get_model, put_multi, delete_multi, unpad_key
from cantools.web import fetch
from cantools import config
if config.web.server == "dez":
from cantools.db import session, func, refresh_counter
try:
input = raw_input # py2/3 compatibility
except NameError:
pass
counts = { "_counters": 0 }
RETRIES = 5
#
# dez
#
def get_keys(kind, reference):
log("acquiring %s (%s) keys"%(kind, reference), 1)
mod = get_model(kind)
q = session.query(getattr(mod, "key"))
qcount = q.count()
log("found %s"%(qcount,), 2)
fname, fkey = reference.split(".")
fmod = get_model(fname)
fprop = getattr(fmod, fkey)
sub = session.query(fprop, func.count("*").label("sub_count")).group_by(fprop).subquery()
q = q.join(sub, mod.key==getattr(sub.c, fkey))
newcount = q.count()
log("filtering out %s untargetted entities"%(qcount - newcount), 2)
qcount = newcount
log("returning %s keys"%(qcount,), 2)
return q.all()
def refmap():
log("compiling back reference map")
rmap = {}
for tname, schema in list(get_schema().items()):
for pname, kinds in list(schema["_kinds"].items()):
reference = "%s.%s"%(tname, pname)
counts[reference] = 0
for kind in [k for k in kinds if k != "*"]: # skip wildcard for now
if kind not in rmap:
rmap[kind] = {}
rmap[kind][reference] = get_keys(kind, reference)
return rmap
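# Added note (hedged): refmap() returns a nested mapping of the shape
#   {<kind>: {"<table>.<property>": [(key,), (key,), ...]}}
# i.e. for every foreign-key reference pointing at <kind>, the keys of the
# <kind> rows that are actually referenced (single-member tuples, as returned
# by get_keys() above).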
def do_batch(chunk, reference):
log("refreshing %s %s keys"%(len(chunk), reference), 1)
i = 0
rc = []
for item in chunk: # item is single-member tuple
rc.append(refresh_counter(item[0], reference))
i += 1
if not i % 100:
log("processed %s"%(i,), 3)
counts[reference] += len(chunk)
counts["_counters"] += len(rc)
log("refreshed %s total"%(counts[reference],), 2)
log("updated %s counters"%(counts["_counters"],), 2)
put_multi(rc)
log("saved", 2)
def refcount():
log("indexing foreignkey references throughout database", important=True)
import model # load schema
for kind, references in list(refmap().items()):
log("processing table: %s"%(kind,), important=True)
for reference, keys in list(references.items()):
batch(keys, do_batch, reference)
tcount = sum(counts.values()) - counts["_counters"]
log("refreshed %s rows and updated %s counters"%(tcount, counts["_counters"]), important=True)
#
# gae
#
def _log_fetch(host, url, port):
res = fetch(host, url, port)
log(res)
return res
def _index_kind(kind, host, port, pw, index):
log("indexing %s"%(kind,), important=True)
retry = 0
while "Error" in _log_fetch(host, "/_db?action=index&pw=%s&kind=%s&index=%s"%(pw, kind, index), port):
log("error indexing %s"%(kind,), important=True)
if retry == RETRIES:
error("tried %s times! sorry."%(retry,))
retry += 1
log("trying again (retry: %s)"%(retry,))
def index(host, port, skips, index):
pw = getpass("what's the admin password? ")
log("indexing db at %s:%s"%(host, port), important=True)
# log(fetch(host, "/_db?action=index&pw=%s"%(pw,), port))
log("acquiring schema")
schema = fetch(host, "/_db?action=schema", port, ctjson=True)
for kind in schema:
if kind in skips:
log("skipping %s"%(kind,), important=True)
else:
_index_kind(kind, host, port, pw, index)
#
# url safety
#
def urlsafe():
log("updating key/keylist properties with urlsafe keys", important=True)
import model
schema = get_schema()
puts = []
for mod in schema:
mods = get_model(mod).query().all()
log("%s (%s)"%(mod, len(mods)), 1)
for m in mods:
if m.polytype != mod:
log("skipping! (%s != %s)"%(m.polytype, mod), 2)
continue
m.key = unpad_key(m.key.urlsafe())
for prop in schema[mod]["_kinds"]:
if schema[mod][prop] == "key":
setattr(m, prop, unpad_key(getattr(m, prop).urlsafe()))
else: # keylist
setattr(m, prop, [unpad_key(k.urlsafe()) for k in getattr(m, prop)])
puts.append(m)
log("saving records")
put_multi(puts)
log("updated %s keys"%(len(puts),), important=True)
if input("want to prune zero-count reference counters? (y/N)").lower().startswith("y"):
cleanup()
def cleanup():
log("cleaning up zero-count reference counters", important=True)
from cantools.db import lookup
ctrz = lookup.CTRefCount.query(lookup.CTRefCount.count == 0).all()
log("deleting %s zero-count reference counters"%(len(ctrz),))
delete_multi(ctrz)
log("all gone!")
def go():
parser = OptionParser("ctindex [--mode=MODE] [--domain=DOMAIN] [--port=PORT] [--skip=SKIP]")
parser.add_option("-m", "--mode", dest="mode", default="refcount",
help="may be: 'refcount' (default - count up all foreignkey references for sort "
"orders and such); 'index' (assign each record a sequential integer index); "
"'urlsafekeys' (update all key/keylist properties to use urlsafe keys "
"introduced in ct 0.8); 'cleanup' (delete zero-count reference counters). "
"Note regarding 'index' mode: it _must_ happen remotely; it's generally "
"unnecessary unless you're trying to migrate an unindexed database away from "
"gae and need an index/key per record; it should be invoked from _outside_ "
"-- that's right, outside -- of your project's directory (to avoid loading "
"up a bunch of google network tools that may be crappy or cause issues outside "
"of their normal 'dev_appserver' environment)")
parser.add_option("-d", "--domain", dest="domain", default="localhost",
help="('index' mode only) what's the domain of the target server? (default: localhost)")
parser.add_option("-p", "--port", dest="port", default="8080",
help="('index' mode only) what's the port of the target server? (default: 8080)")
parser.add_option("-s", "--skip", dest="skip", default="",
help="skip these tables ('index' mode only) - use '|' as separator, such as 'table1|table2|table3' (default: none)")
parser.add_option("-i", "--index", dest="index", default=0,
help="start with this index ('index' mode only) (default: 0)")
options, args = parser.parse_args()
log("mode: %s"%(options.mode,), important=True)
if options.mode == "refcount":
refcount()
elif options.mode == "index":
index(options.domain, int(options.port),
options.skip and options.skip.split("|") or [], options.index)
elif options.mode == "urlsafekeys":
urlsafe()
elif options.mode == "cleanup":
cleanup()
else:
error("unknown mode: %s"%(options.mode,))
log("goodbye")
if __name__ == "__main__":
go()
|
mit
| -251,470,952,256,595,680
| 37.300885
| 118
| 0.645407
| false
| 3.305959
| false
| false
| false
|
mpeuster/son-emu
|
src/emuvim/api/openstack/openstack_dummies/keystone_dummy_api.py
|
1
|
16828
|
# Copyright (c) 2015 SONATA-NFV and Paderborn University
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, Paderborn University
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
from flask_restful import Resource
from flask import request, Response
from emuvim.api.openstack.openstack_dummies.base_openstack_dummy import BaseOpenstackDummy
from emuvim.api.openstack.helper import get_host
import logging
import json
LOG = logging.getLogger("api.openstack.keystone")
class KeystoneDummyApi(BaseOpenstackDummy):
def __init__(self, in_ip, in_port):
super(KeystoneDummyApi, self).__init__(in_ip, in_port)
self.api.add_resource(KeystoneListVersions, "/",
resource_class_kwargs={'api': self})
self.api.add_resource(KeystoneShowAPIv2, "/v2.0",
resource_class_kwargs={'api': self})
self.api.add_resource(KeystoneGetToken, "/v2.0/tokens",
resource_class_kwargs={'api': self})
self.api.add_resource(KeystoneShowAPIv3, "/v3.0",
resource_class_kwargs={'api': self})
self.api.add_resource(
KeystoneGetTokenv3, "/v3.0/auth/tokens", resource_class_kwargs={'api': self})
class KeystoneListVersions(Resource):
"""
List all known keystone versions.
Hardcoded for our version!
"""
def __init__(self, api):
self.api = api
def get(self):
"""
List API versions.
:return: Returns the api versions.
:rtype: :class:`flask.response` containing a static json encoded dict.
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
resp = dict()
resp['versions'] = dict()
version = [{
"id": "v2.0",
"links": [
{
"href": "http://%s:%d/v2.0" % (get_host(request), self.api.port),
"rel": "self"
}
],
"media-types": [
{
"base": "application/json",
"type": "application/vnd.openstack.identity-v2.0+json"
}
],
"status": "stable",
"updated": "2014-04-17T00:00:00Z"
}]
resp['versions']['values'] = version
return Response(json.dumps(resp), status=200,
mimetype='application/json')
class KeystoneShowAPIv2(Resource):
"""
Entrypoint for all openstack clients.
This returns all current entrypoints running on son-emu.
"""
def __init__(self, api):
self.api = api
def get(self):
"""
List API entrypoints.
:return: Returns an openstack style response for all entrypoints.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
# neutron_port = self.api.port + 4696
# heat_port = self.api.port + 3004
resp = dict()
resp['version'] = {
"status": "stable",
"media-types": [
{
"base": "application/json",
"type": "application/vnd.openstack.identity-v2.0+json"
}
],
"id": "v2.0",
"links": [
{
"href": "http://%s:%d/v2.0" % (get_host(request), self.api.port),
"rel": "self"
}
]
}
LOG.debug(json.dumps(resp))
return Response(json.dumps(resp), status=200,
mimetype='application/json')
class KeystoneShowAPIv3(Resource):
"""
Entrypoint for all openstack clients.
This returns all current entrypoints running on son-emu.
"""
def __init__(self, api):
self.api = api
def get(self):
"""
List API entrypoints.
:return: Returns an openstack style response for all entrypoints.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s GET" % str(self.__class__.__name__))
# neutron_port = self.api.port + 4696
# heat_port = self.api.port + 3004
resp = dict()
resp['version'] = {
"status": "stable",
"media-types": [
{
"base": "application/json",
"type": "application/vnd.openstack.identity-v3.0+json"
}
],
"id": "v3.0",
"links": [
{
"href": "http://%s:%d/v3.0" % (get_host(request), self.api.port),
"rel": "self"
}
]
}
return Response(json.dumps(resp), status=200,
mimetype='application/json')
class KeystoneGetToken(Resource):
"""
Returns a static keystone token.
We don't do any validation so we don't care.
"""
def __init__(self, api):
self.api = api
def post(self):
"""
List API entrypoints.
This is hardcoded. For a working "authentication" use these ENVVARS:
* OS_AUTH_URL=http://<ip>:<port>/v2.0
* OS_IDENTITY_API_VERSION=2.0
* OS_TENANT_ID=fc394f2ab2df4114bde39905f800dc57
* OS_REGION_NAME=RegionOne
* OS_USERNAME=bla
* OS_PASSWORD=bla
:return: Returns an openstack style response for all entrypoints.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
ret = dict()
req = json.loads(request.data)
ret['access'] = dict()
ret['access']['token'] = dict()
token = ret['access']['token']
token['issued_at'] = "2014-01-30T15:30:58.819Z"
token['expires'] = "2999-01-30T15:30:58.819Z"
token['id'] = req['auth'].get(
'token', {'id': 'fc394f2ab2df4114bde39905f800dc57'}).get('id')
token['tenant'] = dict()
token['tenant']['description'] = None
token['tenant']['enabled'] = True
token['tenant']['id'] = req['auth'].get(
'tenantId', 'fc394f2ab2df4114bde39905f800dc57')
token['tenant']['name'] = "tenantName"
ret['access']['user'] = dict()
user = ret['access']['user']
user['username'] = req.get('username', "username")
user['name'] = "tenantName"
user['roles_links'] = list()
user['id'] = token['tenant'].get(
'id', "fc394f2ab2df4114bde39905f800dc57")
user['roles'] = [{'name': 'Member'}]
ret['access']['region_name'] = "RegionOne"
ret['access']['serviceCatalog'] = [{
"endpoints": [
{
"adminURL": "http://%s:%s/v2.1/%s" % (get_host(request), self.api.port + 3774, user['id']),
"region": "RegionOne",
"internalURL": "http://%s:%s/v2.1/%s" % (get_host(request), self.api.port + 3774, user['id']),
"id": "2dad48f09e2a447a9bf852bcd93548ef",
"publicURL": "http://%s:%s/v2.1/%s" % (get_host(request), self.api.port + 3774, user['id'])
}
],
"endpoints_links": [],
"type": "compute",
"name": "nova"
},
{
"endpoints": [
{
"adminURL": "http://%s:%s/v2.0" % (get_host(request), self.api.port),
"region": "RegionOne",
"internalURL": "http://%s:%s/v2.0" % (get_host(request), self.api.port),
"id": "2dad48f09e2a447a9bf852bcd93543fc",
"publicURL": "http://%s:%s/v2" % (get_host(request), self.api.port)
}
],
"endpoints_links": [],
"type": "identity",
"name": "keystone"
},
{
"endpoints": [
{
"adminURL": "http://%s:%s" % (get_host(request), self.api.port + 4696),
"region": "RegionOne",
"internalURL": "http://%s:%s" % (get_host(request), self.api.port + 4696),
"id": "2dad48f09e2a447a9bf852bcd93548cf",
"publicURL": "http://%s:%s" % (get_host(request), self.api.port + 4696)
}
],
"endpoints_links": [],
"type": "network",
"name": "neutron"
},
{
"endpoints": [
{
"adminURL": "http://%s:%s" % (get_host(request), self.api.port + 4242),
"region": "RegionOne",
"internalURL": "http://%s:%s" % (get_host(request), self.api.port + 4242),
"id": "2dad48f09e2a447a9bf852bcd93548cf",
"publicURL": "http://%s:%s" % (get_host(request), self.api.port + 4242)
}
],
"endpoints_links": [],
"type": "image",
"name": "glance"
},
{
"endpoints": [
{
"adminURL": "http://%s:%s/v1/%s" % (get_host(request), self.api.port + 3004, user['id']),
"region": "RegionOne",
"internalURL": "http://%s:%s/v1/%s" % (get_host(request), self.api.port + 3004, user['id']),
"id": "2dad48f09e2a447a9bf852bcd93548bf",
"publicURL": "http://%s:%s/v1/%s" % (get_host(request), self.api.port + 3004, user['id'])
}
],
"endpoints_links": [],
"type": "orchestration",
"name": "heat"
}
]
ret['access']["metadata"] = {
"is_admin": 0,
"roles": [
"7598ac3c634d4c3da4b9126a5f67ca2b"
]
}
ret['access']['trust'] = {
"id": "394998fa61f14736b1f0c1f322882949",
"trustee_user_id": "269348fdd9374b8885da1418e0730af1",
"trustor_user_id": "3ec3164f750146be97f21559ee4d9c51",
"impersonation": False
}
return Response(json.dumps(ret), status=200,
mimetype='application/json')
except Exception as ex:
logging.exception("Keystone: Get token failed.")
return ex.message, 500
class KeystoneGetTokenv3(Resource):
"""
Returns a static keystone token.
We don't do any validation so we don't care.
"""
def __init__(self, api):
self.api = api
def post(self):
"""
List API entrypoints.
This is hardcoded. For a working "authentication" use these ENVVARS:
* OS_AUTH_URL=http://<ip>:<port>/v3
* OS_IDENTITY_API_VERSION=2.0
* OS_TENANT_ID=fc394f2ab2df4114bde39905f800dc57
* OS_REGION_NAME=RegionOne
* OS_USERNAME=bla
* OS_PASSWORD=bla
:return: Returns an openstack style response for all entrypoints.
:rtype: :class:`flask.response`
"""
LOG.debug("API CALL: %s POST" % str(self.__class__.__name__))
try:
ret = dict()
req = json.loads(request.data)
ret['token'] = dict()
token = ret['token']
token['issued_at'] = "2014-01-30T15:30:58.819Z"
token['expires_at'] = "2999-01-30T15:30:58.819Z"
token['methods'] = ["password"]
token['extras'] = dict()
token['user'] = dict()
user = token['user']
user['id'] = req['auth'].get(
'token', {'id': 'fc394f2ab2df4114bde39905f800dc57'}).get('id')
user['name'] = "tenantName"
user['password_expires_at'] = None
user['domain'] = {"id": "default", "name": "Default"}
token['audit_ids'] = ["ZzZwkUflQfygX7pdYDBCQQ"]
# project
token['project'] = {
"domain": {
"id": "default",
"name": "Default"
},
"id": "8538a3f13f9541b28c2620eb19065e45",
"name": "tenantName"
}
# catalog
token['catalog'] = [{
"endpoints": [
{
"url": "http://%s:%s/v2.1/%s" % (get_host(request), self.api.port + 3774, user['id']),
"region": "RegionOne",
"interface": "public",
"id": "2dad48f09e2a447a9bf852bcd93548ef"
}
],
"id": "2dad48f09e2a447a9bf852bcd93548ef",
"type": "compute",
"name": "nova"
},
{
"endpoints": [
{
"url": "http://%s:%s/v2.0" % (get_host(request), self.api.port),
"region": "RegionOne",
"interface": "public",
"id": "2dad48f09e2a447a9bf852bcd93543fc"
}
],
"id": "2dad48f09e2a447a9bf852bcd93543fc",
"type": "identity",
"name": "keystone"
},
{
"endpoints": [
{
"url": "http://%s:%s" % (get_host(request), self.api.port + 4696),
"region": "RegionOne",
"interface": "public",
"id": "2dad48f09e2a447a9bf852bcd93548cf"
}
],
"id": "2dad48f09e2a447a9bf852bcd93548cf",
"type": "network",
"name": "neutron"
},
{
"endpoints": [
{
"url": "http://%s:%s" % (get_host(request), self.api.port + 4242),
"region": "RegionOne",
"interface": "public",
"id": "2dad48f09e2a447a9bf852bcd93548cf"
}
],
"id": "2dad48f09e2a447a9bf852bcd93548cf",
"type": "image",
"name": "glance"
},
{
"endpoints": [
{
"url": "http://%s:%s/v1/%s" % (get_host(request), self.api.port + 3004, user['id']),
"region": "RegionOne",
"interface": "public",
"id": "2dad48f09e2a447a9bf852bcd93548bf"
}
],
"id": "2dad48f09e2a447a9bf852bcd93548bf",
"type": "orchestration",
"name": "heat"
}
]
return Response(json.dumps(ret), status=201,
mimetype='application/json')
except Exception as ex:
logging.exception("Keystone: Get token failed.")
return ex.message, 500
|
apache-2.0
| -1,443,519,102,164,560,100
| 35.822757
| 120
| 0.455253
| false
| 4
| false
| false
| false
|
arpadpe/plover
|
plover/machine/keyboard.py
|
1
|
4230
|
# Copyright (c) 2010 Joshua Harlan Lifton.
# See LICENSE.txt for details.
"For use with a computer keyboard (preferably NKRO) as a steno machine."
from plover.machine.base import StenotypeBase
from plover.oslayer.keyboardcontrol import KeyboardCapture
class Keyboard(StenotypeBase):
"""Standard stenotype interface for a computer keyboard.
This class implements the three methods necessary for a standard
stenotype interface: start_capture, stop_capture, and
add_callback.
"""
KEYS_LAYOUT = KeyboardCapture.SUPPORTED_KEYS_LAYOUT
ACTIONS = StenotypeBase.ACTIONS + ('arpeggiate',)
def __init__(self, params):
"""Monitor the keyboard's events."""
super(Keyboard, self).__init__()
self.arpeggiate = params['arpeggiate']
self._bindings = {}
self._down_keys = set()
self._released_keys = set()
self._keyboard_capture = None
self._last_stroke_key_down_count = 0
self._update_bindings()
def _update_bindings(self):
self._bindings = dict(self.keymap.get_bindings())
for key, mapping in list(self._bindings.items()):
if 'no-op' == mapping:
self._bindings[key] = None
elif 'arpeggiate' == mapping:
if self.arpeggiate:
self._bindings[key] = None
self._arpeggiate_key = key
else:
# Don't suppress arpeggiate key if it's not used.
del self._bindings[key]
def set_mappings(self, mappings):
super(Keyboard, self).set_mappings(mappings)
self._update_bindings()
def start_capture(self):
"""Begin listening for output from the stenotype machine."""
self._released_keys.clear()
self._last_stroke_key_down_count = 0
self._initializing()
try:
self._keyboard_capture = KeyboardCapture()
self._keyboard_capture.key_down = self._key_down
self._keyboard_capture.key_up = self._key_up
self._keyboard_capture.start()
except:
self._error()
raise
self._ready()
def stop_capture(self):
"""Stop listening for output from the stenotype machine."""
if self._keyboard_capture is not None:
self._keyboard_capture.cancel()
self._keyboard_capture = None
self._stopped()
def set_suppression(self, enabled):
suppressed_keys = self._bindings.keys() if enabled else ()
self._keyboard_capture.suppress_keyboard(suppressed_keys)
def suppress_last_stroke(self, send_backspaces):
send_backspaces(self._last_stroke_key_down_count)
def _key_down(self, key):
"""Called when a key is pressed."""
assert key is not None
if key in self._bindings:
self._last_stroke_key_down_count += 1
steno_key = self._bindings.get(key)
if steno_key is not None:
self._down_keys.add(steno_key)
def _key_up(self, key):
"""Called when a key is released."""
assert key is not None
steno_key = self._bindings.get(key)
if steno_key is not None:
# Process the newly released key.
self._released_keys.add(steno_key)
# Remove invalid released keys.
self._released_keys = self._released_keys.intersection(self._down_keys)
# A stroke is complete if all pressed keys have been released.
# If we are in arpeggiate mode then only send stroke when spacebar is pressed.
send_strokes = bool(self._down_keys and
self._down_keys == self._released_keys)
if self.arpeggiate:
send_strokes &= key == self._arpeggiate_key
if send_strokes:
steno_keys = list(self._down_keys)
if steno_keys:
self._down_keys.clear()
self._released_keys.clear()
self._notify(steno_keys)
self._last_stroke_key_down_count = 0
@classmethod
def get_option_info(cls):
bool_converter = lambda s: s == 'True'
return {
'arpeggiate': (False, bool_converter),
}
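# Added note (hedged): get_option_info() exposes machine options as
# {name: (default, converter)} pairs, so a configuration layer could do e.g.:
#   default, convert = Keyboard.get_option_info()['arpeggiate']
#   convert('True')   # -> True
#   convert('False')  # -> False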
|
gpl-2.0
| -4,720,774,508,469,126,000
| 35.153846
| 86
| 0.591253
| false
| 3.960674
| false
| false
| false
|
santisiri/popego
|
envs/ALPHA-POPEGO/lib/python2.5/site-packages/nose-0.10.1-py2.5.egg/nose/plugins/isolate.py
|
1
|
3674
|
"""Use the isolation plugin with --with-isolation or the
NOSE_WITH_ISOLATION environment variable to clean sys.modules after
each test module is loaded and executed.
The isolation module is in effect similar to wrapping the following
functions around the import and execution of each test module::
def setup(module):
module._mods = sys.modules.copy()
def teardown(module):
to_del = [ m for m in sys.modules.keys() if m not in
module._mods ]
for mod in to_del:
del sys.modules[mod]
sys.modules.update(module._mods)
Isolation works only during lazy loading. In normal use, this is only
during discovery of modules within a directory, where the process of
importing, loading tests and running tests from each module is
encapsulated in a single loadTestsFromName call. This plugin
implements loadTestsFromNames to force the same lazy-loading there,
which allows isolation to work in directed mode as well as discovery,
at the cost of some efficiency: lazy-loading names forces full context
setup and teardown to run for each name, defeating the grouping that
is normally used to ensure that context setup and teardown are run the
fewest possible times for a given set of names.
PLEASE NOTE that this plugin should not be used in conjunction with
other plugins that assume that modules once imported will stay
imported; for instance, it may cause very odd results when used with
the coverage plugin.
"""
import logging
import sys
from nose.plugins import Plugin
log = logging.getLogger('nose.plugins.isolation')
class IsolationPlugin(Plugin):
"""
Activate the isolation plugin to isolate changes to external
modules to a single test module or package. The isolation plugin
resets the contents of sys.modules after each test module or
package runs to its state before the test. PLEASE NOTE that this
plugin should not be used with the coverage plugin, or in any other case
where module reloading may produce undesirable side-effects.
"""
score = 10 # I want to be last
name = 'isolation'
def configure(self, options, conf):
Plugin.configure(self, options, conf)
self._mod_stack = []
def beforeContext(self):
"""Copy sys.modules onto my mod stack
"""
mods = sys.modules.copy()
self._mod_stack.append(mods)
def afterContext(self):
"""Pop my mod stack and restore sys.modules to the state
it was in when mod stack was pushed.
"""
mods = self._mod_stack.pop()
to_del = [ m for m in sys.modules.keys() if m not in mods ]
if to_del:
log.debug('removing sys modules entries: %s', to_del)
for mod in to_del:
del sys.modules[mod]
sys.modules.update(mods)
def loadTestsFromNames(self, names, module=None):
"""Create a lazy suite that calls beforeContext and afterContext
around each name. The side-effect of this is that full context
fixtures will be set up and torn down around each test named.
"""
# Fast path for when we don't care
if not names or len(names) == 1:
return
loader = self.loader
plugins = self.conf.plugins
def lazy():
for name in names:
plugins.beforeContext()
yield loader.loadTestsFromName(name, module=module)
plugins.afterContext()
return (loader.suiteClass(lazy), [])
def prepareTestLoader(self, loader):
"""Get handle on test loader so we can use it in loadTestsFromNames.
"""
self.loader = loader
|
bsd-3-clause
| -7,653,814,076,835,601,000
| 36.489796
| 76
| 0.67828
| false
| 4.431846
| true
| false
| false
|
graik/labhamster
|
labhamster/admin.py
|
1
|
12409
|
## Copyright 2016 - 2018 Raik Gruenberg
## This file is part of the LabHamster project (https://github.com/graik/labhamster).
## LabHamster is released under the MIT open source license, which you can find
## along with this project (LICENSE) or at <https://opensource.org/licenses/MIT>.
from __future__ import unicode_literals
from labhamster.models import *
from django.contrib import admin
import django.forms
from django.http import HttpResponse
import django.utils.html as html
import customforms
def export_csv(request, queryset, fields):
"""
Helper method for Admin make_csv action. Exports selected objects as
CSV file.
fields - OrderedDict of name / field pairs, see Product.make_csv for example
"""
import csv
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=orders.csv'
writer = csv.writer(response)
writer.writerow(fields.keys())
for o in queryset:
columns = []
for name,value in fields.items():
try:
columns.append( eval('o.%s'%value) )
except:
columns.append("") ## capture 'None' fields
columns = [ c.encode('utf-8') if type(c) is unicode else c \
for c in columns]
writer.writerow( columns )
return response
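# Illustrative call (added; mirrors the OrderedDict pattern used by the make_csv
# admin actions below, with placeholder field names):
#   fields = OrderedDict([('Name', 'name'), ('Vendor', 'vendor.name')])
#   return export_csv(request, queryset, fields)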
class RequestFormAdmin(admin.ModelAdmin):
"""
ModelAdmin that adds a 'request' field to the form generated by the Admin.
This allows, for example, extracting the user ID while the form is being created.
"""
def get_form(self, request, obj=None, **kwargs):
"""
Assign request variable to form
http://stackoverflow.com/questions/1057252/how-do-i-access-the-request-object-or-any-other-variable-in-a-forms-clean-met
(last answer, much simpler than Django 1.6 version)
"""
form = super(RequestFormAdmin, self).get_form(request, obj=obj, **kwargs)
form.request = request
return form
class GrantAdmin(admin.ModelAdmin):
ordering = ('name',)
admin.site.register(Grant, GrantAdmin)
class CategoryAdmin(admin.ModelAdmin):
ordering = ('name',)
admin.site.register(Category, CategoryAdmin)
class VendorAdmin(admin.ModelAdmin):
fieldsets = ((None, {'fields': (('name',),
('link', 'login', 'password'),)}),
('Contact', {'fields' : (('contact',),
('email','phone'),)})
)
list_display = ('name', 'link', 'login', 'password')
ordering = ('name',)
search_fields = ('name', 'contact')
admin.site.register(Vendor, VendorAdmin)
class ProductAdmin(admin.ModelAdmin):
fieldsets = ((None, {'fields': (('name', 'category'),
('vendor', 'catalog'),
('manufacturer', 'manufacturer_catalog'),
'link',
('status', 'shelflife'),
'comment',
'location')}),)
list_display = ('name', 'show_vendor', 'category', 'show_catalog',
'status')
list_filter = ('status', 'category', 'vendor')
ordering = ('name',)
search_fields = ('name', 'comment', 'catalog', 'location', 'vendor__name',
'manufacturer__name', 'manufacturer_catalog')
save_as = True
actions = ['make_ok',
'make_low',
'make_out',
'make_deprecated',
'make_csv']
## reduce size of Description text field.
formfield_overrides = {
models.TextField: {'widget': django.forms.Textarea(
attrs={'rows': 4,
'cols': 80})},
}
def make_ok(self, request, queryset):
n = queryset.update(status='ok')
self.message_user(request, '%i products were updated' % n)
make_ok.short_description = 'Mark selected entries as in stock'
def make_low(self, request, queryset):
n = queryset.update(status='low')
self.message_user(request, '%i products were updated' % n)
make_low.short_description = 'Mark selected entries as running low'
def make_out(self, request, queryset):
n = queryset.update(status='out')
self.message_user(request, '%i products were updated' % n)
make_out.short_description = 'Mark selected entries as out of stock'
def make_deprecated(self, request, queryset):
n = queryset.update(status='deprecated')
self.message_user(request, '%i products were updated' % n)
make_deprecated.short_description = 'Mark selected entries as deprecated'
def make_csv(self, request, queryset):
from collections import OrderedDict
fields = OrderedDict( [('Name', 'name'),
('Vendor', 'vendor.name'),
('Vendor Catalog','catalog'),
('Manufacturer', 'manufacturer.name'),
('Manufacturer Catalog', 'manufacturer_catalog'),
('Category','category.name'),
('Shelf_life','shelflife'),
('Status','status'),
('Location','location'),
('Link','link'),
('Comment','comment')])
return export_csv( request, queryset, fields)
make_csv.short_description = 'Export products as CSV'
## note: this currently breaks the selection of products from the
## order form "lense" button
def show_name(self, o):
"""truncate product name to less than 40 char"""
from django.utils.safestring import SafeUnicode
return html.format_html(
'<a href="{url}" title="{comment}">{name}</a>',
url=o.get_absolute_url(),
name=T.truncate(o.name, 40),
comment=SafeUnicode(o.comment))
show_name.short_description = 'Name'
show_name.admin_order_field = 'name'
def show_vendor(self, o):
"""Display in table: Vendor (Manufacturer)"""
r = o.vendor.name
if o.manufacturer:
r += '<br>(%s)' % o.manufacturer.name
return html.format_html(r)
show_vendor.admin_order_field = 'vendor'
show_vendor.short_description = 'Vendor'
def show_catalog(self, o):
return T.truncate(o.catalog, 15)
show_catalog.short_description = 'Catalog'
show_catalog.admin_order_field = 'catalog'
admin.site.register(Product, ProductAdmin)
class OrderAdmin(RequestFormAdmin):
form = customforms.OrderForm
raw_id_fields = ('product',)
fieldsets = ((None,
{'fields': (('status', 'is_urgent', 'product',),
('created_by', 'ordered_by', 'date_ordered',
'date_received'))}),
('Details', {'fields': (('unit_size', 'quantity'),
('price', 'po_number'),
('grant', 'grant_category'),
'comment')}))
radio_fields = {'grant': admin.VERTICAL,
'grant_category': admin.VERTICAL}
list_display = ('show_title', 'Status', 'show_urgent',
'show_quantity', 'show_price',
'requested', 'show_requestedby', 'ordered',
'received', 'show_comment',)
list_filter = ('status',
'product__category__name', 'grant', 'created_by', 'product__vendor__name',)
ordering = ('-date_created', 'product', '-date_ordered') #, 'price')
search_fields = ('comment', 'grant__name', 'grant__grant_id', 'product__name',
'product__vendor__name')
save_as = True
date_hierarchy = 'date_created'
actions = ['make_ordered', 'make_received', 'make_cancelled', 'make_csv']
def show_title(self, o):
"""truncate product name + supplier to less than 40 char"""
n = T.truncate(o.product.name, 40)
v = o.product.vendor.name
r = html.format_html('<a href="{}">{}', o.get_absolute_url(), n)
r += '<br>' if len(n) + len(v) > 37 else ' '
r += html.format_html('[{}]</a>',v)
return html.mark_safe(r)
show_title.short_description = 'Product'
def show_comment(self, obj):
"""
@return: str; truncated comment with full comment mouse-over
"""
if not obj.comment:
return ''
if len(obj.comment) < 30:
return obj.comment
r = '<a title="%s">%s</a>' % (obj.comment, T.truncate(obj.comment, 30))
return r
show_comment.short_description = 'comment'
show_comment.allow_tags = True
def show_price(self, o):
"""Workaround for bug in djmoney -- MoneyField confuses Admin formatting"""
if not o.price:
return ''
return o.price
show_price.admin_order_field = 'price'
show_price.short_description = 'Unit price'
def show_urgent(self, o):
"""Show exclamation mark if order is urgent"""
if not o.is_urgent:
return ''
return html.format_html(
'<big>❗</big>')
show_urgent.admin_order_field = 'is_urgent'
show_urgent.short_description = '!'
def show_requestedby(self,o):
return o.created_by
show_requestedby.admin_order_field = 'created_by'
show_requestedby.short_description = 'By'
def show_quantity(self, o):
return o.quantity
show_quantity.short_description = 'Q'
def make_ordered(self, request, queryset):
"""
Mark several orders as 'ordered'
see: https://docs.djangoproject.com/en/1.4/ref/contrib/admin/actions/
"""
import datetime
n = queryset.update(status='ordered', ordered_by=request.user,
date_ordered=datetime.datetime.now())
self.message_user(request, '%i orders were updated' % n)
make_ordered.short_description = 'Mark selected entries as ordered'
def make_received(self, request, queryset):
import datetime
n = queryset.update(date_received=datetime.datetime.now(),
status='received')
i = 0
for order in queryset:
order.product.status = 'ok'
order.product.save()
i += 1
self.message_user(request,
'%i orders were updated and %i products set to "in stock"'\
% (n, i))
make_received.short_description= 'Mark as received (and update product status)'
def make_cancelled(self, request, queryset):
import datetime
n = queryset.update(date_received=None, date_ordered=None,
status='cancelled')
self.message_user(request, '%i orders were set to cancelled' % n)
make_cancelled.short_description = 'Mark selected entries as cancelled'
def make_csv(self, request, queryset):
"""
Export selected orders as CSV file
"""
from collections import OrderedDict
fields = OrderedDict( [('Product', 'product.name'),
('Quantity', 'quantity'),
('Price','price'),
('Vendor','product.vendor.name'),
('Catalog','product.catalog'),
('PO Number', 'po_number'),
('Requested','date_created'),
('Requested by','created_by.username'),
('Ordered','date_ordered'),
('Ordered by','ordered_by.username'),
('Received','date_received'),
('Status','status'),
('Urgent','is_urgent'),
('Comment','comment')])
return export_csv(request, queryset, fields)
make_csv.short_description = 'Export orders as CSV'
admin.site.register(Order, OrderAdmin)
|
mit
| 5,380,798,477,713,165,000
| 34.864162
| 128
| 0.539608
| false
| 4.361687
| false
| false
| false
|
wa3l/mailr
|
email_model.py
|
1
|
1590
|
from flask.ext.sqlalchemy import SQLAlchemy
import html2text as convert
import time
db = SQLAlchemy()
class Email(db.Model):
"""
Email model
Store emails going through the app in a database.
"""
id = db.Column(db.Integer, primary_key=True)
to_email = db.Column(db.String(254))
to_name = db.Column(db.String(256))
from_email = db.Column(db.String(254))
from_name = db.Column(db.String(256))
subject = db.Column(db.String(78))
html = db.Column(db.UnicodeText)
text = db.Column(db.UnicodeText)
service = db.Column(db.String(10))
deliverytime = db.Column(db.BigInteger)
def __init__(self, data):
self.to_email = data['to']
self.to_name = data['to_name']
self.from_email = data['from']
self.from_name = data['from_name']
self.subject = data['subject']
self.html = data['body']
self.text = convert.html2text(data['body'])
self.service = data['service'] if data.has_key('service') else None
if data.has_key('deliverytime'):
self.deliverytime = int(data['deliverytime'])
else:
self.deliverytime = int(time.time())
def __str__(self):
return str({
'to': self.to_email,
'from': self.from_email,
'to_name': self.to_name,
'from_name': self.from_name,
'subject': self.subject,
'text': self.text,
'html': self.html,
'service': self.service,
'deliverytime': str(self.deliverytime)
})
def __repr__(self):
return str(self)
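# --- Added sketch (hedged): constructing an Email record ---
# The dict keys follow Email.__init__ above; all values are placeholders, and
# persisting the object requires the usual Flask-SQLAlchemy app/db setup.
def _example_email():
    return Email({
        'to': 'alice@example.com', 'to_name': 'Alice',
        'from': 'bob@example.com', 'from_name': 'Bob',
        'subject': 'Hello', 'body': '<p>Hello</p>',
    })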
|
mit
| -104,383,001,852,401,000
| 28.444444
| 74
| 0.583648
| false
| 3.319415
| false
| false
| false
|
juju/juju-gui-charm
|
hooks/charmhelpers/core/templating.py
|
1
|
3186
|
# Copyright 2014-2015 Canonical Limited.
#
# This file is part of charm-helpers.
#
# charm-helpers is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3 as
# published by the Free Software Foundation.
#
# charm-helpers is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.
import os
from charmhelpers.core import host
from charmhelpers.core import hookenv
def render(source, target, context, owner='root', group='root',
perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
"""
Render a template.
The `source` path, if not absolute, is relative to the `templates_dir`.
The `target` path should be absolute. It can also be `None`, in which
case no file will be written.
The context should be a dict containing the values to be replaced in the
template.
The `owner`, `group`, and `perms` options will be passed to `write_file`.
If omitted, `templates_dir` defaults to the `templates` folder in the charm.
The rendered template will be written to the file as well as being returned
as a string.
Note: Using this requires python-jinja2; if it is not installed, calling
this will attempt to use charmhelpers.fetch.apt_install to install it.
"""
try:
from jinja2 import FileSystemLoader, Environment, exceptions
except ImportError:
try:
from charmhelpers.fetch import apt_install
except ImportError:
hookenv.log('Could not import jinja2, and could not import '
'charmhelpers.fetch to install it',
level=hookenv.ERROR)
raise
apt_install('python-jinja2', fatal=True)
from jinja2 import FileSystemLoader, Environment, exceptions
if template_loader:
template_env = Environment(loader=template_loader)
else:
if templates_dir is None:
templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
template_env = Environment(loader=FileSystemLoader(templates_dir))
try:
template = template_env.get_template(source)
except exceptions.TemplateNotFound as e:
hookenv.log('Could not load template %s from %s.' %
(source, templates_dir),
level=hookenv.ERROR)
raise e
content = template.render(context)
if target is not None:
target_dir = os.path.dirname(target)
if not os.path.exists(target_dir):
# This is a terrible default directory permission, as the file
# or its siblings will often contain secrets.
host.mkdir(os.path.dirname(target), owner, group, perms=0o755)
host.write_file(target, content.encode(encoding), owner, group, perms)
return content
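# --- Added sketch (hedged): one way render() might be called from a charm hook.
# The template name, target path and context are placeholders and assume a
# "templates/app_config.j2" file exists in the charm.
def _example_render():
    return render('app_config.j2', '/etc/app/app.conf',
                  {'host': '0.0.0.0', 'port': 8080}, perms=0o640)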
|
agpl-3.0
| -3,079,095,182,892,421,000
| 38.333333
| 84
| 0.677652
| false
| 4.231076
| false
| false
| false
|
nkalodimas/invenio
|
modules/bibupload/lib/bibupload.py
|
1
|
143104
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibUpload: Receive MARC XML file and update the appropriate database
tables according to options.
"""
__revision__ = "$Id$"
import os
import re
import sys
import time
from datetime import datetime
from zlib import compress
import socket
import marshal
import copy
import tempfile
import urlparse
import urllib2
import urllib
from invenio.config import CFG_OAI_ID_FIELD, \
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG, \
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG, \
CFG_BIBUPLOAD_EXTERNAL_OAIID_PROVENANCE_TAG, \
CFG_BIBUPLOAD_STRONG_TAGS, \
CFG_BIBUPLOAD_CONTROLLED_PROVENANCE_TAGS, \
CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE, \
CFG_BIBUPLOAD_DELETE_FORMATS, \
CFG_SITE_URL, CFG_SITE_SECURE_URL, CFG_SITE_RECORD, \
CFG_OAI_PROVENANCE_ALTERED_SUBFIELD, \
CFG_BIBUPLOAD_DISABLE_RECORD_REVISIONS, \
CFG_BIBUPLOAD_CONFLICTING_REVISION_TICKET_QUEUE
from invenio.jsonutils import json, CFG_JSON_AVAILABLE
from invenio.bibupload_config import CFG_BIBUPLOAD_CONTROLFIELD_TAGS, \
CFG_BIBUPLOAD_SPECIAL_TAGS, \
CFG_BIBUPLOAD_DELETE_CODE, \
CFG_BIBUPLOAD_DELETE_VALUE, \
CFG_BIBUPLOAD_OPT_MODES
from invenio.dbquery import run_sql, \
Error
from invenio.bibrecord import create_records, \
record_add_field, \
record_delete_field, \
record_xml_output, \
record_get_field_instances, \
record_get_field_value, \
record_get_field_values, \
field_get_subfield_values, \
field_get_subfield_instances, \
record_modify_subfield, \
record_delete_subfield_from, \
record_delete_fields, \
record_add_subfield_into, \
record_find_field, \
record_extract_oai_id, \
record_extract_dois, \
record_has_field,\
records_identical
from invenio.search_engine import get_record
from invenio.dateutils import convert_datestruct_to_datetext
from invenio.errorlib import register_exception
from invenio.bibcatalog import bibcatalog_system
from invenio.intbitset import intbitset
from invenio.urlutils import make_user_agent_string
from invenio.config import CFG_BIBDOCFILE_FILEDIR
from invenio.bibtask import task_init, write_message, \
task_set_option, task_get_option, task_get_task_param, task_update_status, \
task_update_progress, task_sleep_now_if_required, fix_argv_paths
from invenio.bibdocfile import BibRecDocs, file_strip_ext, normalize_format, \
get_docname_from_url, check_valid_url, download_url, \
KEEP_OLD_VALUE, decompose_bibdocfile_url, InvenioBibDocFileError, \
bibdocfile_url_p, CFG_BIBDOCFILE_AVAILABLE_FLAGS, guess_format_from_url, \
BibRelation, MoreInfo
from invenio.search_engine import search_pattern
from invenio.bibupload_revisionverifier import RevisionVerifier, \
InvenioBibUploadConflictingRevisionsError, \
InvenioBibUploadInvalidRevisionError, \
InvenioBibUploadMissing005Error, \
InvenioBibUploadUnchangedRecordError
#Statistic variables
stat = {}
stat['nb_records_to_upload'] = 0
stat['nb_records_updated'] = 0
stat['nb_records_inserted'] = 0
stat['nb_errors'] = 0
stat['nb_holdingpen'] = 0
stat['exectime'] = time.localtime()
_WRITING_RIGHTS = None
CFG_BIBUPLOAD_ALLOWED_SPECIAL_TREATMENTS = ('oracle', )
CFG_HAS_BIBCATALOG = "UNKNOWN"
def check_bibcatalog():
"""
Return True if bibcatalog is available.
"""
global CFG_HAS_BIBCATALOG # pylint: disable=W0603
if CFG_HAS_BIBCATALOG != "UNKNOWN":
return CFG_HAS_BIBCATALOG
CFG_HAS_BIBCATALOG = True
if bibcatalog_system is not None:
bibcatalog_response = bibcatalog_system.check_system()
else:
bibcatalog_response = "No ticket system configured"
if bibcatalog_response != "":
write_message("BibCatalog error: %s\n" % (bibcatalog_response,))
CFG_HAS_BIBCATALOG = False
return CFG_HAS_BIBCATALOG
## Let's set a reasonable timeout for URL request (e.g. FFT)
socket.setdefaulttimeout(40)
def parse_identifier(identifier):
"""Parse the identifier and determine if it is temporary or fixed"""
id_str = str(identifier)
if not id_str.startswith("TMP:"):
return (False, identifier)
else:
return (True, id_str[4:])
def resolve_identifier(tmps, identifier):
"""Resolves an identifier. If the identifier is not temporary, this
function is an identity on the second argument. Otherwise, a resolved
value is returned or an exception is raised."""
is_tmp, tmp_id = parse_identifier(identifier)
if is_tmp:
if not tmp_id in tmps:
raise StandardError("Temporary identifier %s not present in the dictionary" % (tmp_id, ))
if tmps[tmp_id] == -1:
# the identifier has been signalised but never assigned a value - probably error during processing
raise StandardError("Temporary identifier %s has been declared, but never assigned a value. Probably an error during processign of an appropriate FFT has happened. Please see the log" % (tmp_id, ))
return int(tmps[tmp_id])
else:
return int(identifier)
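# Added illustration (hedged): how temporary identifiers resolve; all values are
# placeholders.
#   resolve_identifier({'1': 42}, 'TMP:1')  # -> 42
#   resolve_identifier({'1': 42}, 7)        # -> 7
#   resolve_identifier({}, 'TMP:1')         # raises StandardError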
_re_find_001 = re.compile('<controlfield\\s+tag=("001"|\'001\')\\s*>\\s*(\\d*)\\s*</controlfield>', re.S)
def bibupload_pending_recids():
"""This function embed a bit of A.I. and is more a hack than an elegant
algorithm. It should be updated in case bibupload/bibsched are modified
in incompatible ways.
This function returns the intbitset of all the records that are being
(or are scheduled to be) touched by other bibuploads.
"""
options = run_sql("""SELECT arguments FROM schTASK WHERE status<>'DONE' AND
proc='bibupload' AND (status='RUNNING' OR status='CONTINUING' OR
status='WAITING' OR status='SCHEDULED' OR status='ABOUT TO STOP' OR
status='ABOUT TO SLEEP')""")
ret = intbitset()
xmls = []
if options:
for arguments in options:
arguments = marshal.loads(arguments[0])
for argument in arguments[1:]:
if argument.startswith('/'):
# XMLs files are recognizable because they're absolute
# files...
xmls.append(argument)
for xmlfile in xmls:
# Let's grep for the 001
try:
xml = open(xmlfile).read()
ret += [int(group[1]) for group in _re_find_001.findall(xml)]
except:
continue
return ret
### bibupload engine functions:
def bibupload(record, opt_mode=None, opt_notimechange=0, oai_rec_id="", pretend=False,
tmp_ids=None, tmp_vers=None):
"""Main function: process a record and fit it in the tables
bibfmt, bibrec, bibrec_bibxxx, bibxxx with proper record
metadata.
Return (error_code, recID) of the processed record.
"""
if tmp_ids is None:
tmp_ids = {}
if tmp_vers is None:
tmp_vers = {}
if opt_mode == 'reference':
## NOTE: reference mode has been deprecated in favour of 'correct'
opt_mode = 'correct'
assert(opt_mode in CFG_BIBUPLOAD_OPT_MODES)
error = None
affected_tags = {}
original_record = {}
rec_old = {}
now = datetime.now() # will hold record creation/modification date
record_had_altered_bit = False
is_opt_mode_delete = False
# Extraction of the Record Id from 001, SYSNO or OAIID or DOI tags:
rec_id = retrieve_rec_id(record, opt_mode, pretend=pretend)
if rec_id == -1:
msg = " Failed: either the record already exists and insert was " \
"requested or the record does not exists and " \
"replace/correct/append has been used"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, -1, msg)
elif rec_id > 0:
write_message(" -Retrieve record ID (found %s): DONE." % rec_id, verbose=2)
(unique_p, msg) = check_record_doi_is_unique(rec_id, record)
if not unique_p:
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
if not record.has_key('001'):
# Found record ID by means of SYSNO or OAIID or DOI, and the
# input MARCXML buffer does not have this 001 tag, so we
# should add it now:
error = record_add_field(record, '001', controlfield_value=rec_id)
if error is None:
msg = " Failed: Error during adding the 001 controlfield " \
"to the record"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
else:
error = None
write_message(" -Added tag 001: DONE.", verbose=2)
write_message(" -Check if the xml marc file is already in the database: DONE" , verbose=2)
record_deleted_p = False
if opt_mode == 'insert' or \
(opt_mode == 'replace_or_insert') and rec_id is None:
insert_mode_p = True
# Insert the record into the bibrec databases to have a recordId
rec_id = create_new_record(pretend=pretend)
write_message(" -Creation of a new record id (%d): DONE" % rec_id, verbose=2)
# we add the record Id control field to the record
error = record_add_field(record, '001', controlfield_value=rec_id)
if error is None:
msg = " Failed: Error during adding the 001 controlfield " \
"to the record"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
else:
error = None
error = record_add_field(record, '005', controlfield_value=now.strftime("%Y%m%d%H%M%S.0"))
if error is None:
msg = " Failed: Error during adding to 005 controlfield to record"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
else:
error=None
elif opt_mode != 'insert':
insert_mode_p = False
# Update Mode
# Retrieve the old record to update
rec_old = get_record(rec_id)
record_had_altered_bit = record_get_field_values(rec_old, CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4], CFG_OAI_PROVENANCE_ALTERED_SUBFIELD)
# Also save a copy to restore previous situation in case of errors
original_record = get_record(rec_id)
if rec_old is None:
msg = " Failed during the creation of the old record!"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
else:
write_message(" -Retrieve the old record to update: DONE", verbose=2)
# flag to check whether the revisions have been verified and patch generated.
# If revision verification failed, then we need to manually identify the affected tags
# and process them
revision_verified = False
rev_verifier = RevisionVerifier()
#check for revision conflicts before updating record
if record_has_field(record, '005') and not CFG_BIBUPLOAD_DISABLE_RECORD_REVISIONS:
write_message(" -Upload Record has 005. Verifying Revision", verbose=2)
try:
rev_res = rev_verifier.verify_revision(record, original_record, opt_mode)
if rev_res:
opt_mode = rev_res[0]
record = rev_res[1]
affected_tags = rev_res[2]
revision_verified = True
write_message(lambda: " -Patch record generated. Changing opt_mode to correct.\nPatch:\n%s " % record_xml_output(record), verbose=2)
else:
write_message(" -No Patch Record.", verbose=2)
except InvenioBibUploadUnchangedRecordError, err:
msg = " -ISSUE: %s" % err
write_message(msg, verbose=1, stream=sys.stderr)
write_message(msg, " Continuing anyway in case there are FFT or other tags")
except InvenioBibUploadConflictingRevisionsError, err:
msg = " -ERROR: Conflicting Revisions - %s" % err
write_message(msg, verbose=1, stream=sys.stderr)
submit_ticket_for_holding_pen(rec_id, err, "Conflicting Revisions. Inserting record into holding pen.")
insert_record_into_holding_pen(record, str(rec_id))
return (2, int(rec_id), msg)
except InvenioBibUploadInvalidRevisionError, err:
msg = " -ERROR: Invalid Revision - %s" % err
write_message(msg)
submit_ticket_for_holding_pen(rec_id, err, "Invalid Revisions. Inserting record into holding pen.")
insert_record_into_holding_pen(record, str(rec_id))
return (2, int(rec_id), msg)
except InvenioBibUploadMissing005Error, err:
msg = " -ERROR: Missing 005 - %s" % err
write_message(msg)
submit_ticket_for_holding_pen(rec_id, err, "Missing 005. Inserting record into holding pen.")
insert_record_into_holding_pen(record, str(rec_id))
return (2, int(rec_id), msg)
else:
write_message(" - No 005 Tag Present. Resuming normal flow.", verbose=2)
# dictionaries to temporarily hold original recs tag-fields
existing_tags = {}
retained_tags = {}
# in case of delete operation affected tags should be deleted in delete_bibrec_bibxxx
# but should not be updated again in STAGE 4
# utilising the below flag
is_opt_mode_delete = False
if not revision_verified:
# either 005 was not present or opt_mode was not correct/replace
# in this case we still need to find out affected tags to process
write_message(" - Missing 005 or opt_mode!=Replace/Correct.Revision Verifier not called.", verbose=2)
# Identify affected tags
if opt_mode == 'correct' or opt_mode == 'replace' or opt_mode == 'replace_or_insert':
rec_diff = rev_verifier.compare_records(record, original_record, opt_mode)
affected_tags = rev_verifier.retrieve_affected_tags_with_ind(rec_diff)
elif opt_mode == 'delete':
# populate an intermediate dictionary
# used in upcoming step related to 'delete' mode
is_opt_mode_delete = True
for tag, fields in original_record.iteritems():
existing_tags[tag] = [tag + (field[1] != ' ' and field[1] or '_') + (field[2] != ' ' and field[2] or '_') for field in fields]
elif opt_mode == 'append':
for tag, fields in record.iteritems():
if tag not in CFG_BIBUPLOAD_CONTROLFIELD_TAGS:
affected_tags[tag]=[(field[1], field[2]) for field in fields]
# In Replace mode, take over old strong tags if applicable:
if opt_mode == 'replace' or \
opt_mode == 'replace_or_insert':
copy_strong_tags_from_old_record(record, rec_old)
# Delete tags to correct in the record
if opt_mode == 'correct':
delete_tags_to_correct(record, rec_old)
write_message(" -Delete the old tags to correct in the old record: DONE",
verbose=2)
# Delete tags specified if in delete mode
if opt_mode == 'delete':
record = delete_tags(record, rec_old)
for tag, fields in record.iteritems():
retained_tags[tag] = [tag + (field[1] != ' ' and field[1] or '_') + (field[2] != ' ' and field[2] or '_') for field in fields]
#identify the tags that have been deleted
for tag in existing_tags.keys():
if tag not in retained_tags:
for item in existing_tags[tag]:
tag_to_add = item[0:3]
ind1, ind2 = item[3], item[4]
if tag_to_add in affected_tags and (ind1, ind2) not in affected_tags[tag_to_add]:
affected_tags[tag_to_add].append((ind1, ind2))
else:
affected_tags[tag_to_add] = [(ind1, ind2)]
else:
deleted = list(set(existing_tags[tag]) - set(retained_tags[tag]))
for item in deleted:
tag_to_add = item[0:3]
ind1, ind2 = item[3], item[4]
if tag_to_add in affected_tags and (ind1, ind2) not in affected_tags[tag_to_add]:
affected_tags[tag_to_add].append((ind1, ind2))
else:
affected_tags[tag_to_add] = [(ind1, ind2)]
write_message(" -Delete specified tags in the old record: DONE", verbose=2)
# Append new tag to the old record and update the new record with the old_record modified
if opt_mode == 'append' or opt_mode == 'correct':
record = append_new_tag_to_old_record(record, rec_old)
write_message(" -Append new tags to the old record: DONE", verbose=2)
write_message(" -Affected Tags found after comparing upload and original records: %s"%(str(affected_tags)), verbose=2)
# A 005 tag should be added every time the record is modified.
# If an existing record is modified, its 005 tag should be overwritten with a new revision value
if record.has_key('005'):
record_delete_field(record, '005')
write_message(" Deleted the existing 005 tag.", verbose=2)
last_revision = run_sql("SELECT MAX(job_date) FROM hstRECORD WHERE id_bibrec=%s", (rec_id, ))[0][0]
if last_revision and last_revision.strftime("%Y%m%d%H%M%S.0") == now.strftime("%Y%m%d%H%M%S.0"):
## We are updating the same record within the same seconds! It's less than
## the minimal granularity. Let's pause for 1 more second to take a breath :-)
time.sleep(1)
now = datetime.now()
error = record_add_field(record, '005', controlfield_value=now.strftime("%Y%m%d%H%M%S.0"))
if error is None:
write_message(" Failed: Error during adding to 005 controlfield to record", verbose=1, stream=sys.stderr)
return (1, int(rec_id))
else:
error = None
write_message(lambda: " -Added tag 005: DONE. "+ str(record_get_field_value(record, '005', '', '')), verbose=2)
# adding 005 to affected tags will delete the existing 005 entry
# and update with the latest timestamp.
if '005' not in affected_tags:
affected_tags['005'] = [(' ', ' ')]
write_message(" -Stage COMPLETED", verbose=2)
record_deleted_p = False
try:
if not record_is_valid(record):
msg = "ERROR: record is not valid"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, -1, msg)
# Have a look if we have FFT tags
write_message("Stage 2: Start (Process FFT tags if exist).", verbose=2)
record_had_FFT = False
if extract_tag_from_record(record, 'FFT') is not None:
record_had_FFT = True
if not writing_rights_p():
write_message(" Stage 2 failed: Error no rights to write fulltext files",
verbose=1, stream=sys.stderr)
task_update_status("ERROR")
sys.exit(1)
try:
record = elaborate_fft_tags(record, rec_id, opt_mode,
pretend=pretend, tmp_ids=tmp_ids,
tmp_vers=tmp_vers)
except Exception, e:
register_exception()
msg = " Stage 2 failed: Error while elaborating FFT tags: %s" % e
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
if record is None:
msg = " Stage 2 failed: Error while elaborating FFT tags"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
write_message(" -Stage COMPLETED", verbose=2)
else:
write_message(" -Stage NOT NEEDED", verbose=2)
# Have a look if we have 856 tags to synchronize (or FFT tags were processed above)
write_message("Stage 2B: Start (Synchronize 8564 tags).", verbose=2)
if record_had_FFT or extract_tag_from_record(record, '856') is not None:
try:
record = synchronize_8564(rec_id, record, record_had_FFT, pretend=pretend)
# in case if FFT is in affected list make appropriate changes
if ('4', ' ') not in affected_tags.get('856', []):
if '856' not in affected_tags:
affected_tags['856'] = [('4', ' ')]
elif ('4', ' ') not in affected_tags['856']:
affected_tags['856'].append(('4', ' '))
write_message(" -Modified field list updated with FFT details: %s" % str(affected_tags), verbose=2)
except Exception, e:
register_exception(alert_admin=True)
msg = " Stage 2B failed: Error while synchronizing 8564 tags: %s" % e
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
if record is None:
msg = " Stage 2B failed: Error while synchronizing 8564 tags"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
write_message(" -Stage COMPLETED", verbose=2)
else:
write_message(" -Stage NOT NEEDED", verbose=2)
write_message("Stage 3: Start (Apply fields deletion requests).", verbose=2)
write_message(lambda: " Record before deletion:\n%s" % record_xml_output(record), verbose=9)
# remove fields with __DELETE_FIELDS__
# NOTE: iterate over a temporary deep copy of the record to avoid a
# RuntimeError caused by the dictionary changing size during iteration
tmp_rec = copy.deepcopy(record)
for tag in tmp_rec:
for data_tuple in record[tag]:
if (CFG_BIBUPLOAD_DELETE_CODE, CFG_BIBUPLOAD_DELETE_VALUE) in data_tuple[0]:
# delete the tag with particular indicator pairs from original record
record_delete_field(record, tag, data_tuple[1], data_tuple[2])
write_message(lambda: " Record after cleaning up fields to be deleted:\n%s" % record_xml_output(record), verbose=9)
# Update of the BibFmt
write_message("Stage 4: Start (Update bibfmt).", verbose=2)
updates_exist = not records_identical(record, original_record)
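# The bibfmt and metadata updates below are only performed when the incoming
# record actually differs from the one currently stored in the database.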
if updates_exist:
# if record_had_altered_bit, this must be set to true, since the
# record has been altered.
if record_had_altered_bit:
oai_provenance_fields = record_get_field_instances(record, CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4])
for oai_provenance_field in oai_provenance_fields:
for i, (code, dummy_value) in enumerate(oai_provenance_field[0]):
if code == CFG_OAI_PROVENANCE_ALTERED_SUBFIELD:
oai_provenance_field[0][i] = (code, 'true')
tmp_indicators = (CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4])
if tmp_indicators not in affected_tags.get(CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3], []):
if CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3] not in affected_tags:
affected_tags[CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3]] = [tmp_indicators]
else:
affected_tags[CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:3]].append(tmp_indicators)
write_message(lambda: " Updates exists:\n%s\n!=\n%s" % (record, original_record), verbose=9)
# format the single record as xml
rec_xml_new = record_xml_output(record)
# Update bibfmt with the format xm of this record
modification_date = time.strftime('%Y-%m-%d %H:%M:%S', time.strptime(record_get_field_value(record, '005'), '%Y%m%d%H%M%S.0'))
error = update_bibfmt_format(rec_id, rec_xml_new, 'xm', modification_date, pretend=pretend)
if error == 1:
msg = " Failed: error during update_bibfmt_format 'xm'"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
if CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE:
error = update_bibfmt_format(rec_id, marshal.dumps(record), 'recstruct', modification_date, pretend=pretend)
if error == 1:
msg = " Failed: error during update_bibfmt_format 'recstruct'"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
if not CFG_BIBUPLOAD_DISABLE_RECORD_REVISIONS:
# archive MARCXML format of this record for version history purposes:
error = archive_marcxml_for_history(rec_id, pretend=pretend)
if error == 1:
msg = " Failed to archive MARCXML for history"
write_message(msg, verbose=1, stream=sys.stderr)
return (1, int(rec_id), msg)
else:
write_message(" -Archived MARCXML for history: DONE", verbose=2)
# delete some formats like HB upon record change:
if updates_exist or record_had_FFT:
for format_to_delete in CFG_BIBUPLOAD_DELETE_FORMATS:
try:
delete_bibfmt_format(rec_id, format_to_delete, pretend=pretend)
except:
# OK, some formats like HB could not have been deleted, no big deal
pass
write_message(" -Stage COMPLETED", verbose=2)
# Update the database MetaData
write_message("Stage 5: Start (Update the database with the metadata).",
verbose=2)
if insert_mode_p:
update_database_with_metadata(record, rec_id, oai_rec_id, pretend=pretend)
elif opt_mode in ('replace', 'replace_or_insert',
'append', 'correct', 'delete') and updates_exist:
# now we clear all the rows from bibrec_bibxxx from the old
record_deleted_p = True
delete_bibrec_bibxxx(rec_old, rec_id, affected_tags, pretend=pretend)
# metadata update will insert tags that are available in affected_tags.
# but for delete, once the tags have been deleted from bibrec_bibxxx, they don't have to be inserted
# except for 005.
if is_opt_mode_delete:
tmp_affected_tags = copy.deepcopy(affected_tags)
for tag in tmp_affected_tags:
if tag != '005':
affected_tags.pop(tag)
write_message(" -Clean bibrec_bibxxx: DONE", verbose=2)
update_database_with_metadata(record, rec_id, oai_rec_id, affected_tags, pretend=pretend)
else:
write_message(" -Stage NOT NEEDED in mode %s" % opt_mode,
verbose=2)
write_message(" -Stage COMPLETED", verbose=2)
record_deleted_p = False
# Finally we update the bibrec table with the current date
write_message("Stage 6: Start (Update bibrec table with current date).",
verbose=2)
if opt_notimechange == 0 and (updates_exist or record_had_FFT):
bibrec_now = convert_datestruct_to_datetext(time.localtime())
write_message(" -Retrieved current localtime: DONE", verbose=2)
update_bibrec_date(bibrec_now, rec_id, insert_mode_p, pretend=pretend)
write_message(" -Stage COMPLETED", verbose=2)
else:
write_message(" -Stage NOT NEEDED", verbose=2)
# Increase statistics
if insert_mode_p:
stat['nb_records_inserted'] += 1
else:
stat['nb_records_updated'] += 1
# Upload of this record finish
write_message("Record "+str(rec_id)+" DONE", verbose=1)
return (0, int(rec_id), "")
finally:
if record_deleted_p:
## BibUpload has failed, leaving the record deleted. We should
## restore the original record then.
update_database_with_metadata(original_record, rec_id, oai_rec_id, pretend=pretend)
write_message(" Restored original record", verbose=1, stream=sys.stderr)
def record_is_valid(record):
"""
Check if the record is valid. Currently this simply checks if the record
has exactly one rec_id.
@param record: the record
@type record: recstruct
@return: True if the record is valid
@rtype: bool
"""
rec_ids = record_get_field_values(record, tag="001")
if len(rec_ids) != 1:
write_message(" The record is not valid: it has not a single rec_id: %s" % (rec_ids), stream=sys.stderr)
return False
return True
def find_record_ids_by_oai_id(oaiId):
"""
Find record identifiers from a given OAI identifier.
Return a set (intbitset) of record identifiers matching the given OAI identifier.
"""
# Is this record already in invenio (matching by oaiid)
if oaiId:
recids = search_pattern(p=oaiId, f=CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG, m='e')
# Is this record already in invenio (matching by reportnumber i.e.
# particularly 037. Idea: to avoid double insertions)
repnumber = oaiId.split(":")[-1]
if repnumber:
recids |= search_pattern(p = repnumber,
f = "reportnumber",
m = 'e' )
# Is this record already in invenio (matching by reportnumber i.e.
# particularly 037. Idea: to avoid double insertions)
repnumber = "arXiv:" + oaiId.split(":")[-1]
recids |= search_pattern(p = repnumber,
f = "reportnumber",
m = 'e' )
return recids
else:
return intbitset()
def bibupload_post_phase(record, mode=None, rec_id="", pretend=False,
tmp_ids=None, tmp_vers=None):
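# Post-upload phase: elaborate BDR (document relation) and BDM (MoreInfo)
# tags once the bibliographic record itself has been processed.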
def _elaborate_tag(record, tag, fun):
if extract_tag_from_record(record, tag) is not None:
try:
record = fun()
except Exception, e:
register_exception()
write_message(" Stage failed: Error while elaborating %s tags: %s" % (tag, e),
verbose=1, stream=sys.stderr)
return (1, int(rec_id)) # TODO: ?
if record is None:
write_message(" Stage failed: Error while elaborating %s tags" % (tag, ),
verbose=1, stream=sys.stderr)
return (1, int(rec_id))
write_message(" -Stage COMPLETED", verbose=2)
else:
write_message(" -Stage NOT NEEDED", verbose=2)
if tmp_ids is None:
tmp_ids = {}
if tmp_vers is None:
tmp_vers = {}
_elaborate_tag(record, "BDR", lambda: elaborate_brt_tags(record, rec_id = rec_id,
mode = mode,
pretend = pretend,
tmp_ids = tmp_ids,
tmp_vers = tmp_vers))
_elaborate_tag(record, "BDM", lambda: elaborate_mit_tags(record, rec_id = rec_id,
mode = mode,
pretend = pretend,
tmp_ids = tmp_ids,
tmp_vers = tmp_vers))
def submit_ticket_for_holding_pen(rec_id, err, msg):
"""
Submit a ticket via BibCatalog to report about a record that has been put
into the Holding Pen.
@param rec_id: the affected record
@param err: the corresponding Exception
@param msg: verbose message
"""
from invenio import bibtask
from invenio.webuser import get_email_from_username, get_uid_from_email
user = task_get_task_param("user")
uid = None
if user:
try:
uid = get_uid_from_email(get_email_from_username(user))
except Exception, err:
write_message("WARNING: can't reliably retrieve uid for user %s: %s" % (user, err), stream=sys.stderr)
if check_bibcatalog():
text = """
%(msg)s found for record %(rec_id)s: %(err)s
See: <%(siteurl)s/record/edit/#state=edit&recid=%(rec_id)s>
BibUpload task information:
task_id: %(task_id)s
task_specific_name: %(task_specific_name)s
user: %(user)s
task_params: %(task_params)s
task_options: %(task_options)s""" % {
"msg": msg,
"rec_id": rec_id,
"err": err,
"siteurl": CFG_SITE_SECURE_URL,
"task_id": task_get_task_param("task_id"),
"task_specific_name": task_get_task_param("task_specific_name"),
"user": user,
"task_params": bibtask._TASK_PARAMS,
"task_options": bibtask._OPTIONS}
bibcatalog_system.ticket_submit(subject="%s: %s by %s" % (msg, rec_id, user), recordid=rec_id, text=text, queue=CFG_BIBUPLOAD_CONFLICTING_REVISION_TICKET_QUEUE, owner=uid)
def insert_record_into_holding_pen(record, oai_id, pretend=False):
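# Store the record XML in the bibHOLDINGPEN table for later manual inspection,
# trying to link it to an existing record id (0 when no match can be found).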
query = "INSERT INTO bibHOLDINGPEN (oai_id, changeset_date, changeset_xml, id_bibrec) VALUES (%s, NOW(), %s, %s)"
xml_record = record_xml_output(record)
bibrec_ids = find_record_ids_by_oai_id(oai_id) # here determining the identifier of the record
if len(bibrec_ids) > 0:
bibrec_id = bibrec_ids.pop()
else:
# id not found by using the oai_id, let's use a wider search based
# on any information we might have.
bibrec_id = retrieve_rec_id(record, 'holdingpen', pretend=pretend)
if bibrec_id is None:
bibrec_id = 0
if not pretend:
run_sql(query, (oai_id, xml_record, bibrec_id))
# record_id is logged as 0! ( We are not inserting into the main database)
log_record_uploading(oai_id, task_get_task_param('task_id', 0), 0, 'H', pretend=pretend)
stat['nb_holdingpen'] += 1
def print_out_bibupload_statistics():
"""Print the statistics of the process"""
out = "Task stats: %(nb_input)d input records, %(nb_updated)d updated, " \
"%(nb_inserted)d inserted, %(nb_errors)d errors, %(nb_holdingpen)d inserted to holding pen. " \
"Time %(nb_sec).2f sec." % { \
'nb_input': stat['nb_records_to_upload'],
'nb_updated': stat['nb_records_updated'],
'nb_inserted': stat['nb_records_inserted'],
'nb_errors': stat['nb_errors'],
'nb_holdingpen': stat['nb_holdingpen'],
'nb_sec': time.time() - time.mktime(stat['exectime']) }
write_message(out)
def open_marc_file(path):
"""Open a file and return the data"""
try:
# open the file containing the marc document
marc_file = open(path, 'r')
marc = marc_file.read()
marc_file.close()
except IOError, erro:
write_message("Error: %s" % erro, verbose=1, stream=sys.stderr)
write_message("Exiting.", sys.stderr)
if erro.errno == 2:
# No such file or directory
# Not scary
task_update_status("CERROR")
else:
task_update_status("ERROR")
sys.exit(1)
return marc
def xml_marc_to_records(xml_marc):
"""create the records"""
# Creation of the records from the xml Marc in argument
recs = create_records(xml_marc, 1, 1)
if recs == []:
write_message("Error: Cannot parse MARCXML file.", verbose=1, stream=sys.stderr)
write_message("Exiting.", sys.stderr)
task_update_status("ERROR")
sys.exit(1)
elif recs[0][0] is None:
write_message("Error: MARCXML file has wrong format: %s" % recs,
verbose=1, stream=sys.stderr)
write_message("Exiting.", sys.stderr)
task_update_status("CERROR")
sys.exit(1)
else:
recs = map((lambda x:x[0]), recs)
return recs
def find_record_format(rec_id, bibformat):
"""Look whether record REC_ID is formatted in FORMAT,
i.e. whether FORMAT exists in the bibfmt table for this record.
Return the number of times it is formatted: 0 if not, 1 if yes,
2 if found more than once (should never occur).
"""
out = 0
query = """SELECT COUNT(*) FROM bibfmt WHERE id_bibrec=%s AND format=%s"""
params = (rec_id, bibformat)
res = []
res = run_sql(query, params)
out = res[0][0]
return out
def find_record_from_recid(rec_id):
"""
Try to find record in the database from the REC_ID number.
Return record ID if found, None otherwise.
"""
res = run_sql("SELECT id FROM bibrec WHERE id=%s",
(rec_id,))
if res:
return res[0][0]
else:
return None
def find_record_from_sysno(sysno):
"""
Try to find record in the database from the external SYSNO number.
Return record ID if found, None otherwise.
"""
bibxxx = 'bib'+CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[0:2]+'x'
bibrec_bibxxx = 'bibrec_' + bibxxx
res = run_sql("""SELECT bb.id_bibrec FROM %(bibrec_bibxxx)s AS bb,
%(bibxxx)s AS b WHERE b.tag=%%s AND b.value=%%s
AND bb.id_bibxxx=b.id""" % \
{'bibxxx': bibxxx,
'bibrec_bibxxx': bibrec_bibxxx},
(CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG, sysno,))
if res:
return res[0][0]
else:
return None
def find_records_from_extoaiid(extoaiid, extoaisrc=None):
"""
Try to find records in the database from the external EXTOAIID number.
Return list of record ID if found, None otherwise.
"""
assert(CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:5] == CFG_BIBUPLOAD_EXTERNAL_OAIID_PROVENANCE_TAG[:5])
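# The provenance subfield must live in the same field as the OAI id itself,
# so that both can be read from a single field instance below.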
bibxxx = 'bib'+CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[0:2]+'x'
bibrec_bibxxx = 'bibrec_' + bibxxx
write_message(' Looking for extoaiid="%s" with extoaisrc="%s"' % (extoaiid, extoaisrc), verbose=9)
id_bibrecs = intbitset(run_sql("""SELECT bb.id_bibrec FROM %(bibrec_bibxxx)s AS bb,
%(bibxxx)s AS b WHERE b.tag=%%s AND b.value=%%s
AND bb.id_bibxxx=b.id""" % \
{'bibxxx': bibxxx,
'bibrec_bibxxx': bibrec_bibxxx},
(CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG, extoaiid,)))
write_message(' Partially found %s for extoaiid="%s"' % (id_bibrecs, extoaiid), verbose=9)
ret = intbitset()
for id_bibrec in id_bibrecs:
record = get_record(id_bibrec)
instances = record_get_field_instances(record, CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[0:3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3], CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4])
write_message(' recid %s -> instances "%s"' % (id_bibrec, instances), verbose=9)
for instance in instances:
this_extoaisrc = field_get_subfield_values(instance, CFG_BIBUPLOAD_EXTERNAL_OAIID_PROVENANCE_TAG[5])
this_extoaisrc = this_extoaisrc and this_extoaisrc[0] or None
this_extoaiid = field_get_subfield_values(instance, CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[5])
this_extoaiid = this_extoaiid and this_extoaiid[0] or None
write_message(" this_extoaisrc -> %s, this_extoaiid -> %s" % (this_extoaisrc, this_extoaiid), verbose=9)
if this_extoaiid == extoaiid:
write_message(' recid %s -> provenance "%s"' % (id_bibrec, this_extoaisrc), verbose=9)
if this_extoaisrc == extoaisrc:
write_message('Found recid %s for extoaiid="%s" with provenance="%s"' % (id_bibrec, extoaiid, extoaisrc), verbose=9)
ret.add(id_bibrec)
break
if this_extoaisrc is None:
write_message('WARNING: Found recid %s for extoaiid="%s" that doesn\'t specify any provenance, while input record does.' % (id_bibrec, extoaiid), stream=sys.stderr)
if extoaisrc is None:
write_message('WARNING: Found recid %s for extoaiid="%s" that specify a provenance (%s), while input record does not have a provenance.' % (id_bibrec, extoaiid, this_extoaisrc), stream=sys.stderr)
return ret
def find_record_from_oaiid(oaiid):
"""
Try to find the record in the database from the local OAI ID number.
Return record ID if found, None otherwise.
"""
bibxxx = 'bib'+CFG_OAI_ID_FIELD[0:2]+'x'
bibrec_bibxxx = 'bibrec_' + bibxxx
res = run_sql("""SELECT bb.id_bibrec FROM %(bibrec_bibxxx)s AS bb,
%(bibxxx)s AS b WHERE b.tag=%%s AND b.value=%%s
AND bb.id_bibxxx=b.id""" % \
{'bibxxx': bibxxx,
'bibrec_bibxxx': bibrec_bibxxx},
(CFG_OAI_ID_FIELD, oaiid,))
if res:
return res[0][0]
else:
return None
def find_record_from_doi(doi):
"""
Try to find record in the database from the given DOI.
Return record ID if found, None otherwise.
"""
bibxxx = 'bib02x'
bibrec_bibxxx = 'bibrec_' + bibxxx
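# DOIs are stored in 0247_ $a; a candidate row only counts as a DOI when the
# same field instance also carries $2 == "doi", which is verified below.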
res = run_sql("""SELECT bb.id_bibrec, bb.field_number
FROM %(bibrec_bibxxx)s AS bb, %(bibxxx)s AS b
WHERE b.tag=%%s AND b.value=%%s
AND bb.id_bibxxx=b.id""" % \
{'bibxxx': bibxxx,
'bibrec_bibxxx': bibrec_bibxxx},
('0247_a', doi,))
# For each of the result, make sure that it is really tagged as doi
for (id_bibrec, field_number) in res:
res = run_sql("""SELECT bb.id_bibrec
FROM %(bibrec_bibxxx)s AS bb, %(bibxxx)s AS b
WHERE b.tag=%%s AND b.value=%%s
AND bb.id_bibxxx=b.id and bb.field_number=%%s and bb.id_bibrec=%%s""" % \
{'bibxxx': bibxxx,
'bibrec_bibxxx': bibrec_bibxxx},
('0247_2', "doi", field_number, id_bibrec))
if res and res[0][0] == id_bibrec:
return res[0][0]
return None
def extract_tag_from_record(record, tag_number):
""" Extract the tag_number for record."""
# first step verify if the record is not already in the database
if record:
return record.get(tag_number, None)
return None
def retrieve_rec_id(record, opt_mode, pretend=False, post_phase = False):
"""Retrieve the record Id from a record by using tag 001 or SYSNO or OAI ID or DOI
tag. opt_mod is the desired mode.
@param post_phase Tells if we are calling this method in the postprocessing phase. If true, we accept presence of 001 fields even in the insert mode
@type post_phase boolean
"""
rec_id = None
# 1st step: we look for the tag 001
tag_001 = extract_tag_from_record(record, '001')
if tag_001 is not None:
# We extract the record ID from the tag
rec_id = tag_001[0][3]
# if we are in insert mode => error
if opt_mode == 'insert' and not post_phase:
write_message(" Failed: tag 001 found in the xml" \
" submitted, you should use the option replace," \
" correct or append to replace an existing" \
" record. (-h for help)",
verbose=1, stream=sys.stderr)
return -1
else:
# we found the rec id and we are not in insert mode => continue
# we try to match rec_id against the database:
if find_record_from_recid(rec_id) is not None:
# okay, 001 corresponds to some known record
return int(rec_id)
elif opt_mode in ('replace', 'replace_or_insert'):
if task_get_option('force'):
# we found the rec_id but it's not in the system and we are
# requested to replace records. Therefore we create on the fly
# an empty record allocating the recid.
write_message(" Warning: tag 001 found in the xml with"
" value %(rec_id)s, but rec_id %(rec_id)s does"
" not exist. Since the mode replace was"
" requested the rec_id %(rec_id)s is allocated"
" on-the-fly." % {"rec_id": rec_id},
stream=sys.stderr)
return create_new_record(rec_id=rec_id, pretend=pretend)
else:
# Since --force was not used we are going to raise an error
write_message(" Failed: tag 001 found in the xml"
" submitted with value %(rec_id)s. The"
" corresponding record however does not"
" exists. If you want to really create"
" such record, please use the --force"
" parameter when calling bibupload." % {
"rec_id": rec_id}, stream=sys.stderr)
return -1
else:
# The record doesn't exist yet. We shall try to check
# the SYSNO, OAI or DOI id later.
write_message(" -Tag 001 value not found in database.",
verbose=9)
rec_id = None
else:
write_message(" -Tag 001 not found in the xml marc file.", verbose=9)
if rec_id is None:
# 2nd step we look for the SYSNO
sysnos = record_get_field_values(record,
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[0:3],
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[3:4] != "_" and \
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[3:4] or "",
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[4:5] != "_" and \
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[4:5] or "",
CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[5:6])
if sysnos:
sysno = sysnos[0] # there should be only one external SYSNO
write_message(" -Checking if SYSNO " + sysno + \
" exists in the database", verbose=9)
# try to find the corresponding rec id from the database
rec_id = find_record_from_sysno(sysno)
if rec_id is not None:
# rec_id found
pass
else:
# The record doesn't exist yet. We will try to check
# external and internal OAI ids later.
write_message(" -Tag SYSNO value not found in database.",
verbose=9)
rec_id = None
else:
write_message(" -Tag SYSNO not found in the xml marc file.",
verbose=9)
if rec_id is None:
# 3rd step: we look for the external OAIID
extoai_fields = record_get_field_instances(record,
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[0:3],
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3:4] != "_" and \
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[3:4] or "",
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4:5] != "_" and \
CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[4:5] or "")
if extoai_fields:
for field in extoai_fields:
extoaiid = field_get_subfield_values(field, CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[5:6])
extoaisrc = field_get_subfield_values(field, CFG_BIBUPLOAD_EXTERNAL_OAIID_PROVENANCE_TAG[5:6])
if extoaiid:
extoaiid = extoaiid[0]
if extoaisrc:
extoaisrc = extoaisrc[0]
else:
extoaisrc = None
write_message(" -Checking if EXTOAIID %s (%s) exists in the database" % (extoaiid, extoaisrc), verbose=9)
# try to find the corresponding rec id from the database
rec_ids = find_records_from_extoaiid(extoaiid, extoaisrc)
if rec_ids:
# rec_id found
rec_id = rec_ids.pop()
break
else:
# The record doesn't exist yet. We will try to check
# OAI id later.
write_message(" -Tag EXTOAIID value not found in database.",
verbose=9)
rec_id = None
else:
write_message(" -Tag EXTOAIID not found in the xml marc file.", verbose=9)
if rec_id is None:
# 4th step we look for the OAI ID
oaiidvalues = record_get_field_values(record,
CFG_OAI_ID_FIELD[0:3],
CFG_OAI_ID_FIELD[3:4] != "_" and \
CFG_OAI_ID_FIELD[3:4] or "",
CFG_OAI_ID_FIELD[4:5] != "_" and \
CFG_OAI_ID_FIELD[4:5] or "",
CFG_OAI_ID_FIELD[5:6])
if oaiidvalues:
oaiid = oaiidvalues[0] # there should be only one OAI ID
write_message(" -Check if local OAI ID " + oaiid + \
" exist in the database", verbose=9)
# try to find the corresponding rec id from the database
rec_id = find_record_from_oaiid(oaiid)
if rec_id is not None:
# rec_id found
pass
else:
write_message(" -Tag OAI ID value not found in database.",
verbose=9)
rec_id = None
else:
write_message(" -Tag SYSNO not found in the xml marc file.",
verbose=9)
if rec_id is None:
# 5th step we look for the DOI.
record_dois = record_extract_dois(record)
matching_recids = set()
if record_dois:
# try to find the corresponding rec id from the database
for record_doi in record_dois:
possible_recid = find_record_from_doi(record_doi)
if possible_recid:
matching_recids.add(possible_recid)
if len(matching_recids) > 1:
# Oops, this record refers to DOI existing in multiple records.
# We cannot decide which one to choose.
write_message(" Failed: Multiple records found in the" \
" database %s that match the DOI(s) in the input" \
" MARCXML %s" % (repr(matching_recids), repr(record_dois)),
verbose=1, stream=sys.stderr)
return -1
elif len(matching_recids) == 1:
rec_id = matching_recids.pop()
if opt_mode == 'insert':
write_message(" Failed: DOI tag matching record #%s found in the xml" \
" submitted, you should use the option replace," \
" correct or append to replace an existing" \
" record. (-h for help)" % rec_id,
verbose=1, stream=sys.stderr)
return -1
else:
write_message(" - Tag DOI value not found in database.",
verbose=9)
rec_id = None
else:
write_message(" -Tag DOI not found in the xml marc file.",
verbose=9)
# Now we should have detected rec_id from SYSNO, OAIID or DOI
# tags. (None otherwise.)
if rec_id:
if opt_mode == 'insert':
write_message(" Failed: Record found in the database," \
" you should use the option replace," \
" correct or append to replace an existing" \
" record. (-h for help)",
verbose=1, stream=sys.stderr)
return -1
else:
if opt_mode != 'insert' and \
opt_mode != 'replace_or_insert':
write_message(" Failed: Record not found in the database."\
" Please insert the file before updating it."\
" (-h for help)", verbose=1, stream=sys.stderr)
return -1
return rec_id and int(rec_id) or None
def check_record_doi_is_unique(rec_id, record):
"""
Check that DOI found in 'record' does not exist in any other
record than 'recid'.
Return (boolean, msg) where 'boolean' would be True if the DOI is
unique.
"""
record_dois = record_extract_dois(record)
if record_dois:
matching_recids = set()
for record_doi in record_dois:
possible_recid = find_record_from_doi(record_doi)
if possible_recid:
matching_recids.add(possible_recid)
if len(matching_recids) > 1:
# Oops, this record refers to DOI existing in multiple records.
msg = " Failed: Multiple records found in the" \
" database %s that match the DOI(s) in the input" \
" MARCXML %s" % (repr(matching_recids), repr(record_dois))
return (False, msg)
elif len(matching_recids) == 1:
matching_recid = matching_recids.pop()
if str(matching_recid) != str(rec_id):
# Oops, this record refers to DOI existing in a different record.
msg = " Failed: DOI(s) %s found in this record (#%s)" \
" already exist(s) in another other record (#%s)" % \
(repr(record_dois), rec_id, matching_recid)
return (False, msg)
return (True, "")
### Insert functions
def create_new_record(rec_id=None, pretend=False):
"""
Create new record in the database
@param rec_id: if specified the new record will have this rec_id.
@type rec_id: int
@return: the allocated rec_id
@rtype: int
@note: None is returned in case of errors
"""
if rec_id is not None:
try:
rec_id = int(rec_id)
except (ValueError, TypeError), error:
write_message(" Error during the creation_new_record function: %s "
% error, verbose=1, stream=sys.stderr)
return None
if run_sql("SELECT id FROM bibrec WHERE id=%s", (rec_id, )):
write_message(" Error during the creation_new_record function: the requested rec_id %s already exists." % rec_id)
return None
if pretend:
if rec_id:
return rec_id
else:
return run_sql("SELECT max(id)+1 FROM bibrec")[0][0]
if rec_id is not None:
return run_sql("INSERT INTO bibrec (id, creation_date, modification_date) VALUES (%s, NOW(), NOW())", (rec_id, ))
else:
return run_sql("INSERT INTO bibrec (creation_date, modification_date) VALUES (NOW(), NOW())")
def insert_bibfmt(id_bibrec, marc, bibformat, modification_date='1970-01-01 00:00:00', pretend=False):
"""Insert the format in the table bibfmt"""
# compress the marc value
pickled_marc = compress(marc)
try:
time.strptime(modification_date, "%Y-%m-%d %H:%M:%S")
except ValueError:
modification_date = '1970-01-01 00:00:00'
query = """INSERT LOW_PRIORITY INTO bibfmt (id_bibrec, format, last_updated, value)
VALUES (%s, %s, %s, %s)"""
if not pretend:
row_id = run_sql(query, (id_bibrec, bibformat, modification_date, pickled_marc))
return row_id
else:
return 1
def insert_record_bibxxx(tag, value, pretend=False):
"""Insert the record into bibxxx"""
# determine into which table one should insert the record
table_name = 'bib'+tag[0:2]+'x'
# check if the tag, value combination exists in the table
query = """SELECT id,value FROM %s """ % table_name
query += """ WHERE tag=%s AND value=%s"""
params = (tag, value)
res = None
res = run_sql(query, params)
# Note: compare now the found values one by one and look for
# string binary equality (e.g. to respect lowercase/uppercase
# match), regardless of the charset etc settings. Ideally we
# could use a BINARY operator in the above SELECT statement, but
# we would have to check compatibility on various MySQLdb versions
# etc; this approach checks all matched values in Python, not in
# MySQL, which is less cool, but more conservative, so it should
# work better on most setups.
if res:
for row in res:
row_id = row[0]
row_value = row[1]
if row_value == value:
return (table_name, row_id)
# We got here only when the tag, value combination was not found,
# so it is now necessary to insert the tag, value combination into
# bibxxx table as new.
query = """INSERT INTO %s """ % table_name
query += """ (tag, value) values (%s , %s)"""
params = (tag, value)
if not pretend:
row_id = run_sql(query, params)
else:
return (table_name, 1)
return (table_name, row_id)
def insert_record_bibrec_bibxxx(table_name, id_bibxxx,
field_number, id_bibrec, pretend=False):
"""Insert the record into bibrec_bibxxx"""
# determine into which table one should insert the record
full_table_name = 'bibrec_'+ table_name
# insert the proper row into the table
query = """INSERT INTO %s """ % full_table_name
query += """(id_bibrec,id_bibxxx, field_number) values (%s , %s, %s)"""
params = (id_bibrec, id_bibxxx, field_number)
if not pretend:
res = run_sql(query, params)
else:
return 1
return res
def synchronize_8564(rec_id, record, record_had_FFT, pretend=False):
"""
Synchronize 8564_ tags and BibDocFile tables.
This function directly manipulate the record parameter.
@type rec_id: positive integer
@param rec_id: the record identifier.
@param record: the record structure as created by bibrecord.create_record
@type record_had_FFT: boolean
@param record_had_FFT: True if the incoming bibuploaded-record used FFT
@return: the manipulated record (which is also modified as a side effect)
"""
def merge_marc_into_bibdocfile(field, pretend=False):
"""
Internal function that reads a single field and stores its content
in BibDocFile tables.
@param field: the 8564_ field containing a BibDocFile URL.
"""
write_message('Merging field: %s' % (field, ), verbose=9)
url = field_get_subfield_values(field, 'u')[:1] or field_get_subfield_values(field, 'q')[:1]
description = field_get_subfield_values(field, 'y')[:1]
comment = field_get_subfield_values(field, 'z')[:1]
if url:
recid, docname, docformat = decompose_bibdocfile_url(url[0])
if recid != rec_id:
write_message("INFO: URL %s is not pointing to a fulltext owned by this record (%s)" % (url, recid), stream=sys.stderr)
else:
try:
bibdoc = BibRecDocs(recid).get_bibdoc(docname)
if description and not pretend:
bibdoc.set_description(description[0], docformat)
if comment and not pretend:
bibdoc.set_comment(comment[0], docformat)
except InvenioBibDocFileError:
## Apparently the referenced docname doesn't exist anymore.
## Too bad. Let's skip it.
write_message("WARNING: docname %s does not seem to exist for record %s. Has it been renamed outside FFT?" % (docname, recid), stream=sys.stderr)
def merge_bibdocfile_into_marc(field, subfields):
"""
Internal function that reads BibDocFile table entries referenced by
the URL in the given 8564_ field and integrate the given information
directly with the provided subfields.
@param field: the 8564_ field containing a BibDocFile URL.
@param subfields: the subfields corresponding to the BibDocFile URL
generated after BibDocFile tables.
"""
write_message('Merging subfields %s into field %s' % (subfields, field), verbose=9)
subfields = dict(subfields) ## We make a copy not to have side-effects
subfield_to_delete = []
for subfield_position, (code, value) in enumerate(field_get_subfield_instances(field)):
## For each subfield instance already existing...
if code in subfields:
## ...We substitute it with what is in BibDocFile tables
record_modify_subfield(record, '856', code, subfields[code],
subfield_position, field_position_global=field[4])
del subfields[code]
else:
## ...We delete it otherwise
subfield_to_delete.append(subfield_position)
subfield_to_delete.sort()
for counter, position in enumerate(subfield_to_delete):
## FIXME: Very hackish algorithm. Since deleting a subfield
## will alter the position of the following subfields, we
## are taking note of this and adjusting further position
## by using a counter.
record_delete_subfield_from(record, '856', position - counter,
field_position_global=field[4])
subfields = subfields.items()
subfields.sort()
for code, value in subfields:
## Let's add non-previously existing subfields
record_add_subfield_into(record, '856', code, value,
field_position_global=field[4])
def get_bibdocfile_managed_info():
"""
Internal function, returns a dictionary of
BibDocFile URL -> wanna-be subfields.
This information is retrieved from internal BibDoc
structures rather than from input MARC XML files
@rtype: mapping
@return: BibDocFile URL -> wanna-be subfields dictionary
"""
ret = {}
bibrecdocs = BibRecDocs(rec_id)
latest_files = bibrecdocs.list_latest_files(list_hidden=False)
for afile in latest_files:
url = afile.get_url()
ret[url] = {'u': url}
description = afile.get_description()
comment = afile.get_comment()
subformat = afile.get_subformat()
if description:
ret[url]['y'] = description
if comment:
ret[url]['z'] = comment
if subformat:
ret[url]['x'] = subformat
return ret
write_message("Synchronizing MARC of recid '%s' with:\n%s" % (rec_id, record), verbose=9)
tags856s = record_get_field_instances(record, '856', '%', '%')
write_message("Original 856%% instances: %s" % tags856s, verbose=9)
tags8564s_to_add = get_bibdocfile_managed_info()
write_message("BibDocFile instances: %s" % tags8564s_to_add, verbose=9)
positions_tags8564s_to_remove = []
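# Reconcile each existing 8564_ field with the files actually managed by
# BibDocFile: merge information for URLs that match a managed file, and
# schedule for removal bibdocfile URLs of this record that are no longer managed.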
for local_position, field in enumerate(tags856s):
if field[1] == '4' and field[2] == ' ':
write_message('Analysing %s' % (field, ), verbose=9)
for url in field_get_subfield_values(field, 'u') + field_get_subfield_values(field, 'q'):
if url in tags8564s_to_add:
# there exists a link in the MARC of the record and the connection exists in BibDoc tables
if record_had_FFT:
merge_bibdocfile_into_marc(field, tags8564s_to_add[url])
else:
merge_marc_into_bibdocfile(field, pretend=pretend)
del tags8564s_to_add[url]
break
elif bibdocfile_url_p(url) and decompose_bibdocfile_url(url)[0] == rec_id:
# The link exists and is a potentially correct-looking link to a document;
# moreover, it refers to the current record id ... but it does not exist in
# internal BibDoc structures. This could have happened when a document was
# renamed or removed. In both cases we have to remove the link... a new one will be created
positions_tags8564s_to_remove.append(local_position)
write_message("%s to be deleted and re-synchronized" % (field, ), verbose=9)
break
record_delete_fields(record, '856', positions_tags8564s_to_remove)
tags8564s_to_add = tags8564s_to_add.values()
tags8564s_to_add.sort()
for subfields in tags8564s_to_add:
subfields = subfields.items()
subfields.sort()
record_add_field(record, '856', '4', ' ', subfields=subfields)
write_message('Final record: %s' % record, verbose=9)
return record
def _get_subfield_value(field, subfield_code, default=None):
res = field_get_subfield_values(field, subfield_code)
if res:
return res[0]
else:
return default
def elaborate_mit_tags(record, rec_id, mode, pretend = False, tmp_ids = {},
tmp_vers = {}):
"""
Uploading MoreInfo -> BDM tags
"""
tuple_list = extract_tag_from_record(record, 'BDM')
# Now gathering information from BDM tags - to be processed later
write_message("Processing BDM entries of the record ")
recordDocs = BibRecDocs(rec_id)
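# Each BDM field carries serialised MoreInfo for a document (optionally at a
# given version/format/relation); temporary identifiers are resolved first,
# then the MoreInfo objects are deleted and/or recreated accordingly.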
if tuple_list:
for mit in record_get_field_instances(record, 'BDM', ' ', ' '):
relation_id = _get_subfield_value(mit, "r")
bibdoc_id = _get_subfield_value(mit, "i")
# checking for a possibly temporary ID
if not (bibdoc_id is None):
bibdoc_id = resolve_identifier(tmp_ids, bibdoc_id)
bibdoc_ver = _get_subfield_value(mit, "v")
if not (bibdoc_ver is None):
bibdoc_ver = resolve_identifier(tmp_vers, bibdoc_ver)
bibdoc_name = _get_subfield_value(mit, "n")
bibdoc_fmt = _get_subfield_value(mit, "f")
moreinfo_str = _get_subfield_value(mit, "m")
if bibdoc_id == None:
if bibdoc_name == None:
raise StandardError("Incorrect relation. Neither name nor identifier of the first obejct has been specified")
else:
# retrieving the ID based on the document name (inside current record)
# The document is attached to current record.
try:
bibdoc_id = recordDocs.get_docid(bibdoc_name)
except:
raise StandardError("BibDoc of a name %s does not exist within a record" % (bibdoc_name, ))
else:
if bibdoc_name != None:
write_message("Warning: both name and id of the first document of a relation have been specified. Ignoring the name")
if (moreinfo_str is None or mode in ("replace", "correct")) and (not pretend):
MoreInfo(docid=bibdoc_id , version = bibdoc_ver,
docformat = bibdoc_fmt, relation = relation_id).delete()
if (not moreinfo_str is None) and (not pretend):
MoreInfo.create_from_serialised(moreinfo_str,
docid=bibdoc_id,
version = bibdoc_ver,
docformat = bibdoc_fmt,
relation = relation_id)
return record
def elaborate_brt_tags(record, rec_id, mode, pretend=False, tmp_ids = {}, tmp_vers = {}):
"""
Process BDR tags describing relations between existing objects
"""
tuple_list = extract_tag_from_record(record, 'BDR')
# Now gathering information from BDR tags - to be processed later
relations_to_create = []
write_message("Processing BDR entries of the record ")
recordDocs = BibRecDocs(rec_id) #TODO: check what happens if there is no record yet ! Will the class represent an empty set?
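# Each BDR field describes a relation between two bibdocs (optionally at a
# given version/format). All fields are first parsed into relations_to_create;
# the relations themselves are only created/updated/deleted afterwards.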
if tuple_list:
for brt in record_get_field_instances(record, 'BDR', ' ', ' '):
relation_id = _get_subfield_value(brt, "r")
bibdoc1_id = None
bibdoc1_name = None
bibdoc1_ver = None
bibdoc1_fmt = None
bibdoc2_id = None
bibdoc2_name = None
bibdoc2_ver = None
bibdoc2_fmt = None
if not relation_id:
bibdoc1_id = _get_subfield_value(brt, "i")
bibdoc1_name = _get_subfield_value(brt, "n")
if bibdoc1_id == None:
if bibdoc1_name == None:
raise StandardError("Incorrect relation. Neither name nor identifier of the first obejct has been specified")
else:
# retrieving the ID based on the document name (inside current record)
# The document is attached to current record.
try:
bibdoc1_id = recordDocs.get_docid(bibdoc1_name)
except:
raise StandardError("BibDoc of a name %s does not exist within a record" % \
(bibdoc1_name, ))
else:
# resolving temporary identifier
bibdoc1_id = resolve_identifier(tmp_ids, bibdoc1_id)
if bibdoc1_name != None:
write_message("Warning: both name and id of the first document of a relation have been specified. Ignoring the name")
bibdoc1_ver = _get_subfield_value(brt, "v")
if not (bibdoc1_ver is None):
bibdoc1_ver = resolve_identifier(tmp_vers, bibdoc1_ver)
bibdoc1_fmt = _get_subfield_value(brt, "f")
bibdoc2_id = _get_subfield_value(brt, "j")
bibdoc2_name = _get_subfield_value(brt, "o")
if bibdoc2_id == None:
if bibdoc2_name == None:
raise StandardError("Incorrect relation. Neither name nor identifier of the second obejct has been specified")
else:
# retrieving the ID based on the document name (inside current record)
# The document is attached to current record.
try:
bibdoc2_id = recordDocs.get_docid(bibdoc2_name)
except:
raise StandardError("BibDoc of a name %s does not exist within a record" % (bibdoc2_name, ))
else:
bibdoc2_id = resolve_identifier(tmp_ids, bibdoc2_id)
if bibdoc2_name != None:
write_message("Warning: both name and id of the first document of a relation have been specified. Ignoring the name")
bibdoc2_ver = _get_subfield_value(brt, "w")
if not (bibdoc2_ver is None):
bibdoc2_ver = resolve_identifier(tmp_vers, bibdoc2_ver)
bibdoc2_fmt = _get_subfield_value(brt, "g")
control_command = _get_subfield_value(brt, "d")
relation_type = _get_subfield_value(brt, "t")
if not relation_type and not relation_id:
raise StandardError("The relation type must be specified")
more_info = _get_subfield_value(brt, "m")
# the relation id might be specified in the case of updating
# MoreInfo table instead of other fields
rel_obj = None
if not relation_id:
rels = BibRelation.get_relations(rel_type = relation_type,
bibdoc1_id = bibdoc1_id,
bibdoc2_id = bibdoc2_id,
bibdoc1_ver = bibdoc1_ver,
bibdoc2_ver = bibdoc2_ver,
bibdoc1_fmt = bibdoc1_fmt,
bibdoc2_fmt = bibdoc2_fmt)
if len(rels) > 0:
rel_obj = rels[0]
relation_id = rel_obj.id
else:
rel_obj = BibRelation(rel_id=relation_id)
relations_to_create.append((relation_id, bibdoc1_id, bibdoc1_ver,
bibdoc1_fmt, bibdoc2_id, bibdoc2_ver,
bibdoc2_fmt, relation_type, more_info,
rel_obj, control_command))
record_delete_field(record, 'BDR', ' ', ' ')
if mode in ("insert", "replace_or_insert", "append", "correct", "replace"):
# now creating relations between objects based on the data
if not pretend:
for (relation_id, bibdoc1_id, bibdoc1_ver, bibdoc1_fmt,
bibdoc2_id, bibdoc2_ver, bibdoc2_fmt, rel_type,
more_info, rel_obj, control_command) in relations_to_create:
if rel_obj == None:
rel_obj = BibRelation.create(bibdoc1_id = bibdoc1_id,
bibdoc1_ver = bibdoc1_ver,
bibdoc1_fmt = bibdoc1_fmt,
bibdoc2_id = bibdoc2_id,
bibdoc2_ver = bibdoc2_ver,
bibdoc2_fmt = bibdoc2_fmt,
rel_type = rel_type)
relation_id = rel_obj.id
if mode in ("replace"):
# Clearing existing MoreInfo content
rel_obj.get_more_info().delete()
if more_info:
MoreInfo.create_from_serialised(more_info, relation = relation_id)
if control_command == "DELETE":
rel_obj.delete()
else:
write_message("BDR tag is not processed in the %s mode" % (mode, ))
return record
def elaborate_fft_tags(record, rec_id, mode, pretend=False,
tmp_ids = {}, tmp_vers = {}):
"""
Process FFT tags that should contain $a with file paths or URLs
to get the fulltext from. This function enriches record with
proper 8564 URL tags, downloads fulltext files and stores them
into var/data structure where appropriate.
CFG_BIBUPLOAD_WGET_SLEEP_TIME defines time to sleep in seconds in
between URL downloads.
Note: if an FFT tag contains multiple $a subfields, we upload them
into different 856 URL tags in the metadata. See regression test
case test_multiple_fft_insert_via_http().
"""
# Let's define some handy sub procedure.
def _add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, modification_date, pretend=False):
"""Adds a new format for a given bibdoc. Returns True when everything's fine."""
write_message('Add new format to %s url: %s, format: %s, docname: %s, doctype: %s, newname: %s, description: %s, comment: %s, flags: %s, modification_date: %s' % (repr(bibdoc), url, docformat, docname, doctype, newname, description, comment, flags, modification_date), verbose=9)
try:
if not url: # Not requesting a new url. Just updating comment & description
return _update_description_and_comment(bibdoc, docname, docformat, description, comment, flags, pretend=pretend)
try:
if not pretend:
bibdoc.add_file_new_format(url, description=description, comment=comment, flags=flags, modification_date=modification_date)
except StandardError, e:
write_message("('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s') not inserted because format already exists (%s)." % (url, docformat, docname, doctype, newname, description, comment, flags, modification_date, e), stream=sys.stderr)
raise
except Exception, e:
write_message("Error in adding '%s' as a new format because of: %s" % (url, e), stream=sys.stderr)
raise
return True
def _add_new_version(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, modification_date, pretend=False):
"""Adds a new version for a given bibdoc. Returns True when everything's fine."""
write_message('Add new version to %s url: %s, format: %s, docname: %s, doctype: %s, newname: %s, description: %s, comment: %s, flags: %s' % (repr(bibdoc), url, docformat, docname, doctype, newname, description, comment, flags))
try:
if not url:
return _update_description_and_comment(bibdoc, docname, docformat, description, comment, flags, pretend=pretend)
try:
if not pretend:
bibdoc.add_file_new_version(url, description=description, comment=comment, flags=flags, modification_date=modification_date)
except StandardError, e:
write_message("('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s') not inserted because '%s'." % (url, docformat, docname, doctype, newname, description, comment, flags, modification_date, e), stream=sys.stderr)
raise
except Exception, e:
write_message("Error in adding '%s' as a new version because of: %s" % (url, e), stream=sys.stderr)
raise
return True
def _update_description_and_comment(bibdoc, docname, docformat, description, comment, flags, pretend=False):
"""Directly update comments and descriptions."""
write_message('Just updating description and comment for %s with format %s with description %s, comment %s and flags %s' % (docname, docformat, description, comment, flags), verbose=9)
try:
if not pretend:
bibdoc.set_description(description, docformat)
bibdoc.set_comment(comment, docformat)
for flag in CFG_BIBDOCFILE_AVAILABLE_FLAGS:
if flag in flags:
bibdoc.set_flag(flag, docformat)
else:
bibdoc.unset_flag(flag, docformat)
except StandardError, e:
write_message("('%s', '%s', '%s', '%s', '%s') description and comment not updated because '%s'." % (docname, docformat, description, comment, flags, e))
raise
return True
def _process_document_moreinfos(more_infos, docname, version, docformat, mode):
if mode not in ('correct', 'append', 'replace_or_insert', 'replace', 'insert'):
print "exited because the mode is incorrect"
return
brd = BibRecDocs(rec_id)
docid = None
try:
docid = brd.get_docid(docname)
except:
raise StandardError("MoreInfo: No document of a given name associated with the record")
if not version:
# We have to retrieve the most recent version ...
version = brd.get_bibdoc(docname).get_latest_version()
doc_moreinfo_s, version_moreinfo_s, version_format_moreinfo_s, format_moreinfo_s = more_infos
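# The four serialised MoreInfo strings apply, in order, to the document as a
# whole, to a specific version, to a version+format pair and to a format.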
if mode in ("replace", "replace_or_insert"):
if doc_moreinfo_s: #only if specified, otherwise do not touch
MoreInfo(docid = docid).delete()
if format_moreinfo_s: #only if specified... otherwise do not touch
MoreInfo(docid = docid, docformat = docformat).delete()
if doc_moreinfo_s is not None:
MoreInfo.create_from_serialised(ser_str = doc_moreinfo_s, docid = docid)
if version_moreinfo_s is not None:
MoreInfo.create_from_serialised(ser_str = version_moreinfo_s,
docid = docid, version = version)
if version_format_moreinfo_s is not None:
MoreInfo.create_from_serialised(ser_str = version_format_moreinfo_s,
docid = docid, version = version,
docformat = docformat)
if format_moreinfo_s is not None:
MoreInfo.create_from_serialised(ser_str = format_moreinfo_s,
docid = docid, docformat = docformat)
if mode == 'delete':
raise StandardError('FFT tag specified but bibupload executed in --delete mode')
tuple_list = extract_tag_from_record(record, 'FFT')
if tuple_list: # FFT Tags analysis
write_message("FFTs: "+str(tuple_list), verbose=9)
docs = {} # docnames and their data
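# Each entry of docs maps a docname to a tuple:
# (doctype, newname, restriction, version,
#  [(url, format, description, comment, flags, timestamp), ...],
#  [document, version, version+format, format MoreInfo strings],
#  bibdoc_tmpid, bibdoc_tmpver)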
for fft in record_get_field_instances(record, 'FFT', ' ', ' '):
# First of all, we retrieve the potentially temporary identifiers...
# even if the rest fails, we should include them in the dictionary
version = _get_subfield_value(fft, 'v', '')
# checking if version is temporary... if so, filling a different variable
is_tmp_ver, bibdoc_tmpver = parse_identifier(version)
if is_tmp_ver:
version = None
else:
bibdoc_tmpver = None
if not version: #treating cases of empty string etc...
version = None
bibdoc_tmpid = field_get_subfield_values(fft, 'i')
if bibdoc_tmpid:
bibdoc_tmpid = bibdoc_tmpid[0]
else:
bibdoc_tmpid = None
is_tmp_id, bibdoc_tmpid = parse_identifier(bibdoc_tmpid)
if not is_tmp_id:
bibdoc_tmpid = None
# In the case of having temporary ids, we don't resolve them yet but signal that they have been used
# value -1 means that identifier has been declared but not assigned a value yet
if bibdoc_tmpid:
if bibdoc_tmpid in tmp_ids:
write_message("WARNING: the temporary identifier %s has been declared more than once. Ignoring the second occurance" % (bibdoc_tmpid, ))
else:
tmp_ids[bibdoc_tmpid] = -1
if bibdoc_tmpver:
if bibdoc_tmpver in tmp_vers:
write_message("WARNING: the temporary version identifier %s has been declared more than once. Ignoring the second occurance" % (bibdoc_tmpver, ))
else:
tmp_vers[bibdoc_tmpver] = -1
# Let's discover the type of the document
# This is a legacy field and no particular check will be
# enforced on it.
doctype = _get_subfield_value(fft, 't', 'Main') #Default is Main
# Let's discover the url.
url = field_get_subfield_values(fft, 'a')
if url:
url = url[0]
try:
check_valid_url(url)
except StandardError, e:
raise StandardError, "fft '%s' specifies in $a a location ('%s') with problems: %s" % (fft, url, e)
else:
url = ''
#TODO: a lot of code can be compactified using similar syntax ... should be more readable on the longer scale
# maybe right side expressions look a bit cryptic, but the elaborate_fft function would be much clearer
if mode == 'correct' and doctype != 'FIX-MARC':
arg2 = ""
else:
arg2 = KEEP_OLD_VALUE
description = _get_subfield_value(fft, 'd', arg2)
# Let's discover the description
# description = field_get_subfield_values(fft, 'd')
# if description != []:
# description = description[0]
# else:
# if mode == 'correct' and doctype != 'FIX-MARC':
## If the user require to correct, and do not specify
## a description this means she really want to
## modify the description.
# description = ''
# else:
# description = KEEP_OLD_VALUE
# Let's discover the desired docname to be created/altered
name = field_get_subfield_values(fft, 'n')
if name:
## Let's remove undesired extensions
name = file_strip_ext(name[0] + '.pdf')
else:
if url:
name = get_docname_from_url(url)
elif mode != 'correct' and doctype != 'FIX-MARC':
raise StandardError, "Warning: fft '%s' doesn't specifies either a location in $a or a docname in $n" % str(fft)
else:
continue
# Let's discover the desired new docname in case we want to change it
newname = field_get_subfield_values(fft, 'm')
if newname:
newname = file_strip_ext(newname[0] + '.pdf')
else:
newname = name
# Let's discover the desired format
docformat = field_get_subfield_values(fft, 'f')
if docformat:
docformat = normalize_format(docformat[0])
else:
if url:
docformat = guess_format_from_url(url)
else:
docformat = ""
# Let's discover the icon
icon = field_get_subfield_values(fft, 'x')
if icon != []:
icon = icon[0]
if icon != KEEP_OLD_VALUE:
try:
check_valid_url(icon)
except StandardError, e:
raise StandardError, "fft '%s' specifies in $x an icon ('%s') with problems: %s" % (fft, icon, e)
else:
icon = ''
# Let's discover the comment
comment = field_get_subfield_values(fft, 'z')
if comment != []:
comment = comment[0]
else:
if mode == 'correct' and doctype != 'FIX-MARC':
## See comment on description
comment = ''
else:
comment = KEEP_OLD_VALUE
# Let's discover the restriction
restriction = field_get_subfield_values(fft, 'r')
if restriction != []:
restriction = restriction[0]
else:
if mode == 'correct' and doctype != 'FIX-MARC':
## See comment on description
restriction = ''
else:
restriction = KEEP_OLD_VALUE
document_moreinfo = _get_subfield_value(fft, 'w')
version_moreinfo = _get_subfield_value(fft, 'p')
version_format_moreinfo = _get_subfield_value(fft, 'b')
format_moreinfo = _get_subfield_value(fft, 'u')
# Let's discover the timestamp of the file (if any)
timestamp = field_get_subfield_values(fft, 's')
if timestamp:
try:
timestamp = datetime(*(time.strptime(timestamp[0], "%Y-%m-%d %H:%M:%S")[:6]))
except ValueError:
write_message('Warning: The timestamp is not in a valid format, thus it will be ignored. The format should be YYYY-MM-DD HH:MM:SS')
timestamp = ''
else:
timestamp = ''
flags = field_get_subfield_values(fft, 'o')
for flag in flags:
if flag not in CFG_BIBDOCFILE_AVAILABLE_FLAGS:
raise StandardError, "fft '%s' specifies a non available flag: %s" % (fft, flag)
if docs.has_key(name): # new format considered
(doctype2, newname2, restriction2, version2, urls, dummybibdoc_moreinfos2, dummybibdoc_tmpid2, dummybibdoc_tmpver2 ) = docs[name]
if doctype2 != doctype:
raise StandardError, "fft '%s' specifies a different doctype from previous fft with docname '%s'" % (str(fft), name)
if newname2 != newname:
raise StandardError, "fft '%s' specifies a different newname from previous fft with docname '%s'" % (str(fft), name)
if restriction2 != restriction:
raise StandardError, "fft '%s' specifies a different restriction from previous fft with docname '%s'" % (str(fft), name)
if version2 != version:
raise StandardError, "fft '%s' specifies a different version than the previous fft with docname '%s'" % (str(fft), name)
for (dummyurl2, format2, dummydescription2, dummycomment2, dummyflags2, dummytimestamp2) in urls:
if docformat == format2:
raise StandardError, "fft '%s' specifies a second file '%s' with the same format '%s' from previous fft with docname '%s'" % (str(fft), url, docformat, name)
if url or docformat:
urls.append((url, docformat, description, comment, flags, timestamp))
if icon:
urls.append((icon, icon[len(file_strip_ext(icon)):] + ';icon', description, comment, flags, timestamp))
else:
if url or docformat:
docs[name] = (doctype, newname, restriction, version, [(url, docformat, description, comment, flags, timestamp)], [document_moreinfo, version_moreinfo, version_format_moreinfo, format_moreinfo], bibdoc_tmpid, bibdoc_tmpver)
if icon:
docs[name][4].append((icon, icon[len(file_strip_ext(icon)):] + ';icon', description, comment, flags, timestamp))
elif icon:
docs[name] = (doctype, newname, restriction, version, [(icon, icon[len(file_strip_ext(icon)):] + ';icon', description, comment, flags, timestamp)], [document_moreinfo, version_moreinfo, version_format_moreinfo, format_moreinfo], bibdoc_tmpid, bibdoc_tmpver)
else:
docs[name] = (doctype, newname, restriction, version, [], [document_moreinfo, version_moreinfo, version_format_moreinfo, format_moreinfo], bibdoc_tmpid, bibdoc_tmpver)
write_message('Result of FFT analysis:\n\tDocs: %s' % (docs,), verbose=9)
# Let's remove all FFT tags
record_delete_field(record, 'FFT', ' ', ' ')
# Preprocessed data elaboration
bibrecdocs = BibRecDocs(rec_id)
## Let's pre-download all the URLs to see if, in case of mode 'correct' or 'append'
## we can avoid creating a new revision.
for docname, (doctype, newname, restriction, version, urls, more_infos, bibdoc_tmpid, bibdoc_tmpver ) in docs.items():
downloaded_urls = []
try:
bibdoc = bibrecdocs.get_bibdoc(docname)
except InvenioBibDocFileError:
## A bibdoc with the given docname does not exist.
## So there is no chance we are going to revise an existing
## format with an identical file :-)
bibdoc = None
new_revision_needed = False
for url, docformat, description, comment, flags, timestamp in urls:
if url:
try:
downloaded_url = download_url(url, docformat)
write_message("%s saved into %s" % (url, downloaded_url), verbose=9)
except Exception, err:
write_message("Error in downloading '%s' because of: %s" % (url, err), stream=sys.stderr)
raise
if mode == 'correct' and bibdoc is not None and not new_revision_needed:
downloaded_urls.append((downloaded_url, docformat, description, comment, flags, timestamp))
if not bibrecdocs.check_file_exists(downloaded_url, docformat):
new_revision_needed = True
else:
write_message("WARNING: %s is already attached to bibdoc %s for recid %s" % (url, docname, rec_id), stream=sys.stderr)
elif mode == 'append' and bibdoc is not None:
if not bibrecdocs.check_file_exists(downloaded_url, docformat):
downloaded_urls.append((downloaded_url, docformat, description, comment, flags, timestamp))
else:
write_message("WARNING: %s is already attached to bibdoc %s for recid %s" % (url, docname, rec_id), stream=sys.stderr)
else:
downloaded_urls.append((downloaded_url, docformat, description, comment, flags, timestamp))
else:
downloaded_urls.append(('', docformat, description, comment, flags, timestamp))
if mode == 'correct' and bibdoc is not None and not new_revision_needed:
## Since we don't need a new revision (because all the files
## that are being uploaded already exist in the latest revision)
## we can simply remove the urls but keep the other information
write_message("No need to add a new revision for docname %s for recid %s" % (docname, rec_id), verbose=2)
docs[docname] = (doctype, newname, restriction, version, [('', docformat, description, comment, flags, timestamp) for (dummy, docformat, description, comment, flags, timestamp) in downloaded_urls], more_infos, bibdoc_tmpid, bibdoc_tmpver)
for downloaded_url, dummy, dummy, dummy, dummy, dummy in downloaded_urls:
## Let's free up some space :-)
if downloaded_url and os.path.exists(downloaded_url):
os.remove(downloaded_url)
else:
if downloaded_urls or mode != 'append':
docs[docname] = (doctype, newname, restriction, version, downloaded_urls, more_infos, bibdoc_tmpid, bibdoc_tmpver)
else:
## In case we are in append mode and there are no urls to append
## we discard the whole FFT
del docs[docname]
if mode == 'replace': # First we erase previous bibdocs
if not pretend:
for bibdoc in bibrecdocs.list_bibdocs():
bibdoc.delete()
bibrecdocs.build_bibdoc_list()
for docname, (doctype, newname, restriction, version, urls, more_infos, bibdoc_tmpid, bibdoc_tmpver) in docs.iteritems():
write_message("Elaborating olddocname: '%s', newdocname: '%s', doctype: '%s', restriction: '%s', urls: '%s', mode: '%s'" % (docname, newname, doctype, restriction, urls, mode), verbose=9)
if mode in ('insert', 'replace'): # new bibdocs, new docnames, new marc
if newname in bibrecdocs.get_bibdoc_names():
write_message("('%s', '%s') not inserted because docname already exists." % (newname, urls), stream=sys.stderr)
raise StandardError("('%s', '%s') not inserted because docname already exists." % (newname, urls), stream=sys.stderr)
try:
if not pretend:
bibdoc = bibrecdocs.add_bibdoc(doctype, newname)
bibdoc.set_status(restriction)
else:
bibdoc = None
except Exception, e:
write_message("('%s', '%s', '%s') not inserted because: '%s'." % (doctype, newname, urls, e), stream=sys.stderr)
raise e
for (url, docformat, description, comment, flags, timestamp) in urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp, pretend=pretend))
elif mode == 'replace_or_insert': # to be thought as correct_or_insert
for bibdoc in bibrecdocs.list_bibdocs():
brd = BibRecDocs(rec_id)
dn = brd.get_docname(bibdoc.id)
if dn == docname:
if doctype not in ('PURGE', 'DELETE', 'EXPUNGE', 'REVERT', 'FIX-ALL', 'FIX-MARC', 'DELETE-FILE'):
if newname != docname:
try:
if not pretend:
bibrecdocs.change_name(newname = newname, docid = bibdoc.id)
## Let's refresh the list of bibdocs.
bibrecdocs.build_bibdoc_list()
except StandardError, e:
write_message(e, stream=sys.stderr)
raise
found_bibdoc = False
for bibdoc in bibrecdocs.list_bibdocs():
brd = BibRecDocs(rec_id)
dn = brd.get_docname(bibdoc.id)
if dn == newname:
found_bibdoc = True
if doctype == 'PURGE':
if not pretend:
bibdoc.purge()
elif doctype == 'DELETE':
if not pretend:
bibdoc.delete()
elif doctype == 'EXPUNGE':
if not pretend:
bibdoc.expunge()
elif doctype == 'FIX-ALL':
if not pretend:
bibrecdocs.fix(docname)
elif doctype == 'FIX-MARC':
pass
elif doctype == 'DELETE-FILE':
if urls:
for (url, docformat, description, comment, flags, timestamp) in urls:
if not pretend:
bibdoc.delete_file(docformat, version)
elif doctype == 'REVERT':
try:
if not pretend:
bibdoc.revert(version)
except Exception, e:
write_message('(%s, %s) not correctly reverted: %s' % (newname, version, e), stream=sys.stderr)
raise
else:
if restriction != KEEP_OLD_VALUE:
if not pretend:
bibdoc.set_status(restriction)
# Since the docname already existed we have to first
# bump the version by pushing the first new file
# then pushing the other files.
if urls:
(first_url, first_format, first_description, first_comment, first_flags, first_timestamp) = urls[0]
other_urls = urls[1:]
assert(_add_new_version(bibdoc, first_url, first_format, docname, doctype, newname, first_description, first_comment, first_flags, first_timestamp, pretend=pretend))
for (url, docformat, description, comment, flags, timestamp) in other_urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp, pretend=pretend))
## Let's refresh the list of bibdocs.
bibrecdocs.build_bibdoc_list()
if not found_bibdoc:
if not pretend:
bibdoc = bibrecdocs.add_bibdoc(doctype, newname)
bibdoc.set_status(restriction)
for (url, docformat, description, comment, flags, timestamp) in urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp))
elif mode == 'correct':
for bibdoc in bibrecdocs.list_bibdocs():
brd = BibRecDocs(rec_id)
dn = brd.get_docname(bibdoc.id)
if dn == docname:
if doctype not in ('PURGE', 'DELETE', 'EXPUNGE', 'REVERT', 'FIX-ALL', 'FIX-MARC', 'DELETE-FILE'):
if newname != docname:
try:
if not pretend:
bibrecdocs.change_name(docid = bibdoc.id, newname=newname)
## Let's refresh the list of bibdocs.
bibrecdocs.build_bibdoc_list()
except StandardError, e:
write_message('Error in renaming %s to %s: %s' % (docname, newname, e), stream=sys.stderr)
raise
found_bibdoc = False
for bibdoc in bibrecdocs.list_bibdocs():
brd = BibRecDocs(rec_id)
dn = brd.get_docname(bibdoc.id)
if dn == newname:
found_bibdoc = True
if doctype == 'PURGE':
if not pretend:
bibdoc.purge()
elif doctype == 'DELETE':
if not pretend:
bibdoc.delete()
elif doctype == 'EXPUNGE':
if not pretend:
bibdoc.expunge()
elif doctype == 'FIX-ALL':
if not pretend:
bibrecdocs.fix(newname)
elif doctype == 'FIX-MARC':
pass
elif doctype == 'DELETE-FILE':
if urls:
for (url, docformat, description, comment, flags, timestamp) in urls:
if not pretend:
bibdoc.delete_file(docformat, version)
elif doctype == 'REVERT':
try:
if not pretend:
bibdoc.revert(version)
except Exception, e:
write_message('(%s, %s) not correctly reverted: %s' % (newname, version, e), stream=sys.stderr)
raise
else:
if restriction != KEEP_OLD_VALUE:
if not pretend:
bibdoc.set_status(restriction)
if doctype and doctype!= KEEP_OLD_VALUE:
if not pretend:
bibdoc.change_doctype(doctype)
if urls:
(first_url, first_format, first_description, first_comment, first_flags, first_timestamp) = urls[0]
other_urls = urls[1:]
assert(_add_new_version(bibdoc, first_url, first_format, docname, doctype, newname, first_description, first_comment, first_flags, first_timestamp, pretend=pretend))
for (url, docformat, description, comment, flags, timestamp) in other_urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp, pretend=pretend))
## Let's refresh the list of bibdocs.
bibrecdocs.build_bibdoc_list()
if not found_bibdoc:
if doctype in ('PURGE', 'DELETE', 'EXPUNGE', 'FIX-ALL', 'FIX-MARC', 'DELETE-FILE', 'REVERT'):
write_message("('%s', '%s', '%s') not performed because '%s' docname didn't existed." % (doctype, newname, urls, docname), stream=sys.stderr)
raise StandardError
else:
if not pretend:
bibdoc = bibrecdocs.add_bibdoc(doctype, newname)
bibdoc.set_status(restriction)
for (url, docformat, description, comment, flags, timestamp) in urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp))
elif mode == 'append':
try:
found_bibdoc = False
for bibdoc in bibrecdocs.list_bibdocs():
brd = BibRecDocs(rec_id)
dn = brd.get_docname(bibdoc.id)
if dn == docname:
found_bibdoc = True
for (url, docformat, description, comment, flags, timestamp) in urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp, pretend=pretend))
if not found_bibdoc:
try:
if not pretend:
bibdoc = bibrecdocs.add_bibdoc(doctype, docname)
bibdoc.set_status(restriction)
for (url, docformat, description, comment, flags, timestamp) in urls:
assert(_add_new_format(bibdoc, url, docformat, docname, doctype, newname, description, comment, flags, timestamp))
except Exception, e:
register_exception()
write_message("('%s', '%s', '%s') not appended because: '%s'." % (doctype, newname, urls, e), stream=sys.stderr)
raise
except:
register_exception()
raise
if not pretend:
_process_document_moreinfos(more_infos, newname, version, urls and urls[0][1], mode)
# resolving temporary version and identifier
brd = BibRecDocs(rec_id)
if bibdoc_tmpid:
if bibdoc_tmpid in tmp_ids and tmp_ids[bibdoc_tmpid] != -1:
write_message("WARNING: the temporary identifier %s has been declared more than once. Ignoring the second occurance" % (bibdoc_tmpid, ))
else:
tmp_ids[bibdoc_tmpid] = brd.get_docid(docname)
if bibdoc_tmpver:
if bibdoc_tmpver in tmp_vers and tmp_vers[bibdoc_tmpver] != -1:
write_message("WARNING: the temporary version identifier %s has been declared more than once. Ignoring the second occurance" % (bibdoc_tmpver, ))
else:
if version is None:
    tmp_vers[bibdoc_tmpver] = brd.get_bibdoc(docname).get_latest_version()
else:
    tmp_vers[bibdoc_tmpver] = version
return record
### Update functions
def update_bibrec_date(now, bibrec_id, insert_mode_p, pretend=False):
"""Update the date of the record in bibrec table """
if insert_mode_p:
query = """UPDATE bibrec SET creation_date=%s, modification_date=%s WHERE id=%s"""
params = (now, now, bibrec_id)
else:
query = """UPDATE bibrec SET modification_date=%s WHERE id=%s"""
params = (now, bibrec_id)
if not pretend:
run_sql(query, params)
write_message(" -Update record creation/modification date: DONE" , verbose=2)
def update_bibfmt_format(id_bibrec, format_value, format_name, modification_date=None, pretend=False):
"""Update the format in the table bibfmt"""
if modification_date is None:
modification_date = time.strftime('%Y-%m-%d %H:%M:%S')
else:
try:
time.strptime(modification_date, "%Y-%m-%d %H:%M:%S")
except ValueError:
modification_date = '1970-01-01 00:00:00'
# We check if the format is already in bibFmt
nb_found = find_record_format(id_bibrec, format_name)
if nb_found == 1:
# we are going to update the format
# compress the format_value value
pickled_format_value = compress(format_value)
# update the format:
query = """UPDATE LOW_PRIORITY bibfmt SET last_updated=%s, value=%s WHERE id_bibrec=%s AND format=%s"""
params = (modification_date, pickled_format_value, id_bibrec, format_name)
if not pretend:
row_id = run_sql(query, params)
if not pretend and row_id is None:
write_message(" Failed: Error during update_bibfmt_format function", verbose=1, stream=sys.stderr)
return 1
else:
write_message(" -Update the format %s in bibfmt: DONE" % format_name , verbose=2)
return 0
elif nb_found > 1:
write_message(" Failed: Same format %s found several time in bibfmt for the same record." % format_name, verbose=1, stream=sys.stderr)
return 1
else:
# Insert the format information in BibFMT
res = insert_bibfmt(id_bibrec, format_value, format_name, modification_date, pretend=pretend)
if res is None:
write_message(" Failed: Error during insert_bibfmt", verbose=1, stream=sys.stderr)
return 1
else:
write_message(" -Insert the format %s in bibfmt: DONE" % format_name , verbose=2)
return 0
def delete_bibfmt_format(id_bibrec, format_name, pretend=False):
"""
Delete format FORMAT_NAME from bibfmt table for record ID_BIBREC.
"""
if not pretend:
run_sql("DELETE LOW_PRIORITY FROM bibfmt WHERE id_bibrec=%s and format=%s", (id_bibrec, format_name))
return 0
def archive_marcxml_for_history(recID, pretend=False):
"""
Archive current MARCXML format of record RECID from BIBFMT table
into hstRECORD table. Useful to keep MARCXML history of records.
Return 0 if everything went fine. Return 1 otherwise.
"""
res = run_sql("SELECT id_bibrec, value, last_updated FROM bibfmt WHERE format='xm' AND id_bibrec=%s",
(recID,))
if res and not pretend:
run_sql("""INSERT INTO hstRECORD (id_bibrec, marcxml, job_id, job_name, job_person, job_date, job_details)
VALUES (%s,%s,%s,%s,%s,%s,%s)""",
(res[0][0], res[0][1], task_get_task_param('task_id', 0), 'bibupload', task_get_task_param('user', 'UNKNOWN'), res[0][2],
'mode: ' + task_get_option('mode', 'UNKNOWN') + '; file: ' + task_get_option('file_path', 'UNKNOWN') + '.'))
return 0
def update_database_with_metadata(record, rec_id, oai_rec_id="oai", affected_tags=None, pretend=False):
"""Update the database tables with the record and the record id given in parameter"""
# extract only those tags that have been affected.
# check happens at subfield level. This is to prevent overhead
# associated with inserting already existing field with given ind pair
write_message("update_database_with_metadata: record=%s, rec_id=%s, oai_rec_id=%s, affected_tags=%s" % (record, rec_id, oai_rec_id, affected_tags), verbose=9)
tmp_record = {}
if affected_tags:
for tag in record.keys():
if tag in affected_tags.keys():
write_message(" -Tag %s found to be modified.Setting up for update" % tag, verbose=9)
# initialize new list to hold affected field
new_data_tuple_list = []
for data_tuple in record[tag]:
ind1 = data_tuple[1]
ind2 = data_tuple[2]
if (ind1, ind2) in affected_tags[tag]:
write_message(" -Indicator pair (%s, %s) added to update list" % (ind1, ind2), verbose=9)
new_data_tuple_list.append(data_tuple)
tmp_record[tag] = new_data_tuple_list
write_message(lambda: " -Modified fields: \n%s" % record_xml_output(tmp_record), verbose=2)
else:
tmp_record = record
for tag in tmp_record.keys():
# check if tag is not a special one:
if tag not in CFG_BIBUPLOAD_SPECIAL_TAGS:
# for each tag there is a list of tuples representing datafields
tuple_list = tmp_record[tag]
# this list should contain the elements of a full tag [tag, ind1, ind2, subfield_code]
tag_list = []
tag_list.append(tag)
for single_tuple in tuple_list:
# these are the contents of a single tuple
subfield_list = single_tuple[0]
ind1 = single_tuple[1]
ind2 = single_tuple[2]
# append the ind's to the full tag
if ind1 == '' or ind1 == ' ':
tag_list.append('_')
else:
tag_list.append(ind1)
if ind2 == '' or ind2 == ' ':
tag_list.append('_')
else:
tag_list.append(ind2)
datafield_number = single_tuple[4]
if tag in CFG_BIBUPLOAD_SPECIAL_TAGS:
# nothing to do for special tags (FFT, BDR, BDM)
pass
elif tag in CFG_BIBUPLOAD_CONTROLFIELD_TAGS and tag != "001":
value = single_tuple[3]
# get the full tag
full_tag = ''.join(tag_list)
# update the tables
write_message(" insertion of the tag "+full_tag+" with the value "+value, verbose=9)
# insert the tag and value into bibxxx
(table_name, bibxxx_row_id) = insert_record_bibxxx(full_tag, value, pretend=pretend)
#print 'tname, bibrow', table_name, bibxxx_row_id;
if table_name is None or bibxxx_row_id is None:
write_message(" Failed: during insert_record_bibxxx", verbose=1, stream=sys.stderr)
# connect bibxxx and bibrec with the table bibrec_bibxxx
res = insert_record_bibrec_bibxxx(table_name, bibxxx_row_id, datafield_number, rec_id, pretend=pretend)
if res is None:
write_message(" Failed: during insert_record_bibrec_bibxxx", verbose=1, stream=sys.stderr)
else:
# get the tag and value from the content of each subfield
for subfield in subfield_list:
subtag = subfield[0]
value = subfield[1]
tag_list.append(subtag)
# get the full tag
full_tag = ''.join(tag_list)
# update the tables
write_message(" insertion of the tag "+full_tag+" with the value "+value, verbose=9)
# insert the tag and value into bibxxx
(table_name, bibxxx_row_id) = insert_record_bibxxx(full_tag, value, pretend=pretend)
if table_name is None or bibxxx_row_id is None:
write_message(" Failed: during insert_record_bibxxx", verbose=1, stream=sys.stderr)
# connect bibxxx and bibrec with the table bibrec_bibxxx
res = insert_record_bibrec_bibxxx(table_name, bibxxx_row_id, datafield_number, rec_id, pretend=pretend)
if res is None:
write_message(" Failed: during insert_record_bibrec_bibxxx", verbose=1, stream=sys.stderr)
# remove the subtag from the list
tag_list.pop()
tag_list.pop()
tag_list.pop()
tag_list.pop()
write_message(" -Update the database with metadata: DONE", verbose=2)
log_record_uploading(oai_rec_id, task_get_task_param('task_id', 0), rec_id, 'P', pretend=pretend)
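# Illustrative sketch (not part of the original module): the loop above assembles
# "full tags" for bibxxx by concatenating the 3-digit tag, the two indicators
# (blank indicators become '_') and, for datafields, the subfield code, e.g.
# tag='100', ind1=' ', ind2=' ', code='a' gives '100__a'. A minimal standalone
# helper (hypothetical, for illustration only) showing the same convention:
def _illustrative_full_tag(tag, ind1, ind2, subfield_code=''):
    """Return a bibxxx-style full tag such as '100__a' (illustrative only)."""
    ind1 = '_' if ind1 in ('', ' ') else ind1
    ind2 = '_' if ind2 in ('', ' ') else ind2
    return '%s%s%s%s' % (tag, ind1, ind2, subfield_code)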
def append_new_tag_to_old_record(record, rec_old):
"""Append new tags to a old record"""
def _append_tag(tag):
if tag in CFG_BIBUPLOAD_CONTROLFIELD_TAGS:
if tag == '001':
pass
else:
# if it is a controlfield, just access the value
for single_tuple in record[tag]:
controlfield_value = single_tuple[3]
# add the field to the old record
newfield_number = record_add_field(rec_old, tag,
controlfield_value=controlfield_value)
if newfield_number is None:
write_message(" Error when adding the field"+tag, verbose=1, stream=sys.stderr)
else:
# For each tag there is a list of tuples representing datafields
for single_tuple in record[tag]:
# We retrieve the information of the tag
subfield_list = single_tuple[0]
ind1 = single_tuple[1]
ind2 = single_tuple[2]
if '%s%s%s' % (tag, ind1 == ' ' and '_' or ind1, ind2 == ' ' and '_' or ind2) in (CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG[:5], CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG[:5]):
## We don't want to append the external identifier
## if it is already existing.
if record_find_field(rec_old, tag, single_tuple)[0] is not None:
write_message(" Not adding tag: %s ind1=%s ind2=%s subfields=%s: it's already there" % (tag, ind1, ind2, subfield_list), verbose=9)
continue
# We add the datafield to the old record
write_message(" Adding tag: %s ind1=%s ind2=%s subfields=%s" % (tag, ind1, ind2, subfield_list), verbose=9)
newfield_number = record_add_field(rec_old, tag, ind1,
ind2, subfields=subfield_list)
if newfield_number is None:
write_message(" Error when adding the field"+tag, verbose=1, stream=sys.stderr)
# Go through each tag in the appended record
for tag in record:
_append_tag(tag)
return rec_old
def copy_strong_tags_from_old_record(record, rec_old):
"""
Look for strong tags in RECORD and REC_OLD. If no strong tags are
found in RECORD, then copy them over from REC_OLD. This function
modifies RECORD structure on the spot.
"""
for strong_tag in CFG_BIBUPLOAD_STRONG_TAGS:
if not record_get_field_instances(record, strong_tag, strong_tag[3:4] or '%', strong_tag[4:5] or '%'):
strong_tag_old_field_instances = record_get_field_instances(rec_old, strong_tag)
if strong_tag_old_field_instances:
for strong_tag_old_field_instance in strong_tag_old_field_instances:
sf_vals, fi_ind1, fi_ind2, controlfield, dummy = strong_tag_old_field_instance
record_add_field(record, strong_tag, fi_ind1, fi_ind2, controlfield, sf_vals)
return
### Delete functions
def delete_tags(record, rec_old):
"""
Returns a record structure with all the fields in rec_old minus the
fields in record.
@param record: The record containing tags to delete.
@type record: record structure
@param rec_old: The original record.
@type rec_old: record structure
@return: The modified record.
@rtype: record structure
"""
returned_record = copy.deepcopy(rec_old)
for tag, fields in record.iteritems():
if tag in ('001', ):
continue
for field in fields:
local_position = record_find_field(returned_record, tag, field)[1]
if local_position is not None:
record_delete_field(returned_record, tag, field_position_local=local_position)
return returned_record
def delete_tags_to_correct(record, rec_old):
"""
Delete tags from REC_OLD which are also existing in RECORD. When
deleting, pay attention not only to tags, but also to indicators,
so that fields with the same tags but different indicators are not
deleted.
"""
## Some fields are controlled via provenance information.
## We should re-add saved fields at the end.
fields_to_readd = {}
for tag in CFG_BIBUPLOAD_CONTROLLED_PROVENANCE_TAGS:
if tag[:3] in record:
tmp_field_instances = record_get_field_instances(record, tag[:3], tag[3], tag[4]) ## Let's discover the provenance that will be updated
provenances_to_update = []
for instance in tmp_field_instances:
for code, value in instance[0]:
if code == tag[5]:
if value not in provenances_to_update:
provenances_to_update.append(value)
break
else:
## The provenance is not specified.
## let's add the special empty provenance.
if '' not in provenances_to_update:
provenances_to_update.append('')
potential_fields_to_readd = record_get_field_instances(rec_old, tag[:3], tag[3], tag[4]) ## Let's take all the field corresponding to tag
## Let's save apart all the fields that should be updated, but
## since they have a different provenance not mentioned in record
## they should be preserved.
fields = []
for sf_vals, ind1, ind2, dummy_cf, dummy_line in potential_fields_to_readd:
for code, value in sf_vals:
if code == tag[5]:
if value not in provenances_to_update:
fields.append(sf_vals)
break
else:
if '' not in provenances_to_update:
## Empty provenance, let's protect in any case
fields.append(sf_vals)
fields_to_readd[tag] = fields
# browse through all the tags from the MARCXML file:
for tag in record:
# check if the tag exists in the old record too:
if tag in rec_old and tag != '001':
# the tag does exist, so delete all record's tag+ind1+ind2 combinations from rec_old
for dummy_sf_vals, ind1, ind2, dummy_cf, dummyfield_number in record[tag]:
write_message(" Delete tag: " + tag + " ind1=" + ind1 + " ind2=" + ind2, verbose=9)
record_delete_field(rec_old, tag, ind1, ind2)
## Ok, we readd necessary fields!
for tag, fields in fields_to_readd.iteritems():
for sf_vals in fields:
write_message(" Adding tag: " + tag[:3] + " ind1=" + tag[3] + " ind2=" + tag[4] + " code=" + str(sf_vals), verbose=9)
record_add_field(rec_old, tag[:3], tag[3], tag[4], subfields=sf_vals)
def delete_bibrec_bibxxx(record, id_bibrec, affected_tags={}, pretend=False):
"""Delete the database record from the table bibxxx given in parameters"""
# we clear all the rows from bibrec_bibxxx from the old record
# clearing only those tags that have been modified.
write_message(lambda: "delete_bibrec_bibxxx(record=%s, id_bibrec=%s, affected_tags=%s)" % (record, id_bibrec, affected_tags), verbose=9)
for tag in affected_tags:
# sanity check with record keys just to make sure it's fine.
if tag not in CFG_BIBUPLOAD_SPECIAL_TAGS:
write_message("%s found in record"%tag, verbose=2)
# for each name construct the bibrec_bibxxx table name
table_name = 'bib'+tag[0:2]+'x'
bibrec_table = 'bibrec_'+table_name
# delete all the records with proper id_bibrec. Indicators matter for individual affected tags
tmp_ind_1 = ''
tmp_ind_2 = ''
# construct exact tag value using indicators
for ind_pair in affected_tags[tag]:
if ind_pair[0] == ' ':
tmp_ind_1 = '_'
else:
tmp_ind_1 = ind_pair[0]
if ind_pair[1] == ' ':
tmp_ind_2 = '_'
else:
tmp_ind_2 = ind_pair[1]
# need to escape in case of underscore so that mysql treats it as a literal char
tag_val = tag+"\\"+tmp_ind_1+"\\"+tmp_ind_2 + '%'
query = """DELETE br.* FROM `%s` br,`%s` b where br.id_bibrec=%%s and br.id_bibxxx=b.id and b.tag like %%s""" % (bibrec_table, table_name)
params = (id_bibrec, tag_val)
write_message(query % params, verbose=9)
if not pretend:
run_sql(query, params)
else:
write_message("%s not found"%tag, verbose=2)
def main():
"""Main that construct all the bibtask."""
task_init(authorization_action='runbibupload',
authorization_msg="BibUpload Task Submission",
description="""Receive MARC XML file and update appropriate database
tables according to options.
Examples:
$ bibupload -i input.xml
""",
help_specific_usage=""" -a, --append\t\tnew fields are appended to the existing record
-c, --correct\t\tfields are replaced by the new ones in the existing record, except
\t\t\twhen overridden by CFG_BIBUPLOAD_CONTROLLED_PROVENANCE_TAGS
-i, --insert\t\tinsert the new record in the database
-r, --replace\t\tthe existing record is entirely replaced by the new one,
\t\t\texcept for fields in CFG_BIBUPLOAD_STRONG_TAGS
-d, --delete\t\tspecified fields are deleted in existing record
-n, --notimechange\tdo not change record last modification date when updating
-o, --holdingpen\tInsert record into holding pen instead of the normal database
--pretend\t\tdo not really insert/append/correct/replace the input file
--force\t\twhen --replace, use provided 001 tag values, even if the matching
\t\t\trecord does not exist (thus allocating it on-the-fly)
--callback-url\tSend via a POST request a JSON-serialized answer (see admin guide), in
\t\t\torder to provide a feedback to an external service about the outcome of the operation.
--nonce\t\twhen used together with --callback add the nonce value in the JSON message.
--special-treatment=MODE\tif "oracle" is specified, when used together with --callback_url,
\t\t\tPOST an application/x-www-form-urlencoded request where the JSON message is encoded
\t\t\tinside a form field called "results".
""",
version=__revision__,
specific_params=("ircazdnoS:",
[
"insert",
"replace",
"correct",
"append",
"reference",
"delete",
"notimechange",
"holdingpen",
"pretend",
"force",
"callback-url=",
"nonce=",
"special-treatment=",
"stage=",
]),
task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter,
task_run_fnc=task_run_core)
def task_submit_elaborate_specific_parameter(key, value, opts, args): # pylint: disable=W0613
""" Given the string key it checks it's meaning, eventually using the
value. Usually it fills some key in the options dict.
It must return True if it has elaborated the key, False, if it doesn't
know that key.
eg:
if key in ['-n', '--number']:
task_set_option('number', value)
return True
return False
"""
# No time change option
if key in ("-n", "--notimechange"):
task_set_option('notimechange', 1)
# Insert mode option
elif key in ("-i", "--insert"):
if task_get_option('mode') == 'replace':
# if also replace found, then set to replace_or_insert
task_set_option('mode', 'replace_or_insert')
else:
task_set_option('mode', 'insert')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
# Replace mode option
elif key in ("-r", "--replace"):
if task_get_option('mode') == 'insert':
# if also insert found, then set to replace_or_insert
task_set_option('mode', 'replace_or_insert')
else:
task_set_option('mode', 'replace')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
# Holding pen mode option
elif key in ("-o", "--holdingpen"):
write_message("Holding pen mode", verbose=3)
task_set_option('mode', 'holdingpen')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
# Correct mode option
elif key in ("-c", "--correct"):
task_set_option('mode', 'correct')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
# Append mode option
elif key in ("-a", "--append"):
task_set_option('mode', 'append')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
# Deprecated reference mode option (now correct)
elif key in ("-z", "--reference"):
task_set_option('mode', 'correct')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
elif key in ("-d", "--delete"):
task_set_option('mode', 'delete')
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
elif key in ("--pretend",):
task_set_option('pretend', True)
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
elif key in ("--force",):
task_set_option('force', True)
fix_argv_paths([args[0]])
task_set_option('file_path', os.path.abspath(args[0]))
elif key in ("--callback-url", ):
task_set_option('callback_url', value)
elif key in ("--nonce", ):
task_set_option('nonce', value)
elif key in ("--special-treatment", ):
if value.lower() in CFG_BIBUPLOAD_ALLOWED_SPECIAL_TREATMENTS:
if value.lower() == 'oracle':
task_set_option('oracle_friendly', True)
else:
print >> sys.stderr, """The specified value is not in the list of allowed special treatments codes: %s""" % CFG_BIBUPLOAD_ALLOWED_SPECIAL_TREATMENTS
return False
elif key in ("-S", "--stage"):
print >> sys.stderr, """WARNING: the --stage parameter is deprecated and ignored."""
else:
return False
return True
def task_submit_check_options():
""" Reimplement this method for having the possibility to check options
before submitting the task, in order for example to provide default
values. It must return False if there are errors in the options.
"""
if task_get_option('mode') is None:
write_message("Please specify at least one update/insert mode!")
return False
if task_get_option('file_path') is None:
write_message("Missing filename! -h for help.")
return False
return True
def writing_rights_p():
"""Return True in case bibupload has the proper rights to write in the
fulltext file folder."""
if _WRITING_RIGHTS is not None:
return _WRITING_RIGHTS
try:
if not os.path.exists(CFG_BIBDOCFILE_FILEDIR):
os.makedirs(CFG_BIBDOCFILE_FILEDIR)
fd, filename = tempfile.mkstemp(suffix='.txt', prefix='test', dir=CFG_BIBDOCFILE_FILEDIR)
test = os.fdopen(fd, 'w')
test.write('TEST')
test.close()
if open(filename).read() != 'TEST':
raise IOError("Can not successfully write and readback %s" % filename)
os.remove(filename)
except:
register_exception(alert_admin=True)
return False
return True
def post_results_to_callback_url(results, callback_url):
write_message("Sending feedback to %s" % callback_url)
if not CFG_JSON_AVAILABLE:
from warnings import warn
warn("--callback-url used but simplejson/json not available")
return
json_results = json.dumps(results)
write_message("Message to send: %s" % json_results, verbose=9)
## <scheme>://<netloc>/<path>?<query>#<fragment>
scheme, dummynetloc, dummypath, dummyquery, dummyfragment = urlparse.urlsplit(callback_url)
## See: http://stackoverflow.com/questions/111945/is-there-any-way-to-do-http-put-in-python
if scheme == 'http':
opener = urllib2.build_opener(urllib2.HTTPHandler)
elif scheme == 'https':
opener = urllib2.build_opener(urllib2.HTTPSHandler)
else:
raise ValueError("Scheme not handled %s for callback_url %s" % (scheme, callback_url))
if task_get_option('oracle_friendly'):
write_message("Oracle friendly mode requested", verbose=9)
request = urllib2.Request(callback_url, data=urllib.urlencode({'results': json_results}))
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
else:
request = urllib2.Request(callback_url, data=json_results)
request.add_header('Content-Type', 'application/json')
request.add_header('User-Agent', make_user_agent_string('BibUpload'))
write_message("Headers about to be sent: %s" % request.headers, verbose=9)
write_message("Data about to be sent: %s" % request.data, verbose=9)
res = opener.open(request)
msg = res.read()
write_message("Result of posting the feedback: %s %s" % (res.code, res.msg), verbose=9)
write_message("Returned message is: %s" % msg, verbose=9)
return res
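# Illustrative sketch (not part of the original module): the `results` argument
# posted above is the structure assembled in bibupload_records()/task_run_core(),
# i.e. a 'results' list with one entry per record plus an optional 'nonce'. The
# concrete values below are invented purely for illustration.
def _illustrative_callback_payload():
    """Return an example of the JSON-serializable structure sent to callback_url."""
    return {
        'nonce': '12345',  # only present when --nonce was given on the command line
        'results': [
            {'recid': 123, 'success': True,
             'marcxml': '<record>...</record>',
             'url': 'http://example.org/record/123'},
            {'recid': None, 'success': False,
             'error_message': 'Some error occurred'},
        ],
    }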
def bibupload_records(records, opt_mode=None, opt_notimechange=0,
pretend=False, callback_url=None, results_for_callback=None):
"""perform the task of uploading a set of records
returns list of (error_code, recid) tuples for separate records
"""
#Dictionaries maintaining temporary identifiers
# Structure: identifier -> number
tmp_ids = {}
tmp_vers = {}
results = []
# The first phase -> assigning meaning to temporary identifiers
if opt_mode == 'reference':
## NOTE: reference mode has been deprecated in favour of 'correct'
opt_mode = 'correct'
record = None
for record in records:
record_id = record_extract_oai_id(record)
task_sleep_now_if_required(can_stop_too=True)
if opt_mode == "holdingpen":
#inserting into the holding pen
write_message("Inserting into holding pen", verbose=3)
insert_record_into_holding_pen(record, record_id)
else:
write_message("Inserting into main database", verbose=3)
error = bibupload(
record,
opt_mode = opt_mode,
opt_notimechange = opt_notimechange,
oai_rec_id = record_id,
pretend = pretend,
tmp_ids = tmp_ids,
tmp_vers = tmp_vers)
results.append(error)
if error[0] == 1:
if record:
write_message(lambda: record_xml_output(record),
stream=sys.stderr)
else:
write_message("Record could not have been parsed",
stream=sys.stderr)
stat['nb_errors'] += 1
if callback_url:
results_for_callback['results'].append({'recid': error[1], 'success': False, 'error_message': error[2]})
elif error[0] == 2:
if record:
write_message(lambda: record_xml_output(record),
stream=sys.stderr)
else:
write_message("Record could not have been parsed",
stream=sys.stderr)
stat['nb_holdingpen'] += 1
if callback_url:
results_for_callback['results'].append({'recid': error[1], 'success': False, 'error_message': error[2]})
elif error[0] == 0:
if callback_url:
from invenio.search_engine import print_record
results_for_callback['results'].append({'recid': error[1], 'success': True, "marcxml": print_record(error[1], 'xm'), 'url': "%s/%s/%s" % (CFG_SITE_URL, CFG_SITE_RECORD, error[1])})
else:
if callback_url:
results_for_callback['results'].append({'recid': error[1], 'success': False, 'error_message': error[2]})
# stat is a global variable
task_update_progress("Done %d out of %d." % \
(stat['nb_records_inserted'] + \
stat['nb_records_updated'],
stat['nb_records_to_upload']))
# Second phase -> Now we can process all entries where temporary identifiers might appear (BDR, BDM)
write_message("Identifiers table after processing: %s versions: %s" % (str(tmp_ids), str(tmp_vers)))
write_message("Uploading BDR and BDM fields")
if opt_mode != "holdingpen":
for record in records:
record_id = retrieve_rec_id(record, opt_mode, pretend=pretend, post_phase = True)
bibupload_post_phase(record,
rec_id = record_id,
mode = opt_mode,
pretend = pretend,
tmp_ids = tmp_ids,
tmp_vers = tmp_vers)
return results
def task_run_core():
""" Reimplement to add the body of the task."""
write_message("Input file '%s', input mode '%s'." %
(task_get_option('file_path'), task_get_option('mode')))
write_message("STAGE 0:", verbose=2)
if task_get_option('file_path') is not None:
write_message("start preocessing", verbose=3)
task_update_progress("Reading XML input")
recs = xml_marc_to_records(open_marc_file(task_get_option('file_path')))
stat['nb_records_to_upload'] = len(recs)
write_message(" -Open XML marc: DONE", verbose=2)
task_sleep_now_if_required(can_stop_too=True)
write_message("Entering records loop", verbose=3)
callback_url = task_get_option('callback_url')
results_for_callback = {'results': []}
if recs is not None:
# We process the records one by one
bibupload_records(records=recs, opt_mode=task_get_option('mode'),
opt_notimechange=task_get_option('notimechange'),
pretend=task_get_option('pretend'),
callback_url=callback_url,
results_for_callback=results_for_callback)
else:
write_message(" Error bibupload failed: No record found",
verbose=1, stream=sys.stderr)
callback_url = task_get_option("callback_url")
if callback_url:
nonce = task_get_option("nonce")
if nonce:
results_for_callback["nonce"] = nonce
post_results_to_callback_url(results_for_callback, callback_url)
if task_get_task_param('verbose') >= 1:
# Print out the statistics
print_out_bibupload_statistics()
# Check if there were errors
return not stat['nb_errors'] >= 1
def log_record_uploading(oai_rec_id, task_id, bibrec_id, insertion_db, pretend=False):
if oai_rec_id != "" and oai_rec_id != None:
query = """UPDATE oaiHARVESTLOG SET date_inserted=NOW(), inserted_to_db=%s, id_bibrec=%s WHERE oai_id = %s AND bibupload_task_id = %s ORDER BY date_harvested LIMIT 1"""
if not pretend:
run_sql(query, (str(insertion_db), str(bibrec_id), str(oai_rec_id), str(task_id), ))
if __name__ == "__main__":
main()
|
gpl-2.0
| 413,680,829,889,584,100
| 48.092281
| 287
| 0.555743
| false
| 4.103105
| false
| false
| false
|
kivy/plyer
|
plyer/facades/wifi.py
|
1
|
4169
|
'''
Wifi Facade.
=============
The :class:`Wifi` facade provides access to the wifi of your mobile/desktop
devices.
It currently supports `connecting`, `disconnecting`, `scanning`, `getting
available wifi network list` and `getting network information`.
Simple examples
---------------
To enable/ turn on wifi scanning::
>>> from plyer import wifi
>>> wifi.start_scanning()
Once the wifi is enabled/turned on, this command starts to scan
all the nearby available wifi networks.
To get network info::
>>> from plyer import wifi
>>> wifi.start_scanning()
>>> wifi.get_network_info(name)
Returns network details of the network whose name/ssid is provided in the
`name` parameter.
To connect to a network::
>>> from plyer import wifi
>>> wifi.start_scanning()
>>> wifi.connect(network, parameters)
This connects to the network whose name/ssid is provided under the `network`
parameter, along with the other parameters necessary for the connection,
which vary from platform to platform.
Please visit the following files for more details about the requirements of
the `parameters` argument in the `connect` method:
plyer/platforms/win/wifi.py
plyer/platforms/macosx/wifi.py
plyer/platforms/linux/wifi.py
To disconnect from wifi::
>>> from plyer import wifi
>>> wifi.disconnect()
This disconnects your device from any wifi network.
To get available wifi networks::
>>> from plyer import wifi
>>> wifi.start_scanning()
>>> wifi.get_available_wifi()
This returns all the available wifi networks near the device.
Supported Platforms
-------------------
Windows, OS X, Linux
Ex: 6
----------
from plyer import wifi
wifi.enable()
This enables wifi device.
Ex: 7
----------
from plyer import wifi
wifi.disable()
This disables the wifi device.
'''
class Wifi:
'''
Wifi Facade.
'''
def is_enabled(self):
'''
Return enabled status of WiFi hardware.
'''
return self._is_enabled()
def is_connected(self, interface=None):
'''
Return connection state of WiFi interface.
.. versionadded:: 1.4.0
'''
return self._is_connected(interface=interface)
@property
def interfaces(self):
'''
List all available WiFi interfaces.
.. versionadded:: 1.4.0
'''
raise NotImplementedError()
def start_scanning(self, interface=None):
'''
Turn on scanning.
'''
return self._start_scanning(interface=interface)
def get_network_info(self, name):
'''
Return a dictionary of specified network.
'''
return self._get_network_info(name=name)
def get_available_wifi(self):
'''
Returns a list of all the available wifi networks.
'''
return self._get_available_wifi()
def connect(self, network, parameters, interface=None):
'''
Method to connect to some network.
'''
self._connect(
network=network,
parameters=parameters,
interface=interface
)
def disconnect(self, interface=None):
'''
To disconnect from some network.
'''
self._disconnect(interface=interface)
def enable(self):
'''
Wifi interface power state is set to "ON".
'''
self._enable()
def disable(self):
'''
Wifi interface power state is set to "OFF".
'''
self._disable()
# private
def _is_enabled(self):
raise NotImplementedError()
def _is_connected(self, interface=None):
raise NotImplementedError()
def _start_scanning(self, interface=None):
raise NotImplementedError()
def _get_network_info(self, **kwargs):
raise NotImplementedError()
def _get_available_wifi(self):
raise NotImplementedError()
def _connect(self, **kwargs):
raise NotImplementedError()
def _disconnect(self, interface=None):
raise NotImplementedError()
def _enable(self):
raise NotImplementedError()
def _disable(self):
raise NotImplementedError()
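# Illustrative sketch (not part of the original module): platform backends are
# expected to subclass Wifi and implement the private '_'-prefixed hooks that the
# public methods above delegate to. The dummy backend below is invented for
# illustration only and pretends a single network called 'example-ssid' is in range;
# hooks that are not overridden still raise NotImplementedError.
class _IllustrativeDummyWifi(Wifi):
    '''
    Fake backend showing which hooks a platform implementation must provide.
    '''
    def _is_enabled(self):
        return True
    def _start_scanning(self, interface=None):
        return None
    def _get_available_wifi(self):
        return ['example-ssid']
    def _get_network_info(self, name):
        # The keys returned by real backends differ per platform; these are examples.
        return {'ssid': name, 'signal': -50}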
|
mit
| -8,623,485,429,783,333,000
| 21.294118
| 74
| 0.623411
| false
| 4.284687
| false
| false
| false
|
jakevdp/lombscargle
|
lombscargle/implementations/utils.py
|
1
|
5934
|
from __future__ import print_function, division
import numpy as np
try:
from scipy import special as scipy_special
except ImportError:
scipy_special = None
# Precomputed factorials
FACTORIALS = [1, 1, 2, 6, 24, 120, 720, 5040, 40320, 362880, 3628800,
39916800, 479001600, 6227020800, 87178291200, 1307674368000]
def factorial(N):
"""Compute the factorial of N.
If N <= 16, use a fast lookup table; otherwise use scipy.special.factorial
"""
if N < len(FACTORIALS):
return FACTORIALS[N]
elif scipy_special is None:
raise ValueError("need scipy for computing larger factorials")
else:
return int(scipy_special.factorial(N))
def bitceil(N):
"""
Find the bit (i.e. power of 2) immediately greater than or equal to N
Note: this works for numbers up to 2 ** 64.
Roughly equivalent to int(2 ** np.ceil(np.log2(N)))
"""
# Note: for Python 2.7 and 3.x, this is faster:
# return 1 << int(N - 1).bit_length()
N = int(N) - 1
for i in [1, 2, 4, 8, 16, 32]:
N |= N >> i
return N + 1
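# Illustrative self-check (not part of the original module): bitceil rounds up to
# the next power of two, so the following assertions are expected to hold.
def _bitceil_selfcheck():
    """Spot-check bitceil on a few hand-verified values (illustrative only)."""
    assert bitceil(1) == 1
    assert bitceil(5) == 8
    assert bitceil(8) == 8
    assert bitceil(9) == 16
    return True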
def extirpolate(x, y, N=None, M=4):
"""
Extirpolate the values (x, y) onto an integer grid range(N),
using lagrange polynomial weights on the M nearest points.
Parameters
----------
x : array_like
array of abscissas
y : array_like
array of ordinates
N : int
number of integer bins to use. For best performance, N should be larger
than the maximum of x
M : int
number of adjoining points on which to extirpolate.
Returns
-------
yN : ndarray
N extirpolated values associated with range(N)
Example
-------
>>> rng = np.random.RandomState(0)
>>> x = 100 * rng.rand(20)
>>> y = np.sin(x)
>>> y_hat = extirpolate(x, y)
>>> x_hat = np.arange(len(y_hat))
>>> f = lambda x: np.sin(x / 10)
>>> np.allclose(np.sum(y * f(x)), np.sum(y_hat * f(x_hat)))
True
Notes
-----
This code is based on the C implementation of spread() presented in
Numerical Recipes in C, Second Edition (Press et al. 1989; p.583).
"""
if not hasattr(np.ufunc, 'at'):
raise NotImplementedError("extirpolate functionality requires numpy "
"version 1.8 or newer")
x, y = map(np.ravel, np.broadcast_arrays(x, y))
if N is None:
N = int(np.max(x) + 0.5 * M + 1)
# Now use Lagrange polynomial weights to populate the results array;
# This is an efficient recursive implementation (See Press et al. 1989)
result = np.zeros(N, dtype=y.dtype)
# first take care of the easy cases where x is an integer
integers = (x % 1 == 0)
np.add.at(result, x[integers].astype(int), y[integers])
x, y = x[~integers], y[~integers]
# For each remaining x, find the index describing the extirpolation range.
# i.e. ilo[i] < x[i] < ilo[i] + M with x[i] in the center,
# adjusted so that the limits are within the range 0...N
ilo = np.clip((x - M // 2).astype(int), 0, N - M)
numerator = y * np.prod(x - ilo - np.arange(M)[:, np.newaxis], 0)
denominator = factorial(M - 1)
for j in range(M):
if j > 0:
denominator *= j / (j - M)
ind = ilo + (M - 1 - j)
np.add.at(result, ind, numerator / (denominator * (x - ind)))
return result
def trig_sum(t, h, df, N, f0=0, freq_factor=1,
oversampling=5, use_fft=True, Mfft=4):
"""Compute (approximate) trigonometric sums for a number of frequencies
This routine computes weighted sine and cosine sums:
S_j = sum_i { h_i * sin(2 pi * f_j * t_i) }
C_j = sum_i { h_i * cos(2 pi * f_j * t_i) }
Where f_j = freq_factor * (f0 + j * df) for the values j in 1 ... N.
The sums can be computed either by a brute force O[N^2] method, or
by an FFT-based O[Nlog(N)] method.
Parameters
----------
t : array_like
array of input times
h : array_like
array weights for the sum
df : float
frequency spacing
N : int
number of frequency bins to return
f0 : float (optional, default=0)
The low frequency to use
freq_factor : float (optional, default=1)
Factor which multiplies the frequency
use_fft : bool
if True, use the approximate FFT algorithm to compute the result.
This uses the FFT with Press & Rybicki's Lagrangian extirpolation.
oversampling : int (default = 5)
oversampling factor for the approximation; roughly the number of
time samples across the highest-frequency sinusoid. This parameter
contains the tradeoff between accuracy and speed. Not referenced
if use_fft is False.
Mfft : int
The number of adjacent points to use in the FFT approximation.
Not referenced if use_fft is False.
Returns
-------
S, C : ndarrays
summation arrays for frequencies f = df * np.arange(1, N + 1)
"""
df *= freq_factor
f0 *= freq_factor
assert df > 0
t, h = map(np.ravel, np.broadcast_arrays(t, h))
if use_fft:
Mfft = int(Mfft)
assert(Mfft > 0)
# required size of fft is the power of 2 above the oversampling rate
Nfft = bitceil(N * oversampling)
t0 = t.min()
if f0 > 0:
h = h * np.exp(2j * np.pi * f0 * (t - t0))
tnorm = ((t - t0) * Nfft * df) % Nfft
grid = extirpolate(tnorm, h, Nfft, Mfft)
fftgrid = np.fft.ifft(grid)
if t0 != 0:
f = f0 + df * np.arange(Nfft)
fftgrid *= np.exp(2j * np.pi * t0 * f)
fftgrid = fftgrid[:N]
C = Nfft * fftgrid.real
S = Nfft * fftgrid.imag
else:
f = f0 + df * np.arange(N)
C = np.dot(h, np.cos(2 * np.pi * f * t[:, np.newaxis]))
S = np.dot(h, np.sin(2 * np.pi * f * t[:, np.newaxis]))
return S, C
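# Illustrative sketch (not part of the original module): the FFT/extirpolation path
# of trig_sum approximates the direct O(N^2) sums, so the two branches should agree
# closely on small random data. The parameter values below are arbitrary.
def _trig_sum_consistency_demo():
    """Return the maximum deviation between the FFT-based and direct summations."""
    rng = np.random.RandomState(42)
    t = 100 * rng.rand(50)
    h = np.sin(t)
    kwargs = dict(df=0.01, N=200, f0=0.1)
    S_fft, C_fft = trig_sum(t, h, use_fft=True, oversampling=10, **kwargs)
    S_dir, C_dir = trig_sum(t, h, use_fft=False, **kwargs)
    return max(np.max(np.abs(S_fft - S_dir)), np.max(np.abs(C_fft - C_dir)))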
|
bsd-3-clause
| 7,082,537,745,301,977,000
| 30.903226
| 79
| 0.582406
| false
| 3.333708
| false
| false
| false
|
17zuoye/luigi
|
luigi/contrib/hdfs/snakebite_client.py
|
1
|
10933
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A luigi file system client that wraps around snakebite
Originally written by Alan Brenner <alan@magnetic.com> github.com/alanbbr
"""
from luigi.contrib.hdfs import config as hdfs_config
from luigi.contrib.hdfs import error as hdfs_error
from luigi.contrib.hdfs import hadoopcli_clients as hdfs_hadoopcli_clients
from luigi import six
import luigi.contrib.target
import logging
import datetime
import os
logger = logging.getLogger('luigi-interface')
class SnakebiteHdfsClient(hdfs_hadoopcli_clients.HdfsClient):
"""
A hdfs client using snakebite. Since Snakebite has a python API, it'll be
about 100 times faster than the hadoop cli client, which does shell out to
a java program on each file system operation.
"""
def __init__(self):
super(SnakebiteHdfsClient, self).__init__()
self._bite = None
self.pid = -1
@staticmethod
def list_path(path):
if isinstance(path, list) or isinstance(path, tuple):
return path
if isinstance(path, str) or isinstance(path, unicode):
return [path, ]
return [str(path), ]
def get_bite(self):
"""
If Luigi has forked, we have a different PID, and need to reconnect.
"""
config = hdfs_config.hdfs()
if self.pid != os.getpid() or not self._bite:
client_kwargs = dict(filter(
lambda k_v: k_v[1] is not None and k_v[1] != '', six.iteritems({
'hadoop_version': config.client_version,
'effective_user': config.effective_user,
})
))
if config.snakebite_autoconfig:
"""
This is fully backwards compatible with the vanilla Client and can be used for a non HA cluster as well.
This client tries to read ``${HADOOP_PATH}/conf/hdfs-site.xml`` to get the address of the namenode.
The behaviour is the same as Client.
"""
from snakebite.client import AutoConfigClient
self._bite = AutoConfigClient(**client_kwargs)
else:
from snakebite.client import Client
self._bite = Client(config.namenode_host, config.namenode_port, **client_kwargs)
return self._bite
def exists(self, path):
"""
Use snakebite.test to check file existence.
:param path: path to test
:type path: string
:return: boolean, True if path exists in HDFS
"""
try:
return self.get_bite().test(path, exists=True)
except Exception as err: # IGNORE:broad-except
raise hdfs_error.HDFSCliError("snakebite.test", -1, str(err), repr(err))
def rename(self, path, dest):
"""
Use snakebite.rename, if available.
:param path: source file(s)
:type path: either a string or sequence of strings
:param dest: destination file (single input) or directory (multiple)
:type dest: string
:return: list of renamed items
"""
parts = dest.rstrip('/').split('/')
if len(parts) > 1:
dir_path = '/'.join(parts[0:-1])
if not self.exists(dir_path):
self.mkdir(dir_path, parents=True)
return list(self.get_bite().rename(self.list_path(path), dest))
def rename_dont_move(self, path, dest):
"""
Use snakebite.rename_dont_move, if available.
:param path: source path (single input)
:type path: string
:param dest: destination path
:type dest: string
:return: True if succeeded
:raises: snakebite.errors.FileAlreadyExistsException
"""
from snakebite.errors import FileAlreadyExistsException
try:
self.get_bite().rename2(path, dest, overwriteDest=False)
return True
except FileAlreadyExistsException:
return False
def remove(self, path, recursive=True, skip_trash=False):
"""
Use snakebite.delete, if available.
:param path: delete-able file(s) or directory(ies)
:type path: either a string or a sequence of strings
:param recursive: delete directory trees like \*nix: rm -r
:type recursive: boolean, default is True
:param skip_trash: do or don't move deleted items into the trash first
:type skip_trash: boolean, default is False (use trash)
:return: list of deleted items
"""
return list(self.get_bite().delete(self.list_path(path), recurse=recursive))
def chmod(self, path, permissions, recursive=False):
"""
Use snakebite.chmod, if available.
:param path: update-able file(s)
:type path: either a string or sequence of strings
:param permissions: \*nix style permission number
:type permissions: octal
:param recursive: change just listed entry(ies) or all in directories
:type recursive: boolean, default is False
:return: list of all changed items
"""
if type(permissions) == str:
permissions = int(permissions, 8)
return list(self.get_bite().chmod(self.list_path(path),
permissions, recursive))
def chown(self, path, owner, group, recursive=False):
"""
Use snakebite.chown/chgrp, if available.
One of owner or group must be set. Just setting group calls chgrp.
:param path: update-able file(s)
:type path: either a string or sequence of strings
:param owner: new owner, can be blank
:type owner: string
:param group: new group, can be blank
:type group: string
:param recursive: change just listed entry(ies) or all in directories
:type recursive: boolean, default is False
:return: list of all changed items
"""
bite = self.get_bite()
if owner:
if group:
return all(bite.chown(self.list_path(path), "%s:%s" % (owner, group),
recurse=recursive))
return all(bite.chown(self.list_path(path), owner, recurse=recursive))
return list(bite.chgrp(self.list_path(path), group, recurse=recursive))
def count(self, path):
"""
Use snakebite.count, if available.
:param path: directory to count the contents of
:type path: string
:return: dictionary with content_size, dir_count and file_count keys
"""
try:
res = self.get_bite().count(self.list_path(path)).next()
dir_count = res['directoryCount']
file_count = res['fileCount']
content_size = res['spaceConsumed']
except StopIteration:
dir_count = file_count = content_size = 0
return {'content_size': content_size, 'dir_count': dir_count,
'file_count': file_count}
def get(self, path, local_destination):
"""
Use snakebite.copyToLocal, if available.
:param path: HDFS file
:type path: string
:param local_destination: path on the system running Luigi
:type local_destination: string
"""
return list(self.get_bite().copyToLocal(self.list_path(path),
local_destination))
def mkdir(self, path, parents=True, mode=0o755, raise_if_exists=False):
"""
Use snakebite.mkdir, if available.
Snakebite's mkdir method allows control over full path creation, so by
default, tell it to build a full path to work like ``hadoop fs -mkdir``.
:param path: HDFS path to create
:type path: string
:param parents: create any missing parent directories
:type parents: boolean, default is True
:param mode: \*nix style owner/group/other permissions
:type mode: octal, default 0755
"""
result = list(self.get_bite().mkdir(self.list_path(path),
create_parent=parents, mode=mode))
if raise_if_exists and "ile exists" in result[0].get('error', ''):
raise luigi.target.FileAlreadyExists("%s exists" % (path, ))
return result
def listdir(self, path, ignore_directories=False, ignore_files=False,
include_size=False, include_type=False, include_time=False,
recursive=False):
"""
Use snakebite.ls to get the list of items in a directory.
:param path: the directory to list
:type path: string
:param ignore_directories: if True, do not yield directory entries
:type ignore_directories: boolean, default is False
:param ignore_files: if True, do not yield file entries
:type ignore_files: boolean, default is False
:param include_size: include the size in bytes of the current item
:type include_size: boolean, default is False (do not include)
:param include_type: include the type (d or f) of the current item
:type include_type: boolean, default is False (do not include)
:param include_time: include the last modification time of the current item
:type include_time: boolean, default is False (do not include)
:param recursive: list subdirectory contents
:type recursive: boolean, default is False (do not recurse)
:return: yield with a string, or if any of the include_* settings are
true, a tuple starting with the path, and include_* items in order
"""
bite = self.get_bite()
for entry in bite.ls(self.list_path(path), recurse=recursive):
if ignore_directories and entry['file_type'] == 'd':
continue
if ignore_files and entry['file_type'] == 'f':
continue
rval = [entry['path'], ]
if include_size:
rval.append(entry['length'])
if include_type:
rval.append(entry['file_type'])
if include_time:
rval.append(datetime.datetime.fromtimestamp(entry['modification_time'] / 1000))
if len(rval) > 1:
yield tuple(rval)
else:
yield rval[0]
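# Illustrative sketch (not part of the original module): typical use of the
# snakebite-backed client. The HDFS path below is invented; connection settings
# come from luigi's hdfs configuration exactly as in get_bite() above.
def _illustrative_listdir_usage():
    """List an HDFS directory with sizes and types (illustrative only)."""
    client = SnakebiteHdfsClient()
    entries = []
    for path, size, file_type in client.listdir('/tmp/example',
                                                include_size=True,
                                                include_type=True):
        entries.append((path, size, file_type))
    return entries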
|
apache-2.0
| -2,731,325,352,027,294,000
| 38.90146
| 120
| 0.60697
| false
| 4.171309
| true
| false
| false
|
a25kk/biobee
|
docs/conf.py
|
1
|
5997
|
# -*- coding: utf-8 -*-
# Build configuration file.
# This file is execfile()d with the current directory set to its
# containing dir.
# Note that not all possible configuration values are present in this
# autogenerated file.
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
from datetime import datetime
project = u'biobee.buildout'
copyright = u'%s, Serge Davidov.' % datetime.now().year
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'buildoutdoc'
# -- Options for LaTeX output -------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual])
latex_documents = [
('index',
'buildout.tex',
u'biobee.buildout Documentation',
u'', 'manual'
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
mit
| -6,147,307,173,122,633,000
| 31.770492
| 79
| 0.711689
| false
| 3.771698
| true
| false
| false
|
jonathanstrong/functor
|
setup.py
|
1
|
1091
|
#!/usr/bin/env python
# Bootstrap installation of Distribute
import distribute_setup
distribute_setup.use_setuptools()
import os
from setuptools import setup
PROJECT = u'Functor'
VERSION = '0.1'
URL = ''
AUTHOR = u'Jonathan Strong'
AUTHOR_EMAIL = u'jonathan.strong@gmail.com'
DESC = "Implements a function-object pattern in Python."
def read_file(file_name):
file_path = os.path.join(
os.path.dirname(__file__),
file_name
)
return open(file_path).read()
setup(
name=PROJECT,
version=VERSION,
description=DESC,
long_description=read_file('README.md'),
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
license=read_file('LICENSE'),
namespace_packages=[],
packages=[u'functor'],
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Requirements -*-
],
entry_points = {
# -*- Entry points -*-
},
classifiers=[
# see http://pypi.python.org/pypi?:action=list_classifiers
# -*- Classifiers -*-
"Programming Language :: Python",
],
)
|
mit
| 4,604,809,639,414,675,000
| 20.82
| 63
| 0.628781
| false
| 3.519355
| false
| false
| false
|
fusic-com/flask-webcache
|
tests/test_storage.py
|
1
|
12927
|
from __future__ import unicode_literals
import unittest
from datetime import timedelta, datetime
from six.moves.cPickle import dumps, loads
from six import iteritems
from flask import Flask, send_file
from werkzeug.wrappers import Response
from werkzeug.datastructures import HeaderSet
from werkzeug.contrib.cache import SimpleCache
from flask_webcache.storage import Config, Metadata, Store, Retrieval
from flask_webcache.storage import (CacheMiss, NoResourceMetadata, NoMatchingRepresentation, NotFreshEnoughForClient,
RecacheRequested)
from flask_webcache.recache import RECACHE_HEADER
from flask_webcache.utils import werkzeug_cache_get_or_add
from testutils import compare_numbers
a = Flask(__name__)
class UtilsTestCase(unittest.TestCase):
def test_config_kwargs(self):
with self.assertRaises(TypeError):
Config(foo=1)
def test_metadata_datastructure(self):
def check_metadata(m):
self.assertEquals(m.salt, 'qux')
self.assertIn('foo', m.vary)
self.assertIn('bar', m.vary)
m = Metadata(HeaderSet(('foo', 'bar')), 'qux')
check_metadata(m)
check_metadata(loads(dumps(m)))
m2 = Metadata(HeaderSet(('foo', 'bar')), 'qux')
self.assertEquals(m, m2)
m3 = Metadata(HeaderSet(('foo', 'bar')), 'notqux')
self.assertNotEquals(m2, m3)
class StorageTestCase(unittest.TestCase):
def setUp(self):
self.c = SimpleCache()
self.s = Store(self.c)
self.r = Retrieval(self.c)
def test_basic_cachability(self):
with a.test_request_context('/foo'):
self.assertFalse(self.s.should_cache_response(Response(x for x in 'foo')))
self.assertTrue(self.s.should_cache_response(Response(status=204)))
self.assertFalse(self.s.should_cache_response(Response(status=500)))
self.assertTrue(self.s.should_cache_response(Response('foo')))
self.assertTrue(self.s.should_cache_response(Response()))
r = Response()
r.vary.add('*')
self.assertFalse(self.s.should_cache_response(r))
with a.test_request_context('/foo', method='HEAD'):
self.assertFalse(self.s.should_cache_response(Response('foo')))
with a.test_request_context('/foo', method='POST'):
self.assertFalse(self.s.should_cache_response(Response('foo')))
def test_cache_control_cachability(self):
def check_response_with_cache_control(**cc):
r = Response()
for k, v in iteritems(cc):
setattr(r.cache_control, k, v)
return self.s.should_cache_response(r)
with a.test_request_context():
self.assertTrue(check_response_with_cache_control(max_age=10))
self.assertTrue(check_response_with_cache_control(must_revalidate=True))
self.assertFalse(check_response_with_cache_control(max_age=0))
self.assertFalse(check_response_with_cache_control(private=True))
self.assertFalse(check_response_with_cache_control(no_cache=True))
self.assertFalse(check_response_with_cache_control(no_store=True))
def test_expire_cachability(self):
def check_response_with_expires(dt):
r = Response()
r.expires = dt
return self.s.should_cache_response(r)
with a.test_request_context():
self.assertFalse(check_response_with_expires(datetime.utcnow() - timedelta(seconds=1)))
self.assertTrue(check_response_with_expires(datetime.utcnow() + timedelta(seconds=1)))
def test_default_cachability(self):
with a.test_request_context('/foo'):
self.assertTrue(self.s.should_cache_response(Response()))
with a.test_request_context('/foo', query_string='?bar'):
self.assertFalse(self.s.should_cache_response(Response()))
def test_x_cache_headers(self):
r = Response()
self.s.mark_cache_hit(r)
self.assertEquals(r.headers[self.s.X_CACHE_HEADER], 'hit')
self.s.mark_cache_miss(r)
self.assertEquals(r.headers[self.s.X_CACHE_HEADER], 'miss')
def test_metadata_miss(self):
with self.assertRaises(NoResourceMetadata):
with a.test_request_context('/foo'):
self.r.fetch_metadata()
def test_response_miss(self):
with self.assertRaises(NoResourceMetadata):
with a.test_request_context('/foo'):
self.r.fetch_response()
def test_store_retrieve_cycle(self):
with a.test_request_context('/foo'):
r = Response('foo')
self.s.cache_response(r)
self.assertEquals(len(self.c._cache), 2)
r2 = self.r.fetch_response()
self.assertEquals(r.data, r2.data)
def test_vary_miss(self):
with a.test_request_context('/foo', headers=(('accept-encoding', 'gzip'),)):
r = Response('foo')
r.vary.add('accept-encoding')
r.content_encoding = 'gzip'
self.s.cache_response(r)
with self.assertRaises(NoMatchingRepresentation):
with a.test_request_context('/foo'):
self.r.fetch_response()
def test_invalidation_condition(self):
with a.test_request_context('/foo', method="PUT"):
r = Response('foo')
self.assertTrue(self.s.should_invalidate_resource(r))
r = Response('foo', status=500)
self.assertFalse(self.s.should_invalidate_resource(r))
with a.test_request_context('/foo'):
r = Response('foo')
self.assertFalse(self.s.should_invalidate_resource(r))
def test_invalidation(self):
with a.test_request_context('/foo'):
r = Response('foo')
self.s.cache_response(r)
self.assertEquals(len(self.c._cache), 2)
with a.test_request_context('/foo', method="PUT"):
r = Response('foo')
self.assertTrue(self.s.should_invalidate_resource(r))
self.s.invalidate_resource()
self.assertEquals(len(self.c._cache), 1)
with self.assertRaises(CacheMiss):
with a.test_request_context('/foo'):
self.r.fetch_response()
def test_master_salt_invalidation(self):
with a.test_request_context('/foo'):
r = Response('foo')
self.s.cache_response(r)
self.assertEquals(self.r.fetch_response().data, b'foo')
self.r.config.master_salt = 'newsalt'
with self.assertRaises(NoMatchingRepresentation):
self.r.fetch_response()
def test_request_cache_controls(self):
with a.test_request_context('/foo'):
self.assertTrue(self.r.should_fetch_response())
with a.test_request_context('/foo', method='HEAD'):
self.assertTrue(self.r.should_fetch_response())
with a.test_request_context('/foo', method='POST'):
self.assertFalse(self.r.should_fetch_response())
with a.test_request_context('/foo', headers=(('cache-control', 'no-cache'),)):
self.assertFalse(self.r.should_fetch_response())
with a.test_request_context('/foo', headers=(('pragma', 'no-cache'),)):
self.assertFalse(self.r.should_fetch_response())
with a.test_request_context('/foo', headers=(('cache-control', 'max-age=0'),)):
self.assertFalse(self.r.should_fetch_response())
with a.test_request_context('/foo', headers=(('cache-control', 'max-age=5'),)):
self.assertTrue(self.r.should_fetch_response())
def test_response_freshness_seconds(self):
        # this test is racy; if it takes about a second to run, it might fail
r = Response()
self.assertEquals(0, self.r.response_freshness_seconds(r))
r.date = datetime.utcnow()
self.assertTrue(compare_numbers(self.s.DEFAULT_EXPIRATION_SECONDS,
self.r.response_freshness_seconds(r),
1))
r.expires = datetime.utcnow() + timedelta(seconds=345)
self.assertTrue(compare_numbers(345, self.r.response_freshness_seconds(r), 1))
        r.cache_control.max_age = 789
self.assertTrue(compare_numbers(789, self.r.response_freshness_seconds(r), 1))
def test_min_fresh(self):
        # this test is racy; if it takes about a second to run, it might fail
r = Response()
r.date = datetime.utcnow() - timedelta(seconds=100)
r.cache_control.max_age = 200
f = self.r.response_freshness_seconds(r)
with a.test_request_context('/foo', headers=(('cache-control', 'min-fresh=50'),)):
try:
self.r.verify_response_freshness_or_miss(r, f)
except CacheMiss:
self.fail('unexpected CacheMiss on reasonably fresh response')
with a.test_request_context('/foo', headers=(('cache-control', 'min-fresh=150'),)):
self.assertRaises(NotFreshEnoughForClient, self.r.verify_response_freshness_or_miss, r, f)
def test_request_cache_control_disobedience(self):
c = SimpleCache()
cfg = Config(request_controls_cache=False)
s = Store(c, cfg)
r = Retrieval(c, cfg)
with a.test_request_context('/foo', headers=(('cache-control', 'no-store'),)):
self.assertTrue(r.should_fetch_response())
with a.test_request_context('/foo', headers=(('cache-control', 'no-store'),)):
self.assertTrue(s.should_cache_response(Response()))
with a.test_request_context('/foo', headers=(('cache-control', 'no-store'),)):
self.assertTrue(s.should_cache_response(Response()))
resp = Response()
resp.date = datetime.utcnow() - timedelta(seconds=100)
resp.cache_control.max_age = 200
with a.test_request_context('/foo', headers=(('cache-control', 'min-fresh=150'),)):
f = self.r.response_freshness_seconds(resp)
try:
r.verify_response_freshness_or_miss(resp, f)
except CacheMiss:
self.fail('unexpected CacheMiss when ignoring request cache control')
def test_sequence_converted_responses(self):
with a.test_request_context('/foo'):
r = Response(f for f in 'foo')
r.make_sequence()
self.assertFalse(self.s.should_cache_response(r))
r = send_file(__file__)
r.make_sequence()
self.assertFalse(self.s.should_cache_response(r))
class RecacheTestCase(unittest.TestCase):
def setUp(self):
self.recached = False
def dispatcher(salt):
self.recached = True
self.c = SimpleCache()
cfg = Config(preemptive_recache_seconds=10, preemptive_recache_callback=dispatcher)
self.s = Store(self.c, cfg)
self.r = Retrieval(self.c, cfg)
def test_preemptive_recaching_predicate(self):
m = Metadata(HeaderSet(('foo', 'bar')), 'qux')
def mkretr(**kwargs):
return Retrieval(self.c, Config(**kwargs))
with a.test_request_context('/'):
self.assertFalse(mkretr(preemptive_recache_seconds=10).should_recache_preemptively(10, m))
self.assertFalse(mkretr(preemptive_recache_callback=lambda x: 0).should_recache_preemptively(10, m))
self.assertFalse(self.r.should_recache_preemptively(11, m))
self.assertTrue(self.r.should_recache_preemptively(10, m))
self.assertFalse(self.r.should_recache_preemptively(10, m))
self.c.clear()
self.assertTrue(self.r.should_recache_preemptively(10, m))
def test_preemptive_recaching_cache_bypass(self):
fresh = Response('foo')
with a.test_request_context('/foo'):
self.s.cache_response(fresh)
metadata = self.r.fetch_metadata()
with a.test_request_context('/foo'):
cached = self.r.fetch_response()
self.assertEquals(cached.headers[self.r.X_CACHE_HEADER], 'hit')
with a.test_request_context('/foo', headers={RECACHE_HEADER: metadata.salt}):
self.assertRaises(RecacheRequested, self.r.fetch_response)
with a.test_request_context('/foo', headers={RECACHE_HEADER: 'incorrect-salt'}):
try:
self.r.fetch_response()
except RecacheRequested:
self.fail('unexpected RecacheRequested for incorrect salt')
class UtilityTestCase(unittest.TestCase):
def setUp(self):
self.c = SimpleCache()
def test_werkzeug_cache_get_or_add_missing_key(self):
self.assertEquals('bar', werkzeug_cache_get_or_add(self.c, 'foo', 'bar', 10))
def test_werkzeug_cache_get_or_add_existing_key(self):
self.c.set('foo', 'bar')
self.assertEquals('bar', werkzeug_cache_get_or_add(self.c, 'foo', 'qux', 10))
|
mit
| -3,667,972,195,193,208,300
| 45.003559
| 117
| 0.61886
| false
| 3.773205
| true
| false
| false
|
eroicaleo/LearningPython
|
interview/leet/124_Binary_Tree_Maximum_Path_Sum.py
|
1
|
1054
|
#!/usr/bin/env python
from tree import *
class Solution:
def maxPathSum(self, root):
"""
:type root: TreeNode
:rtype: int
"""
        if root is None:
            return 0
self.maxSum = root.val
self.maxPathSumNode(root)
return self.maxSum
    def maxPathSumNode(self, node):
        if node is None:
            return 0
        # Best downward path sums starting at each child (0 for a missing child).
        leftSum = self.maxPathSumNode(node.left)
        rightSum = self.maxPathSumNode(node.right)
        # A maximal path may bend at this node, so consider the node plus the left
        # branch, plus the right branch, plus both branches, or the node alone.
        self.maxSum = max(leftSum+node.val, rightSum+node.val, leftSum+node.val+rightSum, self.maxSum, node.val)
        print('leftSum: %d, rightSum: %d, node.val: %d, self.maxSum: %d' % (leftSum, rightSum, node.val, self.maxSum))
        # Only a path that uses at most one branch can be extended upward.
        ret = max(leftSum+node.val, rightSum+node.val, node.val)
        print('node.val: %d, ret: %d' % (node.val, ret))
        return ret
sol = Solution()
nodeString = "[-10,9,20,null,null,15,7]"
nodeString = "[1,2,3]"
nodeString = "[1,-2,-3,1,3,-2,null,-1]"
root = treeBuilder(nodeString)
traverse(root)
print(sol.maxPathSum(root))
|
mit
| -2,902,822,416,752,697,000
| 30
| 118
| 0.598672
| false
| 3.020057
| false
| false
| false
|
niklasf/python-prompt-toolkit
|
prompt_toolkit/layout/utils.py
|
1
|
2590
|
from __future__ import unicode_literals
from prompt_toolkit.utils import get_cwidth
__all__ = (
'token_list_len',
'token_list_width',
'token_list_to_text',
'explode_tokens',
'find_window_for_buffer_name',
)
def token_list_len(tokenlist):
"""
    Return the number of characters in this token list.
:param tokenlist: List of (token, text) or (token, text, mouse_handler)
tuples.
"""
return sum(len(item[1]) for item in tokenlist)
def token_list_width(tokenlist):
"""
Return the character width of this token list.
(Take double width characters into account.)
:param tokenlist: List of (token, text) or (token, text, mouse_handler)
tuples.
"""
return sum(get_cwidth(c) for item in tokenlist for c in item[1])
def token_list_to_text(tokenlist):
"""
Concatenate all the text parts again.
"""
return ''.join(item[1] for item in tokenlist)
def iter_token_lines(tokenlist):
"""
Iterator that yields tokenlists for each line.
"""
line = []
for token, c in explode_tokens(tokenlist):
line.append((token, c))
if c == '\n':
yield line
line = []
yield line
def split_lines(tokenlist):
"""
Take a single list of (Token, text) tuples and yield one such list for each
line.
"""
line = []
for token, string in tokenlist:
items = string.split('\n')
for item in items[:-1]:
if item:
line.append((token, item))
yield line
line = []
line.append((token, items[-1]))
if line:
yield line
def explode_tokens(tokenlist):
"""
Turn a list of (token, text) tuples into another list where each string is
exactly one character.
:param tokenlist: List of (token, text) tuples.
"""
result = []
for token, string in tokenlist:
for c in string:
result.append((token, c))
return result
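def _example_split_and_explode():
    """Illustrative sketch, not part of the original module.

    Demonstrates how a (token, text) list is reshaped by the helpers above.
    The token objects are opaque to these functions, so plain strings are
    used as stand-in tokens here.
    """
    fragments = [('Token.Text', 'ab\ncd'), ('Token.Keyword', 'ef')]
    # split_lines() yields one fragment list per line, without the newlines.
    lines = list(split_lines(fragments))
    assert lines == [[('Token.Text', 'ab')],
                     [('Token.Text', 'cd'), ('Token.Keyword', 'ef')]]
    # explode_tokens() flattens fragments to one character per tuple.
    assert explode_tokens([('Token.Text', 'ab')]) == [('Token.Text', 'a'),
                                                      ('Token.Text', 'b')]
    # token_list_to_text() reassembles the original text.
    assert token_list_to_text(fragments) == 'ab\ncdef'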
def find_window_for_buffer_name(layout, buffer_name):
"""
Look for a :class:`~prompt_toolkit.layout.containers.Window` in the Layout
that contains the :class:`~prompt_toolkit.layout.controls.BufferControl`
for the given buffer and return it. If no such Window is found, return None.
"""
from .containers import Window
from .controls import BufferControl
for l in layout.walk():
if isinstance(l, Window) and isinstance(l.content, BufferControl):
if l.content.buffer_name == buffer_name:
return l
|
bsd-3-clause
| 693,291,599,595,765,100
| 23.205607
| 80
| 0.602317
| false
| 3.894737
| false
| false
| false
|
devbitstudio/portfolio
|
settings.py
|
1
|
5950
|
# Django settings for devbitstudio project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DOMAIN = 'devbitstudio.com'
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
#~ DEFAULT_FROM_EMAIL = 'contact@devbitstudio.com'
SERVER_EMAIL = 'contact@devbitstudio.com'
EMAIL_SUBJECT_PREFIX = 'DevBitStudio - '
CURRENT_PATH = os.path.abspath(os.path.dirname(__file__).decode('utf-8'))
PROJECT_DIR = os.path.dirname(__file__)
RESULTS_PER_PAGE = 12
ADMINS = (
('William Ibarra Rodriguez', 'wibarra@ucp.ho.rimed.cu'),
('Miguel Pelfort Paz', 'miguel.pelfort@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'devbitstudio', # Or path to database file if using sqlite3.
'USER': 'root', # Not used with sqlite3.
'PASSWORD': 'root', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_DIR, 'uploads/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''#os.path.join(PROJECT_DIR, 'static/')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, 'static/'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'iea9ivk!*ms-#$i%ix0i0b3p=u&30v+h*)&c5!%byv^i6^15%3'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'marketing.urlcanon.URLCanonicalizationMiddleware',
)
ROOT_URLCONF = 'devbitstudio.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__),'templates').replace('\\', '/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
'main',
'django.contrib.sitemaps',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# for use with URL Canonicalization Middleware:
# this is the canonical hostname to be used by your app (required)
CANON_URL_HOST = 'devbitstudio.com'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
mit
| -7,184,461,981,706,605,000
| 32.806818
| 120
| 0.689412
| false
| 3.573574
| false
| false
| false
|
ChromeDevTools/devtools-frontend
|
scripts/deps/roll_deps.py
|
2
|
2410
|
#!/usr/bin/env vpython
#
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Update manually maintained dependencies from Chromium.
"""
import argparse
import os
import shutil
import subprocess
import sys
# Files whose location within devtools-frontend matches the upstream location.
FILES = [
'v8/include/js_protocol.pdl',
'third_party/blink/renderer/core/css/css_properties.json5',
'third_party/blink/renderer/core/html/aria_properties.json5',
'third_party/blink/public/devtools_protocol/browser_protocol.pdl',
]
# Files whose location within devtools-frontend differs from the upstream location.
FILE_MAPPINGS = {
# chromium_path => devtools_frontend_path
'components/variations/proto/devtools/client_variations.js':
'front_end/third_party/chromium/client-variations/ClientVariations.js',
'third_party/axe-core/axe.d.ts': 'front_end/third_party/axe-core/axe.d.ts',
'third_party/axe-core/axe.js': 'front_end/third_party/axe-core/axe.js',
'third_party/axe-core/axe.min.js':
'front_end/third_party/axe-core/axe.min.js',
'third_party/axe-core/LICENSE': 'front_end/third_party/axe-core/LICENSE',
}
for f in FILES:
FILE_MAPPINGS[f] = f
def parse_options(cli_args):
parser = argparse.ArgumentParser(description='Roll dependencies from Chromium.')
parser.add_argument('chromium_dir', help='path to chromium/src directory')
parser.add_argument('devtools_dir',
help='path to devtools/devtools-frontend directory')
return parser.parse_args(cli_args)
def update(options):
subprocess.check_call(['git', 'fetch', 'origin'], cwd=options.chromium_dir)
subprocess.check_call(['git', 'checkout', 'origin/main'],
cwd=options.chromium_dir)
subprocess.check_call(['gclient', 'sync'], cwd=options.chromium_dir)
def copy_files(options):
for from_path, to_path in FILE_MAPPINGS.items():
from_path = os.path.normpath(from_path)
to_path = os.path.normpath(to_path)
print('%s => %s' % (from_path, to_path))
shutil.copy(os.path.join(options.chromium_dir, from_path),
os.path.join(options.devtools_dir, to_path))
if __name__ == '__main__':
OPTIONS = parse_options(sys.argv[1:])
update(OPTIONS)
copy_files(OPTIONS)
|
bsd-3-clause
| 2,989,384,766,150,173,000
| 36.076923
| 84
| 0.692946
| false
| 3.347222
| false
| false
| false
|
google/makani
|
avionics/motor/motor_client.py
|
1
|
50178
|
#!/usr/bin/python
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command line client for controlling motors."""
import collections
import os
import re
import socket
import subprocess
import tempfile
import threading
import time
import makani
from makani.avionics.common import actuator_types
from makani.avionics.common import aio
from makani.avionics.common import cmd_client
from makani.avionics.common import pack_avionics_messages
from makani.avionics.common import safety_codes
from makani.avionics.firmware.params import client as param_client
from makani.avionics.motor.firmware import config_params
from makani.avionics.motor.firmware import flags
from makani.avionics.network import aio_labels
from makani.avionics.network import aio_node
from makani.avionics.network import message_type
from makani.lib.python import c_helpers
import numpy as np
from scipy import interpolate
# TODO: implement NetworkConfig() to replace all these EnumHelper's.
aio_node_helper = c_helpers.EnumHelper('AioNode', aio_node)
motor_label_helper = c_helpers.EnumHelper('MotorLabel', aio_labels,
prefix='kMotor')
motor_error_helper = c_helpers.EnumHelper('MotorError', flags)
motor_warning_helper = c_helpers.EnumHelper('MotorWarning', flags)
def BuildMotorParamDict():
"""Builds a dict mapping motor param names to their indices."""
# Build up parameter list.
filename = os.path.join(makani.HOME, 'avionics/motor/firmware/io.c')
with open(filename) as f:
f_text = f.read()
# Get parameter array string.
re_string = r'static float \*g_mutable_param_addrs\[\] = {\s*^([\s\S]*)^};'
array_string = re.search(re_string, f_text, re.MULTILINE)
re_string = r'^ *&[\w\[\]]+.([\w\.\[\]]+)'
motor_param_keys = re.findall(re_string, array_string.group(0), re.MULTILINE)
return {key: ind for ind, key in enumerate(motor_param_keys)}
# Constants.
MOTORS = [mot.upper() for mot in motor_label_helper.ShortNames()]
CONTROLLER = 'kAioNodeControllerA'
OPERATOR = 'kAioNodeOperator'
MOTOR_PARAMS = BuildMotorParamDict()
MOTOR_ERROR_NAMES = collections.OrderedDict(
(error_bitmask, motor_error_helper.Name(error_bitmask))
for error_bitmask in motor_error_helper.Values()
if motor_error_helper.Name(error_bitmask) != 'kMotorErrorAll')
MOTOR_WARNING_NAMES = collections.OrderedDict(
(warning_bitmask, motor_warning_helper.Name(warning_bitmask))
for warning_bitmask in motor_warning_helper.Values()
if motor_warning_helper.Name(warning_bitmask) != 'kMotorWarningAll')
MOTOR_STATUS_NAMES = {val: key for key, val in flags.__dict__.items()
if key.startswith('kMotorStatus')}
GEN_TABLE_PATH = os.path.join(makani.HOME,
'avionics/motor/gen_lookup_table.py')
OMEGA_MIN_LIMIT = -260.0
OMEGA_MAX_LIMIT = 260.0
TORQUE_MIN_LIMIT = -600.0
TORQUE_MAX_LIMIT = 600.0
EPS32 = np.finfo(np.float32).eps
class MotorClientError(cmd_client.WingClientError):
pass
def MotorsAsBits(motor_list):
"""Returns a bitmask describing the motors in `motor_list`."""
return sum(1 << motor_label_helper.Value(motor.capitalize())
for motor in motor_list)
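def _ExampleMotorBitmask():
  """Illustrative sketch, not part of the original client.

  MotorsAsBits() turns motor nicknames into a bitmask keyed by MotorLabel
  values. The concrete bit positions come from aio_labels; assuming, for this
  example only, that 'Sbo' maps to label 0 and 'Sbi' to label 1, selecting
  both would set the two lowest bits (0b11).
  """
  return MotorsAsBits(['SBO', 'SBI'])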
def AioNodeNameFromMotorNickname(motor):
"""Returns AIO node name for the specified motor."""
return 'kAioNodeMotor' + motor.capitalize()
def AioNodeNameFromDynoNickname(motor):
"""Returns AIO node name for the specified dyno motor."""
return 'kAioNodeDynoMotor' + motor.capitalize()
def GetMotorErrorNames(error_bitmask):
"""Returns a list of error names corresponding to the specified bitmask."""
return GetFlagNames(error_bitmask, MOTOR_ERROR_NAMES, 0)
def GetMotorWarningNames(warning_bitmask):
"""Returns a list of warning names corresponding to the specified bitmask."""
return GetFlagNames(warning_bitmask, MOTOR_WARNING_NAMES, 0)
def GetFlagNames(bitmask, bitmask_dict, default_key=None):
"""Returns a list based on bitmask_dict corresponding to set bits in bitmask.
Args:
bitmask: Integer containing a bitmask of desired fields.
bitmask_dict: Dictionary with power-of-two integer keys and values
containing names of the corresponding bits.
default_key: Key to use if bitmask == 0. Set to None to return [].
Returns:
A list with the values of bitmask_dict specified by bitmask.
"""
if bitmask:
return [name for bit, name in bitmask_dict.iteritems() if bit & bitmask]
else:
if default_key is None:
return []
else:
return [bitmask_dict[default_key]]
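def _ExampleDecodeBitmask():
  """Illustrative sketch, not part of the original client.

  GetFlagNames() expands a bitmask into human-readable names. A small
  stand-in dictionary is used here instead of the real MOTOR_ERROR_NAMES so
  the example does not depend on the actual flag values.
  """
  names = collections.OrderedDict([(1 << 0, 'ErrorA'), (1 << 1, 'ErrorB')])
  assert GetFlagNames(0b11, names) == ['ErrorA', 'ErrorB']
  assert GetFlagNames(0b10, names) == ['ErrorB']
  # With a zero bitmask and no default key, the result is empty.
  assert GetFlagNames(0, names) == []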
def GenerateCommandData(args):
"""Generates the data to use for a given speed or torque command.
Args:
args: List containing command input file & optional loop parameter.
Returns:
data: Numpy array of time, torque and speed limits.
loop: Boolean of optional loop parameter.
Raises:
MotorClientError: An invalid filename or file format was specified.
"""
cmd_file = args[0]
if not os.path.isfile(cmd_file):
raise MotorClientError('Invalid filename: %s' % cmd_file)
# Handle 1st arg i.e. the command file.
if cmd_file.endswith(('.py', '.pycmd')): # Treat as a Python file.
with tempfile.NamedTemporaryFile() as table_file:
popen = subprocess.Popen([GEN_TABLE_PATH, '--input_file', cmd_file,
'--binary'],
stdout=table_file, stderr=subprocess.PIPE)
_, stderr = popen.communicate()
if popen.returncode != 0:
raise MotorClientError('Generation of lookup table from %s failed. '
'stderr:\n%s' % (cmd_file, stderr))
data = np.load(table_file.name)
print 'Using %s to generate command profile.' % cmd_file
else: # Treat as a text file for interpolation.
try:
data = np.loadtxt(cmd_file)
except (IOError, ValueError):
      raise MotorClientError(
          'Invalid input text file: %s. Should contain a table of time, torques '
          'and speed limits with rows of the form:\n\n'
          'time torque1 torque2 ... torque8 omega_lower1 omega_lower2 ... '
          'omega_lower8 omega_upper1 omega_upper2 ... omega_upper8' % cmd_file)
print 'Using interpolated values from %s for command profile.' % cmd_file
if data.shape[1] != 25:
raise MotorClientError(
'Invalid number of columns in command table. Expected 25, got %d. '
'Revise input file to generate rows of the form:\n'
        'time torque1 torque2 ... torque8 omega_lower1 omega_lower2 ... '
'omega_lower8 omega_upper1 omega_upper2 ... omega_upper8'
% data.shape[1])
# Handle 2nd arg i.e. the optional parameter to repeat.
if len(args) == 1:
loop = False
print 'Defaulting to \"noloop\".'
else:
if args[1] == 'loop':
loop = True
elif args[1] == 'noloop':
loop = False
else:
raise MotorClientError('Invalid option: %s. Expecting \"loop\" or '
'[default] \"noloop\".' % args[1])
return data, loop
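def _ExampleWriteCommandTable(filename):
  """Illustrative sketch, not part of the original client.

  Writes a minimal command table in the plain-text format expected by
  GenerateCommandData(): each row holds time, 8 torques, 8 lower speed
  limits and 8 upper speed limits (25 columns). This two-row table ramps the
  upper speed limit from 0 to 30 rad/s over 5 s with zero torque commands.
  The output filename is caller-supplied.
  """
  t = np.array([0.0, 5.0])
  torque = np.zeros((2, 8))
  omega_lower = np.zeros((2, 8))
  omega_upper = np.array([[0.0] * 8, [30.0] * 8])
  table = np.hstack([t[:, np.newaxis], torque, omega_lower, omega_upper])
  np.savetxt(filename, table)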
def CheckCommandLimits(
cmd_min, cmd_max, cmd_min_limit, cmd_max_limit, cmd_type):
if cmd_min < cmd_min_limit or cmd_max > cmd_max_limit:
raise MotorClientError('Extreme %s outside of limits [%f, %f] '
'detected. Command not set.' %
(cmd_type, cmd_min_limit, cmd_max_limit))
if cmd_min > cmd_max:
raise MotorClientError('Invalid %s i.e. min value - %f, is greater '
'than max value - %f' % (cmd_type, cmd_min, cmd_max))
class CommandProfile(object):
"""Maintains a lookup table of motor commands while running motors."""
def __init__(
self, t, motor_cmd, cmd_min_limit, cmd_max_limit, cmd_type,
loop_back=False):
self._loop_back = loop_back
self._t = t
self._motor_cmd_func = interpolate.interp1d(self._t, motor_cmd, axis=0)
cmd_max = np.max(motor_cmd)
cmd_min = np.min(motor_cmd)
print ('\nWith {t_start:.2f}s < t < {t_end:.2f}s:'
'\n min({type}) = {min:f}\n max({type}) = {max:f}\n'.format(
t_start=t[0], t_end=t[-1], type=cmd_type,
min=cmd_min, max=cmd_max))
CheckCommandLimits(cmd_min, cmd_max, cmd_min_limit, cmd_max_limit, cmd_type)
def __call__(self, t):
if self._loop_back:
t = np.mod(t, self._t[-1])
elif t > self._t[-1]:
return None
return list(self._motor_cmd_func(t))
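def _ExampleCommandProfile():
  """Illustrative sketch, not part of the original client.

  Builds a CommandProfile for the upper speed limit of 8 motors: a linear
  ramp from 0 to 50 rad/s over 10 s. Querying a non-looping profile past its
  final time returns None, which a caller can treat as end-of-profile.
  """
  t = np.array([0.0, 10.0])
  omega_upper = np.array([[0.0] * 8, [50.0] * 8])
  profile = CommandProfile(t, omega_upper, OMEGA_MIN_LIMIT, OMEGA_MAX_LIMIT,
                           'omega', loop_back=False)
  halfway = profile(5.0)    # list of 8 values, each 25.0
  finished = profile(20.0)  # None once the profile has run out
  return halfway, finished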
class MotorCommandClient(cmd_client.WingCommandClient):
"""Command line client for running M600 motors."""
prompt = '(motor_client) '
_NUM_RETRIES = 10
_MOTORS = 'motors'
_DYNOS = 'dynos'
def __init__(self, *args, **kwargs):
cmd_client.WingCommandClient.__init__(self, *args, **kwargs)
self._motors_selected = set()
self._dynos_selected = set()
self._spin_dir = {}
self._motor_runner = Runner(self._motors_selected, self._spin_dir)
self._dyno_runner = Runner(self._dynos_selected, self._spin_dir,
dyno_mode=True)
self._motor_listener = None
self._dyno_listener = None
self._torque = 0.0
self._omega_lower_limit = 0.0
self._omega_upper_limit = 0.0
self._arm_aio_client = aio.AioClient(
['kMessageTypeMotorSetState', 'kMessageTypeDynoMotorSetState'],
timeout=0.1)
self._set_param_aio_client = aio.AioClient(
['kMessageTypeMotorSetParam', 'kMessageTypeDynoMotorSetParam'],
timeout=0.1)
# The long range radio requires at least 2x160 ms for a complete command-
# response cycle.
self._ack_param_aio_client = aio.AioClient(
['kMessageTypeMotorAckParam'], timeout=0.35)
self._get_param_aio_client = aio.AioClient(
['kMessageTypeMotorGetParam', 'kMessageTypeDynoMotorGetParam'],
timeout=0.1)
self._param_client = param_client.Client(timeout=0.1)
def TryStopThreads(self):
self._motor_runner.TryStop()
self._dyno_runner.TryStop()
if self._motor_listener:
self._motor_listener.TryStop()
if self._dyno_listener:
self._dyno_listener.TryStop()
def _GetListenerAndRunner(self, node_type):
if node_type == self._MOTORS:
return self._motor_listener, self._motor_runner
elif node_type == self._DYNOS:
return self._dyno_listener, self._dyno_runner
else:
raise MotorClientError('Unknown node type.')
def _CheckStatus(self, valid_statuses, node_type):
listener, _ = self._GetListenerAndRunner(node_type)
if not listener:
status = flags.kMotorStatusInit
else:
status = listener.GetMostRestrictiveMotorStatus()
if status not in valid_statuses:
raise MotorClientError(
'Invalid %s status. %s' % (
node_type.capitalize(), MOTOR_STATUS_NAMES[status]))
return True
def _CheckMotorStatus(self, valid_statuses):
self._CheckStatus(valid_statuses, self._MOTORS)
def _CheckDynoStatus(self, valid_statuses):
self._CheckStatus(valid_statuses, self._DYNOS)
def _CheckTargetsSelected(self):
if self._motors_selected or self._dynos_selected:
return True
else:
raise MotorClientError('Invalid set of targets. Use either: '
'"set_targets" or "set_targets_dyno".')
def _SetTargets(self, line, node_type):
"""Sets motor or dyno targets.
Args:
line: User supplied arguments specifying target motors.
node_type: String specifying type of targets i.e. 'motors' or 'dynos'.
Raises:
MotorClientError: An invalid set of targets was specified.
"""
targets_selected, _ = cmd_client.SelectArgs(
line.split(), MOTORS, require_some=True, require_all=True,
select_all=True, require_one=False)
if node_type == self._MOTORS:
self._motors_selected = targets_selected
motor_params = self._QueryConfig(self._motors_selected, self._MOTORS)
self._spin_dir = self._GetSpinDir(motor_params)
elif node_type == self._DYNOS:
self._dynos_selected = targets_selected
self._QueryConfig(self._dynos_selected, self._DYNOS)
self.TryStopThreads()
if self._motors_selected:
print 'Motors selected: %s.' % ', '.join(self._motors_selected)
self._motor_runner = Runner(self._motors_selected, self._spin_dir)
self._motor_listener = Listener(self._motor_runner.StopRun,
self._motors_selected)
if self._dynos_selected:
print 'Dynos selected: %s.' % ', '.join(self._dynos_selected)
self._dyno_runner = Runner(self._dynos_selected, self._spin_dir,
dyno_mode=True)
self._dyno_listener = Listener(self._dyno_runner.StopRun,
self._dynos_selected, dyno_mode=True)
@cmd_client.Command()
def do_set_targets(self, line): # pylint: disable=invalid-name
"""Sets motor targets e.g. "set_targets SBO SBI"."""
self._CheckMotorStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._SetTargets(line, self._MOTORS)
@cmd_client.Command()
def do_set_targets_dyno(self, line): # pylint: disable=invalid-name
"""Sets dyno targets e.g. "set_targets_dyno SBO SBI"."""
self._CheckDynoStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._SetTargets(line, self._DYNOS)
@cmd_client.Command()
def do_get_targets(self, line): # pylint: disable=invalid-name
"""Displays selected motor & dyno targets."""
print 'Current targets.\nMotors: %s.\nDynos: %s.' % (
', '.join(self._motors_selected), ', '.join(self._dynos_selected))
@cmd_client.Command()
def do_clear_targets(self, line): # pylint: disable=invalid-name
"""Clears selected motor & dyno targets."""
old_motors = self._motors_selected.copy()
old_dynos = self._dynos_selected.copy()
self.TryStopThreads()
self._motors_selected = set()
self._dynos_selected = set()
self._spin_dir = {}
self._motor_runner = Runner(self._motors_selected, self._spin_dir)
self._dyno_runner = Runner(self._dynos_selected, self._spin_dir,
dyno_mode=True)
self._motor_listener = None
self._dyno_listener = None
print 'Cleared old targets.\nOld Motors: %s.\nOld Dynos: %s.' % (
', '.join(old_motors), ', '.join(old_dynos))
def complete_set_targets(self, text, *unused_args): # pylint: disable=invalid-name
return self._CompleteArg(text, sorted(MOTORS) + ['All'])
complete_set_targets_dyno = complete_set_targets
def _GetSpinDir(self, params):
"""Determine the nominal spin direction based off of the motor load type."""
# List of props that need to spin in the positive x direction / in the
# negative omega sense.
# Additional loads are to be added in future commits.
reversed_loads = [config_params.MotorLoadType.PROP_REV2_POSITIVE_X]
return {key: -1 if param and param.load_type in reversed_loads else 1
for key, param in params.iteritems()}
def _QueryConfig(self, targets, target_type):
"""Test if targets are on the network and query their configurations."""
params = {}
for target in targets:
if target_type == self._DYNOS:
node = aio_node_helper.Value(AioNodeNameFromDynoNickname(target))
elif target_type == self._MOTORS:
node = aio_node_helper.Value(AioNodeNameFromMotorNickname(target))
section = param_client.SECTION_CONFIG
try:
params[target] = self._param_client.GetSection(node, section)
except socket.timeout:
params[target] = None
self._PrintConfig(targets, params)
return params
def _PrintConfig(self, motors, params):
"""Print portions of the selected motor config params."""
load_types = [load_type.CName()[len('kMotorLoadType'):]
for load_type in config_params.MotorLoadType.Names()]
motor_types = [motor_type.CName()[len('kMotorType'):]
for motor_type in config_params.MotorType.Names()]
load_type_max_str_len = max([len(name) for name in load_types])
motor_type_max_str_len = max([len(name) for name in motor_types])
for motor in sorted(motors):
if params[motor] is None:
print '%s: unknown' % motor
else:
print '{name}: motor_type: {motor_type} load_type: {load_type}'.format(
name=motor,
motor_type=(motor_types[params[motor].motor_type]
.ljust(motor_type_max_str_len)),
load_type=(load_types[params[motor].load_type]
.ljust(load_type_max_str_len)))
print ''
@cmd_client.Command()
def do_query_config(self, line): # pylint: disable=invalid-name
targets_selected, _ = cmd_client.SelectArgs(
line.split(), MOTORS, require_some=True, require_all=True,
select_all=True, require_one=False)
self._CheckMotorStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._QueryConfig(targets_selected, self._MOTORS)
@cmd_client.Command()
def do_query_config_dyno(self, line): # pylint: disable=invalid-name
targets_selected, _ = cmd_client.SelectArgs(
line.split(), MOTORS, require_some=True, require_all=True,
select_all=True, require_one=False)
self._CheckDynoStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._QueryConfig(targets_selected, self._DYNOS)
def _TryArm(self, arm_msg, arm_msg_type, node_type):
listener, _ = self._GetListenerAndRunner(node_type)
for _ in xrange(self._NUM_RETRIES):
self._arm_aio_client.Send(arm_msg, arm_msg_type, OPERATOR)
time.sleep(0.1)
if listener.AllMotorsArmed():
print 'Successfully armed %s.' % node_type
return
else:
raise MotorClientError('Failed to arm %s.' % node_type)
@cmd_client.Command(num_args=0)
def do_arm(self, unused_line): # pylint: disable=invalid-name
"""Arms the selected motors and/or dynos."""
if self._motors_selected:
self._CheckMotorStatus([flags.kMotorStatusInit])
if self._dynos_selected:
self._CheckDynoStatus([flags.kMotorStatusInit])
self._CheckTargetsSelected()
if self._motors_selected:
motor_arm_msg = pack_avionics_messages.MotorSetStateMessage()
motor_arm_msg.command = actuator_types.kActuatorStateCommandArm
motor_arm_msg.command_data = safety_codes.MOTOR_ARMING_SIGNAL
print 'Arming motors.'
motor_arm_msg.selected_motors = MotorsAsBits(
self._motor_listener.GetUnarmedMotors())
self._TryArm(
motor_arm_msg, 'kMessageTypeMotorSetState', self._MOTORS)
if self._dynos_selected:
dyno_arm_msg = pack_avionics_messages.DynoMotorSetStateMessage()
dyno_arm_msg.command = actuator_types.kActuatorStateCommandArm
dyno_arm_msg.command_data = safety_codes.MOTOR_ARMING_SIGNAL
print 'Arming dynos.'
dyno_arm_msg.selected_motors = MotorsAsBits(
self._dyno_listener.GetUnarmedMotors())
self._TryArm(
dyno_arm_msg, 'kMessageTypeDynoMotorSetState', self._DYNOS)
def _SetParam(self, line, message, node_type): # pylint: disable=invalid-name
"""Sets a param for a specified motor or dyno."""
targets, args = cmd_client.SelectArgs(
line.split(), MOTORS, require_some=True, select_all=True)
param, args = cmd_client.SelectArgs(
args, MOTOR_PARAMS.keys(), require_one=True, select_all=False)
if node_type == self._DYNOS:
targets = ['DYNO_%s' % t.upper() for t in targets]
try:
value = float(args[0])
except ValueError:
raise MotorClientError('Invalid value: "%s".' % args[0])
message.id = MOTOR_PARAMS[param]
message.value = value
failed_targets = []
for target in targets:
print 'Setting %s to %g on %s.' % (param, value, target)
if target.startswith('DYNO_'):
message.selected_motors = MotorsAsBits([target[len('DYNO_'):]])
aio_target = AioNodeNameFromDynoNickname(target[len('DYNO_'):])
success = self._TrySetParam(
message, 'kMessageTypeDynoMotorSetParam', param, target, aio_target)
else:
message.selected_motors = MotorsAsBits([target])
aio_target = AioNodeNameFromMotorNickname(target)
success = self._TrySetParam(
message, 'kMessageTypeMotorSetParam', param, target, aio_target)
if not success:
failed_targets.append(target)
if failed_targets:
raise MotorClientError('Failed to verify %s from %s.'
% (param, failed_targets))
def _TrySetParam(self, message, msg_type, param, target, aio_target):
for _ in xrange(self._NUM_RETRIES):
self._set_param_aio_client.Send(message, msg_type, OPERATOR)
for _ in xrange(self._NUM_RETRIES):
try:
_, header, ack = self._ack_param_aio_client.Recv()
if (header.source == aio_node_helper.Value(aio_target)
and header.type == message_type.kMessageTypeMotorAckParam
and ack.id == message.id and ack.value == message.value):
print '%s %s: %g' % (target, param, ack.value)
return True
except socket.timeout:
return False
return False
@cmd_client.Command(num_args=3)
def do_set_param(self, line): # pylint: disable=invalid-name
"""Sets param for a specified motor, e.g. "set_motor_param SBO Ld 3.14"."""
self._CheckMotorStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
message = pack_avionics_messages.MotorSetParamMessage()
self._SetParam(line, message, self._MOTORS)
@cmd_client.Command(num_args=3)
def do_set_param_dyno(self, line): # pylint: disable=invalid-name
"""Sets param for a specified dyno, e.g. "set_dyno_param SBO Ld 3.14"."""
self._CheckDynoStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
message = pack_avionics_messages.DynoMotorSetParamMessage()
self._SetParam(line, message, self._DYNOS)
def complete_set_param(self, text, line, *unused_args): # pylint: disable=invalid-name
arg_number = len(line.split())
if not text:
arg_number += 1
if arg_number == 2:
return self._CompleteArg(text, sorted(MOTORS) + ['All'])
elif arg_number == 3:
return self._CompleteArg(text, sorted(MOTOR_PARAMS.keys()))
else:
return []
complete_set_param_dyno = complete_set_param
def _GetParam(self, line, message, node_type):
targets, args = cmd_client.SelectArgs(
line.split(), MOTORS, require_some=True, select_all=True)
param, _ = cmd_client.SelectArgs(
args, MOTOR_PARAMS.keys(), require_one=True, select_all=False)
if node_type == self._DYNOS:
targets = ['DYNO_%s' % t.upper() for t in targets]
message.id = MOTOR_PARAMS[param]
failed_targets = []
for target in targets:
print 'Getting %s from %s...' % (param, target)
success = True
if target.startswith('DYNO_'):
message.selected_motors = MotorsAsBits([target[len('DYNO_'):]])
aio_target = AioNodeNameFromDynoNickname(target[len('DYNO_'):])
success = self._TryGetParam(
message, 'kMessageTypeDynoMotorGetParam', param, target, aio_target)
else:
message.selected_motors = MotorsAsBits([target])
aio_target = AioNodeNameFromMotorNickname(target)
success = self._TryGetParam(
message, 'kMessageTypeMotorGetParam', param, target, aio_target)
if not success:
failed_targets.append(target)
if failed_targets:
raise MotorClientError('Failed to get %s from %s.'
% (param, failed_targets))
def _TryGetParam(self, message, msg_type, param, target, aio_target):
for _ in xrange(self._NUM_RETRIES):
self._get_param_aio_client.Send(message, msg_type, OPERATOR)
for _ in xrange(self._NUM_RETRIES):
try:
_, header, ack = self._ack_param_aio_client.Recv()
if (header.source == aio_node_helper.Value(aio_target)
and header.type == message_type.kMessageTypeMotorAckParam
and ack.id == message.id):
print '%s %s: %g' % (target, param, ack.value)
return True
except socket.timeout:
return False
return False
@cmd_client.Command()
def do_get_param(self, line): # pylint: disable=invalid-name
self._CheckMotorStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
message = pack_avionics_messages.MotorGetParamMessage()
self._GetParam(line, message, self._MOTORS)
@cmd_client.Command()
def do_get_param_dyno(self, line): # pylint: disable=invalid-name
self._CheckDynoStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
message = pack_avionics_messages.DynoMotorGetParamMessage()
self._GetParam(line, message, self._DYNOS)
complete_get_param = complete_set_param
complete_get_param_dyno = complete_get_param
@cmd_client.Command()
def do_run(self, line): # pylint: disable=invalid-name
"""Runs the selected motors and/or dynos.
Specify a duration in "s" or "ms". E.g. "run 10s" or "run 300ms".
Args:
line: Command to this function.
Raises:
MotorClientError: An invalid duration was specified.
"""
if self._motors_selected:
self._CheckMotorStatus([flags.kMotorStatusArmed])
if self._dynos_selected:
self._CheckDynoStatus([flags.kMotorStatusArmed])
self._CheckTargetsSelected()
if line.endswith('ms'):
line = line[:-2]
multiplier = 1e-3
elif line.endswith('s'):
line = line[:-1]
multiplier = 1.0
else:
raise MotorClientError('Usage: run {$N {s|ms}}')
try:
duration = float(line) * multiplier
except ValueError:
raise MotorClientError('Invalid run time: \'%s\'' % line)
if self._motor_runner.IsRunning() or self._dyno_runner.IsRunning():
raise MotorClientError('Already running.')
if self._motors_selected:
if not self._motor_listener.AllMotorsArmed():
raise MotorClientError('Motors not armed.')
self._motor_runner.StartRun(duration)
if self._dynos_selected:
if not self._dyno_listener.AllMotorsArmed():
raise MotorClientError('Dynos not armed.')
self._dyno_runner.StartRun(duration)
print 'Running...'
@cmd_client.Command(num_args=0)
def do_stop(self, unused_line): # pylint: disable=invalid-name
"""Stops the motors and/or dynos."""
if self._motor_runner.IsRunning() or self._dyno_runner.IsRunning():
self._motor_runner.StopRun()
self._dyno_runner.StopRun()
else:
raise MotorClientError('Not running.')
print 'Run stopped.'
def _GetCommandFunction(self, line):
"""Returns a complete command function for each selected motor and/or dyno.
Args:
line: Command to this function.
Raises:
MotorClientError: Motors and/or dynos are running.
Returns:
torque_func: A function that returns torque commands.
omega_lower_func: A function that returns omega_lower commands.
omega_upper_func: A function that returns omega_upper commands.
freeze_command: Specifies if last command should persist on stop.
"""
if self._motor_runner.IsRunning() or self._dyno_runner.IsRunning():
raise MotorClientError('Motors and/or dynos are running.')
args = line.split()
data, loop = GenerateCommandData(args)
t = data[:, 0]
torque_cmd = data[:, 1:9]
omega_lower_cmd = data[:, 9:17]
omega_upper_cmd = data[:, 17:25]
torque_func = CommandProfile(t, torque_cmd, TORQUE_MIN_LIMIT,
TORQUE_MAX_LIMIT, 'torque', loop)
omega_lower_func = CommandProfile(t, omega_lower_cmd, OMEGA_MIN_LIMIT,
OMEGA_MAX_LIMIT, 'omega', loop)
omega_upper_func = CommandProfile(t, omega_upper_cmd, OMEGA_MIN_LIMIT,
OMEGA_MAX_LIMIT, 'omega', loop)
freeze_command = False
return (torque_func, omega_lower_func, omega_upper_func, freeze_command)
@cmd_client.Command(num_args=[1, 2])
def do_set_command_function(self, line): # pylint: disable=invalid-name, g-doc-args
# pylint: disable=g-doc-args
"""Sets a command function for motor(s).
Specify a filename which may be:
- A Python file (must have .py suffix) corresponding to an input to
gen_lookup_table.py
- A text file whose output is a lookup table formatted per the output of
gen_lookup_table.py.
"""
self._CheckMotorStatus(
[flags.kMotorStatusInit, flags.kMotorStatusArmed,
flags.kMotorStatusError])
cmd_args = self._GetCommandFunction(line)
self._motor_runner.SetCommandFunction(*cmd_args)
@cmd_client.Command(num_args=[1, 2])
def do_set_command_function_dyno(self, line): # pylint: disable=invalid-name
# pylint: disable=g-doc-args
"""Sets a command function for dyno(s).
Specify a filename which may be:
- A Python file (must have .py suffix) corresponding to an input to
gen_lookup_table.py
- A text file whose output is a lookup table formatted per the output of
gen_lookup_table.py.
"""
self._CheckDynoStatus(
[flags.kMotorStatusInit, flags.kMotorStatusArmed,
flags.kMotorStatusError])
cmd_args = self._GetCommandFunction(line)
self._dyno_runner.SetCommandFunction(*cmd_args)
def complete_set_motor_command_function(self, _, line, *unused_args): # pylint: disable=invalid-name
"""Completes arguments for the "set_command_function" command."""
args = line.split(None, 2)
if len(args) > 2 or (len(args) == 2 and line.endswith(' ')):
suggestions = ['noloop', 'loop']
if len(args) == 3:
if args[2] in suggestions:
return []
suggestions = [x for x in suggestions if x.startswith(args[2])]
else:
path = args[1] if len(args) == 2 else ''
suggestions = cmd_client.CompleteFile(path)
suggestions = [x for x in suggestions
if (x.endswith(('/', '.py', '.pycmd', '.txt', '.dat'))
or x.find('.') < 0)]
return suggestions
complete_set_dyno_command_function = complete_set_motor_command_function
@cmd_client.Command(num_args=2)
def do_set_speed_limits(self, line): # pylint: disable=invalid-name
"""Sets the speed limits for torque-mode e.g. set_speed_limits 100 200."""
if not self._dynos_selected:
raise MotorClientError('No dynos selected. Use "set_targets_dyno".')
args = line.split()
try:
omega_lower = float(args[0])
omega_upper = float(args[1])
except ValueError:
raise MotorClientError('Invalid argument(s): \'{:s}\''.format(line))
CheckCommandLimits(
omega_lower, omega_upper, OMEGA_MIN_LIMIT, OMEGA_MAX_LIMIT, 'omega')
self._omega_lower_limit = omega_lower
self._omega_upper_limit = omega_upper
print 'Omega limits set to: %.2f rad/s, %.2f rad/s.' % (
self._omega_lower_limit, self._omega_upper_limit)
torque_func = lambda _: self._torque
omega_lower_func = lambda _: self._omega_lower_limit
omega_upper_func = lambda _: self._omega_upper_limit
freeze_command = True
self._dyno_runner.SetCommandFunction(torque_func, omega_lower_func,
omega_upper_func, freeze_command)
@cmd_client.Command(num_args=1)
def do_set_torque(self, line): # pylint: disable=invalid-name
"""Sets motor torque."""
if not self._dynos_selected:
raise MotorClientError('No dynos selected. Use "set_targets_dyno".')
try:
torque = float(line)
except ValueError:
raise MotorClientError('Invalid argument(s): \'{:s}\''.format(line))
if self._omega_lower_limit == 0 and self._omega_upper_limit == 0:
raise MotorClientError('Omega limits not set. Use "set_speed_limits".')
CheckCommandLimits(
torque, torque, TORQUE_MIN_LIMIT, TORQUE_MAX_LIMIT, 'torque')
self._torque = torque
print 'Torque desired: %.2f Nm. Speed limits: %.2f rad/s, %.2f rad/s.' % (
torque, self._omega_lower_limit, self._omega_upper_limit)
torque_func = lambda _: self._torque
omega_lower_func = lambda _: self._omega_lower_limit
omega_upper_func = lambda _: self._omega_upper_limit
freeze_command = True
self._dyno_runner.SetCommandFunction(torque_func, omega_lower_func,
omega_upper_func, freeze_command)
@cmd_client.Command(num_args=1)
def do_set_omega(self, line): # pylint: disable=invalid-name
"""Sets motor speed."""
if not self._motors_selected:
raise MotorClientError('No motors selected. Use "set_targets".')
try:
omega = float(line)
except ValueError:
raise MotorClientError('Invalid omega: \'{:s}\''.format(line))
CheckCommandLimits(omega, omega, OMEGA_MIN_LIMIT, OMEGA_MAX_LIMIT, 'omega')
print 'Omega desired: %s rad/s' % omega
torque_func = lambda _: 0.0
omega_lower_func = lambda _: omega
omega_upper_func = lambda _: omega
freeze_command = True
self._motor_runner.SetCommandFunction(torque_func, omega_lower_func,
omega_upper_func, freeze_command)
def _RampCommand(self, line, cmd_type, runner):
"""Sets a motor speed or torque ramp.
Args:
line: Command to this function.
cmd_type: Torque or Omega command to ramp.
runner: Runner instance to use for setting command.
Raises:
MotorClientError: An invalid parameter was specified.
"""
args = line.split(None, 2)
try:
cmd = float(args[0])
except ValueError:
      raise MotorClientError('Invalid {:s}: \'{:s}\''.format(cmd_type, args[0]))
if len(args) == 2:
try:
dt = self._dt = float(args[1])
except ValueError:
raise MotorClientError('Invalid time: \'{:s}\''.format(args[1]))
else:
dt = 1.0
if runner.IsRunning():
t0 = runner.GetTime()
motor_cmd = runner.GetCommand()
cmd0 = motor_cmd[cmd_type]
else:
t0 = 0.0
cmd0 = 0.0
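    # Avoid dividing by a near-zero dt when computing the ramp slope.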
dcmd_dt = (cmd - cmd0) / dt if abs(dt) > 10.0 * EPS32 else 0.0
def Ramp(t):
if t > t0 + dt:
return cmd
elif t > t0:
return dcmd_dt * (t - t0) + cmd0
else:
return cmd0
if cmd_type == 'omega_upper':
torque_func = lambda _: 0.0
omega_lower_func = Ramp
omega_upper_func = Ramp
elif cmd_type == 'torque':
torque_func = Ramp
omega_lower_func = lambda _: self._omega_lower_limit
omega_upper_func = lambda _: self._omega_upper_limit
else:
raise MotorClientError('Invalid command type: %s' % cmd_type)
freeze_command = True
runner.SetCommandFunction(
torque_func, omega_lower_func, omega_upper_func, freeze_command)
display_cmd = cmd_type.split('_')[0].capitalize()
print (' Ramping over dt = %4.2f:\n'
' %s(t0) = %4.1f\n'
' %s(t0 + dt) = %4.1f' % (dt, display_cmd, cmd0, display_cmd, cmd))
@cmd_client.Command(num_args=[1, 2])
def do_ramp_omega(self, line): # pylint: disable=invalid-name
# pylint: disable=g-doc-args
"""Sets a motor speed ramp.
Specify a linear angular rate ramp from the present speed omega0 to a final
speed omega1 over some time dt (in seconds) with the command:
ramp_omega [omega1] [dt]
The second argument is optional. If not specified dt = 1s is assumed.
"""
self._RampCommand(line, 'omega_upper', self._motor_runner)
@cmd_client.Command(num_args=[1, 2])
def do_ramp_torque(self, line): # pylint: disable=invalid-name
# pylint: disable=g-doc-args
"""Sets a dyno torque ramp.
Specify a linear torque ramp from the present torque T0 to a final
torque T1 over some time dt (in seconds) with the command:
ramp_torque [T1] [dt]
The second argument is optional. If not specified dt = 1s is assumed.
"""
self._RampCommand(line, 'torque', self._dyno_runner)
@cmd_client.Command(num_args=0)
def do_clear_errors(self, unused_line): # pylint: disable=invalid-name
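    """Clears errors on the selected motors and/or dynos."""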
self._CheckTargetsSelected()
if self._motors_selected:
self._CheckMotorStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._motor_listener.ClearErrors()
self._motor_runner.ClearErrors()
if self._dynos_selected:
self._CheckDynoStatus([flags.kMotorStatusInit, flags.kMotorStatusError])
self._dyno_listener.ClearErrors()
self._dyno_runner.ClearErrors()
print 'Errors cleared.'
def _TryDisarm(self, node_type):
listener, runner = self._GetListenerAndRunner(node_type)
for _ in xrange(self._NUM_RETRIES):
runner.Disarm()
time.sleep(0.1)
if listener.AllMotorsDisarmed():
print 'Successfully disarmed %s.' % node_type
return
raise MotorClientError('Failed to disarm %s.' % node_type)
@cmd_client.Command(num_args=0)
def do_disarm(self, unused_line): # pylint: disable=invalid-name
"""Disarms the motors."""
self._CheckTargetsSelected()
print 'Disarming.'
if self._motors_selected:
self._TryDisarm(self._MOTORS)
if self._dynos_selected:
self._TryDisarm(self._DYNOS)
@cmd_client.Command()
def do_get_errors(self, unused_line): # pylint: disable=invalid-name
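    """Prints active errors and warnings for the selected targets."""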
self._CheckTargetsSelected()
if self._motors_selected:
self._motor_listener.PrintErrors()
if self._dynos_selected:
self._dyno_listener.PrintErrors()
@cmd_client.Command()
def do_request_control_log(self, unused_line): # pylint: disable=invalid-name
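    """Requests a control log from the selected targets."""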
self._CheckTargetsSelected()
if self._motors_selected:
self._motor_runner.RequestControlLog()
if self._dynos_selected:
self._dyno_runner.RequestControlLog()
@cmd_client.Command()
def do_request_adc_log(self, unused_line): # pylint: disable=invalid-name
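    """Requests an ADC log from the selected targets."""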
self._CheckTargetsSelected()
if self._motors_selected:
self._motor_runner.RequestAdcLog()
if self._dynos_selected:
self._dyno_runner.RequestAdcLog()
class Listener(cmd_client.AioThread):
"""Continuously listens to MotorStatusMessages."""
def __init__(self, error_callback, motors, dyno_mode=False):
self._motors = motors.copy()
t_now = time.time()
self._errors = {m: flags.kMotorErrorNone for m in MOTORS}
self._warnings = {m: flags.kMotorWarningNone for m in MOTORS}
self._error_lock = threading.Lock()
self._clear_errors_stop_time = t_now
self._motor_status = {m: flags.kMotorStatusInit
for m in self._motors}
self._motor_status_lock = threading.Lock()
self._t_message = {m: t_now for m in self._motors}
self._t_message_lock = threading.Lock()
self._dyno_mode = dyno_mode
if dyno_mode:
sources = {AioNodeNameFromDynoNickname(m): m for m in self._motors}
else:
sources = {AioNodeNameFromMotorNickname(m): m for m in self._motors}
self._motor_sources = {aio.aio_node_helper.Value(k): sources[k]
for k in sources.keys()}
self._error_callback = error_callback
super(Listener, self).__init__(['kMessageTypeMotorStatus'],
allowed_sources=sources.keys(), timeout=0.1)
self.start()
def ClearErrors(self):
with self._error_lock:
for motor in self._errors.keys():
self._errors[motor] = flags.kMotorErrorNone
self._warnings[motor] = flags.kMotorWarningNone
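      # Ignore reported error bits for the next ~50 ms so stale status
      # messages do not immediately repopulate the cleared errors.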
self._clear_errors_stop_time = time.time() + 5*10e-3
def GetMostRestrictiveMotorStatus(self):
"""Returns the most restrictive status across all motors."""
with self._motor_status_lock:
motor_statuses = self._motor_status.values()
if flags.kMotorStatusRunning in motor_statuses:
return flags.kMotorStatusRunning
elif flags.kMotorStatusArmed in motor_statuses:
return flags.kMotorStatusArmed
elif flags.kMotorStatusError in motor_statuses:
return flags.kMotorStatusError
return flags.kMotorStatusInit
def AllMotorsArmed(self):
with self._motor_status_lock:
motor_statuses = self._motor_status.values()
return all(x == flags.kMotorStatusArmed for x in motor_statuses)
def AnyMotorsArmed(self):
with self._motor_status_lock:
motor_statuses = self._motor_status.values()
return any(x == flags.kMotorStatusArmed for x in motor_statuses)
def AllMotorsDisarmed(self):
with self._motor_status_lock:
motor_statuses = self._motor_status.values()
return all(x != flags.kMotorStatusArmed
and x != flags.kMotorStatusRunning
for x in motor_statuses)
def GetUnarmedMotors(self):
with self._motor_status_lock:
return [motor for motor, status in self._motor_status.iteritems()
if status == flags.kMotorStatusInit]
def PrintErrors(self):
with self._error_lock:
if (any([e != flags.kMotorErrorNone for e in self._errors.itervalues()])
or any([w != flags.kMotorWarningNone
for w in self._warnings.itervalues()])):
print 'Errors:'
for motor in MOTORS:
error = self._errors[motor]
warning = self._warnings[motor]
if error != flags.kMotorErrorNone:
print '%s: %s' % (motor, ' | '.join(GetMotorErrorNames(error)))
            motor = (' ') * len(motor) # Do not print out the motor name again.
if warning != flags.kMotorWarningNone:
print '%s: %s' % (motor, ' | '.join(GetMotorWarningNames(warning)))
else:
print 'No errors or warnings.'
def _RunOnce(self):
try:
_, header, msg = self._client.Recv()
motor = self._motor_sources[header.source]
t_now = time.time()
with self._t_message_lock:
self._t_message[motor] = t_now
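        # Treat a motor's status as stale if its last message is over 50 ms old.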
stale = {m: t_now - self._t_message[m] > 0.05 for m in self._motors}
new_status = False
execute_callback = False
with self._error_lock, self._motor_status_lock:
# New errors.
if t_now > self._clear_errors_stop_time:
newline = '\n'
error_diff = self._errors[motor] ^ msg.motor_error
if msg.motor_error and error_diff:
self._errors[motor] |= msg.motor_error
print ('%sNew motor error(s) %s: %s' %
(newline, motor, ' | '.join(GetMotorErrorNames(error_diff))))
newline = '' # Group errors and warning from the same motor.
warning_diff = self._warnings[motor] ^ msg.motor_warning
if warning_diff:
self._warnings[motor] = msg.motor_warning
if msg.motor_warning & warning_diff:
print ('%sNew motor warning(s) %s: %s' %
(newline, motor,
' | '.join(GetMotorWarningNames(warning_diff
& msg.motor_warning))))
else:
print ('%sCleared motor warning(s) %s: %s' %
(newline, motor,
' | '.join(GetMotorWarningNames(warning_diff
& ~msg.motor_warning))))
# Change in status.
if self._motor_status[motor] != msg.motor_status:
new_status = True
self._motor_status[motor] = msg.motor_status
# Invoke error callback after giving up self._error_lock and
      # self._motor_status_lock just in case.
if (new_status and
any([e for e in self._errors.values()]) and
all([self._motor_status[motor] &
~(flags.kMotorStatusRunning | flags.kMotorStatusWindDown) or
stale[motor] for motor in self._motors])):
execute_callback = True
if execute_callback:
self._error_callback()
except socket.timeout:
pass
class Runner(cmd_client.AioThread):
"""Continuously sends ControllerCommandMessages."""
def __init__(self, motors, spin_dir, dyno_mode=False):
self._motors = motors.copy()
self._spin_dir = [spin_dir.get(motor, 1) for motor in MOTORS]
self._clear_error_retries = 0
self._disarm_retries = 0
self._request_control_log = False
self._request_adc_log = False
self._dyno_mode = dyno_mode
if dyno_mode:
self._command = pack_avionics_messages.DynoCommandMessage()
else:
self._command = pack_avionics_messages.ControllerCommandMessage()
self._command.motor_command = flags.kMotorCommandNone
self._command_lock = threading.Lock()
self._command_function_lock = threading.Lock()
self._torque_func = lambda _: 0.0
self._omega_lower_func = lambda _: 0.0
self._omega_upper_func = lambda _: 0.0
self._freeze_command = False # Replace command with a constant on stop.
self._WriteMotorCommand()
super(Runner, self).__init__(['kMessageTypeControllerCommand',
'kMessageTypeDynoCommand'])
self.start()
def SetCommand(self, command_mask):
with self._command_lock:
self._command.motor_command |= command_mask
def _ClearCommand(self, command_mask):
with self._command_lock:
self._command.motor_command &= ~command_mask
def IsRunning(self):
return self._command.motor_command & flags.kMotorCommandRun
def StartRun(self, duration):
self._start_time = time.time()
self._stop_time = self._start_time + duration
self.SetCommand(flags.kMotorCommandRun)
def StopRun(self):
if self._freeze_command:
motor_cmd = self.GetCommand()
with self._command_function_lock:
self._torque_func = lambda _: motor_cmd['torque']
self._omega_lower_func = lambda _: motor_cmd['omega_lower']
self._omega_upper_func = lambda _: motor_cmd['omega_upper']
self._ClearCommand(flags.kMotorCommandRun)
def GetCommand(self):
"""Generates motor commands at the current time.
Returns:
motor_cmd: Command to send to motors or dynos at the current time.
"""
if self.IsRunning():
curr_time = time.time() - self._start_time
else:
curr_time = 0.0
with self._command_function_lock:
motor_cmd = {'torque': self._torque_func(curr_time),
'omega_lower': self._omega_lower_func(curr_time),
'omega_upper': self._omega_upper_func(curr_time)}
return motor_cmd
def _CheckCommand(self, cmd_dict):
for _, val in cmd_dict.iteritems():
assert isinstance(val, list)
assert len(val) == len(MOTORS)
def _WriteMotorCommand(self):
motor_cmd = self.GetCommand()
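    # Broadcast scalar commands into per-motor lists before validation.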
for cmd, val in motor_cmd.iteritems():
if isinstance(val, int) or isinstance(val, float):
motor_cmd[cmd] = [val for _ in MOTORS]
self._CheckCommand(motor_cmd)
torque = motor_cmd['torque']
omega_lower = motor_cmd['omega_lower']
omega_upper = motor_cmd['omega_upper']
with self._command_lock:
for i, motor in enumerate(MOTORS):
spin = self._spin_dir[i]
if motor in self._motors:
self._command.motor_torque[i] = torque[i] * spin
self._command.motor_speed_lower_limit[i] = omega_lower[i] * spin
self._command.motor_speed_upper_limit[i] = omega_upper[i] * spin
else:
self._command.motor_torque[i] = 0.0
self._command.motor_speed_lower_limit[i] = 0.0
self._command.motor_speed_upper_limit[i] = 0.0
def SetCommandFunction(self, torque_func, omega_lower_func,
omega_upper_func, freeze_command):
with self._command_function_lock:
self._torque_func = torque_func
self._omega_lower_func = omega_lower_func
self._omega_upper_func = omega_upper_func
self._freeze_command = freeze_command
self._WriteMotorCommand()
def GetTime(self):
return time.time() - self._start_time if self.IsRunning() else 0.0
def ClearErrors(self):
self.SetCommand(flags.kMotorCommandClearError)
self._clear_error_retries = 3
def Disarm(self):
self.SetCommand(flags.kMotorCommandDisarm)
self._disarm_retries = 3
def RequestControlLog(self):
self._request_control_log = True
def RequestAdcLog(self):
self._request_adc_log = True
def _RunOnce(self):
"""Modifies and sends the ControllerCommandMessage."""
if self.IsRunning():
if time.time() > self._stop_time:
self.StopRun()
print '\nFinished run.'
else:
try:
self._WriteMotorCommand()
except AssertionError:
print ('Warning: Command(t) did not return a scalar or list with '
'elements for all motors.')
self.StopRun()
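    # Hold the clear-error / disarm command bits for a few send cycles before
    # dropping them.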
if self._clear_error_retries <= 0:
self._ClearCommand(flags.kMotorCommandClearError)
else:
self._clear_error_retries -= 1
if self._disarm_retries <= 0:
self._ClearCommand(flags.kMotorCommandDisarm)
else:
self._disarm_retries -= 1
if self._request_control_log:
self.SetCommand(flags.kMotorCommandSendControlLog)
self._request_control_log = False
else:
self._ClearCommand(flags.kMotorCommandSendControlLog)
if self._request_adc_log:
self.SetCommand(flags.kMotorCommandSendAdcLog)
self._request_adc_log = False
else:
self._ClearCommand(flags.kMotorCommandSendAdcLog)
with self._command_lock:
if self._dyno_mode:
self._client.Send(self._command, 'kMessageTypeDynoCommand', OPERATOR)
else:
self._client.Send(self._command, 'kMessageTypeControllerCommand',
CONTROLLER)
time.sleep(0.0095)
if __name__ == '__main__':
client = MotorCommandClient()
try:
client.cmdloop()
except BaseException:
client.TryStopThreads()
raise
|
apache-2.0
| -5,446,123,190,470,756,000
| 35.626277
| 103
| 0.645343
| false
| 3.509442
| false
| false
| false
|
JarbasAI/JarbasAI
|
jarbas_models/tf_tacotron/models/modules.py
|
1
|
3455
|
import tensorflow as tf
from tensorflow.contrib.rnn import GRUCell
def prenet(inputs, is_training, layer_sizes=[256, 128], scope=None):
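    # Pre-net: a stack of fully connected ReLU layers with dropout; dropout is
    # effectively disabled outside of training (drop_rate = 0).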
x = inputs
drop_rate = 0.5 if is_training else 0.0
with tf.variable_scope(scope or 'prenet'):
for i, size in enumerate(layer_sizes):
dense = tf.layers.dense(x, units=size, activation=tf.nn.relu,
name='dense_%d' % (i + 1))
x = tf.layers.dropout(dense, rate=drop_rate,
name='dropout_%d' % (i + 1))
return x
def encoder_cbhg(inputs, input_lengths, is_training):
return cbhg(
inputs,
input_lengths,
is_training,
scope='encoder_cbhg',
K=16,
projections=[128, 128])
def post_cbhg(inputs, input_dim, is_training):
return cbhg(
inputs,
None,
is_training,
scope='post_cbhg',
K=8,
projections=[256, input_dim])
def cbhg(inputs, input_lengths, is_training, scope, K, projections):
with tf.variable_scope(scope):
with tf.variable_scope('conv_bank'):
# Convolution bank: concatenate on the last axis to stack channels from all convolutions
conv_outputs = tf.concat(
[conv1d(inputs, k, 128, tf.nn.relu, is_training,
'conv1d_%d' % k) for k in range(1, K + 1)],
axis=-1
)
# Maxpooling:
maxpool_output = tf.layers.max_pooling1d(
conv_outputs,
pool_size=2,
strides=1,
padding='same')
# Two projection layers:
proj1_output = conv1d(maxpool_output, 3, projections[0], tf.nn.relu,
is_training, 'proj_1')
proj2_output = conv1d(proj1_output, 3, projections[1], None,
is_training, 'proj_2')
# Residual connection:
highway_input = proj2_output + inputs
# Handle dimensionality mismatch:
if highway_input.shape[2] != 128:
highway_input = tf.layers.dense(highway_input, 128)
# 4-layer HighwayNet:
for i in range(4):
highway_input = highwaynet(highway_input, 'highway_%d' % (i + 1))
rnn_input = highway_input
# Bidirectional RNN
outputs, states = tf.nn.bidirectional_dynamic_rnn(
GRUCell(128),
GRUCell(128),
rnn_input,
sequence_length=input_lengths,
dtype=tf.float32)
return tf.concat(outputs, axis=2) # Concat forward and backward
def highwaynet(inputs, scope):
with tf.variable_scope(scope):
H = tf.layers.dense(
inputs,
units=128,
activation=tf.nn.relu,
name='H')
T = tf.layers.dense(
inputs,
units=128,
activation=tf.nn.sigmoid,
name='T',
bias_initializer=tf.constant_initializer(-1.0))
return H * T + inputs * (1.0 - T)
def conv1d(inputs, kernel_size, channels, activation, is_training, scope):
with tf.variable_scope(scope):
conv1d_output = tf.layers.conv1d(
inputs,
filters=channels,
kernel_size=kernel_size,
activation=activation,
padding='same')
return tf.layers.batch_normalization(conv1d_output,
training=is_training)
|
gpl-3.0
| 2,314,599,216,257,785,300
| 31.28972
| 100
| 0.541245
| false
| 3.767721
| false
| false
| false
|
zhangg/trove
|
trove/guestagent/datastore/mysql/service.py
|
1
|
3685
|
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from trove.common.i18n import _
from trove.guestagent.datastore.mysql_common import service
LOG = logging.getLogger(__name__)
CONF = service.CONF
class KeepAliveConnection(service.BaseKeepAliveConnection):
pass
class MySqlAppStatus(service.BaseMySqlAppStatus):
pass
class LocalSqlClient(service.BaseLocalSqlClient):
pass
class MySqlApp(service.BaseMySqlApp):
def __init__(self, status):
super(MySqlApp, self).__init__(status, LocalSqlClient,
KeepAliveConnection)
    # DEPRECATED: Maintain for API Compatibility
def get_txn_count(self):
LOG.info(_("Retrieving latest txn id."))
txn_count = 0
with self.local_sql_client(self.get_engine()) as client:
result = client.execute('SELECT @@global.gtid_executed').first()
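            # gtid_executed looks like 'uuid1:1-100:200-250,uuid2:1-5'; sum the
            # interval spans in each UUID set to approximate the txn count.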
for uuid_set in result[0].split(','):
for interval in uuid_set.split(':')[1:]:
if '-' in interval:
iparts = interval.split('-')
txn_count += int(iparts[1]) - int(iparts[0])
else:
txn_count += 1
return txn_count
def _get_slave_status(self):
with self.local_sql_client(self.get_engine()) as client:
return client.execute('SHOW SLAVE STATUS').first()
def _get_master_UUID(self):
slave_status = self._get_slave_status()
return slave_status and slave_status['Master_UUID'] or None
def _get_gtid_executed(self):
with self.local_sql_client(self.get_engine()) as client:
return client.execute('SELECT @@global.gtid_executed').first()[0]
def get_last_txn(self):
master_UUID = self._get_master_UUID()
last_txn_id = '0'
gtid_executed = self._get_gtid_executed()
for gtid_set in gtid_executed.split(','):
uuid_set = gtid_set.split(':')
if uuid_set[0] == master_UUID:
last_txn_id = uuid_set[-1].split('-')[-1]
break
return master_UUID, int(last_txn_id)
def get_latest_txn_id(self):
LOG.info(_("Retrieving latest txn id."))
return self._get_gtid_executed()
def wait_for_txn(self, txn):
LOG.info(_("Waiting on txn '%s'."), txn)
with self.local_sql_client(self.get_engine()) as client:
client.execute("SELECT WAIT_UNTIL_SQL_THREAD_AFTER_GTIDS('%s')"
% txn)
class MySqlRootAccess(service.BaseMySqlRootAccess):
def __init__(self):
super(MySqlRootAccess, self).__init__(LocalSqlClient,
MySqlApp(MySqlAppStatus.get()))
class MySqlAdmin(service.BaseMySqlAdmin):
def __init__(self):
super(MySqlAdmin, self).__init__(LocalSqlClient, MySqlRootAccess(),
MySqlApp)
get_engine = MySqlApp.get_engine
|
apache-2.0
| -3,997,285,619,942,811,600
| 33.764151
| 78
| 0.612212
| false
| 3.907741
| false
| false
| false
|
mikemintz/neutron
|
modules/iq.py
|
1
|
4752
|
# -*- coding: koi8-r -*-
## OJAB iq module
## Copyright (C) Boris Kotov <admin@avoozl.ru>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
# Modified by me :) Gh0st AKA Bohdan Turkynewych
import os, xmpp, time
messages=None
global version
global vername
ver_queue={}
time_queue={}
iq_id=1
def versioncmd(conn, msg, args, replyto):
if args=="":
target=msg.getFrom()
else:
target=("%s/%s"%(replyto, args))
req=xmpp.protocol.Iq('get', xmpp.NS_VERSION, {}, target)
req.setID(iq_id)
ver_queue[str(iq_id)]=[replyto, msg.getFrom().getResource(), False]
conn.send(req)
globals()['iq_id']+=1
def pingcmd(conn, msg, args, replyto):
if args=="":
target=msg.getFrom()
else:
target=("%s/%s"%(replyto, args))
req=xmpp.protocol.Iq('get', xmpp.NS_VERSION, {}, target)
req.setID(iq_id)
ver_queue[str(iq_id)]=[replyto, msg.getFrom().getResource(), time.time()]
conn.send(req)
globals()['iq_id']+=1
def timecmd(conn, msg, args, replyto):
if args=="":
target=msg.getFrom()
else:
target=("%s/%s"%(replyto, args))
req=xmpp.protocol.Iq('get', xmpp.NS_TIME, {}, target)
req.setID(iq_id)
time_queue[str(iq_id)]=[replyto, msg.getFrom().getResource()]
conn.send(req)
globals()['iq_id']+=1
def versionCB(conn, iq_obj):
uname=os.popen("uname -sr", 'r')
osver=uname.read().strip()
uname.close()
pipe = os.popen('sh -c ' + '"' + 'python -V 2>&1' + '"')
python_ver = pipe.read(1024).strip()
osver = osver + ' ' + python_ver
iq_obj=iq_obj.buildReply('result')
qp=iq_obj.getTag('query')
qp.setTagData('name', vername)
qp.setTagData('version', version)
qp.setTagData('os', osver)
conn.send(iq_obj)
raise xmpp.NodeProcessed
def versionresultCB(conn, iq_obj):
qp=iq_obj.getTag('query')
rname=qp.getTagData('name')
rversion=qp.getTagData('version')
ros=qp.getTagData('os')
rid=iq_obj.getID()
if ver_queue.has_key(rid):
if ver_queue[rid][2]:
if ver_queue[rid][1]==iq_obj.getFrom().getResource():
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['yourping']%(ver_queue[rid][1], str(round(time.time()-ver_queue[rid][2],3))), 'groupchat'))
else:
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['ping']%(ver_queue[rid][1], iq_obj.getFrom().getResource(), str(round(time.time()-ver_queue[rid][2],3))), 'groupchat'))
else:
if ver_queue[rid][1]==iq_obj.getFrom().getResource():
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['yourversion']%(ver_queue[rid][1], rname, rversion, ros), 'groupchat'))
else:
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['version']%(ver_queue[rid][1], iq_obj.getFrom().getResource(), rname, rversion, ros), 'groupchat'))
def versionerrorCB(conn, iq_obj):
rid=iq_obj.getID()
if ver_queue.has_key(rid):
if ver_queue[rid][2]:
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['ping_error']%(ver_queue[rid][1], iq_obj.getFrom().getResource()), 'groupchat'))
else:
conn.send(xmpp.protocol.Message(ver_queue[rid][0], messages['version_error']%(ver_queue[rid][1], iq_obj.getFrom().getResource()), 'groupchat'))
def timeCB(conn, iq_obj):
timep=os.popen("date -u '+%Y%m%dT%T'", 'r'); futc=timep.read(17); timep.close()
timep=os.popen("date '+%Z|%d/%m/%Y %T|'", 'r'); ftime=timep.read(); timep.close()
iq_obj = iq_obj.buildReply('result')
qp = iq_obj.getTag('query')
qp.setTagData('utc', futc)
qp.setTagData('tz', ftime.split("|")[0])
qp.setTagData('display', ftime.split("|")[1])
conn.send(iq_obj)
raise xmpp.NodeProcessed
def timeresultCB(conn, iq_obj):
qp=iq_obj.getTag('query')
rdisplay=qp.getTagData('display')
rid=iq_obj.getID()
if time_queue.has_key(rid):
if time_queue[rid][1]==iq_obj.getFrom().getResource():
conn.send(xmpp.protocol.Message(time_queue[rid][0], messages['yourtime']%(time_queue[rid][1], rdisplay), 'groupchat'))
else:
conn.send(xmpp.protocol.Message(time_queue[rid][0], messages['time']%(time_queue[rid][1], iq_obj.getFrom().getResource(), rdisplay), 'groupchat'))
def timeerrorCB(conn, iq_obj):
rid=iq_obj.getID()
if time_queue.has_key(rid):
conn.send(xmpp.protocol.Message(time_queue[rid][0], messages['time_error']%(time_queue[rid][1], iq_obj.getFrom().getResource()), 'groupchat'))
|
gpl-2.0
| -6,113,828,099,328,659,000
| 39.271186
| 195
| 0.62016
| false
| 2.915337
| false
| false
| false
|
smurfix/pybble
|
pybble/cache/__init__.py
|
1
|
1978
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division, unicode_literals
##
## This is part of Pybble, a WMS (Whatever Management System) based on
## Jinja2/Haml, Werkzeug, Flask, and Optimism.
##
## Pybble is Copyright © 2009-2014 by Matthias Urlichs <matthias@urlichs.de>,
## it is licensed under the GPLv3. See the file `README.md` for details,
## including an optimistic statement by the author.
##
## This paragraph is auto-generated and may self-destruct at any time,
## courtesy of "make update". The original is in ‘utils/_boilerplate.py’.
## Thus, please do not remove the next line, or insert any blank lines.
##BP
regions = None
from dogpile.cache.api import NO_VALUE
def keystr(args):
# Take care to keep this idempotent: keystr(x) == keystr(keystr(x))
return '|'.join(str(x) for x in args)
## TODO: add keyword-only region param
def delete(*args):
"""Delete a cache value (or a bunch of them)."""
global regions
if regions is None:
from .config import regions
if not regions:
return
# TODO: this only works with redis
r = regions['default'].backend.client
n = 0
if "*" in args:
for k in r.keys(keystr(args)):
r.delete(k)
n += 1
else:
r.delete(keystr(args))
n = 1
return n
def get(*args):
"""Get a cache value, or NO_VALUE if not set."""
global regions
if regions is None:
from .config import regions
if not regions:
return NO_VALUE
r = regions['default']
return r.get(keystr(args))
def set(val, *args):
"""Set a cache value. You really should use cached() instead."""
global regions
if regions is None:
from .config import regions
if not regions:
return
r = regions['default']
r.set(keystr(args),val)
def cached(func, *args):
"""Cache this function's result. Runs the function exactly once."""
global regions
if regions is None:
from .config import regions
if not regions:
return func()
r = regions['default']
return r.get_or_create(keystr(args), func)
|
gpl-3.0
| 7,012,280,615,499,374,000
| 24.960526
| 82
| 0.695895
| false
| 3.116904
| false
| false
| false
|
natj/bender
|
paper/figs/fig9.py
|
1
|
4141
|
import numpy as np
import math
from pylab import *
from palettable.wesanderson import Zissou_5 as wsZ
import matplotlib.ticker as mtick
from scipy.interpolate import interp1d
from scipy.interpolate import griddata
from scipy.signal import savgol_filter
def smooth(xx, yy):
yy = savgol_filter(yy, 7, 2)
np.clip(yy, 0.0, 1000.0, out=yy)
yy[0] = 0.0
yy[-1] = 0.0
return xx, yy
#Read JN files
def read_lineprof(fname):
da = np.genfromtxt(fname, delimiter=",")
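    # Grid spacing of the (assumed uniform) energy axis, used to normalize the
    # profile to unit area.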
des = np.diff(da[:,0])[2]
norm = np.sum(des*da[:,1])
return da[:,0],da[:,1]/norm
#Read JN files
def read_csv(fname):
da = np.genfromtxt(fname, delimiter=",")
des = np.diff(da[:,0])[2]
norm = np.sum(des*da[:,1])
return da[:,0],da[:,1] #/norm
## Plot
fig = figure(figsize=(5,3), dpi=80)
rc('font', family='serif')
rc('xtick', labelsize='xx-small')
rc('ytick', labelsize='xx-small')
gs = GridSpec(1, 1)
#gs.update(wspace = 0.34)
#gs.update(hspace = 0.4)
lsize = 10.0
xmin = 0.69
xmax = 0.82
#error window limits
eymin = -0.5
eymax = 0.5
#path to files
#path_JN = "../../out3/lines/"
path_JN = "../../out/lines2/"
#labels size
tsize = 10.0
nu = '700'
#fig.text(0.5, 0.92, '$\\theta_s = 18^{\\circ}$', ha='center', va='center', size=tsize)
#fig.text(0.5, 0.72, '$\\theta_s = 45^{\\circ}$', ha='center', va='center', size=tsize)
#fig.text(0.5, 0.52, '$\\theta_s = 90^{\\circ}$', ha='center', va='center', size=tsize)
#fig.text(0.5, 0.32, 'Hopf $\\theta_s = 45^{\circ}$', ha='center', va='center', size=tsize)
#fig.text(0.5, 0.12, 'Phase',ha='center', va='center', size=lsize)
ax1 = subplot(gs[0,0])
ax1.minorticks_on()
ax1.set_xlim(xmin, xmax)
ax1.set_ylim(0.0, 30)
ax1.set_ylabel('Normalized flux',size=lsize)
ax1.set_xlabel('Energy $E/E\'$',size=lsize)
#xx1, yy1 = read_lineprof(path_JN+'lineprof_f700pbbr10m1.4i20.csv')
#ax1.plot(xx1, yy1, "k--")
#xx2, yy2 = read_lineprof(path_JN+'lineprof_obl_HTq0_f700pbbr10m1.4i20.csv')
#ax1.plot(xx2, yy2, "k-")
#lineprof_obl_HTq3_f700pbbr10m1.4i20.csv
#lineprof_obl_HTq5_f700pbbr10m1.4i20.csv
#lineprof_obl_HTq2_f700pbbr10m1.4i20.csv
files_JN = [
"lineprof_f700pbbr10m1.4i20.csv",
"lineprof_obl_f700pbbr10m1.4i20.csv",
#"lineprof_sph2_HTqfix_f700pbbr10m1.4i20.csv"]
#"lineprof_obl_HTq0_f700pbbr10m1.4i20.csv",
"lineprof_obl_HTq1_f700pbbr10m1.4i20.csv"]
#"lineprof_obl_HTq4_f700pbbr10m1.4i20.csv"]
files_JN = ['sch/lineprofile_f700_bb_r10_m1.4_i20.csv',
'obl/lineprofile_f700_bb_r10_m1.4_i20.csv',
'q/lineprofile_f700_bb_r10_m1.4_i20.csv']
cols = ["black",
"blue",
"red",
"magenta"]
i = 0
for file_name in files_JN:
xx, yy = read_lineprof(path_JN+file_name)
xx, yy = smooth(xx, yy)
ax1.plot(xx, yy, color=cols[i], linestyle="solid")
i += 1
#path_JN = "../../out3/lines/"
xx, yy = read_lineprof("../../out3/lines/lineprof_obl_HTq4_f700pbbr10m1.4i20.csv")
ax1.plot(xx, yy, color="red", linestyle="dashed")
#files_Bau = [
#"sch+dopp.csv",
#"sch+dopp+obl.csv",
#"HT.csv",
#"HT_obl.csv"]
files_Bau = ['sch.csv', 'obl.csv', 'ht.csv']
i = 0
for file_name in files_Bau:
xx, yy = read_csv(path_JN+file_name)
#rescale xx for correct scaling
#xx = (xx-0.72)/(0.89-0.72)*(0.8-0.72) + 0.72
#ax1.plot(xx, yy, color=cols[i], linestyle="dashed")
i += 1
############ q's
#xx3, yy3 = read_lineprof(path_JN+'lineprof_obl_HTq1_f700pbbr10m1.4i20.csv')
#ax1.plot(xx3, yy3, "k-", label="$q = -0.268$")
#
#xx4, yy4 = read_lineprof(path_JN+'lineprof_obl_HTq2_f700pbbr10m1.4i20.csv')
#ax1.plot(xx4, yy4, "r-", label="$q \\times 2$")
#
#xx5, yy5 = read_lineprof(path_JN+'lineprof_obl_HTq3_f700pbbr10m1.4i20.csv')
#ax1.plot(xx5, yy5, "g-", label="$q \\times 3$")
#
#xx6, yy6 = read_lineprof(path_JN+'lineprof_obl_HTq4_f700pbbr10m1.4i20.csv')
#ax1.plot(xx6, yy6, "b-", label="$q \\times 4$")
#
#xx7, yy7 = read_lineprof(path_JN+'lineprof_obl_HTq5_f700pbbr10m1.4i20.csv')
#ax1.plot(xx7, yy7, "m-", label="$q \\times 5$")
#
#legend = ax1.legend(loc='upper left', shadow=False, labelspacing=0.1)
#for label in legend.get_texts():
# label.set_fontsize('x-small')
savefig('fig9_testi.pdf', bbox_inches='tight')
|
mit
| 426,456,998,150,507,200
| 23.358824
| 92
| 0.63632
| false
| 2.155648
| false
| false
| false
|
gaborvecsei/Color-Tracker
|
examples/tracking.py
|
1
|
2306
|
import argparse
from functools import partial
import cv2
import color_tracker
# You can determine these values with the HSVColorRangeDetector()
HSV_LOWER_VALUE = [155, 103, 82]
HSV_UPPER_VALUE = [178, 255, 255]
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument("-low", "--low", nargs=3, type=int, default=HSV_LOWER_VALUE,
help="Lower value for the HSV range. Default = 155, 103, 82")
parser.add_argument("-high", "--high", nargs=3, type=int, default=HSV_UPPER_VALUE,
help="Higher value for the HSV range. Default = 178, 255, 255")
parser.add_argument("-c", "--contour-area", type=float, default=2500,
help="Minimum object contour area. This controls how small objects should be detected. Default = 2500")
parser.add_argument("-v", "--verbose", action="store_true")
args = parser.parse_args()
return args
def tracking_callback(tracker: color_tracker.ColorTracker, verbose: bool = True):
# Visualizing the original frame and the debugger frame
cv2.imshow("original frame", tracker.frame)
cv2.imshow("debug frame", tracker.debug_frame)
# Stop the script when we press ESC
key = cv2.waitKey(1)
if key == 27:
tracker.stop_tracking()
if verbose:
for obj in tracker.tracked_objects:
print("Object {0} center {1}".format(obj.id, obj.last_point))
def main():
args = get_args()
# Creating a kernel for the morphology operations
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (11, 11))
# Init the ColorTracker object
tracker = color_tracker.ColorTracker(max_nb_of_objects=5, max_nb_of_points=20, debug=True)
# Setting a callback which is called at every iteration
callback = partial(tracking_callback, verbose=args.verbose)
tracker.set_tracking_callback(tracking_callback=callback)
# Start tracking with a camera
with color_tracker.WebCamera(video_src=0) as webcam:
# Start the actual tracking of the object
tracker.track(webcam,
hsv_lower_value=args.low,
hsv_upper_value=args.high,
min_contour_area=args.contour_area,
kernel=kernel)
if __name__ == "__main__":
main()
|
mit
| -4,765,477,397,888,369,000
| 34.476923
| 127
| 0.647008
| false
| 3.737439
| false
| false
| false
|
jefftc/changlab
|
Betsy/Betsy/modules/convert_simplevariantfile_to_matrix.py
|
1
|
8224
|
from Module import AbstractModule
class Module(AbstractModule):
def __init__(self):
AbstractModule.__init__(self)
def run(
self, network, in_data, out_attributes, user_options, num_cores,
out_filename):
from genomicode import filelib
from genomicode import SimpleVariantMatrix
from genomicode import AnnotationMatrix
simple_file = in_data.identifier
metadata = {}
# Read all in memory. Hopefully, not too big.
ds = []
for d in filelib.read_row(simple_file, header=-1):
ds.append(d)
#if len(ds) > 50000: # DEBUG
# break
# MuSE sometimes has alternates.
# Alt A,C
# Num_Alt 13,0
# VAF 0.19,0.0
# Detect this and fix it. Take the alternate with the highest VAF.
for d in ds:
if d.Num_Alt.find(",") < 0:
continue
x1 = d.Num_Alt.split(",")
x2 = d.VAF.split(",")
assert len(x1) == len(x2)
x1 = map(int, x1)
x2 = map(float, x2)
max_vaf = max_i = None
for i in range(len(x2)):
if max_vaf is None or x2[i] > max_vaf:
max_vaf = x2[i]
max_i = i
assert max_i is not None
d.Num_Alt = str(x1[max_i])
d.VAF = str(x2[max_i])
# Make a list of all the positions.
positions = {} # (Chrom, Pos) -> 1
for d in ds:
positions[(d.Chrom, int(d.Pos))] = 1
positions = sorted(positions)
# Make a list of all the callers.
callers = {}
for d in ds:
callers[d.Caller] = 1
callers = sorted(callers)
# Make a list of all the samples.
samples = {}
for d in ds:
samples[d.Sample] = 1
samples = sorted(samples)
# Make a list of the coordinates.
coord_data = {}
for d in ds:
x = d.Chrom, int(d.Pos), d.Ref, d.Alt
coord_data[x] = 1
coord_data = sorted(coord_data)
# Make a list of all DNA calls.
call_data = []
for d in ds:
assert d.Source in ["DNA", "RNA"]
if d.Source != "DNA":
continue
num_ref = num_alt = vaf = None
if d.Num_Ref:
num_ref = int(d.Num_Ref)
if d.Num_Alt:
num_alt = int(d.Num_Alt)
if d.VAF:
vaf = float(d.VAF)
if num_ref is None and num_alt is None and vaf is None:
continue
call = SimpleVariantMatrix.Call(num_ref, num_alt, vaf)
x = d.Chrom, int(d.Pos), d.Ref, d.Alt, d.Sample, d.Caller, call
call_data.append(x)
# sample -> caller -> chrom, pos, ref, alt -> call
samp2caller2coord2call = {}
for x in call_data:
chrom, pos, ref, alt, sample, caller, call = x
coord = chrom, pos, ref, alt
if sample not in samp2caller2coord2call:
samp2caller2coord2call[sample] = {}
caller2coord2call = samp2caller2coord2call[sample]
if caller not in caller2coord2call:
caller2coord2call[caller] = {}
coord2call = caller2coord2call[caller]
# A (sample, caller, coord) may have multiple calls. For
# example, for germline samples that are called with each
# tumor sample. If this is the case, then take the call
# with the highest coverage.
if coord in coord2call:
old_call = coord2call[coord]
cov = old_cov = None
if call.num_ref is not None and call.num_alt is not None:
cov = call.num_ref + call.num_alt
if old_call.num_ref is not None and \
old_call.num_alt is not None:
old_cov = old_call.num_ref + old_call.num_alt
if cov is None and old_cov is not None:
call = old_call
elif cov is not None and old_cov is not None and cov < old_cov:
call = old_call
coord2call[coord] = call
# Count the number of callers that called a variant at each
# position for each sample.
samp2coord2caller = {} # sample -> chrom, pos, ref, alt -> caller -> 1
# Need to do this first, to make sure each caller is counted
        # at most once. This is to account for germline samples that
        # are called by each caller multiple times.
for x in call_data:
chrom, pos, ref, alt, sample, caller, call = x
coord = chrom, pos, ref, alt
if sample not in samp2coord2caller:
samp2coord2caller[sample] = {}
if coord not in samp2coord2caller[sample]:
samp2coord2caller[sample][coord] = {}
samp2coord2caller[sample][coord][caller] = 1
samp2coord2nc = {} # sample -> chrom, pos, ref, alt -> num_callers
for sample in samp2coord2caller:
samp2coord2nc[sample] = {}
for coord in samp2coord2caller[sample]:
samp2coord2nc[sample][coord] = len(
samp2coord2caller[sample][coord])
#for x in call_data:
# chrom, pos, ref, alt, sample, caller, call = x
# coord = chrom, pos, ref, alt
# if sample not in samp2coord2nc:
# samp2coord2nc[sample] = {}
# nc = samp2coord2nc[sample].get(coord, 0) + 1
# samp2coord2nc[sample][coord] = nc
# Format everything into an annotation matrix.
headers0 = []
headers1 = []
headers2 = []
all_annots = []
# Add the positions.
headers0 += ["", "", "", ""]
headers1 += ["", "", "", ""]
headers2 += ["Chrom", "Pos", "Ref", "Alt"]
for i in range(4):
x = [x[i] for x in coord_data]
x = [str(x) for x in x]
all_annots.append(x)
# Add the number of callers information.
headers0 += ["Num Callers"] * len(samples)
headers1 += [""] * len(samples)
headers2 += samples
for sample in samples:
annots = []
for coord in coord_data:
nc = samp2coord2nc.get(sample, {}).get(coord, "")
annots.append(nc)
all_annots.append(annots)
# Add information about calls.
for sample in samples:
caller2coord2call = samp2caller2coord2call.get(sample, {})
for i, caller in enumerate(callers):
h0 = ""
if not i:
h0 = sample
h1 = caller
h2 = "Ref/Alt/VAF"
headers0.append(h0)
headers1.append(h1)
headers2.append(h2)
coord2call = caller2coord2call.get(caller, {})
annots = []
for coord in coord_data:
x = ""
call = coord2call.get(coord)
if call:
x = SimpleVariantMatrix._format_call(call)
annots.append(x)
all_annots.append(annots)
# Set the headers.
assert len(headers0) == len(headers1)
assert len(headers0) == len(headers2)
assert len(headers0) == len(all_annots)
headers = [None] * len(headers0)
for i, x in enumerate(zip(headers0, headers1, headers2)):
x = "___".join(x)
headers[i] = x
matrix = AnnotationMatrix.create_from_annotations(headers, all_annots)
SimpleVariantMatrix.write_from_am(out_filename, matrix)
#annot_header = ["Chrom", "Pos", "Ref", "Alt"]
#matrix = SimpleVariantMatrix.make_matrix(
# samples, callers, annot_header, coord_data, named_data,
# call_data)
#SimpleVariantMatrix.write(out_filename, matrix)
return metadata
def name_outfile(self, antecedents, user_options):
return "calls.txt"
|
mit
| 3,985,949,250,645,467,600
| 36.552511
| 79
| 0.508512
| false
| 3.812703
| false
| false
| false
|
dzorlu/sdc-segmentation
|
train.py
|
1
|
4118
|
import sys
import tensorflow as tf
from tensorflow.python.ops import math_ops
sys.path.append("slim/")
slim = tf.contrib.slim
TRAIN_DIR = "/tmp/tf"
class Trainer(object):
def __init__(self, nb_classes, optimizer, learning_rate):
self.nb_classes = nb_classes
# learning rate can be a placeholder tensor
self.learning_rate = learning_rate
self.optimizer = optimizer(learning_rate)
self.train_op = None
self.prediction = None
def build(self, predictions, labels, one_hot=False):
with tf.name_scope('training'):
if one_hot:
labels = tf.one_hot(labels, depth=self.nb_classes)
labels = tf.squeeze(labels, axis=2)
label_shape = tf.shape(labels)[:2]
predictions = tf.image.resize_bilinear(predictions, label_shape, name='resize_predictions')
else:
        labels = tf.reshape(labels, (-1, self.nb_classes))
predictions = tf.reshape(predictions, (-1, self.nb_classes))
self.prediction = predictions
labels = tf.expand_dims(labels, 0)
print("pred shape {}, label shape {}".format(predictions.get_shape(), labels.get_shape()))
# wraps the softmax_with_entropy fn. adds it to loss collection
tf.losses.softmax_cross_entropy(logits=predictions, onehot_labels=labels)
# include the regulization losses in the loss collection.
total_loss = tf.losses.get_total_loss()
self.train_op = slim.learning.create_train_op(total_loss,
optimizer=self.optimizer)
def add_summaries(self):
# Add summaries for images, variables and losses.
global_summaries = set([])
# image summary
image_summary = tf.get_default_graph().get_tensor_by_name('IteratorGetNext:0')
image_summary = tf.expand_dims(image_summary, 0)
image_summary = tf.summary.image('image', image_summary)
global_summaries.add(image_summary)
# prediction summary
prediction = tf.argmax(self.prediction, axis=3)
prediction = tf.cast(prediction, tf.float32)
prediction = tf.expand_dims(prediction, 3)
image_summary = tf.summary.image('prediction', prediction)
global_summaries.add(image_summary)
for model_var in slim.get_model_variables():
global_summaries.add(tf.summary.histogram(model_var.op.name, model_var))
# total loss
total_loss_tensor = tf.get_default_graph().get_tensor_by_name('training/total_loss:0')
global_summaries.add(tf.summary.scalar(total_loss_tensor.op.name, total_loss_tensor))
# Merge all summaries together.
summary_op = tf.summary.merge(list(global_summaries), name='summary_op')
return summary_op
def train(self, iterator,
filename,
restore_fn=None,
_add_summaries = True,
number_of_steps=10000,
save_interval_secs = 12000,
same_summaries_secs=120,
keep_checkpoint_every_n_hours=5):
summary_op = None
if _add_summaries:
summary_op = self.add_summaries()
# Save checkpoints regularly.
saver = tf.train.Saver(
keep_checkpoint_every_n_hours=keep_checkpoint_every_n_hours)
    # init fn for the dataset ops and checkpointing
def initializer_fn(sess):
input_tensor = tf.get_default_graph().get_tensor_by_name('training_data/input:0')
sess.run(iterator.initializer, feed_dict={input_tensor: filename})
if restore_fn:
restore_fn(sess)
init_fn = initializer_fn
# Soft placement allows placing on CPU ops without GPU implementation.
session_config = tf.ConfigProto(allow_soft_placement=True,
log_device_placement=False)
# train
slim.learning.train(train_op=self.train_op,
logdir=TRAIN_DIR,
session_config=session_config,
summary_op=summary_op,
init_fn=init_fn,
save_interval_secs = save_interval_secs,
number_of_steps=number_of_steps,
save_summaries_secs=same_summaries_secs,
saver=saver)
|
mit
| 8,798,008,712,275,257,000
| 41.453608
| 99
| 0.639631
| false
| 3.830698
| false
| false
| false
|
Aorjoa/aiyara-ceph-dash
|
.tox/flake8/lib/python2.7/site-packages/flake8/formatting/default.py
|
1
|
2191
|
"""Default formatting class for Flake8."""
from flake8.formatting import base
class SimpleFormatter(base.BaseFormatter):
"""Simple abstraction for Default and Pylint formatter commonality.
Sub-classes of this need to define an ``error_format`` attribute in order
to succeed. The ``format`` method relies on that attribute and expects the
``error_format`` string to use the old-style formatting strings with named
parameters:
* code
* text
* path
* row
* col
"""
error_format = None
def format(self, error):
"""Format and write error out.
If an output filename is specified, write formatted errors to that
file. Otherwise, print the formatted error to standard out.
"""
return self.error_format % {
"code": error.code,
"text": error.text,
"path": error.filename,
"row": error.line_number,
"col": error.column_number,
}
class Default(SimpleFormatter):
"""Default formatter for Flake8.
This also handles backwards compatibility for people specifying a custom
format string.
"""
error_format = '%(path)s:%(row)d:%(col)d: %(code)s %(text)s'
def after_init(self):
"""Check for a custom format string."""
if self.options.format.lower() != 'default':
self.error_format = self.options.format
class Pylint(SimpleFormatter):
"""Pylint formatter for Flake8."""
error_format = '%(path)s:%(row)d: [%(code)s] %(text)s'
class FilenameOnly(SimpleFormatter):
"""Only print filenames, e.g., flake8 -q."""
error_format = '%(path)s'
def after_init(self):
"""Initialize our set of filenames."""
self.filenames_already_printed = set()
def format(self, error):
"""Ensure we only print each error once."""
if error.filename not in self.filenames_already_printed:
self.filenames_already_printed.add(error.filename)
return super(FilenameOnly, self).format(error)
class Nothing(base.BaseFormatter):
"""Print absolutely nothing."""
def format(self, error):
"""Do nothing."""
pass
|
bsd-2-clause
| -733,534,517,707,735,400
| 26.3875
| 78
| 0.624829
| false
| 4.181298
| false
| false
| false
|
WisniewskiP/meson
|
install_meson.py
|
1
|
3639
|
#!/usr/bin/env python3
# Copyright 2013-2014 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script installs Meson. We can't use Meson to install itself
# because of the bootstrap problem. We can't use any other build system
# either because that would be just silly.
import os, sys, glob, shutil, gzip
from optparse import OptionParser
usage_info = '%prog [--prefix PREFIX] [--destdir DESTDIR]'
parser = OptionParser(usage=usage_info)
parser.add_option('--prefix', default='/usr/local', dest='prefix',
help='the installation prefix (default: %default)')
parser.add_option('--destdir', default='', dest='destdir',
help='the destdir (default: %default)')
(options, args) = parser.parse_args(sys.argv)
if options.prefix[0] != '/':
print('Error, prefix must be an absolute path.')
sys.exit(1)
if options.destdir == '':
install_root = options.prefix
else:
install_root = os.path.join(options.destdir, options.prefix[1:])
script_dir = os.path.join(install_root, 'share/meson')
bin_dir = os.path.join(install_root, 'bin')
bin_script = os.path.join(script_dir, 'meson.py')
gui_script = os.path.join(script_dir, 'mesongui.py')
conf_script = os.path.join(script_dir, 'mesonconf.py')
bin_name = os.path.join(bin_dir, 'meson')
gui_name = os.path.join(bin_dir, 'mesongui')
conf_name = os.path.join(bin_dir, 'mesonconf')
man_dir = os.path.join(install_root, 'share/man/man1')
in_manfile = 'man/meson.1'
out_manfile = os.path.join(man_dir, 'meson.1.gz')
in_guimanfile = 'man/mesongui.1'
out_guimanfile = os.path.join(man_dir, 'mesongui.1.gz')
in_confmanfile = 'man/mesonconf.1'
out_confmanfile = os.path.join(man_dir, 'mesonconf.1.gz')
symlink_value = os.path.relpath(bin_script, os.path.dirname(bin_name))
guisymlink_value = os.path.relpath(gui_script, os.path.dirname(gui_name))
confsymlink_value = os.path.relpath(conf_script, os.path.dirname(conf_name))
files = glob.glob('*.py')
files += glob.glob('*.ui')
noinstall = ['compile_meson.py', 'install_meson.py', 'run_tests.py', 'run_cross_test.py']
files = [x for x in files if x not in noinstall]
os.makedirs(script_dir, exist_ok=True)
os.makedirs(bin_dir, exist_ok=True)
os.makedirs(man_dir, exist_ok=True)
for f in files:
print('Installing %s to %s.' %(f, script_dir))
outfilename = os.path.join(script_dir, f)
shutil.copyfile(f, outfilename)
shutil.copystat(f, outfilename)
try:
os.remove(bin_name)
except OSError:
pass
print('Creating symlinks %s and %s.' % (bin_name, gui_name))
try:
os.unlink(bin_name)
except FileNotFoundError:
pass
try:
os.unlink(gui_name)
except FileNotFoundError:
pass
try:
os.unlink(conf_name)
except FileNotFoundError:
pass
os.symlink(symlink_value, bin_name)
os.symlink(guisymlink_value, gui_name)
os.symlink(confsymlink_value, conf_name)
print('Installing manfiles to %s.' % man_dir)
open(out_manfile, 'wb').write(gzip.compress(open(in_manfile, 'rb').read()))
open(out_confmanfile, 'wb').write(gzip.compress(open(in_confmanfile, 'rb').read()))
open(out_guimanfile, 'wb').write(gzip.compress(open(in_guimanfile, 'rb').read()))
|
apache-2.0
| -5,287,735,341,432,281,000
| 35.029703
| 89
| 0.710085
| false
| 3.004955
| false
| false
| false
|
Gricha/django-empty
|
django-empty-auth/newproject/settings.py
|
1
|
3305
|
"""
Django settings for newproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
from newproject.settings_local import (
SECRET_KEY,
DEBUG,
LESSC_PATH,
USE_SYSLOG,
TEMPLATE_DEBUG,
ALLOWED_HOSTS,
COMPRESS_ENABLED,
DATABASES,
ADMINS)
AUTHENTICATION_BACKENDS = (
'newproject.auth_backends.CustomUserModelBackend',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
'django.core.context_processors.request',)
AUTH_PROFILE_MODULE = 'newproject.apps.account.CustomUser'
CUSTOM_USER_MODEL = 'newproject.apps.account.CustomUser'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'newproject.apps.account',
'newproject.apps.main',
'compressor',
'south',
'registration',
'widget_tweaks',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'newproject.urls'
WSGI_APPLICATION = 'newproject.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static/'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = (
'newproject/templates/',
)
LOGIN_REDIRECT_URL = '/'
COMPRESS_ROOT = os.path.join(BASE_DIR, 'newproject', 'static')
COMPRESS_PRECOMPILERS = (
('text/less', '%s {infile} {outfile}' % LESSC_PATH),
)
|
unlicense
| 1,989,905,234,812,430,300
| 25.653226
| 75
| 0.722844
| false
| 3.482613
| false
| false
| false
|
SKIRT/PTS
|
magic/region/panda.py
|
1
|
4117
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.region.panda Contains the PandaRegion class and subclasses.
# -----------------------------------------------------------------
# Ensure Python 3 functionality
from __future__ import absolute_import, division, print_function
# Import astronomical modules
from astropy.coordinates import Angle
from astropy.units import Quantity
# Import the relevant PTS classes and modules
from .region import Region, PixelRegion, SkyRegion, PhysicalRegion
from ..basics.coordinate import PixelCoordinate, SkyCoordinate, PhysicalCoordinate
# -----------------------------------------------------------------
class PandaRegion(Region):
"""
This class ...
"""
def __init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs):
"""
The constructor ...
:param kwargs:
"""
# Check the angle
#if not isinstance(angle, Angle): raise ValueError("Angle must be a Astropy Angle object")
# Set the attributes
self.center = center
self.start_angle = start_angle
self.stop_angle = stop_angle
self.nangle = nangle
self.inner = inner
self.outer = outer
self.nradius = nradius
# Call the constructor of the base class
super(PandaRegion, self).__init__(**kwargs)
# -----------------------------------------------------------------
class PixelPandaRegion(PandaRegion, PixelRegion):
"""
This class ...
"""
def __init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs):
"""
This function ...
"""
# Check the start coordinate
#if not isinstance(start, PixelCoordinate): raise ValueError("Start must be pixel coordinate")
# Check the length
#if not isinstance(length, float): raise ValueError("Length must be float")
# Call the constructor of VectorRegion class
PandaRegion.__init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs)
# -----------------------------------------------------------------
class SkyPandaRegion(PandaRegion, SkyRegion):
"""
This class ...
"""
def __init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs):
"""
This function ...
:param start:
:param length:
:param angle:
:param kwargs:
"""
# Check the start coordinate
#if not isinstance(start, SkyCoordinate): raise ValueError("Start must be sky coordinate")
# Check the length
#if not isinstance(length, Quantity): raise ValueError("Length must be an angular quantity")
# Call the constructor of VectorRegion class
PandaRegion.__init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs)
# -----------------------------------------------------------------
class PhysicalPandaRegion(PandaRegion, PhysicalRegion):
"""
This class ...
"""
def __init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs):
"""
This function ...
:param start:
:param length:
:param angle:
:param kwargs:
"""
# Check the start coordinate
#if not isinstance(start, PhysicalCoordinate): raise ValueError("Start must be physical coordinate")
# Check the length
#if not isinstance(length, Quantity): raise ValueError("Length must be a physical quantity of length")
# Call the constructor of VectorRegion class
PandaRegion.__init__(self, center, start_angle, stop_angle, nangle, inner, outer, nradius, **kwargs)
# -----------------------------------------------------------------
|
agpl-3.0
| -4,254,293,722,206,895,000
| 30.661538
| 110
| 0.548105
| false
| 4.609183
| false
| false
| false
|
jor-/matrix-decomposition
|
setup.py
|
1
|
2841
|
# Copyright (C) 2017-2018 Joscha Reimer jor@informatik.uni-kiel.de
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A setuptools based setup module.
https://packaging.python.org/en/latest/distributing.html
"""
import setuptools
import os.path
import versioneer_extended
# Get the long description from the README file
readme_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README.rst')
with open(readme_file, mode='r', encoding='utf-8') as f:
long_description = f.read()
# Setup
setuptools.setup(
# general informations
name='matrix-decomposition',
    description='This library allows approximating Hermitian (dense and sparse) matrices by positive definite matrices. Furthermore, it allows decomposing (factorizing) positive definite matrices and solving the associated systems of linear equations.',
long_description=long_description,
keywords='approximation Hermitian dense sparse matrix matrices positive definite decompose factorize decomposition factorization linear equation equations Cholesky',
url='https://github.com/jor-/matrix_decomposition',
author='Joscha Reimer',
author_email='jor@informatik.uni-kiel.de',
license='AGPL',
classifiers=[
# Development Status
'Development Status :: 5 - Production/Stable',
# Intended Audience, Topic
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
# Licence (should match "license" above)
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
# Supported Python versions
'Programming Language :: Python',
],
# version
version=versioneer_extended.get_version(),
cmdclass=versioneer_extended.get_cmdclass(),
# packages to install
packages=setuptools.find_packages(),
# dependencies
python_requires='>=3.7',
setup_requires=[
'setuptools>=0.8',
'pip>=1.4',
],
install_requires=[
'numpy>=1.15',
'scipy>=0.19',
],
extras_require={
'decompose_sparse': ['scikit-sparse>=0.4.2'],
},
)
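A side note on the decompose_sparse extra declared above: scikit-sparse is imported under the name sksparse, so downstream code typically probes for it roughly as in the hedged sketch below (the sksparse.cholmod path is an assumption about scikit-sparse, not something stated in this setup.py).
try:
    from sksparse.cholmod import cholesky  # pulled in by 'matrix-decomposition[decompose_sparse]'
    HAS_SPARSE_CHOLESKY = True
except ImportError:
    HAS_SPARSE_CHOLESKY = False
print('sparse Cholesky available:', HAS_SPARSE_CHOLESKY)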
|
agpl-3.0
| -9,103,944,859,064,064,000
| 35.423077
| 246
| 0.699754
| false
| 4.105491
| false
| false
| false
|
bpsinc-native/src_third_party_libjingle_source_talk
|
PRESUBMIT.py
|
2
|
5115
|
# libjingle
# Copyright 2013 Google Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# List of files that should not be committed to
DO_NOT_SUBMIT_FILES = [
"talk/media/webrtc/webrtcmediaengine.h",
"talk/media/webrtc/webrtcvideoengine.cc",
"talk/media/webrtc/webrtcvideoengine.h",
"talk/media/webrtc/webrtcvideoengine_unittest.cc"]
def _LicenseHeader(input_api):
"""Returns the license header regexp."""
# Accept any year number from start of project to the current year
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2004, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
years_re = '%s(--%s)?' % (years_re, years_re)
license_header = (
r'.*? libjingle\n'
r'.*? Copyright %(year)s,? Google Inc\.\n'
r'.*?\n'
r'.*? Redistribution and use in source and binary forms, with or without'
r'\n'
r'.*? modification, are permitted provided that the following conditions '
r'are met:\n'
r'.*?\n'
r'.*? 1\. Redistributions of source code must retain the above copyright '
r'notice,\n'
r'.*? this list of conditions and the following disclaimer\.\n'
r'.*? 2\. Redistributions in binary form must reproduce the above '
r'copyright notice,\n'
r'.*? this list of conditions and the following disclaimer in the '
r'documentation\n'
r'.*? and/or other materials provided with the distribution\.\n'
r'.*? 3\. The name of the author may not be used to endorse or promote '
r'products\n'
r'.*? derived from this software without specific prior written '
r'permission\.\n'
r'.*?\n'
r'.*? THIS SOFTWARE IS PROVIDED BY THE AUTHOR \`\`AS IS\'\' AND ANY '
r'EXPRESS OR IMPLIED\n'
r'.*? WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES '
r'OF\n'
r'.*? MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE '
r'DISCLAIMED\. IN NO\n'
r'.*? EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, '
r'INCIDENTAL,\n'
r'.*? SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \(INCLUDING, '
r'BUT NOT LIMITED TO,\n'
r'.*? PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR '
r'PROFITS;\n'
r'.*? OR BUSINESS INTERRUPTION\) HOWEVER CAUSED AND ON ANY THEORY OF '
r'LIABILITY,\n'
r'.*? WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT \(INCLUDING '
r'NEGLIGENCE OR\n'
r'.*? OTHERWISE\) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, '
r'EVEN IF\n'
r'.*? ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\.\n'
) % {
'year': years_re,
}
return license_header
def _ProtectedFiles(input_api, output_api):
results = []
changed_files = []
for f in input_api.AffectedFiles():
changed_files.append(f.LocalPath())
bad_files = list(set(DO_NOT_SUBMIT_FILES) & set(changed_files))
if bad_files:
error_type = output_api.PresubmitError
results.append(error_type(
'The following affected files are only allowed to be updated when '
'importing libjingle',
bad_files))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(input_api.canned_checks.CheckLicense(
input_api, output_api, _LicenseHeader(input_api)))
results.extend(_ProtectedFiles(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
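To make the year pattern assembled in _LicenseHeader concrete, the standalone sketch below rebuilds it and checks a few copyright strings. It is written with Python 3 range/re.fullmatch for brevity (the presubmit script itself uses Python 2 xrange), and the sample year 2013 is only an illustration.
import re

current_year = 2013
allowed_years = (str(s) for s in reversed(range(2004, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
years_re = '%s(--%s)?' % (years_re, years_re)
print(re.fullmatch(years_re, '2013') is not None)        # True: a single year
print(re.fullmatch(years_re, '2004--2013') is not None)  # True: a year range
print(re.fullmatch(years_re, '1999') is not None)        # False: before the project start year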
|
bsd-3-clause
| -1,188,175,575,473,091,600
| 43.094828
| 80
| 0.682502
| false
| 3.886778
| false
| false
| false
|
EvangelouSotiris/flightradiationcalc
|
main.py
|
1
|
5469
|
import time
import requests
##############################################################
############## REQUESTS MANAGEMENT/ LINKS ####################
##############################################################
headers = {'User-Agent' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}
flight = input('Enter the number of your flight: ')
firstpart = 'https://data-live.flightradar24.com/clickhandler/?version=1.5&flight='
r = requests.get(firstpart + flight, headers = headers)
jsoned_response = r.json()
limit = len(jsoned_response['trail']) # module-level, so the functions below can read it without a global declaration
##############################################################
################### INITIALISATIONS ##########################
##############################################################
each_derivative = [None] * (limit - 1)
risingSpeed = [None]*limit
enRouteAlts = [0]*12
enRouteTimestamps = [0]*12
enRouteCounter = 0 #position on enRoute matrixes
possibleLastTimestamp = 0 #last timestamp of enroute flight - start of descension
y = [None] * (limit) #initialisation y-altitude , x-time and dy/dx derivative
x = [None] * (limit)
first_time_derivative_flag = 0
first_time_derivative_zero_flag = 0
##############################################################
################# MAIN PROGRAM/ LOOPS ########################
##############################################################
## Getting requests - heights , timestamps , and alt changing speed measurement ## needed in functions ##
for i in range(0, limit):
    y[i] = jsoned_response['trail'][limit - 1 - i]['alt'] # values of altitudes
    if y[i] is None:
        print('y is none in ' + str(i))
        break
    x[i] = jsoned_response['trail'][limit - 1 - i]['ts'] # values of timestamps
    if x[i] is None:
        print('x is none in ' + str(i))
        break # Break statements if x or y values are none (can't be used)
    if i > 0 and x[i-1] is not None and y[i-1] is not None:
        each_derivative[i - 1] = float(y[i] - y[i-1]) / float(x[i] - x[i-1]) # each derivative = speed of changing altitudes
        print(each_derivative[i - 1])
    print(x[i])
    print(y[i])
## The response points where ascension ends and descension starts are computed
## after the function definitions below, so the names are defined when they run.
##############################################################
################### FUNCTIONS ################################
##############################################################
## Functions for ascension and descension points
def get_ascension_point(jsoned_response):
    counter_ascend = 0 # counter will help us decide the stage of flight
    first_time_derivative_flag = 0
    first_time_derivative_zero_flag = 0
    ascend_point = None
    for i in range(0, limit - 1): # each_derivative holds limit - 1 values
        if each_derivative[i] is not None and 0 < each_derivative[i] < 10: # positive climb rate over successive points => ascension
            if first_time_derivative_flag == 0:
                first_time_derivative_flag = 1
                possible_ascension_point = i
            counter_ascend = counter_ascend + 1
            print("counter_ascend = ", counter_ascend)
        else:
            counter_ascend = 0
            first_time_derivative_flag = 0
        if counter_ascend > 0 or first_time_derivative_zero_flag == 1:
            first_time_derivative_zero_flag = 1
            if (i + 2 < limit - 1 and each_derivative[i] is not None
                    and each_derivative[i + 1] is not None and each_derivative[i + 2] is not None
                    and each_derivative[i] < 5 and each_derivative[i + 1] < 5 and each_derivative[i + 2] < 5):
                print("snap_ascend")
        if counter_ascend >= 15: # into ascension stage
            ascend_point = i
            print("snap_ascend")
    return ascend_point
def get_descension_point(jsoned_response):
    counter_descend = 0
    first_time_derivative_flag = 0
    first_time_derivative_zero_flag = 0
    descend_point = None
    for i in range(0, limit - 1):
        if each_derivative[i] is not None and -10 < each_derivative[i] < 0 and y[i] > 18000: # descending while above 18000 ft => descension
            if first_time_derivative_flag == 0:
                first_time_derivative_flag = 1
                possible_descension_point = i
            counter_descend = counter_descend + 1
            print("descend = ", counter_descend)
        else:
            counter_descend = 0
            first_time_derivative_flag = 0
        if counter_descend > 0 or first_time_derivative_zero_flag == 1:
            first_time_derivative_zero_flag = 1
            if (i + 2 < limit - 1 and each_derivative[i] is not None
                    and each_derivative[i + 1] is not None and each_derivative[i + 2] is not None
                    and each_derivative[i] > -5 and each_derivative[i + 1] > -5 and each_derivative[i + 2] > -5):
                print("snap_descend")
        if counter_descend >= 15: # into descension stage
            descend_point = i
            print("snap_descend")
    return descend_point
## Getting the response points where ascension ends and descension starts
ascensionFinishPoint = get_ascension_point(jsoned_response)
descensionStartingPoint = get_descension_point(jsoned_response)
##############################################################
############### OLD COMMITS/MAYBE USEFUL #####################
##############################################################
##ARTIFACT
######## EN ROUTE STAGE
# if (each_derivative>-5 and each_derivative<5): #En route stage of flight #######CHANGEABLE
# counter_ascend = 0
# counter_descend = 0
# # print ("snap_enroute")
# if (enRouteAlts[enRouteCounter] == 0): #1st time into en route stage
# enRouteAlts[enRouteCounter] = y[i]
# enRouteTimestamps[enRouteCounter] = x[i] #x1 time airplane got into that altitude
# if (abs(y[i]-enRouteAlts[enRouteCounter])>1000): #more than 1000 feet is considered another en route alt #######CHANGEABLE
# enRouteTimestamps[enRouteCounter] = x[i]-enRouteTimestamps[enRouteCounter] #x2-x1 time airplane stayed into former alt
# enRouteCounter = enRouteCounter + 1 #next altitude/timestamp matrix pos
# enRouteAlts[enRouteCounter] = y[i] #new alt
# enRouteTimestamps[enRouteCounter] = x[i] #x1 timestamp of new alt
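To make the each_derivative quantity concrete, here is a small worked example with two hypothetical consecutive trail samples (altitude in feet and Unix timestamps, as in the Flightradar24 response above; the numbers are invented for illustration).
prev_alt, prev_ts = 1200.0, 1500000000   # assumed sample: altitude (ft), timestamp (s)
curr_alt, curr_ts = 1740.0, 1500000060   # 540 ft gained over 60 s
climb_rate = (curr_alt - prev_alt) / (curr_ts - prev_ts)
print(climb_rate)  # 9.0 ft/s, i.e. 0 < u < 10, so this sample would increment counter_ascend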
|
gpl-3.0
| -8,451,631,178,383,616,000
| 41.069231
| 135
| 0.588956
| false
| 3.32462
| false
| false
| false
|