Dataset columns:

| column | type | range / values |
|---|---|---|
| repo_name | string | lengths 5-92 |
| path | string | lengths 4-221 |
| copies | string | 19 classes |
| size | string | lengths 4-6 |
| content | string | lengths 766-896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51 to 99.9 |
| line_max | int64 | 32 to 997 |
| alpha_frac | float64 | 0.25 to 0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5 to 13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |
repo_name: ralphhughes/TempLogger | path: DHT22.py | copies: 1 | size: 7329 | content:
#!/usr/bin/env python
# 2014-07-11 DHT22.py

import time
import atexit
import sys

import pigpio


class sensor:
    """
    A class to read relative humidity and temperature from the
    DHT22 sensor. The sensor is also known as the AM2302.

    The sensor can be powered from the Pi 3V3 or the Pi 5V rail.
    Powering from the 3V3 rail is simpler and safer. You may need
    to power from 5V if the sensor is connected via a long cable.

    For 3V3 operation connect pin 1 to 3V3 and pin 4 to ground.
    Connect pin 2 to a gpio.

    For 5V operation connect pin 1 to 5V and pin 4 to ground.
    The following pin 2 connection works for me. Use at YOUR OWN RISK.

    5V--5K_resistor--+--10K_resistor--Ground
                     |
    DHT22 pin 2 -----+
                     |
    gpio ------------+
    """

    def __init__(self, pi, gpio, LED=None, power=None):
        """
        Instantiate with the Pi and gpio to which the DHT22 output
        pin is connected.

        Optionally a LED may be specified. This will be blinked for
        each successful reading.

        Optionally a gpio used to power the sensor may be specified.
        This gpio will be set high to power the sensor. If the sensor
        locks it will be power cycled to restart the readings.

        Taking readings more often than about once every two seconds will
        eventually cause the DHT22 to hang. A 3 second interval seems OK.
        """
        self.pi = pi
        self.gpio = gpio
        self.LED = LED
        self.power = power

        if power is not None:
            pi.write(power, 1)  # Switch sensor on.
            time.sleep(2)

        self.powered = True
        self.cb = None
        atexit.register(self.cancel)

        self.bad_CS = 0  # Bad checksum count.
        self.bad_SM = 0  # Short message count.
        self.bad_MM = 0  # Missing message count.
        self.bad_SR = 0  # Sensor reset count.

        # Power cycle if timeout > MAX_TIMEOUTS.
        self.no_response = 0
        self.MAX_NO_RESPONSE = 2

        self.rhum = -999
        self.temp = -999
        self.tov = None
        self.high_tick = 0
        self.bit = 40

        pi.set_pull_up_down(gpio, pigpio.PUD_OFF)
        pi.set_watchdog(gpio, 0)  # Kill any watchdogs.

        self.cb = pi.callback(gpio, pigpio.EITHER_EDGE, self._cb)

    def _cb(self, gpio, level, tick):
        """
        Accumulate the 40 data bits. Format into 5 bytes, humidity high,
        humidity low, temperature high, temperature low, checksum.
        """
        diff = pigpio.tickDiff(self.high_tick, tick)

        if level == 0:
            # Edge length determines if bit is 1 or 0.
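            # Per the DHT22 datasheet, a high of roughly 26-28 us encodes
            # a 0 and roughly 70 us encodes a 1, so 50 us is a safe
            # dividing threshold between the two.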
            if diff >= 50:
                val = 1
                if diff >= 200:  # Bad bit?
                    self.CS = 256  # Force bad checksum.
            else:
                val = 0

            if self.bit >= 40:  # Message complete.
                self.bit = 40
            elif self.bit >= 32:  # In checksum byte.
                self.CS = (self.CS << 1) + val

                if self.bit == 39:
                    # 40th bit received.
                    self.pi.set_watchdog(self.gpio, 0)
                    self.no_response = 0

                    total = self.hH + self.hL + self.tH + self.tL

                    if (total & 255) == self.CS:  # Is checksum ok?
                        self.rhum = ((self.hH << 8) + self.hL) * 0.1

                        if self.tH & 128:  # Negative temperature.
                            mult = -0.1
                            self.tH = self.tH & 127
                        else:
                            mult = 0.1

                        self.temp = ((self.tH << 8) + self.tL) * mult
                        self.tov = time.time()

                        if self.LED is not None:
                            self.pi.write(self.LED, 0)
                    else:
                        self.bad_CS += 1

            elif self.bit >= 24:  # In temp low byte.
                self.tL = (self.tL << 1) + val
            elif self.bit >= 16:  # In temp high byte.
                self.tH = (self.tH << 1) + val
            elif self.bit >= 8:  # In humidity low byte.
                self.hL = (self.hL << 1) + val
            elif self.bit >= 0:  # In humidity high byte.
                self.hH = (self.hH << 1) + val
            else:  # Header bits.
                pass

            self.bit += 1

        elif level == 1:
            self.high_tick = tick
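            # A gap of more than 0.25 s since the previous rising edge
            # marks the start of a new message; reset the accumulators.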
            if diff > 250000:
                self.bit = -2
                self.hH = 0
                self.hL = 0
                self.tH = 0
                self.tL = 0
                self.CS = 0

        else:  # level == pigpio.TIMEOUT:
            self.pi.set_watchdog(self.gpio, 0)

            if self.bit < 8:  # Too few data bits received.
                self.bad_MM += 1  # Bump missing message count.
                self.no_response += 1

                if self.no_response > self.MAX_NO_RESPONSE:
                    self.no_response = 0
                    self.bad_SR += 1  # Bump sensor reset count.

                    if self.power is not None:
                        self.powered = False
                        self.pi.write(self.power, 0)
                        time.sleep(2)
                        self.pi.write(self.power, 1)
                        time.sleep(2)
                        self.powered = True
            elif self.bit < 39:  # Short message received.
                self.bad_SM += 1  # Bump short message count.
                self.no_response = 0
            else:  # Full message received.
                self.no_response = 0

    def temperature(self):
        """Return current temperature."""
        return self.temp

    def humidity(self):
        """Return current relative humidity."""
        return self.rhum

    def staleness(self):
        """Return time since measurement made."""
        if self.tov is not None:
            return time.time() - self.tov
        else:
            return -999

    def bad_checksum(self):
        """Return count of messages received with bad checksums."""
        return self.bad_CS

    def short_message(self):
        """Return count of short messages."""
        return self.bad_SM

    def missing_message(self):
        """Return count of missing messages."""
        return self.bad_MM

    def sensor_resets(self):
        """Return count of power cycles because of sensor hangs."""
        return self.bad_SR

    def trigger(self):
        """Trigger a new relative humidity and temperature reading."""
        if self.powered:
            if self.LED is not None:
                self.pi.write(self.LED, 1)

            self.pi.write(self.gpio, pigpio.LOW)
            time.sleep(0.017)  # 17 ms
            self.pi.set_mode(self.gpio, pigpio.INPUT)
            self.pi.set_watchdog(self.gpio, 200)

    def cancel(self):
        """Cancel the DHT22 sensor."""
        self.pi.set_watchdog(self.gpio, 0)

        if self.cb is not None:
            self.cb.cancel()
            self.cb = None


if __name__ == "__main__":
    import time
    import pigpio
    import DHT22

    # Intervals of about 2 seconds or less will eventually hang the DHT22.
    INTERVAL = 3

    pi = pigpio.pi()
    s = DHT22.sensor(pi, int(sys.argv[1]))  # Pass the gpio pin from the command line.

    next_reading = time.time()

    s.trigger()
    time.sleep(0.2)

    print("Humidity={}% Temp={}* {:3.2f} {} {} {} {}".format(
        s.humidity(), s.temperature(), s.staleness(),
        s.bad_checksum(), s.short_message(), s.missing_message(),
        s.sensor_resets()))

    next_reading += INTERVAL
    time.sleep(next_reading - time.time())  # Overall INTERVAL second polling.

    s.cancel()
    # pi.stop()

license: mit | hash: -9,112,710,096,243,817,000 | line_mean: 25.363309 | line_max: 80 | alpha_frac: 0.541957 | autogenerated: false | ratio: 3.662669 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: joxer/Baka-No-Voltron | path: tmp/android.dist/private/renpy/display/gesture.py | copies: 1 | size: 3800 | content:
# Copyright 2004-2015 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import pygame
import math
import renpy.display
DIRECTIONS = [ "n", "ne", "e", "se", "s", "sw", "w", "nw" ]
def dispatch_gesture(gesture):
    """
    This is called with a gesture to dispatch it as an event.
    """

    event = renpy.config.gestures.get(gesture, None)

    if event is not None:
        renpy.exports.queue_event(event)
        raise renpy.display.core.IgnoreEvent()


class GestureRecognizer(object):

    def __init__(self):
        super(GestureRecognizer, self).__init__()

        self.x = None
        self.y = None

    def start(self, x, y):
        # The last coordinates we saw motion at.
        self.x = x
        self.y = y

        # Minimum sizes for gestures.
        self.min_component = renpy.config.screen_width * renpy.config.gesture_component_size
        self.min_stroke = renpy.config.screen_width * renpy.config.gesture_stroke_size

        # The direction of the current stroke.
        self.current_stroke = None

        # The length of the current stroke.
        self.stroke_length = 0

        # A list of strokes we've recognized.
        self.strokes = [ ]

    def take_point(self, x, y):
        if self.x is None:
            return

        dx = x - self.x
        dy = y - self.y

        length = math.hypot(dx, dy)

        if length < self.min_component:
            return

        self.x = x
        self.y = y

        angle = math.atan2(dx, -dy) * 180 / math.pi + 22.5
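        # atan2(dx, -dy) measures the angle clockwise from "up" (screen y
        # grows downward); the extra 22.5 degrees centres each 45-degree
        # bucket on one of the eight compass directions in DIRECTIONS.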
        if angle < 0:
            angle += 360

        stroke = DIRECTIONS[int(angle / 45)]

        if stroke == self.current_stroke:
            self.stroke_length += length
        else:
            self.current_stroke = stroke
            self.stroke_length = length

        if self.stroke_length > self.min_stroke:
            if (not self.strokes) or (self.strokes[-1] != stroke):
                self.strokes.append(stroke)

    def finish(self):
        rv = None

        if self.x is None:
            return

        if self.strokes:
            func = renpy.config.dispatch_gesture
            if func is None:
                func = dispatch_gesture

            rv = func("_".join(self.strokes))

        self.x = None
        self.y = None

        return rv

    def cancel(self):
        self.x = None
        self.y = None

    def event(self, ev, x, y):
        if ev.type == pygame.MOUSEBUTTONDOWN:
            self.start(x, y)
        elif ev.type == pygame.MOUSEMOTION:
            if ev.buttons[0]:
                self.take_point(x, y)
        elif ev.type == pygame.MOUSEBUTTONUP:
            self.take_point(x, y)
            if ev.button == 1:
                return self.finish()


recognizer = GestureRecognizer()

license: gpl-2.0 | hash: 7,992,540,546,767,366,000 | line_mean: 26.737226 | line_max: 92 | alpha_frac: 0.61 | autogenerated: false | ratio: 3.865717 | config_test: true | has_no_keywords: false | few_assignments: false

repo_name: WanderingStar/rpi | path: shutdown_button.py | copies: 1 | size: 2040 | content:
#!/usr/bin/python
# This script is used with an LED and a momentary button, perhaps the same,
# like https://www.sparkfun.com/products/10440
# The LED should be wired to GPIO pin 23 and the button to pin 24.
# The idea is that it is run at startup (for example, from rc.local)
# It turns the LED on to indicate that it's working, and then waits
# for the user to hold down the button. When the script notices that
# the user is holding down the button (which may take up to 5 seconds),
# it starts flashing the LED to confirm. If the user continues to hold
# the button down, the LED goes off and the shutdown sequence is triggered.
# While the system is shutting down (which may take some time), the LED
# does a triple flash. When it's finished shutting down, the LED will
# turn off.
import os
import RPi.GPIO as GPIO
from time import sleep
LED = 23
BUTTON = 24
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, initial=1)
GPIO.setup(BUTTON, GPIO.IN)
def flashLED(secs):
    GPIO.output(LED, 0)
    sleep(secs)
    GPIO.output(LED, 1)

shutdown = False
count = 0
while not shutdown:
    # check to see if the button is pressed
    if GPIO.input(BUTTON):
        # keep track of how many cycles the button has been pressed
        count += 1
        if count < 5:
            # if it hasn't been pressed long enough yet, flash the LED
            flashLED(0.25)
        else:
            # if it has been pressed long enough, trigger shutdown
            shutdown = True
    # button is not pressed
    else:
        # reset the counter
        count = 0
    # check infrequently until we notice that the button is being pressed
    if count > 0:
        sleep(.25)
    else:
        sleep(5)

# let the user know that the button press has been noted by turning off the LED
GPIO.output(LED, 0)
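# Note: "shutdown -h now" needs root privileges, so this script is assumed
# to be running as root (e.g. started from rc.local, as described above).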
os.system("shutdown -h now")
sleep(1)
# triple flash the LED until the program is killed by system shutdown
while True:
flashLED(.1)
sleep(.1)
flashLED(.1)
sleep(.1)
flashLED(.1)
sleep(.5)

license: mit | hash: -1,626,048,357,948,252,000 | line_mean: 28.142857 | line_max: 79 | alpha_frac: 0.676961 | autogenerated: false | ratio: 3.57268 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: tanglu-org/merge-o-matic | path: deb/controlfile.py | copies: 1 | size: 5047 | content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# deb/controlfile.py - parse debian control files
#
# Copyright © 2008 Canonical Ltd.
# Author: Scott James Remnant <scott@ubuntu.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement

class ControlFile(object):
    """Debian control file.

    This can be used directly by calling the parse() function or
    overridden to add functionality.

    Class Properties:
      FieldNames  Alternate canonical capitalisation of field names

    Properties:
      paras       List of paragraphs as dictionaries
      para        Final (or single) paragraph
      signed      True if the paragraph was PGP signed
    """

    FieldNames = []

    def __init__(self, filename=None, fileobj=None, *args, **kwds):
        self.paras = []
        self.para = None
        self.signed = False

        if fileobj is not None:
            self.parse(fileobj, *args, **kwds)
        elif filename is not None:
            self.open(filename, *args, **kwds)

    def capitaliseField(self, field):
        """Capitalise a field name correctly.

        Fields are stored in the dictionary canonically capitalised,
        words split by dashes and the first letter of each in upper
        case.

        This can be overridden by adding the canonical capitalisation
        of a field name to the FieldNames list.
        """
        for canon in self.FieldNames:
            if canon.lower() == field.lower():
                return canon

        return "-".join([w.title() for w in field.split("-")])
    def open(self, file, *args, **kwds):
        """Open and parse a control-file format file."""
        with open(file) as f:
            try:
                self.parse(f, *args, **kwds)
            except Exception, e:
                e.path = file
                raise e

    def parse(self, file, multi_para=False, signed=False):
        """Parse a control-file format file.

        File is any object that acts as an iterator and returns lines,
        file-like objects being most common.

        Some control files may contain multiple paragraphs separated
        by blank lines, if this is the case set multi_para to True.

        Some single-paragraph control files may be PGP signed, if this
        is the case set signed to True. If the file was actually
        signed, the signed member of the object will be set to True.
        """
        self.para = {}
        is_signed = False
        last_field = None
        para_border = True

        for line in file:
            line = line.rstrip()

            if line.startswith("#"):
                continue

            # Multiple blank lines are permitted at paragraph borders
            if not len(line) and para_border:
                continue
            para_border = False

            if line[:1].isspace():
                if last_field is None:
                    raise IOError
                self.para[last_field] += "\n" + line.lstrip()
            elif ":" in line:
                (field, value) = line.split(":", 1)
                if len(field.rstrip().split(None)) > 1:
                    raise IOError

                last_field = self.capitaliseField(field)
                self.para[last_field] = value.lstrip()
            elif line.startswith("-----BEGIN PGP") and signed:
                if is_signed:
                    raise IOError

                for line in file:
                    if not len(line) or line.startswith("\n"):
                        break

                is_signed = True
            elif not len(line):
                para_border = True
                if multi_para:
                    self.paras.append(self.para)
                    self.para = {}
                    last_field = None
                elif is_signed:
                    try:
                        pgpsig = file.next()
                        if not len(pgpsig):
                            raise IOError
                    except StopIteration:
                        raise IOError

                    if not pgpsig.startswith("-----BEGIN PGP"):
                        raise IOError

                    self.signed = True
                    break
                else:
                    raise IOError
            else:
                raise IOError

        if is_signed and not self.signed:
            raise IOError

        if last_field:
            self.paras.append(self.para)
        elif len(self.paras):
            self.para = self.paras[-1]
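
# A minimal usage sketch (hypothetical path; parse() accepts any iterable
# of lines, so a file object works too):
#     control = ControlFile("debian/control", multi_para=True)
#     for para in control.paras:
#         print para.get("Package")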

license: gpl-3.0 | hash: -9,124,939,611,046,515,000 | line_mean: 31.346154 | line_max: 71 | alpha_frac: 0.551724 | autogenerated: false | ratio: 4.650691 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: phobson/conda-env | path: conda_env/exceptions.py | copies: 1 | size: 2210 | content:
class CondaEnvException(Exception):
    pass


class CondaEnvRuntimeError(RuntimeError, CondaEnvException):
    pass


class EnvironmentFileNotFound(CondaEnvException):
    def __init__(self, filename, *args, **kwargs):
        msg = '{} file not found'.format(filename)
        self.filename = filename
        super(EnvironmentFileNotFound, self).__init__(msg, *args, **kwargs)


class NoBinstar(CondaEnvRuntimeError):
    def __init__(self):
        msg = 'The anaconda-client cli must be installed to perform this action'
        super(NoBinstar, self).__init__(msg)


class AlreadyExist(CondaEnvRuntimeError):
    def __init__(self):
        msg = 'The environment path already exists'
        super(AlreadyExist, self).__init__(msg)


class EnvironmentAlreadyInNotebook(CondaEnvRuntimeError):
    def __init__(self, notebook, *args, **kwargs):
        msg = "The notebook {} already has an environment".format(notebook)
        super(EnvironmentAlreadyInNotebook, self).__init__(msg, *args, **kwargs)


class EnvironmentFileDoesNotExist(CondaEnvRuntimeError):
    def __init__(self, handle, *args, **kwargs):
        self.handle = handle
        msg = "{} does not have an environment definition".format(handle)
        super(EnvironmentFileDoesNotExist, self).__init__(msg, *args, **kwargs)


class EnvironmentFileNotDownloaded(CondaEnvRuntimeError):
    def __init__(self, username, packagename, *args, **kwargs):
        msg = '{}/{} file not downloaded'.format(username, packagename)
        self.username = username
        self.packagename = packagename
        super(EnvironmentFileNotDownloaded, self).__init__(msg, *args, **kwargs)


class SpecNotFound(CondaEnvRuntimeError):
    def __init__(self, msg, *args, **kwargs):
        super(SpecNotFound, self).__init__(msg, *args, **kwargs)


class InvalidLoader(Exception):
    def __init__(self, name):
        msg = 'Unable to load installer for {}'.format(name)
        super(InvalidLoader, self).__init__(msg)


class IPythonNotInstalled(CondaEnvRuntimeError):
    def __init__(self):
        msg = """IPython notebook is not installed. Install it with:
        conda install ipython-notebook
        """
        super(IPythonNotInstalled, self).__init__(msg)
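
# A minimal usage sketch (load_environment and filename are hypothetical):
#     try:
#         env = load_environment(filename)
#     except EnvironmentFileNotFound as e:
#         print(e.filename)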

license: bsd-3-clause | hash: -7,228,944,400,857,512,000 | line_mean: 33 | line_max: 80 | alpha_frac: 0.669683 | autogenerated: false | ratio: 4.047619 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: abhikeshav/ydk-py | path: cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_drivers_media_eth_oper.py | copies: 1 | size: 182469 | content:
""" Cisco_IOS_XR_drivers_media_eth_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR drivers\-media\-eth package operational data.
This module contains definitions
for the following management objects\:
ethernet\-interface\: Ethernet operational data
Copyright (c) 2013\-2015 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class EthCtrlrAlarmStateEnum(Enum):
"""
EthCtrlrAlarmStateEnum
Ethernet alarm state
.. data:: ALARM_NOT_SUPPORTED = 0
Not supported on this interface
.. data:: ALARM_SET = 1
Alarm set
.. data:: ALARM_NOT_SET = 2
Alarm not set
"""
ALARM_NOT_SUPPORTED = 0
ALARM_SET = 1
ALARM_NOT_SET = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthCtrlrAlarmStateEnum']
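# A quick sanity check of the generated enums (assuming standard
# enum34/stdlib Enum semantics):
#     EthCtrlrAlarmStateEnum.ALARM_SET.value == 1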
class EtherAinsStatusEnum(Enum):
"""
EtherAinsStatusEnum
Ether ains status
.. data:: AINS_SOAK_STATUS_NONE = 0
AINS Soak timer not running
.. data:: AINS_SOAK_STATUS_PENDING = 1
AINS Soak timer pending
.. data:: AINS_SOAK_STATUS_RUNNING = 2
AINS Soak timer running
"""
AINS_SOAK_STATUS_NONE = 0
AINS_SOAK_STATUS_PENDING = 1
AINS_SOAK_STATUS_RUNNING = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherAinsStatusEnum']
class EtherDomAlarmEnum(Enum):
"""
EtherDomAlarmEnum
Ether dom alarm
.. data:: NO_INFORMATION = 0
DOM Alarm information is not available
.. data:: ALARM_HIGH = 1
Alarm high
.. data:: WARNING_HIGH = 2
Warning high
.. data:: NORMAL = 3
Within normal parameters
.. data:: WARNING_LOW = 4
Warning low
.. data:: ALARM_LOW = 5
Alarm low
"""
NO_INFORMATION = 0
ALARM_HIGH = 1
WARNING_HIGH = 2
NORMAL = 3
WARNING_LOW = 4
ALARM_LOW = 5
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherDomAlarmEnum']
class EtherFlowcontrolEnum(Enum):
"""
EtherFlowcontrolEnum
Flowcontrol type
.. data:: NO_FLOWCONTROL = 0
No flow control (disabled)
.. data:: EGRESS = 1
Traffic egress (pause frames ingress)
.. data:: INGRESS = 2
Traffic ingress (pause frames egress)
.. data:: BIDIRECTIONAL = 3
On both ingress and egress
"""
NO_FLOWCONTROL = 0
EGRESS = 1
INGRESS = 2
BIDIRECTIONAL = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherFlowcontrolEnum']
class EtherLedStateEnum(Enum):
"""
EtherLedStateEnum
Ether led state
.. data:: LED_STATE_UNKNOWN = 0
LED state is unknown
.. data:: LED_OFF = 1
LED is off
.. data:: GREEN_ON = 2
LED is green
.. data:: GREEN_FLASHING = 3
LED is flashing green
.. data:: YELLOW_ON = 4
LED is yellow
.. data:: YELLOW_FLASHING = 5
LED is flashing yellow
.. data:: RED_ON = 6
LED is red
.. data:: RED_FLASHING = 7
LED is flashing red
"""
LED_STATE_UNKNOWN = 0
LED_OFF = 1
GREEN_ON = 2
GREEN_FLASHING = 3
YELLOW_ON = 4
YELLOW_FLASHING = 5
RED_ON = 6
RED_FLASHING = 7
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherLedStateEnum']
class EtherLinkStateEnum(Enum):
"""
EtherLinkStateEnum
Ethernet link state\: IEEE 802.3/802.3ae clause 30.5.1.1.4
.. data:: STATE_UNDEFINED = 0
State undefined
.. data:: UNKNOWN_STATE = 1
Initializing, true state not yet known
.. data:: AVAILABLE = 2
Link or light normal, loopback normal
.. data:: NOT_AVAILABLE = 3
Link loss or low light, no loopback
.. data:: REMOTE_FAULT = 4
Remote fault with no detail
.. data:: INVALID_SIGNAL = 5
Invalid signal, applies only to 10BASE-FB
.. data:: REMOTE_JABBER = 6
Remote fault, reason known to be jabber
.. data:: LINK_LOSS = 7
Remote fault, reason known to be far-end link
loss
.. data:: REMOTE_TEST = 8
Remote fault, reason known to be test
.. data:: OFFLINE = 9
Offline (applies to auto-negotiation)
.. data:: AUTO_NEG_ERROR = 10
Auto-Negotiation Error
.. data:: PMD_LINK_FAULT = 11
PMD/PMA receive link fault
.. data:: FRAME_LOSS = 12
WIS loss of frames
.. data:: SIGNAL_LOSS = 13
WIS loss of signal
.. data:: LINK_FAULT = 14
PCS receive link fault
.. data:: EXCESSIVE_BER = 15
PCS Bit Error Rate monitor reporting excessive
error rate
.. data:: DXS_LINK_FAULT = 16
DTE XGXS receive link fault
.. data:: PXS_LINK_FAULT = 17
PHY XGXS transmit link fault
.. data:: SECURITY = 18
Security failure (not a valid part)
.. data:: PHY_NOT_PRESENT = 19
The optics for the port are not present
.. data:: NO_OPTIC_LICENSE = 20
License error (No advanced optical license)
.. data:: UNSUPPORTED_MODULE = 21
Module is not supported
.. data:: DWDM_LASER_SHUT = 22
DWDM Laser shutdown
.. data:: WANPHY_LASER_SHUT = 23
WANPHY Laser shutdown
.. data:: INCOMPATIBLE_CONFIG = 24
Incompatible configuration
.. data:: SYSTEM_ERROR = 25
System error
.. data:: WAN_FRAMING_ERROR = 26
WAN Framing Error
.. data:: OTN_FRAMING_ERROR = 27
OTN Framing Error
"""
STATE_UNDEFINED = 0
UNKNOWN_STATE = 1
AVAILABLE = 2
NOT_AVAILABLE = 3
REMOTE_FAULT = 4
INVALID_SIGNAL = 5
REMOTE_JABBER = 6
LINK_LOSS = 7
REMOTE_TEST = 8
OFFLINE = 9
AUTO_NEG_ERROR = 10
PMD_LINK_FAULT = 11
FRAME_LOSS = 12
SIGNAL_LOSS = 13
LINK_FAULT = 14
EXCESSIVE_BER = 15
DXS_LINK_FAULT = 16
PXS_LINK_FAULT = 17
SECURITY = 18
PHY_NOT_PRESENT = 19
NO_OPTIC_LICENSE = 20
UNSUPPORTED_MODULE = 21
DWDM_LASER_SHUT = 22
WANPHY_LASER_SHUT = 23
INCOMPATIBLE_CONFIG = 24
SYSTEM_ERROR = 25
WAN_FRAMING_ERROR = 26
OTN_FRAMING_ERROR = 27
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherLinkStateEnum']
class EtherPhyPresentEnum(Enum):
"""
EtherPhyPresentEnum
Ether phy present
.. data:: PHY_NOT_PRESENT = 0
No PHY present
.. data:: PHY_PRESENT = 1
PHY is present
.. data:: NO_INFORMATION = 2
State is unknown
"""
PHY_NOT_PRESENT = 0
PHY_PRESENT = 1
NO_INFORMATION = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EtherPhyPresentEnum']
class EthernetBertErrCntEnum(Enum):
"""
EthernetBertErrCntEnum
Ethernet bert err cnt
.. data:: NO_COUNT_TYPE = 0
no count type
.. data:: BIT_ERROR_COUNT = 1
bit error count
.. data:: FRAME_ERROR_COUNT = 2
frame error count
.. data:: BLOCK_ERROR_COUNT = 3
block error count
.. data:: ETHERNET_BERT_ERR_CNT_TYPES = 4
ethernet bert err cnt types
"""
NO_COUNT_TYPE = 0
BIT_ERROR_COUNT = 1
FRAME_ERROR_COUNT = 2
BLOCK_ERROR_COUNT = 3
ETHERNET_BERT_ERR_CNT_TYPES = 4
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetBertErrCntEnum']
class EthernetBertPatternEnum(Enum):
"""
EthernetBertPatternEnum
Ethernet test patterns (IEEE spec 36A/48A)
.. data:: NO_TEST_PATTERN = 0
no test pattern
.. data:: HIGH_FREQUENCY = 1
high frequency
.. data:: LOW_FREQUENCY = 2
low frequency
.. data:: MIXED_FREQUENCY = 3
mixed frequency
.. data:: CONTINUOUS_RANDOM = 4
continuous random
.. data:: CONTINUOUS_JITTER = 5
continuous jitter
.. data:: LONG_CONTINUOUS_RANDOM = 6
long continuous random
.. data:: SHORT_CONTINUOUS_RANDOM = 7
short continuous random
.. data:: PSEUDORANDOM_SEED_A = 8
pseudorandom seed a
.. data:: PSEUDORANDOM_SEED_B = 9
pseudorandom seed b
.. data:: PRBS31 = 10
prbs31
.. data:: SQUARE_WAVE = 11
square wave
.. data:: PSEUDORANDOM = 12
pseudorandom
.. data:: ETHERNET_BERT_PATTERN_TYPES = 13
ethernet bert pattern types
"""
NO_TEST_PATTERN = 0
HIGH_FREQUENCY = 1
LOW_FREQUENCY = 2
MIXED_FREQUENCY = 3
CONTINUOUS_RANDOM = 4
CONTINUOUS_JITTER = 5
LONG_CONTINUOUS_RANDOM = 6
SHORT_CONTINUOUS_RANDOM = 7
PSEUDORANDOM_SEED_A = 8
PSEUDORANDOM_SEED_B = 9
PRBS31 = 10
SQUARE_WAVE = 11
PSEUDORANDOM = 12
ETHERNET_BERT_PATTERN_TYPES = 13
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetBertPatternEnum']
class EthernetDevEnum(Enum):
"""
EthernetDevEnum
Ethernet dev
.. data:: NO_DEVICE = 0
no device
.. data:: PMA_PMD = 1
pma pmd
.. data:: WIS = 2
wis
.. data:: PCS = 3
pcs
.. data:: PHY_XS = 4
phy xs
.. data:: DTE_XS = 5
dte xs
.. data:: ETHERNET_NUM_DEV = 6
ethernet num dev
"""
NO_DEVICE = 0
PMA_PMD = 1
WIS = 2
PCS = 3
PHY_XS = 4
DTE_XS = 5
ETHERNET_NUM_DEV = 6
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetDevEnum']
class EthernetDevIfEnum(Enum):
"""
EthernetDevIfEnum
Ethernet dev if
.. data:: NO_INTERFACE = 0
no interface
.. data:: XGMII = 1
xgmii
.. data:: XAUI = 2
xaui
.. data:: ETHERNET_NUM_DEV_IF = 3
ethernet num dev if
"""
NO_INTERFACE = 0
XGMII = 1
XAUI = 2
ETHERNET_NUM_DEV_IF = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetDevIfEnum']
class EthernetDuplexEnum(Enum):
"""
EthernetDuplexEnum
Duplexity
.. data:: ETHERNET_DUPLEX_INVALID = 0
ethernet duplex invalid
.. data:: HALF_DUPLEX = 1
half duplex
.. data:: FULL_DUPLEX = 2
full duplex
"""
ETHERNET_DUPLEX_INVALID = 0
HALF_DUPLEX = 1
FULL_DUPLEX = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetDuplexEnum']
class EthernetFecEnum(Enum):
"""
EthernetFecEnum
FEC type
.. data:: NOT_CONFIGURED = 0
FEC not configured
.. data:: STANDARD = 1
Reed-Solomon encoding
.. data:: DISABLED = 2
FEC explicitly disabled
"""
NOT_CONFIGURED = 0
STANDARD = 1
DISABLED = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetFecEnum']
class EthernetIpgEnum(Enum):
"""
EthernetIpgEnum
Inter packet gap
.. data:: STANDARD = 0
IEEE standard value of 12
.. data:: NON_STANDARD = 1
Non-standard value of 16
"""
STANDARD = 0
NON_STANDARD = 1
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetIpgEnum']
class EthernetLoopbackEnum(Enum):
"""
EthernetLoopbackEnum
Loopback type
.. data:: NO_LOOPBACK = 0
Disabled
.. data:: INTERNAL = 1
Loopback in the framer
.. data:: LINE = 2
Loops peer's packets back to them
.. data:: EXTERNAL = 3
tx externally connected to rx
"""
NO_LOOPBACK = 0
INTERNAL = 1
LINE = 2
EXTERNAL = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetLoopbackEnum']
class EthernetMediaEnum(Enum):
"""
EthernetMediaEnum
Ethernet media types\: IEEE 802.3/802.3ae clause
30.5.1.1.2
.. data:: ETHERNET_OTHER = 0
IEEE 802.3/802.3ae clause 30.2.5
.. data:: ETHERNET_UNKNOWN = 1
Initializing, true state or type not yet known
.. data:: ETHERNET_AUI = 2
No internal MAU, view from AUI
.. data:: ETHERNET_10BASE5 = 3
Thick coax MAU
.. data:: ETHERNET_FOIRL = 4
FOIRL MAU as specified in 9.9
.. data:: ETHERNET_10BASE2 = 5
Thin coax MAU
.. data:: ETHERNET_10BROAD36 = 6
Broadband DTE MAU
.. data:: ETHERNET_10BASE = 7
UTP MAU, duplexity unknown
.. data:: ETHERNET_10BASE_THD = 8
UTP MAU, half duplex
.. data:: ETHERNET_10BASE_TFD = 9
UTP MAU, full duplex
.. data:: ETHERNET_10BASE_FP = 10
Passive fiber MAU
.. data:: ETHERNET_10BASE_FB = 11
Synchronous fiber MAU
.. data:: ETHERNET_10BASE_FL = 12
Asynchronous fiber MAU, duplexity unknown
.. data:: ETHERNET_10BASE_FLHD = 13
Asynchronous fiber MAU, half duplex
.. data:: ETHERNET_10BASE_FLFD = 14
Asynchronous fiber MAU, full duplex
.. data:: ETHERNET_100BASE_T4 = 15
Four-pair Category 3 UTP
.. data:: ETHERNET_100BASE_TX = 16
Two-pair Category 5 UTP, duplexity unknown
.. data:: ETHERNET_100BASE_TXHD = 17
Two-pair Category 5 UTP, half duplex
.. data:: ETHERNET_100BASE_TXFD = 18
Two-pair Category 5 UTP, full duplex
.. data:: ETHERNET_100BASE_FX = 19
X fiber over PMD, duplexity unknown
.. data:: ETHERNET_100BASE_FXHD = 20
X fiber over PMD, half duplex
.. data:: ETHERNET_100BASE_FXFD = 21
X fiber over PMD, full duplex
.. data:: ETHERNET_100BASE_EX = 22
X fiber over PMD (40km), duplexity unknown
.. data:: ETHERNET_100BASE_EXHD = 23
X fiber over PMD (40km), half duplex
.. data:: ETHERNET_100BASE_EXFD = 24
X fiber over PMD (40km), full duplex
.. data:: ETHERNET_100BASE_T2 = 25
Two-pair Category 3 UTP, duplexity unknown
.. data:: ETHERNET_100BASE_T2HD = 26
Two-pair Category 3 UTP, half duplex
.. data:: ETHERNET_100BASE_T2FD = 27
Two-pair Category 3 UTP, full duplex
.. data:: ETHERNET_1000BASE_X = 28
X PCS/PMA, duplexity unknown
.. data:: ETHERNET_1000BASE_XHD = 29
X 1000BASE-XHDX PCS/PMA, half duplex
.. data:: ETHERNET_1000BASE_XFD = 30
X PCS/PMA, full duplex
.. data:: ETHERNET_1000BASE_LX = 31
X fiber over long-wl laser PMD, duplexity
unknown
.. data:: ETHERNET_1000BASE_LXHD = 32
X fiber over long-wl laser PMD, half duplex
.. data:: ETHERNET_1000BASE_LXFD = 33
X fiber over long-wl laser PMD, full duplex
.. data:: ETHERNET_1000BASE_SX = 34
X fiber over short-wl laser PMD, duplexity
unknown
.. data:: ETHERNET_1000BASE_SXHD = 35
X fiber over short-wl laser PMD, half duplex
.. data:: ETHERNET_1000BASE_SXFD = 36
X fiber over short-wl laser PMD, full duplex
.. data:: ETHERNET_1000BASE_CX = 37
X copper over 150-Ohm balanced PMD, duplexity
unknown
.. data:: ETHERNET_1000BASE_CXHD = 38
X copper over 150-Ohm balancedPMD, half duplex
.. data:: ETHERNET_1000BASE_CXFD = 39
X copper over 150-Ohm balancedPMD, full duplex
.. data:: ETHERNET_1000BASE = 40
Four-pair Category 5 UTP PHY, duplexity unknown
.. data:: ETHERNET_1000BASE_THD = 41
Four-pair Category 5 UTP PHY, half duplex
.. data:: ETHERNET_1000BASE_TFD = 42
Four-pair Category 5 UTP PHY, full duplex
.. data:: ETHERNET_10GBASE_X = 43
X PCS/PMA
.. data:: ETHERNET_10GBASE_LX4 = 44
X fiber over 4 lane 1310nm optics
.. data:: ETHERNET_10GBASE_R = 45
R PCS/PMA
.. data:: ETHERNET_10GBASE_ER = 46
R fiber over 1550nm optics
.. data:: ETHERNET_10GBASE_LR = 47
R fiber over 1310nm optics
.. data:: ETHERNET_10GBASE_SR = 48
R fiber over 850nm optics
.. data:: ETHERNET_10GBASE_W = 49
W PCS/PMA
.. data:: ETHERNET_10GBASE_EW = 50
W fiber over 1550nm optics
.. data:: ETHERNET_10GBASE_LW = 51
W fiber over 1310nm optics
.. data:: ETHERNET_10GBASE_SW = 52
W fiber over 850nm optics
.. data:: ETHERNET_1000BASE_ZX = 53
Single-mode fiber over 1550nm optics (Cisco)
.. data:: ETHERNET_1000BASE_CWDM = 54
CWDM with unknown wavelength optics
.. data:: ETHERNET_1000BASE_CWDM_1470 = 55
CWDM with 1470nm optics
.. data:: ETHERNET_1000BASE_CWDM_1490 = 56
CWDM with 1490nm optics
.. data:: ETHERNET_1000BASE_CWDM_1510 = 57
CWDM with 1510nm optics
.. data:: ETHERNET_1000BASE_CWDM_1530 = 58
CWDM with 1530nm optics
.. data:: ETHERNET_1000BASE_CWDM_1550 = 59
CWDM with 1550nm optics
.. data:: ETHERNET_1000BASE_CWDM_1570 = 60
CWDM with 1570nm optics
.. data:: ETHERNET_1000BASE_CWDM_1590 = 61
CWDM with 1590nm optics
.. data:: ETHERNET_1000BASE_CWDM_1610 = 62
CWDM with 1610nm optics
.. data:: ETHERNET_10GBASE_ZR = 63
Cisco-defined, over 1550nm optics
.. data:: ETHERNET_10GBASE_DWDM = 64
DWDM optics
.. data:: ETHERNET_100GBASE_LR4 = 65
fiber over 4 lane optics (long reach)
.. data:: ETHERNET_1000BASE_DWDM = 66
DWDM optics
.. data:: ETHERNET_1000BASE_DWDM_1533 = 67
DWDM with 1533nm optics
.. data:: ETHERNET_1000BASE_DWDM_1537 = 68
DWDM with 1537nm optics
.. data:: ETHERNET_1000BASE_DWDM_1541 = 69
DWDM with 1541nm optics
.. data:: ETHERNET_1000BASE_DWDM_1545 = 70
DWDM with 1545nm optics
.. data:: ETHERNET_1000BASE_DWDM_1549 = 71
DWDM with 1549nm optics
.. data:: ETHERNET_1000BASE_DWDM_1553 = 72
DWDM with 1553nm optics
.. data:: ETHERNET_1000BASE_DWDM_1557 = 73
DWDM with 1557nm optics
.. data:: ETHERNET_1000BASE_DWDM_1561 = 74
DWDM with 1561nm optics
.. data:: ETHERNET_40GBASE_LR4 = 75
fiber over 4 lane optics (long reach)
.. data:: ETHERNET_40GBASE_ER4 = 76
fiber over 4 lane optics (extended reach)
.. data:: ETHERNET_100GBASE_ER4 = 77
fiber over 4 lane optics (extended reach)
.. data:: ETHERNET_1000BASE_EX = 78
X fiber over 1310nm optics
.. data:: ETHERNET_1000BASE_BX10_D = 79
X fibre (D, 10km)
.. data:: ETHERNET_1000BASE_BX10_U = 80
X fibre (U, 10km)
.. data:: ETHERNET_1000BASE_DWDM_1561_42 = 81
DWDM with 1561.42nm optics
.. data:: ETHERNET_1000BASE_DWDM_1560_61 = 82
DWDM with 1560.61nm optics
.. data:: ETHERNET_1000BASE_DWDM_1559_79 = 83
DWDM with 1559.79nm optics
.. data:: ETHERNET_1000BASE_DWDM_1558_98 = 84
DWDM with 1558.98nm optics
.. data:: ETHERNET_1000BASE_DWDM_1558_17 = 85
DWDM with 1558.17nm optics
.. data:: ETHERNET_1000BASE_DWDM_1557_36 = 86
DWDM with 1557.36nm optics
.. data:: ETHERNET_1000BASE_DWDM_1556_55 = 87
DWDM with 1556.55nm optics
.. data:: ETHERNET_1000BASE_DWDM_1555_75 = 88
DWDM with 1555.75nm optics
.. data:: ETHERNET_1000BASE_DWDM_1554_94 = 89
DWDM with 1554.94nm optics
.. data:: ETHERNET_1000BASE_DWDM_1554_13 = 90
DWDM with 1554.13nm optics
.. data:: ETHERNET_1000BASE_DWDM_1553_33 = 91
DWDM with 1553.33nm optics
.. data:: ETHERNET_1000BASE_DWDM_1552_52 = 92
DWDM with 1552.52nm optics
.. data:: ETHERNET_1000BASE_DWDM_1551_72 = 93
DWDM with 1551.72nm optics
.. data:: ETHERNET_1000BASE_DWDM_1550_92 = 94
DWDM with 1550.92nm optics
.. data:: ETHERNET_1000BASE_DWDM_1550_12 = 95
DWDM with 1550.12nm optics
.. data:: ETHERNET_1000BASE_DWDM_1549_32 = 96
DWDM with 1549.32nm optics
.. data:: ETHERNET_1000BASE_DWDM_1548_51 = 97
DWDM with 1548.51nm optics
.. data:: ETHERNET_1000BASE_DWDM_1547_72 = 98
DWDM with 1547.72nm optics
.. data:: ETHERNET_1000BASE_DWDM_1546_92 = 99
DWDM with 1546.92nm optics
.. data:: ETHERNET_1000BASE_DWDM_1546_12 = 100
DWDM with 1546.12nm optics
.. data:: ETHERNET_1000BASE_DWDM_1545_32 = 101
DWDM with 1545.32nm optics
.. data:: ETHERNET_1000BASE_DWDM_1544_53 = 102
DWDM with 1544.53nm optics
.. data:: ETHERNET_1000BASE_DWDM_1543_73 = 103
DWDM with 1543.73nm optics
.. data:: ETHERNET_1000BASE_DWDM_1542_94 = 104
DWDM with 1542.94nm optics
.. data:: ETHERNET_1000BASE_DWDM_1542_14 = 105
DWDM with 1542.14nm optics
.. data:: ETHERNET_1000BASE_DWDM_1541_35 = 106
DWDM with 1541.35nm optics
.. data:: ETHERNET_1000BASE_DWDM_1540_56 = 107
DWDM with 1540.56nm optics
.. data:: ETHERNET_1000BASE_DWDM_1539_77 = 108
DWDM with 1539.77nm optics
.. data:: ETHERNET_1000BASE_DWDM_1538_98 = 109
DWDM with 1538.98nm optics
.. data:: ETHERNET_1000BASE_DWDM_1538_19 = 110
DWDM with 1538.19nm optics
.. data:: ETHERNET_1000BASE_DWDM_1537_40 = 111
DWDM with 1537.40nm optics
.. data:: ETHERNET_1000BASE_DWDM_1536_61 = 112
DWDM with 1536.61nm optics
.. data:: ETHERNET_1000BASE_DWDM_1535_82 = 113
DWDM with 1535.82nm optics
.. data:: ETHERNET_1000BASE_DWDM_1535_04 = 114
DWDM with 1535.04nm optics
.. data:: ETHERNET_1000BASE_DWDM_1534_25 = 115
DWDM with 1534.25nm optics
.. data:: ETHERNET_1000BASE_DWDM_1533_47 = 116
DWDM with 1533.47nm optics
.. data:: ETHERNET_1000BASE_DWDM_1532_68 = 117
DWDM with 1532.68nm optics
.. data:: ETHERNET_1000BASE_DWDM_1531_90 = 118
DWDM with 1531.90nm optics
.. data:: ETHERNET_1000BASE_DWDM_1531_12 = 119
DWDM with 1531.12nm optics
.. data:: ETHERNET_1000BASE_DWDM_1530_33 = 120
DWDM with 1530.33nm optics
.. data:: ETHERNET_1000BASE_DWDM_TUNABLE = 121
DWDM with tunable optics
.. data:: ETHERNET_10GBASE_DWDM_1561_42 = 122
DWDM with 1561.42nm optics
.. data:: ETHERNET_10GBASE_DWDM_1560_61 = 123
DWDM with 1560.61nm optics
.. data:: ETHERNET_10GBASE_DWDM_1559_79 = 124
DWDM with 1559.79nm optics
.. data:: ETHERNET_10GBASE_DWDM_1558_98 = 125
DWDM with 1558.98nm optics
.. data:: ETHERNET_10GBASE_DWDM_1558_17 = 126
DWDM with 1558.17nm optics
.. data:: ETHERNET_10GBASE_DWDM_1557_36 = 127
DWDM with 1557.36nm optics
.. data:: ETHERNET_10GBASE_DWDM_1556_55 = 128
DWDM with 1556.55nm optics
.. data:: ETHERNET_10GBASE_DWDM_1555_75 = 129
DWDM with 1555.75nm optics
.. data:: ETHERNET_10GBASE_DWDM_1554_94 = 130
DWDM with 1554.94nm optics
.. data:: ETHERNET_10GBASE_DWDM_1554_13 = 131
DWDM with 1554.13nm optics
.. data:: ETHERNET_10GBASE_DWDM_1553_33 = 132
DWDM with 1553.33nm optics
.. data:: ETHERNET_10GBASE_DWDM_1552_52 = 133
DWDM with 1552.52nm optics
.. data:: ETHERNET_10GBASE_DWDM_1551_72 = 134
DWDM with 1551.72nm optics
.. data:: ETHERNET_10GBASE_DWDM_1550_92 = 135
DWDM with 1550.92nm optics
.. data:: ETHERNET_10GBASE_DWDM_1550_12 = 136
DWDM with 1550.12nm optics
.. data:: ETHERNET_10GBASE_DWDM_1549_32 = 137
DWDM with 1549.32nm optics
.. data:: ETHERNET_10GBASE_DWDM_1548_51 = 138
DWDM with 1548.51nm optics
.. data:: ETHERNET_10GBASE_DWDM_1547_72 = 139
DWDM with 1547.72nm optics
.. data:: ETHERNET_10GBASE_DWDM_1546_92 = 140
DWDM with 1546.92nm optics
.. data:: ETHERNET_10GBASE_DWDM_1546_12 = 141
DWDM with 1546.12nm optics
.. data:: ETHERNET_10GBASE_DWDM_1545_32 = 142
DWDM with 1545.32nm optics
.. data:: ETHERNET_10GBASE_DWDM_1544_53 = 143
DWDM with 1544.53nm optics
.. data:: ETHERNET_10GBASE_DWDM_1543_73 = 144
DWDM with 1543.73nm optics
.. data:: ETHERNET_10GBASE_DWDM_1542_94 = 145
DWDM with 1542.94nm optics
.. data:: ETHERNET_10GBASE_DWDM_1542_14 = 146
DWDM with 1542.14nm optics
.. data:: ETHERNET_10GBASE_DWDM_1541_35 = 147
DWDM with 1541.35nm optics
.. data:: ETHERNET_10GBASE_DWDM_1540_56 = 148
DWDM with 1540.56nm optics
.. data:: ETHERNET_10GBASE_DWDM_1539_77 = 149
DWDM with 1539.77nm optics
.. data:: ETHERNET_10GBASE_DWDM_1538_98 = 150
DWDM with 1538.98nm optics
.. data:: ETHERNET_10GBASE_DWDM_1538_19 = 151
DWDM with 1538.19nm optics
.. data:: ETHERNET_10GBASE_DWDM_1537_40 = 152
DWDM with 1537.40nm optics
.. data:: ETHERNET_10GBASE_DWDM_1536_61 = 153
DWDM with 1536.61nm optics
.. data:: ETHERNET_10GBASE_DWDM_1535_82 = 154
DWDM with 1535.82nm optics
.. data:: ETHERNET_10GBASE_DWDM_1535_04 = 155
DWDM with 1535.04nm optics
.. data:: ETHERNET_10GBASE_DWDM_1534_25 = 156
DWDM with 1534.25nm optics
.. data:: ETHERNET_10GBASE_DWDM_1533_47 = 157
DWDM with 1533.47nm optics
.. data:: ETHERNET_10GBASE_DWDM_1532_68 = 158
DWDM with 1532.68nm optics
.. data:: ETHERNET_10GBASE_DWDM_1531_90 = 159
DWDM with 1531.90nm optics
.. data:: ETHERNET_10GBASE_DWDM_1531_12 = 160
DWDM with 1531.12nm optics
.. data:: ETHERNET_10GBASE_DWDM_1530_33 = 161
DWDM with 1530.33nm optics
.. data:: ETHERNET_10GBASE_DWDM_TUNABLE = 162
DWDM with tunable optics
.. data:: ETHERNET_40GBASE_DWDM_1561_42 = 163
DWDM with 1561.42nm optics
.. data:: ETHERNET_40GBASE_DWDM_1560_61 = 164
DWDM with 1560.61nm optics
.. data:: ETHERNET_40GBASE_DWDM_1559_79 = 165
DWDM with 1559.79nm optics
.. data:: ETHERNET_40GBASE_DWDM_1558_98 = 166
DWDM with 1558.98nm optics
.. data:: ETHERNET_40GBASE_DWDM_1558_17 = 167
DWDM with 1558.17nm optics
.. data:: ETHERNET_40GBASE_DWDM_1557_36 = 168
DWDM with 1557.36nm optics
.. data:: ETHERNET_40GBASE_DWDM_1556_55 = 169
DWDM with 1556.55nm optics
.. data:: ETHERNET_40GBASE_DWDM_1555_75 = 170
DWDM with 1555.75nm optics
.. data:: ETHERNET_40GBASE_DWDM_1554_94 = 171
DWDM with 1554.94nm optics
.. data:: ETHERNET_40GBASE_DWDM_1554_13 = 172
DWDM with 1554.13nm optics
.. data:: ETHERNET_40GBASE_DWDM_1553_33 = 173
DWDM with 1553.33nm optics
.. data:: ETHERNET_40GBASE_DWDM_1552_52 = 174
DWDM with 1552.52nm optics
.. data:: ETHERNET_40GBASE_DWDM_1551_72 = 175
DWDM with 1551.72nm optics
.. data:: ETHERNET_40GBASE_DWDM_1550_92 = 176
DWDM with 1550.92nm optics
.. data:: ETHERNET_40GBASE_DWDM_1550_12 = 177
DWDM with 1550.12nm optics
.. data:: ETHERNET_40GBASE_DWDM_1549_32 = 178
DWDM with 1549.32nm optics
.. data:: ETHERNET_40GBASE_DWDM_1548_51 = 179
DWDM with 1548.51nm optics
.. data:: ETHERNET_40GBASE_DWDM_1547_72 = 180
DWDM with 1547.72nm optics
.. data:: ETHERNET_40GBASE_DWDM_1546_92 = 181
DWDM with 1546.92nm optics
.. data:: ETHERNET_40GBASE_DWDM_1546_12 = 182
DWDM with 1546.12nm optics
.. data:: ETHERNET_40GBASE_DWDM_1545_32 = 183
DWDM with 1545.32nm optics
.. data:: ETHERNET_40GBASE_DWDM_1544_53 = 184
DWDM with 1544.53nm optics
.. data:: ETHERNET_40GBASE_DWDM_1543_73 = 185
DWDM with 1543.73nm optics
.. data:: ETHERNET_40GBASE_DWDM_1542_94 = 186
DWDM with 1542.94nm optics
.. data:: ETHERNET_40GBASE_DWDM_1542_14 = 187
DWDM with 1542.14nm optics
.. data:: ETHERNET_40GBASE_DWDM_1541_35 = 188
DWDM with 1541.35nm optics
.. data:: ETHERNET_40GBASE_DWDM_1540_56 = 189
DWDM with 1540.56nm optics
.. data:: ETHERNET_40GBASE_DWDM_1539_77 = 190
DWDM with 1539.77nm optics
.. data:: ETHERNET_40GBASE_DWDM_1538_98 = 191
DWDM with 1538.98nm optics
.. data:: ETHERNET_40GBASE_DWDM_1538_19 = 192
DWDM with 1538.19nm optics
.. data:: ETHERNET_40GBASE_DWDM_1537_40 = 193
DWDM with 1537.40nm optics
.. data:: ETHERNET_40GBASE_DWDM_1536_61 = 194
DWDM with 1536.61nm optics
.. data:: ETHERNET_40GBASE_DWDM_1535_82 = 195
DWDM with 1535.82nm optics
.. data:: ETHERNET_40GBASE_DWDM_1535_04 = 196
DWDM with 1535.04nm optics
.. data:: ETHERNET_40GBASE_DWDM_1534_25 = 197
DWDM with 1534.25nm optics
.. data:: ETHERNET_40GBASE_DWDM_1533_47 = 198
DWDM with 1533.47nm optics
.. data:: ETHERNET_40GBASE_DWDM_1532_68 = 199
DWDM with 1532.68nm optics
.. data:: ETHERNET_40GBASE_DWDM_1531_90 = 200
DWDM with 1531.90nm optics
.. data:: ETHERNET_40GBASE_DWDM_1531_12 = 201
DWDM with 1531.12nm optics
.. data:: ETHERNET_40GBASE_DWDM_1530_33 = 202
DWDM with 1530.33nm optics
.. data:: ETHERNET_40GBASE_DWDM_TUNABLE = 203
DWDM with tunable optics
.. data:: ETHERNET_100GBASE_DWDM_1561_42 = 204
DWDM with 1561.42nm optics
.. data:: ETHERNET_100GBASE_DWDM_1560_61 = 205
DWDM with 1560.61nm optics
.. data:: ETHERNET_100GBASE_DWDM_1559_79 = 206
DWDM with 1559.79nm optics
.. data:: ETHERNET_100GBASE_DWDM_1558_98 = 207
DWDM with 1558.98nm optics
.. data:: ETHERNET_100GBASE_DWDM_1558_17 = 208
DWDM with 1558.17nm optics
.. data:: ETHERNET_100GBASE_DWDM_1557_36 = 209
DWDM with 1557.36nm optics
.. data:: ETHERNET_100GBASE_DWDM_1556_55 = 210
DWDM with 1556.55nm optics
.. data:: ETHERNET_100GBASE_DWDM_1555_75 = 211
DWDM with 1555.75nm optics
.. data:: ETHERNET_100GBASE_DWDM_1554_94 = 212
DWDM with 1554.94nm optics
.. data:: ETHERNET_100GBASE_DWDM_1554_13 = 213
DWDM with 1554.13nm optics
.. data:: ETHERNET_100GBASE_DWDM_1553_33 = 214
DWDM with 1553.33nm optics
.. data:: ETHERNET_100GBASE_DWDM_1552_52 = 215
DWDM with 1552.52nm optics
.. data:: ETHERNET_100GBASE_DWDM_1551_72 = 216
DWDM with 1551.72nm optics
.. data:: ETHERNET_100GBASE_DWDM_1550_92 = 217
DWDM with 1550.92nm optics
.. data:: ETHERNET_100GBASE_DWDM_1550_12 = 218
DWDM with 1550.12nm optics
.. data:: ETHERNET_100GBASE_DWDM_1549_32 = 219
DWDM with 1549.32nm optics
.. data:: ETHERNET_100GBASE_DWDM_1548_51 = 220
DWDM with 1548.51nm optics
.. data:: ETHERNET_100GBASE_DWDM_1547_72 = 221
DWDM with 1547.72nm optics
.. data:: ETHERNET_100GBASE_DWDM_1546_92 = 222
DWDM with 1546.92nm optics
.. data:: ETHERNET_100GBASE_DWDM_1546_12 = 223
DWDM with 1546.12nm optics
.. data:: ETHERNET_100GBASE_DWDM_1545_32 = 224
DWDM with 1545.32nm optics
.. data:: ETHERNET_100GBASE_DWDM_1544_53 = 225
DWDM with 1544.53nm optics
.. data:: ETHERNET_100GBASE_DWDM_1543_73 = 226
DWDM with 1543.73nm optics
.. data:: ETHERNET_100GBASE_DWDM_1542_94 = 227
DWDM with 1542.94nm optics
.. data:: ETHERNET_100GBASE_DWDM_1542_14 = 228
DWDM with 1542.14nm optics
.. data:: ETHERNET_100GBASE_DWDM_1541_35 = 229
DWDM with 1541.35nm optics
.. data:: ETHERNET_100GBASE_DWDM_1540_56 = 230
DWDM with 1540.56nm optics
.. data:: ETHERNET_100GBASE_DWDM_1539_77 = 231
DWDM with 1539.77nm optics
.. data:: ETHERNET_100GBASE_DWDM_1538_98 = 232
DWDM with 1538.98nm optics
.. data:: ETHERNET_100GBASE_DWDM_1538_19 = 233
DWDM with 1538.19nm optics
.. data:: ETHERNET_100GBASE_DWDM_1537_40 = 234
DWDM with 1537.40nm optics
.. data:: ETHERNET_100GBASE_DWDM_1536_61 = 235
DWDM with 1536.61nm optics
.. data:: ETHERNET_100GBASE_DWDM_1535_82 = 236
DWDM with 1535.82nm optics
.. data:: ETHERNET_100GBASE_DWDM_1535_04 = 237
DWDM with 1535.04nm optics
.. data:: ETHERNET_100GBASE_DWDM_1534_25 = 238
DWDM with 1534.25nm optics
.. data:: ETHERNET_100GBASE_DWDM_1533_47 = 239
DWDM with 1533.47nm optics
.. data:: ETHERNET_100GBASE_DWDM_1532_68 = 240
DWDM with 1532.68nm optics
.. data:: ETHERNET_100GBASE_DWDM_1531_90 = 241
DWDM with 1531.90nm optics
.. data:: ETHERNET_100GBASE_DWDM_1531_12 = 242
DWDM with 1531.12nm optics
.. data:: ETHERNET_100GBASE_DWDM_1530_33 = 243
DWDM with 1530.33nm optics
.. data:: ETHERNET_100GBASE_DWDM_TUNABLE = 244
DWDM with tunable optics
.. data:: ETHERNET_40GBASE_KR4 = 245
4 lane copper (backplane)
.. data:: ETHERNET_40GBASE_CR4 = 246
4 lane copper (very short reach)
.. data:: ETHERNET_40GBASE_SR4 = 247
fiber over 4 lane optics (short reach)
.. data:: ETHERNET_40GBASE_FR = 248
serial fiber (2+ km)
.. data:: ETHERNET_100GBASE_CR10 = 249
10 lane copper (very short reach)
.. data:: ETHERNET_100GBASE_SR10 = 250
MMF fiber over 10 lane optics (short reach)
.. data:: ETHERNET_40GBASE_CSR4 = 251
fiber over 4 lane optics (extended short reach)
.. data:: ETHERNET_10GBASE_CWDM = 252
CWDM optics
.. data:: ETHERNET_10GBASE_CWDM_TUNABLE = 253
CWDM with tunable optics
.. data:: ETHERNET_10GBASE_CWDM_1470 = 254
CWDM with 1470nm optics
.. data:: ETHERNET_10GBASE_CWDM_1490 = 255
CWDM with 1490nm optics
.. data:: ETHERNET_10GBASE_CWDM_1510 = 256
CWDM with 1510nm optics
.. data:: ETHERNET_10GBASE_CWDM_1530 = 257
CWDM with 1530nm optics
.. data:: ETHERNET_10GBASE_CWDM_1550 = 258
CWDM with 1550nm optics
.. data:: ETHERNET_10GBASE_CWDM_1570 = 259
CWDM with 1570nm optics
.. data:: ETHERNET_10GBASE_CWDM_1590 = 260
CWDM with 1590nm optics
.. data:: ETHERNET_10GBASE_CWDM_1610 = 261
CWDM with 1610nm optics
.. data:: ETHERNET_40GBASE_CWDM = 262
CWDM optics
.. data:: ETHERNET_40GBASE_CWDM_TUNABLE = 263
CWDM with tunable optics
.. data:: ETHERNET_40GBASE_CWDM_1470 = 264
CWDM with 1470nm optics
.. data:: ETHERNET_40GBASE_CWDM_1490 = 265
CWDM with 1490nm optics
.. data:: ETHERNET_40GBASE_CWDM_1510 = 266
CWDM with 1510nm optics
.. data:: ETHERNET_40GBASE_CWDM_1530 = 267
CWDM with 1530nm optics
.. data:: ETHERNET_40GBASE_CWDM_1550 = 268
CWDM with 1550nm optics
.. data:: ETHERNET_40GBASE_CWDM_1570 = 269
CWDM with 1570nm optics
.. data:: ETHERNET_40GBASE_CWDM_1590 = 270
CWDM with 1590nm optics
.. data:: ETHERNET_40GBASE_CWDM_1610 = 271
CWDM with 1610nm optics
.. data:: ETHERNET_100GBASE_CWDM = 272
CWDM optics
.. data:: ETHERNET_100GBASE_CWDM_TUNABLE = 273
CWDM with tunable optics
.. data:: ETHERNET_100GBASE_CWDM_1470 = 274
CWDM with 1470nm optics
.. data:: ETHERNET_100GBASE_CWDM_1490 = 275
CWDM with 1490nm optics
.. data:: ETHERNET_100GBASE_CWDM_1510 = 276
CWDM with 1510nm optics
.. data:: ETHERNET_100GBASE_CWDM_1530 = 277
CWDM with 1530nm optics
.. data:: ETHERNET_100GBASE_CWDM_1550 = 278
CWDM with 1550nm optics
.. data:: ETHERNET_100GBASE_CWDM_1570 = 279
CWDM with 1570nm optics
.. data:: ETHERNET_100GBASE_CWDM_1590 = 280
CWDM with 1590nm optics
.. data:: ETHERNET_100GBASE_CWDM_1610 = 281
CWDM with 1610nm optics
.. data:: ETHERNET_40GBASE_ELPB = 282
Electrical loopback
.. data:: ETHERNET_100GBASE_ELPB = 283
Electrical loopback
.. data:: ETHERNET_100GBASE_LR10 = 284
Fiber over 10 lane optics (long reach)
.. data:: ETHERNET_40GBASE = 285
Four-pair Category 8 STP
.. data:: ETHERNET_100GBASE_KP4 = 286
4 lane copper (backplane)
.. data:: ETHERNET_100GBASE_KR4 = 287
Improved 4 lane copper (backplane)
.. data:: ETHERNET_10GBASE_LRM = 288
Multimode fiber with 1310nm optics (long reach)
.. data:: ETHERNET_10GBASE_CX4 = 289
4 lane X copper
.. data:: ETHERNET_10GBASE = 290
Four-pair Category 6+ UTP
.. data:: ETHERNET_10GBASE_KX4 = 291
4 lane X copper (backplane)
.. data:: ETHERNET_10GBASE_KR = 292
Copper (backplane)
.. data:: ETHERNET_10GBASE_PR = 293
Passive optical network
.. data:: ETHERNET_100BASE_LX = 294
X fiber over 4 lane 1310nm optics
.. data:: ETHERNET_100BASE_ZX = 295
Single-mode fiber over 1550nm optics (Cisco)
.. data:: ETHERNET_1000BASE_BX_D = 296
X fibre (D)
.. data:: ETHERNET_1000BASE_BX_U = 297
X fibre (U)
.. data:: ETHERNET_1000BASE_BX20_D = 298
X fibre (D, 20km)
.. data:: ETHERNET_1000BASE_BX20_U = 299
X fibre (U, 20km)
.. data:: ETHERNET_1000BASE_BX40_D = 300
X fibre (D, 40km)
.. data:: ETHERNET_1000BASE_BX40_DA = 301
X fibre (D, 40km)
.. data:: ETHERNET_1000BASE_BX40_U = 302
X fibre (U, 40km)
.. data:: ETHERNET_1000BASE_BX80_D = 303
X fibre (D, 80km)
.. data:: ETHERNET_1000BASE_BX80_U = 304
X fibre (U, 80km)
.. data:: ETHERNET_1000BASE_BX120_D = 305
X fibre (D, 120km)
.. data:: ETHERNET_1000BASE_BX120_U = 306
X fibre (U, 120km)
.. data:: ETHERNET_10GBASE_BX_D = 307
X fibre (D)
.. data:: ETHERNET_10GBASE_BX_U = 308
X fibre (U)
.. data:: ETHERNET_10GBASE_BX10_D = 309
X fibre (D, 10km)
.. data:: ETHERNET_10GBASE_BX10_U = 310
X fibre (U, 10km)
.. data:: ETHERNET_10GBASE_BX20_D = 311
X fibre (D, 20km)
.. data:: ETHERNET_10GBASE_BX20_U = 312
X fibre (U, 20km)
.. data:: ETHERNET_10GBASE_BX40_D = 313
X fibre (D, 40km)
.. data:: ETHERNET_10GBASE_BX40_U = 314
X fibre (U, 40km)
.. data:: ETHERNET_10GBASE_BX80_D = 315
X fibre (D, 80km)
.. data:: ETHERNET_10GBASE_BX80_U = 316
X fibre (U, 80km)
.. data:: ETHERNET_10GBASE_BX120_D = 317
X fibre (D, 120km)
.. data:: ETHERNET_10GBASE_BX120_U = 318
X fibre (U, 120km)
.. data:: ETHERNET_1000BASE_DR_LX = 319
X fiber over long-wl laser PMD, duplexity
unknown, dual rate
.. data:: ETHERNET_100GBASE_ER4L = 320
fiber over 4 lane optics (25km reach)
.. data:: ETHERNET_100GBASE_SR4 = 321
fiber over 4 lane optics (short reach)
.. data:: ETHERNET_40GBASE_SR_BD = 322
Bi-directional fiber over 2 lane optics (short
reach)
.. data:: ETHERNET_BASE_MAX = 323
ethernet base max
"""
ETHERNET_OTHER = 0
ETHERNET_UNKNOWN = 1
ETHERNET_AUI = 2
ETHERNET_10BASE5 = 3
ETHERNET_FOIRL = 4
ETHERNET_10BASE2 = 5
ETHERNET_10BROAD36 = 6
ETHERNET_10BASE = 7
ETHERNET_10BASE_THD = 8
ETHERNET_10BASE_TFD = 9
ETHERNET_10BASE_FP = 10
ETHERNET_10BASE_FB = 11
ETHERNET_10BASE_FL = 12
ETHERNET_10BASE_FLHD = 13
ETHERNET_10BASE_FLFD = 14
ETHERNET_100BASE_T4 = 15
ETHERNET_100BASE_TX = 16
ETHERNET_100BASE_TXHD = 17
ETHERNET_100BASE_TXFD = 18
ETHERNET_100BASE_FX = 19
ETHERNET_100BASE_FXHD = 20
ETHERNET_100BASE_FXFD = 21
ETHERNET_100BASE_EX = 22
ETHERNET_100BASE_EXHD = 23
ETHERNET_100BASE_EXFD = 24
ETHERNET_100BASE_T2 = 25
ETHERNET_100BASE_T2HD = 26
ETHERNET_100BASE_T2FD = 27
ETHERNET_1000BASE_X = 28
ETHERNET_1000BASE_XHD = 29
ETHERNET_1000BASE_XFD = 30
ETHERNET_1000BASE_LX = 31
ETHERNET_1000BASE_LXHD = 32
ETHERNET_1000BASE_LXFD = 33
ETHERNET_1000BASE_SX = 34
ETHERNET_1000BASE_SXHD = 35
ETHERNET_1000BASE_SXFD = 36
ETHERNET_1000BASE_CX = 37
ETHERNET_1000BASE_CXHD = 38
ETHERNET_1000BASE_CXFD = 39
ETHERNET_1000BASE = 40
ETHERNET_1000BASE_THD = 41
ETHERNET_1000BASE_TFD = 42
ETHERNET_10GBASE_X = 43
ETHERNET_10GBASE_LX4 = 44
ETHERNET_10GBASE_R = 45
ETHERNET_10GBASE_ER = 46
ETHERNET_10GBASE_LR = 47
ETHERNET_10GBASE_SR = 48
ETHERNET_10GBASE_W = 49
ETHERNET_10GBASE_EW = 50
ETHERNET_10GBASE_LW = 51
ETHERNET_10GBASE_SW = 52
ETHERNET_1000BASE_ZX = 53
ETHERNET_1000BASE_CWDM = 54
ETHERNET_1000BASE_CWDM_1470 = 55
ETHERNET_1000BASE_CWDM_1490 = 56
ETHERNET_1000BASE_CWDM_1510 = 57
ETHERNET_1000BASE_CWDM_1530 = 58
ETHERNET_1000BASE_CWDM_1550 = 59
ETHERNET_1000BASE_CWDM_1570 = 60
ETHERNET_1000BASE_CWDM_1590 = 61
ETHERNET_1000BASE_CWDM_1610 = 62
ETHERNET_10GBASE_ZR = 63
ETHERNET_10GBASE_DWDM = 64
ETHERNET_100GBASE_LR4 = 65
ETHERNET_1000BASE_DWDM = 66
ETHERNET_1000BASE_DWDM_1533 = 67
ETHERNET_1000BASE_DWDM_1537 = 68
ETHERNET_1000BASE_DWDM_1541 = 69
ETHERNET_1000BASE_DWDM_1545 = 70
ETHERNET_1000BASE_DWDM_1549 = 71
ETHERNET_1000BASE_DWDM_1553 = 72
ETHERNET_1000BASE_DWDM_1557 = 73
ETHERNET_1000BASE_DWDM_1561 = 74
ETHERNET_40GBASE_LR4 = 75
ETHERNET_40GBASE_ER4 = 76
ETHERNET_100GBASE_ER4 = 77
ETHERNET_1000BASE_EX = 78
ETHERNET_1000BASE_BX10_D = 79
ETHERNET_1000BASE_BX10_U = 80
ETHERNET_1000BASE_DWDM_1561_42 = 81
ETHERNET_1000BASE_DWDM_1560_61 = 82
ETHERNET_1000BASE_DWDM_1559_79 = 83
ETHERNET_1000BASE_DWDM_1558_98 = 84
ETHERNET_1000BASE_DWDM_1558_17 = 85
ETHERNET_1000BASE_DWDM_1557_36 = 86
ETHERNET_1000BASE_DWDM_1556_55 = 87
ETHERNET_1000BASE_DWDM_1555_75 = 88
ETHERNET_1000BASE_DWDM_1554_94 = 89
ETHERNET_1000BASE_DWDM_1554_13 = 90
ETHERNET_1000BASE_DWDM_1553_33 = 91
ETHERNET_1000BASE_DWDM_1552_52 = 92
ETHERNET_1000BASE_DWDM_1551_72 = 93
ETHERNET_1000BASE_DWDM_1550_92 = 94
ETHERNET_1000BASE_DWDM_1550_12 = 95
ETHERNET_1000BASE_DWDM_1549_32 = 96
ETHERNET_1000BASE_DWDM_1548_51 = 97
ETHERNET_1000BASE_DWDM_1547_72 = 98
ETHERNET_1000BASE_DWDM_1546_92 = 99
ETHERNET_1000BASE_DWDM_1546_12 = 100
ETHERNET_1000BASE_DWDM_1545_32 = 101
ETHERNET_1000BASE_DWDM_1544_53 = 102
ETHERNET_1000BASE_DWDM_1543_73 = 103
ETHERNET_1000BASE_DWDM_1542_94 = 104
ETHERNET_1000BASE_DWDM_1542_14 = 105
ETHERNET_1000BASE_DWDM_1541_35 = 106
ETHERNET_1000BASE_DWDM_1540_56 = 107
ETHERNET_1000BASE_DWDM_1539_77 = 108
ETHERNET_1000BASE_DWDM_1538_98 = 109
ETHERNET_1000BASE_DWDM_1538_19 = 110
ETHERNET_1000BASE_DWDM_1537_40 = 111
ETHERNET_1000BASE_DWDM_1536_61 = 112
ETHERNET_1000BASE_DWDM_1535_82 = 113
ETHERNET_1000BASE_DWDM_1535_04 = 114
ETHERNET_1000BASE_DWDM_1534_25 = 115
ETHERNET_1000BASE_DWDM_1533_47 = 116
ETHERNET_1000BASE_DWDM_1532_68 = 117
ETHERNET_1000BASE_DWDM_1531_90 = 118
ETHERNET_1000BASE_DWDM_1531_12 = 119
ETHERNET_1000BASE_DWDM_1530_33 = 120
ETHERNET_1000BASE_DWDM_TUNABLE = 121
ETHERNET_10GBASE_DWDM_1561_42 = 122
ETHERNET_10GBASE_DWDM_1560_61 = 123
ETHERNET_10GBASE_DWDM_1559_79 = 124
ETHERNET_10GBASE_DWDM_1558_98 = 125
ETHERNET_10GBASE_DWDM_1558_17 = 126
ETHERNET_10GBASE_DWDM_1557_36 = 127
ETHERNET_10GBASE_DWDM_1556_55 = 128
ETHERNET_10GBASE_DWDM_1555_75 = 129
ETHERNET_10GBASE_DWDM_1554_94 = 130
ETHERNET_10GBASE_DWDM_1554_13 = 131
ETHERNET_10GBASE_DWDM_1553_33 = 132
ETHERNET_10GBASE_DWDM_1552_52 = 133
ETHERNET_10GBASE_DWDM_1551_72 = 134
ETHERNET_10GBASE_DWDM_1550_92 = 135
ETHERNET_10GBASE_DWDM_1550_12 = 136
ETHERNET_10GBASE_DWDM_1549_32 = 137
ETHERNET_10GBASE_DWDM_1548_51 = 138
ETHERNET_10GBASE_DWDM_1547_72 = 139
ETHERNET_10GBASE_DWDM_1546_92 = 140
ETHERNET_10GBASE_DWDM_1546_12 = 141
ETHERNET_10GBASE_DWDM_1545_32 = 142
ETHERNET_10GBASE_DWDM_1544_53 = 143
ETHERNET_10GBASE_DWDM_1543_73 = 144
ETHERNET_10GBASE_DWDM_1542_94 = 145
ETHERNET_10GBASE_DWDM_1542_14 = 146
ETHERNET_10GBASE_DWDM_1541_35 = 147
ETHERNET_10GBASE_DWDM_1540_56 = 148
ETHERNET_10GBASE_DWDM_1539_77 = 149
ETHERNET_10GBASE_DWDM_1538_98 = 150
ETHERNET_10GBASE_DWDM_1538_19 = 151
ETHERNET_10GBASE_DWDM_1537_40 = 152
ETHERNET_10GBASE_DWDM_1536_61 = 153
ETHERNET_10GBASE_DWDM_1535_82 = 154
ETHERNET_10GBASE_DWDM_1535_04 = 155
ETHERNET_10GBASE_DWDM_1534_25 = 156
ETHERNET_10GBASE_DWDM_1533_47 = 157
ETHERNET_10GBASE_DWDM_1532_68 = 158
ETHERNET_10GBASE_DWDM_1531_90 = 159
ETHERNET_10GBASE_DWDM_1531_12 = 160
ETHERNET_10GBASE_DWDM_1530_33 = 161
ETHERNET_10GBASE_DWDM_TUNABLE = 162
ETHERNET_40GBASE_DWDM_1561_42 = 163
ETHERNET_40GBASE_DWDM_1560_61 = 164
ETHERNET_40GBASE_DWDM_1559_79 = 165
ETHERNET_40GBASE_DWDM_1558_98 = 166
ETHERNET_40GBASE_DWDM_1558_17 = 167
ETHERNET_40GBASE_DWDM_1557_36 = 168
ETHERNET_40GBASE_DWDM_1556_55 = 169
ETHERNET_40GBASE_DWDM_1555_75 = 170
ETHERNET_40GBASE_DWDM_1554_94 = 171
ETHERNET_40GBASE_DWDM_1554_13 = 172
ETHERNET_40GBASE_DWDM_1553_33 = 173
ETHERNET_40GBASE_DWDM_1552_52 = 174
ETHERNET_40GBASE_DWDM_1551_72 = 175
ETHERNET_40GBASE_DWDM_1550_92 = 176
ETHERNET_40GBASE_DWDM_1550_12 = 177
ETHERNET_40GBASE_DWDM_1549_32 = 178
ETHERNET_40GBASE_DWDM_1548_51 = 179
ETHERNET_40GBASE_DWDM_1547_72 = 180
ETHERNET_40GBASE_DWDM_1546_92 = 181
ETHERNET_40GBASE_DWDM_1546_12 = 182
ETHERNET_40GBASE_DWDM_1545_32 = 183
ETHERNET_40GBASE_DWDM_1544_53 = 184
ETHERNET_40GBASE_DWDM_1543_73 = 185
ETHERNET_40GBASE_DWDM_1542_94 = 186
ETHERNET_40GBASE_DWDM_1542_14 = 187
ETHERNET_40GBASE_DWDM_1541_35 = 188
ETHERNET_40GBASE_DWDM_1540_56 = 189
ETHERNET_40GBASE_DWDM_1539_77 = 190
ETHERNET_40GBASE_DWDM_1538_98 = 191
ETHERNET_40GBASE_DWDM_1538_19 = 192
ETHERNET_40GBASE_DWDM_1537_40 = 193
ETHERNET_40GBASE_DWDM_1536_61 = 194
ETHERNET_40GBASE_DWDM_1535_82 = 195
ETHERNET_40GBASE_DWDM_1535_04 = 196
ETHERNET_40GBASE_DWDM_1534_25 = 197
ETHERNET_40GBASE_DWDM_1533_47 = 198
ETHERNET_40GBASE_DWDM_1532_68 = 199
ETHERNET_40GBASE_DWDM_1531_90 = 200
ETHERNET_40GBASE_DWDM_1531_12 = 201
ETHERNET_40GBASE_DWDM_1530_33 = 202
ETHERNET_40GBASE_DWDM_TUNABLE = 203
ETHERNET_100GBASE_DWDM_1561_42 = 204
ETHERNET_100GBASE_DWDM_1560_61 = 205
ETHERNET_100GBASE_DWDM_1559_79 = 206
ETHERNET_100GBASE_DWDM_1558_98 = 207
ETHERNET_100GBASE_DWDM_1558_17 = 208
ETHERNET_100GBASE_DWDM_1557_36 = 209
ETHERNET_100GBASE_DWDM_1556_55 = 210
ETHERNET_100GBASE_DWDM_1555_75 = 211
ETHERNET_100GBASE_DWDM_1554_94 = 212
ETHERNET_100GBASE_DWDM_1554_13 = 213
ETHERNET_100GBASE_DWDM_1553_33 = 214
ETHERNET_100GBASE_DWDM_1552_52 = 215
ETHERNET_100GBASE_DWDM_1551_72 = 216
ETHERNET_100GBASE_DWDM_1550_92 = 217
ETHERNET_100GBASE_DWDM_1550_12 = 218
ETHERNET_100GBASE_DWDM_1549_32 = 219
ETHERNET_100GBASE_DWDM_1548_51 = 220
ETHERNET_100GBASE_DWDM_1547_72 = 221
ETHERNET_100GBASE_DWDM_1546_92 = 222
ETHERNET_100GBASE_DWDM_1546_12 = 223
ETHERNET_100GBASE_DWDM_1545_32 = 224
ETHERNET_100GBASE_DWDM_1544_53 = 225
ETHERNET_100GBASE_DWDM_1543_73 = 226
ETHERNET_100GBASE_DWDM_1542_94 = 227
ETHERNET_100GBASE_DWDM_1542_14 = 228
ETHERNET_100GBASE_DWDM_1541_35 = 229
ETHERNET_100GBASE_DWDM_1540_56 = 230
ETHERNET_100GBASE_DWDM_1539_77 = 231
ETHERNET_100GBASE_DWDM_1538_98 = 232
ETHERNET_100GBASE_DWDM_1538_19 = 233
ETHERNET_100GBASE_DWDM_1537_40 = 234
ETHERNET_100GBASE_DWDM_1536_61 = 235
ETHERNET_100GBASE_DWDM_1535_82 = 236
ETHERNET_100GBASE_DWDM_1535_04 = 237
ETHERNET_100GBASE_DWDM_1534_25 = 238
ETHERNET_100GBASE_DWDM_1533_47 = 239
ETHERNET_100GBASE_DWDM_1532_68 = 240
ETHERNET_100GBASE_DWDM_1531_90 = 241
ETHERNET_100GBASE_DWDM_1531_12 = 242
ETHERNET_100GBASE_DWDM_1530_33 = 243
ETHERNET_100GBASE_DWDM_TUNABLE = 244
ETHERNET_40GBASE_KR4 = 245
ETHERNET_40GBASE_CR4 = 246
ETHERNET_40GBASE_SR4 = 247
ETHERNET_40GBASE_FR = 248
ETHERNET_100GBASE_CR10 = 249
ETHERNET_100GBASE_SR10 = 250
ETHERNET_40GBASE_CSR4 = 251
ETHERNET_10GBASE_CWDM = 252
ETHERNET_10GBASE_CWDM_TUNABLE = 253
ETHERNET_10GBASE_CWDM_1470 = 254
ETHERNET_10GBASE_CWDM_1490 = 255
ETHERNET_10GBASE_CWDM_1510 = 256
ETHERNET_10GBASE_CWDM_1530 = 257
ETHERNET_10GBASE_CWDM_1550 = 258
ETHERNET_10GBASE_CWDM_1570 = 259
ETHERNET_10GBASE_CWDM_1590 = 260
ETHERNET_10GBASE_CWDM_1610 = 261
ETHERNET_40GBASE_CWDM = 262
ETHERNET_40GBASE_CWDM_TUNABLE = 263
ETHERNET_40GBASE_CWDM_1470 = 264
ETHERNET_40GBASE_CWDM_1490 = 265
ETHERNET_40GBASE_CWDM_1510 = 266
ETHERNET_40GBASE_CWDM_1530 = 267
ETHERNET_40GBASE_CWDM_1550 = 268
ETHERNET_40GBASE_CWDM_1570 = 269
ETHERNET_40GBASE_CWDM_1590 = 270
ETHERNET_40GBASE_CWDM_1610 = 271
ETHERNET_100GBASE_CWDM = 272
ETHERNET_100GBASE_CWDM_TUNABLE = 273
ETHERNET_100GBASE_CWDM_1470 = 274
ETHERNET_100GBASE_CWDM_1490 = 275
ETHERNET_100GBASE_CWDM_1510 = 276
ETHERNET_100GBASE_CWDM_1530 = 277
ETHERNET_100GBASE_CWDM_1550 = 278
ETHERNET_100GBASE_CWDM_1570 = 279
ETHERNET_100GBASE_CWDM_1590 = 280
ETHERNET_100GBASE_CWDM_1610 = 281
ETHERNET_40GBASE_ELPB = 282
ETHERNET_100GBASE_ELPB = 283
ETHERNET_100GBASE_LR10 = 284
ETHERNET_40GBASE = 285
ETHERNET_100GBASE_KP4 = 286
ETHERNET_100GBASE_KR4 = 287
ETHERNET_10GBASE_LRM = 288
ETHERNET_10GBASE_CX4 = 289
ETHERNET_10GBASE = 290
ETHERNET_10GBASE_KX4 = 291
ETHERNET_10GBASE_KR = 292
ETHERNET_10GBASE_PR = 293
ETHERNET_100BASE_LX = 294
ETHERNET_100BASE_ZX = 295
ETHERNET_1000BASE_BX_D = 296
ETHERNET_1000BASE_BX_U = 297
ETHERNET_1000BASE_BX20_D = 298
ETHERNET_1000BASE_BX20_U = 299
ETHERNET_1000BASE_BX40_D = 300
ETHERNET_1000BASE_BX40_DA = 301
ETHERNET_1000BASE_BX40_U = 302
ETHERNET_1000BASE_BX80_D = 303
ETHERNET_1000BASE_BX80_U = 304
ETHERNET_1000BASE_BX120_D = 305
ETHERNET_1000BASE_BX120_U = 306
ETHERNET_10GBASE_BX_D = 307
ETHERNET_10GBASE_BX_U = 308
ETHERNET_10GBASE_BX10_D = 309
ETHERNET_10GBASE_BX10_U = 310
ETHERNET_10GBASE_BX20_D = 311
ETHERNET_10GBASE_BX20_U = 312
ETHERNET_10GBASE_BX40_D = 313
ETHERNET_10GBASE_BX40_U = 314
ETHERNET_10GBASE_BX80_D = 315
ETHERNET_10GBASE_BX80_U = 316
ETHERNET_10GBASE_BX120_D = 317
ETHERNET_10GBASE_BX120_U = 318
ETHERNET_1000BASE_DR_LX = 319
ETHERNET_100GBASE_ER4L = 320
ETHERNET_100GBASE_SR4 = 321
ETHERNET_40GBASE_SR_BD = 322
ETHERNET_BASE_MAX = 323
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetMediaEnum']
class EthernetPortEnableEnum(Enum):
"""
EthernetPortEnableEnum
Port admin state
.. data:: DISABLED = 0
Port disabled, both directions
.. data:: RX_ENABLED = 1
Port enabled rx direction only
.. data:: TX_ENABLED = 2
Port enabled tx direction only
.. data:: ENABLED = 3
Port enabled, both directions
"""
DISABLED = 0
RX_ENABLED = 1
TX_ENABLED = 2
ENABLED = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetPortEnableEnum']
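# Usage sketch (illustrative; 'intf' is a hypothetical Interface instance
# populated by a read operation, see the EthernetInterface sketch below):
#
#     if intf.admin_state == EthernetPortEnableEnum.ENABLED:
#         print('port is administratively enabled in both directions')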
class EthernetSpeedEnum(Enum):
"""
EthernetSpeedEnum
Speed
.. data:: ETHERNET_SPEED_INVALID = 0
ethernet speed invalid
.. data:: TEN_MBPS = 1
ten mbps
.. data:: HUNDRED_MBPS = 2
hundred mbps
.. data:: ONE_GBPS = 3
one gbps
.. data:: TEN_GBPS = 4
ten gbps
.. data:: FORTY_GBPS = 5
forty gbps
.. data:: HUNDRED_GBPS = 6
hundred gbps
.. data:: ETHERNET_SPEED_TYPES_COUNT = 7
ethernet speed types count
"""
ETHERNET_SPEED_INVALID = 0
TEN_MBPS = 1
HUNDRED_MBPS = 2
ONE_GBPS = 3
TEN_GBPS = 4
FORTY_GBPS = 5
HUNDRED_GBPS = 6
ETHERNET_SPEED_TYPES_COUNT = 7
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetSpeedEnum']
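# Usage sketch (hypothetical helper, not part of the generated bindings):
# mapping the operational speed enum to a nominal rate in Mbps.
#
#     _SPEED_MBPS = {
#         EthernetSpeedEnum.TEN_MBPS: 10,
#         EthernetSpeedEnum.HUNDRED_MBPS: 100,
#         EthernetSpeedEnum.ONE_GBPS: 1000,
#         EthernetSpeedEnum.TEN_GBPS: 10000,
#         EthernetSpeedEnum.FORTY_GBPS: 40000,
#         EthernetSpeedEnum.HUNDRED_GBPS: 100000,
#     }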
class EthernetInterface(object):
"""
Ethernet operational data
.. attribute:: berts
Ethernet controller BERT table
**type**\: :py:class:`Berts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Berts>`
.. attribute:: interfaces
Ethernet controller info table
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces>`
.. attribute:: statistics
Ethernet controller statistics table
**type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Statistics>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.berts = EthernetInterface.Berts()
self.berts.parent = self
self.interfaces = EthernetInterface.Interfaces()
self.interfaces.parent = self
self.statistics = EthernetInterface.Statistics()
self.statistics.parent = self
class Statistics(object):
"""
Ethernet controller statistics table
.. attribute:: statistic
Ethernet statistics information
**type**\: list of :py:class:`Statistic <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Statistics.Statistic>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.statistic = YList()
self.statistic.parent = self
self.statistic.name = 'statistic'
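# The 'statistic' YList behaves like a Python list with one Statistic entry
# per Ethernet controller after a read. Sketch (assuming 'eth' was read as
# in the EthernetInterface example above):
#
#     for stat in eth.statistics.statistic:
#         print(stat.interface_name, stat.received_total_frames)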
class Statistic(object):
"""
Ethernet statistics information
.. attribute:: interface_name <key>
The name of the interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: aborted_packet_drops
Drops due to packet abort
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: buffer_underrun_packet_drops
Drops due to buffer underrun
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_ether_stats_fragments
Bad Frames < 64 Octet, dropped
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_ether_stats_undersize_pkts
Good frames < 64 Octet, dropped
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_giant_packets_greaterthan_mru
Good frames > MRU, dropped
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_jabbers_packets_greaterthan_mru
Bad Frames > MRU, dropped
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_miscellaneous_error_packets
Any other errors not counted
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: dropped_packets_with_crc_align_errors
Frames 64 \- MRU with CRC error
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: ether_stats_collisions
All collision events
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: invalid_dest_mac_drop_packets
Drops due to the destination MAC not matching
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: invalid_encap_drop_packets
Drops due to the encapsulation or ether type not matching
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: miscellaneous_output_errors
Any other errors not counted
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: number_of_aborted_packets_dropped
Drops due to packet abort
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: number_of_buffer_overrun_packets_dropped
Drops due to buffer overrun
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: number_of_miscellaneous_packets_dropped
Any other drops not counted
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: numberof_invalid_vlan_id_packets_dropped
Drops due to invalid VLAN id
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received8021q_frames
All 802.1Q frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_broadcast_frames
Received broadcast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_good_bytes
Total octets of all good frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_good_frames
Received Good Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_multicast_frames
Received multicast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_pause_frames
All pause frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total64_octet_frames
All 64 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_bytes
Total octets of all frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_frames
All frames, good or bad
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from1024_to1518
All 1024\-1518 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from128_to255
All 128\-255 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from1519_to_max
All > 1518 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from256_to511
All 256\-511 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from512_to1023
All 512\-1023 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_total_octet_frames_from65_to127
All 65\-127 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_unicast_frames
Received unicast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: received_unknown_opcodes
Unsupported MAC Control frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: rfc2819_ether_stats_crc_align_errors
RFC2819 etherStatsCRCAlignErrors
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: rfc2819_ether_stats_jabbers
RFC2819 etherStatsJabbers
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: rfc2819_ether_stats_oversized_pkts
RFC2819 etherStatsOversizedPkts
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: rfc3635dot3_stats_alignment_errors
RFC3635 dot3StatsAlignmentErrors
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: symbol_errors
Symbol errors
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: total_bytes_transmitted
Total octets of all frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: total_frames_transmitted
All frames, good or bad
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: total_good_bytes_transmitted
Total octets of all good frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted8021q_frames
All 802.1Q frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_broadcast_frames
Transmitted broadcast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_good_frames
Transmitted Good Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_multicast_frames
Transmitted multicast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total64_octet_frames
All 64 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from1024_to1518
All 1024\-1518 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from128_to255
All 128\-255 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from1518_to_max
All > 1518 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from256_to511
All 256\-511 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from512_to1023
All 512\-1023 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_octet_frames_from65_to127
All 65\-127 Octet Frame Count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_total_pause_frames
All pause frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: transmitted_unicast_frames
Transmitted unicast Frames
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: uncounted_dropped_frames
Any other drops not counted
**type**\: long
**range:** 0..18446744073709551615
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.interface_name = None
self.aborted_packet_drops = None
self.buffer_underrun_packet_drops = None
self.dropped_ether_stats_fragments = None
self.dropped_ether_stats_undersize_pkts = None
self.dropped_giant_packets_greaterthan_mru = None
self.dropped_jabbers_packets_greaterthan_mru = None
self.dropped_miscellaneous_error_packets = None
self.dropped_packets_with_crc_align_errors = None
self.ether_stats_collisions = None
self.invalid_dest_mac_drop_packets = None
self.invalid_encap_drop_packets = None
self.miscellaneous_output_errors = None
self.number_of_aborted_packets_dropped = None
self.number_of_buffer_overrun_packets_dropped = None
self.number_of_miscellaneous_packets_dropped = None
self.numberof_invalid_vlan_id_packets_dropped = None
self.received8021q_frames = None
self.received_broadcast_frames = None
self.received_good_bytes = None
self.received_good_frames = None
self.received_multicast_frames = None
self.received_pause_frames = None
self.received_total64_octet_frames = None
self.received_total_bytes = None
self.received_total_frames = None
self.received_total_octet_frames_from1024_to1518 = None
self.received_total_octet_frames_from128_to255 = None
self.received_total_octet_frames_from1519_to_max = None
self.received_total_octet_frames_from256_to511 = None
self.received_total_octet_frames_from512_to1023 = None
self.received_total_octet_frames_from65_to127 = None
self.received_unicast_frames = None
self.received_unknown_opcodes = None
self.rfc2819_ether_stats_crc_align_errors = None
self.rfc2819_ether_stats_jabbers = None
self.rfc2819_ether_stats_oversized_pkts = None
self.rfc3635dot3_stats_alignment_errors = None
self.symbol_errors = None
self.total_bytes_transmitted = None
self.total_frames_transmitted = None
self.total_good_bytes_transmitted = None
self.transmitted8021q_frames = None
self.transmitted_broadcast_frames = None
self.transmitted_good_frames = None
self.transmitted_multicast_frames = None
self.transmitted_total64_octet_frames = None
self.transmitted_total_octet_frames_from1024_to1518 = None
self.transmitted_total_octet_frames_from128_to255 = None
self.transmitted_total_octet_frames_from1518_to_max = None
self.transmitted_total_octet_frames_from256_to511 = None
self.transmitted_total_octet_frames_from512_to1023 = None
self.transmitted_total_octet_frames_from65_to127 = None
self.transmitted_total_pause_frames = None
self.transmitted_unicast_frames = None
self.uncounted_dropped_frames = None
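# Usage sketch: counters arrive as plain Python ints (None until populated),
# so derived totals are summed by the caller. The field selection below is
# illustrative, not an exhaustive list of RX drop counters:
#
#     rx_drops = sum(filter(None, [stat.aborted_packet_drops,
#                                  stat.buffer_underrun_packet_drops,
#                                  stat.invalid_dest_mac_drop_packets,
#                                  stat.invalid_encap_drop_packets]))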
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:statistics/Cisco-IOS-XR-drivers-media-eth-oper:statistic[Cisco-IOS-XR-drivers-media-eth-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.aborted_packet_drops is not None:
return True
if self.buffer_underrun_packet_drops is not None:
return True
if self.dropped_ether_stats_fragments is not None:
return True
if self.dropped_ether_stats_undersize_pkts is not None:
return True
if self.dropped_giant_packets_greaterthan_mru is not None:
return True
if self.dropped_jabbers_packets_greaterthan_mru is not None:
return True
if self.dropped_miscellaneous_error_packets is not None:
return True
if self.dropped_packets_with_crc_align_errors is not None:
return True
if self.ether_stats_collisions is not None:
return True
if self.invalid_dest_mac_drop_packets is not None:
return True
if self.invalid_encap_drop_packets is not None:
return True
if self.miscellaneous_output_errors is not None:
return True
if self.number_of_aborted_packets_dropped is not None:
return True
if self.number_of_buffer_overrun_packets_dropped is not None:
return True
if self.number_of_miscellaneous_packets_dropped is not None:
return True
if self.numberof_invalid_vlan_id_packets_dropped is not None:
return True
if self.received8021q_frames is not None:
return True
if self.received_broadcast_frames is not None:
return True
if self.received_good_bytes is not None:
return True
if self.received_good_frames is not None:
return True
if self.received_multicast_frames is not None:
return True
if self.received_pause_frames is not None:
return True
if self.received_total64_octet_frames is not None:
return True
if self.received_total_bytes is not None:
return True
if self.received_total_frames is not None:
return True
if self.received_total_octet_frames_from1024_to1518 is not None:
return True
if self.received_total_octet_frames_from128_to255 is not None:
return True
if self.received_total_octet_frames_from1519_to_max is not None:
return True
if self.received_total_octet_frames_from256_to511 is not None:
return True
if self.received_total_octet_frames_from512_to1023 is not None:
return True
if self.received_total_octet_frames_from65_to127 is not None:
return True
if self.received_unicast_frames is not None:
return True
if self.received_unknown_opcodes is not None:
return True
if self.rfc2819_ether_stats_crc_align_errors is not None:
return True
if self.rfc2819_ether_stats_jabbers is not None:
return True
if self.rfc2819_ether_stats_oversized_pkts is not None:
return True
if self.rfc3635dot3_stats_alignment_errors is not None:
return True
if self.symbol_errors is not None:
return True
if self.total_bytes_transmitted is not None:
return True
if self.total_frames_transmitted is not None:
return True
if self.total_good_bytes_transmitted is not None:
return True
if self.transmitted8021q_frames is not None:
return True
if self.transmitted_broadcast_frames is not None:
return True
if self.transmitted_good_frames is not None:
return True
if self.transmitted_multicast_frames is not None:
return True
if self.transmitted_total64_octet_frames is not None:
return True
if self.transmitted_total_octet_frames_from1024_to1518 is not None:
return True
if self.transmitted_total_octet_frames_from128_to255 is not None:
return True
if self.transmitted_total_octet_frames_from1518_to_max is not None:
return True
if self.transmitted_total_octet_frames_from256_to511 is not None:
return True
if self.transmitted_total_octet_frames_from512_to1023 is not None:
return True
if self.transmitted_total_octet_frames_from65_to127 is not None:
return True
if self.transmitted_total_pause_frames is not None:
return True
if self.transmitted_unicast_frames is not None:
return True
if self.uncounted_dropped_frames is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Statistics.Statistic']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:statistics'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.statistic is not None:
for child_ref in self.statistic:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Statistics']['meta_info']
class Interfaces(object):
"""
Ethernet controller info table
.. attribute:: interface
Ethernet controller information
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.interface = YList()
self.interface.parent = self
self.interface.name = 'interface'
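# Usage sketch (assuming 'eth' was read as in the EthernetInterface example):
#
#     for intf in eth.interfaces.interface:
#         print(intf.interface_name, 'up' if intf.oper_state_up else 'down')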
class Interface(object):
"""
Ethernet controller information
.. attribute:: interface_name <key>
The name of the interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: admin_state
Port Administrative State
**type**\: :py:class:`EthernetPortEnableEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetPortEnableEnum>`
.. attribute:: layer1_info
Layer 1 information
**type**\: :py:class:`Layer1Info <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info>`
.. attribute:: mac_info
MAC Layer information
**type**\: :py:class:`MacInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.MacInfo>`
.. attribute:: oper_state_up
Port Operational state \- TRUE if up
**type**\: bool
.. attribute:: phy_info
PHY information
**type**\: :py:class:`PhyInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo>`
.. attribute:: transport_info
Transport state information
**type**\: :py:class:`TransportInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.TransportInfo>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.interface_name = None
self.admin_state = None
self.layer1_info = EthernetInterface.Interfaces.Interface.Layer1Info()
self.layer1_info.parent = self
self.mac_info = EthernetInterface.Interfaces.Interface.MacInfo()
self.mac_info.parent = self
self.oper_state_up = None
self.phy_info = EthernetInterface.Interfaces.Interface.PhyInfo()
self.phy_info.parent = self
self.transport_info = EthernetInterface.Interfaces.Interface.TransportInfo()
self.transport_info.parent = self
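# Note: child containers are pre-instantiated here and wired with 'parent'
# back-references; _common_path walks this chain to build the instance
# XPath, which is why accessing a child's path in isolation raises
# YPYModelError.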
class PhyInfo(object):
"""
PHY information
.. attribute:: fec_details
Forward Error Correction information
**type**\: :py:class:`FecDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.FecDetails>`
.. attribute:: loopback
Port operational loopback
**type**\: :py:class:`EthernetLoopbackEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetLoopbackEnum>`
.. attribute:: media_type
Port media type
**type**\: :py:class:`EthernetMediaEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetMediaEnum>`
.. attribute:: phy_details
Details about the PHY
**type**\: :py:class:`PhyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails>`
.. attribute:: phy_present
Presence of PHY
**type**\: :py:class:`EtherPhyPresentEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherPhyPresentEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.fec_details = EthernetInterface.Interfaces.Interface.PhyInfo.FecDetails()
self.fec_details.parent = self
self.loopback = None
self.media_type = None
self.phy_details = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails()
self.phy_details.parent = self
self.phy_present = None
class PhyDetails(object):
"""
Details about the PHY
.. attribute:: dig_opt_mon_alarm_thresholds
Digital Optical Monitoring alarm thresholds
**type**\: :py:class:`DigOptMonAlarmThresholds <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds>`
.. attribute:: dig_opt_mon_alarms
Digital Optical Monitoring alarms
**type**\: :py:class:`DigOptMonAlarms <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarms>`
.. attribute:: lane
Digital Optical Monitoring (per lane information)
**type**\: list of :py:class:`Lane <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.Lane>`
.. attribute:: lane_field_validity
Digital Optical Monitoring (per lane information) validity
**type**\: :py:class:`LaneFieldValidity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.LaneFieldValidity>`
.. attribute:: optics_wavelength
Wavelength of the optics being used in nm \* 1000
**type**\: int
**range:** 0..4294967295
.. attribute:: transceiver_temperature
The temperature of the transceiver (mDegrees C)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transceiver_voltage
The input voltage to the transceiver (mV)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: vendor
Name of the port optics manufacturer
**type**\: str
.. attribute:: vendor_part_number
Part number for the port optics
**type**\: str
.. attribute:: vendor_serial_number
Serial number for the port optics
**type**\: str
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.dig_opt_mon_alarm_thresholds = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds()
self.dig_opt_mon_alarm_thresholds.parent = self
self.dig_opt_mon_alarms = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarms()
self.dig_opt_mon_alarms.parent = self
self.lane = YList()
self.lane.parent = self
self.lane.name = 'lane'
self.lane_field_validity = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.LaneFieldValidity()
self.lane_field_validity.parent = self
self.optics_wavelength = None
self.transceiver_temperature = None
self.transceiver_voltage = None
self.vendor = None
self.vendor_part_number = None
self.vendor_serial_number = None
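# Usage sketch: optics_wavelength is reported in nm * 1000 and the
# transceiver readings in milli-units (see the attribute docstrings above),
# so a caller typically rescales ('phy' is a hypothetical PhyDetails
# instance):
#
#     wavelength_nm = phy.optics_wavelength / 1000.0
#     temperature_c = phy.transceiver_temperature / 1000.0
#     voltage_v = phy.transceiver_voltage / 1000.0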
class LaneFieldValidity(object):
"""
Digital Optical Monitoring (per lane
information) validity
.. attribute:: laser_bias_valid
The laser bias 'per lane' field is valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: receive_power_valid
The receive power 'per lane' field is valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transmit_power_valid
The transmit power 'per lane' field is valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: wavelength_valid
The wavelength 'per lane' field is valid
**type**\: int
**range:** \-2147483648..2147483647
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.laser_bias_valid = None
self.receive_power_valid = None
self.transmit_power_valid = None
self.wavelength_valid = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:lane-field-validity'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.laser_bias_valid is not None:
return True
if self.receive_power_valid is not None:
return True
if self.transmit_power_valid is not None:
return True
if self.wavelength_valid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.LaneFieldValidity']['meta_info']
class DigOptMonAlarmThresholds(object):
"""
Digital Optical Monitoring alarm thresholds
.. attribute:: field_validity
Field validity
**type**\: :py:class:`FieldValidity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds.FieldValidity>`
.. attribute:: laser_bias_alarm_high
Laser bias high alarm threshold (mA)
**type**\: int
**range:** 0..4294967295
.. attribute:: laser_bias_alarm_low
Laser bias low alarm threshold (mA)
**type**\: int
**range:** 0..4294967295
.. attribute:: laser_bias_warning_high
Laser bias high warning threshold (mA)
**type**\: int
**range:** 0..4294967295
.. attribute:: laser_bias_warning_low
Laser bias low warning threshold (mA)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_receive_power_alarm_high
High optical receive power alarm threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_receive_power_alarm_low
Low optical receive power alarm threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_receive_power_warning_high
High optical receive power warning threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_receive_power_warning_low
Low optical receive power warning threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_transmit_power_alarm_high
High optical transmit power alarm threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_transmit_power_alarm_low
Low optical transmit power alarm threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_transmit_power_warning_high
High optical transmit power warning threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: optical_transmit_power_warning_low
Low optical transmit power warning threshold (mW)
**type**\: int
**range:** 0..4294967295
.. attribute:: transceiver_temperature_alarm_high
Transceiver high temperature alarm threshold (mDegrees C)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transceiver_temperature_alarm_low
Transceiver low temperature alarm threshold (mDegrees C)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transceiver_temperature_warning_high
Transceiver high temperature warning threshold (mDegrees C)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transceiver_temperature_warning_low
Transceiver low temperature warning threshold (mDegrees C)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transceiver_voltage_alarm_high
Transceiver high voltage alarm threshold (mV)
**type**\: int
**range:** 0..4294967295
.. attribute:: transceiver_voltage_alarm_low
Transceiver low voltage alarm threshold (mV)
**type**\: int
**range:** 0..4294967295
.. attribute:: transceiver_voltage_warning_high
Transceiver high voltage warning threshold (mV)
**type**\: int
**range:** 0..4294967295
.. attribute:: transceiver_voltage_warning_low
Transceiver low voltage warning threshold (mV)
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.field_validity = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds.FieldValidity()
self.field_validity.parent = self
self.laser_bias_alarm_high = None
self.laser_bias_alarm_low = None
self.laser_bias_warning_high = None
self.laser_bias_warning_low = None
self.optical_receive_power_alarm_high = None
self.optical_receive_power_alarm_low = None
self.optical_receive_power_warning_high = None
self.optical_receive_power_warning_low = None
self.optical_transmit_power_alarm_high = None
self.optical_transmit_power_alarm_low = None
self.optical_transmit_power_warning_high = None
self.optical_transmit_power_warning_low = None
self.transceiver_temperature_alarm_high = None
self.transceiver_temperature_alarm_low = None
self.transceiver_temperature_warning_high = None
self.transceiver_temperature_warning_low = None
self.transceiver_voltage_alarm_high = None
self.transceiver_voltage_alarm_low = None
self.transceiver_voltage_warning_high = None
self.transceiver_voltage_warning_low = None
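# Usage sketch: a threshold is only meaningful when the matching
# field_validity flag is non-zero, e.g. ('thresholds' is a hypothetical
# DigOptMonAlarmThresholds instance):
#
#     if thresholds.field_validity.temperature_valid:
#         high_c = thresholds.transceiver_temperature_alarm_high / 1000.0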
class FieldValidity(object):
"""
Field validity
.. attribute:: laser_bias_valid
The laser bias fields are valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: receive_power_valid
The receive power fields are valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: temperature_valid
The temperature fields are valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transmit_power_valid
The transmit power fields are valid
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: voltage_valid
The voltage fields are valid
**type**\: int
**range:** \-2147483648..2147483647
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.laser_bias_valid = None
self.receive_power_valid = None
self.temperature_valid = None
self.transmit_power_valid = None
self.voltage_valid = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:field-validity'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.laser_bias_valid is not None:
return True
if self.receive_power_valid is not None:
return True
if self.temperature_valid is not None:
return True
if self.transmit_power_valid is not None:
return True
if self.voltage_valid is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds.FieldValidity']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:dig-opt-mon-alarm-thresholds'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.field_validity is not None and self.field_validity._has_data():
return True
if self.laser_bias_alarm_high is not None:
return True
if self.laser_bias_alarm_low is not None:
return True
if self.laser_bias_warning_high is not None:
return True
if self.laser_bias_warning_low is not None:
return True
if self.optical_receive_power_alarm_high is not None:
return True
if self.optical_receive_power_alarm_low is not None:
return True
if self.optical_receive_power_warning_high is not None:
return True
if self.optical_receive_power_warning_low is not None:
return True
if self.optical_transmit_power_alarm_high is not None:
return True
if self.optical_transmit_power_alarm_low is not None:
return True
if self.optical_transmit_power_warning_high is not None:
return True
if self.optical_transmit_power_warning_low is not None:
return True
if self.transceiver_temperature_alarm_high is not None:
return True
if self.transceiver_temperature_alarm_low is not None:
return True
if self.transceiver_temperature_warning_high is not None:
return True
if self.transceiver_temperature_warning_low is not None:
return True
if self.transceiver_voltage_alarm_high is not None:
return True
if self.transceiver_voltage_alarm_low is not None:
return True
if self.transceiver_voltage_warning_high is not None:
return True
if self.transceiver_voltage_warning_low is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarmThresholds']['meta_info']
class DigOptMonAlarms(object):
"""
Digital Optical Monitoring alarms
.. attribute:: laser_bias_current
Laser Bias Current Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: received_laser_power
Received Optical Power Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: transceiver_temperature
Transceiver Temperature Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: transceiver_voltage
Transceiver Voltage Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: transmit_laser_power
Transmit Laser Power Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.laser_bias_current = None
self.received_laser_power = None
self.transceiver_temperature = None
self.transceiver_voltage = None
self.transmit_laser_power = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:dig-opt-mon-alarms'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.laser_bias_current is not None:
return True
if self.received_laser_power is not None:
return True
if self.transceiver_temperature is not None:
return True
if self.transceiver_voltage is not None:
return True
if self.transmit_laser_power is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.DigOptMonAlarms']['meta_info']
class Lane(object):
"""
Digital Optical Monitoring (per lane
information)
.. attribute:: center_wavelength
Center Wavelength (nm\*1000)
**type**\: int
**range:** 0..4294967295
.. attribute:: dig_opt_mon_alarm
Digital Optical Monitoring alarms
**type**\: :py:class:`DigOptMonAlarm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.Lane.DigOptMonAlarm>`
.. attribute:: laser_bias_current
Laser Bias Current (uAmps)
**type**\: int
**range:** 0..4294967295
.. attribute:: received_laser_power
Received Optical Power (dBm\*1000)
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: transmit_laser_power
Transmit Laser Power (dBm\*1000)
**type**\: int
**range:** \-2147483648..2147483647
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.center_wavelength = None
self.dig_opt_mon_alarm = EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.Lane.DigOptMonAlarm()
self.dig_opt_mon_alarm.parent = self
self.laser_bias_current = None
self.received_laser_power = None
self.transmit_laser_power = None
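# Usage sketch: per-lane optical readings use scaled integer units
# (dBm * 1000 for power, uA for bias current, nm * 1000 for wavelength):
#
#     rx_dbm = lane.received_laser_power / 1000.0
#     tx_dbm = lane.transmit_laser_power / 1000.0
#     bias_ma = lane.laser_bias_current / 1000.0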
class DigOptMonAlarm(object):
"""
Digital Optical Monitoring alarms
.. attribute:: laser_bias_current
Laser Bias Current Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: received_laser_power
Received Optical Power Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
.. attribute:: transmit_laser_power
Transmit Laser Power Alarm
**type**\: :py:class:`EtherDomAlarmEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherDomAlarmEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.laser_bias_current = None
self.received_laser_power = None
self.transmit_laser_power = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:dig-opt-mon-alarm'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.laser_bias_current is not None:
return True
if self.received_laser_power is not None:
return True
if self.transmit_laser_power is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.Lane.DigOptMonAlarm']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:lane'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.center_wavelength is not None:
return True
if self.dig_opt_mon_alarm is not None and self.dig_opt_mon_alarm._has_data():
return True
if self.laser_bias_current is not None:
return True
if self.received_laser_power is not None:
return True
if self.transmit_laser_power is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails.Lane']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:phy-details'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.dig_opt_mon_alarm_thresholds is not None and self.dig_opt_mon_alarm_thresholds._has_data():
return True
if self.dig_opt_mon_alarms is not None and self.dig_opt_mon_alarms._has_data():
return True
if self.lane is not None:
for child_ref in self.lane:
if child_ref._has_data():
return True
if self.lane_field_validity is not None and self.lane_field_validity._has_data():
return True
if self.optics_wavelength is not None:
return True
if self.transceiver_temperature is not None:
return True
if self.transceiver_voltage is not None:
return True
if self.vendor is not None:
return True
if self.vendor_part_number is not None:
return True
if self.vendor_serial_number is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.PhyDetails']['meta_info']
class FecDetails(object):
"""
Forward Error Correction information
.. attribute:: corrected_codeword_count
Corrected codeword error count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: fec
Port operational FEC type
**type**\: :py:class:`EthernetFecEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetFecEnum>`
.. attribute:: uncorrected_codeword_count
Uncorrected codeword error count
**type**\: long
**range:** 0..18446744073709551615
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.corrected_codeword_count = None
self.fec = None
self.uncorrected_codeword_count = None
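# Usage sketch: both codeword counters are cumulative 64-bit counts; a
# simple post-FEC health check ('fec' is a hypothetical FecDetails
# instance):
#
#     if fec.uncorrected_codeword_count:
#         print('post-FEC errors:', fec.uncorrected_codeword_count)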
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:fec-details'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.corrected_codeword_count is not None:
return True
if self.fec is not None:
return True
if self.uncorrected_codeword_count is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo.FecDetails']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:phy-info'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.fec_details is not None and self.fec_details._has_data():
return True
if self.loopback is not None:
return True
if self.media_type is not None:
return True
if self.phy_details is not None and self.phy_details._has_data():
return True
if self.phy_present is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.PhyInfo']['meta_info']
class Layer1Info(object):
"""
Layer 1 information
.. attribute:: autoneg
Port autonegotiation configuration settings
**type**\: :py:class:`Autoneg <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.Autoneg>`
.. attribute:: bandwidth_utilization
Bandwidth utilization (hundredths of a percent)
**type**\: int
**range:** 0..4294967295
.. attribute:: ber_monitoring
BER monitoring details
**type**\: :py:class:`BerMonitoring <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring>`
.. attribute:: current_alarms
Current alarms
**type**\: :py:class:`CurrentAlarms <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.CurrentAlarms>`
.. attribute:: duplex
Port operational duplexity
**type**\: :py:class:`EthernetDuplexEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetDuplexEnum>`
.. attribute:: error_counts
Statistics for detected errors
**type**\: :py:class:`ErrorCounts <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.ErrorCounts>`
.. attribute:: flowcontrol
Port operational flow control
**type**\: :py:class:`EtherFlowcontrolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherFlowcontrolEnum>`
.. attribute:: ipg
Port operational inter\-packet\-gap
**type**\: :py:class:`EthernetIpgEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetIpgEnum>`
.. attribute:: laser_squelch_enabled
Laser Squelch \- TRUE if enabled
**type**\: bool
.. attribute:: led_state
State of the LED
**type**\: :py:class:`EtherLedStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherLedStateEnum>`
.. attribute:: link_state
Link state
**type**\: :py:class:`EtherLinkStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherLinkStateEnum>`
.. attribute:: previous_alarms
Previous alarms
**type**\: :py:class:`PreviousAlarms <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.PreviousAlarms>`
.. attribute:: speed
Port operational speed
**type**\: :py:class:`EthernetSpeedEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetSpeedEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.autoneg = EthernetInterface.Interfaces.Interface.Layer1Info.Autoneg()
self.autoneg.parent = self
self.bandwidth_utilization = None
self.ber_monitoring = EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring()
self.ber_monitoring.parent = self
self.current_alarms = EthernetInterface.Interfaces.Interface.Layer1Info.CurrentAlarms()
self.current_alarms.parent = self
self.duplex = None
self.error_counts = EthernetInterface.Interfaces.Interface.Layer1Info.ErrorCounts()
self.error_counts.parent = self
self.flowcontrol = None
self.ipg = None
self.laser_squelch_enabled = None
self.led_state = None
self.link_state = None
self.previous_alarms = EthernetInterface.Interfaces.Interface.Layer1Info.PreviousAlarms()
self.previous_alarms.parent = self
self.speed = None
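# Usage sketch: bandwidth_utilization is expressed in hundredths of a
# percent, so a caller rescales it for display:
#
#     utilization_pct = layer1.bandwidth_utilization / 100.0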
class Autoneg(object):
"""
Port autonegotiation configuration settings
.. attribute:: autoneg_enabled
TRUE if autonegotiation is enabled
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: config_override
If true, configuration overrides negotiated settings. If false, negotiated settings in effect
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: duplex
Restricted duplex (if relevant bit is set in mask)
**type**\: :py:class:`EthernetDuplexEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetDuplexEnum>`
.. attribute:: flowcontrol
Restricted flowcontrol (if relevant bit is set in mask)
**type**\: :py:class:`EtherFlowcontrolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherFlowcontrolEnum>`
.. attribute:: mask
Validity mask\: 0x1 speed, 0x2 duplex, 0x4 flowcontrol
**type**\: int
**range:** 0..4294967295
.. attribute:: speed
Restricted speed (if relevant bit is set in mask)
**type**\: :py:class:`EthernetSpeedEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetSpeedEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.autoneg_enabled = None
self.config_override = None
self.duplex = None
self.flowcontrol = None
self.mask = None
self.speed = None
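# Usage sketch: 'mask' flags which restricted values are meaningful
# (0x1 speed, 0x2 duplex, 0x4 flowcontrol, per the docstring above):
#
#     if autoneg.mask & 0x1:
#         print('restricted speed:', autoneg.speed)
#     if autoneg.mask & 0x2:
#         print('restricted duplex:', autoneg.duplex)
#     if autoneg.mask & 0x4:
#         print('restricted flow control:', autoneg.flowcontrol)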
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:autoneg'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.autoneg_enabled is not None:
return True
if self.config_override is not None:
return True
if self.duplex is not None:
return True
if self.flowcontrol is not None:
return True
if self.mask is not None:
return True
if self.speed is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.Autoneg']['meta_info']
class CurrentAlarms(object):
"""
Current alarms
.. attribute:: hi_ber_alarm
Hi BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: local_fault_alarm
Local Fault
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: loss_of_synchronization_data_alarm
Loss of Synchronization Data
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: pcs_loss_of_block_lock_alarm
PCS Loss of Block Lock
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: received_loss_of_signal_alarm
Received Loss of Signal
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: remote_fault_alarm
Remote Fault
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: sd_ber_alarm
SD BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: sf_ber_alarm
SF BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: squelch_alarm
Squelch
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.hi_ber_alarm = None
self.local_fault_alarm = None
self.loss_of_synchronization_data_alarm = None
self.pcs_loss_of_block_lock_alarm = None
self.received_loss_of_signal_alarm = None
self.remote_fault_alarm = None
self.sd_ber_alarm = None
self.sf_ber_alarm = None
self.squelch_alarm = None
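# Usage sketch: each attribute holds an EthCtrlrAlarmStateEnum (or None when
# unset); a quick scan for populated alarm fields could look like:
#
#     asserted = [name for name, val in vars(alarms).items()
#                 if name.endswith('_alarm') and val is not None]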
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path + '/Cisco-IOS-XR-drivers-media-eth-oper:current-alarms'
def is_config(self):
''' Returns True if this instance represents config data, otherwise False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.hi_ber_alarm is not None:
return True
if self.local_fault_alarm is not None:
return True
if self.loss_of_synchronization_data_alarm is not None:
return True
if self.pcs_loss_of_block_lock_alarm is not None:
return True
if self.received_loss_of_signal_alarm is not None:
return True
if self.remote_fault_alarm is not None:
return True
if self.sd_ber_alarm is not None:
return True
if self.sf_ber_alarm is not None:
return True
if self.squelch_alarm is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.CurrentAlarms']['meta_info']
class PreviousAlarms(object):
"""
Previous alarms
.. attribute:: hi_ber_alarm
Hi BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: local_fault_alarm
Local Fault
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: loss_of_synchronization_data_alarm
Loss of Synchronization Data
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: pcs_loss_of_block_lock_alarm
PCS Loss of Block Lock
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: received_loss_of_signal_alarm
Received Loss of Signal
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: remote_fault_alarm
Remote Fault
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: sd_ber_alarm
SD BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: sf_ber_alarm
SF BER
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
.. attribute:: squelch_alarm
Squelch
**type**\: :py:class:`EthCtrlrAlarmStateEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthCtrlrAlarmStateEnum>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.hi_ber_alarm = None
self.local_fault_alarm = None
self.loss_of_synchronization_data_alarm = None
self.pcs_loss_of_block_lock_alarm = None
self.received_loss_of_signal_alarm = None
self.remote_fault_alarm = None
self.sd_ber_alarm = None
self.sf_ber_alarm = None
self.squelch_alarm = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:previous-alarms'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.hi_ber_alarm is not None:
return True
if self.local_fault_alarm is not None:
return True
if self.loss_of_synchronization_data_alarm is not None:
return True
if self.pcs_loss_of_block_lock_alarm is not None:
return True
if self.received_loss_of_signal_alarm is not None:
return True
if self.remote_fault_alarm is not None:
return True
if self.sd_ber_alarm is not None:
return True
if self.sf_ber_alarm is not None:
return True
if self.squelch_alarm is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.PreviousAlarms']['meta_info']
class ErrorCounts(object):
"""
Statistics for detected errors
.. attribute:: pcsbip_errors
PCS BIP error count
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: sync_header_errors
Sync\-header error count
**type**\: long
**range:** 0..18446744073709551615
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.pcsbip_errors = None
self.sync_header_errors = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:error-counts'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.pcsbip_errors is not None:
return True
if self.sync_header_errors is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.ErrorCounts']['meta_info']
class BerMonitoring(object):
"""
BER monitoring details
.. attribute:: settings
The BER monitoring settings to be applied
**type**\: :py:class:`Settings <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring.Settings>`
.. attribute:: supported
Whether or not BER monitoring is supported
**type**\: int
**range:** \-2147483648..2147483647
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.settings = EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring.Settings()
self.settings.parent = self
self.supported = None
class Settings(object):
"""
The BER monitoring settings to be applied
.. attribute:: signal_degrade_alarm
Report alarm to indicate signal degrade
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: signal_degrade_threshold
BER threshold for signal to degrade
**type**\: int
**range:** 0..4294967295
.. attribute:: signal_fail_alarm
Report alarm to indicate signal failure
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: signal_fail_threshold
BER threshold for signal to fail
**type**\: int
**range:** 0..4294967295
.. attribute:: signal_remote_fault
Whether drivers should signal remote faults
**type**\: int
**range:** \-2147483648..2147483647
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.signal_degrade_alarm = None
self.signal_degrade_threshold = None
self.signal_fail_alarm = None
self.signal_fail_threshold = None
self.signal_remote_fault = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:settings'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.signal_degrade_alarm is not None:
return True
if self.signal_degrade_threshold is not None:
return True
if self.signal_fail_alarm is not None:
return True
if self.signal_fail_threshold is not None:
return True
if self.signal_remote_fault is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring.Settings']['meta_info']
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:ber-monitoring'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.settings is not None and self.settings._has_data():
return True
if self.supported is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info.BerMonitoring']['meta_info']
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:layer1-info'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.autoneg is not None and self.autoneg._has_data():
return True
if self.bandwidth_utilization is not None:
return True
if self.ber_monitoring is not None and self.ber_monitoring._has_data():
return True
if self.current_alarms is not None and self.current_alarms._has_data():
return True
if self.duplex is not None:
return True
if self.error_counts is not None and self.error_counts._has_data():
return True
if self.flowcontrol is not None:
return True
if self.ipg is not None:
return True
if self.laser_squelch_enabled is not None:
return True
if self.led_state is not None:
return True
if self.link_state is not None:
return True
if self.previous_alarms is not None and self.previous_alarms._has_data():
return True
if self.speed is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.Layer1Info']['meta_info']
class MacInfo(object):
"""
MAC Layer information
.. attribute:: burned_in_mac_address
Port Burned\-In MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: mru
Port operational MRU
**type**\: int
**range:** 0..4294967295
.. attribute:: mtu
Port operational MTU
**type**\: int
**range:** 0..4294967295
.. attribute:: multicast_mac_filters
Port multicast MAC filter information
**type**\: :py:class:`MulticastMacFilters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.MacInfo.MulticastMacFilters>`
.. attribute:: operational_mac_address
Port operational MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: unicast_mac_filters
Port unicast MAC filter information
**type**\: :py:class:`UnicastMacFilters <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.MacInfo.UnicastMacFilters>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.burned_in_mac_address = None
self.mru = None
self.mtu = None
self.multicast_mac_filters = EthernetInterface.Interfaces.Interface.MacInfo.MulticastMacFilters()
self.multicast_mac_filters.parent = self
self.operational_mac_address = None
self.unicast_mac_filters = EthernetInterface.Interfaces.Interface.MacInfo.UnicastMacFilters()
self.unicast_mac_filters.parent = self
class UnicastMacFilters(object):
"""
Port unicast MAC filter information
.. attribute:: unicast_mac_address
MAC addresses in the unicast ingress destination MAC filter
**type**\: list of str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.unicast_mac_address = YLeafList()
self.unicast_mac_address.parent = self
self.unicast_mac_address.name = 'unicast_mac_address'
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:unicast-mac-filters'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.unicast_mac_address is not None:
for child in self.unicast_mac_address:
if child is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.MacInfo.UnicastMacFilters']['meta_info']
class MulticastMacFilters(object):
"""
Port multicast MAC filter information
.. attribute:: multicast_mac_address
MAC addresses in the multicast ingress destination MAC filter
**type**\: list of :py:class:`MulticastMacAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Interfaces.Interface.MacInfo.MulticastMacFilters.MulticastMacAddress>`
.. attribute:: multicast_promiscuous
Whether the port is in multicast promiscuous mode
**type**\: bool
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.multicast_mac_address = YList()
self.multicast_mac_address.parent = self
self.multicast_mac_address.name = 'multicast_mac_address'
self.multicast_promiscuous = None
class MulticastMacAddress(object):
"""
MAC addresses in the multicast ingress
destination MAC filter
.. attribute:: mac_address
MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: mask
Mask for this MAC address
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.mac_address = None
self.mask = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:multicast-mac-address'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.mac_address is not None:
return True
if self.mask is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.MacInfo.MulticastMacFilters.MulticastMacAddress']['meta_info']
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:multicast-mac-filters'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.multicast_mac_address is not None:
for child_ref in self.multicast_mac_address:
if child_ref._has_data():
return True
if self.multicast_promiscuous is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.MacInfo.MulticastMacFilters']['meta_info']
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:mac-info'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.burned_in_mac_address is not None:
return True
if self.mru is not None:
return True
if self.mtu is not None:
return True
if self.multicast_mac_filters is not None and self.multicast_mac_filters._has_data():
return True
if self.operational_mac_address is not None:
return True
if self.unicast_mac_filters is not None and self.unicast_mac_filters._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.MacInfo']['meta_info']
class TransportInfo(object):
"""
Transport state information
.. attribute:: ains_status
AINS Soak status
**type**\: :py:class:`EtherAinsStatusEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EtherAinsStatusEnum>`
.. attribute:: maintenance_mode_enabled
Maintenance Mode \- TRUE if enabled
**type**\: bool
.. attribute:: remaining_duration
Remaining duration (seconds) of AINS soak timer
**type**\: int
**range:** 0..4294967295
.. attribute:: total_duration
Total duration (seconds) of AINS soak timer
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.ains_status = None
self.maintenance_mode_enabled = None
self.remaining_duration = None
self.total_duration = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:transport-info'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.ains_status is not None:
return True
if self.maintenance_mode_enabled is not None:
return True
if self.remaining_duration is not None:
return True
if self.total_duration is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface.TransportInfo']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:interfaces/Cisco-IOS-XR-drivers-media-eth-oper:interface[Cisco-IOS-XR-drivers-media-eth-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.admin_state is not None:
return True
if self.layer1_info is not None and self.layer1_info._has_data():
return True
if self.mac_info is not None and self.mac_info._has_data():
return True
if self.oper_state_up is not None:
return True
if self.phy_info is not None and self.phy_info._has_data():
return True
if self.transport_info is not None and self.transport_info._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces.Interface']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:interfaces'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Interfaces']['meta_info']
class Berts(object):
"""
Ethernet controller BERT table
.. attribute:: bert
Ethernet BERT information
**type**\: list of :py:class:`Bert <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Berts.Bert>`
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.bert = YList()
self.bert.parent = self
self.bert.name = 'bert'
class Bert(object):
"""
Ethernet BERT information
.. attribute:: interface_name <key>
The name of the interface
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: bert_status
Current test status
**type**\: :py:class:`BertStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetInterface.Berts.Bert.BertStatus>`
.. attribute:: port_bert_interval
Port BERT interval
**type**\: int
**range:** 0..4294967295
.. attribute:: time_left
Remaining time for this test in seconds
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.interface_name = None
self.bert_status = EthernetInterface.Berts.Bert.BertStatus()
self.bert_status.parent = self
self.port_bert_interval = None
self.time_left = None
class BertStatus(object):
"""
Current test status
.. attribute:: bert_state_enabled
State
**type**\: bool
.. attribute:: data_availability
Flag indicating available data
**type**\: int
**range:** 0..4294967295
.. attribute:: device_under_test
Device being tested
**type**\: :py:class:`EthernetDevEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetDevEnum>`
.. attribute:: error_type
Bit, block or frame error
**type**\: :py:class:`EthernetBertErrCntEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetBertErrCntEnum>`
.. attribute:: interface_device
Interface being tested
**type**\: :py:class:`EthernetDevIfEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetDevIfEnum>`
.. attribute:: receive_count
Receive count (if 0x1 set in flag)
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: receive_errors
Received errors (if 0x4 set in flag)
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: test_pattern
Test pattern
**type**\: :py:class:`EthernetBertPatternEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_drivers_media_eth_oper.EthernetBertPatternEnum>`
.. attribute:: transmit_count
Transmit count (if 0x2 set in flag)
**type**\: long
**range:** 0..18446744073709551615
"""
_prefix = 'drivers-media-eth-oper'
_revision = '2015-10-14'
def __init__(self):
self.parent = None
self.bert_state_enabled = None
self.data_availability = None
self.device_under_test = None
self.error_type = None
self.interface_device = None
self.receive_count = None
self.receive_errors = None
self.test_pattern = None
self.transmit_count = None
@property
def _common_path(self):
if self.parent is None:
            raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-drivers-media-eth-oper:bert-status'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.bert_state_enabled is not None:
return True
if self.data_availability is not None:
return True
if self.device_under_test is not None:
return True
if self.error_type is not None:
return True
if self.interface_device is not None:
return True
if self.receive_count is not None:
return True
if self.receive_errors is not None:
return True
if self.test_pattern is not None:
return True
if self.transmit_count is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Berts.Bert.BertStatus']['meta_info']
@property
def _common_path(self):
if self.interface_name is None:
raise YPYModelError('Key property interface_name is None')
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:berts/Cisco-IOS-XR-drivers-media-eth-oper:bert[Cisco-IOS-XR-drivers-media-eth-oper:interface-name = ' + str(self.interface_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.interface_name is not None:
return True
if self.bert_status is not None and self.bert_status._has_data():
return True
if self.port_bert_interval is not None:
return True
if self.time_left is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Berts.Bert']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface/Cisco-IOS-XR-drivers-media-eth-oper:berts'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.bert is not None:
for child_ref in self.bert:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface.Berts']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-drivers-media-eth-oper:ethernet-interface'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.berts is not None and self.berts._has_data():
return True
if self.interfaces is not None and self.interfaces._has_data():
return True
if self.statistics is not None and self.statistics._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_drivers_media_eth_oper as meta
return meta._meta_table['EthernetInterface']['meta_info']
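# --- Illustrative usage (not part of the generated bindings) ---
# A hedged sketch of how this operational model is typically read with a
# YDK CRUD service. The provider address and credentials are placeholders,
# and the exact NetconfServiceProvider signature varies between YDK
# releases, so treat this as a sketch rather than a verbatim recipe.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#
#   provider = NetconfServiceProvider(address='192.0.2.1',
#                                     username='admin', password='admin')
#   eth = EthernetInterface()                     # top-level container above
#   eth_oper = CRUDService().read(provider, eth)  # fills interfaces/berts
#   for iface in eth_oper.interfaces.interface:
#       print(iface.interface_name, iface.oper_state_up)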
|
apache-2.0
| 5,103,785,533,784,643,000
| 29.011349
| 325
| 0.500112
| false
| 3.974147
| false
| false
| false
|
gurneyalex/odoo
|
addons/stock/wizard/stock_picking_return.py
|
4
|
10775
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools.float_utils import float_round
class ReturnPickingLine(models.TransientModel):
_name = "stock.return.picking.line"
_rec_name = 'product_id'
_description = 'Return Picking Line'
product_id = fields.Many2one('product.product', string="Product", required=True, domain="[('id', '=', product_id)]")
quantity = fields.Float("Quantity", digits='Product Unit of Measure', required=True)
uom_id = fields.Many2one('uom.uom', string='Unit of Measure', related='move_id.product_uom', readonly=False)
wizard_id = fields.Many2one('stock.return.picking', string="Wizard")
move_id = fields.Many2one('stock.move', "Move")
class ReturnPicking(models.TransientModel):
_name = 'stock.return.picking'
_description = 'Return Picking'
@api.model
def default_get(self, fields):
if len(self.env.context.get('active_ids', list())) > 1:
raise UserError(_("You may only return one picking at a time."))
res = super(ReturnPicking, self).default_get(fields)
if self.env.context.get('active_id') and self.env.context.get('active_model') == 'stock.picking':
picking = self.env['stock.picking'].browse(self.env.context.get('active_id'))
if picking.exists():
res.update({'picking_id': picking.id})
return res
picking_id = fields.Many2one('stock.picking')
product_return_moves = fields.One2many('stock.return.picking.line', 'wizard_id', 'Moves')
move_dest_exists = fields.Boolean('Chained Move Exists', readonly=True)
original_location_id = fields.Many2one('stock.location')
parent_location_id = fields.Many2one('stock.location')
company_id = fields.Many2one(related='picking_id.company_id')
location_id = fields.Many2one(
'stock.location', 'Return Location',
domain="['|', ('id', '=', original_location_id), '|', '&', ('return_location', '=', True), ('company_id', '=', False), '&', ('return_location', '=', True), ('company_id', '=', company_id)]")
@api.onchange('picking_id')
def _onchange_picking_id(self):
move_dest_exists = False
product_return_moves = [(5,)]
if self.picking_id and self.picking_id.state != 'done':
raise UserError(_("You may only return Done pickings."))
# In case we want to set specific default values (e.g. 'to_refund'), we must fetch the
# default values for creation.
line_fields = [f for f in self.env['stock.return.picking.line']._fields.keys()]
product_return_moves_data_tmpl = self.env['stock.return.picking.line'].default_get(line_fields)
for move in self.picking_id.move_lines:
if move.state == 'cancel':
continue
if move.scrapped:
continue
if move.move_dest_ids:
move_dest_exists = True
product_return_moves_data = dict(product_return_moves_data_tmpl)
product_return_moves_data.update(self._prepare_stock_return_picking_line_vals_from_move(move))
product_return_moves.append((0, 0, product_return_moves_data))
if self.picking_id and not product_return_moves:
raise UserError(_("No products to return (only lines in Done state and not fully returned yet can be returned)."))
if self.picking_id:
self.product_return_moves = product_return_moves
self.move_dest_exists = move_dest_exists
self.parent_location_id = self.picking_id.picking_type_id.warehouse_id and self.picking_id.picking_type_id.warehouse_id.view_location_id.id or self.picking_id.location_id.location_id.id
self.original_location_id = self.picking_id.location_id.id
location_id = self.picking_id.location_id.id
if self.picking_id.picking_type_id.return_picking_type_id.default_location_dest_id.return_location:
location_id = self.picking_id.picking_type_id.return_picking_type_id.default_location_dest_id.id
self.location_id = location_id
@api.model
def _prepare_stock_return_picking_line_vals_from_move(self, stock_move):
quantity = stock_move.product_qty
for move in stock_move.move_dest_ids:
if move.origin_returned_move_id and move.origin_returned_move_id != stock_move:
continue
if move.state in ('partially_available', 'assigned'):
quantity -= sum(move.move_line_ids.mapped('product_qty'))
            elif move.state == 'done':
quantity -= move.product_qty
quantity = float_round(quantity, precision_rounding=stock_move.product_uom.rounding)
return {
'product_id': stock_move.product_id.id,
'quantity': quantity,
'move_id': stock_move.id,
'uom_id': stock_move.product_id.uom_id.id,
}
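    # Illustrative walk-through (hedged; the numbers are made up): for a move
    # of 10.0 units with one destination move of 4.0 already 'done' and
    # another with 2.0 reserved, the helper above proposes
    # 10.0 - 4.0 - 2.0 = 4.0 as the default returnable quantity, rounded to
    # the move's UoM precision.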
def _prepare_move_default_values(self, return_line, new_picking):
vals = {
'product_id': return_line.product_id.id,
'product_uom_qty': return_line.quantity,
'product_uom': return_line.product_id.uom_id.id,
'picking_id': new_picking.id,
'state': 'draft',
'date_expected': fields.Datetime.now(),
'location_id': return_line.move_id.location_dest_id.id,
'location_dest_id': self.location_id.id or return_line.move_id.location_id.id,
'picking_type_id': new_picking.picking_type_id.id,
'warehouse_id': self.picking_id.picking_type_id.warehouse_id.id,
'origin_returned_move_id': return_line.move_id.id,
'procure_method': 'make_to_stock',
}
return vals
def _create_returns(self):
# TODO sle: the unreserve of the next moves could be less brutal
for return_move in self.product_return_moves.mapped('move_id'):
return_move.move_dest_ids.filtered(lambda m: m.state not in ('done', 'cancel'))._do_unreserve()
# create new picking for returned products
picking_type_id = self.picking_id.picking_type_id.return_picking_type_id.id or self.picking_id.picking_type_id.id
new_picking = self.picking_id.copy({
'move_lines': [],
'picking_type_id': picking_type_id,
'state': 'draft',
'origin': _("Return of %s") % self.picking_id.name,
'location_id': self.picking_id.location_dest_id.id,
'location_dest_id': self.location_id.id})
new_picking.message_post_with_view('mail.message_origin_link',
values={'self': new_picking, 'origin': self.picking_id},
subtype_id=self.env.ref('mail.mt_note').id)
returned_lines = 0
for return_line in self.product_return_moves:
if not return_line.move_id:
raise UserError(_("You have manually created product lines, please delete them to proceed."))
# TODO sle: float_is_zero?
if return_line.quantity:
returned_lines += 1
vals = self._prepare_move_default_values(return_line, new_picking)
r = return_line.move_id.copy(vals)
vals = {}
# +--------------------------------------------------------------------------------------------------------+
# | picking_pick <--Move Orig-- picking_pack --Move Dest--> picking_ship
# | | returned_move_ids ↑ | returned_move_ids
# | ↓ | return_line.move_id ↓
# | return pick(Add as dest) return toLink return ship(Add as orig)
# +--------------------------------------------------------------------------------------------------------+
move_orig_to_link = return_line.move_id.move_dest_ids.mapped('returned_move_ids')
# link to original move
move_orig_to_link |= return_line.move_id
# link to siblings of original move, if any
            move_orig_to_link |= return_line.move_id\
                .mapped('move_dest_ids').filtered(lambda m: m.state not in ('cancel',))\
                .mapped('move_orig_ids').filtered(lambda m: m.state not in ('cancel',))
move_dest_to_link = return_line.move_id.move_orig_ids.mapped('returned_move_ids')
# link to children of originally returned moves, if any. Note that the use of
# 'return_line.move_id.move_orig_ids.returned_move_ids.move_orig_ids.move_dest_ids'
# instead of 'return_line.move_id.move_orig_ids.move_dest_ids' prevents linking a
# return directly to the destination moves of its parents. However, the return of
# the return will be linked to the destination moves.
            move_dest_to_link |= return_line.move_id.move_orig_ids.mapped('returned_move_ids')\
                .mapped('move_orig_ids').filtered(lambda m: m.state not in ('cancel',))\
                .mapped('move_dest_ids').filtered(lambda m: m.state not in ('cancel',))
vals['move_orig_ids'] = [(4, m.id) for m in move_orig_to_link]
vals['move_dest_ids'] = [(4, m.id) for m in move_dest_to_link]
r.write(vals)
if not returned_lines:
raise UserError(_("Please specify at least one non-zero quantity."))
new_picking.action_confirm()
new_picking.action_assign()
return new_picking.id, picking_type_id
def create_returns(self):
for wizard in self:
new_picking_id, pick_type_id = wizard._create_returns()
# Override the context to disable all the potential filters that could have been set previously
ctx = dict(self.env.context)
ctx.update({
'default_partner_id': self.picking_id.partner_id.id,
'search_default_picking_type_id': pick_type_id,
'search_default_draft': False,
'search_default_assigned': False,
'search_default_confirmed': False,
'search_default_ready': False,
'search_default_late': False,
'search_default_available': False,
})
return {
'name': _('Returned Picking'),
'view_mode': 'form,tree,calendar',
'res_model': 'stock.picking',
'res_id': new_picking_id,
'type': 'ir.actions.act_window',
'context': ctx,
}
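# Illustrative usage (hedged): the wizard is driven through the context's
# active_id/active_model, mirroring what the stock UI does. `picking` below
# is a placeholder record, as this would normally run from a server action
# or a test case.
#
#   wizard = env['stock.return.picking'].with_context(
#       active_id=picking.id, active_model='stock.picking').create({})
#   wizard._onchange_picking_id()      # populate product_return_moves
#   action = wizard.create_returns()   # create, confirm and assign the return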
|
agpl-3.0
| 6,509,121,793,401,225,000
| 54.225641
| 198
| 0.58297
| false
| 3.840585
| false
| false
| false
|
hrishioa/Aviato
|
flask/Scripts/gdal_polygonize.py
|
1
|
6499
|
#!C:\Users\SeanSaito\Dev\aviato\flask\Scripts\python.exe
# -*- coding: utf-8 -*-
#******************************************************************************
# $Id$
#
# Project: GDAL Python Interface
# Purpose: Application for converting raster data to a vector polygon layer.
# Author: Frank Warmerdam, warmerdam@pobox.com
#
#******************************************************************************
# Copyright (c) 2008, Frank Warmerdam
# Copyright (c) 2009-2013, Even Rouault <even dot rouault at mines-paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#******************************************************************************
try:
from osgeo import gdal, ogr, osr
except ImportError:
import gdal, ogr, osr
import sys
import os.path
def Usage():
print("""
gdal_polygonize [-8] [-nomask] [-mask filename] raster_file [-b band]
[-q] [-f ogr_format] out_file [layer] [fieldname]
""")
sys.exit(1)
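# Example invocation (illustrative; the file names are placeholders):
#
#   gdal_polygonize.py -8 input.tif -f "ESRI Shapefile" output.shp polygons DN
#
# i.e. 8-connectedness, reading band 1 of input.tif and writing polygons,
# each carrying its pixel value in field "DN", to a new shapefile layer.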
# =============================================================================
# Mainline
# =============================================================================
format = 'GML'
options = []
quiet_flag = 0
src_filename = None
src_band_n = 1
dst_filename = None
dst_layername = None
dst_fieldname = None
dst_field = -1
mask = 'default'
gdal.AllRegister()
argv = gdal.GeneralCmdLineProcessor( sys.argv )
if argv is None:
sys.exit( 0 )
# Parse command line arguments.
i = 1
while i < len(argv):
arg = argv[i]
if arg == '-f':
i = i + 1
format = argv[i]
elif arg == '-q' or arg == '-quiet':
quiet_flag = 1
elif arg == '-8':
options.append('8CONNECTED=8')
elif arg == '-nomask':
mask = 'none'
elif arg == '-mask':
i = i + 1
mask = argv[i]
elif arg == '-b':
i = i + 1
src_band_n = int(argv[i])
elif src_filename is None:
src_filename = argv[i]
elif dst_filename is None:
dst_filename = argv[i]
elif dst_layername is None:
dst_layername = argv[i]
elif dst_fieldname is None:
dst_fieldname = argv[i]
else:
Usage()
i = i + 1
if src_filename is None or dst_filename is None:
Usage()
if dst_layername is None:
dst_layername = 'out'
# =============================================================================
# Verify we have next gen bindings with the polygonize method.
# =============================================================================
try:
gdal.Polygonize
except:
print('')
print('gdal.Polygonize() not available. You are likely using "old gen"')
print('bindings or an older version of the next gen bindings.')
print('')
sys.exit(1)
# =============================================================================
# Open source file
# =============================================================================
src_ds = gdal.Open( src_filename )
if src_ds is None:
print('Unable to open %s' % src_filename)
sys.exit(1)
srcband = src_ds.GetRasterBand(src_band_n)
if mask == 'default':
maskband = srcband.GetMaskBand()
elif mask == 'none':
maskband = None
else:
mask_ds = gdal.Open( mask )
maskband = mask_ds.GetRasterBand(1)
# =============================================================================
# Try opening the destination file as an existing file.
# =============================================================================
try:
gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
dst_ds = ogr.Open( dst_filename, update=1 )
gdal.PopErrorHandler()
except:
dst_ds = None
# =============================================================================
# Create output file.
# =============================================================================
if dst_ds is None:
drv = ogr.GetDriverByName(format)
if not quiet_flag:
print('Creating output %s of format %s.' % (dst_filename, format))
dst_ds = drv.CreateDataSource( dst_filename )
# =============================================================================
# Find or create destination layer.
# =============================================================================
try:
dst_layer = dst_ds.GetLayerByName(dst_layername)
except:
dst_layer = None
if dst_layer is None:
srs = None
if src_ds.GetProjectionRef() != '':
srs = osr.SpatialReference()
srs.ImportFromWkt( src_ds.GetProjectionRef() )
dst_layer = dst_ds.CreateLayer(dst_layername, srs = srs )
if dst_fieldname is None:
dst_fieldname = 'DN'
fd = ogr.FieldDefn( dst_fieldname, ogr.OFTInteger )
dst_layer.CreateField( fd )
dst_field = 0
else:
if dst_fieldname is not None:
dst_field = dst_layer.GetLayerDefn().GetFieldIndex(dst_fieldname)
if dst_field < 0:
print("Warning: cannot find field '%s' in layer '%s'" % (dst_fieldname, dst_layername))
# =============================================================================
# Invoke algorithm.
# =============================================================================
if quiet_flag:
prog_func = None
else:
prog_func = gdal.TermProgress
result = gdal.Polygonize( srcband, maskband, dst_layer, dst_field, options,
callback = prog_func )
srcband = None
src_ds = None
dst_ds = None
mask_ds = None
|
gpl-2.0
| -8,949,319,689,744,370,000
| 28.274775
| 99
| 0.511002
| false
| 4.163357
| false
| false
| false
|
3ffusi0on/Addicted-to-XDCC
|
Addict7ed.py
|
1
|
3211
|
#!/usr/bin/env python3.4
import sys
from PyQt4 import QtGui
import re
import os
import subprocess
import urllib.request
import urllib.error
import hashlib
#TODO
#-input for the link of xdcc server
#-dl button ? or automatize the action
#- /!\ Configuration file /!\
def get_hash(name):
readsize = 64 * 1024
with open(name, 'rb') as f:
size = os.path.getsize(name)
data = f.read(readsize)
f.seek(-readsize, os.SEEK_END)
data += f.read(readsize)
return hashlib.md5(data).hexdigest()
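# Note (hedged): this is the hashing scheme used by the SubDB API referenced
# below -- the MD5 of the first and last 64 KiB of the video file. Files
# smaller than 64 KiB would make the negative seek above fail; the code does
# not guard against that.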
class UI(QtGui.QWidget):
def __init__(self):
super(UI, self).__init__()
self.initUI()
def initUI(self):
self.setGeometry(20, 40, 300, 120)
self.setWindowTitle('Addict7ed-to-Xdcc')
self.link = QtGui.QLineEdit()
        #TODO make it like a prompt
self.link.setText("Xdcc link...")
#xdcc file download button
downloadMovieButton = QtGui.QPushButton('Get movie')
downloadMovieButton.resize(downloadMovieButton.sizeHint())
downloadMovieButton.clicked.connect(self.downloadXdccFile)
#pick file button
pickButton = QtGui.QPushButton('Open...')
pickButton.resize(pickButton.sizeHint())
pickButton.clicked.connect(self.selectFile)
#selected file
self.filename = QtGui.QLabel()
self.filename.setText("...")
#subtitle download button
downloadSubButton = QtGui.QPushButton('Get Subtitle')
downloadSubButton.resize(downloadSubButton.sizeHint())
downloadSubButton.clicked.connect(self.downloadSubtitle)
## Layouts
vbox = QtGui.QVBoxLayout()
vbox.addStretch(1)
vbox.addWidget(self.link)
vbox.addWidget(downloadMovieButton)
vbox.addWidget(pickButton)
vbox.addWidget(self.filename)
vbox.addWidget(downloadSubButton)
self.setLayout(vbox)
self.show()
def selectFile(self):
self.filename.setText(QtGui.QFileDialog.getOpenFileName())
print(self.filename.text())
def downloadXdccFile(self):
print("TODO")
def downloadSubtitle(self):
filename = self.filename.text()
track_hash = get_hash(filename)
headers = { 'User-Agent' : 'SubDB/1.0 (Addict7ed-to-Xdcc/1.0; http://github.com/3ffusi0on/Addict7ed-to-Xdcc)' }
url = "http://api.thesubdb.com/?action=download&hash=" + track_hash + "&language=en"
try:
request = urllib.request.Request(url, None, headers)
response = urllib.request.urlopen(request).read()
print(response)
            #Saving the subtitle file
dest_file = filename.replace(filename[-3:], 'srt')
print("Saving subtitle as :" + dest_file)
subtitle_file = open(dest_file, 'wb')
subtitle_file.write(response)
subtitle_file.close()
except urllib.error.HTTPError as e:
#TODO check error (missing subtitle on server)
if e.code == 404:
print("404 Not Found: No subtitle available for the movie")
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
ui = UI()
sys.exit(app.exec_())
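# Running the tool (illustrative): executing the module directly, e.g.
#
#   python3 Addict7ed.py
#
# opens the small PyQt4 window defined above; "Open..." picks a movie file
# and "Get Subtitle" saves a matching .srt from thesubdb.com next to it.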
|
gpl-2.0
| -566,545,068,900,936,960
| 29.875
| 119
| 0.624416
| false
| 3.827175
| false
| false
| false
|
fp12/achallonge
|
challonge/enums.py
|
1
|
1529
|
from enum import Enum
class TournamentState(Enum):
""" State a tournament can be in """
pending = 'pending'
open_ = 'open' #: can't use `open`
complete = 'complete'
in_progress = 'in progress'
class TournamentType(Enum):
""" Type of a tournament """
single_elimination = 'single elimination'
double_elimination = 'double elimination'
round_robin = 'round robin'
swiss = 'swiss'
class TournamentStateResult(Enum):
""" State given from the Challonge API.
Can be different from :class:`TournamentState`
"""
underway = 0
pending = 1
class DoubleEliminationEnding(Enum):
""" Type of ending for double elimination tournaments """
default = None #: give the winners bracket finalist two chances to beat the losers bracket finalist
single_match = 'single_match' #: create only one grand finals match
no_grand_finals = 'skip' #: don't create a finals match between winners and losers bracket finalists
class RankingOrder(Enum):
""" Order the ranking should be built upon """
match_wins = 'match wins'
game_wins = 'game wins'
points_scored = 'points scored'
points_difference = 'points difference'
custom = 'custom'
class Pairing(Enum):
""" Method of participant pairing when building matches """
seeds = 0
sequential = 1
class MatchState(Enum):
""" State a match can be in """
all_ = 'all' #: can't use `all`
open_ = 'open' #: can't use `open`
pending = 'pending'
complete = 'complete'
|
mit
| 7,507,871,851,981,742,000
| 26.8
| 105
| 0.657292
| false
| 3.675481
| false
| false
| false
|
BhallaLab/moose
|
moose-gui/plugins/kkitOrdinateUtil.py
|
1
|
19921
|
__author__ = "HarshaRani"
__credits__ = ["Upi Lab"]
__license__ = "GPL3"
__version__ = "1.0.0"
__maintainer__ = "HarshaRani"
__email__ = "hrani@ncbs.res.in"
__status__ = "Development"
__updated__ = "Oct 26 2018"
'''
2018
Oct 26: xfer molecules are not put into screen
Sep 28: to zoom the kkit co-ordinates a factor of w=1000 and h=800 is multipled here
2017
Oct 18: moved some function to kkitUtil
getxyCord, etc function are added
'''
import collections
from moose import *
import numpy as np
from moose import wildcardFind,element,PoolBase,CplxEnzBase,Annotator,exists
from networkx.drawing.nx_agraph import graphviz_layout
import networkx as nx
from kkitUtil import getRandColor,colorCheck,findCompartment, findGroup, findGroup_compt, mooseIsInstance
from PyQt4.QtGui import QColor
import re
import moose._moose as moose
def getxyCord(xcord,ycord,list1):
for item in list1:
# if isinstance(item,Function):
# objInfo = element(item.parent).path+'/info'
# else:
# objInfo = item.path+'/info'
if not isinstance(item,Function):
objInfo = item.path+'/info'
xcord.append(xyPosition(objInfo,'x'))
ycord.append(xyPosition(objInfo,'y'))
def xyPosition(objInfo,xory):
try:
return(float(element(objInfo).getField(xory)))
except ValueError:
return (float(0))
'''
def mooseIsInstance(melement, classNames):
return element(melement).__class__.__name__ in classNames
def findCompartment(melement):
while not mooseIsInstance(melement, ["CubeMesh", "CyclMesh"]):
melement = melement.parent
return melement
def findGroup(melement):
while not mooseIsInstance(melement, ["Neutral"]):
melement = melement.parent
return melement
def findGroup_compt(melement):
while not (mooseIsInstance(melement, ["Neutral","CubeMesh", "CyclMesh"])):
melement = melement.parent
return melement
'''
def populateMeshEntry(meshEntry,parent,types,obj):
#print " parent ",parent, "types ",types, " obj ",obj
try:
value = meshEntry[element(parent.path)][types]
except KeyError:
# Key is not present
meshEntry[element(parent.path)].update({types :[element(obj)]})
else:
mlist = meshEntry[element(parent.path)][types]
mlist.append(element(obj))
def updateMeshObj(modelRoot):
print " updateMeshObj "
meshEntry = {}
if meshEntry:
meshEntry.clear()
else:
meshEntry = {}
objPar = collections.OrderedDict()
for compt in wildcardFind(modelRoot+'/##[ISA=ChemCompt]'):
groupColor = []
try:
value = meshEntry[element(compt)]
except KeyError:
# Compt is not present
meshEntry[element(compt)] = {}
objPar[element(compt)] = element('/')
for grp in wildcardFind(compt.path+'/##[TYPE=Neutral]'):
test = [x for x in wildcardFind(element(grp).path+'/#') if x.className in ["Pool","Reac","Enz"]]
grp_cmpt = findGroup_compt(grp.parent)
try:
value = meshEntry[element(grp)]
except KeyError:
# Grp is not present
meshEntry[element(grp)] = {}
objPar[element(grp)] = element(grp_cmpt)
for compt in wildcardFind(modelRoot+'/##[ISA=ChemCompt]'):
for m in wildcardFind(compt.path+'/##[ISA=PoolBase]'):
grp_cmpt = findGroup_compt(m)
if isinstance(element(grp_cmpt),Neutral):
if isinstance(element(m.parent),EnzBase):
populateMeshEntry(meshEntry,grp_cmpt,"cplx",m)
else:
populateMeshEntry(meshEntry,grp_cmpt,"pool",m)
else:
if isinstance(element(m.parent),EnzBase):
populateMeshEntry(meshEntry,compt,"cplx",m)
else:
populateMeshEntry(meshEntry,compt,"pool",m)
for r in wildcardFind(compt.path+'/##[ISA=ReacBase]'):
rgrp_cmpt = findGroup_compt(r)
if isinstance(element(rgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,rgrp_cmpt,"reaction",r)
else:
populateMeshEntry(meshEntry,compt,"reaction",r)
for e in wildcardFind(compt.path+'/##[ISA=EnzBase]'):
egrp_cmpt = findGroup_compt(e)
if isinstance(element(egrp_cmpt),Neutral):
populateMeshEntry(meshEntry,egrp_cmpt,"enzyme",e)
else:
populateMeshEntry(meshEntry,compt,"enzyme",e)
for f in wildcardFind(compt.path+'/##[ISA=Function]'):
fgrp_cmpt = findGroup_compt(f)
if isinstance(element(fgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,fgrp_cmpt,"function",f)
else:
populateMeshEntry(meshEntry,compt,"function",f)
for t in wildcardFind(compt.path+'/##[ISA=StimulusTable]'):
tgrp_cmpt = findGroup_compt(t)
if isinstance(element(tgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,tgrp_cmpt,"stimTab",t)
else:
populateMeshEntry(meshEntry,compt,"stimTab",t)
return(objPar,meshEntry)
def setupMeshObj(modelRoot):
''' Setup compartment and its members pool,reaction,enz cplx under self.meshEntry dictionaries \
self.meshEntry with "key" as compartment,
value is key2:list where key2 represents moose object type,list of objects of a perticular type
e.g self.meshEntry[meshEnt] = { 'reaction': reaction_list,'enzyme':enzyme_list,'pool':poollist,'cplx': cplxlist }
'''
xmin = 0.0
xmax = 1.0
ymin = 0.0
ymax = 1.0
positionInfoExist = True
meshEntry = {}
if meshEntry:
meshEntry.clear()
else:
meshEntry = {}
xcord = []
ycord = []
n = 1
objPar = collections.OrderedDict()
for compt in wildcardFind(modelRoot+'/##[ISA=ChemCompt]'):
groupColor = []
try:
value = meshEntry[element(compt)]
except KeyError:
# Compt is not present
meshEntry[element(compt)] = {}
objPar[element(compt)] = element('/')
for grp in wildcardFind(compt.path+'/##[TYPE=Neutral]'):
test = [x for x in wildcardFind(element(grp).path+'/#') if x.className in ["Pool","Reac","Enz"]]
#if len(test) >1:
grpinfo = Annotator(element(grp).path+'/info')
validatecolor = colorCheck(grpinfo.color,"bg")
validatedgrpcolor = str(QColor(validatecolor).name())
groupColor.append(validatedgrpcolor)
grp_cmpt = findGroup_compt(grp.parent)
try:
value = meshEntry[element(grp)]
except KeyError:
# Grp is not present
meshEntry[element(grp)] = {}
objPar[element(grp)] = element(grp_cmpt)
# if n > 1:
# validatecolor = colorCheck(grpinfo.color,"bg")
# validatedgrpcolor = str(QColor(validatecolor).name())
# if validatedgrpcolor in groupColor:
# print " inside "
# c = getRandColor()
# print " c ",c, c.name()
# grpinfo.color = str(c.name())
# groupColor.append(str(c.name()))
# print " groupColor ",grpinfo,grpinfo.color, groupColor
# n =n +1
for compt in wildcardFind(modelRoot+'/##[ISA=ChemCompt]'):
for m in wildcardFind(compt.path+'/##[ISA=PoolBase]'):
if not re.search("xfer",m.name):
grp_cmpt = findGroup_compt(m)
xcord.append(xyPosition(m.path+'/info','x'))
ycord.append(xyPosition(m.path+'/info','y'))
if isinstance(element(grp_cmpt),Neutral):
if isinstance(element(m.parent),EnzBase):
populateMeshEntry(meshEntry,grp_cmpt,"cplx",m)
else:
populateMeshEntry(meshEntry,grp_cmpt,"pool",m)
else:
if isinstance(element(m.parent),EnzBase):
populateMeshEntry(meshEntry,compt,"cplx",m)
else:
populateMeshEntry(meshEntry,compt,"pool",m)
for r in wildcardFind(compt.path+'/##[ISA=ReacBase]'):
rgrp_cmpt = findGroup_compt(r)
xcord.append(xyPosition(r.path+'/info','x'))
ycord.append(xyPosition(r.path+'/info','y'))
if isinstance(element(rgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,rgrp_cmpt,"reaction",r)
else:
populateMeshEntry(meshEntry,compt,"reaction",r)
for e in wildcardFind(compt.path+'/##[ISA=EnzBase]'):
egrp_cmpt = findGroup_compt(e)
xcord.append(xyPosition(e.path+'/info','x'))
ycord.append(xyPosition(e.path+'/info','y'))
if isinstance(element(egrp_cmpt),Neutral):
populateMeshEntry(meshEntry,egrp_cmpt,"enzyme",e)
else:
populateMeshEntry(meshEntry,compt,"enzyme",e)
for f in wildcardFind(compt.path+'/##[ISA=Function]'):
fgrp_cmpt = findGroup_compt(f)
if isinstance(element(fgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,fgrp_cmpt,"function",f)
else:
populateMeshEntry(meshEntry,compt,"function",f)
for t in wildcardFind(compt.path+'/##[ISA=StimulusTable]'):
tgrp_cmpt = findGroup_compt(t)
xcord.append(xyPosition(t.path+'/info','x'))
ycord.append(xyPosition(t.path+'/info','y'))
if isinstance(element(tgrp_cmpt),Neutral):
populateMeshEntry(meshEntry,tgrp_cmpt,"stimTab",t)
else:
populateMeshEntry(meshEntry,compt,"stimTab",t)
xmin = min(xcord)
xmax = max(xcord)
ymin = min(ycord)
ymax = max(ycord)
positionInfoExist = not(len(np.nonzero(xcord)[0]) == 0 and len(np.nonzero(ycord)[0]) == 0)
return(objPar,meshEntry,xmin,xmax,ymin,ymax,positionInfoExist)
'''
def setupMeshObj(modelRoot):
# Setup compartment and its members pool,reaction,enz cplx under self.meshEntry dictionaries \
# self.meshEntry with "key" as compartment,
# value is key2:list where key2 represents moose object type,list of objects of a perticular type
# e.g self.meshEntry[meshEnt] = { 'reaction': reaction_list,'enzyme':enzyme_list,'pool':poollist,'cplx': cplxlist }
meshEntry = {}
if meshEntry:
meshEntry.clear()
else:
meshEntry = {}
xcord = []
ycord = []
meshEntryWildcard = '/##[ISA=ChemCompt]'
if modelRoot != '/':
meshEntryWildcard = modelRoot+meshEntryWildcard
for meshEnt in wildcardFind(meshEntryWildcard):
mollist = []
realist = []
enzlist = []
cplxlist = []
tablist = []
funclist = []
mol_cpl = wildcardFind(meshEnt.path+'/##[ISA=PoolBase]')
funclist = wildcardFind(meshEnt.path+'/##[ISA=Function]')
enzlist = wildcardFind(meshEnt.path+'/##[ISA=EnzBase]')
realist = wildcardFind(meshEnt.path+'/##[ISA=ReacBase]')
tablist = wildcardFind(meshEnt.path+'/##[ISA=StimulusTable]')
if mol_cpl or funclist or enzlist or realist or tablist:
for m in mol_cpl:
if isinstance(element(m.parent),CplxEnzBase):
cplxlist.append(m)
elif isinstance(element(m),moose.PoolBase):
mollist.append(m)
meshEntry[meshEnt] = {'enzyme':enzlist,
'reaction':realist,
'pool':mollist,
'cplx':cplxlist,
'table':tablist,
'function':funclist
}
for mert in [mollist,enzlist,realist,tablist]:
for merts in mert:
objInfo = merts.path+'/info'
if exists(objInfo):
xcord.append(element(objInfo).x)
ycord.append(element(objInfo).y)
return(meshEntry,xcord,ycord)
def sizeHint(self):
return QtCore.QSize(800,400)
'''
def setupItem(modelPath,cntDict):
# This function collects information of what is connected to what. \
# eg. substrate and product connectivity to reaction's and enzyme's \
# sumtotal connectivity to its pool are collected
#print " setupItem"
sublist = []
prdlist = []
zombieType = ['ReacBase','EnzBase','Function','StimulusTable']
for baseObj in zombieType:
path = '/##[ISA='+baseObj+']'
if modelPath != '/':
path = modelPath+path
if ( (baseObj == 'ReacBase') or (baseObj == 'EnzBase')):
for items in wildcardFind(path):
sublist = []
prdlist = []
uniqItem,countuniqItem = countitems(items,'subOut')
subNo = uniqItem
for sub in uniqItem:
sublist.append((element(sub),'s',countuniqItem[sub]))
uniqItem,countuniqItem = countitems(items,'prd')
prdNo = uniqItem
if (len(subNo) == 0 or len(prdNo) == 0):
print ("Substrate Product is empty ",path, " ",items)
for prd in uniqItem:
prdlist.append((element(prd),'p',countuniqItem[prd]))
if (baseObj == 'CplxEnzBase') :
uniqItem,countuniqItem = countitems(items,'toEnz')
for enzpar in uniqItem:
sublist.append((element(enzpar),'t',countuniqItem[enzpar]))
uniqItem,countuniqItem = countitems(items,'cplxDest')
for cplx in uniqItem:
prdlist.append((element(cplx),'cplx',countuniqItem[cplx]))
if (baseObj == 'EnzBase'):
uniqItem,countuniqItem = countitems(items,'enzDest')
for enzpar in uniqItem:
sublist.append((element(enzpar),'t',countuniqItem[enzpar]))
cntDict[items] = sublist,prdlist
elif baseObj == 'Function':
for items in wildcardFind(path):
sublist = []
prdlist = []
item = items.path+'/x[0]'
uniqItem,countuniqItem = countitems(item,'input')
for funcpar in uniqItem:
sublist.append((element(funcpar),'sts',countuniqItem[funcpar]))
uniqItem,countuniqItem = countitems(items,'valueOut')
for funcpar in uniqItem:
prdlist.append((element(funcpar),'stp',countuniqItem[funcpar]))
cntDict[items] = sublist,prdlist
else:
for tab in wildcardFind(path):
tablist = []
uniqItem,countuniqItem = countitems(tab,'output')
for tabconnect in uniqItem:
tablist.append((element(tabconnect),'tab',countuniqItem[tabconnect]))
cntDict[tab] = tablist
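# Illustrative shape of cntDict after setupItem() (hedged; the element names
# are placeholders): a reaction or enzyme maps to a (substrates, products)
# pair with per-element multiplicity, e.g.
#   cntDict[reac] = ([(poolA, 's', 1)], [(poolB, 'p', 2)])
# while a stimulus table maps to a single list of ('tab', count) tuples.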
def countitems(mitems,objtype):
items = []
items = element(mitems).neighbors[objtype]
uniqItems = set(items)
#countuniqItemsauto = Counter(items)
countuniqItems = dict((i, items.count(i)) for i in items)
return(uniqItems,countuniqItems)
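# Illustrative (hedged; strings stand in for moose elements): neighbors
# ['A', 'A', 'B'] yield ({'A', 'B'}, {'A': 2, 'B': 1}) -- the counts
# preserve stoichiometry when the same pool is connected more than once.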
def recalculatecoordinatesforKkit(mObjlist,xcord,ycord):
positionInfoExist = not(len(np.nonzero(xcord)[0]) == 0 \
and len(np.nonzero(ycord)[0]) == 0)
if positionInfoExist:
        # All objects collected; normalise the x/y co-ordinates and write them back
xmin = min(xcord)
xmax = max(xcord)
ymin = min(ycord)
ymax = max(ycord)
for merts in mObjlist:
objInfo = merts.path+'/info'
if moose.exists(objInfo):
Ix = (xyPosition(objInfo,'x')-xmin)/(xmax-xmin)
Iy = (ymin-xyPosition(objInfo,'y'))/(ymax-ymin)
element(objInfo).x = Ix*1000
element(objInfo).y = Iy*800
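# Worked example for the normalisation above (added commentary): with xmin=0,
# xmax=20 and an object at x=5, Ix = (5-0)/(20-0) = 0.25, so the stored x becomes
# 0.25*1000 = 250; note the y axis is mirrored via (ymin - y)/(ymax - ymin).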
def xyPosition(objInfo,xory):
try:
return(float(element(objInfo).getField(xory)))
except ValueError:
return (float(0))
def autoCoordinates(meshEntry,srcdesConnection):
G = nx.Graph()
for cmpt,memb in meshEntry.items():
if memb in ["enzyme"]:
for enzObj in find_index(memb,'enzyme'):
#G.add_node(enzObj.path)
G.add_node(enzObj.path,label='',shape='ellipse',color='',style='filled',fontname='Helvetica',fontsize=12,fontcolor='blue')
for cmpt,memb in meshEntry.items():
        # key-membership check (restores the intent of the old has_key test;
        # `memb in [...]` compared the dict itself to strings and never matched)
        if all(k in memb for k in ("pool", "cplx", "reaction")):
for poolObj in find_index(memb,'pool'):
#G.add_node(poolObj.path)
G.add_node(poolObj.path,label = poolObj.name,shape = 'box',color = '',style = 'filled',fontname = 'Helvetica',fontsize = 9,fontcolor = 'blue')
for cplxObj in find_index(memb,'cplx'):
                #G.add_node(cplxObj.path)  # redundant: the attributed add_node below replaces it
G.add_node(cplxObj.path,label = cplxObj.name,shape = 'box',color = '',style = 'filled',fontname = 'Helvetica',fontsize = 12,fontcolor = 'blue')
#G.add_edge((cplxObj.parent).path,cplxObj.path)
for reaObj in find_index(memb,'reaction'):
#G.add_node(reaObj.path)
G.add_node(reaObj.path,label='',shape='circle',color='')
for inn,out in srcdesConnection.items():
if (inn.className =='ZombieReac'): arrowcolor = 'green'
elif(inn.className =='ZombieEnz'): arrowcolor = 'red'
else: arrowcolor = 'blue'
if isinstance(out,tuple):
            if len(out[0]) == 0:
                print (inn.className + ':' + inn.name + " doesn't have an input message")
            else:
                for items in out[0]:
                    G.add_edge(element(items[0]).path,inn.path)
            if len(out[1]) == 0:
                print (inn.className + ':' + inn.name + " doesn't have an output message")
            else:
                for items in out[1]:
                    G.add_edge(inn.path,element(items[0]).path)
        elif isinstance(out,list):
            if len(out) == 0:
                print ("Func pool doesn't have sumtotal")
            else:
                for items in out:
                    G.add_edge(element(items[0]).path,inn.path)
position = graphviz_layout(G)
xcord, ycord = [],[]
for item in position.items():
xy = item[1]
xroundoff = round(xy[0],0)
yroundoff = round(xy[1],0)
xcord.append(xroundoff)
ycord.append(yroundoff)
xmin = min(xcord)
xmax = max(xcord)
ymin = min(ycord)
ymax = max(ycord)
for item in position.items():
xy = item[1]
anno = Annotator(item[0]+'/info')
Ax = (xy[0]-xmin)/(xmax-xmin)
Ay = (xy[1]-ymin)/(ymax-ymin)
        #anno.x = round(Ax,1)
        #anno.y = round(Ay,1)
        # Ax/Ay (the min/max-normalised values) are computed but intentionally unused:
        # rounding co-ordinates into the min/max range would make them overlap in bigger models
anno.x = xy[0]
anno.y = xy[1]
def find_index(value, key):
""" Value.get(key) to avoid expection which would raise if empty value in dictionary for a given key """
if value.get(key) != None:
return value.get(key)
else:
raise ValueError('no dict with the key found')
|
gpl-3.0
| -3,179,577,821,720,425,000
| 39.821721
| 159
| 0.560765
| false
| 3.637874
| false
| false
| false
|
strongswan/strongTNC
|
apps/devices/paging.py
|
1
|
5001
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import math
from .models import Device, Product
from apps.core.models import Session
from apps.devices.models import Device
from apps.swid.models import Event
from apps.front.paging import ProducerFactory
# PAGING PRODUCER
device_producer_factory = ProducerFactory(Device, 'description__icontains')
product_producer_factory = ProducerFactory(Product, 'name__icontains')
def device_session_list_producer(from_idx, to_idx, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
session_list = Session.objects.filter(device=device_id)
return session_list[from_idx:to_idx]
def device_session_stat_producer(page_size, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
count = Session.objects.filter(device=device_id).count()
return math.ceil(count / page_size)
def device_event_list_producer(from_idx, to_idx, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
event_list = Event.objects.filter(device=device_id)
return event_list[from_idx:to_idx]
def device_event_stat_producer(page_size, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
count = Event.objects.filter(device=device_id).count()
return math.ceil(count / page_size)
def device_vulnerability_list_producer(from_idx, to_idx, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
device = Device.objects.get(pk=device_id)
vulnerabilities = device.get_vulnerabilities()
return vulnerabilities[from_idx:to_idx]
def device_vulnerability_stat_producer(page_size, filter_query, dynamic_params=None,
static_params=None):
device_id = dynamic_params['device_id']
device = Device.objects.get(pk=device_id)
count = device.get_vulnerabilities().count()
return math.ceil(count / page_size)
def product_device_list_producer(from_idx, to_idx, filter_query, dynamic_params=None,
static_params=None):
if not dynamic_params:
return []
product_id = dynamic_params['product_id']
return Device.objects.filter(product__id=product_id)[from_idx:to_idx]
def product_device_stat_producer(page_size, filter_query, dynamic_params=None,
static_params=None):
if not dynamic_params:
return []
product_id = dynamic_params['product_id']
count = Device.objects.filter(product__id=product_id).count()
return math.ceil(count / page_size)
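# Hedged sketch of how a list/stat producer pair is driven (added commentary;
# the real dispatch lives in apps.front.paging, and the device id is illustrative):
#   pages = device_session_stat_producer(page_size=10, filter_query=None,
#                                        dynamic_params={'device_id': 1})
#   rows = device_session_list_producer(0, 10, None,
#                                       dynamic_params={'device_id': 1})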
# PAGING CONFIGS
device_list_paging = {
'template_name': 'front/paging/default_list',
'list_producer': device_producer_factory.list(),
'stat_producer': device_producer_factory.stat(),
'static_producer_args': None,
'var_name': 'object_list',
'url_name': 'devices:device_detail',
'page_size': 50,
}
product_list_paging = {
'template_name': 'front/paging/default_list',
'list_producer': product_producer_factory.list(),
'stat_producer': product_producer_factory.stat(),
'static_producer_args': None,
'var_name': 'object_list',
'url_name': 'devices:product_detail',
'page_size': 50,
}
product_devices_list_paging = {
'template_name': 'devices/paging/device_list',
'list_producer': product_device_list_producer,
'stat_producer': product_device_stat_producer,
'url_name': 'devices:device_detail',
'page_size': 10,
}
device_session_list_paging = {
'template_name': 'devices/paging/device_report_sessions',
'list_producer': device_session_list_producer,
'stat_producer': device_session_stat_producer,
'static_producer_args': None,
'var_name': 'sessions',
'url_name': 'devices:session_detail',
'page_size': 10,
}
device_event_list_paging = {
'template_name': 'devices/paging/device_report_events',
'list_producer': device_event_list_producer,
'stat_producer': device_event_stat_producer,
'static_producer_args': None,
'var_name': 'events',
'url_name': 'devices:event_detail',
'page_size': 10,
}
device_vulnerability_list_paging = {
'template_name': 'devices/paging/device_report_vulnerabilities',
'list_producer': device_vulnerability_list_producer,
'stat_producer': device_vulnerability_stat_producer,
'static_producer_args': None,
'var_name': 'vulnerabilities',
'url_name': None,
'page_size': 10,
}
|
agpl-3.0
| -8,092,494,106,131,860,000
| 34.978417
| 92
| 0.634473
| false
| 3.685335
| false
| false
| false
|
openstack/sahara
|
sahara/utils/files.py
|
1
|
1190
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
import pkg_resources as pkg
from sahara import version
def get_file_text(file_name, package='sahara'):
full_name = pkg.resource_filename(
package, file_name)
return open(full_name).read()
def get_file_binary(file_name):
full_name = pkg.resource_filename(
version.version_info.package, file_name)
return open(full_name, "rb").read()
def try_get_file_text(file_name, package='sahara'):
full_name = pkg.resource_filename(
package, file_name)
return (
open(full_name, "rb").read()
if path.isfile(full_name) else False)
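# Minimal usage sketch (added commentary; the resource path is hypothetical):
#   text = get_file_text('plugins/my_plugin/resources/defaults.xml')
# Note that file_name is resolved inside the installed package via
# pkg_resources, not relative to the caller's working directory.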
|
apache-2.0
| 3,584,808,584,476,616,000
| 28.75
| 69
| 0.708403
| false
| 3.617021
| false
| false
| false
|
imclab/confer
|
server/auth.py
|
1
|
12729
|
import json, sys, re, hashlib, smtplib, base64, urllib, os
from django.http import *
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.core.context_processors import csrf
from django.core.validators import email_re
from django.db.utils import IntegrityError
from django.utils.http import urlquote_plus
from multiprocessing import Pool
from utils import *
from models import *
p = os.path.abspath(os.path.dirname(__file__))
if(os.path.abspath(p+"/..") not in sys.path):
sys.path.append(os.path.abspath(p+"/.."))
'''
@author: Anant Bhardwaj
@date: Feb 12, 2012
'''
kLogIn = "SESSION_LOGIN"
kConf = "SESSION_CONF"
kName = "SESSION_NAME"
kFName = "SESSION_F_NAME"
kLName = "SESSION_L_NAME"
# for async calls
pool = Pool(processes=1)
'''
LOGIN/REGISTER/RESET
'''
def login_required (f):
def wrap (request, *args, **kwargs):
if kLogIn not in request.session.keys():
if(len(args)>0):
redirect_url = urlquote_plus("/%s/%s" %(args[0], f.__name__))
else:
redirect_url = "/"
return HttpResponseRedirect("/login?redirect_url=%s" %(redirect_url))
return f(request, *args, **kwargs)
wrap.__doc__ = f.__doc__
wrap.__name__ = f.__name__
return wrap
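# Usage sketch for the decorator above (added commentary; the view name is
# illustrative):
#   @login_required
#   def dashboard(request):
#       ...
# Unauthenticated requests are redirected to /login?redirect_url=<original url>.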
def login_form (request, redirect_url='/', errors=[]):
c = {'redirect_url':redirect_url, 'errors':errors, 'values':request.REQUEST}
c.update(csrf(request))
return render_to_response('login.html', c)
def register_form (request, redirect_url='/', errors=[]):
c = {'redirect_url':redirect_url, 'errors':errors, 'values':request.REQUEST}
c.update(csrf(request))
return render_to_response('register.html', c)
def login (request):
redirect_url = '/'
if('redirect_url' in request.GET.keys()):
redirect_url = urllib.unquote_plus(request.GET['redirect_url'])
if not redirect_url or redirect_url == '':
redirect_url = '/'
if request.method == "POST":
errors = []
login_email = ''
if('redirect_url' in request.POST.keys()):
redirect_url = urllib.unquote_plus(request.POST['redirect_url'])
try:
login_email = request.POST["login_email"].lower()
login_password = hashlib.sha1(request.POST["login_password"]).hexdigest()
user = User.objects.get(email=login_email, password=login_password)
clear_session(request)
request.session[kLogIn] = user.email
request.session[kName] = user.f_name
request.session[kFName] = user.f_name
request.session[kLName] = user.l_name
return HttpResponseRedirect(redirect_url)
except User.DoesNotExist:
try:
User.objects.get(email=login_email)
errors.append(
'Wrong password. Please try again.<br /><br />'
'<a class="blue bold" href="/forgot?email=%s">Click Here</a> '
'to reset your password.' %(urllib.quote_plus(login_email)))
except User.DoesNotExist:
errors.append(
'Could not find any account associated with email address: '
'<a href="mailto:%s">%s</a>.<br /><br /><a class="blue bold" '
'href="/register?redirect_url=%s&email=%s">Click Here</a> '
'to create an account.' %(login_email, login_email,
urllib.quote_plus(redirect_url), urllib.quote_plus(login_email)))
return login_form(
request, redirect_url = urllib.quote_plus(redirect_url),
errors = errors)
except:
errors.append('Login failed.')
return login_form(
request, redirect_url = urllib.quote_plus(redirect_url),
errors = errors)
else:
return login_form(request, urllib.quote_plus(redirect_url))
def register (request):
redirect_url = '/'
if('redirect_url' in request.GET.keys()):
redirect_url = urllib.unquote_plus(request.GET['redirect_url'])
if request.method == "POST":
errors = []
email = ''
try:
error = False
if('redirect_url' in request.POST.keys()):
redirect_url = urllib.unquote_plus(request.POST['redirect_url'])
email = request.POST["email"].lower()
password = request.POST["password"]
f_name = request.POST["f_name"]
l_name = request.POST["l_name"]
if(email_re.match(email.strip()) == None):
errors.append("Invalid Email.")
error = True
if(f_name.strip() == ""):
errors.append("Empty First Name.")
error = True
if(l_name.strip() == ""):
errors.append("Empty Last Name.")
error = True
if(password == ""):
errors.append("Empty Password.")
error = True
if(error):
return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
hashed_password = hashlib.sha1(password).hexdigest()
user = User(email=email, password=hashed_password, f_name=f_name, l_name=l_name)
user.save()
clear_session(request)
request.session[kLogIn] = user.email
request.session[kName] = user.f_name
request.session[kFName] = user.f_name
request.session[kLName] = user.l_name
encrypted_email = encrypt_text(user.email)
subject = "Welcome to Confer"
msg_body = '''
Dear %s,
Thanks for registering to Confer.
Please click the link below to start using Confer:
http://confer.csail.mit.edu/verify/%s
''' % (user.f_name + ' ' + user.l_name, encrypted_email)
pool.apply_async(send_email, [user.email, subject, msg_body])
return HttpResponseRedirect(redirect_url)
except IntegrityError:
errors.append(
'Account already exists. Please <a class="blue bold" href="/login?login_email=%s">Log In</a>.'
% (urllib.quote_plus(email)))
return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
except:
errors.append("Some error happened while trying to create an account. Please try again.")
return register_form(request, redirect_url = urllib.quote_plus(redirect_url), errors = errors)
else:
return register_form(request, redirect_url = urllib.quote_plus(redirect_url))
def clear_session (request):
request.session.flush()
if kLogIn in request.session.keys():
del request.session[kLogIn]
if kName in request.session.keys():
del request.session[kName]
if kFName in request.session.keys():
del request.session[kFName]
if kLName in request.session.keys():
del request.session[kLName]
def logout (request):
clear_session(request)
c = {
'msg_title': 'Thank you for using Confer!',
    'msg_body': 'You have been logged out.<br /><br /><ul><li><a class= "blue bold" href="/home">Click Here</a> to browse confer as guest.<br/><br /></li><li><a class= "blue bold" href="/login">Click Here</a> to log in again.</li></ul>'
}
c.update(csrf(request))
return render_to_response('confirmation.html', c)
def forgot (request):
if request.method == "POST":
errors = []
try:
user_email = request.POST["email"].lower()
User.objects.get(email=user_email)
encrypted_email = encrypt_text(user_email)
subject = "Confer Password Reset"
msg_body = '''
Dear %s,
Please click the link below to reset your confer password:
http://confer.csail.mit.edu/reset/%s
''' % (user_email, encrypted_email)
pool.apply_async(send_email, [user_email, subject, msg_body])
c = {
'msg_title': 'Confer Reset Password',
'msg_body': 'A link to reset your password has been sent to your email address.'
}
c.update(csrf(request))
return render_to_response('confirmation.html', c)
except User.DoesNotExist:
errors.append(
"Invalid Email Address.")
except:
errors.append(
'Some unknown error happened.'
'Please try again or send an email to '
'<a href="mailto:confer@csail.mit.edu">confer@csail.mit.edu</a>.')
c = {'errors': errors, 'values': request.POST}
c.update(csrf(request))
return render_to_response('forgot.html', c)
else:
c = {'values': request.REQUEST}
c.update(csrf(request))
return render_to_response('forgot.html', c)
def verify (request, encrypted_email):
errors = []
c = {'msg_title': 'Confer Account Verification'}
try:
user_email = decrypt_text(encrypted_email)
user = User.objects.get(email=user_email)
c.update({
'msg_body': 'Thanks for verifying your email address! <a class= "blue bold" href="/home">Click Here</a> to start using Confer.'
})
clear_session(request)
request.session[kLogIn] = user.email
request.session[kName] = user.f_name
request.session[kFName] = user.f_name
request.session[kLName] = user.l_name
except:
errors.append(
'Wrong verify code in the URL. '
'Please try again or send an email to '
'<a href="mailto:confer@csail.mit.edu">confer@csail.mit.edu</a>')
c.update({'errors': errors})
c.update(csrf(request))
return render_to_response('confirmation.html', c)
def reset (request, encrypted_email):
errors = []
error = False
if request.method == "POST":
try:
user_email = request.POST["user_email"].lower()
password = request.POST["new_password"]
password2 = request.POST["new_password2"]
if password == "":
errors.append("Empty Password.")
error = True
if password2 != password:
errors.append("Password and Confirm Password don't match.")
error = True
if error:
c = {
'user_email': user_email,
'encrypted_email': encrypted_email,
'errors': errors
}
c.update(csrf(request))
return render_to_response('reset.html', c)
else:
hashed_password = hashlib.sha1(password).hexdigest()
user = User.objects.get(email=user_email)
user.password = hashed_password
user.save()
c = {
'msg_title': 'Confer Reset Password',
'msg_body': 'Your password has been changed successfully.'
}
c.update(csrf(request))
return render_to_response('confirmation.html', c)
except:
errors.append(
'Some unknown error happened. '
'Please try again or send an email to '
'<a href="mailto:confer@csail.mit.edu">confer@csail.mit.edu</a>')
c = {'errors': errors}
c.update(csrf(request))
return render_to_response('reset.html', c)
else:
try:
user_email = decrypt_text(encrypted_email)
User.objects.get(email=user_email)
c = {
'user_email': user_email,
'encrypted_email': encrypted_email
}
c.update(csrf(request))
return render_to_response('reset.html', c)
except:
errors.append(
'Wrong reset code in the URL. '
'Please try again or send an email to '
'<a href="mailto:confer@csail.mit.edu">confer@csail.mit.edu</a>')
c = {'msg_title': 'Confer Reset Password', 'errors': errors}
c.update(csrf(request))
return render_to_response('confirmation.html', c)
@login_required
def settings (request):
errors = []
error = False
redirect_url = '/'
if('redirect_url' in request.GET.keys()):
redirect_url = request.GET['redirect_url']
if request.method == "POST":
try:
if('redirect_url' in request.POST.keys()):
redirect_url = request.POST['redirect_url']
user_email = request.POST["user_email"].lower()
meetups = request.POST["meetups_enabled"]
user = User.objects.get(email=user_email)
if meetups == 'enabled':
user.meetups_enabled = True
else:
user.meetups_enabled = False
user.save()
return HttpResponseRedirect(redirect_url)
except Exception, e:
errors.append(
'Some unknown error happened. '
'Please try again or send an email to '
'<a href="mailto:confer@csail.mit.edu">confer@csail.mit.edu</a>')
c = {'errors': errors}
c.update(csrf(request))
return render_to_response('settings.html', c)
else:
login = get_login(request)
user = User.objects.get(email=login[0])
meetups_enabled = user.meetups_enabled
c = {
'user_email': login[0],
'login_id': login[0],
'login_name': login[1],
'meetups_enabled': meetups_enabled,
'redirect_url': redirect_url}
c.update(csrf(request))
return render_to_response('settings.html', c)
def get_login(request):
login_id = None
login_name = ''
try:
login_id = request.session[kLogIn]
login_name = request.session[kName]
except:
pass
return [login_id, login_name]
|
mit
| -7,501,670,995,491,163,000
| 30.585608
| 237
| 0.623537
| false
| 3.582606
| false
| false
| false
|
RealTimeWeb/wikisite
|
MoinMoin/action/thread_monitor.py
|
1
|
1932
|
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - Thread monitor action
Shows the current traceback of all threads.
@copyright: 2006 MoinMoin:AlexanderSchremmer
@license: GNU GPL, see COPYING for details.
"""
import os, time
from StringIO import StringIO
from MoinMoin import Page, wikiutil
from MoinMoin.util import thread_monitor
def execute_fs(pagename, request):
_ = request.getText
# check for superuser
if not request.user.isSuperUser():
request.theme.add_msg(_('You are not allowed to use this action.'), "error")
return Page.Page(request, pagename).send_page()
if thread_monitor.hook_enabled():
s = StringIO()
thread_monitor.trigger_dump(s)
        time.sleep(5) # allow all threads time to dump into the buffer
data = s.getvalue()
timestamp = time.time()
dump_fname = os.path.join(request.cfg.data_dir, "tm_%d.log" % timestamp)
f = file(dump_fname, "w")
f.write(data)
f.close()
else:
dump_fname = "nowhere"
request.write('<html><body>A dump has been saved to %s.</body></html>' % dump_fname)
def execute_wiki(pagename, request):
_ = request.getText
# be extra paranoid in dangerous actions
actname = __name__.split('.')[-1]
if not request.user.isSuperUser():
request.theme.add_msg(_('You are not allowed to use this action.'), "error")
return Page.Page(request, pagename).send_page()
request.theme.send_title("Thread monitor")
request.write('<pre>')
if not thread_monitor.hook_enabled():
request.write("Hook is not enabled.")
else:
s = StringIO()
thread_monitor.trigger_dump(s)
        time.sleep(5) # allow all threads time to dump into the buffer
request.write(wikiutil.escape(s.getvalue()))
request.write('</pre>')
request.theme.send_footer(pagename)
request.theme.send_closing_html()
execute = execute_fs
|
apache-2.0
| 5,275,388,841,327,920,000
| 30.16129
| 88
| 0.64234
| false
| 3.645283
| false
| false
| false
|
avrem/ardupilot
|
libraries/AP_Terrain/tools/create_terrain.py
|
1
|
11287
|
#!/usr/bin/env python
'''
create ardupilot terrain database files
'''
from MAVProxy.modules.mavproxy_map import srtm
import math, struct, os, sys
import crc16, time, struct
# MAVLink sends 4x4 grids
TERRAIN_GRID_MAVLINK_SIZE = 4
# a 2k grid_block on disk contains 8x7 of the mavlink grids. Each
# grid block overlaps by one with its neighbour. This ensures that
# the altitude at any point can be calculated from a single grid
# block
TERRAIN_GRID_BLOCK_MUL_X = 7
TERRAIN_GRID_BLOCK_MUL_Y = 8
# this is the spacing between 32x28 grid blocks, in grid_spacing units
TERRAIN_GRID_BLOCK_SPACING_X = ((TERRAIN_GRID_BLOCK_MUL_X-1)*TERRAIN_GRID_MAVLINK_SIZE)
TERRAIN_GRID_BLOCK_SPACING_Y = ((TERRAIN_GRID_BLOCK_MUL_Y-1)*TERRAIN_GRID_MAVLINK_SIZE)
# giving a total grid size of a disk grid_block of 32x28
TERRAIN_GRID_BLOCK_SIZE_X = (TERRAIN_GRID_MAVLINK_SIZE*TERRAIN_GRID_BLOCK_MUL_X)
TERRAIN_GRID_BLOCK_SIZE_Y = (TERRAIN_GRID_MAVLINK_SIZE*TERRAIN_GRID_BLOCK_MUL_Y)
# format of grid on disk
TERRAIN_GRID_FORMAT_VERSION = 1
IO_BLOCK_SIZE = 2048
GRID_SPACING = 100
def to_float32(f):
'''emulate single precision float'''
return struct.unpack('f', struct.pack('f',f))[0]
LOCATION_SCALING_FACTOR = to_float32(0.011131884502145034)
LOCATION_SCALING_FACTOR_INV = to_float32(89.83204953368922)
def longitude_scale(lat):
'''get longitude scale factor'''
scale = to_float32(math.cos(to_float32(math.radians(lat))))
return max(scale, 0.01)
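# Worked example (added commentary): longitude_scale(60) ~= cos(60 deg) = 0.5,
# i.e. a degree of longitude at 60N spans half the ground distance it does at
# the equator; the 0.01 floor keeps the divisions in add_offset finite near the poles.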
def get_distance_NE_e7(lat1, lon1, lat2, lon2):
'''get distance tuple between two positions in 1e7 format'''
return ((lat2 - lat1) * LOCATION_SCALING_FACTOR, (lon2 - lon1) * LOCATION_SCALING_FACTOR * longitude_scale(lat1*1.0e-7))
def add_offset(lat_e7, lon_e7, ofs_north, ofs_east):
'''add offset in meters to a position'''
dlat = int(float(ofs_north) * LOCATION_SCALING_FACTOR_INV)
dlng = int((float(ofs_east) * LOCATION_SCALING_FACTOR_INV) / longitude_scale(lat_e7*1.0e-7))
return (int(lat_e7+dlat), int(lon_e7+dlng))
def east_blocks(lat_e7, lon_e7):
'''work out how many blocks per stride on disk'''
lat2_e7 = lat_e7
lon2_e7 = lon_e7 + 10*1000*1000
# shift another two blocks east to ensure room is available
lat2_e7, lon2_e7 = add_offset(lat2_e7, lon2_e7, 0, 2*GRID_SPACING*TERRAIN_GRID_BLOCK_SIZE_Y)
offset = get_distance_NE_e7(lat_e7, lon_e7, lat2_e7, lon2_e7)
return int(offset[1] / (GRID_SPACING*TERRAIN_GRID_BLOCK_SPACING_Y))
def pos_from_file_offset(lat_degrees, lon_degrees, file_offset):
'''return a lat/lon in 1e7 format given a file offset'''
ref_lat = int(lat_degrees*10*1000*1000)
ref_lon = int(lon_degrees*10*1000*1000)
stride = east_blocks(ref_lat, ref_lon)
blocks = file_offset // IO_BLOCK_SIZE
grid_idx_x = blocks // stride
grid_idx_y = blocks % stride
idx_x = grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X
idx_y = grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y
offset = (idx_x * GRID_SPACING, idx_y * GRID_SPACING)
(lat_e7, lon_e7) = add_offset(ref_lat, ref_lon, offset[0], offset[1])
offset = get_distance_NE_e7(ref_lat, ref_lon, lat_e7, lon_e7)
grid_idx_x = int(idx_x / TERRAIN_GRID_BLOCK_SPACING_X)
grid_idx_y = int(idx_y / TERRAIN_GRID_BLOCK_SPACING_Y)
(lat_e7, lon_e7) = add_offset(ref_lat, ref_lon,
grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X * float(GRID_SPACING),
grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y * float(GRID_SPACING))
return (lat_e7, lon_e7)
class GridBlock(object):
def __init__(self, lat_int, lon_int, lat, lon):
'''
a grid block is a structure in a local file containing height
information. Each grid block is 2048 bytes in size, to keep file IO to
block oriented SD cards efficient
'''
# crc of whole block, taken with crc=0
self.crc = 0
# format version number
self.version = TERRAIN_GRID_FORMAT_VERSION
# grid spacing in meters
self.spacing = GRID_SPACING
# heights in meters over a 32*28 grid
self.height = []
for x in range(TERRAIN_GRID_BLOCK_SIZE_X):
self.height.append([0]*TERRAIN_GRID_BLOCK_SIZE_Y)
# bitmap of 4x4 grids filled in from GCS (56 bits are used)
self.bitmap = (1<<56)-1
lat_e7 = int(lat * 1.0e7)
lon_e7 = int(lon * 1.0e7)
# grids start on integer degrees. This makes storing terrain data on
# the SD card a bit easier. Note that this relies on the python floor
# behaviour with integer division
self.lat_degrees = lat_int
self.lon_degrees = lon_int
# create reference position for this rounded degree position
ref_lat = self.lat_degrees*10*1000*1000
ref_lon = self.lon_degrees*10*1000*1000
# find offset from reference
offset = get_distance_NE_e7(ref_lat, ref_lon, lat_e7, lon_e7)
offset = (round(offset[0]), round(offset[1]))
# get indices in terms of grid_spacing elements
idx_x = int(offset[0] / GRID_SPACING)
idx_y = int(offset[1] / GRID_SPACING)
# find indexes into 32*28 grids for this degree reference. Note
# the use of TERRAIN_GRID_BLOCK_SPACING_{X,Y} which gives a one square
# overlap between grids
self.grid_idx_x = idx_x // TERRAIN_GRID_BLOCK_SPACING_X
self.grid_idx_y = idx_y // TERRAIN_GRID_BLOCK_SPACING_Y
# calculate lat/lon of SW corner of 32*28 grid_block
(ref_lat, ref_lon) = add_offset(ref_lat, ref_lon,
self.grid_idx_x * TERRAIN_GRID_BLOCK_SPACING_X * float(GRID_SPACING),
self.grid_idx_y * TERRAIN_GRID_BLOCK_SPACING_Y * float(GRID_SPACING))
self.lat = ref_lat
self.lon = ref_lon
def fill(self, gx, gy, altitude):
'''fill a square'''
self.height[gx][gy] = int(altitude)
def blocknum(self):
'''find IO block number'''
stride = east_blocks(self.lat_degrees*1e7, self.lon_degrees*1e7)
return stride * self.grid_idx_x + self.grid_idx_y
class DataFile(object):
def __init__(self, lat, lon):
if lat < 0:
NS = 'S'
else:
NS = 'N'
if lon < 0:
EW = 'W'
else:
EW = 'E'
name = "terrain/%c%02u%c%03u.DAT" % (NS, min(abs(int(lat)), 99),
EW, min(abs(int(lon)), 999))
try:
os.mkdir("terrain")
except Exception:
pass
if not os.path.exists(name):
self.fh = open(name, 'w+b')
else:
self.fh = open(name, 'r+b')
def seek_offset(self, block):
'''seek to right offset'''
# work out how many longitude blocks there are at this latitude
file_offset = block.blocknum() * IO_BLOCK_SIZE
self.fh.seek(file_offset)
def pack(self, block):
'''pack into a block'''
buf = bytes()
buf += struct.pack("<QiiHHH", block.bitmap, block.lat, block.lon, block.crc, block.version, block.spacing)
for gx in range(TERRAIN_GRID_BLOCK_SIZE_X):
buf += struct.pack("<%uh" % TERRAIN_GRID_BLOCK_SIZE_Y, *block.height[gx])
buf += struct.pack("<HHhb", block.grid_idx_x, block.grid_idx_y, block.lon_degrees, block.lat_degrees)
return buf
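    # Layout note (added commentary): the packed block is 22 header bytes
    # (<QiiHHH) + 28*32*2 = 1792 height bytes + 7 trailer bytes (<HHhb) = 1821
    # bytes, comfortably inside IO_BLOCK_SIZE (2048); check_filled() CRCs
    # exactly buf[:1821] with the crc field at offset 16 zeroed to match.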
def write(self, block):
'''write a grid block'''
self.seek_offset(block)
block.crc = 0
buf = self.pack(block)
block.crc = crc16.crc16xmodem(buf)
buf = self.pack(block)
self.fh.write(buf)
def check_filled(self, block):
'''read a grid block and check if already filled'''
self.seek_offset(block)
buf = self.fh.read(IO_BLOCK_SIZE)
if len(buf) != IO_BLOCK_SIZE:
return False
(bitmap, lat, lon, crc, version, spacing) = struct.unpack("<QiiHHH", buf[:22])
if (version != TERRAIN_GRID_FORMAT_VERSION or
abs(lat - block.lat)>2 or
abs(lon - block.lon)>2 or
spacing != GRID_SPACING or
bitmap != (1<<56)-1):
return False
buf = buf[:16] + struct.pack("<H", 0) + buf[18:]
crc2 = crc16.crc16xmodem(buf[:1821])
if crc2 != crc:
return False
return True
def create_degree(lat, lon):
'''create data file for one degree lat/lon'''
lat_int = int(math.floor(lat))
    lon_int = int(math.floor(lon))
tiles = {}
dfile = DataFile(lat_int, lon_int)
print("Creating for %d %d" % (lat_int, lon_int))
total_blocks = east_blocks(lat_int*1e7, lon_int*1e7) * 47
for blocknum in range(total_blocks):
(lat_e7, lon_e7) = pos_from_file_offset(lat_int, lon_int, blocknum * IO_BLOCK_SIZE)
lat = lat_e7 * 1.0e-7
lon = lon_e7 * 1.0e-7
grid = GridBlock(lat_int, lon_int, lat, lon)
if grid.blocknum() != blocknum:
continue
if not args.force and dfile.check_filled(grid):
continue
for gx in range(TERRAIN_GRID_BLOCK_SIZE_X):
for gy in range(TERRAIN_GRID_BLOCK_SIZE_Y):
lat_e7, lon_e7 = add_offset(lat*1.0e7, lon*1.0e7, gx*GRID_SPACING, gy*GRID_SPACING)
lat2_int = int(math.floor(lat_e7*1.0e-7))
lon2_int = int(math.floor(lon_e7*1.0e-7))
tile_idx = (lat2_int, lon2_int)
                while tile_idx not in tiles:
tile = downloader.getTile(lat2_int, lon2_int)
waited = False
if tile == 0:
print("waiting on download of %d,%d" % (lat2_int, lon2_int))
time.sleep(0.3)
waited = True
continue
if waited:
print("downloaded %d,%d" % (lat2_int, lon2_int))
tiles[tile_idx] = tile
altitude = tiles[tile_idx].getAltitudeFromLatLon(lat_e7*1.0e-7, lon_e7*1.0e-7)
grid.fill(gx, gy, altitude)
dfile.write(grid)
from argparse import ArgumentParser
parser = ArgumentParser(description='terrain data creator')
parser.add_argument("lat", type=float, default=-35.363261)
parser.add_argument("lon", type=float, default=149.165230)
parser.add_argument("--force", action='store_true', help="overwrite existing full blocks")
parser.add_argument("--radius", type=int, default=100, help="radius in km")
parser.add_argument("--debug", action='store_true', default=False)
parser.add_argument("--spacing", type=int, default=100, help="grid spacing in meters")
args = parser.parse_args()
downloader = srtm.SRTMDownloader(debug=args.debug)
downloader.loadFileList()
GRID_SPACING = args.spacing
done = set()
for dx in range(-args.radius, args.radius):
for dy in range(-args.radius, args.radius):
(lat2,lon2) = add_offset(args.lat*1e7, args.lon*1e7, dx*1000.0, dy*1000.0)
lat_int = int(round(lat2 * 1.0e-7))
lon_int = int(round(lon2 * 1.0e-7))
tag = (lat_int, lon_int)
if tag in done:
continue
done.add(tag)
create_degree(lat_int, lon_int)
create_degree(args.lat, args.lon)
|
gpl-3.0
| 3,621,082,822,801,622,500
| 36.749164
| 124
| 0.602552
| false
| 3.094872
| false
| false
| false
|
sxslex/rows
|
rows/utils.py
|
1
|
4354
|
# coding: utf-8
# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import os
import tempfile
from collections import Iterator
from unicodedata import normalize
import requests
import rows
# TODO: create functions to serialize/deserialize data
SLUG_CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_'
def slug(text, encoding=None, separator='_', permitted_chars=SLUG_CHARS,
replace_with_separator=' -_'):
if isinstance(text, str):
text = text.decode(encoding or 'ascii')
clean_text = text.strip()
for char in replace_with_separator:
clean_text = clean_text.replace(char, separator)
double_separator = separator + separator
while double_separator in clean_text:
clean_text = clean_text.replace(double_separator, separator)
ascii_text = normalize('NFKD', clean_text).encode('ascii', 'ignore')
strict_text = [x for x in ascii_text if x in permitted_chars]
text = ''.join(strict_text).lower()
if text.startswith(separator):
text = text[len(separator):]
if text.endswith(separator):
text = text[:-len(separator)]
return text
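# Hedged examples for slug() (added commentary):
#   slug('Álvaro Justen')   -> 'alvaro_justen'
#   slug(' First -- Name ') -> 'first_name'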
def ipartition(iterable, partition_size):
if not isinstance(iterable, Iterator):
iterator = iter(iterable)
else:
iterator = iterable
finished = False
while not finished:
data = []
for _ in range(partition_size):
try:
data.append(iterator.next())
except StopIteration:
finished = True
break
yield data
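# Behaviour sketch (added commentary): list(ipartition(range(5), 2)) yields
# [[0, 1], [2, 3], [4]]; when len(iterable) is an exact multiple of
# partition_size, a trailing empty partition is also yielded.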
def download_file(uri):
response = requests.get(uri)
content = response.content
# TODO: try to guess with uri.split('/')[-1].split('.')[-1].lower()
try:
content_type = response.headers['content-type']
plugin_name = content_type.split('/')[-1]
except (KeyError, IndexError):
try:
plugin_name = uri.split('/')[-1].split('.')[-1].lower()
except IndexError:
raise RuntimeError('Could not identify file type.')
tmp = tempfile.NamedTemporaryFile()
filename = '{}.{}'.format(tmp.name, plugin_name)
tmp.close()
with open(filename, 'wb') as fobj:
fobj.write(content)
return filename
def get_uri_information(uri):
if uri.startswith('http://') or uri.startswith('https://'):
should_delete = True
filename = download_file(uri)
else:
should_delete = False
filename = uri
plugin_name = filename.split('.')[-1].lower()
if plugin_name == 'htm':
plugin_name = 'html'
elif plugin_name == 'text':
plugin_name = 'txt'
elif plugin_name == 'json':
plugin_name = 'pjson'
return should_delete, filename, plugin_name
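# Illustrative results (added commentary; paths are hypothetical, and the
# remote case depends on the served content-type):
#   get_uri_information('/tmp/data.csv')      -> (False, '/tmp/data.csv', 'csv')
#   get_uri_information('http://host/a.json') -> (True, <downloaded tmp file>, 'pjson')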
def import_from_uri(uri, *args, **kwargs):
# TODO: support '-' also
should_delete, filename, plugin_name = get_uri_information(uri)
try:
import_function = getattr(rows, 'import_from_{}'.format(plugin_name))
except AttributeError:
raise ValueError('Plugin (import) "{}" not found'.format(plugin_name))
with open(filename) as fobj:
table = import_function(fobj, *args, **kwargs)
if should_delete:
os.unlink(filename)
return table
def export_to_uri(uri, table, *args, **kwargs):
# TODO: support '-' also
plugin_name = uri.split('.')[-1].lower()
try:
export_function = getattr(rows, 'export_to_{}'.format(plugin_name))
except AttributeError:
raise ValueError('Plugin (export) "{}" not found'.format(plugin_name))
export_function(table, uri, *args, **kwargs)
|
gpl-3.0
| -4,946,662,462,204,558,000
| 29.229167
| 78
| 0.643924
| false
| 4.049302
| false
| false
| false
|
goosechooser/cps2-zmq
|
cps2zmq/gather/Broker.py
|
1
|
10126
|
# pylint: disable=E1101
"""
Contains Broker, WorkerRepresentative, and ServiceQueue classes.
"""
import sys
import logging
import zmq
from zmq.eventloop.zmqstream import ZMQStream
from zmq.eventloop.ioloop import IOLoop, PeriodicCallback
from cps2zmq.gather import mdp, log
HB_INTERVAL = 1000
HB_LIVENESS = 3
class Broker(object):
"""
    Broker receives messages sent by an instance of MAME, and passes them to workers \
    for processing.
Attributes:
context (:obj:`zmq.Context`): required by ZMQ to make the magic happen.
port (str): the port the serversub socket binds to.
serversub (:obj:`zmq.Context.socket`): A zmq socket set to SUB.\
MameClients connect and send messages here.
toworkers (str): the address to push work out on
backend (:obj:`zmq.Context.socket`): A zmq socket set to ROUTER. \
Routes work to the worker that requested it.
backstream (:obj:`zmq.eventloop.zmqstream.ZMQStream`): Used for registering callbacks \
with the backend socket.
msgs_recv (int): Total number of messages received.
workers (list of threads): Pool to keep track of workers.
"""
WPROTOCOL = b'MDPW01'
msgs_recv = 0
def __init__(self, front_addr, toworkers, log_to_file=False):
loop = IOLoop.instance()
context = zmq.Context.instance()
self.front_addr = front_addr
front = context.socket(zmq.ROUTER)
front.setsockopt(zmq.LINGER, 0)
back = context.socket(zmq.ROUTER)
back.setsockopt(zmq.LINGER, 0)
self.frontstream = ZMQStream(front, loop)
self.frontstream.on_recv(self.handle_frontend)
self.frontstream.bind(front_addr)
self.backstream = ZMQStream(back, loop)
self.backstream.on_recv(self.handle_backend)
self.backstream.bind(toworkers)
self._logger = None
self.workers = {}
self.services = {}
self.heartbeater = None
self.setup_logging(log_to_file)
def setup(self):
"""
Sets up the heartbeater callback.
"""
self.heartbeater = PeriodicCallback(self.beat, HB_INTERVAL)
self.heartbeater.start()
def setup_logging(self, log_to_file):
name = self.__class__.__name__
self._logger = log.configure(name, fhandler=log_to_file)
def shutdown(self):
"""
Closes all associated zmq sockets and streams.
"""
self._logger.info('Closing\n')
if self.frontstream:
self.frontstream.socket.close()
self.frontstream.close()
self.frontstream = None
if self.backstream:
self.backstream.socket.close()
self.backstream.close()
self.backstream = None
if self.heartbeater:
self.heartbeater.stop()
self.heartbeater = None
self.workers = {}
self.services = {}
def start(self):
"""
Start the server
"""
self._logger.info('Starting at address %s', self.front_addr)
self.setup()
IOLoop.instance().start()
def report(self):
self._logger.info('Received %s messages', self.msgs_recv)
def beat(self):
"""
Checks for dead workers and removes them.
"""
for w in list(self.workers.values()):
if not w.is_alive():
self.unregister_worker(w.idn)
def register_worker(self, idn, service):
"""
Registers any worker who sends a READY message.
Allows the broker to keep track of heartbeats.
Args:
idn (bytes): the id of the worker.
service (byte-string): the service the work does work for.
"""
self._logger.info('Registering worker %s', idn)
if idn not in self.workers:
self.workers[idn] = WorkerRepresentative(self.WPROTOCOL, idn, service, self.backstream)
if service in self.services:
wq, wr = self.services[service]
wq.put(idn)
else:
self._logger.info('Adding %s to services', service)
q = ServiceQueue()
q.put(idn)
self.services[service] = (q, [])
def unregister_worker(self, idn):
"""
Unregisters a worker from the server.
Args:
idn (bytes): the id of the worker
"""
self._logger.info('Unregistering worker %s', idn)
self.workers[idn].shutdown()
service = self.workers[idn].service
if service in self.services:
wq, wr = self.services[service]
wq.remove(idn)
del self.workers[idn]
def disconnect_worker(self, idn, socket):
"""
Tells worker to disconnect from the server, then unregisters the worker.
Args:
idn (bytes): id of the worker
socket (zmq.socket): which socket to send the message out from
"""
try:
socket.send_multipart([idn, b'', self.WPROTOCOL, mdp.DISCONNECT])
except TypeError as err:
self._logger.error('Encountered error', exc_info=True)
self._logger.info('Disconnecting worker %s', idn)
self.unregister_worker(idn)
def handle_frontend(self, msg):
"""
Callback. Handles messages received from clients.
"""
client_addr = msg.pop(0)
empty = msg.pop(0)
protocol = msg.pop(0)
service = msg.pop(0)
service = service.decode('utf-8')
request = msg[0]
if service == 'disconnect':
# Need to determine how many packets are lost doing this.
self._logger.info('Received disconnect command. Server disconnecting workers')
for w in list(self.workers):
self.disconnect_worker(w, self.backstream.socket)
IOLoop.instance().stop()
else:
self.msgs_recv += 1
try:
wq, wr = self.services[service]
idn = wq.get()
if idn:
self.send_request(self.backstream, idn, client_addr, request)
else:
wr.append(request)
except KeyError:
self._logger.error('Encountered error with service %s', service, exc_info=True)
def handle_backend(self, msg):
"""
Callback. Handles messages received from workers.
"""
worker_idn = msg.pop(0)
empty = msg.pop(0)
protocol = msg.pop(0)
command = msg.pop(0)
if command == mdp.READY:
self.register_worker(worker_idn, msg.pop().decode('utf-8'))
elif command == mdp.REPLY:
client_addr, _, message = msg
service = self.workers[worker_idn].service
try:
wq, wr = self.services[service]
# send it wherever
wq.put(worker_idn)
if wr:
msg = wr.pop(0)
self.send_request(self.backstream, worker_idn, client_addr, msg)
except KeyError as err:
self._logger.error('Encountered error with service %s', service, exc_info=True)
elif command == mdp.HEARTBEAT:
worker = self.workers[worker_idn]
if worker.is_alive():
worker.recv_heartbeat()
elif command == mdp.DISCONNECT:
self.unregister_worker(worker_idn)
else:
self.disconnect_worker(worker_idn, self.backstream)
def send_request(self, socket, idn, client_addr, msg):
"""
Helper function. Formats and sends a request.
Args:
socket (zmq.socket): socket to send message out from
idn (bytes): id of worker to label message with
client_addr (bytes): addr of client requesting the work
msg (list): the message to be processed
"""
request_msg = [idn, b'', self.WPROTOCOL, mdp.REQUEST, client_addr, b'', msg]
socket.send_multipart(request_msg)
class WorkerRepresentative(object):
"""
Represents a worker connected to the server.
Handles heartbeats between the server and a specific worker.
"""
def __init__(self, protocol, idn, service, stream):
self.protocol = protocol
self.idn = idn
self.service = service
self.current_liveness = HB_LIVENESS
self.stream = stream
self.last_heartbeat = 0
self.heartbeater = PeriodicCallback(self.heartbeat, HB_INTERVAL)
self.heartbeater.start()
def heartbeat(self):
"""
Callback. Periodically sends a heartbeat message to associated worker.
"""
self.current_liveness -= 1
self.stream.send_multipart([self.idn, b'', self.protocol, mdp.HEARTBEAT])
def recv_heartbeat(self):
"""
Refreshes current_liveness when a heartbeat message is received from associated worker.
"""
self.current_liveness = HB_LIVENESS
def is_alive(self):
"""
Helper function.
Returns:
False if current_liveness is under 0, True otherwise
"""
return self.current_liveness > 0
def shutdown(self):
"""
Cleans up!
"""
self.heartbeater.stop()
self.heartbeater = None
self.stream = None
class ServiceQueue(object):
"""
Its a queue.
"""
def __init__(self):
self.q = []
def __contains__(self, idn):
        return idn in self.q
def __len__(self):
return len(self.q)
def remove(self, idn):
"""
Removes from the queue.
"""
try:
self.q.remove(idn)
except ValueError:
pass
def put(self, idn):
"""
Put something in the queue.
"""
if idn not in self.q:
self.q.append(idn)
def get(self):
"""
Get something from the queue.
"""
if not self.q:
return None
return self.q.pop(0)
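# Wiring sketch (added commentary; the addresses are illustrative and this is
# not part of the original module):
#   broker = Broker('tcp://*:5555', 'tcp://*:5556')
#   broker.start()  # runs the IOLoop until a client sends the 'disconnect' service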
|
mit
| 1,896,932,177,797,409,500
| 29.408408
| 99
| 0.569326
| false
| 4.045545
| false
| false
| false
|
NicolasKiely/Ackermann
|
Ackermann.py
|
1
|
3454
|
''' Evaluates Ackermann function
Adopted from here: http://www.eprg.org/computerphile/recursion.htm
Usage:
python Ackermann.py <brute|cache> <m> <n>
Where
<brute|cache> specifies whether to enable the cache
<m> is the first parameter of the Ackermann function
<n> is the second parameter of the Ackermann function
'''
import sys
class Ackermann(object):
''' Wrapper class for the ackerman function '''
def __init__(self, use_cache):
''' Initialize, setup cache if use_cache==True '''
# Number of function calls
self.call_count = 0
self.use_cache = use_cache
if use_cache:
# Cache of evaluated (m,n) => f(m,n) pairs
self.cache = {}
def evaluate(self, m, n):
''' Evaluates ackermann function recursively '''
# Increment call count
self.call_count += 1
if self.use_cache:
# Check cache
if (m, n) in self.cache:
return self.cache[(m, n)]
if m == 0:
results = n + 1
elif n == 0:
results = self.evaluate(m-1, 1)
else:
results = self.evaluate(m-1, self.evaluate(m, n-1))
if self.use_cache:
# Save to cache
self.cache[(m, n)] = results
return results
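# Worked example (added commentary): Ackermann(use_cache=True).evaluate(2, 3)
# returns 9, matching the identity A(2, n) = 2n + 3; with the cache enabled the
# call count drops because repeated (m, n) pairs are answered from the dict.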
def print_usage():
print 'Program Usage:'
print '\tpython %s <brute|cache> <m> <n>' % sys.argv[0]
print 'Where:'
print '\t<brute|cache> specifies whether to enable the cache'
print '\t<m> is the first parameter of the Ackermann function'
print '\t<n> is the second parameter of the Ackermann function'
# Acceptable arguments for setting cache
acceptable_nocache_args = ('brute', 'no', 'n')
acceptable_yescache_args = ('cache', 'yes', 'y')
# Message shown when bad ackermann argument passed
bad_number_msg = 'Error, expected positive integer %s argument, got "%s"'
# main()
if __name__ == '__main__':
# Check number of arguments
if len(sys.argv) != 4:
print_usage()
exit()
# Check cache argument
par_cache = sys.argv[1].lower()
if par_cache in acceptable_nocache_args:
use_cache = False
elif par_cache in acceptable_yescache_args:
use_cache = True
else:
# Could not parse first argument
        print 'Error, could not understand cache arg "%s"' % par_cache
print 'To use the cache, valid strings are: '
print '\t' + ', '.join(acceptable_yescache_args)
print 'To not use the cache, valid strings are: '
print '\t' + ', '.join(acceptable_nocache_args)
print
print_usage()
exit()
    # Check the m and n arguments
ack_pars = [0, 0]
for i, name in enumerate(('<m>', '<n>')):
try:
# Cast parameter to integer
par = sys.argv[2+i]
ack_pars[i] = int(par)
# Make sure parameter is positive
if ack_pars[i] < 0:
raise ValueError
except ValueError:
# Handle casting error
print bad_number_msg % (name, par)
print
print_usage()
exit()
# Argument parsing done, now setup ackermann function and evaluate
ack = Ackermann(use_cache)
results = ack.evaluate(*ack_pars)
# Show results
print 'Ackermann(%d, %d) is: %d' % (ack_pars[0], ack_pars[1], results)
print 'Number of calls: %d' % ack.call_count
|
mit
| 879,025,367,012,459,400
| 27.081301
| 74
| 0.571801
| false
| 3.742145
| false
| false
| false
|
alberthdev/nclayer
|
nc_diag_attr/nc_diag_attr.py
|
1
|
9821
|
# nc_diag_attr
from netCDF4 import Dataset, getlibversion
import netCDF4
import argparse
import sys
import traceback
import numpy
try:
import ujson as json
except:
import json
# Version information
__version__ = "0.9b"
VERSION_STR = 'nc_diag_attr v' + __version__ + "\n\n" + \
"Using the following library/runtime versions:\n" + \
(" netcdf4-python v%s\n" % netCDF4.__version__) + \
(" NetCDF v%s\n" % getlibversion()) + \
(" HDF5 v%s\n" % netCDF4.__hdf5libversion__) + \
(" Python v%s\n" % sys.version.split("\n")[0].strip())
# CLI Arguments
global args
def parse_cli_args():
global args
    parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="Tool to add/modify global and variable attributes for NetCDF files",
version = VERSION_STR)
disable_group = parser.add_mutually_exclusive_group()
parser.add_argument("-V", "--verbose",
dest="verbose", action="store_true", default=False,
help = "enable verbose output")
parser.add_argument("-p", "--pretty",
dest="pretty_output", action="store_true", default=False,
help = "enable colorful, pretty output - don't enable if logging")
disable_group.add_argument("-ng", "--no-global",
dest="global_attributes", action="store_false", default=True,
help = "disable global attribute adding/modifying")
disable_group.add_argument("-nv", "--no-var",
dest="var_attributes", action="store_false", default=True,
help = "disable variable attribute adding/modifying")
parser.add_argument("-rc", metavar = "RESOURCE_FILE", dest="resource_file",
help = "input JSON resource file name with attributes to write", required = True)
parser.add_argument("nc4_files", help = "NetCDF4 files to apply attributes to", nargs="+")
args = parser.parse_args()
def error_msg(msg):
global args
if args.pretty_output:
print("\033[31m ** ERROR: %s\033[0m" % msg)
else:
print(" ** ERROR: %s" % msg)
def warning_msg(msg):
global args
if args.verbose:
if args.pretty_output:
print("\033[33m ** WARNING: %s\033[0m" % msg)
else:
print(" ** WARNING: %s" % msg)
def info_msg(msg):
global args
if args.verbose:
if args.pretty_output:
print("\033[34m ** INFO: %s\033[0m" % msg)
else:
print(" ** INFO: %s" % msg)
global current_line
current_line = ""
# ANSI line updater - if enabled!
def line_msg(msg):
global args, current_line
if args.pretty_output:
# Move cursor to beginning:
sys.stdout.write("\r")
# Erase the current line
sys.stdout.write(len(current_line) * " ")
# Backspace back to the beginning (we could use \r here...)
sys.stdout.write(len(current_line) * "\b")
# Print new message
sys.stdout.write(msg)
# Go back to beginning
sys.stdout.write(len(msg) * "\b")
# Flush output - if not flushed, output may not show up
sys.stdout.flush()
# Set new current line
current_line = msg
else:
print(msg)
def line_msg_done():
global args, current_line
if args.verbose and args.pretty_output:
# Move down from current line and erase current line buffer
sys.stdout.write("\n")
sys.stdout.flush()
current_line = ""
global entry_num, entry_total, entry_str
def init_counter(total_ele, entry):
global entry_num, entry_total, entry_str
if args.verbose:
entry_num = 0
entry_total = total_ele
entry_str = entry
def progress_counter(filename):
global entry_num, entry_total, entry_str
if args.verbose:
entry_num += 1
line_msg("%s %i/%i: %s" % (entry_str, entry_num, entry_total, filename))
def main():
# Parse arguments
parse_cli_args()
# Sanity checks
# Check to make sure that the JSON resource file exists!
try:
resource_file_fh = open(args.resource_file, "r")
except IOError:
error_msg("Resource file '%s' is not accessible or does not exist!" % args.resource_file)
exit(1)
# Check to make sure that the JSON resource file is valid!
try:
resource_data = json.loads(resource_file_fh.read())
except KeyboardInterrupt:
info_msg("CTRL-C detected, exiting.")
exit(0)
except:
error_msg("Resource file '%s' is not a valid JSON file!" % args.resource_file)
print(traceback.format_exc())
exit(1)
# Close file - we got the data already!
resource_file_fh.close()
# Print verbose version information
if args.verbose:
info_msg("Using following versions:")
info_msg(" netcdf4-python v%s" % netCDF4.__version__)
info_msg(" NetCDF v%s" % getlibversion())
info_msg(" HDF5 v%s" % netCDF4.__hdf5libversion__)
info_msg(" Python v%s\n" % sys.version.split("\n")[0].strip())
info_msg("Reading and validating NetCDF4 files...")
# Check to make sure the NetCDF4 files are legitimate!
nc4_files_root = []
init_counter(len(args.nc4_files), "Reading/verifying file")
for nc4_file in args.nc4_files:
try:
open(nc4_file, "r").close()
except KeyboardInterrupt:
info_msg("CTRL-C detected, exiting.")
exit(0)
except IOError:
error_msg("The NetCDF4 file '%s' does not exist!" % nc4_file)
exit(1)
progress_counter(nc4_file)
try:
rootgrp = Dataset(nc4_file, "a", format="NETCDF4")
nc4_files_root.append({ "file" : nc4_file, "group" : rootgrp })
except KeyboardInterrupt:
info_msg("CTRL-C detected, exiting.")
exit(0)
except:
error_msg("'%s' is not a valid NetCDF4 file!" % nc4_file)
exit(1)
line_msg_done()
# Global attributes
if args.global_attributes:
# Check if we have a global attributes entry in the resource file
if not "global_attributes" in resource_data:
warning_msg("Resource file '%s' does not have any global attributes, skipping." % args.resource_file)
else:
# Initialize our counter
init_counter(len(nc4_files_root), "Applying global attributes to file")
for nc4_entry in nc4_files_root:
# Update progress counter
progress_counter(nc4_entry["file"])
for global_attr_key in resource_data["global_attributes"]:
global_attr_val = resource_data["global_attributes"][global_attr_key]
# We need to convert unicode to ASCII
if type(global_attr_val) == unicode:
global_attr_val = str(global_attr_val)
# BUG fix - NetCDF really, really, REALLY does not like
# 64-bit integers. We forcefully convert the value to a
# 32-bit signed integer, with some help from numpy!
if type(global_attr_val) == int:
global_attr_val = numpy.int32(global_attr_val)
setattr(nc4_entry["group"], global_attr_key, global_attr_val)
line_msg_done()
# Variable attributes
if args.var_attributes:
# Check if we have a variable attributes entry in the resource file
if not "variable_attributes" in resource_data:
warning_msg("Resource file '%s' does not have any variable attributes, skipping." % args.resource_file)
else:
# Initialize our counter
init_counter(len(nc4_files_root), "Applying variable attributes to file")
for nc4_entry in nc4_files_root:
# Update progress counter
progress_counter(nc4_entry["file"])
# Iterate through all of our var_attr variables
for var in resource_data["variable_attributes"]:
if var in nc4_entry["group"].variables.keys():
for var_attr_key in resource_data["variable_attributes"][var]:
var_attr_val = resource_data["variable_attributes"][var][var_attr_key]
var_attr_key = str(var_attr_key)
# We need to convert unicode to ASCII
if type(var_attr_val) == unicode:
var_attr_val = list(str(var_attr_val))
# BUG fix - NetCDF really, really, REALLY does not like
# 64-bit integers. We forcefully convert the value to a
# 32-bit signed integer, with some help from numpy!
if type(var_attr_val) == int:
var_attr_val = numpy.int32(var_attr_val)
setattr(nc4_entry["group"].variables[var], var_attr_key, var_attr_val)
else:
warning_msg("Can't find variable %s in file %s!" % (var, nc4_entry["file"]))
line_msg_done()
# Close everything
init_counter(len(nc4_files_root), "Saving changes to file")
for nc4_entry in nc4_files_root:
progress_counter(nc4_entry["file"])
nc4_entry["group"].close()
line_msg_done()
info_msg("Attribute appending complete!")
if __name__ == "__main__":
main()
|
apache-2.0
| 8,458,928,210,845,927,000
| 36.060377
| 115
| 0.562875
| false
| 3.982563
| false
| false
| false
|
BitcoinUnlimited/BitcoinUnlimited
|
qa/rpc-tests/excessive.py
|
1
|
14205
|
#!/usr/bin/env python3
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
# Test emergent consensus scenarios
import time
import random
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.util import *
from test_framework.blocktools import *
import test_framework.script as script
import pdb
import sys
if sys.version_info[0] < 3:
raise "Use Python 3"
import logging
def mostly_sync_mempools(rpc_connections, difference=50, wait=1, verbose=1):
"""
    Wait until everybody has most of the same transactions in their memory
pools. There is no guarantee that mempools will ever sync due to the
filterInventoryKnown bloom filter.
"""
iterations = 0
while True:
iterations += 1
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
poolLen = [len(pool)]
for i in range(1, len(rpc_connections)):
tmp = set(rpc_connections[i].getrawmempool())
            if tmp == pool:
                num_match = num_match + 1
            # elif (not a second if): counting both conditions could push num_match
            # past len(rpc_connections), so the equality test below would never pass
            elif iterations > 10 and len(tmp.symmetric_difference(pool)) < difference:
                num_match = num_match + 1
poolLen.append(len(tmp))
if verbose:
logging.info("sync mempool: " + str(poolLen))
if num_match == len(rpc_connections):
break
time.sleep(wait)
class ExcessiveBlockTest (BitcoinTestFramework):
def __init__(self, extended=False):
self.extended = extended
BitcoinTestFramework.__init__(self)
def setup_network(self, split=False):
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug=net", "-debug=graphene", "-usecashaddr=0", "-rpcservertimeout=0"], timewait=60 * 10))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug=net", "-debug=graphene", "-usecashaddr=0", "-rpcservertimeout=0"], timewait=60 * 10))
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug=net", "-debug=graphene", "-usecashaddr=0", "-rpcservertimeout=0"], timewait=60 * 10))
self.nodes.append(start_node(3, self.options.tmpdir, ["-debug=net", "-debug=graphene", "-usecashaddr=0", "-rpcservertimeout=0"], timewait=60 * 10))
interconnect_nodes(self.nodes)
self.is_network_split = False
self.sync_all()
        if 0: # getnewaddress can be painfully slow. This bit of code can be used during development to
# create a wallet with lots of addresses, which then can be used in subsequent runs of the test.
# It is left here for developers to manually enable.
TEST_SIZE = 100 # TMP 00
print("Creating addresses...")
self.nodes[0].keypoolrefill(TEST_SIZE + 1)
addrs = [self.nodes[0].getnewaddress() for _ in range(TEST_SIZE + 1)]
with open("walletAddrs.json", "w") as f:
f.write(str(addrs))
pdb.set_trace()
def run_test(self):
BitcoinTestFramework.run_test(self)
self.testCli()
# clear out the mempool
for n in self.nodes:
while len(n.getrawmempool()):
n.generate(1)
sync_blocks(self.nodes)
logging.info("cleared mempool: %s" % str([len(x) for x in [y.getrawmempool() for y in self.nodes]]))
self.testExcessiveBlockSize()
def testCli(self):
# Assumes the default excessive at 32MB and mining at 8MB
try:
self.nodes[0].setminingmaxblock(33000000)
except JSONRPCException as e:
pass
else:
assert(0) # was able to set the mining size > the excessive size
try:
self.nodes[0].setminingmaxblock(99)
except JSONRPCException as e:
pass
else:
assert(0) # was able to set the mining size below our arbitrary minimum
try:
self.nodes[0].setexcessiveblock(1000, 10)
except JSONRPCException as e:
pass
else:
assert(0) # was able to set the excessive size < the mining size
def sync_all(self):
"""Synchronizes blocks and mempools (mempools may never fully sync)"""
if self.is_network_split:
sync_blocks(self.nodes[:2])
sync_blocks(self.nodes[2:])
mostly_sync_mempools(self.nodes[:2])
mostly_sync_mempools(self.nodes[2:])
else:
sync_blocks(self.nodes)
mostly_sync_mempools(self.nodes)
def expectHeights(self, blockHeights, waittime=10):
loop = 0
count = []
while loop < waittime:
counts = [x.getblockcount() for x in self.nodes]
if counts == blockHeights:
return True # success!
else:
for (a, b) in zip(counts, blockHeights):
if a > b: # compare per-node heights, not the whole lists
assert False, "blockchain synced too far" # assert(<str>) was always true and never fired
time.sleep(.25)
loop += .25
if int(loop) == loop and (int(loop) % 10) == 0:
logging.info("...waiting %f %s != %s" % (loop, counts, blockHeights))
return False
def repeatTx(self, count, node, addr, amt=1.0):
for i in range(0, count):
node.sendtoaddress(addr, amt)
def generateAndPrintBlock(self, node):
hsh = node.generate(1)
inf = node.getblock(hsh[0])
logging.info("block %d size %d" % (inf["height"], inf["size"]))
return hsh
def testExcessiveBlockSize(self):
# get spendable coins
if 0:
for n in self.nodes:
n.generate(1)
self.sync_all()
self.nodes[0].generate(100)
# Set the accept depth at 1, 2, and 3 and watch each node resist the chain for that long
self.nodes[0].setminingmaxblock(5000) # keep the generated blocks within 16*the EB so no disconnects
self.nodes[1].setminingmaxblock(1000)
self.nodes[2].setminingmaxblock(1000)
self.nodes[3].setminingmaxblock(1000)
self.nodes[1].setexcessiveblock(1000, 1)
self.nodes[2].setexcessiveblock(1000, 2)
self.nodes[3].setexcessiveblock(1000, 3)
logging.info("Test excessively sized block, not propagating until accept depth is exceeded")
addr = self.nodes[3].getnewaddress()
# By using a very small value, it is likely that a single input is used. This is important because
# our mined block size is so small in this test that if multiple inputs are used the transactions
# might not fit in the block. This will give us a short block when the test expects a larger one.
# To catch any of these short-block test malfunctions, the block size is printed out.
self.repeatTx(8, self.nodes[0], addr, .001)
counts = [x.getblockcount() for x in self.nodes]
base = counts[0]
logging.info("Starting counts: %s" % str(counts))
logging.info("node0")
self.generateAndPrintBlock(self.nodes[0])
assert_equal(True, self.expectHeights([base + 1, base, base, base]))
logging.info("node1")
self.nodes[0].generate(1)
assert_equal(True, self.expectHeights([base + 2, base + 2, base, base]))
logging.info("node2")
self.nodes[0].generate(1)
assert_equal(True, self.expectHeights([base + 3, base + 3, base + 3, base]))
logging.info("node3")
self.nodes[0].generate(1)
assert_equal(True, self.expectHeights([base + 4] * 4))
# Now generate another excessive block, but all nodes should snap right to
# it because they have an older excessive block
logging.info("Test immediate propagation of additional excessively sized block, due to prior excessive")
self.repeatTx(8, self.nodes[0], addr, .001)
self.nodes[0].generate(1)
assert_equal(True, self.expectHeights([base + 5] * 4))
logging.info("Test daily excessive reset")
# Now generate a day's worth of small blocks which should re-enable the
# nodes' reluctance to accept a large block
self.nodes[0].generate(6 * 24)
sync_blocks(self.nodes)
self.nodes[0].generate(5) # plus the accept depths
sync_blocks(self.nodes)
self.repeatTx(8, self.nodes[0], addr, .001)
base = self.nodes[0].getblockcount()
self.generateAndPrintBlock(self.nodes[0])
time.sleep(2) # give blocks a chance to fully propagate
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 1, base, base, base])
self.repeatTx(8, self.nodes[0], addr, .001)
self.generateAndPrintBlock(self.nodes[0])
time.sleep(2) # give blocks a chance to fully propagate
sync_blocks(self.nodes[0:2])
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 2, base + 2, base, base])
self.repeatTx(5, self.nodes[0], addr, .001)
self.generateAndPrintBlock(self.nodes[0])
time.sleep(2) # give blocks a chance to fully propagate
sync_blocks(self.nodes[0:3])
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 3, base + 3, base + 3, base])
self.repeatTx(5, self.nodes[0], addr, .001)
self.generateAndPrintBlock(self.nodes[0])
sync_blocks(self.nodes)
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 4] * 4)
self.repeatTx(5, self.nodes[0], addr, .001)
self.generateAndPrintBlock(self.nodes[0])
sync_blocks(self.nodes)
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 5] * 4)
if self.extended:
logging.info("Test daily excessive reset #2")
# Now generate a day's worth of small blocks which should re-enable the
# nodes' reluctance to accept a large block, + 10 because we have to get
# beyond all the nodes' accept depths
self.nodes[0].generate(6 * 24 + 10)
sync_blocks(self.nodes)
# counts = [ x.getblockcount() for x in self.nodes ]
self.nodes[1].setexcessiveblock(100000, 1) # not sure how big the txns will be but smaller than this
self.nodes[1].setminingmaxblock(100000) # not sure how big the txns will be but smaller than this
self.repeatTx(20, self.nodes[0], addr, .001)
base = self.nodes[0].getblockcount()
self.generateAndPrintBlock(self.nodes[0])
time.sleep(2) # give blocks a chance to fully propagate
sync_blocks(self.nodes[0:2])
counts = [x.getblockcount() for x in self.nodes]
assert_equal(counts, [base + 1, base + 1, base, base])
if self.extended:
logging.info("Random test")
randomRange = 3
else:
randomRange = 0
for i in range(0, randomRange):
logging.info("round %d" % i)
for n in self.nodes:
size = random.randint(1, 1000) * 1000
try: # since miningmaxblock must be <= excessiveblock, raising/lowering may require these two calls in a different order
n.setminingmaxblock(size)
n.setexcessiveblock(size, random.randint(0, 10))
except JSONRPCException:
n.setexcessiveblock(size, random.randint(0, 10))
n.setminingmaxblock(size)
addrs = [x.getnewaddress() for x in self.nodes]
ntxs = 0
for i in range(0, random.randint(1, 20)):
try:
n = random.randint(0, 3)
logging.info("%s: Send to %d" % (ntxs, n))
self.nodes[n].sendtoaddress(addrs[random.randint(0, 3)], .1)
ntxs += 1
except JSONRPCException: # could be spent all the txouts
pass
logging.info("%d transactions" % ntxs)
time.sleep(1) # allow txns a chance to propagate
self.nodes[random.randint(0, 3)].generate(1)
logging.info("mined a block")
# TODO: rather than sleeping we should really be putting a check in here
# based on what the random excessive selections were from above
time.sleep(5) # allow block a chance to propagate
# the random test can cause disconnects if the block size is very large compared to excessive size
# so reconnect
interconnect_nodes(self.nodes)
if __name__ == '__main__':
if "--extensive" in sys.argv:
longTest = True
# we must remove duplicate 'extensive' arg here
while True:
try:
sys.argv.remove('--extensive')
except ValueError: # remove() raises ValueError once no copies remain
break
logging.info("Running extensive tests")
else:
longTest = False
ExcessiveBlockTest(longTest).main()
def info(type, value, tb):
if hasattr(sys, 'ps1') or not sys.stderr.isatty():
# we are in interactive mode or we don't have a tty-like
# device, so we call the default hook
sys.__excepthook__(type, value, tb)
else:
import traceback
import pdb
# we are NOT in interactive mode, print the exception...
traceback.print_exception(type, value, tb)
print()
# ...then start the debugger in post-mortem mode.
pdb.pm()
sys.excepthook = info
def Test():
t = ExcessiveBlockTest()
t.drop_to_pdb = True
bitcoinConf = {
"debug": ["rpc", "net", "blk", "thin", "mempool", "req", "bench", "evict"],
"blockprioritysize": 2000000, # we don't want any transactions rejected due to insufficient fees...
"blockminsize": 1000000
}
flags = standardFlags()
t.main(flags, bitcoinConf, None)
|
mit
| -6,959,768,802,692,988,000
| 40.173913
| 155
| 0.600282
| false
| 3.80627
| true
| false
| false
|
UCSD-E4E/radio_collar_tracker_drone
|
scripts/ground_control_software/heatMap.py
|
1
|
1174
|
#!/usr/bin/env python3
import generateKML
import pos_estimate
import numpy as np
import utm
def findMax( someList ):
tempMax = someList[0]
for i in someList:
if tempMax < i:
tempMax = i
return tempMax
def findMin( someList ):
tempMin = someList[0]
for i in someList:
if tempMin > i:
tempMin = i
return tempMin
# data is in form [[x,y,z,rd],[x,y,z,rd],...] in utm
def generateHeatMap( data ):
minHeatDim = [ int( min( data[:,1] ) ), int( min( data[:,0] ) ) ]
maxHeatDim = [ int( max( data[:,1] ) ), int( max( data[:,0] ) ) ]
heatMap = np.zeros(( maxHeatDim[0] - minHeatDim[0] + 1, \
maxHeatDim[1] - minHeatDim[1] + 1 ))
for x, y, z, rd in data:
heatMap[int(y - minHeatDim[0]), int(x - minHeatDim[1])] = 1 # row offset is y - minY, column offset is x - minX
# getUTMZone is assumed to be a method on a custom data wrapper (a plain
# numpy array has no such attribute), so it must be called, not indexed.
zonenum = data.getUTMZone()[0]
zone = data.getUTMZone()[1]
coords = [[minHeatDim[0],maxHeatDim[1]],
[maxHeatDim[0],maxHeatDim[1]],
[maxHeatDim[0],minHeatDim[1]],
[minHeatDim[0],minHeatDim[1]]]
ll = [utm.to_latlon( x[0], x[1], zonenum, zone_letter=zone ) for x in coords]
ll = [ [x[1],x[0]] for x in ll ]
# NOTE: `estimate` is not defined in this module; it is presumably computed
# upstream (e.g. by pos_estimate) before this call.
testKML = generateKML.kmlPackage( "NOTICE", estimate, [heatMap, ll] )
generateKML.generateKML( [ testKML ] )
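# Hedged usage sketch (not in the original file): exercising the pure helpers
# on a tiny UTM dataset of [x, y, z, rd] rows. generateHeatMap additionally
# assumes `data` exposes getUTMZone(), so it is not called here.
if __name__ == '__main__':
    sample = np.array([[500000.0, 4100000.0, 30.0, -40.0],
                       [500005.0, 4100003.0, 30.0, -42.0]])
    print(findMin(sample[:, 3]), findMax(sample[:, 3]))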
|
gpl-3.0
| 1,732,163,343,495,752,000
| 26.952381
| 78
| 0.626917
| false
| 2.430642
| false
| false
| false
|
kooksee/myblog
|
BAE.py
|
1
|
11250
|
# -*- coding=utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import tornado.wsgi
import tornado.options
import os.path,os,datetime,sys,time,codecs
import markdown
import tohtml
import db
import json
import tohtml
import base64
import uuid
def conf(): # global site settings
global NAME,Subtitle,description,keywords,Category,UUID
conf = db.db("SELECT SITENAME,subtitle,description,keywords,uuid FROM CONFIG")[0]
NAME = conf[0]
Subtitle = conf[1]
description = conf[2]
keywords = conf[3]
UUID= conf[4]
if not UUID:
UUID=base64.b64encode(uuid.uuid4().bytes + uuid.uuid4().bytes)
print db.exe("UPDATE config SET uuid='%s' WHERE ID=1" % UUID)
Category = [(i[0],i[1]) for i in db.db("SELECT ID,Category FROM Category")]
Category.append((' ',' '))
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
conf()
settings = {
"static_path" : os.path.join(os.path.dirname(__file__), "./static/"),
"template_path" : os.path.join(os.path.dirname(__file__), "./templates/"),
"cookie_secret" : UUID,
"xsrf_cookies" : True,
"login_url": "/login"}
class index(tornado.web.RequestHandler):
def get(self):
self.redirect('/index.html')
class static(tornado.web.RequestHandler):
def get(self,url):
self.write(codecs.open('./html/%s' % url,'r','utf-8').read())
class LoginHandler(tornado.web.RequestHandler):
def get_current_user(self):
return self.get_secure_cookie("WLBLOG")
class Manager(LoginHandler):
@tornado.web.authenticated
def post(self):
pass
@tornado.web.authenticated
def get(self):
conf()
s = db.db("SELECT ID,TITLE,CREATETIME,LEIBIE FROM MY")
LIST = [(i[0],i[1],i[2],i[3]) for i in s]
self.render("admin.html",LIST = LIST,title=NAME,tags='tags',NAME=NAME,Category = Category,Subtitle = Subtitle,description=description,keywords=keywords,)
class Edit(LoginHandler): # article create/edit and category management
@tornado.web.authenticated
def post(self):
TYPE=self.get_argument('TYPE','')
ID=self.get_argument('ID','')
subject = self.get_argument('subject','')
tags = self.get_argument('tags','')
markdown = self.get_argument('markdown','')
Category = self.get_argument('Category','')
if TYPE=='DEL':
Category = Category[0:-1]
SQL = "DELETE FROM Category WHERE ID IN (%s)" % Category
self.write(db.exe(SQL))
elif TYPE=='NEW':
SQL = "INSERT INTO Category (Category) VALUES ('%s')" % Category
self.write(db.exe(SQL))
elif None or "" in (subject,tags,markdown):
self.write(u"主题、标签、类别及内容均不可为空!")
else:
if db.edit(TYPE,subject.encode("utf-8"),tags.encode("utf-8"),markdown.encode("utf-8"),Category.encode("utf-8"),ID):
tohtml.html().ALL()
self.write(u'OK,Thanks!')
else:
self.write(u'Error!')
@tornado.web.authenticated
def get(self):
conf()
markdown = tags = subject = LEIBIE = ID = ''
ID = self.get_argument('id','')
TYPE = self.get_argument('TYPE','')
if ID:
data=db.MARKDOWN(ID)
subject=data[0]
markdown=data[2].replace('\'\'','\'').replace('\\\\','\\')
tags=data[1]
LEIBIE = data[3]
else:
TYPE="ADD"
self.render("Edit.html",markdown=markdown,
subject=subject,
tags=tags,
title=NAME,
NAME=NAME,
description=description,
keywords=keywords,
Category = Category,
Subtitle = Subtitle,
LEIBIE = LEIBIE,
TYPE = TYPE,ID=ID)
class delete(LoginHandler): # article deletion
@tornado.web.authenticated
def get(self):
ID=self.get_argument('ID','')
if db.delete(ID):
tohtml.html().ALL()
os.remove("./html/%s.html" % ID)
self.write("0")
else:
self.write("数据库异常,刪除失败!")
class update(LoginHandler): # global settings update
@tornado.web.authenticated
def post(self):
Result = True
NAME=self.get_argument('bkname','')
Subtitle=self.get_argument('subtitle','')
description=self.get_argument('description','')
keywords=self.get_argument('keywords','')
try:
db.db("update CONFIG SET SITENAME='%s',subtitle='%s',description='%s',keywords='%s' WHERE ID=1 " % (NAME,Subtitle,description,keywords))
except:
self.write("ERROR")
else:
tohtml.html().ALL()
self.write("0")
class userupdate(LoginHandler): # user management
@tornado.web.authenticated
def post(self):
user = self.get_secure_cookie("WLBLOG")
username=self.get_argument('newuser','')
oldpwd=self.get_argument('oldpwd','')
pwd1=self.get_argument('pwd1','')
if db.check(user,oldpwd):
if not username:
username=user
db.db("UPDATE Ver SET PASSWORD='%s',USERNAME='%s' WHERE USERNAME='%s'" % (pwd1,username,user))
self.write("0")
else:
self.write("密码修改失败,请确认你的输入!")
class custom(LoginHandler): # manage friend links, analytics code, Duoshuo comments and article footer content
@tornado.web.authenticated
def get(self):
conf()
try:
DUOSHUO = db.db("SELECT DUOSHUO FROM Ver")[0][0]
except:
DUOSHUO = ''
NAV = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='nav'")
LINK = db.db("SELECT ID,NAME,LINK FROM LINK WHERE TYPE='link'")
LAST = db.db("SELECT ID,NAME,Remark,HTML FROM Other WHERE LOCATION='last'")
self.render("custom.html",title=NAME,NAME=NAME,
Category = Category,
Subtitle = Subtitle,
description=description,
keywords=keywords,DUOSHUO = DUOSHUO,NAV = NAV,
LINK = LINK,LAST = LAST)
def post(self):
CMD = self.get_argument('CMD','')
ID = self.get_argument('ID','')
name = self.get_argument('name','')
TYPE = self.get_argument('TYPE','')
remark = self.get_argument('remark','')
HTML = self.get_argument('EHTML','')
LINK = self.get_argument('LINK','')
if CMD=='DEL':
if TYPE in ('NAV','LINK'):
try:
db.db("DELETE FROM LINK WHERE ID='%s' " % ID)
except:
pass
elif TYPE=='LAST':
try:
db.db("DELETE FROM Other WHERE ID='%s' " % ID)
except:
pass
tohtml.html().ALL()
self.redirect('/custom')
elif CMD=='UP':
if TYPE=="LAST":
db.db("UPDATE Other SET NAME='%s',HTML='%s',Remark='%s' WHERE ID='%s'" % (name,HTML.replace('\'','\'\'').replace('\\','\\\\'),remark,ID))
elif TYPE in ('NAV','LINK'):
db.db("UPDATE LINK SET NAME='%s',LINK='%s' WHERE ID='%s'" % (name,LINK,ID))
tohtml.html().ALL()
self.redirect('/custom')
elif CMD=='NEW':
if TYPE=="LAST":
db.db("INSERT INTO Other (NAME,HTML,Remark,LOCATION,TYPE) VALUES ('%s','%s','%s','%s','belong')" % (name,HTML.replace('\'','\'\'').replace('\\','\\\\'),remark,TYPE.lower()))
elif TYPE in ('NAV','LINK'):
db.db("INSERT INTO LINK (NAME,LINK,TYPE) VALUES ('%s','%s','%s')" % (name,LINK,TYPE.lower()))
tohtml.html().ALL()
self.redirect('/custom')
elif CMD == 'HTML':
try:
HTML = db.db("SELECT HTML FROM Other WHERE ID='%s' " % ID)[0][0]
except:
pass
else:
self.write(HTML.strip().replace('\'\'','\'').replace('\\\\','\\'))
elif CMD=="DUOSHUO":
try:
db.db("UPDATE Ver SET DUOSHUO='%s' WHERE ID='1' " % name)
except Exception as e:
self.write("设定失败,原因:%s" % e)
else:
tohtml.html().ALL()
self.write("多说ID已成功设定为:%s" % name)
elif CMD=="JS":
if TYPE=='CX':
try:
JSCODE = db.db("SELECT HTML FROM Other WHERE NAME='JSCODE' ")[0][0]
except:
self.write('')
else:
self.write(JSCODE.replace('\'\'','\'').replace('\\\\','\\'))
elif TYPE=='UP':
try:
db.db("UPDATE Other SET HTML='%s' WHERE NAME='JSCODE'" % HTML.replace('\'','\'\'').replace('\\','\\\\'))
except Exception as e:
self.write(u'Update failed!')
else:
tohtml.html().ALL()
self.write(u'Update succeeded!')
class generate(LoginHandler):
@tornado.web.authenticated
def get(self):
tohtml.html().ALL()
self.redirect('/')
class upload(LoginHandler):
@tornado.web.authenticated
def post(self):
upload_path=os.path.join(os.path.dirname(__file__),'static/image/')
file_metas = self.request.files['editormd-image-file']
filename = ''
for meta in file_metas:
filename=time.strftime("%Y%m%d%H%M%S", time.localtime()) + meta['filename']
filepath=os.path.join(upload_path,filename)
with open(filepath,'wb') as up:
up.write(meta['body'])
print filename
s = {'success':1,'message': 'OK','url':'static/image/%s' % filename}
self.write(json.dumps(s))
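# Hedged note (not in the original file): editor.md's upload plugin expects
# exactly this JSON shape in the response:
#   {"success": 1, "message": "OK", "url": "static/image/<filename>"}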
class login(tornado.web.RequestHandler):
def get(self):
conf()
if self.get_secure_cookie("WLBLOG"):
self.redirect("/admin")
else:
self.render("login.html",title=NAME,NAME=NAME,
Category = Category,
Subtitle = Subtitle,
description=description,
keywords=keywords,)
def post(self):
username = self.get_argument('username','')
password = self.get_argument('password','')
if db.check(username,password):
self.set_secure_cookie("WLBLOG",username)
self.write("1")
else:
self.write("0")
class logout(tornado.web.RequestHandler):
def get(self):
self.clear_all_cookies()
self.redirect("/admin")
App = tornado.wsgi.WSGIApplication([
(r'/',index),
(r'/(.*\.html$)',static),
(r'/admin',Manager),
(r'/edit',Edit),
(r'/del',delete),
(r'/update',update),
(r'/upload',upload),
(r'/userupdate',userupdate),
(r'/custom',custom),
(r'/generate',generate),
(r'/login',login),
(r'/logout',logout)
],**settings)
from bae.core.wsgi import WSGIApplication
application = WSGIApplication(App)
|
gpl-2.0
| 1,647,006,971,386,371,600
| 37.166667
| 190
| 0.525109
| false
| 3.626526
| false
| false
| false
|
tmetsch/pyssf
|
docs/source/conf.py
|
1
|
8255
|
# -*- coding: utf-8 -*-
#
# pyssf documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 25 10:29:07 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Service Sharing Facility'
copyright = u'2010-2012, Platform Computing'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4.6'
# The full version, including alpha/beta/rc tags.
release = '0.4.6'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Service Sharing Facility'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'images/pyssf_logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ServiceSharingFacilitydoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'pyssf.tex', u'Service Sharing Facility Documentation',
u'Platform Computing', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = 'images/pyssf_logo.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyssf', u'pyssf Documentation',
[u'Platform Computing'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'pyssf'
epub_author = u'Platform Computing'
epub_publisher = u'Platform Computing'
epub_copyright = u'2010-2012, Platform Computing'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
lgpl-3.0
| 2,553,244,000,291,390,500
| 31.5
| 80
| 0.709267
| false
| 3.733605
| true
| false
| false
|
reclosedev/mitm_relay
|
socket_relay.py
|
1
|
6570
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import select
import logging
log = logging.getLogger(__name__)
class Server:
def __init__(self, relays, timeout=0.3):
self._relays = list(relays)
self.timeout = timeout
self.input_map = {}
self.links = {}
def main_loop(self):
for relay in self._relays:
self.add_relay(relay)
while True:
rlist, _, _ = select.select(self.input_map, [], [], self.timeout)
#log.debug("%s %s", len(rlist), len(self.input_map))
for sock in rlist:
obj = self.input_map[sock]
#log.debug("SO: %s, %s", sock, obj)
if isinstance(obj, Relay):
pipes = obj.new_client()
for pipe in pipes:
self.input_map[pipe.input_socket] = pipe
self.links[pipes[0]] = pipes[1]
self.links[pipes[1]] = pipes[0]
elif isinstance(obj, Pipe):
obj.on_read()
self.close_link_if_finished(obj)
def add_relay(self, relay):
self.input_map[relay.listen_socket] = relay
relay.listen()
def close_link_if_finished(self, pipe1):
if pipe1.work_done:
self.input_map.pop(pipe1.input_socket, None)
else:
return
pipe2 = self.links.get(pipe1)
if not (pipe2 and pipe2.work_done):
return
for pipe in pipe1, pipe2:
pipe.close()
self.links.pop(pipe, None)
self.input_map.pop(pipe.input_socket, None)
class Relay(object):
def __init__(self, listen_port, target_host=None, to_port=None, listen_host="127.0.0.1", backlog=200,
input_transform=None, output_transform=None):
self.listen_port = listen_port
self.target_host = target_host or listen_host
self.target_port = to_port or listen_port
self.listen_host = listen_host
self.listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.backlog = backlog
self.input_transform = input_transform
self.output_transform = output_transform
def listen(self):
log.info("%s listen", self)
self.listen_socket.bind((self.listen_host, self.listen_port))
self.listen_socket.listen(self.backlog)
def _accept_client(self):
client_socket, client_address = self.listen_socket.accept()
log.info("New client %s:%s", *client_address)
return client_socket
def _connect_upstream(self):
upstream_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
log.info("Connecting to %s:%s", self.target_host, self.target_port)
upstream_socket.connect((self.target_host, self.target_port))
return upstream_socket
def new_client(self):
client_socket = self._accept_client()
upstream_socket = self._connect_upstream()
log.debug("Create pipes")
receiver = Pipe(self, client_socket, upstream_socket, transform=self.input_transform)
sender = Pipe(self, upstream_socket, client_socket, transform=self.output_transform)
return receiver, sender
def __repr__(self):
return "<%s(%s, %s, %s)>" % (self.__class__.__name__, self.listen_port, self.target_host, self.target_port)
class ProxiedRelay(Relay):
def __init__(self, proxy_host, proxy_port, *args, **kwargs):
super(ProxiedRelay, self).__init__(*args, **kwargs)
self.proxy_host = proxy_host
self.proxy_port = proxy_port
def _connect_upstream(self):
upstream_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
log.info("Connecting to proxy %s:%s", self.proxy_host, self.proxy_port)
upstream_socket.connect((self.proxy_host, self.proxy_port))
data = "CONNECT %s:%d HTTP/1.0\r\n\r\n" % (self.target_host, self.target_port)
data = data.encode("ascii")
log.debug("Proxy query: %r", data)
upstream_socket.sendall(data)
fp = upstream_socket.makefile("rb")
while True:
data = fp.readline()
if data in (b"", b"\n", b"\r\n"):
break
log.debug("Proxy response: %r", data)
return upstream_socket
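# Hedged illustration (not in the original module): the plain HTTP CONNECT
# exchange performed above, assuming a well-behaved proxy.
#   >> CONNECT httpbin.org:80 HTTP/1.0
#   << HTTP/1.0 200 Connection established
#   << (a blank line ends the proxy response headers)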
class Pipe(object):
data_debug = 1
def __init__(self, relay, input_socket, output_socket,
buffer_size=1024 * 1024, transform=None):
self.relay = relay
self.input_socket = input_socket
self.output_socket = output_socket
self.buffer_size = buffer_size
self.transform = transform
self.input_peername = self.input_socket.getpeername()
self.output_peername = self.output_socket.getpeername()
self.work_done = False
def on_read(self):
try:
data = self.input_socket.recv(self.buffer_size)
except socket.error:
log.exception("%s exception in recv():", self)
self.work_done = True
return
if not data:
if self.data_debug:
log.debug("%s no data received", self)
self.work_done = True
return
if self.data_debug:
log.debug("%s data: %r", self, data)
if self.transform:
data = self.transform(data)
if not data:
return
try:
self.output_socket.sendall(data)
except socket.error:
log.exception("%s exception in sendall():", self)
self.work_done = True
def close(self):
log.info("%s closing", self)
self.input_socket.close()
self.output_socket.close()
def __repr__(self):
return "<Pipe(%s, %s)>" % (self.input_peername, self.output_peername)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
def in_transform(data):
print("INPUT TRANSFORM %r" % data)
return data.replace(b"/ip", b"/cookies")
def out_transform(data):
print("OUTPUT TRANSFORM %r" % data)
return data + b"transformed"
server = Server([
Relay(8080, "httpbin.org", 80, input_transform=in_transform, output_transform=out_transform),
ProxiedRelay("127.0.0.1", 8888, 9080, "httpbin.org", 80)
])
try:
server.main_loop()
except KeyboardInterrupt:
print("Stopping server...")
|
mit
| -6,196,941,404,122,939,000
| 31.524752
| 115
| 0.576712
| false
| 3.713963
| false
| false
| false
|
erpletzerp/letzerpcore
|
frappe/core/doctype/user/user.py
|
1
|
15029
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, now, get_gravatar
from frappe import throw, msgprint, _
from frappe.auth import _update_password
from frappe.desk.notifications import clear_notifications
import frappe.permissions
STANDARD_USERS = ("Guest", "Administrator")
from frappe.model.document import Document
class User(Document):
def autoname(self):
"""set name as email id"""
if self.name not in STANDARD_USERS:
self.email = self.email.strip()
self.name = self.email
def validate(self):
self.in_insert = self.get("__islocal")
if self.name not in STANDARD_USERS:
self.validate_email_type(self.email)
self.add_system_manager_role()
self.validate_system_manager_user_type()
self.check_enable_disable()
self.update_gravatar()
self.ensure_unique_roles()
self.remove_all_roles_for_guest()
if self.language == "Loading...":
self.language = None
def check_enable_disable(self):
# do not allow disabling administrator/guest
if not cint(self.enabled) and self.name in STANDARD_USERS:
frappe.throw(_("User {0} cannot be disabled").format(self.name))
if not cint(self.enabled):
self.a_system_manager_should_exist()
# clear sessions if disabled
if not cint(self.enabled) and getattr(frappe.local, "login_manager", None):
frappe.local.login_manager.logout(user=self.name)
def add_system_manager_role(self):
# if adding system manager, do nothing
if not cint(self.enabled) or ("System Manager" in [user_role.role for user_role in
self.get("user_roles")]):
return
if self.name not in STANDARD_USERS and self.user_type == "System User" and not self.get_other_system_managers():
msgprint(_("Adding System Manager to this User as there must be atleast one System Manager"))
self.append("user_roles", {
"doctype": "UserRole",
"role": "System Manager"
})
def validate_system_manager_user_type(self):
#if user has system manager role then user type should be system user
if ("System Manager" in [user_role.role for user_role in
self.get("user_roles")]) and self.get("user_type") != "System User":
frappe.throw(_("User with System Manager Role should always have User Type: System User"))
def email_new_password(self, new_password=None):
if new_password and not self.in_insert:
_update_password(self.name, new_password)
self.password_update_mail(new_password)
frappe.msgprint(_("New password emailed"))
def on_update(self):
# owner is always name
frappe.db.set(self, 'owner', self.name)
# clear new password
new_password = self.new_password
self.db_set("new_password", "")
clear_notifications(user=self.name)
frappe.clear_cache(user=self.name)
try:
if self.in_insert:
if self.name not in STANDARD_USERS:
if new_password:
# new password given, no email required
_update_password(self.name, new_password)
if not getattr(self, "no_welcome_mail", False):
self.send_welcome_mail()
msgprint(_("Welcome email sent"))
return
else:
self.email_new_password(new_password)
except frappe.OutgoingEmailError:
pass # email server not set, don't send email
def update_gravatar(self):
if not self.user_image:
self.user_image = get_gravatar(self.name)
@Document.hook
def validate_reset_password(self):
pass
def reset_password(self):
from frappe.utils import random_string, get_url
key = random_string(32)
self.db_set("reset_password_key", key)
self.password_reset_mail(get_url("/update-password?key=" + key))
def get_other_system_managers(self):
return frappe.db.sql("""select distinct user.name from tabUserRole user_role, tabUser user
where user_role.role='System Manager'
and user.docstatus<2
and ifnull(user.enabled,0)=1
and user_role.parent = user.name
and user_role.parent not in ('Administrator', %s) limit 1""", (self.name,))
def get_fullname(self):
"""get first_name space last_name"""
return (self.first_name or '') + \
(self.first_name and " " or '') + (self.last_name or '')
def password_reset_mail(self, link):
self.send_login_mail(_("Password Reset"), "templates/emails/password_reset.html", {"link": link})
def password_update_mail(self, password):
self.send_login_mail(_("Password Update"), "templates/emails/password_update.html", {"new_password": password})
def send_welcome_mail(self):
from frappe.utils import random_string, get_url
key = random_string(32)
self.db_set("reset_password_key", key)
link = get_url("/update-password?key=" + key)
self.send_login_mail(_("Verify Your Account"), "templates/emails/new_user.html", {"link": link})
def send_login_mail(self, subject, template, add_args):
"""send mail with login details"""
from frappe.utils.user import get_user_fullname
from frappe.utils import get_url
mail_titles = frappe.get_hooks().get("login_mail_title", [])
title = frappe.db.get_default('company') or (mail_titles and mail_titles[0]) or ""
full_name = get_user_fullname(frappe.session['user'])
if full_name == "Guest":
full_name = "Administrator"
args = {
'first_name': self.first_name or self.last_name or "user",
'user': self.name,
'title': title,
'login_url': get_url(),
'user_fullname': full_name
}
args.update(add_args)
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
frappe.sendmail(recipients=self.email, sender=sender, subject=subject,
message=frappe.get_template(template).render(args))
def a_system_manager_should_exist(self):
if not self.get_other_system_managers():
throw(_("There should remain at least one System Manager"))
def on_trash(self):
frappe.clear_cache(user=self.name)
if self.name in STANDARD_USERS:
throw(_("User {0} cannot be deleted").format(self.name))
self.a_system_manager_should_exist()
# disable the user and log him/her out
self.enabled = 0
if getattr(frappe.local, "login_manager", None):
frappe.local.login_manager.logout(user=self.name)
# delete their password
frappe.db.sql("""delete from __Auth where user=%s""", (self.name,))
# delete todos
frappe.db.sql("""delete from `tabToDo` where owner=%s""", (self.name,))
frappe.db.sql("""update tabToDo set assigned_by=null where assigned_by=%s""",
(self.name,))
# delete events
frappe.db.sql("""delete from `tabEvent` where owner=%s
and event_type='Private'""", (self.name,))
frappe.db.sql("""delete from `tabEvent User` where person=%s""", (self.name,))
# delete messages
frappe.db.sql("""delete from `tabComment` where comment_doctype='Message'
and (comment_docname=%s or owner=%s)""", (self.name, self.name))
def before_rename(self, olddn, newdn, merge=False):
frappe.clear_cache(user=olddn)
self.validate_rename(olddn, newdn)
def validate_rename(self, olddn, newdn):
# do not allow renaming administrator and guest
if olddn in STANDARD_USERS:
throw(_("User {0} cannot be renamed").format(self.name))
self.validate_email_type(newdn)
def validate_email_type(self, email):
from frappe.utils import validate_email_add
email = email.strip()
if not validate_email_add(email):
throw(_("{0} is not a valid email id").format(email))
def after_rename(self, olddn, newdn, merge=False):
tables = frappe.db.sql("show tables")
for tab in tables:
desc = frappe.db.sql("desc `%s`" % tab[0], as_dict=1)
has_fields = []
for d in desc:
if d.get('Field') in ['owner', 'modified_by']:
has_fields.append(d.get('Field'))
for field in has_fields:
frappe.db.sql("""\
update `%s` set `%s`=%s
where `%s`=%s""" % \
(tab[0], field, '%s', field, '%s'), (newdn, olddn))
# set email
frappe.db.sql("""\
update `tabUser` set email=%s
where name=%s""", (newdn, newdn))
# update __Auth table
if not merge:
frappe.db.sql("""update __Auth set user=%s where user=%s""", (newdn, olddn))
def add_roles(self, *roles):
for role in roles:
if role in [d.role for d in self.get("user_roles")]:
continue
self.append("user_roles", {
"doctype": "UserRole",
"role": role
})
self.save()
def remove_roles(self, *roles):
existing_roles = dict((d.role, d) for d in self.get("user_roles"))
for role in roles:
if role in existing_roles:
self.get("user_roles").remove(existing_roles[role])
self.save()
def remove_all_roles_for_guest(self):
if self.name == "Guest":
self.set("user_roles", list(set(d for d in self.get("user_roles") if d.role == "Guest")))
def ensure_unique_roles(self):
exists = []
for i, d in enumerate(self.get("user_roles")):
if (not d.role) or (d.role in exists):
self.get("user_roles").remove(d)
else:
exists.append(d.role)
@frappe.whitelist()
def get_languages():
from frappe.translate import get_lang_dict
import pytz
languages = get_lang_dict().keys()
languages.sort()
return {
"languages": [""] + languages,
"timezones": pytz.all_timezones
}
@frappe.whitelist()
def get_all_roles(arg=None):
"""return all roles"""
return [r[0] for r in frappe.db.sql("""select name from tabRole
where name not in ('Administrator', 'Guest', 'All') order by name""")]
@frappe.whitelist()
def get_user_roles(arg=None):
"""get roles for a user"""
return frappe.get_roles(frappe.form_dict['uid'])
@frappe.whitelist()
def get_perm_info(arg=None):
"""get permission info"""
return frappe.db.sql("""select * from tabDocPerm where role=%s
and docstatus<2 order by parent, permlevel""", (frappe.form_dict['role'],), as_dict=1)
@frappe.whitelist(allow_guest=True)
def update_password(new_password, key=None, old_password=None):
# verify old password
if key:
user = frappe.db.get_value("User", {"reset_password_key":key})
if not user:
return _("Cannot Update: Incorrect / Expired Link.")
elif old_password:
user = frappe.session.user
if not frappe.db.sql("""select user from __Auth where password=password(%s)
and user=%s""", (old_password, user)):
return _("Cannot Update: Incorrect Password")
_update_password(user, new_password)
frappe.db.set_value("User", user, "reset_password_key", "")
frappe.local.login_manager.logout()
return _("Password Updated")
@frappe.whitelist(allow_guest=True)
def sign_up(args):
args = eval(args) # FIXME: eval() on request input is unsafe; json.loads would be safer
from frappe.utils import get_url, cstr
import json
import requests
if get_url()=='http://demo.letzerp.com':
#frappe.errprint(['url',get_url()])
#frappe.db.sql("""insert into `tabDemo Sites` (email,full_name,domain_name,company_name) values(%s,%s,%s,%s);""",(args['email'],args['full_name'],args['subdomain'],args['company_name']))
s = requests.session()
login_details = {'usr': 'administrator', 'pwd': 'admin'}
url = 'http://letzerp.com/api/method/login?usr=firstuser@example.com&pwd=password'
headers = {'content-type': 'application/x-www-form-urlencoded'}
#frappe.errprint([url, 'data='+json.dumps(login_details)])
response = s.post(url)
url='http://letzerp.com/api/resource/Lead/?fields=["domain_name", "name"]&filters=[["Lead", "domain_name", "=", "%s"]]'%(args['subdomain']+'.letzerp.com')
lead_response = s.get(url, headers=headers) # renamed: the old name shadowed the `requests` module
if lead_response.text :
frappe.errprint(lead_response.text)
lead_dict = json.loads(lead_response.text)
if len(lead_dict['data']) > 0 :
return (_("Domain already exist with same name..Please choose another domain..!"))
else:
url = 'http://letzerp.com/api/resource/Lead'
headers = {'content-type': 'application/x-www-form-urlencoded'}
data={}
data['lead_name']=args['full_name']
data['company_name']=args['company_name']
data['email_id']=args['email']
data['domain_name']=args['subdomain']+'.letzerp.com'
# frappe.errprint('data='+json.dumps(data))
response = s.post(url, data='data='+json.dumps(data), headers=headers)
# frappe.errprint(response.text)
return (_("Registration Details will be send on your email id soon. "))
@frappe.whitelist(allow_guest=True)
def reset_password(user):
if user=="Administrator":
return _("Not allowed to reset the password of {0}").format(user)
try:
user = frappe.get_doc("User", user)
user.validate_reset_password()
user.reset_password()
return _("Password reset instructions have been sent to your email")
except frappe.DoesNotExistError:
return _("User {0} does not exist").format(user)
def user_query(doctype, txt, searchfield, start, page_len, filters):
from frappe.desk.reportview import get_match_cond
txt = "%{}%".format(txt)
return frappe.db.sql("""select name, concat_ws(' ', first_name, middle_name, last_name)
from `tabUser`
where ifnull(enabled, 0)=1
and docstatus < 2
and name not in ({standard_users})
and user_type != 'Website User'
and ({key} like %s
or concat_ws(' ', first_name, middle_name, last_name) like %s)
{mcond}
order by
case when name like %s then 0 else 1 end,
case when concat_ws(' ', first_name, middle_name, last_name) like %s
then 0 else 1 end,
name asc
limit %s, %s""".format(standard_users=", ".join(["%s"]*len(STANDARD_USERS)),
key=searchfield, mcond=get_match_cond(doctype)),
tuple(list(STANDARD_USERS) + [txt, txt, txt, txt, start, page_len]))
def get_total_users(exclude_users=None):
"""Returns total no. of system users"""
return len(get_system_users(exclude_users=exclude_users))
def get_system_users(exclude_users=None):
if not exclude_users:
exclude_users = []
elif not isinstance(exclude_users, (list, tuple)):
exclude_users = [exclude_users]
exclude_users += list(STANDARD_USERS)
system_users = frappe.db.sql_list("""select name from `tabUser`
where enabled=1 and user_type != 'Website User'
and name not in ({})""".format(", ".join(["%s"]*len(exclude_users))),
exclude_users)
return system_users
def get_active_users():
"""Returns No. of system users who logged in, in the last 3 days"""
return frappe.db.sql("""select count(*) from `tabUser`
where enabled = 1 and user_type != 'Website User'
and name not in ({})
and hour(timediff(now(), last_login)) < 72""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]
def get_website_users():
"""Returns total no. of website users"""
return frappe.db.sql("""select count(*) from `tabUser`
where enabled = 1 and user_type = 'Website User'""")[0][0]
def get_active_website_users():
"""Returns No. of website users who logged in, in the last 3 days"""
return frappe.db.sql("""select count(*) from `tabUser`
where enabled = 1 and user_type = 'Website User'
and hour(timediff(now(), last_login)) < 72""")[0][0]
def get_permission_query_conditions(user):
if user=="Administrator":
return ""
else:
return """(`tabUser`.name not in ({standard_users}))""".format(
standard_users='"' + '", "'.join(STANDARD_USERS) + '"')
def has_permission(doc, user):
if (user != "Administrator") and (doc.name in STANDARD_USERS):
# dont allow non Administrator user to view / edit Administrator user
return False
else:
return True
|
mit
| 3,309,310,681,578,158,600
| 32.621924
| 188
| 0.682214
| false
| 3.104524
| false
| false
| false
|
edm1/error-aware-demultiplexer
|
src/demultiplexer.py
|
1
|
16772
|
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Edward Mountjoy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
from src.probabilisticSeqMatch import sequences_match_prob
from src.probabilisticSeqMatch import base_prob
from src.fastqparser import phred_score_dict
from src.fastqparser import fastqIterator
from src.fastqparser import Fastq
from src.fastqparser import fastqWriter
from src.progressbar import Bar
from operator import itemgetter
from datetime import timedelta
from shutil import rmtree
import glob
import gzip
import sys
import os
#import concurrent.futures as cf
def run(args):
print("Precomputing base probabilities...")
# Precompute string to phred scores dictionary
phred_dict = phred_score_dict(args.phredOffset)
# Precompute base probabilities for phredscores up to 50
base_prob_precompute = {}
for letter in phred_dict:
base_prob_precompute[letter] = base_prob(phred_dict[letter])
# Convert index qual argument to a qual character
args.indexQual = chr(args.indexQual + args.phredOffset)
print("Searching for fastqs...")
# Check that the multiplexed path exists
multiplexed_dir = os.path.join(args.inDir, "multiplexed")
if not os.path.exists(multiplexed_dir):
sys.exit("Directory '<inDir>/multiplexed' does not exist. Re-run with"
" different <inDir>")
# Create out directory
out_dir = "demultiplexed"
if args.uniqID != None:
out_dir += "_{0}".format(args.uniqID)
out_dir = os.path.join(args.inDir, out_dir)
create_folder(out_dir)
# Initiate multiplexed class
multiplexed = Multiplex(multiplexed_dir)
print("Loading index sequences...")
# Initiate sample sheet and read possible indexes
sampleSheet = SampleSheet(args.sampleSheet)
sampleSheet.parse(args.indexQual, base_prob_precompute)
# Check that there are the same number of indexes in sample sheet and
# multiplexed fastqs
if sampleSheet.is_dualindexed != multiplexed.is_dualindexed:
sys.exit("Error: Different number of indexes in sampleSheet and "
"multiplexed reads. Exiting!")
print("Initiating...")
# Open output class for each sample, and a not_assigned group
sample_out = {}
for sample in list(sampleSheet.sample_indexes.keys()) + ['not_assigned']:
sample_out[sample] = Sample(sample, out_dir, multiplexed.is_pairend,
multiplexed.is_dualindexed)
# Initiate progress bar
num_records = file_len(multiplexed.barcode_paths[0]) // 4 # integer division: 4 lines per fastq record
bar = Bar('Demultiplexing', max=int(num_records/10000),
suffix='%(percent)d%% %(eta)a secs')
c = 1
for variables in futures_iterate_reads(base_prob_precompute,
multiplexed, sampleSheet, args.minProb):
# Get output
output = futures_barcode_to_indexes(variables)
# Unpack output
((read_records, barcode_records), sample, prob, _) = output
# Write record to correct sample file
sample_out[sample].write(read_records, barcode_records)
# Update progress
if c % 10000 == 0:
bar.next()
c += 1
# Close progress bar
bar.finish()
# Close all sample handles
for sample_name in sample_out:
sample_out[sample_name].close_handles()
print("Finished!")
"""
# Send each read/barcode record to futures to match up to sample
with cf.ProcessPoolExecutor(max_workers=args.numCPU) as executor:
c = 1
# Map read/barcode records
for output in executor.map(futures_barcode_to_indexes,
futures_iterate_reads(multiplexed, sampleSheet,
base_prob_precompute, args.minProb)):
# Unpack output
((read_records, barcode_records), sample, prob, _) = output
# Write record to correct sample file
sample_out[sample].write(read_records, barcode_records)
# Update progress
if c % 1000 == 0:
print(c)
c += 1
"""
return 0
def futures_iterate_reads(base_prob_precompute, multiplexed, sampleSheet,
min_prob):
""" Returns an iterator that contains everything needed for futures.
"""
for combined_record in multiplexed.iterate(base_prob_precompute):
yield (combined_record, sampleSheet, min_prob)
def futures_barcode_to_indexes(variables):
""" Compares the reads barcodes to sample indexes and returns matching
sample name.
"""
# Unpack variables
(combined_record, sampleSheet, min_prob) = variables
# Get barcode records
_, barcode_records = combined_record
# Find sample
b1_header, sample, prob = match_barcode_to_indexes(barcode_records,
sampleSheet, min_prob)
if sample == None:
sample = 'not_assigned'
# Append probability to barcode1 header
b1_header = "{0} {1}".format(b1_header, prob)
# Change header
combined_record[1][0].id = b1_header
return combined_record, sample, prob, b1_header
def match_barcode_to_indexes(barcode_records, sampleSheet, min_prob):
""" For the barcode pair, caluclates probability of a match against each set
of indexes
"""
index_probs = {}
for sample_name in sampleSheet.sample_indexes:
index_records = sampleSheet.sample_indexes[sample_name]
# Calculate the match probability for barcode 1
b1_prob = sequences_match_prob(index_records[0].seq,
index_records[0].qual_prob,
barcode_records[0].seq,
barcode_records[0].qual_prob, 0)
# Do for second barcode if present
if sampleSheet.is_dualindexed:
# Skip if already below the threshold, else assign same prob as b1
if b1_prob >= min_prob:
b2_prob = sequences_match_prob(index_records[1].seq,
index_records[1].qual_prob,
barcode_records[1].seq,
barcode_records[1].qual_prob, 0)
else:
b2_prob = b1_prob
# Calculate combined probability
if sampleSheet.is_dualindexed:
overall_prob = b1_prob * b2_prob
else:
overall_prob = b1_prob
# Save result
index_probs[sample_name] = overall_prob
# Sort the results by their probability
sorted_probs = sorted(index_probs.items(), key=itemgetter(1),
reverse=True)
# Return header, sample, prob
header = barcode_records[0].id
if sorted_probs[0][1] > min_prob:
return header, sorted_probs[0][0], sorted_probs[0][1]
else:
return header, None, sorted_probs[0][1]
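# Hedged sketch (not in the original module): the combination rule used
# above; p1/p2 are the per-barcode match probabilities produced by
# sequences_match_prob.
def combined_match_prob(p1, p2=None):
    return p1 * p2 if p2 is not None else p1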
class Sample:
# Class for each possible sample. 1) Holds the output directory for that
# sample. 2) Opens handles. 3) Writes record to sample.
def __init__(self, name, out_dir, is_pe, id_dual):
self.read_paths = []
self.barcode_paths = []
self.read_handles = None
self.barcode_handles = None
# Create directory for sample
name = name.replace(' ', '_')
self.sample_dir = os.path.join(out_dir, name)
create_folder(self.sample_dir)
# Create read paths
self.read_paths.append(os.path.join(self.sample_dir,
'{0}.R1.fastq.gz'.format(name)))
if is_pe:
self.read_paths.append(os.path.join(self.sample_dir,
'{0}.R2.fastq.gz'.format(name)))
# Create barcode paths
self.barcode_paths.append(os.path.join(self.sample_dir,
'{0}.barcode_1.fastq.gz'.format(name)))
if id_dual:
self.barcode_paths.append(os.path.join(self.sample_dir,
'{0}.barcode_2.fastq.gz'.format(name)))
def open_handles(self):
""" For the reads and barcodes, opens output handles.
"""
self.read_handles = [get_handle(read_path, 'w') for read_path
in self.read_paths]
self.barcode_handles = [get_handle(barcode_path, 'w') for barcode_path
in self.barcode_paths]
return 0
def write(self, read_records, barcode_records):
""" Writes the demultiplexed read and barcode records to sample file.
"""
# Open handles if not open
if self.read_handles == None:
self.open_handles()
# Write read records
for i in range(len(read_records)):
fastqWriter(read_records[i], self.read_handles[i])
# Write barcode records
for i in range(len(barcode_records)):
fastqWriter(barcode_records[i], self.barcode_handles[i])
return 0
def close_handles(self):
""" Closes any open handles.
"""
if self.read_handles != None:
for handle in self.read_handles + self.barcode_handles:
handle.close()
return 0
class SampleSheet:
# Class to hold the sample sheet and retrieve indexes from it.
def __init__(self, path):
self.path = path
def parse(self, index_qual, base_prob_precompute):
""" Parses the sample sheet to retrieve the indexes for each sample.
"""
sample_indexes = {}
with open(self.path, 'r') as in_h:
# Skip to line after [Data]
line = in_h.readline()
while not line.startswith('[Data]'):
line = in_h.readline()
# Get header
header = in_h.readline().rstrip().lower().split(',')
col_ind = dict(zip(header, range(len(header))))
# Save whether it is dual indexed
if "index2" in col_ind.keys():
self.is_dualindexed = True
else:
self.is_dualindexed = False
# Get indexes
for line in in_h:
# Break if EOF
if line.strip() == "":
break
# Get info
parts = line.rstrip().split(',')
sample_name = parts[col_ind['sample_name']]
# If sample_name is empty, take sample_id instead
if sample_name == "":
sample_name = parts[col_ind['sample_id']]
# Get first index
index1 = parts[col_ind['index']]
sample_indexes[sample_name] = [index1]
# Get second index
if self.is_dualindexed:
index2 = parts[col_ind['index2']]
sample_indexes[sample_name].append(index2)
# Convert indexes to Fastq records
self.sample_indexes = self.convert_index_to_fastqRecord(sample_indexes,
index_qual, base_prob_precompute)
return 0
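# Hedged illustration (hypothetical values, not a real sample sheet): the
# [Data] layout that parse() expects for a dual-indexed run.
#
#   [Data]
#   sample_id,sample_name,index,index2
#   S1,SampleA,ACGTACGT,TTGACCAA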
def convert_index_to_fastqRecord(self, sample_indexes, index_qual,
base_prob_precompute):
""" Converts each index sequence to a seqIO seqRecord.
"""
# For each sample
for sample in sample_indexes:
# For each index
for i in range(len(sample_indexes[sample])):
raw_seq = sample_indexes[sample][i]
qual = [index_qual] * len(raw_seq)
# Convert to fastqRecord
record = Fastq(None, raw_seq, qual)
# Calculate base probabilities
record.qual_to_prob(base_prob_precompute)
# Save record
sample_indexes[sample][i] = record
return sample_indexes
class Multiplex:
# Class for the folder of multiplexed reads + barcodes
def __init__(self, folder):
""" Make list of read and barcode files.
"""
self.dir = folder
# Get list of read and barcode paths
self.read_paths = []
self.barcode_paths = []
for fastq in sorted(glob.glob(os.path.join(folder, "*.fastq*"))):
if "barcode_" in os.path.split(fastq)[1]:
self.barcode_paths.append(fastq)
else:
self.read_paths.append(fastq)
# Save whether pairend
if len(self.read_paths) == 1:
self.is_pairend = False
elif len(self.read_paths) == 2:
self.is_pairend = True
else:
sys.exit("There must be 1 or 2 input read fastqs, not {0}".format(
len(self.read_paths)))
# Save whether dualindex
if len(self.barcode_paths) == 1:
self.is_dualindexed = False
elif len(self.barcode_paths) == 2:
self.is_dualindexed = True
else:
sys.exit("There must be 1 or 2 input barcode fastqs, not"
" {0}".format(len(self.barcode_paths)))
return None
def open_handles(self):
""" Opens the file names for reading.
"""
read_handles = [get_handle(filen, 'r') for filen in self.read_paths]
barcode_handles = [get_handle(filen, 'r') for filen
in self.barcode_paths]
return read_handles, barcode_handles
def open_iterators(self, read_handles, barcode_handles):
""" Opens fastq iterators using biopythons SeqIO
"""
# Open iterators for each handle
read_iterators = [fastqIterator(handle) for handle
in read_handles]
barcode_iterators = [fastqIterator(handle) for handle
in barcode_handles]
return read_iterators, barcode_iterators
def iterate(self, base_prob_precompute):
""" Loads the reads and barcode fastqs and yields 1 set at a time.
"""
# Open handles
read_handles, barcode_handles = self.open_handles()
# Open iterators for each handle
read_iterators, barcode_iterators = self.open_iterators(
read_handles, barcode_handles)
# Iterate through records
for r1_record in read_iterators[0]:
# Get read records
read_records = [r1_record]
if self.is_pairend:
read_records.append(next(read_iterators[1]))
# Get barcode records
barcode_records = [next(barcode_iterators[0])]
if self.is_dualindexed:
barcode_records.append(next(barcode_iterators[1]))
# Check that they all have the same title
titles = [record.id.split(" ")[0] for record in read_records + barcode_records]
if len(set(titles)) > 1:
sys.exit('Reads and/or barcodes are not in sync\n'
'{0}'.format(titles))
# Calculate base probabilities for barcodes
for i in range(len(barcode_records)):
barcode_records[i].qual_to_prob(base_prob_precompute)
yield [read_records, barcode_records]
# Close handles
for handle in read_handles + barcode_handles:
handle.close()
def create_folder(folder):
""" Check out folder exists and create a new one.
"""
# Check if it exists
if os.path.exists(folder):
response = input('{0} exists. Would you like to overwrite it? [y/n] '.format(folder))
if response == 'y':
rmtree(folder)
else:
sys.exit()
os.makedirs(folder)
return folder
def get_handle(filen, rw):
""" Returns file handle using gzip if file ends in .gz
"""
if filen.split('.')[-1] == 'gz':
return gzip.open(filen, rw)
else:
return open(filen, rw)
def file_len(fname):
""" Count number of lines in a file.
"""
with get_handle(fname, 'r') as f:
for i, l in enumerate(f):
pass
return i + 1
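# Usage sketch (illustrative only; assumes the classes above and a
# `base_prob_precompute` table built elsewhere in this script):
#
#     sheet = SampleSheet('SampleSheet.csv')
#     sheet.parse(index_qual=30, base_prob_precompute=base_prob_precompute)
#     multiplexed = Multiplex('run_folder')
#     for read_records, barcode_records in multiplexed.iterate(base_prob_precompute):
#         pass  # match barcodes against sheet.sample_indexes, then Sample.write()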
|
mit
| 5,872,654,845,560,061,000
| 35.30303
| 93
| 0.599273
| false
| 4.083759
| false
| false
| false
|
t-wissmann/qutebrowser
|
tests/helpers/stubs.py
|
1
|
17146
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name,abstract-method
"""Fake objects/stubs."""
from unittest import mock
import contextlib
import shutil
import attr
from PyQt5.QtCore import pyqtSignal, QPoint, QProcess, QObject, QUrl
from PyQt5.QtGui import QIcon
from PyQt5.QtNetwork import (QNetworkRequest, QAbstractNetworkCache,
QNetworkCacheMetaData)
from PyQt5.QtWidgets import QCommonStyle, QLineEdit, QWidget, QTabBar
from qutebrowser.browser import browsertab, downloads
from qutebrowser.utils import usertypes
from qutebrowser.commands import runners
class FakeNetworkCache(QAbstractNetworkCache):
"""Fake cache with no data."""
def cacheSize(self):
return 0
def data(self, _url):
return None
def insert(self, _dev):
pass
def metaData(self, _url):
return QNetworkCacheMetaData()
def prepare(self, _metadata):
return None
def remove(self, _url):
return False
def updateMetaData(self, _url):
pass
class FakeKeyEvent:
"""Fake QKeyPressEvent stub."""
def __init__(self, key, modifiers=0, text=''):
self.key = mock.Mock(return_value=key)
self.text = mock.Mock(return_value=text)
self.modifiers = mock.Mock(return_value=modifiers)
class FakeWebFrame:
"""A stub for QWebFrame."""
def __init__(self, geometry=None, *, scroll=None, plaintext=None,
html=None, parent=None, zoom=1.0):
"""Constructor.
Args:
geometry: The geometry of the frame as QRect.
scroll: The scroll position as QPoint.
plaintext: Return value of toPlainText
html: Return value of tohtml.
zoom: The zoom factor.
parent: The parent frame.
"""
if scroll is None:
scroll = QPoint(0, 0)
self.geometry = mock.Mock(return_value=geometry)
self.scrollPosition = mock.Mock(return_value=scroll)
self.parentFrame = mock.Mock(return_value=parent)
self.toPlainText = mock.Mock(return_value=plaintext)
self.toHtml = mock.Mock(return_value=html)
self.zoomFactor = mock.Mock(return_value=zoom)
class FakeChildrenFrame:
"""A stub for QWebFrame to test get_child_frames."""
def __init__(self, children=None):
if children is None:
children = []
self.childFrames = mock.Mock(return_value=children)
class FakeQApplication:
"""Stub to insert as QApplication module."""
UNSET = object()
def __init__(self, *, style=None, all_widgets=None, active_window=None,
instance=UNSET, arguments=None, platform_name=None):
if instance is self.UNSET:
self.instance = mock.Mock(return_value=self)
else:
self.instance = mock.Mock(return_value=instance)
self.style = mock.Mock(spec=QCommonStyle)
self.style().metaObject().className.return_value = style
self.allWidgets = lambda: all_widgets
self.activeWindow = lambda: active_window
self.arguments = lambda: arguments
self.platformName = lambda: platform_name
class FakeNetworkReply:
"""QNetworkReply stub which provides a Content-Disposition header."""
KNOWN_HEADERS = {
QNetworkRequest.ContentTypeHeader: 'Content-Type',
}
def __init__(self, headers=None, url=None):
if url is None:
url = QUrl()
if headers is None:
self.headers = {}
else:
self.headers = headers
self.url = mock.Mock(return_value=url)
def hasRawHeader(self, name):
"""Check if the reply has a certain header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
True if the header is present, False if not.
"""
return name.decode('iso-8859-1') in self.headers
def rawHeader(self, name):
"""Get the raw header data of a header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
The header data, as ISO-8859-1 encoded bytes() object.
"""
name = name.decode('iso-8859-1')
return self.headers[name].encode('iso-8859-1')
def header(self, known_header):
"""Get a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
"""
key = self.KNOWN_HEADERS[known_header]
try:
return self.headers[key]
except KeyError:
return None
def setHeader(self, known_header, value):
"""Set a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
value: The value to set.
"""
key = self.KNOWN_HEADERS[known_header]
self.headers[key] = value
def fake_qprocess():
"""Factory for a QProcess mock which has the QProcess enum values."""
m = mock.Mock(spec=QProcess)
for name in ['NormalExit', 'CrashExit', 'FailedToStart', 'Crashed',
'Timedout', 'WriteError', 'ReadError', 'UnknownError']:
setattr(m, name, getattr(QProcess, name))
return m
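# Illustrative use: the mock carries the real QProcess enum values, so code
# under test can compare exit statuses without spawning a process:
#
#     proc = fake_qprocess()
#     assert proc.NormalExit == QProcess.NormalExit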
class FakeWebTabScroller(browsertab.AbstractScroller):
"""Fake AbstractScroller to use in tests."""
def __init__(self, tab, pos_perc):
super().__init__(tab)
self._pos_perc = pos_perc
def pos_perc(self):
return self._pos_perc
class FakeWebTabHistory(browsertab.AbstractHistory):
"""Fake for Web{Kit,Engine}History."""
def __init__(self, tab, *, can_go_back, can_go_forward):
super().__init__(tab)
self._can_go_back = can_go_back
self._can_go_forward = can_go_forward
def can_go_back(self):
assert self._can_go_back is not None
return self._can_go_back
def can_go_forward(self):
assert self._can_go_forward is not None
return self._can_go_forward
class FakeWebTabAudio(browsertab.AbstractAudio):
def is_muted(self):
return False
def is_recently_audible(self):
return False
class FakeWebTabPrivate(browsertab.AbstractTabPrivate):
def shutdown(self):
pass
class FakeWebTab(browsertab.AbstractTab):
"""Fake AbstractTab to use in tests."""
def __init__(self, url=QUrl(), title='', tab_id=0, *,
scroll_pos_perc=(0, 0),
load_status=usertypes.LoadStatus.success,
progress=0, can_go_back=None, can_go_forward=None):
super().__init__(win_id=0, private=False)
self._load_status = load_status
self._title = title
self._url = url
self._progress = progress
self.history = FakeWebTabHistory(self, can_go_back=can_go_back,
can_go_forward=can_go_forward)
self.scroller = FakeWebTabScroller(self, scroll_pos_perc)
self.audio = FakeWebTabAudio(self)
self.private_api = FakeWebTabPrivate(tab=self, mode_manager=None)
wrapped = QWidget()
self._layout.wrap(self, wrapped)
def url(self, *, requested=False):
assert not requested
return self._url
def title(self):
return self._title
def progress(self):
return self._progress
def load_status(self):
return self._load_status
def icon(self):
return QIcon()
class FakeSignal:
"""Fake pyqtSignal stub which does nothing.
Attributes:
signal: The name of the signal, like pyqtSignal.
_func: The function to be invoked when the signal gets called.
"""
def __init__(self, name='fake', func=None):
self.signal = '2{}(int, int)'.format(name)
self._func = func
def __call__(self):
if self._func is None:
raise TypeError("'FakeSignal' object is not callable")
return self._func()
def connect(self, slot):
"""Connect the signal to a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot.
"""
def disconnect(self, slot=None):
"""Disconnect the signal from a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot and see if it actually got connected.
"""
def emit(self, *args):
"""Emit the signal.
Currently does nothing, but could be improved to do type checking based
on a signature given to __init__.
"""
@attr.s(frozen=True)
class FakeCommand:
"""A simple command stub which has a description."""
name = attr.ib('')
desc = attr.ib('')
hide = attr.ib(False)
debug = attr.ib(False)
deprecated = attr.ib(False)
completion = attr.ib(None)
maxsplit = attr.ib(None)
takes_count = attr.ib(lambda: False)
modes = attr.ib((usertypes.KeyMode.normal, ))
class FakeTimer(QObject):
"""Stub for a usertypes.Timer."""
timeout_signal = pyqtSignal()
def __init__(self, parent=None, name=None):
super().__init__(parent)
self.timeout = mock.Mock(spec=['connect', 'disconnect', 'emit'])
self.timeout.connect.side_effect = self.timeout_signal.connect
self.timeout.disconnect.side_effect = self.timeout_signal.disconnect
self.timeout.emit.side_effect = self._emit
self._started = False
self._singleshot = False
self._interval = 0
self._name = name
def __repr__(self):
return '<{} name={!r}>'.format(self.__class__.__name__, self._name)
def _emit(self):
"""Called when the timeout "signal" gets emitted."""
if self._singleshot:
self._started = False
self.timeout_signal.emit()
def setInterval(self, interval):
self._interval = interval
def interval(self):
return self._interval
def setSingleShot(self, singleshot):
self._singleshot = singleshot
def isSingleShot(self):
return self._singleshot
def start(self, interval=None):
if interval:
self._interval = interval
self._started = True
def stop(self):
self._started = False
def isActive(self):
return self._started
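# Illustrative use in a test: the timeout can be fired manually instead of
# waiting for wall-clock time (`on_timeout` stands in for the slot under test):
#
#     timer = FakeTimer(name='poll')
#     timer.timeout.connect(on_timeout)
#     timer.start(1000)
#     timer.timeout.emit()   # invokes the connected slot immediately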
class InstaTimer(QObject):
"""Stub for a QTimer that fires instantly on start().
Useful to test a time-based event without inserting an artificial delay.
"""
timeout = pyqtSignal()
def start(self, interval=None):
self.timeout.emit()
def setSingleShot(self, yes):
pass
def setInterval(self, interval):
pass
@staticmethod
def singleShot(_interval, fun):
fun()
class StatusBarCommandStub(QLineEdit):
"""Stub for the statusbar command prompt."""
got_cmd = pyqtSignal(str)
clear_completion_selection = pyqtSignal()
hide_completion = pyqtSignal()
update_completion = pyqtSignal()
show_cmd = pyqtSignal()
hide_cmd = pyqtSignal()
def prefix(self):
return self.text()[0]
class UrlMarkManagerStub(QObject):
"""Stub for the quickmark-manager or bookmark-manager object."""
added = pyqtSignal(str, str)
removed = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.marks = {}
def delete(self, key):
del self.marks[key]
self.removed.emit(key)
class BookmarkManagerStub(UrlMarkManagerStub):
"""Stub for the bookmark-manager object."""
class QuickmarkManagerStub(UrlMarkManagerStub):
"""Stub for the quickmark-manager object."""
def quickmark_del(self, key):
self.delete(key)
class SessionManagerStub:
"""Stub for the session-manager object."""
def __init__(self):
self.sessions = []
def list_sessions(self):
return self.sessions
def save_autosave(self):
pass
class TabbedBrowserStub(QObject):
"""Stub for the tabbed-browser object."""
def __init__(self, parent=None):
super().__init__(parent)
self.widget = TabWidgetStub()
self.shutting_down = False
self.loaded_url = None
self.cur_url = None
def on_tab_close_requested(self, idx):
del self.widget.tabs[idx]
def widgets(self):
return self.widget.tabs
def tabopen(self, url):
self.loaded_url = url
def load_url(self, url, *, newtab):
self.loaded_url = url
def current_url(self):
        if self.cur_url is None:
raise ValueError("current_url got called with cur_url None!")
return self.cur_url
class TabWidgetStub(QObject):
"""Stub for the tab-widget object."""
new_tab = pyqtSignal(browsertab.AbstractTab, int)
def __init__(self, parent=None):
super().__init__(parent)
self.tabs = []
self._qtabbar = QTabBar()
self.index_of = None
self.current_index = None
def count(self):
return len(self.tabs)
def widget(self, i):
return self.tabs[i]
def page_title(self, i):
return self.tabs[i].title()
def tabBar(self):
return self._qtabbar
def indexOf(self, _tab):
if self.index_of is None:
raise ValueError("indexOf got called with index_of None!")
if self.index_of is RuntimeError:
raise RuntimeError
return self.index_of
def currentIndex(self):
if self.current_index is None:
raise ValueError("currentIndex got called with current_index "
"None!")
return self.current_index
def currentWidget(self):
idx = self.currentIndex()
if idx == -1:
return None
return self.tabs[idx - 1]
class HTTPPostStub(QObject):
"""A stub class for HTTPClient.
Attributes:
        url: the last url sent by post()
        data: the last data sent by post()
"""
success = pyqtSignal(str)
error = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.url = None
self.data = None
def post(self, url, data=None):
self.url = url
self.data = data
class FakeDownloadItem(QObject):
"""Mock browser.downloads.DownloadItem."""
finished = pyqtSignal()
def __init__(self, fileobj, name, parent=None):
super().__init__(parent)
self.fileobj = fileobj
self.name = name
self.successful = False
class FakeDownloadManager:
"""Mock browser.downloads.DownloadManager."""
def __init__(self, tmpdir):
self._tmpdir = tmpdir
self.downloads = []
@contextlib.contextmanager
def _open_fileobj(self, target):
"""Ensure a DownloadTarget's fileobj attribute is available."""
if isinstance(target, downloads.FileDownloadTarget):
target.fileobj = open(target.filename, 'wb')
try:
yield target.fileobj
finally:
target.fileobj.close()
else:
yield target.fileobj
def get(self, url, target, **kwargs):
"""Return a FakeDownloadItem instance with a fileobj.
The content is copied from the file the given url links to.
"""
with self._open_fileobj(target):
download_item = FakeDownloadItem(target.fileobj, name=url.path())
with (self._tmpdir / url.path()).open('rb') as fake_url_file:
shutil.copyfileobj(fake_url_file, download_item.fileobj)
self.downloads.append(download_item)
return download_item
class FakeHistoryProgress:
"""Fake for a WebHistoryProgress object."""
def __init__(self):
self._started = False
self._finished = False
self._value = 0
def start(self, _text, _maximum):
self._started = True
def tick(self):
self._value += 1
def finish(self):
self._finished = True
class FakeCommandRunner(runners.AbstractCommandRunner):
def __init__(self, parent=None):
super().__init__(parent)
self.commands = []
def run(self, text, count=None, *, safely=False):
self.commands.append((text, count))
class FakeHintManager:
def __init__(self):
self.keystr = None
def handle_partial_key(self, keystr):
self.keystr = keystr
|
gpl-3.0
| 7,100,110,923,038,181,000
| 25.217125
| 79
| 0.613904
| false
| 4.016397
| false
| false
| false
|
sgnn7/sgfc
|
communication/comms.py
|
1
|
2263
|
#!/usr/bin/env python2
import time
from devices.zigbee_xbee import XBeeCommDevice
from protobufs import sgfc_pb2 as fc_proto
def test_comms():
dev1 = None
dev2 = None
fc_message = fc_proto.FlightMessage()
fc_message.sender = "Me"
payload = fc_proto.Payload()
payload.type = fc_proto.GPS_POSITION
payload.gps_position.has_fix = False
payload.gps_position.latitude = 1.1111
payload.gps_position.longitude = 22.222
payload.gps_position.altitude = 333.33
payload.gps_position.speed = 4444.4
fc_message.payload.extend([payload])
print(fc_message)
def callback(data):
print("Client got a message!")
proto_message = fc_proto.FlightMessage()
proto_message.ParseFromString(data)
print("Size: %d bytes" % (len(data),))
print('=' * 40)
print(proto_message)
print('=' * 40)
def error_callback(error):
print("Client got error: %s" % (error,))
# TODO: argparse the device
try:
dev1 = XBeeCommDevice('/dev/ttyUSB0', '\x00\x01',
callback=callback,
error_callback=error_callback,
network_id='\xab\xcd')
dev2 = XBeeCommDevice('/dev/ttyUSB1', '\x00\x02',
callback=callback,
error_callback=error_callback,
network_id='\xab\xcd')
print('')
dev2.tx('\x00\x01', fc_message.SerializeToString())
time.sleep(1)
print('')
dev1.tx('\x00\x02', fc_message.SerializeToString())
time.sleep(1)
print('')
print("Testing high-speed transfer")
serialized_message = fc_message.SerializeToString()
start = time.time()
for index in range(100):
dev1.tx('\x00\x02', serialized_message)
dev2.tx('\x00\x02', serialized_message)
end = time.time()
time.sleep(1)
print("Elapsed: %.2fs" % (end - start,))
except Exception as e:
print(e)
print('')
print("Cleaning up")
if dev1:
dev1.close()
if dev2:
dev2.close()
print("Done")
if __name__ == '__main__':
test_comms()
|
lgpl-2.1
| 3,578,514,160,517,611,000
| 23.333333
| 60
| 0.549271
| false
| 3.765391
| false
| false
| false
|
daisychainme/daisychain
|
daisychain/channel_dropbox/tests/test_models.py
|
1
|
1492
|
from django.contrib.auth.models import User
from django.test import TestCase
from .models import DropboxAccount, DropboxUser
class TestModelsDropboxAccount(TestCase):
def test_account_str_len(self):
user = User.objects.create_user('John')
dbx_account = DropboxAccount(
user = user,
access_token = 'test_access_token',
cursor = ''
)
dbx_account.save()
string = str(dbx_account)
self.assertEqual(string,
"DropboxAccount belongs to user {}".format(
user))
self.assertEqual(len(DropboxAccount.objects.all()), 1)
class TestModelsDropboxUser(TestCase):
def test_user_str_len(self):
user = User.objects.create_user('John')
dbx_account = DropboxAccount(
user = user,
access_token = '_test_access_token',
cursor = '',
)
dbx_account.save()
dbx_user = DropboxUser(
dropbox_account = dbx_account,
dropbox_userid = 4211,
display_name = "John Doe",
email = "john.doe@test.org",
profile_photo_url = "url.to/the_profile_photo",
disk_used = 4234.234,
disk_allocated = 12345678.4444
)
dbx_user.save()
string = str(dbx_user)
self.assertEqual(string, "Dropbox User #4211 belongs to DropboxAccount {}".format(
dbx_account))
self.assertEqual(len(User.objects.all()), 1)
|
mit
| 4,904,110,218,897,328,000
| 32.909091
| 90
| 0.577748
| false
| 3.885417
| true
| false
| false
|
cloudkeep/symantecssl
|
symantecssl/order.py
|
1
|
4443
|
from __future__ import absolute_import, division, print_function
import requests
from lxml import etree
from symantecssl.request_models import RequestEnvelope as ReqEnv
class FailedRequest(Exception):
def __init__(self, response):
super(FailedRequest, self).__init__()
self.response = response
def _prepare_request(request_model, credentials):
"""
Prepare the request for execution.
:param request_model: an object with a ``serialize`` method that returns
some LXML Etrees.
:param dict credentials: A dictionary containing the following keys:
- ``partner_code``
- ``username``
- ``password``
:return: a 2-tuple of C{bytes} - the contents of the request and C{dict}
mapping C{bytes} to C{bytes} - the HTTP headers for the request.
"""
request_model.set_credentials(**credentials)
model = ReqEnv(request_model=request_model)
serialized_xml = etree.tostring(model.serialize(), pretty_print=True)
headers = {'Content-Type': 'application/soap+xml'}
return (serialized_xml, headers)
def _parse_response(request_model, response, status_code, response_content):
"""
Parse a response from Symantec.
:param request_model: an object with a ``response_model`` attribute,
representing the request that this response maps to.
:param response: An HTTP response object; used only to instantiate
:obj:`FailedRequest`.
:param int status_code: The HTTP status code of the response.
:param bytes response_content: The bytes of the response.
:return: some LXML DOM nodes.
"""
    # Symantec is not expected to return any 2xx status other than 200
if status_code != 200:
raise FailedRequest(response)
xml_root = etree.fromstring(response_content)
return request_model.response_model.deserialize(xml_root)
def post_request(endpoint, request_model, credentials):
"""Create a post request against Symantec's SOAPXML API.
Currently supported Request Models are:
GetModifiedOrders
QuickOrderRequest
note:: the request can take a considerable amount of time if the
    date range covers a large number of changes.
note:: credentials should be a dictionary with the following values:
partner_code
username
password
Access all data from response via models
:param endpoint: Symantec endpoint to hit directly
:param request_model: request model instance to initiate call type
:type request_model: :obj:`symantecssl.request_models.Request`
:param credentials: Symantec specific credentials for orders.
:return response: deserialized response from API
"""
serialized_xml, headers = _prepare_request(request_model, credentials)
response = requests.post(endpoint, serialized_xml, headers=headers)
setattr(response, "model", None)
deserialized = _parse_response(request_model, response,
response.status_code, response.content)
setattr(response, "model", deserialized)
return response
def _after(something):
def decorator(decoratee):
return something.addCallback(decoratee)
return decorator
def post_request_treq(treq, endpoint, request_model, credentials):
"""
Like ``post_request``, but using the Twisted HTTP client in ``treq``.
:param treq: the ``treq`` module to use; either the treq module itself or
an HTTPClient with an added ``.content`` attribute like
``treq.content``.
:param text_type endpoint: the URL of the full Symantec endpoint for either
orders or queries
:param request_model: the request to issue to symantec.
:type request_model: :obj:`symantecssl.request_models.Request`
:return: a Deferred firing with an instance of the appropriate response
model for ``request_model`` looked up via the ``.response_model``
attribute on it, or failing with ``FailedRequest``.
"""
serialized_xml, headers = _prepare_request(request_model, credentials)
@_after(treq.post(endpoint, serialized_xml, headers=headers))
def posted(response):
@_after(treq.content(response))
def content(response_content):
deserialized = _parse_response(request_model, response,
response.code, response_content)
return deserialized
return content
return posted
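# Usage sketch (hypothetical endpoint and credentials; GetModifiedOrders is
# one of the request models named in post_request's docstring, its
# constructor arguments elided here):
#
#     creds = {'partner_code': '...', 'username': '...', 'password': '...'}
#     response = post_request('https://api.symantec.example/soap',
#                             GetModifiedOrders(...), creds)
#     response.model  # deserialized response, attached via setattr above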
|
apache-2.0
| -4,661,121,772,519,694,000
| 33.984252
| 79
| 0.684898
| false
| 4.334634
| false
| false
| false
|
Dangerpuss/Dumpster
|
winDATget/Forget.py
|
1
|
1457
|
from subprocess import Popen
import os
import csv
srcfile = "/" + input('File Input Name: ')
dirpath = os.path.dirname(__file__)
srcpath = os.path.dirname(__file__) + srcfile
with open(srcpath, newline='') as f:
reader = csv.reader(f)
for row in reader:
host = (row[0])
user = (row[1])
newpath = os.path.dirname(__file__) + "\\" + host
os.mkdir(newpath)
p = open(newpath + '\{}'.format(host) + '.bat', 'w')
p.write('net use x: \\\{}'.format(host) + '\c$' + '\n')
p.write(r'xcopy /H x:\Users\{}'.format(user) + r'\AppData\Local\Microsoft\Windows\History\History.IE5\index.dat ' + newpath + '\n')
		p.write(r'attrib -s -h ' + newpath + '\index.dat' + '\n')
p.write(r'ren ' + newpath + '\index.dat {}'.format(user) +'_History.dat' + '\n')
p.write(r'xcopy /H "x:\Users\{}'.format(user) + r'\AppData\Local\Microsoft\Windows\Temporary Internet Files\Low\Content.IE5\index.dat" ' + newpath + '\n')
p.write(r'attrib -s -h ' + newpath + '\index.dat' + '\n')
p.write(r'ren ' + newpath + '\index.dat {}'.format(user) +'_Temp.dat' + '\n')
p.write(r'xcopy /H x:\Windows\System32\winevt\Logs\Security.evtx ' + newpath + '\n')
p.write(r'xcopy /H x:\Windows\System32\winevt\Logs\System.evtx ' + newpath + '\n')
p.write(r'xcopy /H x:\Windows\System32\winevt\Logs\Application.evtx ' + newpath + '\n')
p.write('net use x: /d')
p.close()
p = Popen(newpath + '\{}'.format(host) + '.bat')
stdout, stderr = p.communicate()
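# Expected input CSV layout (one "host,user" pair per line, no header), e.g.:
#
#     WORKSTATION01,jdoe
#     WORKSTATION02,asmith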
|
gpl-2.0
| 2,042,619,565,739,580,000
| 39.472222
| 156
| 0.607412
| false
| 2.649091
| false
| false
| false
|
novafloss/django-formidable
|
formidable/forms/__init__.py
|
1
|
7650
|
# -*- coding: utf-8 -*-
"""
This module exposes everything needed to generate a standard django form class
from a formidable object.
Given a formidable object, you can use :func:`get_dynamic_form_class` to get
its corresponding django form class.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django import forms
from django.db.models import Prefetch
from formidable.forms import field_builder
from formidable.forms.conditions import conditions_register
from formidable.models import Access, Formidable, Item
class FormidableBoundFieldCache(dict):
"""
In Django 1.8, bound fields are handled in the form context (__getitem__).
However, we want to inject our own BoundField for FormatField in order to
handle labels differently.
This can be achieved by implementing the get_bound_field method in our
field (available in Django >= 1.9). For now, if the method exists,
the bound_field is switched-in at the form level.
"""
def __setitem__(self, key, bf):
form, field, name = bf.form, bf.field, bf.name
if hasattr(field, 'get_bound_field'):
bf = field.get_bound_field(form, name)
return super(FormidableBoundFieldCache, self).__setitem__(key, bf)
class BaseDynamicForm(forms.Form):
"""
This class is used to generate the final Django form class corresponding to
the formidable object.
Please do not use this class directly, rather, you should check the
endpoint :func:`get_dynamic_form_class`
"""
def __init__(self, *args, **kwargs):
super(BaseDynamicForm, self).__init__(*args, **kwargs)
self._bound_fields_cache = FormidableBoundFieldCache()
def get_removed_fields(self, cleaned_data):
"""
Build the list of fields to be removed due to conditional displays
"""
# build a catalog of fields **targeted** by the conditions
condition_targets = {}
# For each condition, extract its status (should I display or not)
for condition in self._conditions:
# should we keep these fields?
keep_fields = condition.keep_fields(cleaned_data)
for field_id in condition.fields_ids:
# Fill the catalog
if field_id not in condition_targets:
condition_targets[field_id] = []
condition_targets[field_id].append(keep_fields)
# Here, the catalog contains fields targeted by 1 or many conditions.
# If only one condition says "please display X", we'll keep X
# That's why we gather the conditions using "any"
condition_targets = {k: any(v) for k, v in condition_targets.items()}
# We'll only remove fields that are targeted by conditions **and**
# those conditions are false
return (k for k, v in condition_targets.items() if not v)
def clean(self):
cleaned_data = super(BaseDynamicForm, self).clean()
removed_fields = self.get_removed_fields(cleaned_data)
for field_id in removed_fields:
# Remove field from cleaned_data
cleaned_data.pop(field_id, None)
# Remove from eventual existing errors
self.errors.pop(field_id, None)
# The field might have been removed if it was a file field.
if field_id in self.fields:
del self.fields[field_id]
return cleaned_data
def get_dynamic_form_class_from_schema(schema, field_factory=None):
"""
Return a dynamically generated and contextualized form class
"""
attrs = OrderedDict()
field_factory = field_factory or field_builder.FormFieldFactory()
doc = schema['description']
for field in schema['fields']:
try:
form_field = field_factory.produce(field)
except field_builder.SkipField:
pass
else:
attrs[field['slug']] = form_field
conditions = schema.get('conditions', None) or []
attrs['_conditions'] = conditions_register.build(
attrs,
conditions
)
form_class = type(str('DynamicForm'), (BaseDynamicForm,), attrs)
form_class.__doc__ = doc
return form_class
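# Illustrative call (schema trimmed; real field definitions follow the
# formidable JSON schema):
#
#     schema = {'description': 'Contact form',
#               'fields': [...], 'conditions': []}
#     DynamicForm = get_dynamic_form_class_from_schema(schema)
#     form = DynamicForm(data={...})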
def get_dynamic_form_class(formidable, role=None, field_factory=None):
"""
This is the main method for getting a django form class from a formidable
object.
.. code-block:: python
form_obj = Formidable.objects.get(pk=42)
django_form_class = get_dynamic_form_class(form_obj)
The optional :params:`role` argument provides a way to get the form class
according to the access rights you specify by role. The :params:`role` must
be a role id, as defined by the code pointed to in
settings.FORMIDABLE_ACCESS_RIGHTS_LOADER.
.. code-block:: python
form_obj = Formidable.objects.get(pk=42)
django_form_class = get_dynamic_form_class(form_obj, role='jedi')
"""
attrs = OrderedDict()
field_factory = field_factory or field_builder.FormFieldFactory()
access_qs = Access.objects.all()
if role:
access_qs = access_qs.filter(access_id=role)
fields = formidable.fields.prefetch_related(
Prefetch('items', queryset=Item.objects.order_by('order')),
Prefetch('accesses', queryset=access_qs),
'validations', 'defaults'
)
for field in fields.order_by('order').all():
try:
form_field = field_factory.produce(field, role)
except field_builder.SkipField:
pass
else:
attrs[field.slug] = form_field
conditions_json = formidable.conditions or []
attrs['_conditions'] = conditions_register.build(attrs, conditions_json)
return type(str('DynamicForm'), (BaseDynamicForm,), attrs)
class FormidableForm(forms.Form):
"""
This is the main class available to build a formidable object with Django's
form API syntax.
It provides a class method :meth:`to_formidable` which saves the declared
    form as a formidable object.
Check the formidable.forms.fields module to see what fields are available
when defining your form.
"""
@classmethod
def to_formidable(cls, label=None, description=None, instance=None):
if not instance:
if not label:
raise ValueError("Label is required on creation mode")
description = description or ''
form = Formidable.objects.create(
label=label, description=description
)
else:
form = cls.get_clean_form(instance, label, description)
order = 0
for slug, field in cls.declared_fields.items():
field.to_formidable(form, order, slug)
order += 1
return form
@classmethod
def get_clean_form(cls, form, label, description):
"""
        Given a form definition plus label and description values, this method
        removes all fields and validations attached to the form.
        If the label or description is non-empty, the value is updated
        in the database *and* in memory.
        The returned object is a form without fields or validations, carrying
        the new label and description if provided.
"""
form.fields.all().delete()
if description or label:
kwargs = {
'description': description or form.description,
'label': label or form.label,
}
Formidable.objects.filter(pk=form.pk).update(**kwargs)
form.label = kwargs['label']
form.description = kwargs['description']
return form
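# Usage sketch (illustrative; see formidable.forms.fields for the available
# field types):
#
#     class ContactForm(FormidableForm):
#         name = fields.CharField(label='Name')
#
#     formidable_obj = ContactForm.to_formidable(label='contact')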
|
mit
| -4,146,912,984,624,898,000
| 34.091743
| 79
| 0.642484
| false
| 4.207921
| false
| false
| false
|
SleepyDeveloper/alexa-cookbook
|
tools/TestFlow/sampleskill3/index.py
|
2
|
7822
|
"""
This sample demonstrates a simple skill built with the Amazon Alexa Skills Kit.
The Intent Schema, Custom Slots, and Sample Utterances for this skill, as well
as testing instructions are located at http://amzn.to/1LzFrj6
For additional samples, visit the Alexa Skills Kit Getting Started guide at
http://amzn.to/1LGWsLG
"""
from __future__ import print_function
# --------------- Helpers that build all of the responses ----------------------
def build_speechlet_response(title, output, reprompt_text, should_end_session):
return {
'outputSpeech': {
'type': 'PlainText',
'text': output
},
'card': {
'type': 'Simple',
'title': "SessionSpeechlet - " + title,
'content': "SessionSpeechlet - " + output
},
'reprompt': {
'outputSpeech': {
'type': 'PlainText',
'text': reprompt_text
}
},
'shouldEndSession': should_end_session
}
def build_response(session_attributes, speechlet_response):
return {
'version': '1.0',
'sessionAttributes': session_attributes,
'response': speechlet_response
}
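# For reference, build_response() yields the standard Alexa response envelope
# (illustrative shape):
#
#     {
#         "version": "1.0",
#         "sessionAttributes": {...},
#         "response": {"outputSpeech": {...}, "card": {...},
#                      "reprompt": {...}, "shouldEndSession": false}
#     }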
# --------------- Functions that control the skill's behavior ------------------
def get_welcome_response():
""" If we wanted to initialize the session to have some attributes we could
add those here
"""
session_attributes = {}
card_title = "Welcome"
speech_output = "Welcome to the Alexa Skills Kit sample. " \
"Please tell me your favorite color by saying, " \
"my favorite color is red"
# If the user either does not reply to the welcome message or says something
# that is not understood, they will be prompted again with this text.
reprompt_text = "Please tell me your favorite color by saying, " \
"my favorite color is red."
should_end_session = False
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, reprompt_text, should_end_session))
def handle_session_end_request():
card_title = "Session Ended"
speech_output = "Thank you for trying the Alexa Skills Kit sample. " \
"Have a nice day! "
# Setting this to true ends the session and exits the skill.
should_end_session = True
return build_response({}, build_speechlet_response(
card_title, speech_output, None, should_end_session))
def create_favorite_color_attributes(favorite_color):
return {"favoriteColor": favorite_color}
def set_color_in_session(intent, session):
""" Sets the color in the session and prepares the speech to reply to the
user.
"""
card_title = intent['name']
session_attributes = {}
should_end_session = False
if 'Color' in intent['slots']:
favorite_color = intent['slots']['Color']['value']
session_attributes = create_favorite_color_attributes(favorite_color)
speech_output = "I now know your favorite color is " + \
favorite_color + \
". You can ask me your favorite color by saying, " \
"what's my favorite color?"
reprompt_text = "You can ask me your favorite color by saying, " \
"what's my favorite color?"
else:
speech_output = "I'm not sure what your favorite color is. " \
"Please try again."
reprompt_text = "I'm not sure what your favorite color is. " \
"You can tell me your favorite color by saying, " \
"my favorite color is red."
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, reprompt_text, should_end_session))
def get_color_from_session(intent, session):
session_attributes = {}
reprompt_text = None
if session.get('attributes', {}) and "favoriteColor" in session.get('attributes', {}):
favorite_color = session['attributes']['favoriteColor']
speech_output = "Your favorite color is " + favorite_color + \
". Goodbye."
should_end_session = True
else:
speech_output = "I'm not sure what your favorite color is. " \
"You can say, my favorite color is red."
should_end_session = False
# Setting reprompt_text to None signifies that we do not want to reprompt
# the user. If the user does not respond or says something that is not
# understood, the session will end.
return build_response(session_attributes, build_speechlet_response(
intent['name'], speech_output, reprompt_text, should_end_session))
# --------------- Events ------------------
def on_session_started(session_started_request, session):
""" Called when the session starts """
print("on_session_started requestId=" + session_started_request['requestId']
+ ", sessionId=" + session['sessionId'])
def on_launch(launch_request, session):
""" Called when the user launches the skill without specifying what they
want
"""
# print("on_launch requestId=" + launch_request['requestId'] + ", sessionId=" + session['sessionId'])
# Dispatch to your skill's launch
return get_welcome_response()
def on_intent(intent_request, session):
""" Called when the user specifies an intent for this skill """
# print("on_intent requestId=" + intent_request['requestId'] + ", sessionId=" + session['sessionId'])
# print("print comment from intent ", intent_request['intent']['name'])
intent = intent_request['intent']
intent_name = intent_request['intent']['name']
# Dispatch to your skill's intent handlers
if intent_name == "MyColorIsIntent":
return set_color_in_session(intent, session)
elif intent_name == "WhatsMyColorIntent":
return get_color_from_session(intent, session)
elif intent_name == "AMAZON.HelpIntent":
return get_welcome_response()
elif intent_name == "AMAZON.CancelIntent" or intent_name == "AMAZON.StopIntent":
return handle_session_end_request()
else:
raise ValueError("Invalid intent")
def on_session_ended(session_ended_request, session):
""" Called when the user ends the session.
Is not called when the skill returns should_end_session=true
"""
print("on_session_ended requestId=" + session_ended_request['requestId'] +
", sessionId=" + session['sessionId'])
# add cleanup logic here
# --------------- Main handler ------------------
def lambda_handler(event, context):
""" Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
# print("event.session.application.applicationId=" + event['session']['application']['applicationId'])
"""
Uncomment this if statement and populate with your skill's application ID to
prevent someone else from configuring a skill that sends requests to this
function.
"""
# if (event['session']['application']['applicationId'] !=
# "amzn1.echo-sdk-ams.app.[unique-value-here]"):
# raise ValueError("Invalid Application ID")
if event['session']['new']:
on_session_started({'requestId': event['request']['requestId']},
event['session'])
if event['request']['type'] == "LaunchRequest":
return on_launch(event['request'], event['session'])
elif event['request']['type'] == "IntentRequest":
return on_intent(event['request'], event['session'])
elif event['request']['type'] == "SessionEndedRequest":
return on_session_ended(event['request'], event['session'])
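# Local smoke test (illustrative event; real Alexa requests carry many more
# fields):
#
#     event = {'session': {'new': True, 'sessionId': 'session-1',
#                          'application': {'applicationId': 'test-app-id'}},
#              'request': {'type': 'LaunchRequest', 'requestId': 'request-1'}}
#     print(lambda_handler(event, None))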
|
apache-2.0
| 4,030,710,574,117,284,000
| 37.156098
| 106
| 0.623114
| false
| 4.11901
| false
| false
| false
|
MG-group-tools/MGFunc
|
mgfunc_v2/swiss2tab.py
|
1
|
7278
|
from __future__ import division
import argparse
from Bio import SeqIO
from datetime import datetime as dt
import time
import os
import sys
import gzip
class main:
def __init__(self):
self.start = time.time()
self.d_ = dt.today()
self.timestarted = self.d_.strftime("%d-%m-%Y %H:%M:%S")
self.parseArgs()
def parseArgs(self):###GETTING ARGUMENTS FROM COMMANDLINE###
parser = argparse.ArgumentParser(prog="swiss2tab",usage="swiss2tab.py -i <input UNIPROT> -o <output-file>",epilog="Example: python2.7 swiss2tab.py -i uniprot_sprot.dat -o uniprot_sprot.tab\n\nWritten by Kosai+Asli, OCT 2013. Last modified MAY 2014.",description="Desctription: Extracts AC,ID,DE,GN,Taxonomy,AC(cession),Organism,ncbi_taxID,GO-term,KEGG-id from STOCKHOLM-formatted file and converts it to tabular-format")
parser.add_argument("-i",metavar="database", help="STOCKHOLM-formatted database",nargs=1,required=True)
parser.add_argument("-o",metavar="OUTPUT NAME",help="output-name, put the whole output name, fx '-o uniprot.dat.tab'",nargs=1,required=True)
# parser.add_argument("-q","--quiet",help="Quiet-mode, suppresses all stdout output. Write \"-q\" with no arguments in commandline. Default is off.",action="store_true")
parser.add_argument("-v",help="Verbose. Prints out progress and details to stdout output. Write \"-v\" with no arguments in commandline. Default is off.",action="store_true")
# return parser.parse_args(), parser
self.parser = parser
def makeTAB(self):
fid = self.gzipopen(self.args.i[0]) #input_database
fout = open(self.args.o[0],"w") #output_tab-file-name
dbfile = os.popen("grep \"ID \" "+self.args.i[0] + " | wc -l")
ctot = dbfile.read()
dbfile.close()
ctot = int(ctot.split(" ")[0])
rangelist = range(0,ctot,10000)
timeEST = ctot*17/536489
self.printer("Estimated time usage: "+str(round(timeEST,1))+" minutes ("+str(round(timeEST/60,1))+" hours)\n")
input_seq_iterator = SeqIO.parse(fid, "swiss")
fout.write("AC(name)\tID\tDE\tGN\tTaxonomy\tAccession\tOrganism\tncbi_taxID\tGO_term\tKEGG_id\n")
rowstring = ""
c = 0
for record in input_seq_iterator:
if record.name:
rowstring += record.name+"\t"
else:
rowstring += "N/A\t"
if record.id:
rowstring += record.id+"\t"
else:
rowstring += "N/A\t"
if record.description:
rowstring += record.description+"\t"
else:
rowstring += "N/A\t"
if record.annotations:
if 'gene_name' in record.annotations:
rowstring += str(record.annotations['gene_name'])+"\t"
else:
rowstring += "N/A\t"
if "taxonomy" in record.annotations:
rowstring += str(record.annotations["taxonomy"])+"\t"
else:
rowstring += "N/A\t"
if "accessions" in record.annotations:
rowstring += str(record.annotations['accessions'])+"\t"
else:
rowstring += "N/A\t"
if "organism" in record.annotations:
rowstring += str(record.annotations['organism'])+"\t"
else:
rowstring += "N/A\t"
if "ncbi_taxid" in record.annotations:
rowstring += str(record.annotations['ncbi_taxid'])+"\t"
else:
rowstring += "N/A\t"
KEGG = []
GO = []
if record.dbxrefs:
for el in record.dbxrefs:
if el[0:3] == "GO:":
# rowstring += el[3:]+";"
GO.append(el[3:])
if el[0:5] == "KEGG:":
KEGG.append(el[5:])
if not KEGG:
# rowstring += "N/A"
KEGG.append("N/A")
if not GO:
GO.append("N/A")
go = ";".join(GO)
kegg = ";".join(KEGG)
rowstring += go + "\t" + kegg
fout.write(rowstring+"\n")
rowstring = ""
c += 1
if c in rangelist or c==1:
self.printer("FINISHED "+str(c)+" ENTRIES out of "+str(ctot)+"\n")
sys.stdout.flush()
self.printer("FINISHED "+str(c)+" ENTRIES out of "+str(ctot)+"\n")
fid.close()
fout.close()
self.indextab()
    def printer(self,string): # suppresses output printing unless -v (verbose) is on
# if not self.args.quiet:
if self.args.v:
print string,
def indextab(self):
fid = open(self.args.o[0],"r")
fout = open(self.args.o[0]+".indexed","w")
line = fid.readline()
while 1:
start = fid.tell()
line = fid.readline()
if not line or not len(line):
# stop = fid.tell()
# header = line.split("\t")[0]
# fout.write(header + "\t" + str(start) + "," + str(stop)+"\n")
break
stop = fid.tell()
header = line.split("\t")[0]
fout.write(header + "\t" + str(start) + "," + str(stop)+"\n")
fout.close()
fid.close()
def gzipopen(self,fileID):
if fileID[-3:] == ".gz":
return gzip.open(fileID)
else:
return open(fileID,"rU")
def mainthing(self):
# self.printer("Cluster2Fasta initialized at"+str(self.timestarted)+"\n")
self.makeTAB()
timeused = (time.time() - self.start) / 60
self.printer("### Time used: "+str(round(timeused)) + " min ("+str(round(timeused/60,1))+" hours)\n")
if __name__ == "__main__":
try:
myclass = main()
myclass.args = myclass.parser.parse_args(sys.argv[1:])
myclass.printer("\n### "+sys.argv[0]+" initialized at "+ myclass.timestarted + "\n")
myclass.printer("### OPTIONS: "+str(myclass.args)+"\n")
myclass.mainthing()
except IOError as i:
print "I/O error({0}): {1}".format(i.errno, i.strerror)
except Exception,e:
print str(e)
import traceback
traceback.print_exc()
# myclass = main()
# myclass.args = myclass.parser.parse_args(sys.argv[1:])
# myclass.mainthing()
'''
handle=open(swissfilename, "rU")
input_seq_iterator = SeqIO.parse(handle, "swiss")
for record in input_seq_iterator:
print record.id, record.name, record.description,record.annotations["taxonomy"],record.annotations['accessions'], record.annotations['ncbi_taxid'], record.annotations['organism'], record.annotations['gene_name']
handle.close()
'''
######################
'''
INPUT:
Extracts AC,ID,DE,GN,Taxonomy,AC(cession),Organism,ncbi_taxID,GO_term,KEGG-id
from STOCKHOLM-formatted file and converts it to tabular-format
OUTPUT:
Tabular form of a stockholm-formatted file, where each line is
an entry.
OPTIONS LIST:
-i database: STOCKHOLM-formatted database
-o OUTPUT NAME: output-name, tab-formatted
-q quiet: Quiet-mode, suppresses all stdout output. Write "-q" with no arguments in commandline. Default is off.
'''
|
gpl-3.0
| 3,271,182,841,077,309,400
| 36.515464
| 423
| 0.554411
| false
| 3.315718
| false
| false
| false
|
UrbanCCD-UChicago/plenario
|
plenario/sensor_network/api/ifttt.py
|
1
|
6149
|
import json
import time
import uuid
from os import environ
from dateutil.parser import parse
from flask import make_response, request
from plenario.api.common import crossdomain, unknown_object_json_handler
from plenario.api.response import bad_request
from plenario.api.validator import IFTTTValidator, sensor_network_validate
from plenario.sensor_network.api.sensor_networks import get_observation_queries, get_raw_metadata, \
sanitize_validated_args
# dictionary mapping the curated drop-down list name to the correct feature and property
curated_map = {'temperature': 'temperature.temperature'}
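# e.g. 'temperature' maps to feature 'temperature' and property 'temperature';
# get_ifttt_observations() below splits the value on '.' to recover the pair.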
# TODO: error list?
@crossdomain(origin='*')
def get_ifttt_observations():
if request.headers.get('IFTTT-Channel-Key') != environ.get('IFTTT_CHANNEL_KEY'):
return make_ifttt_error('incorrect channel key', 401)
input_args = request.json
args = dict()
try:
args['network'] = 'plenario_development'
args['nodes'] = [input_args['triggerFields']['node']]
args['feature'] = curated_map[input_args['triggerFields']['curated_property']].split('.')[0]
args['limit'] = input_args['limit'] if 'limit' in list(input_args.keys()) else 50
args['filter'] = json.dumps({'prop': curated_map[input_args['triggerFields']['curated_property']].split('.')[1],
'op': input_args['triggerFields']['op'],
'val': float(input_args['triggerFields']['val'])})
# pass through the curated input property so we can return it to the user for display purposes
curated_property = input_args['triggerFields']['curated_property']
except (KeyError, ValueError) as err:
return make_ifttt_error(str(err), 400)
# override the normal limit 0 behaviour, which is to apply no limit
if args['limit'] == 0:
return make_ifttt_response([])
fields = ('network', 'nodes', 'feature', 'sensors',
'start_datetime', 'end_datetime', 'limit', 'filter')
validated_args = sensor_network_validate(IFTTTValidator(only=fields), args)
if validated_args.errors:
return bad_request(validated_args.errors)
validated_args.data.update({
'features': [validated_args.data['feature']],
'feature': None
})
validated_args = sanitize_validated_args(validated_args)
observation_queries = get_observation_queries(validated_args)
    if not isinstance(observation_queries, list):
return observation_queries
return run_ifttt_queries(observation_queries, curated_property)
@crossdomain(origin='*')
def get_ifttt_meta(field):
if request.headers.get('IFTTT-Channel-Key') != environ.get('IFTTT_CHANNEL_KEY'):
return make_ifttt_error('incorrect channel key', 401)
data = []
if field == 'node':
args = {'network': 'plenario_development'}
fields = ('network',)
validated_args = sensor_network_validate(IFTTTValidator(only=fields), args)
data = [{'label': node.id,
'value': node.id} for node in get_raw_metadata('nodes', validated_args)]
elif field == 'curated_property':
data = [{'label': curated_property,
'value': curated_property} for curated_property in list(curated_map.keys())]
return make_ifttt_response(data)
def format_ifttt_observations(obs, curated_property):
obs_response = {
'node': obs.node_id,
'datetime': obs.datetime.isoformat() + '+05:00',
'curated_property': curated_property,
'value': getattr(obs, curated_map[curated_property].split('.')[1]),
'meta': {
'id': uuid.uuid1().hex,
'timestamp': int(time.time())
}
}
return obs_response
def run_ifttt_queries(queries, curated_property):
data = list()
for query, table in queries:
data += [format_ifttt_observations(obs, curated_property) for obs in query.all()]
data.sort(key=lambda x: parse(x['datetime']), reverse=True)
return make_ifttt_response(data)
def make_ifttt_response(data):
resp = {
'data': data
}
resp = make_response(json.dumps(resp, default=unknown_object_json_handler), 200)
resp.headers['Content-Type'] = 'application/json; charset=utf-8'
return resp
def make_ifttt_error(err, status_code):
resp = {
'errors': [{'message': err}]
}
resp = make_response(json.dumps(resp, default=unknown_object_json_handler), status_code)
resp.headers['Content-Type'] = 'application/json; charset=utf-8'
return resp
# ========================
# IFTTT testing endpoints
# ========================
@crossdomain(origin='*')
def ifttt_status():
if request.headers.get('IFTTT-Channel-Key') != environ.get('IFTTT_CHANNEL_KEY'):
return make_ifttt_error('incorrect channel key', 401)
resp = make_response('{}', 200)
resp.headers['Content-Type'] = 'application/json'
return resp
@crossdomain(origin='*')
def ifttt_test_setup():
if request.headers.get('IFTTT-Channel-Key') != environ.get('IFTTT_CHANNEL_KEY'):
return make_ifttt_error('incorrect channel key', 401)
resp = {
'data': {
'samples': {
'triggers': {
'property_comparison': {
'node': 'node_dev_1',
'curated_property': 'temperature',
'op': 'gt',
'val': 0
}
},
'triggerFieldValidations': {
'property_comparison': {
'node': {
'valid': 'node_dev_1',
'invalid': 'invalid_node'
},
'curated_property': {
'valid': 'temperature',
'invalid': 'invalid_property'
}
}
}
}
}
}
resp = make_response(json.dumps(resp, default=unknown_object_json_handler), 200)
resp.headers['Content-Type'] = 'application/json; charset=utf-8'
return resp
|
mit
| 2,408,303,469,001,245,000
| 34.137143
| 120
| 0.593267
| false
| 3.941667
| false
| false
| false
|
bycoffe/django-liveblog
|
models.py
|
1
|
1303
|
import datetime
from django.db import models
from django.conf import settings
from django.template import loader, Context
from markdown import markdown
blog = __import__(settings.BLOG_APP)
Entry = blog.models.__getattribute__(settings.BLOG_ENTRY_MODEL)
if Entry.objects.count():
default_blog_entry = Entry.objects.all()[0]
else:
default_blog_entry = None
class LiveBlogEntry(models.Model):
pub_date = models.DateTimeField(default=datetime.datetime.now)
body = models.TextField()
body_html = models.TextField(editable=False, blank=True)
blog_entry = models.ForeignKey(Entry,
default=(Entry.objects.all()[0].id
if Entry.objects.count()
else None))
class Meta:
verbose_name_plural = "Live Blog Entries"
ordering = ['-pub_date', ]
def __unicode__(self):
self.sample_size = 100 # Used only in admin.
return '%s: %s %s' % (self.blog_entry.title,
self.body[:self.sample_size],
'...' if len(self.body) > self.sample_size else '')
def save(self, *args, **kwargs):
self.body_html = markdown(self.body)
super(LiveBlogEntry, self).save()
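# Illustrative behaviour of the save() override (assumes at least one blog
# Entry exists so the ForeignKey default resolves):
#
#     entry = LiveBlogEntry(body='**bold** update')
#     entry.save()
#     entry.body_html  # '<p><strong>bold</strong> update</p>'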
|
bsd-3-clause
| 335,256,228,754,973,900
| 34.216216
| 81
| 0.584804
| false
| 4.071875
| false
| false
| false
|
cgmb/d2lmf
|
d2lmf/d2lmf.py
|
1
|
11079
|
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2018 Cordell Bloor
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""d2lmf.d2lmf: provides entry point main()."""
from __future__ import print_function
import argparse
import os
import errno
import shutil
import sys
__version__ = "1.0.0"
def makedirs_exist(path):
"""
Makes a directory at the given path without raising an error if it already exists
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def copytree_exist(src, dst):
"""
Copies a directory tree at the given path into the destination directory
without raising an error if the destination already exists
"""
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d)
else:
shutil.copy2(s, d)
def dir_empty_or_nonexistent(folder):
try:
return len(os.listdir(folder)) == 0
except OSError as e:
if e.errno != errno.ENOENT:
raise
return True
class ParserError(Exception):
pass
def parse_submission_dirname(dirname):
"""
Parses a directory name in the form '<id_number> - <student_name> - <timestamp>'
"""
seperator = ' - '
tokens = dirname.split(seperator)
if len(tokens) < 3:
raise ParserError('Expected hyphen-separated id, name and timestamp'
' in "%s"' % dirname)
id_number = tokens[0]
# we'll assume the extra hyphens are a part of the student's name
student_name = seperator.join(tokens[1:-1])
# ':' is not valid in NTFS filenames, so on Windows the time will have
# a '_' where there should be a ':'
timestamp = tokens[-1].replace('_',':')
return (id_number, student_name, timestamp)
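# Illustrative parse (note the '_' -> ':' repair for NTFS-mangled timestamps):
#
#     parse_submission_dirname('12345 - Ada Lovelace - Sep 29, 2015 4_17 PM')
#     # -> ('12345', 'Ada Lovelace', 'Sep 29, 2015 4:17 PM')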
def merge(src, dest):
"""
Merges the src folder into the dest folder
"""
vprint('Merging "%s" into "%s"' % (src, dest))
for src_root, dirs, files in os.walk(src):
dest_root = src_root.replace(src, dest, 1)
if not os.path.exists(dest_root):
os.makedirs(dest_root)
for f in files:
src_file = os.path.join(src_root, f)
dest_file = os.path.join(dest_root, f)
if os.path.exists(dest_file):
os.remove(dest_file)
shutil.move(src_file, dest_root)
shutil.rmtree(src)
def rename(input_folder, seperator):
"""
Rename all child folders, using their complicated D2L-given name to infer
the submitter's name. Use the submitter's name to create a short, easy
name for a folder to move the data to.
There may be multiple folders created by the same submitter, as they make
one for each submission. We'll merge those together, overwriting files
from the oldest with files from the newest whenever there's a conflict.
"""
from operator import itemgetter
from datetime import datetime
submissions = []
for name in os.listdir(input_folder):
if os.path.isdir(os.path.join(input_folder, name)):
try:
id_num, student, timestamp = parse_submission_dirname(name)
parsed_timestamp = datetime.strptime(timestamp,
'%b %d, %Y %I:%M %p') # Sep 29, 2015 4:17 PM
shortname = student.replace(' ', seperator)
submissions.append((name, shortname, parsed_timestamp))
except (ParserError,ValueError) as e:
print(e, file=sys.stderr)
# sort by student name, then by date
submissions.sort(key=itemgetter(1,2))
for dirname, student_name, timestamp in submissions:
try:
oldpath = os.path.join(input_folder, dirname)
newpath = os.path.join(input_folder, student_name)
if os.path.exists(newpath):
merge(oldpath, newpath)
else:
os.rename(oldpath, newpath)
except OSError as e:
print(e, file=sys.stderr)
print('Failed to merge "%s"' % oldpath, file=sys.stderr)
def extract_nested(folder):
"""
    Unzip, untar, unrar, or otherwise extract any archive found in the student submission.
"""
import patoolib
supported_suffixes = ('.zip', '.rar', '.tar.gz', '.tgz', '.tar.bz2',
'.tar.xz', '.7z', '.tar')
for root, dirs, files in os.walk(folder):
for f in files:
if f.endswith(supported_suffixes):
try:
archive = os.path.join(root, f)
vprint('Extracting archive: "%s"' % archive)
patoolib.extract_archive(archive, verbosity=-1,
interactive=False, outdir=root)
os.remove(archive)
except patoolib.util.PatoolError as e:
print(e, file=sys.stderr)
print('Failed to extract "%s"' % archive, file=sys.stderr)
def collapse_lonely(folder):
"""
Collapse 'lonely' folders into their parents. These are folders that are
needlessly nested. They have no sibling files or folders, so their existence
    does not separate their contents from anything.
"""
for submission in os.listdir(folder):
submission_path = os.path.join(folder, submission)
if os.path.isdir(submission_path):
submitted_files = os.listdir(submission_path)
if len(submitted_files) == 1:
submitted_file_path = os.path.join(submission_path, submitted_files[0])
if os.path.isdir(submitted_file_path):
vprint('Collapsing directory into parent: "%s"' % submitted_file_path)
for f in os.listdir(submitted_file_path):
f_path = os.path.join(submitted_file_path, f)
shutil.move(f_path, submission_path)
os.rmdir(submitted_file_path)
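# Illustration (hypothetical layout): if 'hw1/' contains only 'hw1/project/',
# every entry of 'project/' is moved up into 'hw1/' and the now-empty
# 'project/' directory is removed.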
def clean_junk(folder):
"""
Deletes useless files from the given directory tree
"""
for root, dirs, files in os.walk(folder):
for f in files:
if f in ['.DS_Store']:
try:
junk = os.path.join(root, f)
vprint('Removing: "%s"' % junk)
os.remove(junk)
except OSError as e:
print(e, file=sys.stderr)
print('Failed to remove "%s"' % junk, file=sys.stderr)
for d in dirs:
if d in ['__MACOSX']:
try:
junk = os.path.join(root, d)
vprint('Removing: "%s"' % junk)
shutil.rmtree(junk)
except (shutil.Error,OSError) as e:
print(e, file=sys.stderr)
print('Failed to remove "%s"' % junk, file=sys.stderr)
class ExtractError(Exception):
pass
def extract(args):
import zipfile
if not dir_empty_or_nonexistent(args.output_folder):
raise ExtractError('Output folder must be empty')
if os.path.isdir(args.input_path):
copytree_exist(args.input_path, args.output_folder)
else:
makedirs_exist(args.output_folder)
with zipfile.ZipFile(args.input_path, 'r') as z:
z.extractall(args.output_folder)
if args.extract_nested:
extract_nested(args.output_folder)
if args.junk:
clean_junk(args.output_folder)
if args.collapse:
collapse_lonely(args.output_folder)
if args.merge:
rename(args.output_folder, args.seperator)
def setup_vprint(args):
"""
Defines the function vprint, which only prints when --verbose is set
"""
global vprint
vprint = print if args.verbose else lambda *a, **k: None
def expand_aliases(args):
"""
Expands all arguments that are aliases for collections of other arguments.
"""
if args.recommended:
args.extract_nested = True
args.junk = True
args.collapse = True
args.merge = True
def main():
parser = argparse.ArgumentParser(prog='d2lmf',
description='d2lmf is a suite of tools to help mark assignments '
'submitted to D2L.')
parser.add_argument('-v','--verbose',
action='store_true',
help='Display more information about files being changed.')
parser.add_argument('--version', action='version',
version='%(prog)s ' + __version__)
subparsers = parser.add_subparsers(help='')
extract_parser = subparsers.add_parser('extract',
help='Extract student submissions from the D2L zip file and '
'optionally process them to be easier to work with.')
extract_parser.add_argument('input_path',
help='The zip file or unzipped directory to extract data from.')
extract_parser.add_argument('output_folder',
help='The folder in which to put extracted data.')
extract_parser.add_argument('-R','--recommended',
action='store_true',
help='Use the recommended extraction settings. This is an alias '
'for -xjcm.')
extract_parser.add_argument('-x','--extract-nested',
action='store_true',
help='Uses command-line tools to attempt to extract submitted '
'archive files, like zip files, tar files, rar files and 7zip '
'files.')
extract_parser.add_argument('-j','--junk',
action='store_true',
help='Clean up any unnecessary files and folders in the '
"submission, like '.DS_Store'.")
extract_parser.add_argument('-c','--collapse',
action='store_true',
help='Collapse pointless subdirectories whose parent directory '
'contains nothing else.')
extract_parser.add_argument('-m','--merge',
action='store_true',
help="Merge all of a student's submissions into a single folder.")
extract_parser.add_argument('-s','--seperator', default='_',
help="The seperator to replace spaces in the merged folder's name.")
extract_parser.add_argument('-v','--verbose',
action='store_true',
help='Display more information about files being changed.')
extract_parser.set_defaults(func=extract)
args = parser.parse_args()
setup_vprint(args)
expand_aliases(args)
try:
args.func(args)
except ExtractError as e:
print(e, file=sys.stderr)
sys.exit(1)
|
gpl-3.0
| -6,830,093,550,474,210,000
| 37.203448
| 90
| 0.601498
| false
| 4.039008
| false
| false
| false
|
codingforentrepreneurs/DjangoGap
|
src/postings/migrations/0003_auto_20141113_2257.py
|
1
|
1504
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('postings', '0002_auto_20141111_0540'),
]
operations = [
migrations.AlterModelOptions(
name='posting',
options={'ordering': ['-updated', '-timestamp']},
),
migrations.RemoveField(
model_name='posting',
name='post',
),
migrations.AddField(
model_name='posting',
name='title',
field=models.CharField(default=b'Title', max_length=200),
preserve_default=True,
),
migrations.AddField(
model_name='posting',
name='url',
field=models.URLField(default=b'http://youtube.com/', max_length=400),
preserve_default=True,
),
migrations.AlterField(
model_name='posting',
name='timestamp',
field=models.DateTimeField(default=datetime.datetime(2014, 11, 13, 22, 57, 38, 90833, tzinfo=utc), auto_now_add=True),
preserve_default=True,
),
migrations.AlterField(
model_name='posting',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2014, 11, 13, 22, 57, 38, 90874, tzinfo=utc), auto_now=True),
preserve_default=True,
),
]
|
gpl-2.0
| 6,243,464,691,273,709,000
| 30.333333
| 130
| 0.563165
| false
| 4.321839
| false
| false
| false
|
apmichaud/vitess-apm
|
test/queryservice_test.py
|
1
|
2644
|
#!/usr/bin/env python
import logging
import optparse
import traceback
import unittest
import sys
import utils
import framework
from queryservice_tests import cache_tests
from queryservice_tests import nocache_tests
from queryservice_tests import stream_tests
from queryservice_tests import status_tests
from queryservice_tests import test_env
if __name__ == "__main__":
parser = optparse.OptionParser(usage="usage: %prog [options] [test_names]")
parser.add_option("-m", "--memcache", action="store_true", default=False,
help="starts a memcache d, and tests rowcache")
parser.add_option("-e", "--env", default='vttablet,vtocc',
help="Environment that will be used. Valid options: vttablet, vtocc")
parser.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", default=1)
parser.add_option("-v", "--verbose", action="store_const", const=2, dest="verbose", default=0)
(options, args) = parser.parse_args()
utils.options = options
logging.getLogger().setLevel(logging.ERROR)
suite = unittest.TestSuite()
if args:
for arg in args:
if hasattr(nocache_tests.TestNocache, arg):
suite.addTest(nocache_tests.TestNocache(arg))
elif hasattr(stream_tests.TestStream, arg):
suite.addTest(stream_tests.TestStream(arg))
elif hasattr(cache_tests.TestCache, arg) and options.memcache:
suite.addTest(cache_tests.TestCache(arg))
elif hasattr(cache_tests.TestWillNotBeCached, arg) and options.memcache:
suite.addTest(cache_tests.TestWillNotBeCached(arg))
else:
raise Exception(arg, "not found in tests")
else:
modules = [nocache_tests, stream_tests, status_tests]
if options.memcache:
modules.append(cache_tests)
for m in modules:
suite.addTests(unittest.TestLoader().loadTestsFromModule(m))
try:
for env_name in options.env.split(','):
try:
if env_name == 'vttablet':
env = test_env.VttabletTestEnv()
elif env_name == 'vtocc':
env = test_env.VtoccTestEnv()
else:
raise Exception("Valid options for -e: vtocc, vttablet")
env.memcache = options.memcache
env.setUp()
print "Starting queryservice_test.py: %s" % env_name
sys.stdout.flush()
framework.TestCase.setenv(env)
result = unittest.TextTestRunner(verbosity=options.verbose).run(suite)
if not result.wasSuccessful():
raise Exception("test failures")
finally:
try:
env.tearDown()
except:
traceback.print_exc()
finally:
utils.remove_tmp_files()
|
bsd-3-clause
| 2,655,012,504,257,989,000
| 33.789474
| 96
| 0.664145
| false
| 3.804317
| true
| false
| false
|
pavelponomarev/Elmer_IM2D_cases
|
IM_one_pole/cage/cage_generator.py
|
1
|
9486
|
# Elmer circuit equations generator for a cage winding taking into account periodicity
# Author: P. Ponomarev
# July 2016
# changelog:
# version 1.3 (03.2017) by PP:
# - added 'boffset' parameter - offset of the body numbers
# version 1.2 (01.2017) by PP:
# - added 'ns' parameter - number of slices for multi-slice model
from __future__ import print_function
# Settings:
ns = 1
nob = 10 # number of rotor bars simulated
boffset = 1 # number of the first bar body
antiperiodic = 1 # periodic or antiperiodic boundary
cn = 4 # circuit number which describes the rotor bars
ctype = "Stranded" # Coil type Massive/Stranded
OUTFILE = 'cage.definitions'
# Rotor circuit
# Bar 1 to Bar N are FEM components of the modelled domain
# L_N and R_N are bar-to-bar inductance and resistance of the rotor end rings
# For 1-pole model (antiperiodic):
# terminal 1 is connected to 2' and 2 is connected to 1'
# For 2-pole model (periodic):
# terminal 1 is connected to 1' and 2 is connected to 2'
# (i,v)
# 1' (0,1) 2'
# O + _________ - I_bar O
# |________| Bar 1 |____\____|
# + | |_________| / |
# C C +
# C L_1l _____________ C L_1r
# C | U_loop | C
# | (4,5) | | | (2,3)
# < | \|/ <
# < R_1l |___ V < R_1r
# < <
# - | + _________ - |-
# |________| Bar 2 |_________|
# | |_________| |
# C (6,7) C
# C L_2l C L_2r
# C C
# | (10,11) | (8,9)
# < <
# < R_2l < R_2r
# < <
# | |
#
# ...
# _________
# |________| Bar N |_________|
# | |_________| |
# C C
# C L_Nl C L_Nr
# C C
# | |
# < <
# < R_Nl < R_Nr
# < <
# | |
# O O
# 1 2
barstxt = ""
###############################################################################
### Filling components section
###############################################################################
# Coil Type can be Massive or Stranded
# assuming that rotor bar bodies are numbered
# consecutively from 1 to N, where bars 1 and N are the ones closest to
# the periodic boundaries:
for nbar in range(1,nob+1):
s = "Component " + str(nbar) + "\n" + \
" Name = String RB" + str(nbar) + "\n" + \
" Body = Integer " + str(nbar+boffset-1) + "\n" + \
" Coil Type = String "+ ctype + "\n" + \
" Number of Turns = Real 1" + "\n" + \
"End" + "\n\n"
barstxt = barstxt + s
###############################################################################
### Declare variables
###############################################################################
# first, the dimensions of the variable arrays are declared
s = "!----------------------------------------------------------\n" + \
"! Equations for " + str(nob) + " rotor bars\n" + \
"!----------------------------------------------------------\n\n" + \
"$ C." + str(cn) + ".source.1 = 0\n\n" + \
"! init matrices of Ax' + Bx = Source\n" + \
"$ C." + str(cn) + ".variables = " + str(nob*3*2)+ "\n" + \
"$ C." + str(cn) + ".perm = zeros(" + str(nob*3*2)+ ")\n" + \
"$ C." + str(cn) + ".A = zeros(" + str(nob*3*2) + ", " + str(nob*3*2) + ")\n" + \
"$ C." + str(cn) + ".B = zeros(" + str(nob*3*2) + ", " + str(nob*3*2) + ")\n" + \
"$ C." + str(cn) + ".Mre = zeros(" + str(nob*3*2) + ", " + str(nob*3*2) + ")\n" + \
"$ C." + str(cn) + ".Mim = zeros(" + str(nob*3*2) + ", " + str(nob*3*2) + ")\n" + \
"! define circuit variables\n\n"
barstxt = barstxt + s
# then, each variable receives its unique name
# each component and element is described by 2 circuit variables - "u" and "i"
# each bar is associated with 2 sections of the end ring - left (l) and right (r)
# each section is described by a single circuit element possessing R and L.
for nbar in range(0,nob):
s = "$ C." + str(cn) + ".name." + str(nbar*6 + 1) + " = \"i_component(" + str(nbar+1) + ")\"\n" + \
"$ C." + str(cn) + ".name." + str(nbar*6 + 2) + " = \"v_component(" + str(nbar+1) + ")\"\n" + \
"$ C." + str(cn) + ".name." + str(nbar*6 + 3) + " = \"i_r" + str(nbar+1) + "\"\n" + \
"$ C." + str(cn) + ".name." + str(nbar*6 + 4) + " = \"v_r" + str(nbar+1) + "\"\n" + \
"$ C." + str(cn) + ".name." + str(nbar*6 + 5) + " = \"i_l" + str(nbar+1) + "\"\n" + \
"$ C." + str(cn) + ".name." + str(nbar*6 + 6) + " = \"v_l" + str(nbar+1) + "\"\n\n\n"
barstxt = barstxt + s
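# With the naming scheme above, bar 1 occupies circuit variables 1..6
# (i_component(1), v_component(1), i_r1, v_r1, i_l1, v_l1), and bar n in
# general starts at index 6*(n-1)+1.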
###############################################################################
### Kirchhoff voltage law
###############################################################################
# describes the voltages in each loop between two bars. Hence, each circuit
# segment contains 4 components (elements).
# Loops are directed clockwise.
s = "! Kirchoff voltage law\n\n"
barstxt = barstxt + s
for nbar in range(0,nob-1):
s = "!Bar" + str(nbar+1) + "\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+2) + "," + str(nbar*6+1) + ") = 1/" + str(ns) + "\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+2) + "," + str(nbar*6+3) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+2) + "," + str(nbar*6+5) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+2) + "," + str(nbar*6+7) + ") = -1/" + str(ns) + "\n\n"
barstxt = barstxt + s
# last bar includes periodicity definition
s = "!Bar" + str(nob) + "\n" + \
"$ C." + str(cn) + ".B(" + str((nob-1)*6+2) + "," + str((nob-1)*6+1) + ") = 1/" + str(ns) + "\n" + \
"$ C." + str(cn) + ".B(" + str((nob-1)*6+2) + "," + str((nob-1)*6+3) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str((nob-1)*6+2) + "," + str((nob-1)*6+5) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str((nob-1)*6+2) + "," + str(1) + ") = " + str(1 if antiperiodic==1 else -1) +"/" + str(ns) + "\n\n\n"
barstxt = barstxt + s
###############################################################################
### Kirchhoff current law
###############################################################################
# each bar is connected to two knots -- left and right
s = "! Kirchoff current law\n\n"
barstxt = barstxt + s
# bar 1 knots contain periodicity information
s = "!Bar" + str(1) + " right knot\n" + \
"$ C." + str(cn) + ".B(" + str(0+0) + "," + str(0+0) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(0+0) + "," + str(nob*6-(2 if antiperiodic == 1 else 4)) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(0+0) + "," + str(0+2) + ") = -1\n" + \
"!Bar" + str(1) + " left knot\n" + \
"$ C." + str(cn) + ".B(" + str(0+4) + "," + str(0+4) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str(0+4) + "," + str(nob*6-(4 if antiperiodic == 1 else 2)) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(0+4) + "," + str(0+0) + ") = -1\n\n"
barstxt = barstxt + s
# other bars are composed similarly
for nbar in range(1,nob):
s = "!Bar" + str(nbar+1) + " right knot\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+0) + "," + str(nbar*6+0) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+0) + "," + str(nbar*6-4) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+0) + "," + str(nbar*6+2) + ") = -1\n" + \
"!Bar" + str(nbar+1) + " left knot\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+4) + "," + str(nbar*6+4) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+4) + "," + str(nbar*6-2) + ") = 1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+4) + "," + str(nbar*6+0) + ") = -1\n\n"
barstxt = barstxt + s
###############################################################################
### Elemental equations
###############################################################################
# these equations describe R and L elements in the circuit
# v = vr+vl
# v -iR - Li' = 0
s = "! Elemental equations\n\n"
barstxt = barstxt + s
for nbar in range(0,nob):
s = "$ C." + str(cn) + ".B(" + str(nbar*6+3) + "," + str(nbar*6+3) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+3) + "," + str(nbar*6+2) + ") = R_er\n" + \
"$ C." + str(cn) + ".A(" + str(nbar*6+3) + "," + str(nbar*6+2) + ") = L_er\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+5) + "," + str(nbar*6+5) + ") = -1\n" + \
"$ C." + str(cn) + ".B(" + str(nbar*6+5) + "," + str(nbar*6+4) + ") = R_er\n" + \
"$ C." + str(cn) + ".A(" + str(nbar*6+5) + "," + str(nbar*6+4) + ") = L_er\n\n"
barstxt = barstxt + s
with open(OUTFILE, 'w+') as f:
f.write(barstxt)
print('Cage circuit equations for circuit number', cn,
'with', ns, 'slices',
'for', nob, 'bars with',
'antiperiodic' if antiperiodic == 1 else 'periodic',
'boundary conditions are saved to', OUTFILE)
|
gpl-3.0
| -1,045,329,439,116,759,700
| 43.12093
| 133
| 0.37666
| false
| 2.776119
| false
| false
| false
|
oddt/oddt
|
oddt/docking/AutodockVina.py
|
1
|
15674
|
import sys
import subprocess
import re
import os
import warnings
from tempfile import mkdtemp
from shutil import rmtree
from distutils.spawn import find_executable
from tempfile import gettempdir
from six import string_types
import oddt
from oddt.utils import (is_openbabel_molecule,
is_molecule,
check_molecule)
from oddt.spatial import rmsd
class autodock_vina(object):
def __init__(self,
protein=None,
auto_ligand=None,
size=(20, 20, 20),
center=(0, 0, 0),
exhaustiveness=8,
num_modes=9,
energy_range=3,
seed=None,
prefix_dir=None,
n_cpu=1,
executable=None,
autocleanup=True,
skip_bad_mols=True):
"""Autodock Vina docking engine, which extends it's capabilities:
automatic box (auto-centering on ligand).
Other software compatible with Vina API can also be used (e.g. QuickVina).
Parameters
----------
protein: oddt.toolkit.Molecule object (default=None)
Protein object to be used while generating descriptors.
auto_ligand: oddt.toolkit.Molecule object or string (default=None)
Ligand use to center the docking box. Either ODDT molecule or
a file (opened based on extension and read to ODDT molecule).
Box is centered on geometric center of molecule.
size: tuple, shape=[3] (default=(20, 20, 20))
Dimensions of docking box (in Angstroms)
center: tuple, shape=[3] (default=(0,0,0))
The center of docking box in cartesian space.
exhaustiveness: int (default=8)
Exhaustiveness parameter of Autodock Vina
num_modes: int (default=9)
Number of conformations generated by Autodock Vina. The maximum
number of docked poses is 9 (due to Autodock Vina limitation).
energy_range: int (default=3)
Energy range cutoff for Autodock Vina
seed: int or None (default=None)
Random seed for Autodock Vina
prefix_dir: string or None (default=None)
Temporary directory for Autodock Vina files.
By default (None) system temporary directory is used,
for reference see `tempfile.gettempdir`.
executable: string or None (default=None)
Autodock Vina executable location in the system.
            Supplying it is only necessary if autodetection fails.
autocleanup: bool (default=True)
Should the docking engine clean up after execution?
skip_bad_mols: bool (default=True)
Should molecules that crash Autodock Vina be skipped.
"""
self.dir = prefix_dir or gettempdir()
self._tmp_dir = None
# define binding site
self.size = size
self.center = center
        # center automatically on the ligand
if auto_ligand:
if isinstance(auto_ligand, string_types):
extension = auto_ligand.split('.')[-1]
auto_ligand = next(oddt.toolkit.readfile(extension, auto_ligand))
self.center = auto_ligand.coords.mean(axis=0).round(3)
# autodetect Vina executable
if not executable:
self.executable = find_executable('vina')
if not self.executable:
                raise Exception('Could not find Autodock Vina binary. '
                                'You have to install it globally or supply the '
                                'full path to the binary via the `executable` parameter.')
else:
self.executable = executable
# detect version
self.version = (subprocess.check_output([self.executable, '--version'])
.decode('ascii').split(' ')[2])
self.autocleanup = autocleanup
self.cleanup_dirs = set()
# share protein to class
self.protein = None
self.protein_file = None
if protein:
self.set_protein(protein)
self.skip_bad_mols = skip_bad_mols
self.n_cpu = n_cpu
if self.n_cpu > exhaustiveness:
            warnings.warn('Exhaustiveness is lower than n_cpu, thus CPUs will '
                          'not be saturated.')
# pregenerate common Vina parameters
self.params = []
self.params += ['--center_x', str(self.center[0]),
'--center_y', str(self.center[1]),
'--center_z', str(self.center[2])]
self.params += ['--size_x', str(self.size[0]),
'--size_y', str(self.size[1]),
'--size_z', str(self.size[2])]
self.params += ['--exhaustiveness', str(exhaustiveness)]
if seed is not None:
self.params += ['--seed', str(seed)]
if num_modes > 9 or num_modes < 1:
raise ValueError('The number of docked poses must be between 1 and 9'
' (due to Autodock Vina limitation).')
self.params += ['--num_modes', str(num_modes)]
self.params += ['--energy_range', str(energy_range)]
@property
def tmp_dir(self):
if not self._tmp_dir:
self._tmp_dir = mkdtemp(dir=self.dir, prefix='autodock_vina_')
self.cleanup_dirs.add(self._tmp_dir)
return self._tmp_dir
@tmp_dir.setter
def tmp_dir(self, value):
self._tmp_dir = value
def set_protein(self, protein):
"""Change protein to dock to.
Parameters
----------
protein: oddt.toolkit.Molecule object
Protein object to be used.
"""
# generate new directory
self._tmp_dir = None
if protein:
if isinstance(protein, string_types):
extension = protein.split('.')[-1]
if extension == 'pdbqt':
self.protein_file = protein
self.protein = next(oddt.toolkit.readfile(extension, protein))
self.protein.protein = True
else:
self.protein = next(oddt.toolkit.readfile(extension, protein))
self.protein.protein = True
else:
self.protein = protein
# skip writing if we have PDBQT protein
if self.protein_file is None:
self.protein_file = write_vina_pdbqt(self.protein, self.tmp_dir,
flexible=False)
def score(self, ligands, protein=None):
"""Automated scoring procedure.
Parameters
----------
ligands: iterable of oddt.toolkit.Molecule objects
Ligands to score
protein: oddt.toolkit.Molecule object or None
Protein object to be used. If None, then the default
one is used, else the protein is new default.
Returns
-------
ligands : array of oddt.toolkit.Molecule objects
Array of ligands (scores are stored in mol.data method)
"""
if protein:
self.set_protein(protein)
if not self.protein_file:
raise IOError("No receptor.")
if is_molecule(ligands):
ligands = [ligands]
ligand_dir = mkdtemp(dir=self.tmp_dir, prefix='ligands_')
output_array = []
for n, ligand in enumerate(ligands):
check_molecule(ligand, force_coords=True)
ligand_file = write_vina_pdbqt(ligand, ligand_dir, name_id=n)
try:
scores = parse_vina_scoring_output(
subprocess.check_output([self.executable, '--score_only',
'--receptor', self.protein_file,
'--ligand', ligand_file] + self.params,
stderr=subprocess.STDOUT))
except subprocess.CalledProcessError as e:
sys.stderr.write(e.output.decode('ascii'))
if self.skip_bad_mols:
continue
else:
raise Exception('Autodock Vina failed. Command: "%s"' %
' '.join(e.cmd))
ligand.data.update(scores)
output_array.append(ligand)
rmtree(ligand_dir)
return output_array
def dock(self, ligands, protein=None):
"""Automated docking procedure.
Parameters
----------
ligands: iterable of oddt.toolkit.Molecule objects
Ligands to dock
protein: oddt.toolkit.Molecule object or None
Protein object to be used. If None, then the default one
is used, else the protein is new default.
Returns
-------
ligands : array of oddt.toolkit.Molecule objects
Array of ligands (scores are stored in mol.data method)
"""
if protein:
self.set_protein(protein)
if not self.protein_file:
raise IOError("No receptor.")
if is_molecule(ligands):
ligands = [ligands]
ligand_dir = mkdtemp(dir=self.tmp_dir, prefix='ligands_')
output_array = []
for n, ligand in enumerate(ligands):
check_molecule(ligand, force_coords=True)
ligand_file = write_vina_pdbqt(ligand, ligand_dir, name_id=n)
ligand_outfile = ligand_file[:-6] + '_out.pdbqt'
try:
scores = parse_vina_docking_output(
subprocess.check_output([self.executable, '--receptor',
self.protein_file,
'--ligand', ligand_file,
'--out', ligand_outfile] +
self.params +
['--cpu', str(self.n_cpu)],
stderr=subprocess.STDOUT))
except subprocess.CalledProcessError as e:
sys.stderr.write(e.output.decode('ascii'))
if self.skip_bad_mols:
continue # TODO: print some warning message
else:
raise Exception('Autodock Vina failed. Command: "%s"' %
' '.join(e.cmd))
# docked conformations may have wrong connectivity - use source ligand
if is_openbabel_molecule(ligand):
# find the order of PDBQT atoms assigned by OpenBabel
with open(ligand_file) as f:
write_order = [int(line[7:12].strip())
for line in f
if line[:4] == 'ATOM']
new_order = sorted(range(len(write_order)),
key=write_order.__getitem__)
new_order = [i + 1 for i in new_order] # OBMol has 1 based idx
assert len(new_order) == len(ligand.atoms)
docked_ligands = oddt.toolkit.readfile('pdbqt', ligand_outfile)
for docked_ligand, score in zip(docked_ligands, scores):
# Renumber atoms to match the input ligand
if is_openbabel_molecule(docked_ligand):
docked_ligand.OBMol.RenumberAtoms(new_order)
# HACK: copy docked coordinates onto source ligand
# We assume that the order of atoms match between ligands
clone = ligand.clone
clone.clone_coords(docked_ligand)
clone.data.update(score)
# Calculate RMSD to the input pose
try:
clone.data['vina_rmsd_input'] = rmsd(ligand, clone)
clone.data['vina_rmsd_input_min'] = rmsd(ligand, clone,
method='min_symmetry')
except Exception:
pass
output_array.append(clone)
rmtree(ligand_dir)
return output_array
def clean(self):
for d in self.cleanup_dirs:
rmtree(d)
def predict_ligand(self, ligand):
"""Local method to score one ligand and update it's scores.
Parameters
----------
ligand: oddt.toolkit.Molecule object
Ligand to be scored
Returns
-------
ligand: oddt.toolkit.Molecule object
Scored ligand with updated scores
"""
return self.score([ligand])[0]
def predict_ligands(self, ligands):
"""Method to score ligands lazily
Parameters
----------
ligands: iterable of oddt.toolkit.Molecule objects
Ligands to be scored
Returns
-------
ligand: iterator of oddt.toolkit.Molecule objects
Scored ligands with updated scores
"""
return self.score(ligands)
def write_vina_pdbqt(mol, directory, flexible=True, name_id=None):
"""Write single PDBQT molecule to a given directory. For proteins use
`flexible=False` to avoid encoding torsions. Additionally an name ID can
be appended to a name to avoid conflicts.
"""
if name_id is None:
name_id = ''
    # We expect a name such as 0_ZINC123456.pdbqt, or simply ZINC123456.pdbqt if
    # no name_id is specified. All non-alphanumeric characters are replaced with
    # underscores.
mol_file = ('_'.join(filter(None, [str(name_id),
re.sub('[^A-Za-z0-9]+', '_', mol.title)]
)) + '.pdbqt')
# prepend path to filename
mol_file = os.path.join(directory, mol_file)
if is_openbabel_molecule(mol):
if flexible:
            # auto bonding (b), preserve atom indices (p) and Hs (h)
kwargs = {'opt': {'b': None, 'p': None, 'h': None}}
else:
# for proteins write rigid mol (r) and combine all frags in one (c)
kwargs = {'opt': {'r': None, 'c': None, 'h': None}}
else:
kwargs = {'flexible': flexible}
mol.write('pdbqt', mol_file, overwrite=True, **kwargs)
return mol_file
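# Minimal usage sketch (the file name and molecule below are hypothetical):
#   mol = next(oddt.toolkit.readfile('sdf', 'ligand.sdf'))
#   pdbqt_file = write_vina_pdbqt(mol, '/tmp', name_id=0)
#   # -> e.g. '/tmp/0_<sanitized_title>.pdbqt'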
def parse_vina_scoring_output(output):
"""Function parsing Autodock Vina scoring output to a dictionary
Parameters
----------
output : string
        Autodock Vina standard output (STDOUT).
Returns
-------
out : dict
        dictionary containing scores computed by Autodock Vina
"""
out = {}
r = re.compile(r'^(Affinity:|\s{4})')
for line in output.decode('ascii').split('\n')[13:]: # skip some output
if r.match(line):
m = line.replace(' ', '').split(':')
if m[0] == 'Affinity':
m[1] = m[1].replace('(kcal/mol)', '')
out[str('vina_' + m[0].lower())] = float(m[1])
return out
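# Illustrative parsing (hypothetical value): an output line such as
#   'Affinity: -7.1 (kcal/mol)'
# becomes {'vina_affinity': -7.1}; indented term lines are parsed the same
# way into 'vina_<term>' keys.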
def parse_vina_docking_output(output):
"""Function parsing Autodock Vina docking output to a dictionary
Parameters
----------
output : string
        Autodock Vina standard output (STDOUT).
Returns
-------
    out : list of dicts
        list of score dictionaries, one per docked pose, computed by Autodock Vina
"""
out = []
r = re.compile(r'^\s+\d\s+')
for line in output.decode('ascii').split('\n')[13:]: # skip some output
if r.match(line):
s = line.split()
out.append({'vina_affinity': s[1],
'vina_rmsd_lb': s[2],
'vina_rmsd_ub': s[3]})
return out
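# Illustrative result (hypothetical values, kept as strings from the table):
#   [{'vina_affinity': '-7.1', 'vina_rmsd_lb': '0.000', 'vina_rmsd_ub': '0.000'}, ...]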
|
bsd-3-clause
| 8,064,540,954,755,132,000
| 36.859903
| 85
| 0.534133
| false
| 4.168617
| false
| false
| false
|
clarin-eric/Centre-Registry
|
centre-registry-app/centre_registry/migrations/0001_initial.py
|
1
|
13641
|
# pylint: disable=invalid-name
import centre_registry.models
from django.db import migrations
from django.db import models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name='Centre',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('name', models.CharField(
max_length=200, unique=True, verbose_name='Name')),
('shorthand', models.CharField(
max_length=20, unique=True,
verbose_name='Shorthand code')),
('organisation_name', models.CharField(
max_length=100, verbose_name='Organisation')),
('institution', models.CharField(
max_length=200, verbose_name='Institution')),
('working_unit', models.CharField(
max_length=200, verbose_name='Working unit')),
('address', models.CharField(
max_length=100, verbose_name='Address')),
('postal_code', models.CharField(
max_length=8, verbose_name='Postal code')),
('city', models.CharField(
max_length=100, verbose_name='City')),
('latitude', models.CharField(
max_length=20,
validators=[centre_registry.models.validate_latitude],
verbose_name='Latitude')),
('longitude', models.CharField(
max_length=20,
validators=[centre_registry.models.validate_longitude],
verbose_name='Longitude')),
('type_status', models.CharField(
max_length=100,
blank=True,
verbose_name="Comments about centre's type")),
('website_url', models.URLField(
max_length=2000, verbose_name='Website URL')),
('description', models.CharField(
max_length=500, blank=True, verbose_name='Description')),
('expertise', models.CharField(
max_length=200, blank=True, verbose_name='Expertise')),
('type_certificate_url', models.URLField(
max_length=2000,
blank=True,
verbose_name='Centre type certificate URL')),
('dsa_url', models.URLField(
max_length=2000,
blank=True,
verbose_name='Data Seal of Approval URL')),
('pid_status', models.CharField(
max_length=200,
blank=True,
verbose_name='Persistent Identifier usage status')),
('long_term_archiving_policy', models.CharField(
max_length=200,
blank=True,
verbose_name='Long Time Archiving Policy')),
('repository_system', models.CharField(
max_length=200,
blank=True,
verbose_name='Repository system')),
('strict_versioning', models.BooleanField(
default=False, verbose_name='Strict versioning?')),
],
options={
'verbose_name_plural': 'centres',
'verbose_name': 'centre',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='CentreType',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('type', models.CharField(
max_length=1,
unique=True,
verbose_name='Certified centre type')),
],
options={
'verbose_name_plural': 'formal centre types',
'verbose_name': 'formal centre type',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='Consortium',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('country_code', models.CharField(
max_length=3, unique=True, verbose_name='Country code')),
('country_name', models.CharField(
max_length=20, unique=True, verbose_name='Country name')),
('is_observer', models.BooleanField(
default=False, verbose_name='Is observer (not member)?')),
('name', models.CharField(
max_length=20, verbose_name='Name')),
('website_url', models.URLField(
max_length=2000, verbose_name='Website URL')),
('alias', models.CharField(
max_length=25, verbose_name='Alias (... .clarin.eu)')),
],
options={
'verbose_name_plural': 'consortia',
'verbose_name': 'consortium',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('email_address', models.EmailField(
max_length=75, verbose_name='E-mail address')),
('name', models.CharField(
max_length=200, unique=True, verbose_name='Name')),
('telephone_number', models.CharField(
max_length=30,
blank=True,
verbose_name='Telephone number (E.123 international '
'notation)')),
('website', models.URLField(
max_length=2000, blank=True, verbose_name='Website')),
],
options={
'verbose_name_plural': 'contacts',
'verbose_name': 'contact',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='FCSEndpoint',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('uri', models.URLField(
max_length=2000, unique=True, verbose_name='Base URI')),
('centre', models.ForeignKey(to='centre_registry.Centre', on_delete=django.db.models.deletion.SET_NULL,
null=True)),
],
options={
'verbose_name_plural': 'FCS endpoints',
'verbose_name': 'FCS endpoint',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='MetadataFormat',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('name', models.CharField(
max_length=30,
unique=True,
verbose_name='Metadata format name')),
],
options={
'verbose_name_plural': 'metadata formats',
'verbose_name': 'metadata format',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='OAIPMHEndpoint',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('web_services_set', models.CharField(
max_length=100,
blank=True,
verbose_name='Web services set')),
('web_services_type', models.CharField(
max_length=10,
blank=True,
verbose_name='Web services type (e.g. SOAP; REST)')),
('uri', models.URLField(
max_length=2000, unique=True, verbose_name='Base URI')),
('centre', models.ForeignKey(
to='centre_registry.Centre',
on_delete=django.db.models.deletion.SET_NULL, null=True)),
('metadata_format', models.ForeignKey(
to='centre_registry.MetadataFormat',
verbose_name='Metadata format',
null=True,
on_delete=django.db.models.deletion.SET_NULL)),
],
options={
'verbose_name_plural': 'OAI-PMH endpoints',
'verbose_name': 'OAI-PMH endpoint',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='SAMLIdentityFederation',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('shorthand', models.CharField(
max_length=20, unique=True,
verbose_name='Shorthand code')),
('information_url', models.URLField(
max_length=1024, verbose_name='Information URL')),
('saml_metadata_url', models.URLField(
max_length=1024, verbose_name='SAML metadata URL')),
],
options={
'verbose_name_plural': 'SAML Identity Federations',
'verbose_name': 'SAML Identity Federation',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='SAMLServiceProvider',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('entity_id', models.URLField(
max_length=1024, unique=True, verbose_name='Entity ID')),
('status_url', models.URLField(
max_length=1024, blank=True, verbose_name='Status URL')),
('centre', models.ForeignKey(
to='centre_registry.Centre',
on_delete=django.db.models.deletion.SET_NULL, null=True)),
],
options={
'verbose_name_plural': 'SAML Service Providers',
'verbose_name': 'SAML Service Provider',
},
bases=(models.Model, ), ),
migrations.CreateModel(
name='URLReference',
fields=[
('id', models.AutoField(
serialize=False,
primary_key=True,
auto_created=True,
verbose_name='ID')),
('description', models.CharField(
max_length=300, verbose_name='Content description')),
('url', models.URLField(
max_length=2000, unique=True, verbose_name='URL')),
('centre', models.ForeignKey(
to='centre_registry.Centre',
on_delete=django.db.models.deletion.CASCADE)),
],
options={
'verbose_name_plural': 'URL references',
'verbose_name': 'URL reference',
},
bases=(models.Model, ), ),
migrations.AddField(
model_name='samlidentityfederation',
name='saml_sps_registered',
field=models.ManyToManyField(
to='centre_registry.SAMLServiceProvider',
blank=True,
verbose_name='SAML SPs Registered'),
preserve_default=True, ),
migrations.AddField(
model_name='centre',
name='administrative_contact',
field=models.ForeignKey(
related_name='administrative_contact',
to='centre_registry.Contact',
on_delete=django.db.models.deletion.PROTECT),
preserve_default=True, ),
migrations.AddField(
model_name='centre',
name='consortium',
field=models.ForeignKey(
to='centre_registry.Consortium',
on_delete=django.db.models.deletion.SET_NULL),
preserve_default=True, ),
migrations.AddField(
model_name='centre',
name='technical_contact',
field=models.ForeignKey(
related_name='technical_contact',
to='centre_registry.Contact',
on_delete=django.db.models.deletion.SET_NULL),
preserve_default=True, ),
migrations.AddField(
model_name='centre',
name='type',
field=models.ManyToManyField(to='centre_registry.CentreType'),
preserve_default=True, ),
]
|
gpl-3.0
| -708,127,677,141,282,700
| 41.761755
| 119
| 0.467268
| false
| 5.074777
| false
| false
| false
|
google/sqlcommenter
|
python/sqlcommenter-python/tests/sqlalchemy/tests.py
|
1
|
4346
|
#!/usr/bin/python
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
import sqlalchemy
from google.cloud.sqlcommenter.sqlalchemy.executor import BeforeExecuteFactory
from ..compat import mock, skipIfPy2
from ..opencensus_mock import mock_opencensus_tracer
from ..opentelemetry_mock import mock_opentelemetry_context
class MockConnection:
@property
def engine(self):
class Engine:
@property
def driver(self):
return 'driver'
return Engine()
class SQLAlchemyTestCase(TestCase):
def assertSQL(self, expected_sql, **kwargs):
before_cursor_execute = BeforeExecuteFactory(**kwargs)
sql, params = before_cursor_execute(
MockConnection(), None, 'SELECT 1;', ('param,'), None, None,
)
self.assertEqual(sql, expected_sql)
self.assertEqual(params, ('param,'))
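# Note: the doubled '%%' in the expected comments below is assumed to be
# deliberate percent-escaping for DB-API paramstyles that treat '%' as
# special; e.g. '%%3D' is the escaped form of the URL-encoded '=' ('%3D').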
class Tests(SQLAlchemyTestCase):
def test_no_args(self):
self.assertSQL('SELECT 1;')
def test_db_driver(self):
self.assertSQL(
"SELECT 1; /*db_driver='driver'*/",
with_db_driver=True,
)
def test_db_framework(self):
self.assertSQL(
"SELECT 1; /*db_framework='sqlalchemy%%3A{}'*/".format(sqlalchemy.__version__),
with_db_framework=True,
)
def test_opencensus(self):
with mock_opencensus_tracer():
self.assertSQL(
"SELECT 1; /*traceparent='00-trace%%20id-span%%20id-00',"
"tracestate='congo%%3Dt61rcWkgMzE%%2Crojo%%3D00f067aa0ba902b7'*/",
with_opencensus=True,
)
@skipIfPy2
def test_opentelemetry(self):
with mock_opentelemetry_context():
self.assertSQL(
"SELECT 1; /*traceparent='00-000000000000000000000000deadbeef-000000000000beef-00',"
"tracestate='some_key%%3Dsome_value'*/",
with_opentelemetry=True,
)
@skipIfPy2
def test_both_opentelemetry_and_opencensus_warn(self):
with mock.patch(
"google.cloud.sqlcommenter.sqlalchemy.executor.logger"
) as logger_mock, mock_opencensus_tracer(), mock_opentelemetry_context():
self.assertSQL(
"SELECT 1; /*traceparent='00-000000000000000000000000deadbeef-000000000000beef-00',"
"tracestate='some_key%%3Dsome_value'*/",
with_opentelemetry=True,
with_opencensus=True,
)
self.assertEqual(len(logger_mock.warning.mock_calls), 1)
class FlaskTests(SQLAlchemyTestCase):
flask_info = {
'framework': 'flask',
'controller': 'c',
'route': '/',
}
@mock.patch('google.cloud.sqlcommenter.sqlalchemy.executor.get_flask_info', return_value=flask_info)
def test_all_data(self, get_info):
self.assertSQL(
"SELECT 1; /*controller='c',framework='flask',route='/'*/",
)
@mock.patch('google.cloud.sqlcommenter.sqlalchemy.executor.get_flask_info', return_value=flask_info)
def test_framework_disabled(self, get_info):
self.assertSQL(
"SELECT 1; /*controller='c',route='/'*/",
with_framework=False,
)
@mock.patch('google.cloud.sqlcommenter.sqlalchemy.executor.get_flask_info', return_value=flask_info)
def test_controller_disabled(self, get_info):
self.assertSQL(
"SELECT 1; /*framework='flask',route='/'*/",
with_controller=False,
)
@mock.patch('google.cloud.sqlcommenter.sqlalchemy.executor.get_flask_info', return_value=flask_info)
def test_route_disabled(self, get_info):
self.assertSQL(
"SELECT 1; /*controller='c',framework='flask'*/",
with_route=False,
)
|
apache-2.0
| -5,383,465,248,542,579,000
| 32.953125
| 104
| 0.627474
| false
| 3.832451
| true
| false
| false
|
olix0r/vtwt
|
vtwt/util.py
|
1
|
1493
|
import re
from htmlentitydefs import name2codepoint
from twisted.python.text import greedyWrap
from twisted.web.error import Error as WebError
# From http://wiki.python.org/moin/EscapingHtml
_HTMLENT_CODEPOINT_RE = re.compile('&({0}|#[0-9]+|#[xX][0-9a-fA-F]+);'.format(
    '|'.join(name2codepoint.keys())))
def recodeText(text):
"""Parses things like & and ὔ into real characters."""
def _entToUnichr(match):
ent = match.group(1)
try:
if ent.startswith("#"):
char = unichr(int(ent[1:]))
else:
char = unichr(name2codepoint[ent])
except:
char = match.group(0)
return char
return _HTMLENT_CODEPOINT_RE.sub(_entToUnichr, text)
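# Illustrative calls: recodeText(u'&amp;') -> u'&'; with the hex form handled
# above, recodeText(u'&#x1F54;') -> u'\u1f54'. Unknown entities pass through
# unchanged.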
_whaleFmt = """\
_{lines}__
|\\/{space} x \\
}} {body} |
|/\\{lines}__-/"""
_whalePaddingLen = 6
def failWhale(error, columns=80):
if isinstance(error, WebError):
emsg = "{0.status} {0.message}".format(error)
else:
emsg = str(error)
width = columns - _whalePaddingLen
lines = []
for line in emsg.splitlines():
lines.extend(greedyWrap(line, width))
lineLength = max(map(len, lines))
msg = "{0}|\n|{0}".format((_whalePaddingLen/2)*" ").join(
map(lambda l: "{0:{1}}".format(l, lineLength),
lines))
return _whaleFmt.format(
space = " "*lineLength,
lines = "_"*lineLength,
length = lineLength,
body = msg)
|
bsd-3-clause
| -5,111,688,798,574,299,000
| 24.305085
| 68
| 0.561956
| false
| 3.370203
| false
| false
| false
|
onlynight/wechat-dump
|
wechat/smiley.py
|
1
|
4076
|
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# File: smiley.py
# Date: Thu Jun 18 00:02:43 2015 +0800
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
import os
import re
import json
import struct
from common.textutil import get_file_b64
STATIC_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
UNICODE_SMILEY_FILE = os.path.join(STATIC_PATH, 'unicode-smiley.json')
TENCENT_SMILEY_FILE = os.path.join(STATIC_PATH, 'tencent-smiley.json')
TENCENT_EXTRASMILEY_FILE = os.path.join(STATIC_PATH, 'tencent-smiley-extra.json')
try:
UNICODE_SMILEY_RE = re.compile(
u'[\U00010000-\U0010ffff]|[\u2600-\u2764]|\u2122|\u00a9|\u00ae|[\ue000-\ue5ff]'
)
except re.error:
# UCS-2 build
UNICODE_SMILEY_RE = re.compile(
u'[\uD800-\uDBFF][\uDC00-\uDFFF]|[\u2600-\u2764]|\u2122|\u00a9|\u00ae|[\ue000-\ue5ff]'
)
HEAD = """.smiley {
padding: 1px;
background-position: -1px -1px;
background-repeat: no-repeat;
width: 20px;
height: 20px;
display: inline-block;
vertical-align: top;
zoom: 1;
}
"""
TEMPLATE = """.smiley{name} {{
background-image: url("data:image/png;base64,{b64}");
}}"""
class SmileyProvider(object):
def __init__(self, html_replace=True):
""" html_replace: replace smileycode by html.
otherwise, replace by plain text
"""
self.html_replace = html_replace
if not html_replace:
raise NotImplementedError()
# [微笑] -> 0
self.tencent_smiley = json.load(open(TENCENT_SMILEY_FILE))
# some extra smiley from javascript on wx.qq.com
extra_smiley = json.load(open(TENCENT_EXTRASMILEY_FILE))
extra_smiley = {u'[' + k + u']': v for k, v in
extra_smiley.iteritems()}
self.tencent_smiley.update(extra_smiley)
# 1f35c -> "\ue340"
#self.unicode_smiley_code = gUnicodeCodeMap
# u'\U0001f35c' -> "e340" # for iphone
# u'\ue415' -> 'e415' # for android
unicode_smiley_dict = json.load(open(UNICODE_SMILEY_FILE))
self.unicode_smiley = {(self.unichar(int(k, 16))): hex(ord(v))[2:] for k, v in
unicode_smiley_dict.iteritems()}
self.unicode_smiley.update({v: hex(ord(v))[2:] for _, v in
unicode_smiley_dict.iteritems()})
self.used_smiley_id = set()
def unichar(self, i):
try:
return unichr(i)
except ValueError:
return struct.pack('i', i).decode('utf-32')
def gen_replace_elem(self, smiley_id):
self.used_smiley_id.add(str(smiley_id))
return '<span class="smiley smiley{}"></span>'.format(smiley_id)
def _replace_unicode(self, msg):
if not UNICODE_SMILEY_RE.findall(msg):
# didn't find the code
return msg
for k, v in self.unicode_smiley.iteritems():
if k in msg:
msg = msg.replace(k, self.gen_replace_elem(v))
return msg
def _replace_tencent(self, msg):
if (not '[' in msg or not ']' in msg) \
and (not '/:' in msg) and (not '/' in msg):
return msg
for k, v in self.tencent_smiley.iteritems():
if k in msg:
msg = msg.replace(k, self.gen_replace_elem(v))
return msg
def replace_smileycode(self, msg):
""" replace the smiley code in msg
return a html
"""
msg = self._replace_unicode(msg)
msg = self._replace_tencent(msg)
return msg
def gen_used_smiley_css(self):
ret = HEAD
for sid in self.used_smiley_id:
fname = os.path.join(STATIC_PATH, 'smileys', '{}.png'.format(sid))
b64 = get_file_b64(fname)
ret = ret + TEMPLATE.format(name=sid, b64=b64)
return ret
if __name__ == '__main__':
smiley = SmileyProvider()
msg = u"[挥手]哈哈呵呵hihi\U0001f684\u2728\u0001 /::<\ue415"
msg = smiley.replace_smileycode(msg)
#print msg
smiley.gen_used_smiley_css()
|
gpl-3.0
| -4,862,418,928,206,776,000
| 31.15873
| 94
| 0.57848
| false
| 2.981604
| false
| false
| false
|
molpopgen/fwdpy11
|
examples/discrete_demography/localadaptation.py
|
1
|
7832
|
#
# Copyright (C) 2019 Kevin Thornton <krthornt@uci.edu>
#
# This file is part of fwdpy11.
#
# fwdpy11 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# fwdpy11 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with fwdpy11. If not, see <http://www.gnu.org/licenses/>.
#
"""
Local adaptation of a quantitative trait to differing optima.
"""
import argparse
import math
import sys
from collections import namedtuple
import numpy as np
import pandas as pd
import fwdpy11
# Simulations with tree sequence recording need
# to know the max position in a genome. Here,
# we use a length of 1.0. Thus, all mutation
# and recombination events will be uniform
# random variables on the continuous interval
# [0, GENOME_LENGTH).
GENOME_LENGTH = 1.0
# When recording quant-genetic statistics during a simulation,
# we will use this type. Named tuples are extremely efficient,
# and they are easily converted into Pandas DataFrame objects,
# which is very convenient for analysis and output.
Datum = namedtuple("Data", ["generation", "deme", "gbar", "vg", "wbar"])
def make_parser():
"""
Create a command-line interface to the script.
"""
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
required = parser.add_argument_group("Required arguments")
required.add_argument("--popsize", "-N", type=int, help="Diploid population size")
required.add_argument(
"--mu", "-m", type=float, help="Mutation rate (per gamete, per generation)"
)
required.add_argument(
"--sigma",
"-s",
type=float,
help="Standard deviation of Gaussian" "distribution of mutational effects",
)
optional = parser.add_argument_group("Optional arguments")
optional.add_argument(
"--rho", type=float, default=1000.0, help="Scaled recombination rate, rho=4Nr"
)
optional.add_argument(
"--VS",
type=float,
default=10.0,
help="Inverse strength of stabilizing selection",
)
optional.add_argument(
"--opt", type=float, default=1.0, help="Value of new phenotypic optimum"
)
optional.add_argument(
"--migrates",
type=float,
nargs=2,
default=None,
help="Migration rates from 0 to 1 and 1 to 0, respectively.",
)
optional.add_argument(
"--time",
type=float,
default=0.1,
help="Amount of time to simulate past" "optimum shift, in units of N",
)
optional.add_argument(
"--plotfile", type=str, default=None, help="File name for plot"
)
optional.add_argument("--seed", type=int, default=42, help="Random number seed.")
return parser
def validate_arguments(args):
"""
Validate input arguments.
Note: this is likely incomplete.
"""
if args.popsize is None:
raise ValueError("popsize cannot be None")
    if args.mu is None:
        raise ValueError("mu cannot be None")
    if args.mu < 0 or math.isfinite(args.mu) is False:
        raise ValueError("Mutation rate must be non-negative and finite")
if args.sigma is None:
raise ValueError("sigma cannot be none")
if args.sigma < 0 or math.isfinite(args.sigma) is False:
raise ValueError(
"Std. dev. of distribution of effect sizes"
"must be non-negative and finite"
)
if args.migrates is not None:
for m in args.migrates:
if m < 0 or m > 1:
raise ValueError("migration rates must be 0 <= m <= 1")
def make_migmatrix(migrates):
if migrates is None:
return None
mm = np.zeros(4).reshape(2, 2)
mm[0, 1] = migrates[1]
mm[1, 0] = migrates[0]
rs = np.sum(mm, axis=1)
np.fill_diagonal(mm, 1.0 - rs)
return fwdpy11.MigrationMatrix(mm)
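# Worked example (hypothetical rates): make_migmatrix([0.01, 0.02]) builds
#   mm = [[0.98, 0.02],
#         [0.01, 0.99]]
# i.e. the off-diagonals are the swapped input rates and each diagonal entry
# is one minus its row sum, before wrapping in fwdpy11.MigrationMatrix.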
class Recorder(object):
"""
fwdpy11 allows you to define objects that record data
from populations during simulation. Such objects must
be callable, and the easiest way to do things is to
create a class with a __call__ function.
"""
def __init__(self, start):
self.data = []
self.start = start
def __call__(self, pop, recorder):
if pop.generation >= self.start:
# Record mean trait value each generation.
md = np.array(pop.diploid_metadata, copy=False)
demes = np.unique(md["deme"])
for d in demes:
w = np.where(md["deme"] == d)[0]
gbar = md["g"][w].mean()
vg = md["g"][w].var()
wbar = md["w"][w].mean()
self.data.append(Datum(pop.generation, d, gbar, vg, wbar))
def plot_output(data, filename):
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
fig = plt.figure(figsize=(9, 3))
gs = gridspec.GridSpec(ncols=3, nrows=1, figure=fig)
ax_gbar = fig.add_subplot(gs[0, 0])
ax_vg = fig.add_subplot(gs[0, 1])
ax_wbar = fig.add_subplot(gs[0, 2])
df = pd.DataFrame(data, columns=Datum._fields)
g = df.groupby(["deme"])
for n, gi in g:
ax_gbar.plot(gi["generation"], gi["gbar"], label="Deme {}".format(n))
ax_vg.plot(gi["generation"], gi["vg"], label="Deme {}".format(n))
ax_wbar.plot(gi["generation"], gi["wbar"], label="Deme {}".format(n))
for ax in [ax_gbar, ax_vg, ax_wbar]:
ax.set_xlabel("Generation")
ax_gbar.set_ylabel(r"$\bar{g}$")
ax_vg.set_ylabel(r"$V(G)$")
ax_wbar.set_ylabel(r"$\bar{w}$")
ax_gbar.legend()
plt.tight_layout()
plt.savefig(filename)
def runsim(args):
"""
Run the simulation.
"""
pop = fwdpy11.DiploidPopulation(2 * args.popsize, GENOME_LENGTH)
np.random.seed(args.seed)
rng = fwdpy11.GSLrng(args.seed)
GSSmo0 = fwdpy11.GSSmo(
[
fwdpy11.Optimum(when=0, optimum=0.0, VS=args.VS),
fwdpy11.Optimum(when=10 * args.popsize, optimum=args.opt, VS=args.VS),
]
)
GSSmo1 = fwdpy11.GSSmo(
[
fwdpy11.Optimum(when=0, optimum=0.0, VS=args.VS),
fwdpy11.Optimum(
when=10 * args.popsize, optimum=-1.0 * args.opt, VS=args.VS
),
]
)
mm = make_migmatrix(args.migrates)
dd = fwdpy11.DiscreteDemography(
mass_migrations=[fwdpy11.move_individuals(0, 0, 1, 0.5)], migmatrix=mm
)
p = {
"nregions": [], # No neutral mutations -- add them later!
"gvalue": [fwdpy11.Additive(2.0, GSSmo0), fwdpy11.Additive(2.0, GSSmo1)],
"sregions": [fwdpy11.GaussianS(0, GENOME_LENGTH, 1, args.sigma)],
"recregions": [fwdpy11.Region(0, GENOME_LENGTH, 1)],
"rates": (0.0, args.mu, args.rho / float(4 * args.popsize)),
# Keep mutations at frequency 1 in the pop if they affect fitness.
"prune_selected": False,
"demography": dd,
"simlen": 10 * args.popsize + int(args.popsize * args.time),
}
params = fwdpy11.ModelParams(**p)
r = Recorder(10 * args.popsize)
fwdpy11.evolvets(rng, pop, params, 100, r, suppress_table_indexing=True)
if args.plotfile is not None:
plot_output(r.data, args.plotfile)
if __name__ == "__main__":
parser = make_parser()
args = parser.parse_args(sys.argv[1:])
validate_arguments(args)
runsim(args)
|
gpl-3.0
| -7,623,108,832,788,986,000
| 30.967347
| 86
| 0.621425
| false
| 3.408181
| false
| false
| false
|
GbalsaC/bitnamiP
|
django-wiki/wiki/plugins/links/wiki_plugin.py
|
1
|
1271
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
from django.utils.translation import ugettext_lazy as _
from wiki.conf import settings
from wiki.core.plugins import registry
from wiki.core.plugins.base import BasePlugin
from wiki.plugins.links import views
from wiki.plugins.links.mdx.urlize import makeExtension
from wiki.plugins.links.mdx.djangowikilinks import WikiPathExtension
from django.core.urlresolvers import reverse_lazy
class LinkPlugin(BasePlugin):
slug = 'links'
urlpatterns = patterns('',
url(r'^json/query-urlpath/$', views.QueryUrlPath.as_view(), name='links_query_urlpath'),
)
sidebar = {'headline': _('Links'),
'icon_class': 'icon-bookmark',
'template': 'wiki/plugins/links/sidebar.html',
'form_class': None,
'get_form_kwargs': (lambda a: {})}
wikipath_config = [
('base_url', reverse_lazy('wiki:get', kwargs={'path': ''}) ),
('live_lookups', settings.LINK_LIVE_LOOKUPS ),
('default_level', settings.LINK_DEFAULT_LEVEL ),
]
markdown_extensions = [makeExtension(), WikiPathExtension(wikipath_config)]
def __init__(self):
pass
registry.register(LinkPlugin)
|
agpl-3.0
| 1,642,203,236,917,021,200
| 32.447368
| 96
| 0.651456
| false
| 3.875
| false
| false
| false
|
OPU-Surveillance-System/monitoring
|
master/scripts/planner/solvers/test_penalization_plot.py
|
1
|
1040
|
import matplotlib.pyplot as plt
with open("test_pen", "r") as f:
data = f.read()
data = data.split("\n")[:-1]
data = [data[i].split(" ") for i in range(0, len(data))]
pen = [float(data[i][0]) for i in range(len(data))]
u = [float(data[i][1]) for i in range(len(data))]
d = [float(data[i][2]) for i in range(len(data))]
gain = [((d[i-1] - d[i])) / (u[i] - u[i - 1]) for i in range(1, len(data))]
gain = [gain[0]] + gain
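# gain[i] approximates the marginal trade-off (distance saved per unit of
# uncertainty added) between consecutive penalization settings; the first
# entry is duplicated so the list lines up with pen, u and d.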
print(u, d, gain)
fig, ax1 = plt.subplots()
pu, = ax1.plot(pen, u, color="r", label="Uncertainty rate")
ax1.scatter(pen, u, color="k")
#ax1.axhline(9000, color="r", linestyle="--")
#ax1.set_title("Cost evolution according to the number of iterations")
ax1.set_xlabel("Penalization coefficient")
ax1.set_ylabel("Uncertainty rate")
ax2 = ax1.twinx()
pd, = ax2.plot(pen, d, color="b", linestyle="--", label="Distance")
ax2.scatter(pen, d, color="k")
ax2.set_ylabel("Distance")
#ax2.axhline(0.99, color="b", linestyle="--")
#plt.axvline(4000000, color="k",linestyle = ":")
plt.legend(handles=[pu, pd], loc=7)
plt.show()
|
mit
| 1,042,806,748,452,897,400
| 37.518519
| 75
| 0.632692
| false
| 2.512077
| false
| false
| false
|
Silvian/samaritan
|
emailservice/views.py
|
1
|
2372
|
"""
@author: Silvian Dragan
@Date: 17/06/2016
@Copyright: Copyright 2016, Samaritan CMA - Published under GNU General Public Licence v3
@Details: https://github.com/Silvian/samaritan
"""
import json
from django.contrib.auth import get_user
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from api.views import success_response, failure_response
from emailservice.forms import EmailOutboxForm
from samaritan.models import Member, ChurchGroup
from django.shortcuts import get_object_or_404
from emailservice.tasks import send_email_task
def send_emails(request, members):
user = get_user(request)
form = EmailOutboxForm(request.POST or None)
if form.is_valid():
outbox = form.save()
outbox.created_by = user
outbox.save()
        attachment = request.FILES.get('attachment', None)
if attachment:
outbox.attachment = attachment
outbox.save()
for member in members:
if member.email:
send_email_task.delay(
outbox_id=outbox.id, member_id=member.id
)
return HttpResponse(json.dumps(success_response), content_type='application/json')
return HttpResponse(json.dumps(failure_response), content_type='application/json')
@login_required
def send_members_mail(request):
if request.method == 'POST':
members = Member.objects.filter(
is_active=True, is_member=True
).order_by('last_name')
return send_emails(request, members)
@login_required
def send_guests_mail(request):
if request.method == 'POST':
members = Member.objects.filter(
is_active=True, is_member=False
).order_by('last_name')
return send_emails(request, members)
@login_required
def send_everyone_mail(request):
if request.method == 'POST':
members = Member.objects.filter(
is_active=True
).order_by('last_name')
return send_emails(request, members)
@login_required
def send_group_mail(request):
if request.method == 'POST':
church_group = get_object_or_404(ChurchGroup, id=request.POST['id'])
group_members = church_group.members.filter(
is_active=True
).order_by('last_name')
return send_emails(request, group_members)
|
gpl-3.0
| -769,699,822,619,569,500
| 28.283951
| 90
| 0.664418
| false
| 3.71205
| false
| false
| false
|
kyubifire/softlayer-python
|
SoftLayer/fixtures/SoftLayer_Virtual_Guest.py
|
1
|
19232
|
getObject = {
'id': 100,
'hostname': 'vs-test1',
'domain': 'test.sftlyr.ws',
'fullyQualifiedDomainName': 'vs-test1.test.sftlyr.ws',
'status': {'keyName': 'ACTIVE', 'name': 'Active'},
'billingItem': {
'id': 6327,
'nextInvoiceTotalRecurringAmount': 1.54,
'children': [
{'nextInvoiceTotalRecurringAmount': 1},
{'nextInvoiceTotalRecurringAmount': 1},
{'nextInvoiceTotalRecurringAmount': 1},
{'nextInvoiceTotalRecurringAmount': 1},
{'nextInvoiceTotalRecurringAmount': 1},
],
'package': {
"id": 835,
"keyName": "PUBLIC_CLOUD_SERVER"
},
'orderItem': {
'order': {
'userRecord': {
'username': 'chechu',
}
}
}
},
'datacenter': {'id': 50, 'name': 'TEST00',
'description': 'Test Data Center'},
'powerState': {'keyName': 'RUNNING', 'name': 'Running'},
'maxCpu': 2,
'maxMemory': 1024,
'primaryIpAddress': '172.16.240.2',
'globalIdentifier': '1a2b3c-1701',
'primaryBackendIpAddress': '10.45.19.37',
'primaryNetworkComponent': {'speed': 10, 'maxSpeed': 100},
'hourlyBillingFlag': False,
'createDate': '2013-08-01 15:23:45',
'blockDevices': [{'device': 0, 'mountType': 'Disk', 'uuid': 1},
{'device': 1, 'mountType': 'Disk',
'diskImage': {'type': {'keyName': 'SWAP'}}},
{'device': 2, 'mountType': 'CD'},
{'device': 3, 'mountType': 'Disk', 'uuid': 3},
{'device': 4, 'mountType': 'Disk', 'uuid': 4,
'diskImage': {'metadataFlag': True}}],
'notes': 'notes',
'networkVlans': [{'networkSpace': 'PUBLIC',
'vlanNumber': 23,
'id': 1}],
'dedicatedHost': {'id': 37401},
'transientGuestFlag': False,
'operatingSystem': {
'passwords': [{'username': 'user', 'password': 'pass'}],
'softwareLicense': {
'softwareDescription': {'version': '12.04-64 Minimal for VSI',
'name': 'Ubuntu'}}
},
'softwareComponents': [{
'passwords': [{'username': 'user', 'password': 'pass'}],
'softwareLicense': {
'softwareDescription': {'name': 'Ubuntu'}}
}],
'tagReferences': [{'tag': {'name': 'production'}}],
}
getCreateObjectOptions = {
'flavors': [
{
'flavor': {
'keyName': 'B1_1X2X25'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'B1_1X2X25'
}
}
},
{
'flavor': {
'keyName': 'B1_1X2X25_TRANSIENT'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'B1_1X2X25_TRANSIENT'
},
'transientGuestFlag': True
}
},
{
'flavor': {
'keyName': 'B1_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'B1_1X2X100'
}
}
},
{
'flavor': {
'keyName': 'BL1_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'BL1_1X2X100'
}
}
},
{
'flavor': {
'keyName': 'BL2_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'BL2_1X2X100'
}
}
},
{
'flavor': {
'keyName': 'C1_1X2X25'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'C1_1X2X25'
}
}
},
{
'flavor': {
'keyName': 'M1_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'M1_1X2X100'
}
}
},
{
'flavor': {
'keyName': 'AC1_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'AC1_1X2X100'
}
}
},
{
'flavor': {
'keyName': 'ACL1_1X2X100'
},
'template': {
'supplementalCreateObjectOptions': {
'flavorKeyName': 'ACL1_1X2X100'
}
}
},
],
'processors': [
{
'itemPrice': {
'item': {'description': '1 x 2.0 GHz Core'},
'hourlyRecurringFee': '.07',
'recurringFee': '29'
},
'template': {'startCpus': 1}
},
{
'itemPrice': {
'item': {'description': '2 x 2.0 GHz Cores'},
'hourlyRecurringFee': '.14',
'recurringFee': '78'
},
'template': {'startCpus': 2}
},
{
'itemPrice': {
'item': {'description': '3 x 2.0 GHz Cores'},
'hourlyRecurringFee': '.205',
'recurringFee': '123.5'
},
'template': {'startCpus': 3}
},
{
'itemPrice': {
'item': {'description': '4 x 2.0 GHz Cores'},
'hourlyRecurringFee': '.265',
'recurringFee': '165.5'
},
'template': {'startCpus': 4}
},
{
'itemPrice': {
'hourlyRecurringFee': '.209',
'recurringFee': '139',
'dedicatedHostInstanceFlag': False,
'item': {
'description': '1 x 2.0 GHz Cores (Dedicated)'
}
},
'template': {
'dedicatedAccountHostOnlyFlag': True,
'startCpus': 1
}
},
{
'itemPrice': {
'hourlyRecurringFee': '0',
'recurringFee': '0',
'dedicatedHostInstanceFlag': True,
'item': {
'description': '56 x 2.0 GHz Cores (Dedicated Host)'
}
},
'template': {
'startCpus': 56,
'dedicatedHost': {
'id': None
}
}
},
{
'itemPrice': {
'hourlyRecurringFee': '0',
'recurringFee': '0',
'dedicatedHostInstanceFlag': True,
'item': {
'description': '4 x 2.0 GHz Cores (Dedicated Host)'
}
},
'template': {
'startCpus': 4,
'dedicatedHost': {
'id': None
}
}
},
],
'memory': [
{
'itemPrice': {
'item': {'description': '1 GB'},
'hourlyRecurringFee': '.03',
'recurringFee': '21'
},
'template': {'maxMemory': 1024}
},
{
'itemPrice': {
'item': {'description': '2 GB'},
'hourlyRecurringFee': '.06',
'recurringFee': '42'
},
'template': {'maxMemory': 2048}
},
{
'itemPrice': {
'item': {'description': '3 GB'},
'hourlyRecurringFee': '.085',
'recurringFee': '59.5'},
'template': {'maxMemory': 3072}
},
{
'itemPrice': {
'item': {'description': '4 GB'},
'hourlyRecurringFee': '.11',
'recurringFee': '77'
},
'template': {'maxMemory': 4096}
},
{
'itemPrice': {
'hourlyRecurringFee': '0',
'recurringFee': '0',
'dedicatedHostInstanceFlag': True,
'item': {
'description': '64 GB (Dedicated Host)'
}
},
'template': {
'maxMemory': 65536
}
},
{
'itemPrice': {
'hourlyRecurringFee': '0',
'recurringFee': '0',
'dedicatedHostInstanceFlag': True,
'item': {
'description': '8 GB (Dedicated Host)'
}
},
'template': {
'maxMemory': 8192
}
},
],
'blockDevices': [
{
'itemPrice': {
'item': {'description': '25 GB (LOCAL)'},
'hourlyRecurringFee': '0',
'recurringFee': '0'},
'template': {
'blockDevices': [
{'device': '0', 'diskImage': {'capacity': 25}}
],
'localDiskFlag': True
}
},
{
'itemPrice': {
'item': {'description': '100 GB (LOCAL)'},
'hourlyRecurringFee': '.01',
'recurringFee': '7'
},
'template': {
'blockDevices': [
{'device': '0', 'diskImage': {'capacity': 100}}
],
'localDiskFlag': True
}
},
],
'operatingSystems': [
{
'itemPrice': {
'item': {
'description': 'CentOS 6.0 - Minimal Install (64 bit)'
},
'hourlyRecurringFee': '0',
'recurringFee': '0'
},
'template': {
'operatingSystemReferenceCode': 'CENTOS_6_64'
}
},
{
'itemPrice': {
'item': {
'description': 'Debian GNU/Linux 7.0 Wheezy/Stable -'
' Minimal Install (64 bit)'
},
'hourlyRecurringFee': '0',
'recurringFee': '0'
},
'template': {
'operatingSystemReferenceCode': 'DEBIAN_7_64'
}
},
{
'itemPrice': {
'item': {
'description': 'Ubuntu Linux 12.04 LTS Precise'
' Pangolin - Minimal Install (64 bit)'
},
'hourlyRecurringFee': '0',
'recurringFee': '0'
},
'template': {
'operatingSystemReferenceCode': 'UBUNTU_12_64'
}
},
],
'networkComponents': [
{
'itemPrice': {
'item': {
'description': '10 Mbps Public & Private Networks'
},
'hourlyRecurringFee': '0',
'recurringFee': '0'},
'template': {
'networkComponents': [{'maxSpeed': 10}]
}
},
{
'itemPrice': {
'item': {'description': '100 Mbps Private Network'},
'hourlyRecurringFee': '0',
'recurringFee': '0'},
'template': {
'networkComponents': [{'maxSpeed': 100}]
}
},
{
'itemPrice': {
'item': {'description': '1 Gbps Private Network'},
'hourlyRecurringFee': '.02',
'recurringFee': '10'
},
'template': {
'networkComponents': [{'maxSpeed': 1000}]
}
},
{
'itemPrice': {
'hourlyRecurringFee': '0',
'recurringFee': '0',
'dedicatedHostInstanceFlag': True,
'item': {
'description': '1 Gbps Public & Private Network Uplinks (Dedicated Host)'
}
},
'template': {
'networkComponents': [
{
'maxSpeed': 1000
}
],
'privateNetworkOnlyFlag': False
}
},
],
'datacenters': [
{'template': {'datacenter': {'name': 'ams01'}}},
{'template': {'datacenter': {'name': 'dal05'}}},
],
}
getReverseDomainRecords = [{
'networkAddress': '12.34.56.78',
'name': '12.34.56.78.in-addr.arpa',
'resourceRecords': [{'data': 'test.softlayer.com.', 'id': 987654}],
'updateDate': '2013-09-11T14:36:57-07:00',
'serial': 1234665663,
'id': 123456,
}]
editObject = True
deleteObject = True
setPrivateNetworkInterfaceSpeed = True
setPublicNetworkInterfaceSpeed = True
createObject = getObject
createObjects = [getObject]
generateOrderTemplate = {
"imageTemplateId": None,
"location": "1854895",
"packageId": 835,
"presetId": 405,
"prices": [
{
"hourlyRecurringFee": "0",
"id": 45466,
"recurringFee": "0",
"item": {
"description": "CentOS 7.x - Minimal Install (64 bit)"
}
},
{
"hourlyRecurringFee": "0",
"id": 2202,
"recurringFee": "0",
"item": {
"description": "25 GB (SAN)"
}
},
{
"hourlyRecurringFee": "0",
"id": 905,
"recurringFee": "0",
"item": {
"description": "Reboot / Remote Console"
}
},
{
"hourlyRecurringFee": ".02",
"id": 899,
"recurringFee": "10",
"item": {
"description": "1 Gbps Private Network Uplink"
}
},
{
"hourlyRecurringFee": "0",
"id": 1800,
"item": {
"description": "0 GB Bandwidth Allotment"
}
},
{
"hourlyRecurringFee": "0",
"id": 21,
"recurringFee": "0",
"item": {
"description": "1 IP Address"
}
},
{
"hourlyRecurringFee": "0",
"id": 55,
"recurringFee": "0",
"item": {
"description": "Host Ping"
}
},
{
"hourlyRecurringFee": "0",
"id": 57,
"recurringFee": "0",
"item": {
"description": "Email and Ticket"
}
},
{
"hourlyRecurringFee": "0",
"id": 58,
"recurringFee": "0",
"item": {
"description": "Automated Notification"
}
},
{
"hourlyRecurringFee": "0",
"id": 420,
"recurringFee": "0",
"item": {
"description": "Unlimited SSL VPN Users & 1 PPTP VPN User per account"
}
},
{
"hourlyRecurringFee": "0",
"id": 418,
"recurringFee": "0",
"item": {
"description": "Nessus Vulnerability Assessment & Reporting"
}
}
],
"quantity": 1,
"sourceVirtualGuestId": None,
"sshKeys": [],
"useHourlyPricing": True,
"virtualGuests": [
{
"domain": "test.local",
"hostname": "test"
}
],
"complexType": "SoftLayer_Container_Product_Order_Virtual_Guest"
}
setUserMetadata = ['meta']
reloadOperatingSystem = 'OK'
setTags = True
createArchiveTransaction = {
'createDate': '2018-12-10T17:29:18-06:00',
'elapsedSeconds': 0,
'guestId': 12345678,
'hardwareId': None,
'id': 12345,
'modifyDate': '2018-12-10T17:29:18-06:00',
'statusChangeDate': '2018-12-10T17:29:18-06:00'
}
executeRescueLayer = True
getUpgradeItemPrices = [
{
'id': 1007,
'categories': [{'id': 80,
'name': 'Computing Instance',
'categoryCode': 'guest_core'}],
'item': {
'capacity': '4',
'units': 'PRIVATE_CORE',
'description': 'Computing Instance (Dedicated)',
}
},
{
'id': 1144,
'locationGroupId': None,
'categories': [{'id': 80,
'name': 'Computing Instance',
'categoryCode': 'guest_core'}],
'item': {
'capacity': '4',
'units': 'CORE',
'description': 'Computing Instance',
}
},
{
'id': 332211,
'locationGroupId': 1,
'categories': [{'id': 80,
'name': 'Computing Instance',
'categoryCode': 'guest_core'}],
'item': {
'capacity': '4',
'units': 'CORE',
'description': 'Computing Instance',
}
},
{
'id': 1122,
'categories': [{'id': 26,
'name': 'Uplink Port Speeds',
'categoryCode': 'port_speed'}],
'item': {
'capacity': '1000',
'description': 'Public & Private Networks',
}
},
{
'id': 1144,
'categories': [{'id': 26,
'name': 'Uplink Port Speeds',
'categoryCode': 'port_speed'}],
'item': {
'capacity': '1000',
'description': 'Private Networks',
}
},
{
'id': 1133,
'categories': [{'id': 3,
'name': 'RAM',
'categoryCode': 'ram'}],
'item': {
'capacity': '2',
'description': 'RAM',
}
},
]
DEDICATED_GET_UPGRADE_ITEM_PRICES = [
{
'id': 115566,
'categories': [{'id': 80,
'name': 'Computing Instance',
'categoryCode': 'guest_core'}],
'item': {
'capacity': '4',
'units': 'DEDICATED_CORE',
'description': 'Computing Instance (Dedicated Host)',
}
},
]
getMetricTrackingObjectId = 1000
getBandwidthAllotmentDetail = {
'allocationId': 25465663,
'bandwidthAllotmentId': 138442,
'effectiveDate': '2019-04-03T23:00:00-06:00',
'endEffectiveDate': None,
'id': 25888247,
'serviceProviderId': 1,
'allocation': {
'amount': '250'
}
}
getBillingCycleBandwidthUsage = [
{
'amountIn': '.448',
'amountOut': '.52157',
'type': {
'alias': 'PUBLIC_SERVER_BW'
}
},
{
'amountIn': '.03842',
'amountOut': '.01822',
'type': {
'alias': 'PRIVATE_SERVER_BW'
}
}
]
|
mit
| -4,988,771,412,386,916,000
| 27.619048
| 93
| 0.391275
| false
| 4.179961
| false
| false
| false
|
d120/pyfeedback
|
src/feedback/models/fragebogen2016.py
|
1
|
14676
|
# coding=utf-8
from django.db import models
from feedback.models import Fragebogen, Ergebnis
class Fragebogen2016(Fragebogen):
fach = models.CharField(max_length=5, choices=Fragebogen.FACH_CHOICES, blank=True)
abschluss = models.CharField(max_length=5, choices=Fragebogen.ABSCHLUSS_CHOICES, blank=True)
semester = models.CharField(max_length=4, choices=Fragebogen.SEMESTER_CHOICES16, blank=True)
geschlecht = models.CharField(max_length=1, choices=Fragebogen.GESCHLECHT_CHOICES, blank=True)
studienberechtigung = models.CharField(max_length=1, choices=Fragebogen.STUDIENBERECHTIGUNG_CHOICES, blank=True)
pflichveranstaltung = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
male_veranstaltung_gehoert = models.CharField(max_length=1, choices=Fragebogen.VERANSTALTUNG_GEHOERT, blank=True)
pruefung_angetreten = models.CharField(max_length=1, choices=Fragebogen.KLAUSUR_ANGETRETEN, blank=True)
v_wie_oft_besucht = models.PositiveSmallIntegerField(blank=True, null=True)
v_besuch_ueberschneidung = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_qualitaet = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_verhaeltnisse = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_privat = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_elearning = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_zufrueh = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_besuch_sonstiges = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
v_3_1 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_2 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_3 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_4 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_5 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_6 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_7 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_8 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_9 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_10 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_11 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_12 = models.PositiveSmallIntegerField(blank=True, null=True)
v_3_13 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_1 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_2 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_3 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_4 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_5 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_6 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_7 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_8 = models.PositiveSmallIntegerField(blank=True, null=True)
v_4_9 = models.PositiveSmallIntegerField(blank=True, null=True)
v_5_1 = models.PositiveSmallIntegerField(blank=True, null=True)
v_5_2 = models.PositiveSmallIntegerField(blank=True, null=True)
v_6_1 = models.CharField(max_length=1, choices=Fragebogen.STUNDEN_NACHBEARBEITUNG, blank=True)
v_6_2 = models.CharField(max_length=3, blank=True)
v_6_3 = models.PositiveSmallIntegerField(blank=True, null=True)
v_6_4 = models.PositiveSmallIntegerField(blank=True, null=True)
v_6_5 = models.PositiveSmallIntegerField(blank=True, null=True)
v_6_8 = models.CharField(max_length=1, choices=Fragebogen.BOOLEAN_CHOICES, blank=True)
class Meta:
verbose_name = 'Fragebogen 2016'
verbose_name_plural = 'Fragebögen 2016'
ordering = ['semester', 'veranstaltung']
app_label = 'feedback'
class Ergebnis2016(Ergebnis):
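    # Each entry in the parts lists below is [result field name, display label,
    # list of questionnaire items that feed into that aggregate score].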
parts_vl = [
['v_6_5', 'Vorlesung: Gesamtnote',
['6.5 Welche Gesamtnote würdest Du der Vorlesung (ohne Übungen) geben?']],
['v_didaktik', 'Vorlesung: Didaktik',
['3.3 Die Lernziele der Veranstaltung sind mir klar geworden.',
'3.4 Der Stoff wurde anhand von Beispielen verdeutlicht.',
'3.9 Ich habe durch diese Veranstaltung viel gelernt.',
'3.10 Mein Vorwissen war ausreichend, um der Vorlesung folgen zu können.',
'3.11 Ich kann abschätzen, was in der Prüfung von mir erwartet wird.',
'4.1 Die Lehrkraft hat Kompliziertes verständlich dargelegt.',
'4.3 Die Lehrkraft hat die Vorlesung rhetorisch gut gestaltet.',
'4.4 Die Lehrkraft hat die Vorlesung didaktisch gut gestaltet.',
'4.6 Der Lehrende regte gezielt zur eigenen Mitarbeit / zum Mitdenken in der Vorlesung an.',
'4.7 Die Lehrkraft hat elektronische Plattformen sinnvoll und hilfreich eingesetzt.']],
['v_organisation', 'Vorlesung: Organisation',
['3.1 Die Vorlesung war inhaltlich gut strukturiert, ein roter Faden war erkennbar.',
'3.2 Die Organisation der Vorlesung war gut.',
'3.6 Die (Zwischen-)Fragen der Studierenden wurden angemessen beantwortet.',
'4.2 Die Lehrkraft zeigte sich gut vorbereitet.',
'4.5 Der Lehrende war auch außerhalb der Vorlesung ansprechbar.',
'4.8 Die Sprachkenntnisse der Lehrkraft in der Vorlesungssprache waren gut.',
'4.9 Die Lehrkraft hielt die Vorlesung größtenteils selbst.']],
['v_praxisbezug_motivation', 'Vorlesung: Praxisbezug und Motivation',
['3.5 Der Bezug zwischen Theorie und praktischem Arbeiten / praktischen Anwendungen wurde hergestellt.',
'3.8 Die Vorlesung motivierte dazu, sich außerhalb der Veranstaltung selbstständig mit den behandelten Themen auseinanderzusetzen.']],
]
parts_ue = [
['ue_didaktik', 'Übung: Didaktik',
['4.1 Die Übung war inhaltlich gut strukturiert.',
'4.2 Die Lernziele der Übung sind mir klar geworden.',
'5.2 Der*Die Tutor*in hat gut und verständlich erklärt.',
'5.3 Der*Die Tutor*in hat die Gruppe motiviert.',
'5.4 Der*Die Tutor*in war fachlich kompetent.',
'5.5 Der*Die Tutor*in zeigte sich gut vorbereitet.',
'5.6 Der*Die Tutor*in hat die Übungstunde gut strukturiert.',
'5.7 Der*Die Tutor*in war engagiert.',
'5.8 Der*Die Tutor*in stellte wesentliche Punkte zur Bearbeitung der Aufgaben vor.',
'5.9 Der*Die Tutor*in regte mich gezielt zum Mitdenken und zu eigener Mitarbeit an.',
'5.10 Der*Die Tutor*in setzte verfügbare Medien (z. B. Tafel, Projektor, Beamer) sinnvoll ein.',
'5.11 Der*Die Tutor*in hat elektronische Plattformen sinnvoll und hilfreich eingesetzt.',
'5.15 Der*Die Tutor*in hat konstruktives bzw. gutes Feedback gegeben.']],
['ue_organisation', 'Übung: Organisation',
['3.3 Die Aufgabenstellungen waren verständlich.',
'3.4 Die Übungsaufgaben hatten inhaltlich eine klare Struktur.',
'3.5 Die Übungsaufgaben waren motivierend.',
'3.6 Es wurden ausreichend Lösungsvorschläge bereitgestellt bzw. präsentiert.',
'3.7 Der Stoff der Vorlesung war gut auf die Übungen abgestimmt.',
'3.8 Mein Vorwissen war ausreichend, um die Übungsaufgaben bearbeiten zu können.',
'4.3 Die Organisation des Übungsbetriebs war gut.',
'4.4 Es wurde genug Übungsmaterial (Aufgaben, etc.) zur Verfügung gestellt.',
'4.5 Es stand genug Zeit für die Bearbeitung der Aufgaben zur Verfügung.',
'4.6 Die Abgaben waren gut vereinbar mit anderen Veranstaltungen laut Regelstudienplan.']],
['ue_arbeitsbedingungen', 'Übung: Arbeitsbedingungen',
['4.7 Die Auswahlmöglichkeiten der Termine waren angemessen bzw. der Übungszeitpunkt war passend.',
'4.8 Die Gruppengröße war zufriedenstellend.',
'4.9 Der Raum für die Übungen war zum Arbeiten und Lernen geeignet.']],
['ue_umgang', 'Übung: Umgang',
['5.12 Der*Die Tutor*in erschien pünktlich.',
'5.13 Der*Die Tutor*in behandelte alle Studierenden respektvoll.',
'5.14 Der*Die Tutor*in teilte die Zeit zwischen den Studierenden angemessen auf.',
'5.16 Der*Die Tutor*in hat nachvollziehbar bewertet bzw. benotet.']],
['ue_lernerfolg', 'Übung: Lernerfolg',
['3.1 Durch die Aufgaben und den Übungsbetrieb habe ich viel gelernt.',
'3.2 Die Übungen haben mir geholfen, den Stoff der Vorlesung besser zu verstehen.']],
]
parts = parts_vl + parts_ue
hidden_parts = [
['v_feedbackpreis', 'Feedbackpreis: Beste Vorlesung',
['2.4 Die Vorlesung war inhaltlich gut strukturiert, ein roter Faden war erkennbar.',
'2.5 Die Lernziele der Veranstaltung sind mir klar geworden.',
'2.6 Die Lehrkraft hat Kompliziertes verständlich dargelegt.',
'2.7 Der Stoff wurde anhand von Beispielen verdeutlicht.',
'2.8 Die Lehrkraft zeigte Bezüge zur aktuellen Forschung auf.',
'2.9 Der Bezug zwischen Theorie und praktischem Arbeiten / praktischen Anwendungen wurde hergestellt.',
'2.10 Das Tempo der Vorlesung war angemessen.',
'2.11 Die Lehrkraft zeigte sich gut vorbereitet.',
'2.12 Die (Zwischen-)Fragen der Studierenden wurden angemessen beantwortet.',
'2.13 Der Lehrende war auch außerhalb der Veranstaltung ansprechbar.',
'2.14 Der Lehrende regte gezielt zur eigenen Mitarbeit / zum Mitdenken in der Veranstaltung an.',
'3.8 Die Vorlesung motivierte dazu, sich außerhalb der Veranstaltungselbstständig mit den behandelten Themen auseinander zu setzen.',
'3.7 Die Vorlesungsmaterialien (Folien, Skripte, Tafelanschrieb, Lehrbücher,e-Learning, etc.) haben das Lernen wirkungsvoll unterstützt.',
'6.5 Welche Gesamtnote würdest Du der Vorlesung (ohne Übungen) geben?']],
['ue_feedbackpreis', 'Feedbackpreis: Beste Übung',
['3.1 Durch die Aufgaben und den Übungsbetrieb habe ich viel gelernt.',
'3.2 Die Übungen haben mir geholfen, den Stoff der Vorlesung besser zu verstehen.',
'3.3 Die Aufgabenstellungen waren verständlich.',
'3.4 Die Übungsaufgaben hatten inhaltlich eine klare Struktur.',
'3.5 Die Übungsaufgaben waren motivierend.',
'3.7 Der Stoff der Vorlesung war gut auf die Übungen abgestimmt.',
'4.1 Die Übung war inhaltlich gut strukturiert.',
'4.2 Die Lernziele der Übung sind mir klar geworden.',
'4.3 Die Organisation des Übungsbetriebs war gut.',
'4.4 Es wurde genug Übungsmaterial (Aufgaben, etc.) zur Verfügung gestellt.',
'4.5 Es stand genug Zeit für die Bearbeitung der Aufgaben zur Verfügung.',
'6.3 Welche Gesamtnote gibst du der Übung?']],
]
weight = {
'v_feedbackpreis': [1] * 13 + [13],
'ue_feedbackpreis': [1] * 10 + [10],
}
    # TODO: use Decimal instead of float
v_didaktik = models.FloatField(blank=True, null=True)
v_didaktik_count = models.PositiveIntegerField(default=0)
v_didaktik_parts = ['v_3_3', 'v_3_4', 'v_3_9', 'v_3_10', 'v_4_1', 'v_4_3', 'v_4_4', 'v_4_6', 'v_4_7']
v_organisation = models.FloatField(blank=True, null=True)
v_organisation_count = models.PositiveIntegerField(default=0)
v_organisation_parts = ['v_3_1', 'v_3_2', 'v_3_6', 'v_4_2', 'v_4_5', 'v_4_7', 'v_4_8', 'v_4_9']
v_praxisbezug_motivation = models.FloatField(blank=True, null=True)
v_praxisbezug_motivation_count = models.PositiveIntegerField(default=0)
v_praxisbezug_motivation_parts = ['v_3_5', 'v_4_8']
v_6_5 = models.FloatField(blank=True, null=True)
v_6_5_count = models.PositiveIntegerField(default=0)
v_feedbackpreis = models.FloatField(blank=True, null=True)
v_feedbackpreis_count = models.PositiveIntegerField(default=0)
v_feedbackpreis_parts = ['v_3_1', 'v_3_2', 'v_3_3', 'v_3_4', 'v_3_5', 'v_3_6', 'v_3_7', 'v_3_8', 'v_3_9', 'v_4_1', 'v_4_2', 'v_4_3', 'v_4_4',
'v_4_5', 'v_4_6', 'v_4_9', 'v_6_2', 'v_6_5', 'v_gesamt']
ue_didaktik = models.FloatField(blank=True, null=True)
ue_didaktik_count = models.PositiveIntegerField(default=0)
ue_didaktik_parts = ['ue_4_1', 'ue_4_2', 'ue_5_2', 'ue_5_3', 'ue_5_4', 'ue_5_5', 'ue_5_6', 'ue_5_7', 'ue_5_8', 'ue_5_9', 'ue_5_10', 'ue_5_11', 'ue_5_15']
ue_organisation = models.FloatField(blank=True, null=True)
ue_organisation_count = models.PositiveIntegerField(default=0)
ue_organisation_parts = ['ue_3_3', 'ue_3_4', 'ue_3_5', 'ue_3_6', 'ue_3_7', 'ue_3_8', 'ue_4_3', 'ue_4_4', 'ue_4_5', 'ue_4_6']
ue_arbeitsbedingungen = models.FloatField(blank=True, null=True)
ue_arbeitsbedingungen_count = models.PositiveIntegerField(default=0)
ue_arbeitsbedingungen_parts = ['ue_4_7', 'ue_4_8', 'ue_4_9']
ue_umgang = models.FloatField(blank=True, null=True)
ue_umgang_count = models.PositiveIntegerField(default=0)
ue_umgang_parts = ['ue_5_12', 'ue_5_13', 'ue_5_14', 'ue_5_16']
ue_lernerfolg = models.FloatField(blank=True, null=True)
ue_lernerfolg_count = models.PositiveIntegerField(default=0)
ue_lernerfolg_parts = ['ue_3_1', 'ue_3_2']
ue_feedbackpreis = models.FloatField(blank=True, null=True)
ue_feedbackpreis_count = models.PositiveIntegerField(default=0)
ue_feedbackpreis_parts = ['ue_3_1', 'ue_3_2', 'ue_3_3', 'ue_3_4', 'ue_3_5', 'ue_3_7', 'ue_4_1', 'ue_4_2', 'ue_4_3', 'ue_4_4', 'ue_4_5', 'ue_6_3']
gesamt = models.FloatField(blank=True, null=True)
gesamt_count = models.PositiveIntegerField(default=0)
class Meta:
verbose_name = 'Ergebnis 2016'
verbose_name_plural = 'Ergebnisse 2016'
ordering = ['veranstaltung']
app_label = 'feedback'
|
agpl-3.0
| 4,768,601,658,121,352,000
| 66
| 157
| 0.658291
| false
| 2.726017
| false
| false
| false
|
littley/network_cjl
|
network_cjl/ReceiveRequest.py
|
1
|
1833
|
import time
class ReceiveRequest(object):
"""
A ReceiveRequest is generated every time the first packet from a message is received. The ReceiveRequest
    then keeps track of all of the message's packets that have already been received.
"""
    def __init__(self, initial_packet, (host, port)):
        self.packets = set()
        self.total_packets = initial_packet.total_packets
        self.sequence_number = initial_packet.sequence_number
        self.hash = initial_packet.hash
        self.start_time = time.time()
        self.register_packet(initial_packet)
self.host = host
self.port = port
def complete(self):
"""
Returns True if this receive request has received all of its required packets
:return:
"""
return len(self.packets) == self.total_packets
def get_payload(self):
"""
Call this after completed. Will return the original payload
"""
self.packets = list(self.packets)
self.packets = sorted(self.packets, key=lambda pkt: pkt.packet_number)
payload = []
for packet in self.packets:
payload.append(packet.payload)
return ''.join(payload)
def owns_packet(self, packet):
"""
This function returns True if this ReceiveRequest corresponds to the given packet
:param packet: a Packet
"""
return self.hash == packet.hash \
and self.sequence_number == packet.sequence_number
def register_packet(self, packet):
self.packets.add(packet)
def packets_observed(self):
return len(self.packets)
def __eq__(self, other):
return self.sequence_number == other.sequence_number \
and self.hash == other.hash
def __hash__(self):
return hash(self.hash)
|
apache-2.0
| -5,376,255,074,754,944,000
| 29.55
| 109
| 0.61593
| false
| 4.302817
| false
| false
| false
|
DedMemez/ODS-August-2017
|
golf/GolfGlobals.py
|
1
|
13007
|
# toontown.golf.GolfGlobals
from direct.directnotify import DirectNotifyGlobal
import random
MAX_PLAYERS_PER_HOLE = 4
GOLF_BALL_RADIUS = 0.25
GOLF_BALL_VOLUME = 4.0 / 3.0 * 3.14159 * GOLF_BALL_RADIUS ** 3
GOLF_BALL_MASS = 0.5
GOLF_BALL_DENSITY = GOLF_BALL_MASS / GOLF_BALL_VOLUME
GRASS_SURFACE = 0
BALL_SURFACE = 1
HARD_SURFACE = 2
HOLE_SURFACE = 3
SLICK_SURFACE = 4
OOB_RAY_COLLIDE_ID = -1
GRASS_COLLIDE_ID = 2
HARD_COLLIDE_ID = 3
TOON_RAY_COLLIDE_ID = 4
MOVER_COLLIDE_ID = 7
WINDMILL_BASE_COLLIDE_ID = 8
CAMERA_RAY_COLLIDE_ID = 10
BALL_COLLIDE_ID = 42
HOLE_CUP_COLLIDE_ID = 64
SKY_RAY_COLLIDE_ID = 78
SLICK_COLLIDE_ID = 13
BALL_CONTACT_FRAME = 9
BALL_CONTACT_TIME = (BALL_CONTACT_FRAME + 1) / 24.0
AIM_DURATION = 60
TEE_DURATION = 15
RANDOM_HOLES = True
KICKOUT_SWINGS = 2
TIME_TIE_BREAKER = True
CourseInfo = {0: {'name': '',
'numHoles': 3,
'holeIds': (2, 3, 4, 5, 6, 7, 8, 12, 13, 15, 16)},
1: {'name': '',
'numHoles': 6,
'holeIds': ((0, 5),
(1, 5),
2,
3,
4,
5,
6,
7,
8,
9,
10,
(11, 5),
12,
13,
(14, 5),
15,
16,
(17, 5),
(20, 5),
(21, 5),
(22, 5),
(23, 5),
(24, 5),
(25, 5),
(26, 5),
(28, 5),
(30, 5),
(31, 5),
(33, 5),
(34, 5))},
2: {'name': '',
'numHoles': 9,
'holeIds': ((1, 5),
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
(14, 5),
15,
(17, 5),
(18, 20),
(19, 20),
(20, 20),
(21, 5),
(22, 5),
(23, 20),
(24, 20),
(25, 20),
(26, 20),
(27, 20),
(28, 20),
(29, 20),
(30, 5),
(31, 20),
(32, 20),
(33, 5),
(34, 20),
(35, 20))}}
HoleInfo = {0: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole18',
'physicsData': 'golfGreen18',
'blockers': (),
'optionalMovers': ()},
1: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole1',
'physicsData': 'golfGreen1',
'blockers': ()},
2: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole2',
'physicsData': 'golfGreen2',
'blockers': ()},
3: {'name': '',
'par': 2,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole3',
'physicsData': 'golfGreen3',
'blockers': ()},
4: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole4',
'physicsData': 'golfGreen4',
'blockers': ()},
5: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole5',
'physicsData': 'golfGreen2',
'blockers': ()},
6: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole6',
'physicsData': 'golfGreen6',
'blockers': ()},
7: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole7',
'physicsData': 'golfGreen7',
'blockers': ()},
8: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole8',
'physicsData': 'golfGreen8',
'blockers': ()},
9: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole9',
'physicsData': 'golfGreen9',
'blockers': 2},
10: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole10',
'physicsData': 'golfGreen10',
'blockers': ()},
11: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole11',
'physicsData': 'golfGreen11',
'blockers': ()},
12: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole12',
'physicsData': 'golfGreen12',
'blockers': ()},
13: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole13',
'physicsData': 'golfGreen13',
'blockers': ()},
14: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole14',
'physicsData': 'golfGreen14',
'blockers': ()},
15: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole15',
'physicsData': 'golfGreen15',
'blockers': ()},
16: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole16',
'physicsData': 'golfGreen16',
'blockers': ()},
17: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole17',
'physicsData': 'golfGreen17',
'blockers': ()},
18: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole18',
'physicsData': 'golfGreen18',
'blockers': (1, 2),
'optionalMovers': 1},
19: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole1',
'physicsData': 'golfGreen1',
'blockers': (2, 5)},
20: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole2',
'physicsData': 'golfGreen2',
'blockers': (1, 3)},
21: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole3',
'physicsData': 'golfGreen3',
'blockers': (1, 2, 3)},
22: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole4',
'physicsData': 'golfGreen4',
'blockers': 2},
23: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole5',
'physicsData': 'golfGreen5',
'blockers': (3, 4),
'optionalMovers': 1},
24: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole6',
'physicsData': 'golfGreen6',
'blockers': 1,
'optionalMovers': 1},
25: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole7',
'physicsData': 'golfGreen7',
'blockers': 3,
'optionalMovers': 1},
26: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole8',
'physicsData': 'golfGreen8',
'blockers': (),
'optionalMovers': 1},
27: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole9',
'physicsData': 'golfGreen9',
'blockers': (),
'optionalMovers': (1, 2)},
28: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole10',
'physicsData': 'golfGreen10',
'blockers': (),
'optionalMovers': (1, 2)},
29: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole11',
'physicsData': 'golfGreen11',
'blockers': (),
'optionalMovers': 1},
30: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole12',
'physicsData': 'golfGreen12',
'blockers': (1, 2, 3)},
31: {'name': '',
'par': 4,
'maxSwing': 7,
'terrainModel': 'phase_6/models/golf/hole13',
'physicsData': 'golfGreen13',
'blockers': (3, 4),
'optionalMovers': 1},
32: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole14',
'physicsData': 'golfGreen14',
'blockers': 1,
'optionalMovers': 1},
33: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole15',
'physicsData': 'golfGreen15',
'blockers': (1, 2, 3),
'optionalMovers': (1, 2)},
34: {'name': '',
'par': 3,
'maxSwing': 6,
'terrainModel': 'phase_6/models/golf/hole16',
'physicsData': 'golfGreen16',
'blockers': (1, 2, 5, 6),
'optionalMovers': 1},
35: {'name': '',
'par': 4,
'maxSwing': 7,
'terrainModel': 'phase_6/models/golf/hole17',
'physicsData': 'golfGreen17',
'blockers': (3, 4, 5)}}
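# Normalize scalar 'blockers'/'optionalMovers' entries to 1-tuples so that
# consumers can always iterate over them.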
for holeId in HoleInfo:
    if isinstance(HoleInfo[holeId]['blockers'], int):
        blockerNum = HoleInfo[holeId]['blockers']
        HoleInfo[holeId]['blockers'] = (blockerNum,)
    if 'optionalMovers' in HoleInfo[holeId]:
        if isinstance(HoleInfo[holeId]['optionalMovers'], int):
blockerNum = HoleInfo[holeId]['optionalMovers']
HoleInfo[holeId]['optionalMovers'] = (blockerNum,)
DistanceToBeInHole = 0.75
CoursesCompleted = 0
CoursesUnderPar = 1
HoleInOneShots = 2
EagleOrBetterShots = 3
BirdieOrBetterShots = 4
ParOrBetterShots = 5
MultiPlayerCoursesCompleted = 6
CourseZeroWins = 7
CourseOneWins = 8
CourseTwoWins = 9
TwoPlayerWins = 10
ThreePlayerWins = 11
FourPlayerWins = 12
MaxHistoryIndex = 9
NumHistory = MaxHistoryIndex + 1
CalcOtherHoleBest = False
CalcOtherCourseBest = False
TrophyRequirements = {CoursesCompleted: (6, 30, 60),
CoursesUnderPar: (1, 10, 50),
HoleInOneShots: (1, 10, 40),
EagleOrBetterShots: (1, 20, 50),
BirdieOrBetterShots: (1, 50, 100),
ParOrBetterShots: (1, 100, 150),
MultiPlayerCoursesCompleted: (10, 30, 60),
CourseZeroWins: (1, 10, 30),
CourseOneWins: (1, 10, 20),
CourseTwoWins: (1, 5, 10)}
PlayerColors = [(0.925, 0.168, 0.168, 1),
(0.13, 0.59, 0.973, 1),
(0.973, 0.809, 0.129, 1),
(0.598, 0.402, 0.875, 1)]
KartColors = [[[0, 50], [90, 255], [0, 85]], [[160, 255], [-15, 15], [0, 120]], [[160, 255], [0, 110], [0, 110]]]
NumTrophies = 0
for key in TrophyRequirements:
NumTrophies += len(TrophyRequirements[key])
NumCups = 3
TrophiesPerCup = NumTrophies / NumCups
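# With the requirements above there are 30 trophies (10 categories x 3 tiers),
# so each of the 3 cups is earned per 10 trophies won (integer division).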
def calcTrophyListFromHistory(history):
retval = []
historyIndex = 0
for trophyIndex in xrange(NumHistory):
requirements = TrophyRequirements[trophyIndex]
for amountNeeded in requirements:
if history[historyIndex] >= amountNeeded:
retval.append(True)
else:
retval.append(False)
historyIndex += 1
return retval
def calcCupListFromHistory(history):
retval = [False] * NumCups
trophyList = calcTrophyListFromHistory(history)
numTrophiesWon = 0
for gotTrophy in trophyList:
if gotTrophy:
numTrophiesWon += 1
for cupIndex in xrange(len(retval)):
threshold = (cupIndex + 1) * TrophiesPerCup
if threshold <= numTrophiesWon:
retval[cupIndex] = True
return retval
def getCourseName(courseId):
from toontown.toonbase import TTLocalizer
if courseId in CourseInfo:
if not CourseInfo[courseId]['name']:
CourseInfo[courseId]['name'] = TTLocalizer.GolfCourseNames[courseId]
return CourseInfo[courseId]['name']
else:
return ''
def getHoleName(holeId):
from toontown.toonbase import TTLocalizer
if holeId in HoleInfo:
if not HoleInfo[holeId]['name']:
HoleInfo[holeId]['name'] = TTLocalizer.GolfHoleNames[holeId]
return HoleInfo[holeId]['name']
else:
return ''
def getHistoryIndexForTrophy(trophyIndex):
retval = -1
divBy3 = int(trophyIndex / 3)
if divBy3 < NumHistory:
retval = divBy3
return retval
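# Hole scores fit in 4 bits (0-15), so two scores are packed per byte: the
# first score occupies the low nibble, the second the high nibble.
# e.g. packGolfHoleBest([3, 4]) -> [67] and unpackGolfHoleBest([67]) -> [3, 4].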
def packGolfHoleBest(holeBest):
retval = []
shiftLeft = False
for hole in holeBest:
hole &= 15
if shiftLeft:
retval[-1] |= hole << 4
shiftLeft = False
else:
retval.append(hole)
shiftLeft = True
return retval
def unpackGolfHoleBest(packedHoleBest):
retval = []
for packedHole in packedHoleBest:
lowbitHole = packedHole & 15
retval.append(lowbitHole)
highBitHole = (packedHole & 240) >> 4
retval.append(highBitHole)
return retval
|
apache-2.0
| -2,305,301,541,882,050,600
| 26.158009
| 113
| 0.496041
| false
| 3.012274
| false
| false
| false
|
fluxer/spm
|
nuitka/nuitka/Builtins.py
|
1
|
6584
|
# Copyright 2016, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Built-ins module. Information about built-ins of the running Python.
"""
import functools
import sys
from types import BuiltinFunctionType, FunctionType, GeneratorType
from nuitka.__past__ import iterItems
from nuitka.PythonVersions import python_version
def _getBuiltinExceptionNames():
def isExceptionName(builtin_name):
if builtin_name.endswith("Error") or \
builtin_name.endswith("Exception"):
return True
elif builtin_name in ("StopIteration", "GeneratorExit", "SystemExit",
"NotImplemented", "KeyboardInterrupt",
"StopAsyncIteration"):
return True
else:
return False
# Hide Python3 changes for built-in exception names
try:
import exceptions
names = [
str(name) for name in dir(exceptions)
if isExceptionName(name)
]
values = {}
for key in names:
values[key] = getattr(exceptions, key)
for key in dir(sys.modules["__builtin__"]):
name = str(key)
if isExceptionName(name):
names.append(key)
values[name] = getattr(sys.modules["__builtin__"], key)
except ImportError:
exceptions = {}
for key, value in sys.modules["builtins"].__dict__.items():
if isExceptionName(key):
exceptions[key] = value
names = [
key for key, value in exceptions.items()
]
values = {}
for key, value in exceptions.items():
values[key] = value
return names, values
builtin_exception_names, builtin_exception_values = _getBuiltinExceptionNames()
# Just to make sure it's covering these cases correctly.
assert "TypeError" in builtin_exception_names
assert "ValueError" in builtin_exception_names
assert "StopIteration" in builtin_exception_names
assert "GeneratorExit" in builtin_exception_names
assert "AssertionError" in builtin_exception_names
assert "BaseException" in builtin_exception_names
assert "Exception" in builtin_exception_names
assert "NotImplemented" in builtin_exception_names
assert "StopAsyncIteration" in builtin_exception_names or python_version < 350
def _getBuiltinNames():
names = [
str(x)
for x in __builtins__.keys()
]
for builtin_exception_name in builtin_exception_names:
if builtin_exception_name in names:
names.remove(builtin_exception_name)
names.remove("__doc__")
names.remove("__name__")
names.remove("__package__")
warnings = []
for builtin_name in names:
if builtin_name.endswith("Warning"):
warnings.append(builtin_name)
for builtin_name in warnings:
names.remove(builtin_name)
return names, warnings
builtin_names, builtin_warnings = _getBuiltinNames()
assert "__import__" in builtin_names
assert "int" in builtin_names
assert "__doc__" not in builtin_names
assert "sys" not in builtin_names
builtin_all_names = builtin_names + builtin_exception_names + builtin_warnings
def getBuiltinTypeNames():
result = []
for builtin_name in builtin_names:
        if isinstance(__builtins__[builtin_name], type):
result.append(builtin_name)
return tuple(sorted(result))
builtin_type_names = getBuiltinTypeNames()
def _getAnonBuiltins():
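    # Open a file that is guaranteed to exist (the interpreter binary itself)
    # purely to capture the 'file' type object for the anon-builtins table.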
with open(sys.executable) as any_file:
anon_names = {
# Strangely not Python3 types module
"NoneType" : type(None),
"ellipsis" : type(Ellipsis), # see above
"NotImplementedType" : type(NotImplemented),
"function" : FunctionType,
"builtin_function_or_method" : BuiltinFunctionType,
# Can't really have it any better way.
"compiled_function" : BuiltinFunctionType,
"generator" : GeneratorType,
"compiled_generator" : GeneratorType, # see above
"code" : type(_getAnonBuiltins.__code__),
"file" : type(any_file)
}
anon_codes = {
"NoneType" : "Py_TYPE( Py_None )",
"ellipsis" : "&PyEllipsis_Type",
"NotImplementedType" : "Py_TYPE( Py_NotImplemented )",
"function" : "&PyFunction_Type",
"builtin_function_or_method" : "&PyCFunction_Type",
"compiled_function" : "&Nuitka_Function_Type",
"compiled_generator" : "&Nuitka_Generator_Type",
"code" : "&PyCode_Type",
"file" : "&PyFile_Type"
}
if python_version < 300:
from types import ClassType, InstanceType, MethodType
anon_names["classobj"] = ClassType
anon_codes["classobj"] = "&PyClass_Type"
anon_names["instance"] = InstanceType
anon_codes["instance"] = "&PyInstance_Type"
anon_names["instancemethod"] = MethodType
anon_codes["instancemethod"] = "&PyMethod_Type"
return anon_names, anon_codes
builtin_anon_names, builtin_anon_codes = _getAnonBuiltins()
def calledWithBuiltinArgumentNamesDecorator(f):
""" Allow a function to be called with an "_arg" if a built-in name.
This avoids using built-in names in Nuitka source, while enforcing
a policy how to make them pretty.
"""
@functools.wraps(f)
def wrapper(*args, **kw):
new_kw = {}
for key, value in iterItems(kw):
if key in builtin_all_names:
key = key + "_arg"
new_kw[key] = value
return f(*args, **new_kw)
return wrapper
|
gpl-2.0
| 8,219,563,310,531,405,000
| 30.806763
| 79
| 0.605559
| false
| 4.255979
| false
| false
| false
|
seanbell/opensurfaces
|
server/intrinsic/algorithm/grosse2009/intrinsic.py
|
1
|
10552
|
import itertools
import numpy as np
import os
import png
import sys
import poisson
############################### Data ###########################################
def load_png(fname):
reader = png.Reader(fname)
w, h, pngdata, params = reader.read()
image = np.vstack(itertools.imap(np.uint16, pngdata))
if image.size == 3*w*h:
image = np.reshape(image, (h, w, 3))
return image.astype(float) / 255.
def load_object_helper(tag, condition):
"""Load an image of a given object as a NumPy array. The values condition may take are:
'mask', 'original', 'diffuse', 'shading', 'reflectance', 'specular'
'shading' returns a grayscale image, and all the other options return color images."""
assert condition in ['mask', 'original', 'diffuse', 'shading', 'reflectance', 'specular']
obj_dir = os.path.join('data', tag)
if condition == 'mask':
filename = os.path.join(obj_dir, 'mask.png')
mask = load_png(filename)
return (mask > 0)
if condition == 'original':
filename = os.path.join(obj_dir, 'original.png')
return load_png(filename)
if condition == 'diffuse':
filename = os.path.join(obj_dir, 'diffuse.png')
return load_png(filename)
if condition == 'shading':
filename = os.path.join(obj_dir, 'shading.png')
return load_png(filename)
if condition == 'reflectance':
filename = os.path.join(obj_dir, 'reflectance.png')
return load_png(filename)
if condition == 'specular':
filename = os.path.join(obj_dir, 'specular.png')
return load_png(filename)
# cache for efficiency because PyPNG is pure Python
cache = {}
def load_object(tag, condition):
if (tag, condition) not in cache:
cache[tag, condition] = load_object_helper(tag, condition)
return cache[tag, condition]
def load_multiple(tag):
"""Load the images of a given object for all lighting conditions. Returns an
m x n x 3 x 10 NumPy array, where the third dimension is the color channel and
the fourth dimension is the image number."""
obj_dir = os.path.join('data', tag)
filename = os.path.join(obj_dir, 'light01.png')
img0 = load_png(filename)
result = np.zeros(img0.shape + (10,))
for i in range(10):
filename = os.path.join(obj_dir, 'light%02d.png' % (i+1))
result[:,:,:,i] = load_png(filename)
return result
############################# Error metric #####################################
def ssq_error(correct, estimate, mask):
"""Compute the sum-squared-error for an image, where the estimate is
multiplied by a scalar which minimizes the error. Sums over all pixels
where mask is True. If the inputs are color, each color channel can be
rescaled independently."""
assert correct.ndim == 2
if np.sum(estimate**2 * mask) > 1e-5:
alpha = np.sum(correct * estimate * mask) / np.sum(estimate**2 * mask)
else:
alpha = 0.
return np.sum(mask * (correct - alpha*estimate) ** 2)
def local_error(correct, estimate, mask, window_size, window_shift):
"""Returns the sum of the local sum-squared-errors, where the estimate may
be rescaled within each local region to minimize the error. The windows are
window_size x window_size, and they are spaced by window_shift."""
M, N = correct.shape[:2]
ssq = total = 0.
for i in range(0, M - window_size + 1, window_shift):
for j in range(0, N - window_size + 1, window_shift):
correct_curr = correct[i:i+window_size, j:j+window_size]
estimate_curr = estimate[i:i+window_size, j:j+window_size]
mask_curr = mask[i:i+window_size, j:j+window_size]
ssq += ssq_error(correct_curr, estimate_curr, mask_curr)
total += np.sum(mask_curr * correct_curr**2)
    assert not np.isnan(ssq / total)
return ssq / total
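# Final score: average of the locally rescaled errors of the shading and
# reflectance estimates, computed over half-overlapping windows.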
def score_image(true_shading, true_refl, estimate_shading, estimate_refl, mask, window_size=20):
return 0.5 * local_error(true_shading, estimate_shading, mask, window_size, window_size//2) + \
0.5 * local_error(true_refl, estimate_refl, mask, window_size, window_size//2)
################################## Algorithms ##################################
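# Classic Retinex: keep only log-image gradients whose magnitude exceeds a
# threshold (attributed to reflectance edges), then reintegrate them with a
# Poisson solve to recover the reflectance; shading is what remains.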
def retinex(image, mask, threshold, L1=False):
image = np.clip(image, 3., np.infty)
log_image = np.where(mask, np.log(image), 0.)
i_y, i_x = poisson.get_gradients(log_image)
r_y = np.where(np.abs(i_y) > threshold, i_y, 0.)
r_x = np.where(np.abs(i_x) > threshold, i_x, 0.)
if L1:
log_refl = poisson.solve_L1(r_y, r_x, mask)
else:
log_refl = poisson.solve(r_y, r_x, mask)
refl = mask * np.exp(log_refl)
return np.where(mask, image / refl, 0.), refl
def project_gray(i_y):
i_y_mean = np.mean(i_y, axis=2)
result = np.zeros(i_y.shape)
for i in range(3):
result[:,:,i] = i_y_mean
return result
def project_chromaticity(i_y):
return i_y - project_gray(i_y)
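# Color Retinex: split gradients into a grayscale component and a chromaticity
# component, and classify an edge as reflectance if either the color change or
# the intensity change exceeds its own threshold.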
def color_retinex(image, mask, threshold_gray, threshold_color, L1=False):
image = np.clip(image, 3., np.infty)
log_image = np.log(image)
i_y_orig, i_x_orig = poisson.get_gradients(log_image)
i_y_gray, i_y_color = project_gray(i_y_orig), project_chromaticity(i_y_orig)
i_x_gray, i_x_color = project_gray(i_x_orig), project_chromaticity(i_x_orig)
image_grayscale = np.mean(image, axis=2)
image_grayscale = np.clip(image_grayscale, 3., np.infty)
log_image_grayscale = np.log(image_grayscale)
i_y, i_x = poisson.get_gradients(log_image_grayscale)
norm = np.sqrt(np.sum(i_y_color**2, axis=2))
i_y_match = (norm > threshold_color) + (np.abs(i_y_gray[:,:,0]) > threshold_gray)
norm = np.sqrt(np.sum(i_x_color**2, axis=2))
i_x_match = (norm > threshold_color) + (np.abs(i_x_gray[:,:,0]) > threshold_gray)
r_y = np.where(i_y_match, i_y, 0.)
r_x = np.where(i_x_match, i_x, 0.)
if L1:
log_refl = poisson.solve_L1(r_y, r_x, mask)
else:
log_refl = poisson.solve(r_y, r_x, mask)
refl = np.exp(log_refl)
return image_grayscale / refl, refl
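# Weiss's multi-image method: given many images of the same scene under
# different lighting, the per-pixel median of the log-gradients across images
# estimates the reflectance gradients, reintegrated with a Poisson solve.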
def weiss(image, multi_images, mask, L1=False):
multi_images = np.clip(multi_images, 3., np.infty)
log_multi_images = np.log(multi_images)
i_y_all, i_x_all = poisson.get_gradients(log_multi_images)
r_y = np.median(i_y_all, axis=2)
r_x = np.median(i_x_all, axis=2)
if L1:
log_refl = poisson.solve_L1(r_y, r_x, mask)
else:
log_refl = poisson.solve(r_y, r_x, mask)
refl = np.where(mask, np.exp(log_refl), 0.)
shading = np.where(mask, image / refl, 0.)
return shading, refl
def weiss_retinex(image, multi_images, mask, threshold, L1=False):
multi_images = np.clip(multi_images, 3., np.infty)
log_multi_images = np.log(multi_images)
i_y_all, i_x_all = poisson.get_gradients(log_multi_images)
r_y = np.median(i_y_all, axis=2)
r_x = np.median(i_x_all, axis=2)
r_y *= (np.abs(r_y) > threshold)
r_x *= (np.abs(r_x) > threshold)
if L1:
log_refl = poisson.solve_L1(r_y, r_x, mask)
else:
log_refl = poisson.solve(r_y, r_x, mask)
refl = np.where(mask, np.exp(log_refl), 0.)
shading = np.where(mask, image / refl, 0.)
return shading, refl
#################### Wrapper classes for experiments ###########################
class BaselineEstimator:
"""Assume every image is entirely shading or entirely reflectance."""
def __init__(self, mode, L1=False):
assert mode in ['refl', 'shading']
self.mode = mode
def estimate_shading_refl(self, image, mask, L1=False):
if self.mode == 'refl':
refl = image
shading = 1. * mask
else:
refl = 1. * mask
shading = image
return shading, refl
@staticmethod
def get_input(tag):
image = load_object(tag, 'diffuse')
image = np.mean(image, axis=2)
mask = load_object(tag, 'mask')
return image, mask
@staticmethod
def param_choices():
return [{'mode': m} for m in ['shading', 'refl']]
class GrayscaleRetinexEstimator:
def __init__(self, threshold):
self.threshold = threshold
def estimate_shading_refl(self, image, mask, L1=False):
return retinex(image, mask, self.threshold, L1)
@staticmethod
def get_input(tag):
image = load_object(tag, 'diffuse')
image = np.mean(image, axis=2)
mask = load_object(tag, 'mask')
return image, mask
@staticmethod
def param_choices():
return [{'threshold': t} for t in np.logspace(-3., 1., 15)]
class ColorRetinexEstimator:
def __init__(self, threshold_gray, threshold_color, L1=False):
self.threshold_gray = threshold_gray
self.threshold_color = threshold_color
def estimate_shading_refl(self, image, mask, L1=False):
return color_retinex(image, mask, self.threshold_gray, self.threshold_color, L1)
@staticmethod
def get_input(tag):
image = load_object(tag, 'diffuse')
mask = load_object(tag, 'mask')
return image, mask
@staticmethod
def param_choices():
return [{'threshold_gray': tg, 'threshold_color': tc}
for tg in np.logspace(-1.5, 0., 5)
for tc in np.logspace(-1.5, 0., 5)]
class WeissEstimator:
def estimate_shading_refl(self, image, multi_images, mask, L1=False):
return weiss(image, multi_images, mask, L1)
@staticmethod
def get_input(tag):
image = load_object(tag, 'diffuse')
image = np.mean(image, axis=2)
mask = load_object(tag, 'mask')
multi_images = load_multiple(tag)
multi_images = np.mean(multi_images, axis=2)
return image, multi_images, mask
@staticmethod
def param_choices():
return [{}]
class WeissRetinexEstimator:
def __init__(self, threshold=0.1, L1=False):
self.threshold = threshold
def estimate_shading_refl(self, image, multi_images, mask, L1=False):
return weiss_retinex(image, multi_images, mask, self.threshold, L1)
@staticmethod
def get_input(tag):
image = load_object(tag, 'diffuse')
image = np.mean(image, axis=2)
mask = load_object(tag, 'mask')
multi_images = load_multiple(tag)
multi_images = np.mean(multi_images, axis=2)
return image, multi_images, mask
@staticmethod
def param_choices():
return [{'threshold': t} for t in np.logspace(-3., 1., 15)]
|
mit
| -6,676,248,594,824,166,000
| 32.18239
| 99
| 0.60453
| false
| 3.157391
| false
| false
| false
|
sigurdga/samklang-blog
|
samklang_blog/views.py
|
1
|
2247
|
from django.http import HttpResponseRedirect
from django.views.generic.edit import CreateView, UpdateView
from django.views.generic.dates import ArchiveIndexView, YearArchiveView, MonthArchiveView, DateDetailView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.contrib.sites.models import Site
from samklang_blog.models import Entry
from samklang_blog.forms import EntryForm
from datetime import datetime
MONTH_FORMAT = '%m'
class EntryCreateView(CreateView):
model = Entry
form_class = EntryForm
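    # Note: datetime.now() is evaluated once when this class is defined, so
    # every form gets the same default pub_date until the process restarts.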
initial = {'pub_date': datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
month_format = MONTH_FORMAT
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.user = self.request.user
if hasattr(self.request, 'site'):
self.object.site = self.request.site
else:
self.object.site = Site.objects.get(pk=1)
self.object.save()
return HttpResponseRedirect(self.object.get_absolute_url())
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(EntryCreateView, self).dispatch(*args, **kwargs)
class EntryUpdateView(UpdateView):
model = Entry
form_class = EntryForm
month_format = MONTH_FORMAT
#def form_valid(self, form):
# self.object = form.save()
# return HttpResponseRedirect(self.object.get_absolute_url())
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(EntryUpdateView, self).dispatch(*args, **kwargs)
class EntryArchiveIndexView(ArchiveIndexView):
model = Entry
date_field = 'pub_date'
month_format = MONTH_FORMAT
allow_empty = True
def get_queryset(self):
return Entry.live.all()
class EntryYearArchiveView(YearArchiveView):
model = Entry
date_field = 'pub_date'
month_format = MONTH_FORMAT
allow_empty = True
class EntryMonthArchiveView(MonthArchiveView):
model = Entry
date_field = 'pub_date'
month_format = MONTH_FORMAT
allow_empty = True
class EntryDateDetailView(DateDetailView):
model = Entry
date_field = 'pub_date'
month_format = MONTH_FORMAT
|
agpl-3.0
| -2,393,359,891,051,174,000
| 29.780822
| 106
| 0.70227
| false
| 3.732558
| false
| false
| false
|
czpython/django-cms
|
cms/page_rendering.py
|
1
|
2938
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import resolve, Resolver404, reverse
from django.http import Http404
from django.shortcuts import render
from django.template.response import TemplateResponse
from cms import __version__
from cms.cache.page import set_page_cache
from cms.models import Page
from cms.utils.conf import get_cms_setting
from cms.utils.page import get_page_template_from_request
from cms.utils.page_permissions import user_can_change_page, user_can_view_page
def render_page(request, page, current_language, slug):
"""
Renders a page
"""
context = {}
context['lang'] = current_language
context['current_page'] = page
context['has_change_permissions'] = user_can_change_page(request.user, page)
context['has_view_permissions'] = user_can_view_page(request.user, page)
if not context['has_view_permissions']:
return _handle_no_page(request)
template = get_page_template_from_request(request)
response = TemplateResponse(request, template, context)
response.add_post_render_callback(set_page_cache)
# Add headers for X Frame Options - this really should be changed upon moving to class based views
xframe_options = page.get_xframe_options()
# xframe_options can be None if there's no xframe information on the page
# (eg. a top-level page which has xframe options set to "inherit")
if xframe_options == Page.X_FRAME_OPTIONS_INHERIT or xframe_options is None:
# This is when we defer to django's own clickjacking handling
return response
    # We want to prevent django from setting this in its middleware
response.xframe_options_exempt = True
if xframe_options == Page.X_FRAME_OPTIONS_ALLOW:
# Do nothing, allowed is no header.
return response
elif xframe_options == Page.X_FRAME_OPTIONS_SAMEORIGIN:
response['X-Frame-Options'] = 'SAMEORIGIN'
elif xframe_options == Page.X_FRAME_OPTIONS_DENY:
response['X-Frame-Options'] = 'DENY'
return response
def render_object_structure(request, obj):
context = {
'object': obj,
'cms_toolbar': request.toolbar,
}
return render(request, 'cms/toolbar/structure.html', context)
def _handle_no_page(request):
try:
        # add a $ to the end of the url so it no longer matches the cms catch-all
resolve('%s$' % request.path)
except Resolver404 as e:
# raise a django http 404 page
exc = Http404(dict(path=request.path, tried=e.args[0]['tried']))
raise exc
raise Http404('CMS Page not found: %s' % request.path)
def _render_welcome_page(request):
context = {
'cms_version': __version__,
'cms_edit_on': get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'),
'django_debug': settings.DEBUG,
'next_url': reverse('pages-root'),
}
return TemplateResponse(request, "cms/welcome.html", context)
|
bsd-3-clause
| 3,639,318,368,674,407,400
| 35.725
| 102
| 0.687543
| false
| 3.742675
| false
| false
| false
|
rivasd/djPsych
|
djreceive/migrations/0019_singleaudiotrial.py
|
1
|
1328
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-01-04 19:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('djreceive', '0018_auto_20170104_1418'),
]
operations = [
migrations.CreateModel(
name='SingleAudioTrial',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('internal_node_id', models.CharField(max_length=24)),
('trial_index', models.IntegerField()),
('trial_type', models.CharField(max_length=32)),
('time_elapsed', models.IntegerField()),
('timeout', models.BooleanField(default=False)),
('extra_data', jsonfield.fields.JSONField(blank=True, null=True)),
('stimulus', models.CharField(max_length=128)),
('key_press', models.IntegerField()),
('rt', models.IntegerField()),
('run', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='djreceive.Run')),
],
options={
'abstract': False,
},
),
]
|
gpl-3.0
| -2,971,340,042,235,738,600
| 35.888889
| 114
| 0.565512
| false
| 4.311688
| false
| false
| false
|
lrocheWB/navitia
|
source/jormungandr/jormungandr/scenarios/helpers.py
|
1
|
8430
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from navitiacommon import response_pb2
from operator import attrgetter
def has_walking_first(journey):
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
return True
elif section.type == response_pb2.CROW_FLY \
and section.street_network.mode != response_pb2.Walking:
return False
elif section.type == response_pb2.STREET_NETWORK \
and section.street_network.mode != response_pb2.Walking:
return False
return True
def has_bike_first(journey):
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
return True
elif section.type == response_pb2.CROW_FLY \
and section.street_network.mode != response_pb2.Bike:
return False
elif section.type == response_pb2.STREET_NETWORK \
and section.street_network.mode != response_pb2.Bike:
return False
return True
def has_bss_first(journey):
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
return False
elif section.type == response_pb2.BSS_RENT:
return True
return False
def has_walking_last(journey):
has_pt = False
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
has_pt = True
elif has_pt \
and section.type == response_pb2.CROW_FLY \
and section.street_network.mode != response_pb2.Walking:
return False
elif has_pt \
and section.type == response_pb2.STREET_NETWORK \
and section.street_network.mode != response_pb2.Walking:
return False
    return has_pt  # we will not be here if there is another fallback mode used after the pt section
def has_bike_last(journey):
has_pt = False
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
has_pt = True
elif has_pt \
and section.type == response_pb2.CROW_FLY \
and section.street_network.mode != response_pb2.Bike:
return False
elif has_pt \
and section.type == response_pb2.STREET_NETWORK \
and section.street_network.mode != response_pb2.Bike:
return False
    return has_pt  # we will not be here if there is another fallback mode used after the pt section
def has_bss_last(journey):
has_pt = False
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
has_pt = True
elif has_pt and section.type == response_pb2.BSS_RENT:
return True
return False
def has_bss_first_and_walking_last(journey):
return has_bss_first(journey) and has_walking_last(journey)
def has_walking_first_and_bss_last(journey):
return has_walking_first(journey) and has_bss_last(journey)
def has_bss_first_and_bss_last(journey):
return has_bss_first(journey) and has_bss_last(journey)
def has_bike_first_and_walking_last(journey):
return has_bike_first(journey) and has_walking_last(journey)
def has_bike_first_and_bss_last(journey):
return has_bike_first(journey) and has_bss_last(journey)
def bike_duration(journey):
duration = 0
in_bss = False
for section in journey.sections:
if section.type == response_pb2.BSS_RENT:
in_bss = True
if section.type == response_pb2.BSS_PUT_BACK:
in_bss = False
if section.type in (response_pb2.STREET_NETWORK, response_pb2.CROW_FLY) \
and section.street_network.mode == response_pb2.Bike \
and not in_bss:
duration = duration + section.duration
return duration
def bss_duration(journey):
duration = 0
in_bss = False
for section in journey.sections:
if section.type == response_pb2.BSS_RENT:
in_bss = True
duration += section.duration
if section.type == response_pb2.BSS_PUT_BACK:
in_bss = False
duration += section.duration
if section.type in (response_pb2.STREET_NETWORK, response_pb2.CROW_FLY) \
and section.street_network.mode == response_pb2.Bike \
and in_bss:
duration = duration + section.duration
return duration
def car_duration(journey):
duration = 0
for section in journey.sections:
if section.type in (response_pb2.STREET_NETWORK, response_pb2.CROW_FLY) \
and section.street_network.mode == response_pb2.Car:
duration = duration + section.duration
return duration
def walking_duration(journey):
duration = 0
for section in journey.sections:
if section.type in (response_pb2.STREET_NETWORK, response_pb2.CROW_FLY) \
and section.street_network.mode == response_pb2.Walking:
duration = duration + section.duration
return duration
def pt_duration(journey):
duration = 0
for section in journey.sections:
if section.type == response_pb2.PUBLIC_TRANSPORT:
duration = duration + section.duration
return duration
def is_non_pt_bss(journey):
return journey.type == 'non_pt_bss'
def is_non_pt_walk(journey):
return journey.type == 'non_pt_walk'
def is_non_pt_bike(journey):
return journey.type == 'non_pt_bike'
max_duration_fallback_modes = {'walking': [response_pb2.Walking],
'bss': [response_pb2.Walking, response_pb2.Bss],
'bike': [response_pb2.Walking, response_pb2.Bss, response_pb2.Bike],
'car': [response_pb2.Walking, response_pb2.Bss, response_pb2.Bike, response_pb2.Car],
}
def filter_journeys_by_fallback_modes(journeys, fallback_modes):
section_is_fallback_or_pt = lambda section: section.type not in \
(response_pb2.STREET_NETWORK, response_pb2.CROW_FLY) \
or section.street_network.mode in fallback_modes
filter_journey = lambda journey: all(section_is_fallback_or_pt(section) for section in journey.sections) \
and journey.duration > 0
return filter(filter_journey, journeys)
def select_best_journey_by_time(journeys, clockwise, fallback_modes):
list_journeys = filter_journeys_by_fallback_modes(journeys, fallback_modes)
if not list_journeys:
return None
if clockwise:
return min(list_journeys, key=attrgetter('arrival_date_time'))
else:
return max(list_journeys, key=attrgetter('departure_date_time'))
def select_best_journey_by_duration(journeys, clockwise, fallback_modes):
list_journeys = filter_journeys_by_fallback_modes(journeys, fallback_modes)
if not list_journeys:
return None
return min(list_journeys, key=attrgetter('duration'))
fallback_mode_order = ['walking', 'bss', 'bike', 'car']
def fallback_mode_comparator(a, b):
return fallback_mode_order.index(a) - fallback_mode_order.index(b)
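# --- Editor's sketch (not part of the original module) --------------------------
# fallback_mode_comparator() ranks modes by their position in
# fallback_mode_order, so a Python 2 cmp-style sort puts walking first:
#
#   >>> sorted(['car', 'walking', 'bike'], cmp=fallback_mode_comparator)
#   ['walking', 'bike', 'car']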
|
agpl-3.0
| -456,811,872,450,253,060
| 37.318182
| 116
| 0.655753
| false
| 3.442221
| false
| false
| false
|
ruhan/django-silk-mongoengine
|
setup.py
|
1
|
1322
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme_file:
README = readme_file.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-silk',
version='0.5.2',
packages=['silk'],
include_package_data=True,
license='MIT License',
description='Silky smooth profiling for the Django Framework',
long_description=README,
url='http://www.mtford.co.uk/projects/silk/',
author='Michael Ford',
author_email='mtford@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires= [
'Django',
'Pygments',
'six',
'simplejson',
'python-dateutil',
'requests',
'sqlparse',
'Jinja2',
'autopep8',
'pytz'
]
)
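# --- Editor's sketch (not part of the original file) ----------------------------
# With this setup.py the package builds and installs in the usual way, e.g.:
#
#   python setup.py sdist
#   pip install dist/django-silk-0.5.2.tar.gz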
|
mit
| -4,198,657,876,189,128,000
| 28.377778
| 80
| 0.587746
| false
| 3.777143
| false
| false
| false
|
HaraldWeber/client
|
src/ladder/__init__.py
|
1
|
1144
|
from PyQt4 import QtCore
from PyQt4 import QtWebKit
import logging
import urllib
import util
logger = logging.getLogger(__name__)
class Ladder(QtCore.QObject):
def __init__(self, client, *args, **kwargs):
QtCore.QObject.__init__(self, *args, **kwargs)
logger.debug("Ladder tab instantiating.")
self.client = client
self.ui = QtWebKit.QWebView()
self.client.ladderTab.layout().addWidget(self.ui)
self.loaded = False
self.client.showLadder.connect(self.reloadView)
self.ui.loadFinished.connect(self.ui.show)
@QtCore.pyqtSlot()
def reloadView(self):
if (self.loaded):
return
self.loaded = True
self.ui.setVisible(False)
        # If a local theme CSS exists, skin the WebView with it
if util.themeurl("ladder/style.css"):
self.ui.settings().setUserStyleSheetUrl(util.themeurl("ladder/style.css"))
self.ui.setUrl(QtCore.QUrl("http://faforever.com/faf/leaderboards/read-leader.php?board=global&username=%s" % (self.client.login)))
|
gpl-3.0
| -2,618,006,111,668,638,000
| 27.6
| 139
| 0.615385
| false
| 3.838926
| false
| false
| false
|
anshengme/Angelina
|
apps/users/views.py
|
1
|
15715
|
import json
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import make_password
from django.core.exceptions import ObjectDoesNotExist  # raised when an ORM get() finds no row
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.shortcuts import render, HttpResponseRedirect, HttpResponse
from django.views.generic.base import View
from pure_pagination import Paginator, PageNotAnInteger
from courses.models import Course
from operation.models import UserCourse, UserFavorite, UserMessage
from organization.models import CourseOrg, Teacher
from utils.email_send import send_register_email
from utils.mixin_utils import LoginRequiredMixin
from .forms import LoginForm, RegisterForm, ForgetForm, ModifyPwdForm, UploadImageForm, UserInfoForm
from .models import UserProfile, EmailVerifyRecord, Banner
class CustomBackend(ModelBackend):
    """Custom auth backend that allows logging in with either username or email."""
    def authenticate(self, username=None, password=None, **kwargs):
        try:
            user = UserProfile.objects.get(Q(username=username) | Q(email=username))  # look the user up by username or email
            if user.check_password(password):  # return the user object if the password is correct
                return user
            else:  # on a wrong password, return None
                return None
        except Exception as e:
            return None
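# --- Editor's sketch (not part of the original module) --------------------------
# For CustomBackend to take effect it must be registered in the project settings.
# The dotted path below is an assumption based on this app's layout
# (apps/users/views.py); adjust it to the real module path.
#
#   AUTHENTICATION_BACKENDS = (
#       'users.views.CustomBackend',
#   )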
__all__ = [
'IndexView',
'LoginView',
'LogoutView',
'ActiveUserView',
'RegisterView',
'ForgetPwdView',
'ResetView',
'ModifyPwdView',
'UserInfoView',
'UploadImageView',
'UpdatePwdView',
'SendEmailCodeView',
'UpdateEmailView',
'MyCourseView',
'MyFavOrgVIew',
'MyFavTeacherVIew',
'MyFavCourseVIew',
'MyMessageVIew'
]
# Create your views here.
class IndexView(View):
    """Home page"""
    def get(self, request):
        all_banner = Banner.objects.all().order_by('index')  # carousel banners
        courses = Course.objects.filter(is_banner=False)[:6]  # courses
        banner_course = Course.objects.filter(is_banner=True)[:3]  # banner courses
        course_orgs = CourseOrg.objects.all()[:15]  # course organizations
return render(request, 'index.html', {
'all_banner': all_banner,
'courses': courses,
'banner_course': banner_course,
'course_orgs': course_orgs
})
class LoginView(View):
    def get(self, request):
        """Render the login page"""
        return render(request, 'login.html', {})
    def post(self, request):
        """Check whether the user can log in successfully"""
        login_form = LoginForm(request.POST)  # validate the submitted values with the form
        if login_form.is_valid():  # validation passed?
            user_name = request.POST.get('username', '')  # get the username
            pass_word = request.POST.get('password', '')  # get the password
            user = authenticate(username=user_name, password=pass_word)  # check username and password
            if user is not None:  # the credentials match
                if user.is_active:  # the account is active
                    login(request, user)  # write the session and cookie into the request
                    return HttpResponseRedirect(reverse('index'))  # back to the home page
                else:  # the account is not activated
                    return render(request, 'login.html', {'msg': 'This account has not been activated!'})
            else:  # wrong username or password
                return render(request, 'login.html', {'msg': 'Incorrect username or password!'})
        else:  # form validation failed; pass the errors to the frontend
            return render(request, 'login.html', {'login_form': login_form})
class LogoutView(View):
def get(self, request):
logout(request)
return HttpResponseRedirect(reverse('index'))
class RegisterView(View):
    """User registration"""
    def get(self, request):
        register_form = RegisterForm()  # get the captcha form
        return render(request, 'register.html', {'register_form': register_form})
    def post(self, request):
        register_form = RegisterForm(request.POST)  # form validation
        if register_form.is_valid():  # validation passed?
            user_name = request.POST.get('email', '')  # get the email the user registered with
            try:
                UserProfile.objects.get(email=user_name)  # the user already exists
                return render(request, 'register.html', {'msg': 'User already exists!', 'register_form': register_form})
            except ObjectDoesNotExist as e:
                pass_word = request.POST.get('password', '')  # get the password
                # save the user's information
                user_profile = UserProfile()
                user_profile.username = user_name
                user_profile.email = user_name
                user_profile.password = make_password(pass_word)  # hash the password with make_password before saving
                user_profile.is_active = False  # users are inactive by default
                user_profile.save()
                # write a welcome-on-registration message
                user_message = UserMessage()
                user_message.user = user_profile.id
                user_message.message = "Welcome to MuXue Online"
                user_message.save()
                send_register_email(email=user_name, send_type='register')  # send the registration email
                return HttpResponseRedirect(reverse('login'))  # redirect to the login page
        else:
            return render(request, 'register.html', {'register_form': register_form})
class ActiveUserView(View):
    """Account activation"""
    def get(self, request, active_code):
        """
        :param active_code: the activation code string
        """
        try:
            all_records = EmailVerifyRecord.objects.get(code=active_code)  # look up this code
        except Exception as e:
            # if the code does not exist, render an error page
            return render(request, 'active_fail.html')
        if all_records:
            email = all_records.email  # get the user's email
            user = UserProfile.objects.get(email=email)  # get that user
            user.is_active = True  # mark the user as active
            user.save()  # save
            all_records.delete()  # delete the activation code
        else:
            # the code does not exist
            return render(request, 'active_fail.html')
        return HttpResponseRedirect(reverse('login'))  # redirect to login after activation
class ForgetPwdView(View):
    """Forgotten-password reset request"""
    def get(self, request):
        forget_form = ForgetForm()  # get the password-reset form
        return render(request, 'forgetpwd.html', {'forget_form': forget_form})
    def post(self, request):
        forget_form = ForgetForm(request.POST)
        if forget_form.is_valid():  # form validation passed
            email = request.POST.get('email', '')  # get the user's email
            send_register_email(email=email, send_type='forget')  # send the password-reset link
            return render(request, 'send_success.html')
        else:
            return render(request, 'forgetpwd.html', {'forget_form': forget_form})
class ResetView(View):
    """Password reset"""
    def get(self, request, reset_code):
        try:
            all_records = EmailVerifyRecord.objects.get(code=reset_code)  # fetch the verification code
        except Exception as e:
            return render(request, 'active_fail.html')
        if all_records:
            email = all_records.email  # get the email
            all_records.delete()  # delete the verification code
            return render(request, 'password_reset.html', {'email': email})
        else:
            return render(request, 'active_fail.html')
class ModifyPwdView(View):
    """Change the user's password"""
    def post(self, request):
        modify_form = ModifyPwdForm(request.POST)  # validate the parameters
        if modify_form.is_valid():  # validation passed?
            # fetch the user's passwords and email
            pwd1 = request.POST.get('password1', '')
            pwd2 = request.POST.get('password2', '')
            email = request.POST.get('email', '')
            if pwd1 != pwd2:  # do the two passwords match?
                return render(request, 'password_reset.html',
                              {'email': email, 'msg': 'Passwords do not match!'})
            user = UserProfile.objects.get(email=email)  # get the user
            user.password = make_password(pwd2)  # update the password
            user.save()  # save to the database
            return HttpResponseRedirect(reverse('login'))  # redirect to the login page
else:
email = request.POST.get('email', None)
return render(request, 'password_reset.html',
{'email': email, 'modify_form': modify_form})
class UserInfoView(LoginRequiredMixin, View):
    """User profile information"""
    def get(self, request):
        return render(request, 'usercenter-info.html')
    def post(self, request):
        user_info_form = UserInfoForm(request.POST, instance=request.user)  # save the user info via the form
        if user_info_form.is_valid():  # validation passed?
            user_info_form.save()  # save to the database
            return HttpResponse('{"status":"success"}', content_type='application/json')
        else:
            return HttpResponse(json.dumps(user_info_form.errors), content_type='application/json')
class UploadImageView(LoginRequiredMixin, View):
    """Avatar upload"""
    def post(self, request):
        # file forms take both POST and FILES; the instance argument binds the form to a UserProfile object
        image_form = UploadImageForm(request.POST, request.FILES, instance=request.user)
        if image_form.is_valid():  # validation passed?
            request.user.save()  # save to the database
            return HttpResponse('{"status":"success"}', content_type='application/json')
        return HttpResponse('{"status":"fail"}', content_type='application/json')
class UpdatePwdView(View):
    """Change the password from the user center"""
    def post(self, request):
        modify_form = ModifyPwdForm(request.POST)  # validate the passwords
        if modify_form.is_valid():  # validation passed?
            pwd1 = request.POST.get('password1', '')
            pwd2 = request.POST.get('password2', '')
            if pwd1 != pwd2:  # do the passwords match?
                return HttpResponse('{"status":"fail","msg":"Passwords do not match"}', content_type='application/json')
            user = request.user  # get the user
            user.password = make_password(pwd2)  # update the password
            user.save()  # save to the database
            return HttpResponse('{"status":"success","msg":"Password changed successfully"}', content_type='application/json')
        else:
            return HttpResponse(json.dumps(modify_form.errors), content_type='application/json')
class SendEmailCodeView(LoginRequiredMixin, View):
    """Send an email verification code"""
    def get(self, request):
        email = request.GET.get('email', '')  # get the email
        if UserProfile.objects.filter(email=email):  # does the email already exist?
            return HttpResponse('{"email":"Email already exists"}', content_type='application/json')
        send_register_email(email, 'update_email')  # send the email
        return HttpResponse('{"status":"success"}', content_type='application/json')
class UpdateEmailView(LoginRequiredMixin, View):
    """Change the account email"""
    def post(self, request):
        email = request.POST.get('email', '')  # get the email
        code = request.POST.get('code', '')  # get the verification code
        existed_records = EmailVerifyRecord.objects.filter(email=email, code=code,
                                                           send_type='update_email')  # can the email be matched to a verification code?
        if existed_records:  # it can
            user = request.user  # get the user
            user.email = email  # change the email
            user.save()  # save to the database
            return HttpResponse('{"status":"success"}', content_type='application/json')
        else:
            return HttpResponse('{"email":"Invalid verification code"}', content_type='application/json')
class MyCourseView(LoginRequiredMixin, View):
    """Courses I am taking"""
    def get(self, request):
        user_courses = UserCourse.objects.filter(user=request.user)  # all of the user's courses
return render(request, 'usercenter-mycourse.html', {
'user_courses': user_courses
})
class MyFavOrgVIew(LoginRequiredMixin, View):
    """Course organizations I have favorited"""
    def get(self, request):
        org_list = []  # organization list
        fav_orgs = UserFavorite.objects.filter(user=request.user, fav_type=2)  # organizations favorited by the current user
        for fav_org in fav_orgs:
            org_id = fav_org.fav_id  # get the organization ID
            org = CourseOrg.objects.get(id=org_id)  # get that organization
            org_list.append(org)  # append it to the list
return render(request, 'usercenter-fav-org.html', {
'org_list': org_list
})
class MyFavTeacherVIew(LoginRequiredMixin, View):
    """Teachers I have favorited"""
def get(self, request):
teacher_list = []
fav_teacher = UserFavorite.objects.filter(user=request.user, fav_type=3)
for teacher in fav_teacher:
teacher_id = teacher.fav_id
teacher = Teacher.objects.get(id=teacher_id)
teacher_list.append(teacher)
return render(request, 'usercenter-fav-teacher.html', {
'teacher_list': teacher_list
})
class MyFavCourseVIew(LoginRequiredMixin, View):
    """Courses I have favorited"""
def get(self, request):
course_list = []
fav_course = UserFavorite.objects.filter(user=request.user, fav_type=1)
for course in fav_course:
course_id = course.fav_id
course = Course.objects.get(id=course_id)
course_list.append(course)
return render(request, 'usercenter-fav-course.html', {
'course_list': course_list
})
class MyMessageVIew(LoginRequiredMixin, View):
def get(self, request):
        all_message = UserMessage.objects.filter(user=request.user.id)  # all of the user's messages
        all_unread_message = UserMessage.objects.filter(user=request.user.id, has_read=False)  # all unread messages
        for unread_message in all_unread_message:  # clear the unread flags once the user opens the message page
unread_message.has_read = True
unread_message.save()
try:
page = request.GET.get('page', 1)
except PageNotAnInteger:
page = 1
        p = Paginator(all_message, 10, request=request)  # 10 messages per page
        messages = p.page(page)  # messages for the requested page
return render(request, 'usercenter-message.html', {
'messages': messages
})
def page_not_found(request):
    # global 404 handler
from django.shortcuts import render_to_response
response = render_to_response('404.html', {})
response.status_code = 404
return response
def forbidden(request):
    # global 403 handler
from django.shortcuts import render_to_response
response = render_to_response('403.html', {})
response.status_code = 403
return response
def page_error(request):
    # global 500 handler
from django.shortcuts import render_to_response
response = render_to_response('500.html', {})
response.status_code = 500
return response
|
mit
| -7,679,907,315,270,004,000
| 35.074359
| 108
| 0.605942
| false
| 3.053831
| false
| false
| false
|
frobnitzem/slack
|
gen/plan.py
|
1
|
1926
|
# Plan a parallel copy using n workers into output shape s.
# The algorithm requires prod(s) to be a multiple of n and
# works by matching factors from n with those of s,
# with preference to the right (for R) or left (for L).
# This means as many workers as possible for the most sig. dimensions,
# each doing as many copies as possible on the least sig. ones.
#
# The output is a pair of shapes, with the same length as s:
# index_shape -- outer loops, used to decode the worker starting index
# copy_shape -- shape copied by each worker
#
# prod(index_shape) = n
# index_shape * copy_shape = s
prod = lambda x: reduce(lambda a,b: a*b, x, 1)
def divide_work(s, n, right_side=True):
sz = prod(s)
if n > sz:
raise ValueError, "Have too many workers."
if sz % n != 0:
raise ValueError, "Workers don't evenly divide number of copies."
    f = factor(n) # map: prime factor -> multiplicity
index = [1 for i in s]
copy = [i for i in s]
pri = range(len(s))
if right_side == True:
pri = reversed(pri)
for i in pri:
for x in factors(s[i]):
try:
if f[x] > 0: # parallelize this one
copy[i] /= x # fewer copies
index[i] *= x # more workers
f[x] -= 1
except KeyError:
pass
if any(v != 0 for k,v in f.iteritems()):
raise ValueError, "Internal Error! Leftover workers (factors = %s)"%(str(f))
return index, copy
def factors(n):
j = 2
while j <= n/2:
if n%j == 0:
yield j
n /= j
else:
j += 1
yield n
def factor(n):
f = {}
for x in factors(n):
try:
f[x] += 1
except KeyError:
f[x] = 1
return f
def test():
for n in range(1, 10):
print n, [i for i in factors(n)]
    print divide_work((4,4,9), 2*3)
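# --- Editor's sketch (not part of the original module) --------------------------
# A worked example of divide_work, traced by hand against the code above: with
# s = (4, 4, 9) and n = 6 workers, prod(s) = 144 and 144 % 6 == 0. Matching the
# factors of 6 right-to-left assigns 3 workers to the last axis and 2 to the
# middle one:
#
#   >>> divide_work((4, 4, 9), 6)
#   ([1, 2, 3], [4, 2, 3])
#
# prod(index) == 6 workers, and elementwise index * copy recovers (4, 4, 9).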
|
gpl-3.0
| -4,937,704,351,101,991,000
| 26.913043
| 84
| 0.548806
| false
| 3.47027
| false
| false
| false
|
agdsn/hades
|
src/hades/common/cli.py
|
1
|
5189
|
"""Functionality for the Hades command-line utilities in :mod:`hades.bin`."""
import argparse
import logging.handlers
import os
import sys
import textwrap
from gettext import gettext as _
from hades import constants
class ArgumentParser(argparse.ArgumentParser):
    """ArgumentParser subclass that exits with the :data:`os.EX_USAGE` exit code
    if parsing fails."""
def error(self, message):
self.print_usage(sys.stderr)
args = {'prog': self.prog, 'message': message}
self.exit(os.EX_USAGE, _('%(prog)s: error: %(message)s\n') % args)
class VersionAction(argparse.Action):
# noinspection PyShadowingBuiltins
def __init__(self,
option_strings,
version_info=None,
dest=argparse.SUPPRESS,
default=argparse.SUPPRESS,
help="show program's version number, configure options, copyright notice and exit"):
super(VersionAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help)
self.version_info = version_info
def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace, values, option_string=None):
version_info = self.version_info
print(version_info)
parser.exit()
parser = ArgumentParser(add_help=False)
parser.add_argument('-c', '--config', default=None, help="Path to config file")
parser.add_argument('-v', '--verbose', dest='verbosity',
default=None, action='count', help='Be more verbose')
parser.add_argument('-q', '--quiet', dest='verbosity',
action='store_const', const=0, help='Be quiet')
parser.add_argument(
'-V', '--version', action=VersionAction, version_info=textwrap.dedent(
"""\
{PACKAGE_NAME} version {PACKAGE_VERSION}
Configure Options: {CONFIGURE_ARGS}
Copyright (c) 2015-2020 {PACKAGE_AUTHOR}
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
).rstrip().format(
PACKAGE_NAME=constants.PACKAGE_NAME,
PACKAGE_VERSION=constants.PACKAGE_VERSION,
CONFIGURE_ARGS=constants.CONFIGURE_ARGS,
PACKAGE_AUTHOR=constants.PACKAGE_AUTHOR,
)
)
parser.add_argument('--syslog', nargs='?', const='/dev/log',
help="Log to syslog instead of stderr. A path to the log "
"socket may be provided, defaults to /dev/log "
"otherwise")
VERBOSITY_LEVELS = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
DEFAULT_VERBOSITY = 1
def setup_cli_logging(program, args):
"""
Setup logging for CLI applications, that do not configure logging
themselves.
    Set the log level using command-line options parsed with :data:`parser`, the
    :std:envvar:`HADES_VERBOSITY` environment variable, or finally the default
    value :data:`DEFAULT_VERBOSITY`.
Flask and Celery are quite opinionated about logging, so this function
should probably not be called in their launchers.
:param program: The name of the program
:param args: The parsed arguments of the program with :data:`parser` or a
subparser.
"""
reset_cli_logging()
if args.verbosity is None:
verbosity = os.environ.get('HADES_VERBOSITY', DEFAULT_VERBOSITY)
try:
verbosity = int(verbosity)
except ValueError:
verbosity = DEFAULT_VERBOSITY
else:
verbosity = args.verbosity
effective_verbosity = max(0, min(len(VERBOSITY_LEVELS) - 1, verbosity))
level = VERBOSITY_LEVELS[effective_verbosity]
if level <= logging.DEBUG:
fmt = ("[%(asctime)s] %(levelname)s in %(filename)s:%(lineno)d: "
"%(message)s")
else:
fmt = "%(message)s"
stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.name = "stderr"
if args.syslog is not None:
# Also log critical messages to stderr
stderr_handler.setLevel(logging.CRITICAL)
syslog_handler = logging.handlers.SysLogHandler(address=args.syslog)
syslog_handler.name = "syslog"
handlers = [syslog_handler, stderr_handler]
else:
handlers = [stderr_handler]
logging.basicConfig(level=level, style='%', format=fmt, handlers=handlers)
def reset_cli_logging():
"""Reset root logger configuration"""
root = logging.root
for h in root.handlers:
try:
h.acquire()
h.flush()
h.close()
except (OSError, ValueError):
pass
finally:
h.release()
root.removeHandler(h)
for f in root.filters:
root.removeFilter(f)
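# --- Editor's sketch (not part of the original module) --------------------------
# A minimal launcher for a hypothetical tool built on the shared `parser`,
# showing the intended pattern: parse with a child parser, then hand the args to
# setup_cli_logging(). The program name 'my-tool' is made up.
def _example_main(argv=None):
    child = ArgumentParser(parents=[parser], prog='my-tool')
    args = child.parse_args(argv)
    setup_cli_logging(child.prog, args)
    logging.getLogger(__name__).info('verbosity: %s', args.verbosity)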
|
mit
| -2,648,820,304,603,994,000
| 36.601449
| 115
| 0.63943
| false
| 4.171222
| true
| false
| false
|
sunlightlabs/tcamp
|
tcamp/sked/migrations/0012_auto__add_field_location_has_sessions.py
|
1
|
9931
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Location.has_sessions'
db.add_column(u'sked_location', 'has_sessions',
self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Location.has_sessions'
db.delete_column(u'sked_location', 'has_sessions')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sked.event': {
'Meta': {'ordering': "('-start_date',)", 'object_name': 'Event'},
'_description_rendered': ('django.db.models.fields.TextField', [], {}),
'_overview_rendered': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sked_events'", 'to': u"orm['auth.User']"}),
'description': ('markupfield.fields.MarkupField', [], {'rendered_field': 'True', 'blank': 'True'}),
'description_markup_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '30', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'event'", 'max_length': '64'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'overview': ('markupfield.fields.MarkupField', [], {'rendered_field': 'True', 'blank': 'True'}),
'overview_markup_type': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '30', 'blank': 'True'}),
'registration_is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registration_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'session_label': ('django.db.models.fields.CharField', [], {'default': "'session'", 'max_length': '64'}),
'session_length': ('timedelta.fields.TimedeltaField', [], {}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'sked.location': {
'Meta': {'ordering': "('-event__start_date', 'name')", 'object_name': 'Location'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'locations'", 'to': u"orm['sked.Event']"}),
'has_sessions': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_official': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'sked.session': {
'Meta': {'ordering': "('-event__start_date', 'start_time')", 'unique_together': "(('event', 'slug'),)", 'object_name': 'Session'},
'_description_rendered': ('django.db.models.fields.TextField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('markupfield.fields.MarkupField', [], {'rendered_field': 'True', 'blank': 'True'}),
'description_markup_type': ('django.db.models.fields.CharField', [], {'default': "'markdown'", 'max_length': '30', 'blank': 'True'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessions'", 'to': u"orm['sked.Event']"}),
'extra_data': ('jsonfield.fields.JSONField', [], {'default': "'{}'", 'blank': 'True'}),
'has_notes': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sessions'", 'null': 'True', 'to': u"orm['sked.Location']"}),
'published_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'approved_sked_sessions'", 'null': 'True', 'to': u"orm['auth.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'speakers': ('jsonfield.fields.JSONField', [], {'default': "'[]'", 'db_index': 'True', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['sked']
|
bsd-3-clause
| -2,078,722,091,345,254,700
| 77.204724
| 187
| 0.551908
| false
| 3.633736
| false
| false
| false
|
tangowhisky37/RaspiPythonProjects
|
Write_To_LCD_Screen/RPi_I2C_driver.py
|
1
|
4851
|
# -*- coding: utf-8 -*-
"""
Compiled, mashed and generally mutilated 2014-2015 by Denis Pleic
Made available under GNU GENERAL PUBLIC LICENSE
# Modified Python I2C library for Raspberry Pi
# as found on http://www.recantha.co.uk/blog/?p=4849
# Joined existing 'i2c_lib.py' and 'lcddriver.py' into a single library
# added bits and pieces from various sources
# By DenisFromHR (Denis Pleic)
# 2015-02-10, ver 0.1
"""
#
#
import smbus
from time import *
class i2c_device:
def __init__(self, addr, port=1):
self.addr = addr
self.bus = smbus.SMBus(port)
# Write a single command
def write_cmd(self, cmd):
self.bus.write_byte(self.addr, cmd)
sleep(0.0001)
# Write a command and argument
def write_cmd_arg(self, cmd, data):
self.bus.write_byte_data(self.addr, cmd, data)
sleep(0.0001)
# Write a block of data
def write_block_data(self, cmd, data):
self.bus.write_block_data(self.addr, cmd, data)
sleep(0.0001)
# Read a single byte
def read(self):
return self.bus.read_byte(self.addr)
# Read
def read_data(self, cmd):
return self.bus.read_byte_data(self.addr, cmd)
# Read a block of data
def read_block_data(self, cmd):
return self.bus.read_block_data(self.addr, cmd)
# LCD Address
#ADDRESS = 0x27
ADDRESS = 0x3f
# commands
LCD_CLEARDISPLAY = 0x01
LCD_RETURNHOME = 0x02
LCD_ENTRYMODESET = 0x04
LCD_DISPLAYCONTROL = 0x08
LCD_CURSORSHIFT = 0x10
LCD_FUNCTIONSET = 0x20
LCD_SETCGRAMADDR = 0x40
LCD_SETDDRAMADDR = 0x80
# flags for display entry mode
LCD_ENTRYRIGHT = 0x00
LCD_ENTRYLEFT = 0x02
LCD_ENTRYSHIFTINCREMENT = 0x01
LCD_ENTRYSHIFTDECREMENT = 0x00
# flags for display on/off control
LCD_DISPLAYON = 0x04
LCD_DISPLAYOFF = 0x00
LCD_CURSORON = 0x02
LCD_CURSOROFF = 0x00
LCD_BLINKON = 0x01
LCD_BLINKOFF = 0x00
# flags for display/cursor shift
LCD_DISPLAYMOVE = 0x08
LCD_CURSORMOVE = 0x00
LCD_MOVERIGHT = 0x04
LCD_MOVELEFT = 0x00
# flags for function set
LCD_8BITMODE = 0x10
LCD_4BITMODE = 0x00
LCD_2LINE = 0x08
LCD_1LINE = 0x00
LCD_5x10DOTS = 0x04
LCD_5x8DOTS = 0x00
# flags for backlight control
LCD_BACKLIGHT = 0x08
LCD_NOBACKLIGHT = 0x00
En = 0b00000100 # Enable bit
Rw = 0b00000010 # Read/Write bit
Rs = 0b00000001 # Register select bit
class lcd:
    # initializes objects and lcd
def __init__(self):
self.lcd_device = i2c_device(ADDRESS)
self.lcd_write(0x03)
self.lcd_write(0x03)
self.lcd_write(0x03)
self.lcd_write(0x02)
self.lcd_write(LCD_FUNCTIONSET | LCD_2LINE | LCD_5x8DOTS | LCD_4BITMODE)
self.lcd_write(LCD_DISPLAYCONTROL | LCD_DISPLAYON)
self.lcd_write(LCD_CLEARDISPLAY)
self.lcd_write(LCD_ENTRYMODESET | LCD_ENTRYLEFT)
sleep(0.2)
# clocks EN to latch command
def lcd_strobe(self, data):
self.lcd_device.write_cmd(data | En | LCD_BACKLIGHT)
sleep(.0005)
self.lcd_device.write_cmd(((data & ~En) | LCD_BACKLIGHT))
sleep(.0001)
def lcd_write_four_bits(self, data):
self.lcd_device.write_cmd(data | LCD_BACKLIGHT)
self.lcd_strobe(data)
# write a command to lcd
def lcd_write(self, cmd, mode=0):
self.lcd_write_four_bits(mode | (cmd & 0xF0))
self.lcd_write_four_bits(mode | ((cmd << 4) & 0xF0))
    # write a character to lcd (or character rom); 0x09: backlight | RS=DR
# works!
def lcd_write_char(self, charvalue, mode=1):
self.lcd_write_four_bits(mode | (charvalue & 0xF0))
self.lcd_write_four_bits(mode | ((charvalue << 4) & 0xF0))
# put string function
def lcd_display_string(self, string, line):
if line == 1:
self.lcd_write(0x80)
if line == 2:
self.lcd_write(0xC0)
if line == 3:
self.lcd_write(0x94)
if line == 4:
self.lcd_write(0xD4)
for char in string:
self.lcd_write(ord(char), Rs)
# clear lcd and set to home
def lcd_clear(self):
self.lcd_write(LCD_CLEARDISPLAY)
self.lcd_write(LCD_RETURNHOME)
    # switch the backlight on/off (on = lcd.backlight(1); off = lcd.backlight(0))
def backlight(self, state): # for state, 1 = on, 0 = off
if state == 1:
self.lcd_device.write_cmd(LCD_BACKLIGHT)
elif state == 0:
self.lcd_device.write_cmd(LCD_NOBACKLIGHT)
# add custom characters (0 - 7)
def lcd_load_custom_chars(self, fontdata):
        self.lcd_write(0x40)
for char in fontdata:
for line in char:
self.lcd_write_char(line)
# define precise positioning (addition from the forum)
def lcd_display_string_pos(self, string, line, pos):
if line == 1:
pos_new = pos
elif line == 2:
pos_new = 0x40 + pos
elif line == 3:
pos_new = 0x14 + pos
elif line == 4:
pos_new = 0x54 + pos
self.lcd_write(0x80 + pos_new)
for char in string:
self.lcd_write(ord(char), Rs)
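# --- Editor's sketch (not part of the original module) --------------------------
# Minimal usage of the driver above, assuming the I2C backpack really answers at
# ADDRESS (0x3f here; many boards use 0x27) and that smbus is available:
def _example():
    display = lcd()
    display.lcd_display_string('Hello, world!', 1)  # write to line 1
    sleep(2)
    display.lcd_clear()
    display.backlight(0)  # backlight off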
|
gpl-3.0
| 9,119,408,727,036,929,000
| 24.803191
| 78
| 0.652031
| false
| 2.85689
| false
| false
| false
|
santoshsahoo/personfinder
|
app/admin_review.py
|
1
|
5813
|
#!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from google.appengine.ext import db
from google.appengine.api import users
import const
import model
import utils
NOTES_PER_PAGE = 50
STATUS_CODES = {
None: 'u',
'': 'u',
'information_sought': 's',
'believed_alive': 'a',
'believed_missing': 'm',
'believed_dead': 'd',
'is_note_author': 'i',
}
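# Editor's sketch (not part of the original module): STATUS_CODES compresses
# each note's status to one letter; Handler.get() below joins them into a
# per-person summary, upper-casing the note under review. For example:
#
#   >>> ''.join(STATUS_CODES[s] for s in ['believed_alive', '', 'believed_dead'])
#   'aud'
#
# becomes 'aUd' when the middle note is the one being reviewed.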
class Handler(utils.BaseHandler):
def get(self):
if not self.is_current_user_authorized():
return self.redirect(users.create_login_url('/admin/review'))
#
# Make the navigation links.
status = self.request.get('status') or 'all'
source = self.request.get('source') or 'all'
status_nav_html = ''
for option in [
'all', 'unspecified', 'information_sought', 'is_note_author',
'believed_alive', 'believed_missing', 'believed_dead']:
if option == status:
status_nav_html += '<b>%s</b> ' % option
else:
status_nav_html += '<a href="%s">%s</a> ' % (
self.get_url('/admin/review', status=option, source=source),
option)
source_nav_html = ''
source_options = ['all', '%s.%s' % (self.repo, const.HOME_DOMAIN)]
for auth_key in model.Authorization.all().filter('repo =', self.repo):
if auth_key.domain_write_permission:
source_options.append(auth_key.domain_write_permission)
for option in source_options:
if option == source:
source_nav_html += '<b>%s</b> ' % option
else:
source_nav_html += '<a href="%s">%s</a> ' % (
self.get_url('/admin/review', status=status, source=option),
option)
#
# Construct the query for notes.
query = model.Note.all_in_repo(self.repo
).filter('reviewed =', False
).filter('hidden =', False)
if status == 'unspecified':
query.filter('status =', '')
elif status != 'all':
query.filter('status =', status)
if source != 'all':
query.filter('person_record_id >=', '%s/' % source)
query.filter('person_record_id <', '%s0' % source)
# TODO(ryok): we really want to order by entry_date, but GAE
# restriction applies here, and we can not use two different
# properties for comparison and ordering. The proper solution seems
# to add a property source_domain to Note.
query.order('-person_record_id')
else:
query.order('-entry_date')
skip = self.params.skip or 0
notes = query.fetch(NOTES_PER_PAGE + 1, skip)
for note in notes[:NOTES_PER_PAGE]:
person = model.Person.get(self.repo, note.person_record_id)
if person:
# Copy in the fields of the associated Person.
for name in person.properties():
setattr(note, 'person_' + name, getattr(person, name))
# Get the statuses of the other notes on this Person.
status_codes = ''
for other_note in person.get_notes():
code = STATUS_CODES[other_note.status]
if other_note.note_record_id == note.note_record_id:
code = code.upper()
status_codes += code
note.person_status_codes = status_codes
if len(notes) > NOTES_PER_PAGE:
notes = notes[:NOTES_PER_PAGE]
next_skip = skip + NOTES_PER_PAGE
next_url = self.get_url(
'/admin/review', skip=str(next_skip),
status=status, source=source)
else:
next_url = None
return self.render(
'admin_review.html',
notes=notes,
status_nav_html=status_nav_html,
source_nav_html=source_nav_html,
next_url=next_url,
first=skip + 1,
last=skip + len(notes[:NOTES_PER_PAGE]))
def post(self):
if not self.is_current_user_authorized():
return self.redirect(users.create_login_url('/admin/review'))
notes = []
for name, value in self.request.params.items():
if name.startswith('note.'):
note = model.Note.get(self.repo, name[5:])
if note:
if value in ['accept', 'flag']:
note.reviewed = True
if value == 'flag':
note.hidden = True
notes.append(note)
db.put(notes)
self.redirect('/admin/review',
status=self.params.status,
source=self.params.source)
def is_current_user_authorized(self):
if users.is_current_user_admin(): # admins can always review
return True
domain = self.config.authorized_reviewer_domain
if domain: # also allow any user from the configured domain
user = users.get_current_user()
return user and user.email().endswith('@' + domain)
|
apache-2.0
| 6,470,416,251,959,256,000
| 37.243421
| 80
| 0.54963
| false
| 4.070728
| false
| false
| false
|
ximion/Clementine-LibDanceTag
|
data/pythonlibs/uic/properties.py
|
1
|
14787
|
import logging
import sys
from uic.exceptions import UnsupportedPropertyError
from uic.icon_cache import IconCache
if sys.hexversion >= 0x03000000:
from uic.port_v3.ascii_upper import ascii_upper
else:
from uic.port_v2.ascii_upper import ascii_upper
logger = logging.getLogger(__name__)
DEBUG = logger.debug
QtCore = None
QtGui = None
def int_list(prop):
return [int(child.text) for child in prop]
def float_list(prop):
return [float(child.text) for child in prop]
bool_ = lambda v: v == "true"
def needsWidget(func):
func.needsWidget = True
return func
class Properties(object):
def __init__(self, factory, QtCore_mod, QtGui_mod):
global QtGui, QtCore
QtGui = QtGui_mod
QtCore = QtCore_mod
self.factory = factory
self.reset()
def reset(self):
self.buddies = []
self.delayed_props = []
self.icon_cache = IconCache(self.factory, QtGui)
def _pyEnumMember(self, cpp_name):
try:
prefix, membername = cpp_name.split("::")
DEBUG(membername)
if prefix == "Qt":
return getattr(QtCore.Qt, membername)
else:
return getattr(getattr(QtGui, prefix), membername)
except ValueError:
pass
try:
return getattr(QtCore.Qt, cpp_name)
except AttributeError:
# There seems to be a bug where this can succeed when it shouldn't.
# If so it will be picked up when the generated code is run.
return getattr(getattr(QtGui, self.wclass), cpp_name)
def _set(self, prop):
expr = [self._pyEnumMember(v) for v in prop.text.split('|')]
value = expr[0]
for v in expr[1:]:
value |= v
return value
def _enum(self, prop):
return self._pyEnumMember(prop.text)
def _number(self, prop):
return int(prop.text)
_uInt = _longLong = _uLongLong = _number
def _double(self, prop):
return float(prop.text)
def _bool(self, prop):
return prop.text == 'true'
def _stringlist(self, prop):
return [self._string(p, notr='true') for p in prop]
def _string(self, prop, notr=None):
if prop.get('notr', notr) == 'true':
return self._cstring(prop)
if prop.text is None:
return ""
return QtGui.QApplication.translate(self.uiname, prop.text, None,
QtGui.QApplication.UnicodeUTF8)
_char = _string
def _cstring(self, prop):
return str(prop.text)
def _color(self, prop):
args = int_list(prop)
# Handle the optional alpha component.
alpha = int(prop.get("alpha", "255"))
if alpha != 255:
args.append(alpha)
return QtGui.QColor(*args)
def _point(self, prop):
return QtCore.QPoint(*int_list(prop))
def _pointf(self, prop):
return QtCore.QPointF(*float_list(prop))
def _rect(self, prop):
return QtCore.QRect(*int_list(prop))
def _rectf(self, prop):
return QtCore.QRectF(*float_list(prop))
def _size(self, prop):
return QtCore.QSize(*int_list(prop))
def _sizef(self, prop):
return QtCore.QSizeF(*float_list(prop))
def _pixmap(self, prop):
if prop.text:
return QtGui.QPixmap(prop.text.replace("\\", "\\\\"))
# Don't bother to set the property if the pixmap is empty.
return None
def _iconset(self, prop):
return self.icon_cache.get_icon(prop)
def _url(self, prop):
return QtCore.QUrl(prop[0].text)
def _locale(self, prop):
lang = getattr(QtCore.QLocale, prop.attrib['language'])
country = getattr(QtCore.QLocale, prop.attrib['country'])
return QtCore.QLocale(lang, country)
def _cursor(self, prop):
return QtGui.QCursor(QtCore.Qt.CursorShape(int(prop.text)))
def _date(self, prop):
return QtCore.QDate(*int_list(prop))
def _datetime(self, prop):
args = int_list(prop)
return QtCore.QDateTime(QtCore.QDate(*args[-3:]), QtCore.QTime(*args[:-3]))
def _time(self, prop):
return QtCore.QTime(*int_list(prop))
def _gradient(self, prop):
name = 'gradient'
# Create the specific gradient.
gtype = prop.get('type', '')
if gtype == 'LinearGradient':
startx = float(prop.get('startx'))
starty = float(prop.get('starty'))
endx = float(prop.get('endx'))
endy = float(prop.get('endy'))
gradient = self.factory.createQObject('QLinearGradient', name,
(startx, starty, endx, endy), is_attribute=False)
elif gtype == 'ConicalGradient':
centralx = float(prop.get('centralx'))
centraly = float(prop.get('centraly'))
angle = float(prop.get('angle'))
gradient = self.factory.createQObject('QConicalGradient', name,
(centralx, centraly, angle), is_attribute=False)
elif gtype == 'RadialGradient':
centralx = float(prop.get('centralx'))
centraly = float(prop.get('centraly'))
radius = float(prop.get('radius'))
focalx = float(prop.get('focalx'))
focaly = float(prop.get('focaly'))
gradient = self.factory.createQObject('QRadialGradient', name,
(centralx, centraly, radius, focalx, focaly),
is_attribute=False)
else:
raise UnsupportedPropertyError(prop.tag)
# Set the common values.
spread = prop.get('spread')
if spread:
gradient.setSpread(getattr(QtGui.QGradient, spread))
cmode = prop.get('coordinatemode')
if cmode:
gradient.setCoordinateMode(getattr(QtGui.QGradient, cmode))
# Get the gradient stops.
for gstop in prop:
if gstop.tag != 'gradientstop':
raise UnsupportedPropertyError(gstop.tag)
position = float(gstop.get('position'))
color = self._color(gstop[0])
gradient.setColorAt(position, color)
return name
def _palette(self, prop):
palette = self.factory.createQObject("QPalette", "palette", (),
is_attribute=False)
for palette_elem in prop:
sub_palette = getattr(QtGui.QPalette, palette_elem.tag.title())
for role, color in enumerate(palette_elem):
if color.tag == 'color':
# Handle simple colour descriptions where the role is
# implied by the colour's position.
palette.setColor(sub_palette,
QtGui.QPalette.ColorRole(role), self._color(color))
elif color.tag == 'colorrole':
role = getattr(QtGui.QPalette, color.get('role'))
brushstyle = color[0].get('brushstyle')
if brushstyle in ('LinearGradientPattern', 'ConicalGradientPattern', 'RadialGradientPattern'):
gradient = self._gradient(color[0][0])
brush = self.factory.createQObject("QBrush", "brush",
(gradient, ), is_attribute=False)
else:
color = self._color(color[0][0])
brush = self.factory.createQObject("QBrush", "brush",
(color, ), is_attribute=False)
brushstyle = getattr(QtCore.Qt, brushstyle)
brush.setStyle(brushstyle)
palette.setBrush(sub_palette, role, brush)
else:
raise UnsupportedPropertyError(color.tag)
return palette
#@needsWidget
def _sizepolicy(self, prop, widget):
values = [int(child.text) for child in prop]
if len(values) == 2:
# Qt v4.3.0 and later.
horstretch, verstretch = values
hsizetype = getattr(QtGui.QSizePolicy, prop.get('hsizetype'))
vsizetype = getattr(QtGui.QSizePolicy, prop.get('vsizetype'))
else:
hsizetype, vsizetype, horstretch, verstretch = values
hsizetype = QtGui.QSizePolicy.Policy(hsizetype)
vsizetype = QtGui.QSizePolicy.Policy(vsizetype)
sizePolicy = self.factory.createQObject("QSizePolicy", "sizePolicy",
(hsizetype, vsizetype), is_attribute=False)
sizePolicy.setHorizontalStretch(horstretch)
sizePolicy.setVerticalStretch(verstretch)
sizePolicy.setHeightForWidth(widget.sizePolicy.hasHeightForWidth())
return sizePolicy
_sizepolicy = needsWidget(_sizepolicy)
# font needs special handling/conversion of all child elements.
_font_attributes = (("Family", str),
("PointSize", int),
("Weight", int),
("Italic", bool_),
("Underline", bool_),
("StrikeOut", bool_),
("Bold", bool_))
def _font(self, prop):
newfont = self.factory.createQObject("QFont", "font", (),
is_attribute = False)
for attr, converter in self._font_attributes:
v = prop.findtext("./%s" % (attr.lower(),))
if v is None:
continue
getattr(newfont, "set%s" % (attr,))(converter(v))
return newfont
def _cursorShape(self, prop):
return getattr(QtCore.Qt, prop.text)
def convert(self, prop, widget=None):
try:
func = getattr(self, "_" + prop[0].tag)
except AttributeError:
raise UnsupportedPropertyError(prop[0].tag)
else:
args = {}
if getattr(func, "needsWidget", False):
assert widget is not None
args["widget"] = widget
return func(prop[0], **args)
def _getChild(self, elem_tag, elem, name, default=None):
for prop in elem.findall(elem_tag):
if prop.attrib["name"] == name:
return self.convert(prop)
else:
return default
def getProperty(self, elem, name, default=None):
return self._getChild("property", elem, name, default)
def getAttribute(self, elem, name, default=None):
return self._getChild("attribute", elem, name, default)
def setProperties(self, widget, elem):
try:
self.wclass = elem.attrib["class"]
except KeyError:
pass
for prop in elem.findall("property"):
prop_name = prop.attrib["name"]
DEBUG("setting property %s" % (prop_name,))
try:
stdset = bool(int(prop.attrib["stdset"]))
except KeyError:
stdset = True
if not stdset:
self._setViaSetProperty(widget, prop)
elif hasattr(self, prop_name):
getattr(self, prop_name)(widget, prop)
else:
prop_value = self.convert(prop, widget)
if prop_value is not None:
getattr(widget, "set%s%s" % (ascii_upper(prop_name[0]), prop_name[1:]))(prop_value)
# SPECIAL PROPERTIES
# If a property has a well-known value type but needs special,
# context-dependent handling, the default behaviour can be overridden here.
# Delayed properties will be set after the whole widget tree has been
# populated.
def _delay(self, widget, prop):
prop_value = self.convert(prop)
if prop_value is not None:
prop_name = prop.attrib["name"]
self.delayed_props.append((
getattr(widget, "set%s%s" % (ascii_upper(prop_name[0]), prop_name[1:])),
prop_value))
# These properties will be set with a widget.setProperty call rather than
# calling the set<property> function.
def _setViaSetProperty(self, widget, prop):
prop_value = self.convert(prop)
if prop_value is not None:
widget.setProperty(prop.attrib["name"], prop_value)
# Ignore the property.
def _ignore(self, widget, prop):
pass
# Define properties that use the canned handlers.
currentIndex = _delay
currentRow = _delay
showDropIndicator = _setViaSetProperty
intValue = _setViaSetProperty
value = _setViaSetProperty
objectName = _ignore
leftMargin = _ignore
topMargin = _ignore
rightMargin = _ignore
bottomMargin = _ignore
horizontalSpacing = _ignore
verticalSpacing = _ignore
# buddy setting has to be done after the whole widget tree has been
# populated. We can't use delay here because we cannot get the actual
# buddy yet.
def buddy(self, widget, prop):
buddy_name = prop[0].text
if buddy_name:
self.buddies.append((widget, buddy_name))
# geometry is handled specially if set on the toplevel widget.
def geometry(self, widget, prop):
if widget.objectName == self.uiname:
geom = int_list(prop[0])
widget.resize(geom[2], geom[3])
else:
widget.setGeometry(self._rect(prop[0]))
def orientation(self, widget, prop):
# If the class is a QFrame, it's a line.
if widget.className() == "QFrame":
widget.setFrameShape(
{"Qt::Horizontal": QtGui.QFrame.HLine,
"Qt::Vertical" : QtGui.QFrame.VLine}[prop[0].text])
# In Qt Designer, lines appear to be sunken, QFormBuilder loads
# them as such, uic generates plain lines. We stick to the look in
# Qt Designer.
widget.setFrameShadow(QtGui.QFrame.Sunken)
else:
widget.setOrientation(self._enum(prop[0]))
# The isWrapping attribute of QListView is named inconsistently, it should
# be wrapping.
def isWrapping(self, widget, prop):
widget.setWrapping(self.convert(prop))
# This is a pseudo-property injected to deal with setContentsMargin()
# introduced in Qt v4.3.
def pyuicContentsMargins(self, widget, prop):
widget.setContentsMargins(*int_list(prop))
# This is a pseudo-property injected to deal with setHorizontalSpacing()
# and setVerticalSpacing() introduced in Qt v4.3.
def pyuicSpacing(self, widget, prop):
horiz, vert = int_list(prop)
if horiz == vert:
widget.setSpacing(horiz)
else:
if horiz >= 0:
widget.setHorizontalSpacing(horiz)
if vert >= 0:
widget.setVerticalSpacing(vert)
|
gpl-3.0
| 281,255,060,788,967,330
| 32.454751
| 114
| 0.57368
| false
| 4.055677
| false
| false
| false
|
villaverde/iredadmin
|
libs/iredutils.py
|
1
|
17090
|
# encoding: utf-8
# Author: Zhang Huangbin <zhb@iredmail.org>
from os import urandom, getloadavg
import re
import time
import urllib2
import socket
from base64 import b64encode, b64decode
from xml.dom.minidom import parseString as parseXMLString
import random
import subprocess
import web
import settings
from libs import md5crypt
######################
# Regular expressions.
#
# Email.
reEmail = r'''[\w\-][\w\-\.\+\=]*@[\w\-][\w\-\.]*\.[a-zA-Z0-9\-]{2,15}'''
# Domain.
reDomain = r'''[\w\-][\w\-\.]*\.[a-z0-9\-]{2,15}'''
# End Regular expressions.
####
#####################################
# Pre-defined values of SQL functions.
sqlUnixTimestamp = web.sqlliteral('UNIX_TIMESTAMP()')
#####
##############
# Validators
#
INVALID_EMAIL_CHARS = '~!#$%^&*()\\/\ '
INVALID_DOMAIN_CHARS = '~!#$%^&*()+\\/\ '
def is_email(s):
s = str(s)
if len(set(s) & set(INVALID_EMAIL_CHARS)) > 0 \
or '.' not in s \
or s.count('@') != 1:
return False
reCompEmail = re.compile(reEmail + '$', re.IGNORECASE)
if reCompEmail.match(s):
return True
else:
return False
def is_domain(s):
s = str(s)
if len(set(s) & set(INVALID_DOMAIN_CHARS)) > 0 or '.' not in s:
return False
reCompDomain = re.compile(reDomain + '$', re.IGNORECASE)
if reCompDomain.match(s):
return True
else:
return False
def isStrictIP(s):
s = str(s)
fields = s.split('.')
if len(fields) != 4:
return False
    # Each field must be an integer in the range (0 < number < 255).
for fld in fields:
if fld.isdigit():
if not 0 < int(fld) < 255:
return False
else:
return False
return True
#
# End Validators
##################
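# Illustrative sketch: _demo_validators is a hypothetical helper (not part of
# the module's API) exercising the validators above with made-up values.
def _demo_validators():
    assert is_email('user@example.com') is True
    assert is_email('no-at-sign.example.com') is False
    assert is_domain('example.com') is True
    assert isStrictIP('192.168.1.10') is True
    assert isStrictIP('192.168.1.256') is False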
#########################
# Custom Jinja2 filters.
#
def filesizeformat(value, baseMB=False):
"""Format the value like a 'human-readable' file size (i.e. 13 KB,
4.1 MB, 102 bytes, etc). Per default decimal prefixes are used (mega,
giga etc.), if the second parameter is set to `True` the binary
prefixes are (mebi, gibi).
"""
try:
bytes = float(value)
except:
return 0
if baseMB is True:
bytes = bytes * 1024 * 1024
base = 1024
if bytes == 0:
return '0'
ret = '0'
if bytes < base:
ret = '%d Bytes' % (bytes)
elif bytes < base * base:
ret = '%d KB' % (bytes / base)
elif bytes < base * base * base:
ret = '%d MB' % (bytes / (base * base))
elif bytes < base * base * base * base:
if bytes % (base * base * base) == 0:
ret = '%d GB' % (bytes / (base * base * base))
else:
ret = "%d MB" % (bytes / (base * base))
else:
ret = '%.1f TB' % (bytes / (base * base * base * base))
return ret
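# Illustrative sketch: _demo_filesizeformat is a hypothetical helper showing
# the base-1024 arithmetic of the filter above with made-up values.
def _demo_filesizeformat():
    assert filesizeformat(2048) == '2 KB'
    assert filesizeformat(3, baseMB=True) == '3 MB'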
def set_datetime_format(t, hour=True,):
"""Format LDAP timestamp and Amavisd msgs.time_iso to YYYY-MM-DD HH:MM:SS.
>>> set_datetime_format('20100925T113256Z')
'2010-09-25 11:32:56'
>>> set_datetime_format('20100925T113256Z', hour=False)
'2010-09-25'
>>> set_datetime_format('INVALID_TIME_STAMP') # Return original string
'INVALID_TIME_STAMP'
"""
if t is None:
return '--'
else:
t = str(t)
if not hour:
time_format = '%Y-%m-%d'
else:
time_format = '%Y-%m-%d %H:%M:%S'
# LDAP timestamp
if 'T' not in t and t.endswith('Z'):
try:
return time.strftime(time_format, time.strptime(t, '%Y%m%d%H%M%SZ'))
except:
pass
# MySQL TIMESTAMP(): yyyymmddTHHMMSSZ
if 'T' in t and t.endswith('Z'):
try:
return time.strftime(time_format, time.strptime(t, '%Y%m%dT%H%M%SZ'))
except:
pass
# MySQL NOW(): yyyy-mm-dd HH:MM:SS
if '-' in t and ' ' in t and ':' in t:
# DBMail default last login date.
if t == '1979-11-03 22:05:58':
return '--'
try:
return time.strftime(time_format, time.strptime(t, '%Y-%m-%d %H:%M:%S'))
except:
pass
# ISO8601 UTC ascii time. Used in table: amavisd.msgs.
if len(t) == 14:
try:
return time.strftime(time_format, time.strptime(t, '%Y%m%d%H%M%S'))
except:
pass
return t
def cut_string(s, length=40):
try:
if len(s) != len(s.encode('utf-8', 'replace')):
length = length / 2
if len(s) >= length:
return s[:length] + '...'
else:
return s
except UnicodeDecodeError:
return unicode(s, 'utf-8', 'replace')
except:
return s
#
# End Jinja2 filters.
########################
def get_server_uptime():
try:
# Works on Linux.
f = open("/proc/uptime")
contents = f.read().split()
f.close()
except:
return None
total_seconds = float(contents[0])
MINUTE = 60
HOUR = MINUTE * 60
DAY = HOUR * 24
# Get the days, hours, minutes.
days = int(total_seconds / DAY)
hours = int((total_seconds % DAY) / HOUR)
minutes = int((total_seconds % HOUR) / MINUTE)
return (days, hours, minutes)
def get_system_load_average():
try:
(a1, a2, a3) = getloadavg()
a1 = '%.3f' % a1
a2 = '%.3f' % a2
a3 = '%.3f' % a3
return (a1, a2, a3)
except:
return (0, 0, 0)
def get_gmttime():
# Convert local time to UTC
return time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
def convertSQLQueryRecords(qr=[]):
"""Convert SQL record value to avoid incorrect unicode handle in Jinja2.
>>> db = web.DB(None, {})
>>> qr = db.query('SELECT * FROM msgs')
>>> convertSQLQueryRecords(qr)
>>> qr = db.select('msgs')
>>> convertSQLQueryRecords(qr)
"""
rcds = []
for record in qr:
for k in record:
try:
record[k] = web.safeunicode(record.get(k))
except UnicodeDecodeError:
record[k] = '<<< DECODE FAILED >>>'
rcds += [record]
return rcds
def verify_new_password(newpw, confirmpw,
min_passwd_length=settings.min_passwd_length,
max_passwd_length=settings.max_passwd_length):
# Get new passwords from user input.
newpw = str(newpw).strip()
confirmpw = str(confirmpw).strip()
# Empty password is not allowed.
if newpw == confirmpw:
passwd = newpw
else:
return (False, 'PW_MISMATCH')
if not len(passwd) > 0:
return (False, 'PW_EMPTY')
if not len(passwd) >= int(min_passwd_length):
return (False, 'PW_LESS_THAN_MIN_LENGTH')
if int(max_passwd_length) != 0:
if not len(passwd) <= int(max_passwd_length):
return (False, 'PW_GREATER_THAN_MAX_LENGTH')
return (True, passwd)
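# Illustrative sketch: _demo_verify_new_password is a hypothetical helper; the
# mismatch branch is reached before any of the configurable length checks.
def _demo_verify_new_password():
    assert verify_new_password('abc', 'xyz') == (False, 'PW_MISMATCH')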
def generate_random_strings(length=10):
"""Create a random password of specified length"""
try:
length = int(length) or 10
except:
length = 10
# Characters used to generate the random password
chars = '23456789' + 'abcdefghjkmnpqrstuvwxyz' + '23456789' + \
'ABCDEFGHJKLMNPQRSTUVWXYZ' + '23456789' # + '@#&*-+'
return "".join(random.choice(chars) for x in range(length))
def generate_bcrypt_password(p):
try:
import bcrypt
except:
return generate_ssha_password(p)
return '{CRYPT}' + bcrypt.hashpw(p, bcrypt.gensalt())
def verify_bcrypt_password(challenge_password, plain_password):
try:
import bcrypt
except:
return False
if challenge_password.startswith('{CRYPT}$2a$') \
or challenge_password.startswith('{CRYPT}$2b$') \
or challenge_password.startswith('{crypt}$2a$') \
or challenge_password.startswith('{crypt}$2b$'):
challenge_password = challenge_password[7:]
return bcrypt.checkpw(plain_password, challenge_password)
def generate_md5_password(p):
p = str(p).strip()
return md5crypt.unix_md5_crypt(p, generate_random_strings(length=8))
def verify_md5_password(challenge_password, plain_password):
"""Verify salted MD5 password"""
if challenge_password.startswith('{MD5}') or challenge_password.startswith('{md5}'):
challenge_password = challenge_password[5:]
if not (
challenge_password.startswith('$') \
and len(challenge_password) == 34 \
and challenge_password.count('$') == 3):
return False
# Get salt from hashed string
salt = challenge_password.split('$')
salt[-1] = ''
salt = '$'.join(salt)
if md5crypt.md5crypt(plain_password, salt) == challenge_password:
return True
else:
return False
def generate_plain_md5_password(p):
p = str(p).strip()
try:
from hashlib import md5
return md5(p).hexdigest()
except ImportError:
import md5
return md5.new(p).hexdigest()
def verify_plain_md5_password(challenge_password, plain_password):
if challenge_password.startswith('{PLAIN-MD5}') \
or challenge_password.startswith('{plain-md5}'):
challenge_password = challenge_password[11:]
if challenge_password == generate_plain_md5_password(plain_password):
return True
else:
return False
def generate_ssha_password(p):
p = str(p).strip()
salt = urandom(8)
try:
from hashlib import sha1
pw = sha1(p)
except ImportError:
import sha
pw = sha.new(p)
pw.update(salt)
return "{SSHA}" + b64encode(pw.digest() + salt)
def verify_ssha_password(challenge_password, plain_password):
"""Verify SSHA (salted SHA) hash with or without prefix '{SSHA}'"""
if challenge_password.startswith('{SSHA}') \
or challenge_password.startswith('{ssha}'):
challenge_password = challenge_password[6:]
if not len(challenge_password) > 20:
# Not a valid SSHA hash
return False
try:
challenge_bytes = b64decode(challenge_password)
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
try:
from hashlib import sha1
hr = sha1(plain_password)
except ImportError:
import sha
hr = sha.new(plain_password)
hr.update(salt)
return digest == hr.digest()
except:
return False
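# Illustrative sketch: _demo_ssha_roundtrip is a hypothetical helper that
# round-trips a made-up password through the SSHA helpers above;
# verify_ssha_password() should accept whatever generate_ssha_password()
# produced.
def _demo_ssha_roundtrip(plain='secret'):
    hashed = generate_ssha_password(plain)
    return verify_ssha_password(hashed, plain)  # expected: True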
def generate_ssha512_password(p):
"""Generate salted SHA512 password with prefix '{SSHA512}'.
Return salted SHA hash if python is older than 2.5 (module hashlib)."""
p = str(p).strip()
try:
from hashlib import sha512
salt = urandom(8)
pw = sha512(p)
pw.update(salt)
return "{SSHA512}" + b64encode(pw.digest() + salt)
except ImportError:
# Use SSHA password instead if python is older than 2.5.
return generate_ssha_password(p)
def verify_ssha512_password(challenge_password, plain_password):
"""Verify SSHA512 password with or without prefix '{SSHA512}'.
Python-2.5 is required since it requires module hashlib."""
if challenge_password.startswith('{SSHA512}') \
or challenge_password.startswith('{ssha512}'):
challenge_password = challenge_password[9:]
# With SSHA512, hash itself is 64 bytes (512 bits/8 bits per byte),
# everything after that 64 bytes is the salt.
if not len(challenge_password) > 64:
return False
try:
challenge_bytes = b64decode(challenge_password)
digest = challenge_bytes[:64]
salt = challenge_bytes[64:]
from hashlib import sha512
hr = sha512(plain_password)
hr.update(salt)
return digest == hr.digest()
except:
return False
def generate_cram_md5_password(p):
"""Generate CRAM-MD5 hash with `doveadm pw` command with prefix '{CRAM-MD5}'.
Return SSHA instead if no 'doveadm' command found or other error raised."""
p = str(p).strip()
try:
pp = subprocess.Popen(['doveadm', 'pw', '-s', 'CRAM-MD5', '-p', p],
stdout=subprocess.PIPE)
return pp.communicate()[0]
except:
return generate_ssha_password(p)
def verify_cram_md5_password(challenge_password, plain_password):
"""Verify CRAM-MD5 hash with 'doveadm pw' command."""
    if not (challenge_password.startswith('{CRAM-MD5}')
            or challenge_password.startswith('{cram-md5}')):
        return False
try:
exit_status = subprocess.call(['doveadm',
'pw',
'-t',
challenge_password,
'-p',
plain_password])
if exit_status == 0:
return True
except:
pass
return False
def generate_password_hash(p, pwscheme=None):
"""Generate password for LDAP mail user and admin."""
pw = str(p).strip()
if not pwscheme:
pwscheme = settings.DEFAULT_PASSWORD_SCHEME
if pwscheme == 'BCRYPT':
pw = generate_bcrypt_password(p)
elif pwscheme == 'SSHA512':
pw = generate_ssha512_password(p)
elif pwscheme == 'SSHA':
pw = generate_ssha_password(p)
elif pwscheme == 'MD5':
pw = '{CRYPT}' + generate_md5_password(p)
elif pwscheme == 'PLAIN-MD5':
pw = generate_plain_md5_password(p)
elif pwscheme == 'PLAIN':
if settings.SQL_PASSWORD_PREFIX_SCHEME is True:
pw = '{PLAIN}' + p
else:
pw = p
else:
# Plain password
pw = p
return pw
def verify_password_hash(challenge_password, plain_password):
# Check plain password and MD5 first.
if challenge_password in [plain_password,
'{PLAIN}' + plain_password,
'{plain}' + plain_password]:
return True
elif verify_md5_password(challenge_password, plain_password):
return True
upwd = challenge_password.upper()
if upwd.startswith('{SSHA}'):
return verify_ssha_password(challenge_password, plain_password)
elif upwd.startswith('{SSHA512}'):
return verify_ssha512_password(challenge_password, plain_password)
elif upwd.startswith('{PLAIN-MD5}'):
return verify_plain_md5_password(challenge_password, plain_password)
elif upwd.startswith('{CRAM-MD5}'):
return verify_cram_md5_password(challenge_password, plain_password)
elif upwd.startswith('{CRYPT}$2A$') or upwd.startswith('{CRYPT}$2B$'):
return verify_bcrypt_password(challenge_password, plain_password)
return False
def generate_maildir_path(mail,
hashedMaildir=settings.MAILDIR_HASHED,
prependDomainName=settings.MAILDIR_PREPEND_DOMAIN,
appendTimestamp=settings.MAILDIR_APPEND_TIMESTAMP,
):
"""Generate path of mailbox."""
mail = web.safestr(mail)
if not is_email(mail):
return (False, 'INVALID_EMAIL_ADDRESS')
# Get user/domain part from mail address.
username, domain = mail.split('@', 1)
# Get current timestamp.
timestamp = ''
if appendTimestamp:
timestamp = time.strftime('-%Y.%m.%d.%H.%M.%S')
if hashedMaildir is True:
if len(username) >= 3:
maildir = "%s/%s/%s/%s%s/" % (
username[0], username[1], username[2], username, timestamp,
)
elif len(username) == 2:
maildir = "%s/%s/%s/%s%s/" % (
username[0], username[1], username[1], username, timestamp,
)
else:
maildir = "%s/%s/%s/%s%s/" % (
username[0], username[0], username[0], username, timestamp,
)
mailMessageStore = maildir
else:
mailMessageStore = "%s%s/" % (username, timestamp,)
if prependDomainName:
mailMessageStore = domain + '/' + mailMessageStore
return mailMessageStore.lower()
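# Illustrative sketch: _demo_maildir_path is a hypothetical helper;
# 'user@example.com' is a made-up address and the exact layout of the
# returned path depends on the configured MAILDIR_* settings.
def _demo_maildir_path():
    # With hashed maildirs and domain prepending enabled the result
    # resembles 'example.com/u/s/e/user[-<timestamp>]/'.
    return generate_maildir_path('user@example.com')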
def getNewVersion(urlOfXML):
'''Checking new version via parsing XML string to extract version number.
>>> getNewVersion('http://xxx/sample.xml') # New version available.
(True, {'version': '1.3.0',
'date': '2010-10-01',
'url': 'http://xxx/release-notes-1.3.0.html'
})
>>> getNewVersion('http://xxx/sample.xml') # Error while checking.
(False, 'HTTP Error 404: Not Found')
'''
try:
socket.setdefaulttimeout(5)
dom = parseXMLString(urllib2.urlopen(urlOfXML).read())
version = dom.documentElement.getElementsByTagName('version')[0].childNodes[0].data
date = dom.documentElement.getElementsByTagName('date')[0].childNodes[0].data
urlOfReleaseNotes = dom.documentElement.getElementsByTagName('releasenotes')[0].childNodes[0].data
d = {'version': str(version),
'date': str(date),
'url': str(urlOfReleaseNotes),
}
return (True, d)
except Exception, e:
return (False, str(e))
|
gpl-2.0
| 3,642,822,410,555,801,600
| 26.788618
| 106
| 0.572089
| false
| 3.64937
| false
| false
| false
|
RPGOne/Skynet
|
pytorch-master/torch/nn/modules/linear.py
|
1
|
1934
|
import math
import torch
from torch.nn.parameter import Parameter
from .module import Module
class Linear(Module):
r"""Applies a linear transformation to the incoming data: :math:`y = Ax + b`
Args:
in_features: size of each input sample
out_features: size of each output sample
bias: If set to False, the layer will not learn an additive bias. Default: True
Shape:
- Input: :math:`(N, in\_features)`
- Output: :math:`(N, out\_features)`
Attributes:
weight: the learnable weights of the module of shape (out_features x in_features)
bias: the learnable bias of the module of shape (out_features)
Examples::
>>> m = nn.Linear(20, 30)
>>> input = autograd.Variable(torch.randn(128, 20))
>>> output = m(input)
>>> print(output.size())
"""
def __init__(self, in_features, out_features, bias=True):
super(Linear, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.weight = Parameter(torch.Tensor(out_features, in_features))
if bias:
self.bias = Parameter(torch.Tensor(out_features))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1. / math.sqrt(self.weight.size(1))
self.weight.data.uniform_(-stdv, stdv)
if self.bias is not None:
self.bias.data.uniform_(-stdv, stdv)
def forward(self, input):
if self.bias is None:
return self._backend.Linear()(input, self.weight)
else:
return self._backend.Linear()(input, self.weight, self.bias)
def __repr__(self):
return self.__class__.__name__ + ' (' \
+ str(self.in_features) + ' -> ' \
+ str(self.out_features) + ')'
# TODO: Bilinear
# TODO: PartialLinear - maybe in sparse?
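# Illustrative sketch (hypothetical shapes, using the old autograd Variable
# API to match the surrounding code): applying the module above to a batch of
# 4 samples with 3 features yields a (4, 2) output.
if __name__ == '__main__':
    m = Linear(3, 2)
    x = torch.autograd.Variable(torch.randn(4, 3))
    print(m(x).size())  # torch.Size([4, 2])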
|
bsd-3-clause
| -3,095,174,257,862,721,500
| 29.698413
| 89
| 0.592037
| false
| 3.75534
| false
| false
| false
|
andrewklau/openshift-tools
|
openshift/installer/vendored/openshift-ansible-3.5.13/filter_plugins/oo_filters.py
|
2
|
41534
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
# pylint: disable=no-name-in-module, import-error, wrong-import-order, ungrouped-imports
"""
Custom filters for use in openshift-ansible
"""
import os
import pdb
import pkg_resources
import re
import json
import yaml
import random
from ansible import errors
from collections import Mapping
from distutils.util import strtobool
from distutils.version import LooseVersion
from operator import itemgetter
from ansible.parsing.yaml.dumper import AnsibleDumper
from urlparse import urlparse
from six import string_types
HAS_OPENSSL = False
try:
import OpenSSL.crypto
HAS_OPENSSL = True
except ImportError:
pass
try:
# ansible-2.2
# ansible.utils.unicode.to_unicode is deprecated in ansible-2.2,
# ansible.module_utils._text.to_text should be used instead.
from ansible.module_utils._text import to_text
except ImportError:
# ansible-2.1
from ansible.utils.unicode import to_unicode as to_text
def oo_pdb(arg):
""" This pops you into a pdb instance where arg is the data passed in
from the filter.
Ex: "{{ hostvars | oo_pdb }}"
"""
pdb.set_trace()
return arg
def get_attr(data, attribute=None):
""" This looks up dictionary attributes of the form a.b.c and returns
the value.
If the key isn't present, None is returned.
Ex: data = {'a': {'b': {'c': 5}}}
attribute = "a.b.c"
returns 5
"""
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
ptr = data
for attr in attribute.split('.'):
if attr in ptr:
ptr = ptr[attr]
else:
ptr = None
break
return ptr
def oo_flatten(data):
""" This filter plugin will flatten a list of lists
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to flatten a List")
return [item for sublist in data for item in sublist]
def oo_merge_dicts(first_dict, second_dict):
""" Merge two dictionaries where second_dict values take precedence.
Ex: first_dict={'a': 1, 'b': 2}
second_dict={'b': 3, 'c': 4}
returns {'a': 1, 'b': 3, 'c': 4}
"""
if not isinstance(first_dict, dict) or not isinstance(second_dict, dict):
raise errors.AnsibleFilterError("|failed expects to merge two dicts")
merged = first_dict.copy()
merged.update(second_dict)
return merged
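# Illustrative sketch: _demo_oo_merge_dicts is a hypothetical helper mirroring
# the docstring example above.
def _demo_oo_merge_dicts():
    merged = oo_merge_dicts({'a': 1, 'b': 2}, {'b': 3, 'c': 4})
    assert merged == {'a': 1, 'b': 3, 'c': 4}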
def oo_merge_hostvars(hostvars, variables, inventory_hostname):
""" Merge host and play variables.
When ansible version is greater than or equal to 2.0.0,
merge hostvars[inventory_hostname] with variables (ansible vars)
otherwise merge hostvars with hostvars['inventory_hostname'].
Ex: hostvars={'master1.example.com': {'openshift_variable': '3'},
'openshift_other_variable': '7'}
variables={'openshift_other_variable': '6'}
inventory_hostname='master1.example.com'
returns {'openshift_variable': '3', 'openshift_other_variable': '7'}
hostvars=<ansible.vars.hostvars.HostVars object> (Mapping)
variables={'openshift_other_variable': '6'}
inventory_hostname='master1.example.com'
returns {'openshift_variable': '3', 'openshift_other_variable': '6'}
"""
if not isinstance(hostvars, Mapping):
raise errors.AnsibleFilterError("|failed expects hostvars is dictionary or object")
if not isinstance(variables, dict):
raise errors.AnsibleFilterError("|failed expects variables is a dictionary")
if not isinstance(inventory_hostname, string_types):
raise errors.AnsibleFilterError("|failed expects inventory_hostname is a string")
# pylint: disable=no-member
ansible_version = pkg_resources.get_distribution("ansible").version
merged_hostvars = {}
if LooseVersion(ansible_version) >= LooseVersion('2.0.0'):
merged_hostvars = oo_merge_dicts(
hostvars[inventory_hostname], variables)
else:
merged_hostvars = oo_merge_dicts(
hostvars[inventory_hostname], hostvars)
return merged_hostvars
def oo_collect(data, attribute=None, filters=None):
""" This takes a list of dict and collects all attributes specified into a
list. If filter is specified then we will include all items that
match _ALL_ of filters. If a dict entry is missing the key in a
filter it will be excluded from the match.
Ex: data = [ {'a':1, 'b':5, 'z': 'z'}, # True, return
{'a':2, 'z': 'z'}, # True, return
{'a':3, 'z': 'z'}, # True, return
                     {'a':4, 'z': 'b'}, # FAILED, obj['z'] != filters['z']
]
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3]
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a List")
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
if filters is not None:
if not isinstance(filters, dict):
raise errors.AnsibleFilterError("|failed expects filter to be a"
" dict")
retval = [get_attr(d, attribute) for d in data if (
all([d.get(key, None) == filters[key] for key in filters]))]
else:
retval = [get_attr(d, attribute) for d in data]
retval = [val for val in retval if val is not None]
return retval
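# Illustrative sketch: _demo_oo_collect is a hypothetical helper with made-up
# records, collecting a single attribute from entries that match every
# filter key.
def _demo_oo_collect():
    data = [{'a': 1, 'z': 'z'}, {'a': 2, 'z': 'z'}, {'a': 4, 'z': 'b'}]
    assert oo_collect(data, attribute='a', filters={'z': 'z'}) == [1, 2]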
def oo_select_keys_from_list(data, keys):
""" This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not isinstance(keys, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [oo_select_keys(item, keys) for item in data]
return oo_flatten(retval)
def oo_select_keys(data, keys):
""" This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
"""
if not isinstance(data, Mapping):
raise errors.AnsibleFilterError("|failed expects to filter on a dict or object")
if not isinstance(keys, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
retval = [data[key] for key in keys if key in data]
return retval
def oo_prepend_strings_in_list(data, prepend):
""" This takes a list of strings and prepends a string to each item in the
list
Ex: data = ['cart', 'tree']
prepend = 'apple-'
returns ['apple-cart', 'apple-tree']
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not all(isinstance(x, string_types) for x in data):
raise errors.AnsibleFilterError("|failed expects first param is a list"
" of strings")
retval = [prepend + s for s in data]
return retval
def oo_combine_key_value(data, joiner='='):
"""Take a list of dict in the form of { 'key': 'value'} and
arrange them as a list of strings ['key=value']
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
rval = []
for item in data:
rval.append("%s%s%s" % (item['key'], joiner, item['value']))
return rval
def oo_combine_dict(data, in_joiner='=', out_joiner=' '):
"""Take a dict in the form of { 'key': 'value', 'key': 'value' } and
arrange them as a string 'key=value key=value'
"""
if not isinstance(data, dict):
# pylint: disable=line-too-long
raise errors.AnsibleFilterError("|failed expects first param is a dict [oo_combine_dict]. Got %s. Type: %s" % (str(data), str(type(data))))
return out_joiner.join([in_joiner.join([k, str(v)]) for k, v in data.items()])
def oo_dict_to_list_of_dict(data, key_title='key', value_title='value'):
"""Take a dict and arrange them as a list of dicts
Input data:
{'region': 'infra', 'test_k': 'test_v'}
Return data:
[{'key': 'region', 'value': 'infra'}, {'key': 'test_k', 'value': 'test_v'}]
Written for use of the oc_label module
"""
if not isinstance(data, dict):
# pylint: disable=line-too-long
raise errors.AnsibleFilterError("|failed expects first param is a dict. Got %s. Type: %s" % (str(data), str(type(data))))
rval = []
for label in data.items():
rval.append({key_title: label[0], value_title: label[1]})
return rval
def oo_ami_selector(data, image_name):
""" This takes a list of amis and an image name and attempts to return
the latest ami.
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not data:
return None
else:
if image_name is None or not image_name.endswith('_*'):
ami = sorted(data, key=itemgetter('name'), reverse=True)[0]
return ami['ami_id']
else:
ami_info = [(ami, ami['name'].split('_')[-1]) for ami in data]
ami = sorted(ami_info, key=itemgetter(1), reverse=True)[0][0]
return ami['ami_id']
def oo_ec2_volume_definition(data, host_type, docker_ephemeral=False):
""" This takes a dictionary of volume definitions and returns a valid ec2
volume definition based on the host_type and the values in the
dictionary.
The dictionary should look similar to this:
{ 'master':
{ 'root':
{ 'volume_size': 10, 'device_type': 'gp2',
'iops': 500
},
'docker':
{ 'volume_size': 40, 'device_type': 'gp2',
'iops': 500, 'ephemeral': 'true'
}
},
'node':
{ 'root':
{ 'volume_size': 10, 'device_type': 'io1',
'iops': 1000
},
'docker':
{ 'volume_size': 40, 'device_type': 'gp2',
'iops': 500, 'ephemeral': 'true'
}
}
}
"""
if not isinstance(data, dict):
# pylint: disable=line-too-long
raise errors.AnsibleFilterError("|failed expects first param is a dict [oo_ec2_volume_def]. Got %s. Type: %s" % (str(data), str(type(data))))
if host_type not in ['master', 'node', 'etcd']:
raise errors.AnsibleFilterError("|failed expects etcd, master or node"
" as the host type")
root_vol = data[host_type]['root']
root_vol['device_name'] = '/dev/sda1'
root_vol['delete_on_termination'] = True
if root_vol['device_type'] != 'io1':
root_vol.pop('iops', None)
if host_type in ['master', 'node'] and 'docker' in data[host_type]:
docker_vol = data[host_type]['docker']
docker_vol['device_name'] = '/dev/xvdb'
docker_vol['delete_on_termination'] = True
if docker_vol['device_type'] != 'io1':
docker_vol.pop('iops', None)
if docker_ephemeral:
docker_vol.pop('device_type', None)
docker_vol.pop('delete_on_termination', None)
docker_vol['ephemeral'] = 'ephemeral0'
return [root_vol, docker_vol]
elif host_type == 'etcd' and 'etcd' in data[host_type]:
etcd_vol = data[host_type]['etcd']
etcd_vol['device_name'] = '/dev/xvdb'
etcd_vol['delete_on_termination'] = True
if etcd_vol['device_type'] != 'io1':
etcd_vol.pop('iops', None)
return [root_vol, etcd_vol]
return [root_vol]
def oo_split(string, separator=','):
""" This splits the input string into a list. If the input string is
already a list we will return it as is.
"""
if isinstance(string, list):
return string
return string.split(separator)
def oo_haproxy_backend_masters(hosts, port):
""" This takes an array of dicts and returns an array of dicts
to be used as a backend for the haproxy role
"""
servers = []
for idx, host_info in enumerate(hosts):
server = dict(name="master%s" % idx)
server_ip = host_info['openshift']['common']['ip']
server['address'] = "%s:%s" % (server_ip, port)
server['opts'] = 'check'
servers.append(server)
return servers
def oo_filter_list(data, filter_attr=None):
""" This returns a list, which contains all items where filter_attr
evaluates to true
Ex: data = [ { a: 1, b: True },
{ a: 3, b: False },
{ a: 5, b: True } ]
filter_attr = 'b'
returns [ { a: 1, b: True },
{ a: 5, b: True } ]
"""
if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
if not isinstance(filter_attr, string_types):
raise errors.AnsibleFilterError("|failed expects filter_attr is a str or unicode")
# Gather up the values for the list of keys passed in
return [x for x in data if filter_attr in x and x[filter_attr]]
def oo_nodes_with_label(nodes, label, value=None):
""" Filters a list of nodes by label and value (if provided)
It handles labels that are in the following variables by priority:
openshift_node_labels, cli_openshift_node_labels, openshift['node']['labels']
Examples:
data = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}},
'c': {'openshift_node_labels': {'size': 'S'}}]
label = 'color'
returns = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}}]
data = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}},
'c': {'openshift_node_labels': {'size': 'S'}}]
label = 'color'
value = 'green'
returns = ['b': {'labels': {'color': 'green', 'size': 'L'}}]
Args:
nodes (list[dict]): list of node to node variables
label (str): label to filter `nodes` by
value (Optional[str]): value of `label` to filter by Defaults
to None.
Returns:
list[dict]: nodes filtered by label and value (if provided)
"""
if not isinstance(nodes, list):
raise errors.AnsibleFilterError("failed expects to filter on a list")
if not isinstance(label, string_types):
raise errors.AnsibleFilterError("failed expects label to be a string")
if value is not None and not isinstance(value, string_types):
raise errors.AnsibleFilterError("failed expects value to be a string")
def label_filter(node):
""" filter function for testing if node should be returned """
if not isinstance(node, dict):
raise errors.AnsibleFilterError("failed expects to filter on a list of dicts")
if 'openshift_node_labels' in node:
labels = node['openshift_node_labels']
elif 'cli_openshift_node_labels' in node:
labels = node['cli_openshift_node_labels']
elif 'openshift' in node and 'node' in node['openshift'] and 'labels' in node['openshift']['node']:
labels = node['openshift']['node']['labels']
else:
return False
if isinstance(labels, string_types):
labels = yaml.safe_load(labels)
if not isinstance(labels, dict):
raise errors.AnsibleFilterError(
"failed expected node labels to be a dict or serializable to a dict"
)
return label in labels and (value is None or labels[label] == value)
return [n for n in nodes if label_filter(n)]
def oo_parse_heat_stack_outputs(data):
""" Formats the HEAT stack output into a usable form
The goal is to transform something like this:
+---------------+-------------------------------------------------+
| Property | Value |
+---------------+-------------------------------------------------+
        | capabilities | [] |
        | creation_time | 2015-06-26T12:26:26Z |
        | description | OpenShift cluster |
| … | … |
| outputs | [ |
| | { |
| | "output_value": "value_A" |
| | "description": "This is the value of Key_A" |
| | "output_key": "Key_A" |
| | }, |
| | { |
| | "output_value": [ |
| | "value_B1", |
| | "value_B2" |
| | ], |
| | "description": "This is the value of Key_B" |
| | "output_key": "Key_B" |
| | }, |
| | ] |
| parameters | { |
| … | … |
+---------------+-------------------------------------------------+
into something like this:
{
"Key_A": "value_A",
"Key_B": [
"value_B1",
"value_B2"
]
}
"""
# Extract the “outputs” JSON snippet from the pretty-printed array
in_outputs = False
outputs = ''
line_regex = re.compile(r'\|\s*(.*?)\s*\|\s*(.*?)\s*\|')
for line in data['stdout_lines']:
match = line_regex.match(line)
if match:
if match.group(1) == 'outputs':
in_outputs = True
elif match.group(1) != '':
in_outputs = False
if in_outputs:
outputs += match.group(2)
outputs = json.loads(outputs)
# Revamp the “outputs” to put it in the form of a “Key: value” map
revamped_outputs = {}
for output in outputs:
revamped_outputs[output['output_key']] = output['output_value']
return revamped_outputs
# pylint: disable=too-many-branches
def oo_parse_named_certificates(certificates, named_certs_dir, internal_hostnames):
""" Parses names from list of certificate hashes.
Ex: certificates = [{ "certfile": "/root/custom1.crt",
"keyfile": "/root/custom1.key",
"cafile": "/root/custom-ca1.crt" },
{ "certfile": "custom2.crt",
"keyfile": "custom2.key",
"cafile": "custom-ca2.crt" }]
returns [{ "certfile": "/etc/origin/master/named_certificates/custom1.crt",
"keyfile": "/etc/origin/master/named_certificates/custom1.key",
"cafile": "/etc/origin/master/named_certificates/custom-ca1.crt",
"names": [ "public-master-host.com",
"other-master-host.com" ] },
{ "certfile": "/etc/origin/master/named_certificates/custom2.crt",
"keyfile": "/etc/origin/master/named_certificates/custom2.key",
"cafile": "/etc/origin/master/named_certificates/custom-ca-2.crt",
"names": [ "some-hostname.com" ] }]
"""
if not isinstance(named_certs_dir, string_types):
raise errors.AnsibleFilterError("|failed expects named_certs_dir is str or unicode")
if not isinstance(internal_hostnames, list):
raise errors.AnsibleFilterError("|failed expects internal_hostnames is list")
if not HAS_OPENSSL:
raise errors.AnsibleFilterError("|missing OpenSSL python bindings")
for certificate in certificates:
if 'names' in certificate.keys():
continue
else:
certificate['names'] = []
if not os.path.isfile(certificate['certfile']) or not os.path.isfile(certificate['keyfile']):
raise errors.AnsibleFilterError("|certificate and/or key does not exist '%s', '%s'" %
(certificate['certfile'], certificate['keyfile']))
try:
st_cert = open(certificate['certfile'], 'rt').read()
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, st_cert)
certificate['names'].append(str(cert.get_subject().commonName.decode()))
for i in range(cert.get_extension_count()):
if cert.get_extension(i).get_short_name() == 'subjectAltName':
for name in str(cert.get_extension(i)).replace('DNS:', '').split(', '):
certificate['names'].append(name)
except Exception:
raise errors.AnsibleFilterError(("|failed to parse certificate '%s', " % certificate['certfile'] +
"please specify certificate names in host inventory"))
certificate['names'] = list(set(certificate['names']))
if 'cafile' not in certificate:
certificate['names'] = [name for name in certificate['names'] if name not in internal_hostnames]
if not certificate['names']:
raise errors.AnsibleFilterError(("|failed to parse certificate '%s' or " % certificate['certfile'] +
"detected a collision with internal hostname, please specify " +
"certificate names in host inventory"))
for certificate in certificates:
# Update paths for configuration
certificate['certfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['certfile']))
certificate['keyfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['keyfile']))
if 'cafile' in certificate:
certificate['cafile'] = os.path.join(named_certs_dir, os.path.basename(certificate['cafile']))
return certificates
def oo_pretty_print_cluster(data, prefix='tag_'):
""" Read a subset of hostvars and build a summary of the cluster
in the following layout:
"c_id": {
"master": {
"default": [
{ "name": "c_id-master-12345", "public IP": "172.16.0.1", "private IP": "192.168.0.1" }
]
"node": {
"infra": [
{ "name": "c_id-node-infra-23456", "public IP": "172.16.0.2", "private IP": "192.168.0.2" }
],
"compute": [
{ "name": "c_id-node-compute-23456", "public IP": "172.16.0.3", "private IP": "192.168.0.3" },
...
]
}
"""
def _get_tag_value(tags, key):
""" Extract values of a map implemented as a set.
Ex: tags = { 'tag_foo_value1', 'tag_bar_value2', 'tag_baz_value3' }
key = 'bar'
returns 'value2'
"""
for tag in tags:
if tag[:len(prefix) + len(key)] == prefix + key:
return tag[len(prefix) + len(key) + 1:]
raise KeyError(key)
def _add_host(clusters,
clusterid,
host_type,
sub_host_type,
host):
""" Add a new host in the clusters data structure """
if clusterid not in clusters:
clusters[clusterid] = {}
if host_type not in clusters[clusterid]:
clusters[clusterid][host_type] = {}
if sub_host_type not in clusters[clusterid][host_type]:
clusters[clusterid][host_type][sub_host_type] = []
clusters[clusterid][host_type][sub_host_type].append(host)
clusters = {}
for host in data:
try:
_add_host(clusters=clusters,
clusterid=_get_tag_value(host['group_names'], 'clusterid'),
host_type=_get_tag_value(host['group_names'], 'host-type'),
sub_host_type=_get_tag_value(host['group_names'], 'sub-host-type'),
host={'name': host['inventory_hostname'],
'public IP': host['oo_public_ipv4'],
'private IP': host['oo_private_ipv4']})
except KeyError:
pass
return clusters
def oo_generate_secret(num_bytes):
""" generate a session secret """
if not isinstance(num_bytes, int):
raise errors.AnsibleFilterError("|failed expects num_bytes is int")
secret = os.urandom(num_bytes)
return secret.encode('base-64').strip()
def to_padded_yaml(data, level=0, indent=2, **kw):
""" returns a yaml snippet padded to match the indent level you specify """
if data in [None, ""]:
return ""
try:
transformed = yaml.dump(data, indent=indent, allow_unicode=True,
default_flow_style=False,
Dumper=AnsibleDumper, **kw)
padded = "\n".join([" " * level * indent + line for line in transformed.splitlines()])
return to_text("\n{0}".format(padded))
except Exception as my_e:
raise errors.AnsibleFilterError('Failed to convert: %s' % my_e)
def oo_openshift_env(hostvars):
''' Return facts which begin with "openshift_" and translate
legacy facts to their openshift_env counterparts.
Ex: hostvars = {'openshift_fact': 42,
'theyre_taking_the_hobbits_to': 'isengard'}
returns = {'openshift_fact': 42}
'''
if not issubclass(type(hostvars), dict):
raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
facts = {}
regex = re.compile('^openshift_.*')
for key in hostvars:
if regex.match(key):
facts[key] = hostvars[key]
migrations = {'openshift_router_selector': 'openshift_hosted_router_selector',
'openshift_registry_selector': 'openshift_hosted_registry_selector'}
for old_fact, new_fact in migrations.items():
if old_fact in facts and new_fact not in facts:
facts[new_fact] = facts[old_fact]
return facts
# pylint: disable=too-many-branches, too-many-nested-blocks
def oo_persistent_volumes(hostvars, groups, persistent_volumes=None):
""" Generate list of persistent volumes based on oo_openshift_env
storage options set in host variables.
"""
if not issubclass(type(hostvars), dict):
raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
if not issubclass(type(groups), dict):
raise errors.AnsibleFilterError("|failed expects groups is a dict")
if persistent_volumes is not None and not issubclass(type(persistent_volumes), list):
raise errors.AnsibleFilterError("|failed expects persistent_volumes is a list")
if persistent_volumes is None:
persistent_volumes = []
if 'hosted' in hostvars['openshift']:
for component in hostvars['openshift']['hosted']:
if 'storage' in hostvars['openshift']['hosted'][component]:
params = hostvars['openshift']['hosted'][component]['storage']
kind = params['kind']
create_pv = params['create_pv']
if kind is not None and create_pv:
if kind == 'nfs':
host = params['host']
if host is None:
if 'oo_nfs_to_config' in groups and len(groups['oo_nfs_to_config']) > 0:
host = groups['oo_nfs_to_config'][0]
else:
raise errors.AnsibleFilterError("|failed no storage host detected")
directory = params['nfs']['directory']
volume = params['volume']['name']
path = directory + '/' + volume
size = params['volume']['size']
access_modes = params['access']['modes']
persistent_volume = dict(
name="{0}-volume".format(volume),
capacity=size,
access_modes=access_modes,
storage=dict(
nfs=dict(
server=host,
path=path)))
persistent_volumes.append(persistent_volume)
elif kind == 'openstack':
volume = params['volume']['name']
size = params['volume']['size']
access_modes = params['access']['modes']
filesystem = params['openstack']['filesystem']
volume_id = params['openstack']['volumeID']
persistent_volume = dict(
name="{0}-volume".format(volume),
capacity=size,
access_modes=access_modes,
storage=dict(
cinder=dict(
fsType=filesystem,
volumeID=volume_id)))
persistent_volumes.append(persistent_volume)
elif not (kind == 'object' or kind == 'dynamic'):
msg = "|failed invalid storage kind '{0}' for component '{1}'".format(
kind,
component)
raise errors.AnsibleFilterError(msg)
return persistent_volumes
def oo_persistent_volume_claims(hostvars, persistent_volume_claims=None):
""" Generate list of persistent volume claims based on oo_openshift_env
storage options set in host variables.
"""
if not issubclass(type(hostvars), dict):
raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
if persistent_volume_claims is not None and not issubclass(type(persistent_volume_claims), list):
raise errors.AnsibleFilterError("|failed expects persistent_volume_claims is a list")
if persistent_volume_claims is None:
persistent_volume_claims = []
if 'hosted' in hostvars['openshift']:
for component in hostvars['openshift']['hosted']:
if 'storage' in hostvars['openshift']['hosted'][component]:
params = hostvars['openshift']['hosted'][component]['storage']
kind = params['kind']
create_pv = params['create_pv']
create_pvc = params['create_pvc']
if kind not in [None, 'object'] and create_pv and create_pvc:
volume = params['volume']['name']
size = params['volume']['size']
access_modes = params['access']['modes']
persistent_volume_claim = dict(
name="{0}-claim".format(volume),
capacity=size,
access_modes=access_modes)
persistent_volume_claims.append(persistent_volume_claim)
return persistent_volume_claims
def oo_31_rpm_rename_conversion(rpms, openshift_version=None):
""" Filters a list of 3.0 rpms and return the corresponding 3.1 rpms
names with proper version (if provided)
If 3.1 rpms are passed in they will only be augmented with the
correct version. This is important for hosts that are running both
Masters and Nodes.
"""
if not isinstance(rpms, list):
raise errors.AnsibleFilterError("failed expects to filter on a list")
if openshift_version is not None and not isinstance(openshift_version, string_types):
raise errors.AnsibleFilterError("failed expects openshift_version to be a string")
rpms_31 = []
for rpm in rpms:
if 'atomic' not in rpm:
rpm = rpm.replace("openshift", "atomic-openshift")
if openshift_version:
rpm = rpm + openshift_version
rpms_31.append(rpm)
return rpms_31
def oo_pods_match_component(pods, deployment_type, component):
""" Filters a list of Pods and returns the ones matching the deployment_type and component
"""
if not isinstance(pods, list):
raise errors.AnsibleFilterError("failed expects to filter on a list")
if not isinstance(deployment_type, string_types):
raise errors.AnsibleFilterError("failed expects deployment_type to be a string")
if not isinstance(component, string_types):
raise errors.AnsibleFilterError("failed expects component to be a string")
image_prefix = 'openshift/origin-'
if deployment_type in ['enterprise', 'online', 'openshift-enterprise']:
image_prefix = 'openshift3/ose-'
elif deployment_type == 'atomic-enterprise':
image_prefix = 'aep3_beta/aep-'
matching_pods = []
image_regex = image_prefix + component + r'.*'
for pod in pods:
for container in pod['spec']['containers']:
if re.search(image_regex, container['image']):
matching_pods.append(pod)
break # stop here, don't add a pod more than once
return matching_pods
def oo_get_hosts_from_hostvars(hostvars, hosts):
""" Return a list of hosts from hostvars """
retval = []
for host in hosts:
try:
retval.append(hostvars[host])
except errors.AnsibleError:
# host does not exist
pass
return retval
def oo_image_tag_to_rpm_version(version, include_dash=False):
""" Convert an image tag string to an RPM version if necessary
Empty strings and strings that are already in rpm version format
are ignored. Also remove non semantic version components.
Ex. v3.2.0.10 -> -3.2.0.10
v1.2.0-rc1 -> -1.2.0
"""
if not isinstance(version, string_types):
raise errors.AnsibleFilterError("|failed expects a string or unicode")
if version.startswith("v"):
version = version[1:]
# Strip release from requested version, we no longer support this.
version = version.split('-')[0]
if include_dash and version and not version.startswith("-"):
version = "-" + version
return version
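# Illustrative sketch: _demo_oo_image_tag_to_rpm_version is a hypothetical
# helper mirroring the docstring examples above.
def _demo_oo_image_tag_to_rpm_version():
    assert oo_image_tag_to_rpm_version('v3.2.0.10', include_dash=True) == '-3.2.0.10'
    assert oo_image_tag_to_rpm_version('v1.2.0-rc1') == '1.2.0'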
def oo_hostname_from_url(url):
""" Returns the hostname contained in a URL
Ex: https://ose3-master.example.com/v1/api -> ose3-master.example.com
"""
if not isinstance(url, string_types):
raise errors.AnsibleFilterError("|failed expects a string or unicode")
parse_result = urlparse(url)
if parse_result.netloc != '':
return parse_result.netloc
else:
# netloc wasn't parsed, assume url was missing scheme and path
return parse_result.path
# pylint: disable=invalid-name, unused-argument
def oo_openshift_loadbalancer_frontends(
api_port, servers_hostvars, use_nuage=False, nuage_rest_port=None):
"""TODO: Document me."""
loadbalancer_frontends = [{'name': 'atomic-openshift-api',
'mode': 'tcp',
'options': ['tcplog'],
'binds': ["*:{0}".format(api_port)],
'default_backend': 'atomic-openshift-api'}]
if bool(strtobool(str(use_nuage))) and nuage_rest_port is not None:
loadbalancer_frontends.append({'name': 'nuage-monitor',
'mode': 'tcp',
'options': ['tcplog'],
'binds': ["*:{0}".format(nuage_rest_port)],
'default_backend': 'nuage-monitor'})
return loadbalancer_frontends
# pylint: disable=invalid-name
def oo_openshift_loadbalancer_backends(
api_port, servers_hostvars, use_nuage=False, nuage_rest_port=None):
"""TODO: Document me."""
loadbalancer_backends = [{'name': 'atomic-openshift-api',
'mode': 'tcp',
'option': 'tcplog',
'balance': 'source',
'servers': oo_haproxy_backend_masters(servers_hostvars, api_port)}]
if bool(strtobool(str(use_nuage))) and nuage_rest_port is not None:
# pylint: disable=line-too-long
loadbalancer_backends.append({'name': 'nuage-monitor',
'mode': 'tcp',
'option': 'tcplog',
'balance': 'source',
'servers': oo_haproxy_backend_masters(servers_hostvars, nuage_rest_port)})
return loadbalancer_backends
def oo_chomp_commit_offset(version):
"""Chomp any "+git.foo" commit offset string from the given `version`
and return the modified version string.
Ex:
- chomp_commit_offset(None) => None
- chomp_commit_offset(1337) => "1337"
- chomp_commit_offset("v3.4.0.15+git.derp") => "v3.4.0.15"
- chomp_commit_offset("v3.4.0.15") => "v3.4.0.15"
- chomp_commit_offset("v1.3.0+52492b4") => "v1.3.0"
"""
if version is None:
return version
else:
# Stringify, just in case it's a Number type. Split by '+' and
# return the first split. No concerns about strings without a
# '+', .split() returns an array of the original string.
return str(version).split('+')[0]
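# Illustrative sketch: _demo_oo_chomp_commit_offset is a hypothetical helper
# mirroring the docstring examples above.
def _demo_oo_chomp_commit_offset():
    assert oo_chomp_commit_offset('v3.4.0.15+git.derp') == 'v3.4.0.15'
    assert oo_chomp_commit_offset(1337) == '1337'
    assert oo_chomp_commit_offset(None) is None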
def oo_random_word(length, source='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):
"""Generates a random string of given length from a set of alphanumeric characters.
The default source uses [a-z][A-Z][0-9]
Ex:
- oo_random_word(3) => aB9
    - oo_random_word(4, source='012') => 0120
"""
return ''.join(random.choice(source) for i in range(length))
class FilterModule(object):
""" Custom ansible filter mapping """
# pylint: disable=no-self-use, too-few-public-methods
def filters(self):
""" returns a mapping of filters to methods """
return {
"oo_select_keys": oo_select_keys,
"oo_select_keys_from_list": oo_select_keys_from_list,
"oo_chomp_commit_offset": oo_chomp_commit_offset,
"oo_collect": oo_collect,
"oo_flatten": oo_flatten,
"oo_pdb": oo_pdb,
"oo_prepend_strings_in_list": oo_prepend_strings_in_list,
"oo_ami_selector": oo_ami_selector,
"oo_ec2_volume_definition": oo_ec2_volume_definition,
"oo_combine_key_value": oo_combine_key_value,
"oo_combine_dict": oo_combine_dict,
"oo_dict_to_list_of_dict": oo_dict_to_list_of_dict,
"oo_split": oo_split,
"oo_filter_list": oo_filter_list,
"oo_parse_heat_stack_outputs": oo_parse_heat_stack_outputs,
"oo_parse_named_certificates": oo_parse_named_certificates,
"oo_haproxy_backend_masters": oo_haproxy_backend_masters,
"oo_pretty_print_cluster": oo_pretty_print_cluster,
"oo_generate_secret": oo_generate_secret,
"oo_nodes_with_label": oo_nodes_with_label,
"oo_openshift_env": oo_openshift_env,
"oo_persistent_volumes": oo_persistent_volumes,
"oo_persistent_volume_claims": oo_persistent_volume_claims,
"oo_31_rpm_rename_conversion": oo_31_rpm_rename_conversion,
"oo_pods_match_component": oo_pods_match_component,
"oo_get_hosts_from_hostvars": oo_get_hosts_from_hostvars,
"oo_image_tag_to_rpm_version": oo_image_tag_to_rpm_version,
"oo_merge_dicts": oo_merge_dicts,
"oo_hostname_from_url": oo_hostname_from_url,
"oo_merge_hostvars": oo_merge_hostvars,
"oo_openshift_loadbalancer_frontends": oo_openshift_loadbalancer_frontends,
"oo_openshift_loadbalancer_backends": oo_openshift_loadbalancer_backends,
"to_padded_yaml": to_padded_yaml,
"oo_random_word": oo_random_word
}
|
apache-2.0
| -3,096,275,517,794,424,000
| 40.555556
| 149
| 0.55179
| false
| 4.168072
| false
| false
| false
|
PyPlanet/PyPlanet
|
pyplanet/core/storage/storage.py
|
1
|
3977
|
import asyncio_extras
import os
import importlib
from async_generator import yield_
from pyplanet.conf import settings
from pyplanet.core.storage import StorageDriver, StorageInterface
class Storage(StorageInterface):
"""
The storage component manager is managing the storage access trough drivers that can be customized.
.. warning::
Some drivers are work in progress!
"""
MAP_FOLDER = 'UserData/Maps'
MATCHSETTINGS_FOLDER = 'UserData/Maps/MatchSettings'
def __init__(self, instance, driver: StorageDriver, config):
"""
Initiate storage manager.
:param instance: Instance of the controller.
:param driver: Driver instance, must be init already!
:param config: Storage configuration (including driver + driver config).
:type instance: pyplanet.core.instance.Instance
:type driver: pyplanet.core.storage.interface.StorageDriver
:type config: dict
"""
self._instance = instance
self._driver = driver
self._config = config
self._game = None
# Create temp folders for driver.
self._tmp_root = os.path.join(settings.TMP_PATH, self._instance.process_name)
		self._tmp_driver = os.path.join(self._tmp_root)
@classmethod
def create_from_settings(cls, instance, storage_config):
driver_path, _, driver_cls_name = storage_config['DRIVER'].rpartition('.')
driver_options = storage_config['OPTIONS'] if 'OPTIONS' in storage_config else dict()
driver_cls = getattr(importlib.import_module(driver_path), driver_cls_name)
driver = driver_cls(instance, driver_options)
return cls(instance, driver, storage_config)
async def initialize(self):
self._game = self._instance.game
self._driver.map_dir = self._game.server_map_dir
self._driver.skin_dir = self._game.server_skin_dir
self._driver.data_dir = self._game.server_data_dir
self._driver.base_dir = self._game.server_data_dir[:len(self._game.server_data_dir)-9]
@property
def driver(self):
"""
Get the raw driver. Be careful with this!
:return: Driver Instance
:rtype: pyplanet.core.storage.interface.StorageDriver
"""
return self._driver
@asyncio_extras.async_contextmanager
async def open(self, file: str, mode: str = 'rb', **kwargs):
"""
Open a file on the server. Use relative path to the dedicated root. Use the other open methods to relative
from another base path.
:param file: Filename/path, relative to the dedicated root path.
:param mode: Mode to open, see the python `open` manual for supported modes.
:return: File handler.
"""
context = self._driver.open(file, mode, **kwargs)
await yield_(await context.__aenter__())
await context.__aexit__(None, None, None)
@asyncio_extras.async_contextmanager
async def open_match_settings(self, file: str, mode: str = 'r', **kwargs):
"""
Open a file on the server. Relative to the MatchSettings folder (UserData/Maps/MatchSettings).
:param file: Filename/path, relative to the dedicated matchsettings folder.
:param mode: Mode to open, see the python `open` manual for supported modes.
:return: File handler.
"""
context = self._driver.open('{}/{}'.format(self.MATCHSETTINGS_FOLDER, file), mode, **kwargs)
await yield_(await context.__aenter__())
await context.__aexit__(None, None, None)
@asyncio_extras.async_contextmanager
async def open_map(self, file: str, mode: str = 'rb', **kwargs):
"""
Open a file on the server. Relative to the Maps folder (UserData/Maps).
:param file: Filename/path, relative to the dedicated maps folder.
:param mode: Mode to open, see the python `open` manual for supported modes.
:return: File handler.
"""
context = self._driver.open('{}/{}'.format(self.MAP_FOLDER, file), mode, **kwargs)
await yield_(await context.__aenter__())
await context.__aexit__(None, None, None)
async def remove_map(self, file: str):
"""
Remove a map file with filename given.
:param file: Filename, relative to Maps folder.
"""
await self._driver.remove('{}/{}'.format(self.MAP_FOLDER, file))
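# Illustrative sketch: _demo_read_match_settings is a hypothetical helper
# showing how a caller could read a file through an initialized Storage
# manager; 'maplist.txt' is a made-up file name and the read() call assumes
# the driver hands back an async file handle.
async def _demo_read_match_settings(storage):
	async with storage.open_match_settings('maplist.txt') as fh:
		return await fh.read()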
|
gpl-3.0
| 4,460,439,379,608,528,000
| 33.885965
| 108
| 0.713603
| false
| 3.491659
| true
| false
| false
|
kg-bot/SupyBot
|
plugins/Mailbox/plugin.py
|
1
|
6690
|
###
# Copyright (c) 2005, Jeremiah Fincher
# Copyright (c) 2006, Jon Phillips
# Copyright (c) 2006, Creative Commons
# All rights reserved.
###
import time
import rfc822
import poplib
import textwrap
from cStringIO import StringIO as sio
import supybot.utils as utils
import supybot.world as world
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
from supybot.utils.iter import all
class Mailbox(callbacks.Privmsg):
"""Add the help for "@help Mailbox" here
This should describe *how* to use this plugin."""
"""
Module for checking a POP3 mailbox at a specified interval and posting it
to a specified chat channel.
"""
threaded = True
lastCheck = 0
# This provides a callback to self
def callCommand(self, method, irc, msg, *args, **kwargs):
try:
super(Mailbox, self).callCommand(method, irc, msg, *args, **kwargs)
except utils.web.Error, e:
irc.error(str(e))
def _checkServer(self, irc):
user = self.registryValue('user')
server = self.registryValue('server')
password = self.registryValue('password')
if not server:
raise callbacks.Error, 'There is no configured POP3 server.'
if not user:
raise callbacks.Error, 'There is no configured POP3 user.'
if not password:
raise callbacks.Error, 'There is no configured POP3 password.'
return (server, user, password)
def _connect(self, server, user, password):
pop = poplib.POP3(server)
pop.user(user)
pop.pass_(password)
return pop
def _getPop(self, irc):
return self._connect(*self._checkServer(irc))
def _getMsgs(self, pop):
n = len(pop.list()[1])
for i in range(1, n+1):
(_, lines, _) = pop.retr(i)
yield (i, '\r\n'.join(lines))
def _quit(self, pop, delete=True):
if delete:
n = len(pop.list()[1])
for i in range(1, n+1):
pop.dele(i)
pop.quit()
def __call__(self, irc, msg):
now = time.time()
if now - self.lastCheck > self.registryValue('period'):
try:
try:
t = world.SupyThread(target=self._checkForAnnouncements,
args=(irc,))
t.setDaemon(True)
t.start()
finally:
# If there's an error, we don't want to be checking every
# message.
self.lastCheck = now
except callbacks.Error, e:
self.log.warning('Couldn\'t check mail: %s', e)
except Exception:
self.log.exception('Uncaught exception checking for new mail:')
def _checkForAnnouncements(self, irc):
start = time.time()
self.log.info('Checking mailbox for announcements.')
pop = self._getPop(irc)
i = None
for (i, msg) in self._getMsgs(pop):
message = rfc822.Message(sio(msg))
frm = message.get('From')
if not frm:
self.log.warning('Received message without From header.')
continue
else:
frm = frm.rstrip()
subject = message.get('Subject', '').rstrip()
content = message.fp.read()
self.log.info('Received message with subject %q from %q.',
subject, frm)
if subject == 'all':
channels = list(irc.state.channels)
else:
channels = subject.split()
if not channels or not all(irc.isChannel, channels):
channels = list(self.registryValue('defaultChannels'))
if subject:
content = '%s: %s' % (subject, content)
if not channels:
self.log.info('Received message with improper subject '
'line from %s.', frm)
continue
prefix = self.registryValue('prefix')
content = utils.str.normalizeWhitespace(content)
self.log.info('Making announcement to %L.', channels)
chunks = textwrap.wrap(content, 350)
for channel in channels:
if channel in irc.state.channels:
maximum = self.registryValue('limit', channel)
for chunk in chunks[:maximum]:
s = self._formatChunk(
self._formatPrefix(prefix + " ")+chunk)
irc.queueMsg(ircmsgs.privmsg(channel, s))
prefix = ''
self._quit(pop)
self.log.info('Finished checking mailbox, time elapsed: %s',
utils.timeElapsed(time.time() - start))
# provides formatting for the prefix option
def _formatPrefix(self, s):
fancyprefix = self.registryValue('fancyprefix')
if fancyprefix:
return ircutils.bold(s)
else:
return s
# provides formatting for the email message
def _formatChunk(self, s):
fancystyle = self.registryValue('fancystyle')
if fancystyle:
return ircutils.bold(ircutils.mircColor(s, 'red'))
else:
return s
def check(self, irc, msg, args):
"""takes no arguments
Checks whether email is available at the configured mailbox.
"""
(server, user, password) = self._checkServer(irc)
pop = self._connect(server, user, password)
n = len(pop.list()[1])
irc.reply(format('I have %n waiting for me.', (n, 'message')))
def retrieve(self, irc, msg, args):
"""takes no arguments
Retrieves the emails from the configured mailbox and prints them to
stdout.
"""
(server, user, password) = self._checkServer(irc)
pop = self._connect(server, user, password)
for (_, msg) in self._getMsgs(pop):
print msg
irc.replySuccess()
# this is what is called when one asks supybot about Mailbox
def mailbox(self, irc, msg, args, email):
"""[<email>]
This is where one will get information about a registered email
account <email>.
"""
# copied the next line from the Webopedia plugin
# self._wpBackend(irc, msg, term)
mailbox = wrap(mailbox, [additional('text')])
Class = Mailbox
# vim:set shiftwidth=4 softtabstop=8 expandtab textwidth=78:
|
gpl-3.0
| 6,275,430,813,889,883,000
| 34.210526
| 79
| 0.557848
| false
| 4.247619
| false
| false
| false
|
Tancata/phylo
|
test_for_lgt_more_groups.py
|
1
|
6655
|
from ete3 import Tree, TreeStyle
import sys, re
#read in the bootstrapped consensus tree from one of Cedric's families. Ask whether the candidate LGT has phylogenetic support at some bootstrap threshold by checking various tree-based criteria for LGTs
#Arguments: treefile target_sequence_tag
#euk_supergroups = ['Viridiplantae','Oxymonadida','Alveolata'] #add more...
euk_supergroups = []
inh = open("List_that_matters.txt")
for line in inh:
euk_supergroups.append(line.rstrip())
inh.close()
#check tree string for sanity first
inh = open(sys.argv[1])
treestring = inh.readline()
inh.close()
if len(treestring.strip()) == 0:
    print sys.argv[1] + "\tEmpty tree"
    quit()
treestr = treestring.replace(';','')
treestr = treestr + ";"
tree = Tree(treestr)
out_tree = sys.argv[1] + ".pdf"
#target_sequence_tag = sys.argv[2]
target_sequence_tag = 'xxx'
#setup group assignments
group_assignments = {}
inh = open("Annotation_file_for_trees.txt")
for line in inh:
fields = re.split("\s+", line.rstrip())
if len(fields) >= 2:
group_assignments[fields[0]] = fields[1] #key = sequence ID, value = group assignment (e.g. Viridiplantae)
#setup a list of the eukaryotic sequences in the tree
eukaryote_seqs = []
target_leaf = ''
for node in tree:
node.add_features(domain="Other")
for leaf in tree:
if re.search(target_sequence_tag, leaf.name):
leaf.add_features(domain="Eukaryote")
eukaryote_seqs.append(leaf.name)
target_leaf = leaf
elif leaf.name in group_assignments:
if group_assignments[leaf.name] in euk_supergroups:
eukaryote_seqs.append(leaf.name)
leaf.add_features(domain="Eukaryote")
else:
leaf.add_features(domain="Other")
else:
leaf.add_features(domain="Other")
#print eukaryote_seqs
#root the tree on a clade (the biggest?) of bacteria, to avoid ridiculous problems with arbitrary roots on trees
biggest_other_node = 0
for node in tree.get_monophyletic(values=['Other'], target_attr="domain"):
if len(node) > biggest_other_node:
biggest_other_node = len(node)
tree.set_outgroup(node)
#test the various phylogenetic criteria for LGT.
print "Tree\tResult\tEuksInTree\tSupportEukMonophyly\tEuksInTargetGroup\tDistanceToClosestEukClade\tSupergroupsInTargetGroup"
#euk sequence is a singleton nested within a clade of bacteria, and there is only one eukaryote sequence in the tree
if len(eukaryote_seqs) == 1: #this is, I guess, an LGT candidate
print sys.argv[1] + "\tSingleton\t1\tN/A\tN/A\tN/A\t1"
#euk sequence is a singleton nested within a clade of bacteria, and the eukaryotes are not monophyletic in the tree
#print len(eukaryote_seqs)
else:
try:
answer = tree.check_monophyly(values=eukaryote_seqs, target_attr="name")
if answer[0] == True:
ca = tree.get_common_ancestor(eukaryote_seqs)
target_group_sgs = {}
for leaf in ca:
if leaf.name in group_assignments:
leaf_supergroup = group_assignments[leaf.name]
if leaf_supergroup in euk_supergroups:
target_group_sgs[leaf_supergroup] = 1
else:
print "Warning: a sequence in this tree doesn't have a supergroup assignment: " + str(leaf.name)
num_sgs = len(target_group_sgs.keys())
print sys.argv[1] + "\tEuks monophyletic\t" + str(len(eukaryote_seqs)) + "\t" + str(ca.support) + "\tN/A\tN/A\t" + str(num_sgs)
elif answer[0] == False:
mono_groups = []
target_group = ''
for node in tree.get_monophyletic(values=['Eukaryote'], target_attr="domain"):
for leaf in node:
if leaf.name == target_leaf.name:
target_group = node
else:
mono_groups.append(node)
size_target_group = len(target_group)
#get distance
shortest_distance = 999999999999999.0
closest_other_group = ''
for subtree in mono_groups:
curr_distance = tree.get_distance(target_group, subtree, topology_only=True)
if curr_distance < shortest_distance:
shortest_distance = curr_distance
closest_other_group = subtree
#find out what supergroups of eukaryotes are represented in the target group
target_group_sgs = {}
tg_names = []
for leaf in target_group:
tg_names.append(leaf.name)
if leaf.name in group_assignments:
leaf_supergroup = group_assignments[leaf.name]
if leaf_supergroup in euk_supergroups:
target_group_sgs[leaf_supergroup] = 1
else:
print "Warning: a sequence in this tree doesn't have a supergroup assignment: " + str(leaf.name)
num_sgs = len(target_group_sgs.keys())
print tg_names
c_a = tree.get_common_ancestor(tg_names)
#attempt to calculate distance on a version of the tree in which branches below some support threshold have been deleted
# closest_leaves = []
# for leaf in closest_other_group:
# closest_leaves.append(leaf.name)
# target_leaves = []
# for leaf in target_group:
# target_leaves.append(leaf.name)
# collapsed_tree = tree
# for node in collapsed_tree:
# if node.support < 0.5:
# node.delete()
# target_ca = collapsed_tree.get_common_ancestor(target_leaves)
# closest_ca = collapsed_tree.get_common_ancestor(closest_leaves)
# collapsed_distance = collapsed_tree.get_distance(target_ca, closest_ca, topology_only=True)
print sys.argv[1] + "\tEuks not monophyletic\t" + str(len(eukaryote_seqs)) + "\t" + str(c_a.support) + "\t" + str(size_target_group) + "\t" + str(shortest_distance) + "\t" + str(num_sgs)
else:
print sys.argv[1] + "\t" + answer[0]
#If euks are monophyletic, what is the max. number allowed for the gene to be considered a candidate LGT?
#euk sequence is part of a euk clade nested within bacteria, and the eukaryotes are not monophyletic in the tree [what about the case where the LGT is the only copy in euks?]
#tree.render(out_tree)
except:
raise
#uncomment the following to make a PDF of the tree
ts = TreeStyle()
ts.show_leaf_name = True
ts.show_branch_support = True
ts.show_branch_length = False
tree.render(out_tree, tree_style=ts)
|
mit
| -7,968,757,924,115,821,000
| 43.66443
| 203
| 0.6284
| false
| 3.46976
| false
| false
| false
|
taimur97/Feeder
|
server/flaskapp/feeder/rest.py
|
1
|
7265
|
# -*- coding: utf-8 -*-
'''
The REST-API of Feeder
'''
from feeder import app
from .database import db
from .models import (Feed, FeedItem, UserFeed, UserDeletion,
get_user, get_feed, get_userfeed)
#from flask_oauthlib.client import OAuth
from flask.ext.restful import (Resource, Api, reqparse, fields,
marshal_with)
from .util import parse_timestamp, datetime_to_string
from .sync import cache_feed
from .gauth import authorized
from datetime import datetime, timedelta
# Configure some logging
import logging
file_handler = logging.FileHandler('rest.log')
app.logger.addHandler(file_handler)
app.logger.setLevel(logging.INFO)
# Want a boolean class
class FieldBool(fields.Raw):
def format(self, value):
if value:
return 'true'
else:
return 'false'
# Parse dates properly
class FieldDateTime(fields.Raw):
def format(self, value):
if value is None:
return None
return datetime_to_string(value)
# Set up the REST API
api = Api(app)
# Set up argument parsers
## Listing feeds
getparser = reqparse.RequestParser()
getparser.add_argument('min_timestamp', type=str, required=False,
help='Timestamp to filter on (only newer)')
getparser.add_argument('link', type=str, required=False, action='append',
help='Url(s) to limit query for')
## Adding feed
postparser = reqparse.RequestParser()
postparser.add_argument('link', type=str, required=True,
help='URL to the feed')
postparser.add_argument('title', type=str, required=False,
help='Title of feed')
postparser.add_argument('tag', type=str, required=False,
help='Tag to categorize feed under')
## Deleting a feed
deleteparser = reqparse.RequestParser()
deleteparser.add_argument('link', type=str, required=True,
help='URL of the feed to delete')
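# Rough sketch of the requests these parsers accept (host, URLs and the
# timestamp format are illustrative only):
#
#   GET  /feeds?link=http://example.com/rss&min_timestamp=2014-01-01T00:00:00
#   POST /feeds          link=http://example.com/rss&title=Example&tag=news
#   POST /feeds/delete   link=http://example.com/rss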
# Set up return value marshallers
## Get
### Single feed item
feeditem_fields = {
'title': fields.String,
'description': fields.String,
'link': fields.String,
'title_stripped': fields.String,
'snippet': fields.String,
'published': FieldDateTime,
'author': fields.String,
'comments': fields.String,
'enclosure': fields.String,
'tags': fields.List(fields.String),
'image': fields.String,
'read': FieldBool(default=False),
'json': fields.String
}
### Single feed with a possible list of items
feed_fields = {
'link': fields.String,
'title': fields.String,
'description': fields.String,
'published': FieldDateTime,
'tag': fields.String,
'timestamp': FieldDateTime,
'items': fields.List(fields.Nested(feeditem_fields))
}
### Single delete
delete_fields = {
'link': fields.String,
'timestamp': FieldDateTime
}
### Response with list of feeds, and list of deletes
feeds_response = {
'feeds': fields.List(fields.Nested(feed_fields)),
'deletes': fields.List(fields.Nested(delete_fields))
}
def log_errors(f):
'''Log errors in the wrapped function and re-raise them.'''
def wrapped_f(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
print(e)
app.logger.error(str(e))
raise e
return wrapped_f
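# Minimal usage sketch of the decorator on its own (names illustrative):
#
#   @log_errors
#   def boom():
#       raise ValueError("bad")   # printed, logged via app.logger, re-raised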
class Feeds(Resource):
'''
This class is the entire REST-interface for dealing with feeds.
'''
@log_errors
@marshal_with(feeds_response)
@authorized
def get(self, userid):
'''Return all feeds'''
args = getparser.parse_args()
print("Getting user")
user = get_user(userid)
# Query for feeds using lazy relationship
q = user.feeds
dt = None
# Filters
if args['link'] is not None:
            urls = list(args['link'])
q = q.filter(Feed.link.in_(urls))
if args['min_timestamp'] is not None:
dt = parse_timestamp(args['min_timestamp'])
            # Require a timestamp. If one was not provided in decent form,
            # default to x days ago
if dt is None:
dt = datetime.utcnow() - timedelta(days=7)
q = q.filter(Feed.timestamp > dt)
feeds = q.all()
for f in feeds:
# Make sure to only return items with correct timestamp
# Set the items on the outer object
if dt is None:
f.items = f.feed.items
else:
f.items = FeedItem.query.filter(FeedItem.timestamp > dt,
FeedItem.feed_id == f.feed.id).all()
# If we have a timestamp, also return deletes done
if args['min_timestamp'] is None:
deletes = []
else:
q = UserDeletion.query.filter(UserDeletion.timestamp > dt)
deletes = q.all()
return {"feeds": feeds, "deletes": deletes}
@log_errors
@marshal_with(feed_fields)
@authorized
def post(self, userid):
'''Add new/Edit feed'''
user = get_user(userid)
args = postparser.parse_args()
# Make sure feed exists
feed, new = get_feed(args.link, indicate_new=True)
if new:
cache_feed(feed)
# Set link between user and feed
userfeed = get_userfeed(user, feed, args.tag, args.title)
# Remove possible deletes
UserDeletion.query.\
filter_by(user_id=user.id).\
filter_by(link=feed.link).\
delete()
# If we should update tag or title
if userfeed.tag != args.tag or userfeed.title != args.title:
userfeed.tag = args.tag
userfeed.title = args.title
db.session.add(userfeed)
# Else, already saved
db.session.commit()
# TODO limit number of items instead of time
# TODO include read information
dt = datetime.utcnow() - timedelta(days=1)
userfeed.items = FeedItem.query.filter(FeedItem.timestamp > dt,
FeedItem.feed_id == feed.id)\
.all()
# Return feed
return userfeed
class FeedsDeleter(Resource):
@log_errors
@authorized
def post(self, userid):
'''Delete a feed'''
user = get_user(userid)
args = deleteparser.parse_args()
feed = Feed.query.filter_by(link=args.link).first()
if feed is None:
app.logger.error("No such feed: {}".format(args.link))
return None, 404
# Store delete for other devices
ud = UserDeletion(user, feed)
db.session.add(ud)
# Perform delete
UserFeed.query.\
filter_by(user_id=user.id).\
filter_by(feed_id=feed.id).\
delete()
db.session.commit()
return None, 204
class PingResponder(Resource):
'''
A method that allows the app to query if the server is alive.
'''
@log_errors
def get(self):
return {}, 200
# Connect with API URLs
api.add_resource(Feeds, '/feeds')
api.add_resource(FeedsDeleter, '/feeds/delete')
api.add_resource(PingResponder, '/ping')
|
gpl-2.0
| 6,649,980,418,391,360,000
| 27.490196
| 84
| 0.591053
| false
| 3.954818
| false
| false
| false
|
sparrow242/demandfs
|
demandfs/demandfs.py
|
1
|
13810
|
#!/usr/bin/env python
"""
demandfs.py - mount and umount sources on demand
Copyright (C) 2013 Sebastian Meyer <s.meyer@drachenjaeger.eu>
Based upon the the xmp.py-FS Example in the fuse-python distribtion:
Copyright (C) 2001 Jeff Epler <jepler@unpythonic.dhs.org>
Copyright (C) 2006 Csaba Henk <csaba.henk@creo.hu>
http://sourceforge.net/p/fuse/fuse-python/ci/master/tree/example/xmp.py
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.
"""
import errno
import fcntl
import subprocess
import sys
import threading
import time
import os
try:
import fuse
except ImportError as e:
print "Can't import the python fuse module."
print "If you use Linux, take a look into your repositories."
print "Mostly the package is known as python-fuse or fuse-python."
sys.exit(2)
fuse.fuse_python_api = (0, 2)
TIMER_CHECK_SECONDS = 30 # interval for the timer to check the fs for idle
STATE_LOCK = threading.Lock() # Lock to protect the mount-state of the fs
BACKDIR = None # Necessary global for the path to the backdir
VERBOSE = False
def verbose(message):
"""
Will print message only if VERBOSE is True
"""
if VERBOSE:
print message
class Timer(threading.Thread):
"""
Timer will check the idle-state of the Filesystem every
TIMER_CHECK_SECONDS seconds
"""
def __init__(self, dfs):
""" dfs: the instance of the DemandFileSystem """
threading.Thread.__init__(self)
self.dfs = dfs
self.run_thread = True
self.timer_event = threading.Event()
def run(self):
""" Thread loop to check the idle-state of the Filesystem """
while self.run_thread:
verbose("Timer checks for idle...")
STATE_LOCK.acquire()
            if (self.dfs.backdir_is_mounted
                    and self.dfs.last_activity + self.dfs.timeout < time.time()):
                self.dfs.umount_backdir()
STATE_LOCK.release()
self.timer_event.wait(TIMER_CHECK_SECONDS)
class DemandFS(fuse.Fuse):
"""
A Fuse-Layer between a mountpoint (where the FS is mounted) and another
directory (given as option backdir).
    Every request will reset the idle timer.
"""
def __init__(self, *args, **kw):
fuse.Fuse.__init__(self, *args, **kw)
self.backdir = None
self.timeout = 60
self.mountscript = None
self.umountscript = None
self.backdir_is_mounted = False
self.last_activity = time.time()
self.verbose = False
self.timer = None
def fsinit(self, *args):
self.timer = Timer(self)
self.timer.start()
def fsdestroy(self, *args):
verbose("fsdestroy called with args:" % args)
self.umount_backdir()
self.timer.run_thread = False
self.timer.timer_event.set()
def mount_backdir(self):
"""
        Be sure you have acquired the STATE_LOCK before calling this!
        Calls the script to mount the backdir. If the script returns a value
        != 0 we expect the backdir is not available.
"""
ret = self.run_script(self.mountscript)
if ret == 0:
self.backdir_is_mounted = True
def run_script(self, path):
""" Call this to run an external script """
try:
verbose("in try, want to run: %s " % path)
subprocess.check_output(path, stderr=subprocess.STDOUT)
#TODO: Log output here
return 0
except subprocess.CalledProcessError as e:
print "External script failed"
return e.returncode
def trigger_activity(self):
"""
        Called every time the filesystem is working. It mounts the
        backdir if it is not mounted and renews the last_activity timestamp.
"""
STATE_LOCK.acquire()
if not self.backdir_is_mounted:
self.mount_backdir()
if not self.backdir_is_mounted:
STATE_LOCK.release()
return False
self.last_activity = time.time()
STATE_LOCK.release()
return True
def umount_backdir(self):
"""
        Be sure you have acquired the STATE_LOCK before calling this!
        Calls the script to unmount the backdir. A return value of 0 means
        the backdir was unmounted; any other value means we assume it is
        still mounted, and the unmount is retried on the next idle check.
"""
if self.backdir_is_mounted:
ret = self.run_script(self.umountscript)
if ret == 0:
self.backdir_is_mounted = False
else:
# TODO: Log failure
print "Can't unmount the backdir"
# Methods for filesystem-operations:
def getattr(self, path):
verbose("gettattr path: %s" % path)
        # Don't call the mountscript if the path is the root dir itself:
        # an "ls" in the parent directory would otherwise trigger the mount.
if path == "/":
return os.lstat(self.backdir + path)
elif self.trigger_activity():
return os.lstat(self.backdir + path)
else:
return -errno.EIO
def readlink(self, path):
verbose("readlink path: %s" % path)
if self.trigger_activity():
return os.readlink(self.backdir + path)
else:
return -errno.EIO
def readdir(self, path, offset):
verbose("readdir path offst: %s %s" % (path, offset))
if not self.trigger_activity():
yield -errno.EIO
for e in os.listdir(self.backdir + path):
yield fuse.Direntry(e)
def unlink(self, path):
verbose("unlink path: %s" % path)
if self.trigger_activity():
os.unlink(self.backdir + path)
else:
return -errno.EIO
def rmdir(self, path):
verbose("rmdir: %s" % path)
if self.trigger_activity():
os.rmdir(self.backdir + path)
else:
return -errno.EIO
def symlink(self, path, path1):
verbose("symlink: %s %s" % (path, path1))
if self.trigger_activity():
os.symlink(path, self.backdir + path1)
else:
return -errno.EIO
def rename(self, path, path1):
verbose("rename path, path1: %s %s" % (path, path1))
if self.trigger_activity():
os.rename(self.backdir + path, self.backdir + path1)
else:
return -errno.EIO
def link(self, path, path1):
verbose("link path, path1): %s %s" % (path, path1))
if self.trigger_activity():
os.link(self.backdir + path, self.backdir + path1)
else:
return -errno.EIO
def chmod(self, path, mode):
verbose("chmod path, mode: %s %s" % (path, mode))
if self.trigger_activity():
os.chmod(self.backdir + path, mode)
else:
return -errno.EIO
def chown(self, path, user, group):
verbose("chown, path, user, group: %s %s %s" % (path, user, group))
if self.trigger_activity():
os.chown(self.backdir + path, user, group)
else:
return -errno.EIO
def truncate(self, path, len):
verbose("truncate: %s %s" % (path, len))
if self.trigger_activity():
f = open(self.backdir + path, "a")
f.truncate(len)
f.close()
else:
return -errno.EIO
def mknod(self, path, mode, dev):
verbose("mknot path, mode, dev: %s %s %s" % (path, mode, dev))
if self.trigger_activity():
os.mknod(self.backdir + path, mode, dev)
else:
return -errno.EIO
def mkdir(self, path, mode):
verbose("mkdir path, mode: %s %s" % (path, mode))
if self.trigger_activity():
os.mkdir(self.backdir + path, mode)
else:
return -errno.EIO
def utime(self, path, times):
verbose("utime path, times: %s %s" % (path, times))
if self.trigger_activity():
os.utime(self.backdir + path, times)
else:
return -errno.EIO
def access(self, path, mode):
verbose("access path, mode: %s %s" % (path, mode))
if self.trigger_activity():
if not os.access(self.backdir + path, mode):
                return -errno.EACCES
else:
return -errno.EIO
class DemandFile(object):
def __init__(self, path, flags, *mode):
self.keep_cache = False
self.direct_io = False
path = BACKDIR + path
verbose("init file with path: %s" % path)
self.file = os.fdopen(os.open(path, flags, *mode),
self.flag2mode(flags))
self.fd = self.file.fileno()
def flag2mode(self, flags):
md = {os.O_RDONLY: 'r', os.O_WRONLY: 'w', os.O_RDWR: 'w+'}
m = md[flags & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR)]
        if flags & os.O_APPEND:
m = m.replace('w', 'a', 1)
return m
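    # Mapping sketch (with O_APPEND tested via bitwise AND, as above):
    #
    #   flag2mode(os.O_RDONLY)               -> 'r'
    #   flag2mode(os.O_WRONLY)               -> 'w'
    #   flag2mode(os.O_RDWR)                 -> 'w+'
    #   flag2mode(os.O_WRONLY | os.O_APPEND) -> 'a'
    #   flag2mode(os.O_RDWR   | os.O_APPEND) -> 'a+'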
def read(self, length, offset):
verbose("file read length, offset: %s %s" % (length, offset))
if self.trigger_activity():
self.file.seek(offset)
return self.file.read(length)
else:
return -errno.EIO
def write(self, buf, offset):
verbose("file write buf, offset: %s %s" % (buf, offset))
if self.trigger_activity():
self.file.seek(offset)
self.file.write(buf)
return len(buf)
else:
return -errno.EIO
def release(self, flags):
verbose("file release flags: %s" % flags)
if self.trigger_activity():
self.file.close()
else:
return -errno.EIO
def _fflush(self):
verbose("_fflush!")
if self.trigger_activity():
if 'w' in self.file.mode or 'a' in self.file.mode:
self.file.flush()
else:
return -errno.EIO
def fsync(self, isfsyncfile):
verbose("file fsync isfsyncfile %s:" % isfsyncfile)
if self.trigger_activity():
self._fflush()
if isfsyncfile and hasattr(os, 'fdatasync'):
os.fdatasync(self.fd)
else:
os.fsync(self.fd)
else:
return -errno.EIO
def flush(self):
verbose("file flush")
if self.trigger_activity():
self._fflush()
os.close(os.dup(self.fd))
else:
return -errno.EIO
def fgetattr(self):
verbose("file fgetattr")
if self.trigger_activity():
return os.fstat(self.fd)
else:
return -errno.EIO
def ftruncate(self, len):
verbose("file ftruncate len: %s" % len)
if self.trigger_activity():
self.file.truncate(len)
else:
return -errno.EIO
def lock(self, cmd, owner, **kw):
verbose("file lock cmd, owner: %s %s" % (cmd, owner))
if self.trigger_activity():
op = { fcntl.F_UNLCK : fcntl.LOCK_UN,
fcntl.F_RDLCK : fcntl.LOCK_SH,
fcntl.F_WRLCK : fcntl.LOCK_EX }[kw['l_type']]
if cmd == fcntl.F_GETLK:
                return -errno.EOPNOTSUPP
elif cmd == fcntl.F_SETLK:
if op != fcntl.LOCK_UN:
op |= fcntl.LOCK_NB
elif cmd == fcntl.F_SETLKW:
pass
else:
return -errno.EINVAL
fcntl.lockf(self.fd, op, kw['l_start'], kw['l_len'])
else:
return -errno.EIO
def main(self, *a, **kw):
self.file_class = self.DemandFile
self.file_class.trigger_activity = self.trigger_activity
return fuse.Fuse.main(self, *a, **kw)
if __name__ == "__main__":
dfs = DemandFS()
dfs.flags = 0
dfs.multithreaded = 1
dfs.parser.add_option(mountopt="backdir", metavar="PATH",
help="path to the backdir.")
dfs.parser.add_option(mountopt="timeout", metavar="SEC",
help="timeout in sec. before unmount the backdir")
dfs.parser.add_option(mountopt="mountscript", metavar="PATH",
help="path to the script which do the mount")
dfs.parser.add_option(mountopt="umountscript", metavar="PATH",
help="path to the script which do the unmount")
dfs.parser.add_option(mountopt="verbose", metavar="True/False",
default=False, help="Activate verbose mode")
dfs.parse(values=dfs, errex=1)
if isinstance(dfs.verbose, str) and dfs.verbose.lower() == "true":
dfs.verbose = True
VERBOSE = True
dfs.timeout = int(dfs.timeout)
BACKDIR = dfs.backdir
dfs.main()
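# Example invocation (mountpoint, backdir and script paths are hypothetical):
#
#   python demandfs.py /mnt/ondemand -o backdir=/mnt/backing,timeout=120,\
#       mountscript=/usr/local/bin/mount-backing.sh,\
#       umountscript=/usr/local/bin/umount-backing.sh,verbose=True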
|
gpl-3.0
| -5,716,152,501,423,935,000
| 32.933661
| 79
| 0.547791
| false
| 3.878124
| false
| false
| false
|
googleapis/googleapis-gen
|
google/cloud/dialogflow/cx/v3beta1/dialogflow-cx-v3beta1-py/google/cloud/dialogflowcx_v3beta1/types/fulfillment.py
|
1
|
7242
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.dialogflowcx_v3beta1.types import response_message
from google.protobuf import struct_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.dialogflow.cx.v3beta1',
manifest={
'Fulfillment',
},
)
class Fulfillment(proto.Message):
r"""A fulfillment can do one or more of the following actions at the
same time:
- Generate rich message responses.
- Set parameter values.
- Call the webhook.
Fulfillments can be called at various stages in the
[Page][google.cloud.dialogflow.cx.v3beta1.Page] or
[Form][google.cloud.dialogflow.cx.v3beta1.Form] lifecycle. For
example, when a
[DetectIntentRequest][google.cloud.dialogflow.cx.v3beta1.DetectIntentRequest]
drives a session to enter a new page, the page's entry fulfillment
can add a static response to the
[QueryResult][google.cloud.dialogflow.cx.v3beta1.QueryResult] in the
returning
[DetectIntentResponse][google.cloud.dialogflow.cx.v3beta1.DetectIntentResponse],
call the webhook (for example, to load user data from a database),
or both.
Attributes:
messages (Sequence[google.cloud.dialogflowcx_v3beta1.types.ResponseMessage]):
The list of rich message responses to present
to the user.
webhook (str):
The webhook to call. Format:
``projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>/webhooks/<Webhook ID>``.
return_partial_responses (bool):
Whether Dialogflow should return currently
queued fulfillment response messages in
streaming APIs. If a webhook is specified, it
happens before Dialogflow invokes webhook.
Warning:
1) This flag only affects streaming API.
Responses are still queued and returned once in
non-streaming API.
2) The flag can be enabled in any fulfillment
but only the first 3 partial responses will be
returned. You may only want to apply it to
fulfillments that have slow webhooks.
tag (str):
The tag used by the webhook to identify which fulfillment is
being called. This field is required if ``webhook`` is
specified.
set_parameter_actions (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.SetParameterAction]):
Set parameter values before executing the
webhook.
conditional_cases (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases]):
Conditional cases for this fulfillment.
"""
class SetParameterAction(proto.Message):
r"""Setting a parameter value.
Attributes:
parameter (str):
Display name of the parameter.
value (google.protobuf.struct_pb2.Value):
The new value of the parameter. A null value
clears the parameter.
"""
parameter = proto.Field(
proto.STRING,
number=1,
)
value = proto.Field(
proto.MESSAGE,
number=2,
message=struct_pb2.Value,
)
class ConditionalCases(proto.Message):
r"""A list of cascading if-else conditions. Cases are mutually
exclusive. The first one with a matching condition is selected,
all the rest ignored.
Attributes:
cases (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases.Case]):
A list of cascading if-else conditions.
"""
class Case(proto.Message):
r"""Each case has a Boolean condition. When it is evaluated to be
True, the corresponding messages will be selected and evaluated
recursively.
Attributes:
condition (str):
The condition to activate and select this case. Empty means
the condition is always true. The condition is evaluated
against [form parameters][Form.parameters] or [session
parameters][SessionInfo.parameters].
See the `conditions
reference <https://cloud.google.com/dialogflow/cx/docs/reference/condition>`__.
case_content (Sequence[google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases.Case.CaseContent]):
A list of case content.
"""
class CaseContent(proto.Message):
r"""The list of messages or conditional cases to activate for
this case.
Attributes:
message (google.cloud.dialogflowcx_v3beta1.types.ResponseMessage):
Returned message.
additional_cases (google.cloud.dialogflowcx_v3beta1.types.Fulfillment.ConditionalCases):
Additional cases to be evaluated.
"""
message = proto.Field(
proto.MESSAGE,
number=1,
oneof='cases_or_message',
message=response_message.ResponseMessage,
)
additional_cases = proto.Field(
proto.MESSAGE,
number=2,
oneof='cases_or_message',
message='Fulfillment.ConditionalCases',
)
condition = proto.Field(
proto.STRING,
number=1,
)
case_content = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='Fulfillment.ConditionalCases.Case.CaseContent',
)
cases = proto.RepeatedField(
proto.MESSAGE,
number=1,
message='Fulfillment.ConditionalCases.Case',
)
messages = proto.RepeatedField(
proto.MESSAGE,
number=1,
message=response_message.ResponseMessage,
)
webhook = proto.Field(
proto.STRING,
number=2,
)
return_partial_responses = proto.Field(
proto.BOOL,
number=8,
)
tag = proto.Field(
proto.STRING,
number=3,
)
set_parameter_actions = proto.RepeatedField(
proto.MESSAGE,
number=4,
message=SetParameterAction,
)
conditional_cases = proto.RepeatedField(
proto.MESSAGE,
number=5,
message=ConditionalCases,
)
__all__ = tuple(sorted(__protobuf__.manifest))
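# Minimal construction sketch (field values are placeholders, not a working
# webhook path):
#
#   fulfillment = Fulfillment(
#       messages=[response_message.ResponseMessage()],
#       webhook='projects/-/locations/-/agents/-/webhooks/-',
#       tag='example-tag',
#   )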
|
apache-2.0
| -547,233,568,092,019,840
| 35.39196
| 127
| 0.607705
| false
| 4.57197
| false
| false
| false
|
sampathweb/game_app
|
card_games/play_blackjack.py
|
1
|
1843
|
#!/usr/bin/env python
from __future__ import print_function
from blackjack import BlackJack
def play_blackjack(player):
game = BlackJack()
while True:
print('Your Hand %s is of value %d' % (game.player_hand, game.player_hand_value()))
action = raw_input('Enter: hit (1), stand (2) or split (3) or help (h): ').upper()
if action == '2': # Stand
result = game.game_result()
print('Dealer Hand %s is of value %d' % (game.dealer_hand, game.dealer_hand_value()))
print('Result is: ', result)
print('Round Over.')
return result
elif action == '1': # Hit
game.draw_card_player()
elif action == 'H': # Help
print('Your Hand Score is: ', game.player_hand_value())
print('You can Hit (1): Draw one more card to see if you get closer to 21, but not higher.')
print('You can Stand (2): Compare your current hand value with Dealer hand value to see if you scored higher, but still 21 or below.')
print('You can Split (3): ')
print('You can double down (4): ')
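# Note: play_blackjack returns whatever game.game_result() yields; the
# bookkeeping below assumes it is one of the keys 'won', 'lost', 'push'
# or 'bust' initialised in the player dict.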
if __name__ == '__main__':
player = {}
player['chips'] = 100
player['round'] = 0
player['won'] = 0
player['lost'] = 0
player['push'] = 0
player['bust'] = 0
play = 'Y'
print('Welcome to BlackJack')
print('-' * 20)
    print('You have 100 Chips to play this game. On each round, you will have to pitch at least one chip. You can wager more.')
while play != 'N':
play = raw_input('Play a round of BlackJack (Y/N)? ').upper()
chips = raw_input('How many chips do you wager? (min 1, max %d): ' % player['chips'])
if play.upper() == 'Y':
player['round'] += 1
result = play_blackjack(player)
player[result] += 1
|
mit
| 2,510,335,775,484,046,300
| 40.886364
| 146
| 0.558871
| false
| 3.571705
| false
| false
| false
|
chakki-works/arXivTimesIndicator
|
main.py
|
1
|
1791
|
import os
from PIL import Image
from arxivtimes_indicator.data.github import filter_issue_by_ym, fetch_issues, get_icon_url, tally_by_labels, tally_by_users
from arxivtimes_indicator.data.twitter import fetch_tweets, rank_paper
from arxivtimes_indicator.data.utils import download, break_line, std_score
from arxivtimes_indicator.visualization.visualize import save_bar_graph, save_graph_with_icon, save_text_graph
TEMPORARY = 'data'
REPORT = 'reports'
def fetch_images(user_names, issues):
images_urls = [get_icon_url(user_name, issues) for user_name in user_names]
image_paths = [os.path.join(TEMPORARY, '{}.png'.format(name)) for name in user_names]
[download(url, path) for url, path in zip(images_urls, image_paths)]
images = [Image.open(p) for p in image_paths]
return images
def main():
# Fetch Issues
issues = fetch_issues()
# Process Issues
filtered_issues = filter_issue_by_ym(issues)
label_names, label_counts = tally_by_labels(filtered_issues)
user_names, user_counts = tally_by_users(filtered_issues)
images = fetch_images(user_names, issues)
# Save label and user graph
label_fig_path = os.path.join(REPORT, 'labels.png')
users_fig_path = os.path.join(REPORT, 'users.png')
label_names = break_line(label_names)
save_bar_graph(label_names, label_counts, label_fig_path)
save_graph_with_icon(list(range(len(user_names))), user_counts, images, users_fig_path)
# Fetch tweets
tweets = fetch_tweets()
# Process tweets
n = 10 # number of top papers
scores, titles = rank_paper(tweets)
scores, titles = scores[:n], titles[:n]
# Save paper rank graph
path = os.path.join(REPORT, 'rank.png')
save_text_graph(titles, scores, path)
if __name__ == '__main__':
main()
|
apache-2.0
| -7,062,447,111,258,028,000
| 35.571429
| 124
| 0.698492
| false
| 3.192513
| false
| false
| false
|
wkew/FTMSVisualization
|
3-HeteroClassPlotter.py
|
1
|
10441
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 22 11:42:36 2016
@author: Will Kew
will.kew@gmail.com
Copyright Will Kew, 2016
This file is part of FTMS Visualisation (also known as i-van Krevelen).
FTMS Visualisation is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
FTMS Visualisation is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with FTMS Visualisation. If not, see <http://www.gnu.org/licenses/>.
This script will read in an assigned peaklist (example input file included) and calculate the heteroatomic class distribution.
The output is a vertical bar plot of heteroatomic class versus count. You can also have the calculated numbers output in a format for replotting.
This tool uses Seaborn - http://seaborn.pydata.org/
A number of (partially tested) other functions to plot output are included, though commented out.
This tool was used in our recent paper on Scotch Whisky - https://link.springer.com/article/10.1007/s13361-016-1513-y
The prompt for the user about whisky samples is thus borne from this - it also serves as an example of how to customise which classes to include.
"""
from __future__ import print_function # Python 2 compatibility
from __future__ import absolute_import # Python 2 compatibility
import os, sys
import pandas as pd
from collections import Counter
import matplotlib.pyplot as plt
import seaborn as sns
"""
# We also import the FTMSVizProcessingModule, which contains a few useful functions.
# Here we define where the scripts are stored.
# Make sure to change this to where you have saved these scripts.
"""
try: #test if running in ipython
__IPYTHON__
except NameError: #if not running in ipython....
import FTMSVizProcessingModule as FTPM
path = os.getcwd()+"data\\" #example data location
else: #if running in ipython
scriptlocation = "/LOCAL/FTMSVis/FTMSVisualization-master/"
sys.path.append(scriptlocation)
import FTMSVizProcessingModule as FTPM
path = "/LOCAL/FTMSVis/data/"
whisky = input("Are these Whisky samples - Y or N?" )
if whisky.upper() == "Y":
whisky = True
else:
whisky = False
inputpath = path +"OutputCSV/"
outputpath = path + "Images/Classes/"
FTPM.make_sure_path_exists(outputpath) #this function checks the output directory exists; if it doesn't, it creates it.
print("Looking for CSVs in " + inputpath)
filesA = os.listdir(inputpath)
filesB = []
for y in filesA:
if y[-8:] =="hits.csv" and y[-10:] != "nohits.csv" and y[-11:] !="isohits.csv":
filesB.append(y)
nfiles = len(filesB)
samplenames=[]
for x in filesB:
samplenames.append(x[:-9])
heteroclasses=[]
for z in filesB:
df1 = pd.read_csv(inputpath+z,index_col=0)
hetclas = df1["HeteroClass"]
hetclaslist = hetclas.tolist()
heteroclasses.append(hetclaslist)
heteroclasses = [item for sublist in heteroclasses for item in sublist]
hetclasset = list(set(heteroclasses))
indexlist = []
for i in samplenames:
for n in range(len(hetclasset)):
indexlist.append(i)
###This section is relevant to my whisky samples
if whisky == True:
columnnames = ["Sample","Class","WoodType","Region","Age","Peated","HeteroClass","HeteroClassCount"]
df4 = pd.read_csv(path+"SampleInfo-Dict.csv",index_col=0)
df4 = df4.T
dict4 = df4.to_dict()
outputdata = pd.DataFrame(index = range(len(indexlist)), columns=columnnames)
a = 0
for y in filesB:
df2 = pd.read_csv(inputpath+y,index_col=0)
counter = Counter(df2["HeteroClass"])
for x in counter:
outputdata.iloc[a][0] = y[:-9]
outputdata.iloc[a][1] = dict4[y[:-9]]["Class"]
outputdata.iloc[a][2] = dict4[y[:-9]]["Total Wood"]
outputdata.iloc[a][3] = dict4[y[:-9]]["Region"]
outputdata.iloc[a][4] = dict4[y[:-9]]["Age"]
outputdata.iloc[a][5] = dict4[y[:-9]]["Peated"]
outputdata.iloc[a][6] = x
outputdata.iloc[a][7] = counter[x]
a = a+1
outputdata = outputdata.dropna(how="all",axis=0)
else:
columnnames = ["Sample","Class","HeteroClass","HeteroClassCount"]
outputdata = pd.DataFrame(index = range(len(indexlist)), columns=columnnames)
a = 0
for y in filesB:
df2 = pd.read_csv(inputpath+y,index_col=0)
counter = Counter(df2["HeteroClass"])
for x in counter:
outputdata.iloc[a][0] = y[:-9]
        outputdata.iloc[a][1] = y[:-9] #this is the Class variable, and should be defined as appropriate for what you're plotting. In the case of single samples, it can be the sample name.
outputdata.iloc[a][2] = x
outputdata.iloc[a][3] = counter[x]
a = a+1
outputdata = outputdata.dropna(how="all",axis=0)
pd.to_numeric(outputdata["HeteroClassCount"],errors="raise")
saveoutputdata = input("Do you want to save the output data in a text file for later re-processing - Y or N? ")
if saveoutputdata.upper() == "Y":
outputdata.to_excel(inputpath+"HetClassByClass-longform.xlsx") #this saves the info out in a longform for plotting.
#outputdata = pd.read_excel(inputpath+"HetClassByClass-longform.xlsx") #this reads that data back in. Only necessary for manually re-running bits of script.
# This section creates a unique, naturally sorted list of heteroatom classes for plotting. Only really works for CHO formula.
# If you have exotic heteroatoms, will need to refigure this yourself, or just hardcode the order you want. easy to do in Excel.
order = outputdata["HeteroClass"].tolist()
order= list(set(order))
order.sort(key=FTPM.natural_sort_key) # this natural sort function ensures a logical order to your barplot.
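# e.g. natural sorting yields O2, O3, ..., O9, O10 rather than the
# lexicographic O10, O11, ..., O19, O2, O3, ... ordering.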
if whisky == True:
CHOorder = ["O2","O3","O4","O5","O6","O7","O8","O9","O10","O11","O12","O13","O14","O15","O16","O17","O18","O19"]
Fullorder = ["O2","O3","O4","O5","O6","O7","O8","O9","O10","O11","O12","O13","O14","O15","O16","O17","O18",
"O19","O1S1","O2S1","O3S1","O4S1","O5S1","O6S1","O7S1","O8S1","O9S1","O10S1","O11S1","O12S1"]
CHOSorder =["O1S1","O2S1","O3S1","O4S1","O5S1","O6S1","O7S1","O8S1","O9S1","O10S1","O11S1","O12S1"]
CHOSorderNew = ["O2","O3","O4","O5","O6","O7","O8","O9","O10","O11","O12","O13","O14","O15","O16","O17","O18","O19","OnS"]
labels = ["O2","O3","O4","O5","O6","O7","O8","O9","O10","O11","O12","O13","O14","O15","O16","O17","O18","O19",r'O$\mathregular {_n}$S']
else:
df = outputdata
#colours = ["#a6cee3","#1f78b4","#b2df8a"] #colorblind and print friendly colours picked from http://colorbrewer2.org/
colours = ["#1b9e77","#d95f02","#7570b3"] #as above, but brighter
def barplot():
sns.set_style("white")
sns.set_context("paper",font_scale=2)
ax = sns.barplot(x="HeteroClass",y="HeteroClassCount",hue="Class",
data=outputdata,order=order,palette=sns.color_palette(colours))
ax.set(xlabel='Heteroatomic Class', ylabel='Count')
handles, labels = ax.get_legend_handles_labels()
if len(labels) == 1:
ax.legend_.remove()
sns.despine()
fig = ax.get_figure()
plt.xticks(rotation=90)
fig.set_size_inches(8, 6, forward=True)
fig.savefig(outputpath+"Barplot.png",dpi=600,bbox_inches="tight")
fig.savefig(outputpath+"Barplot.eps",dpi=600,bbox_inches="tight")
barplot() #plots a barplot.
"""
# Here are some further examples of the Seaborn Plotting library applied to this problem.
# Most of these rely on having many samples across a small number of classes you wish to compare
def violinplot():
sns.set_style("white")
sns.set_context("paper",font_scale=2)
ax = sns.violinplot(x="HeteroClass",y="HeteroClassCount",hue="Class",data=outputdata,
order=order,
palette=sns.color_palette("bright"),
split=False,bw="silverman",scale_hue=True,scale="width",
cut=2,linewidth=1.5,inner="quartiles",saturation=1)
ax.set(xlabel='Heteroatomic Class', ylabel='Count')
sns.despine()
fig = ax.get_figure()
locs, labels = plt.xticks()
plt.xticks(locs, labels, rotation=90)
cur_ylim = ax.get_ylim()
ax.set_ylim(0,cur_ylim[1])
fig.set_size_inches((POPM.mm2inch(171,80)), forward=True)
fig.savefig(outputpath+"violinplot-scalewidth.png",dpi=600,bbox_inches="tight")
fig.savefig(outputpath+"violinplot-scalewidth.eps",dpi=600,bbox_inches="tight")
def boxplot():
sns.set_style("white")
sns.set_context("paper",font_scale=2)
ax = sns.boxplot(x="HeteroClass",y="HeteroClassCount",hue="Class",data=outputdata,order=order,palette=sns.color_palette("bright"))
ax.set(xlabel='Heteroatomic Class', ylabel='Count')
sns.despine()
fig = ax.get_figure()
plt.xticks(rotation=90)
fig.set_size_inches(8, 6, forward=True)
fig.savefig(outputpath+"Boxplot-comparison-CHO-only.png",dpi=300,bbox_inches="tight")
def swarmplot():
sns.set_style("white")
sns.set_context("paper",font_scale=2)
ax = sns.swarmplot(x="HeteroClass",y="HeteroClassCount",hue="Class",data=outputdata,order=order,palette=sns.color_palette("bright"))
ax.set(xlabel='Heteroatomic Class', ylabel='Average Count')
sns.despine()
fig = ax.get_figure()
plt.xticks(rotation=90)
fig.set_size_inches(8, 6, forward=True)
fig.savefig(outputpath+"swarmplot-comparison-CHO-only.png",dpi=300,bbox_inches="tight")
def stripplot():
sns.set_style("white")
sns.set_context("paper",font_scale=2)
ax = sns.stripplot(x="HeteroClass",y="HeteroClassCount",hue="Class",data=outputdata,order=order,palette=sns.color_palette("bright"),jitter=False,split=True)
ax.set(xlabel='Heteroatomic Class', ylabel='Average Count')
sns.despine()
fig = ax.get_figure()
plt.xticks(rotation=90)
fig.set_size_inches(8, 6, forward=True)
fig.savefig(outputpath+"striplot-comparison-CHO-only.png",dpi=300,bbox_inches="tight")
"""
#EOF
|
gpl-3.0
| -3,138,422,466,994,427,400
| 42.690377
| 193
| 0.666507
| false
| 3.08814
| false
| false
| false
|
Parkayun/flask
|
flask/debughelpers.py
|
1
|
6024
|
# -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2016 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string, text_type
from .app import Flask
from .blueprints import Blueprint
from .globals import _request_ctx_stack
class UnexpectedUnicodeError(AssertionError, UnicodeError):
"""Raised in places where we want some better error reporting for
unexpected unicode or binary data.
"""
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
"""Raised from request.files during debugging. The idea is that it can
provide a better error message than just a generic KeyError/BadRequest.
"""
def __init__(self, request, key):
form_matches = request.form.getlist(key)
buf = ['You tried to access the file "%s" in the request.files '
'dictionary but it does not exist. The mimetype for the request '
'is "%s" instead of "multipart/form-data" which means that no '
'file contents were transmitted. To fix this error you should '
'provide enctype="multipart/form-data" in your form.' %
(key, request.mimetype)]
if form_matches:
buf.append('\n\nThe browser instead transmitted some file names. '
'This was submitted: %s' % ', '.join('"%s"' % x
for x in form_matches))
self.msg = ''.join(buf)
def __str__(self):
return self.msg
class FormDataRoutingRedirect(AssertionError):
"""This exception is raised by Flask in debug mode if it detects a
redirect caused by the routing system when the request method is not
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
"""
def __init__(self, request):
exc = request.routing_exception
buf = ['A request was sent to this URL (%s) but a redirect was '
'issued automatically by the routing system to "%s".'
% (request.url, exc.new_url)]
# In case just a slash was appended we can be extra helpful
if request.base_url + '/' == exc.new_url.split('?')[0]:
buf.append(' The URL was defined with a trailing slash so '
'Flask will automatically redirect to the URL '
'with the trailing slash if it was accessed '
'without one.')
buf.append(' Make sure to directly send your %s-request to this URL '
'since we can\'t make browsers or HTTP clients redirect '
'with form data reliably or without user interaction.' %
request.method)
buf.append('\n\nNote: this exception is only raised in debug mode')
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
"""Since Flask 0.8 we're monkeypatching the files object in case a
request is detected that does not use multipart form data but the files
object is accessed.
"""
oldcls = request.files.__class__
class newcls(oldcls):
def __getitem__(self, key):
try:
return oldcls.__getitem__(self, key)
except KeyError:
if key not in request.form:
raise
raise DebugFilesKeyError(request, key)
newcls.__name__ = oldcls.__name__
newcls.__module__ = oldcls.__module__
request.files.__class__ = newcls
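# Rough effect (illustrative): after attaching, request.files['photo'] raises
# DebugFilesKeyError with an explanatory message when 'photo' arrived as plain
# form data (i.e. the form lacked enctype="multipart/form-data"); keys absent
# from request.form still raise the original KeyError.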
def _dump_loader_info(loader):
yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__)
for key, value in sorted(loader.__dict__.items()):
if key.startswith('_'):
continue
if isinstance(value, (tuple, list)):
if not all(isinstance(x, (str, text_type)) for x in value):
continue
yield '%s:' % key
for item in value:
yield ' - %s' % item
continue
elif not isinstance(value, (str, text_type, int, float, bool)):
continue
yield '%s: %r' % (key, value)
def explain_template_loading_attempts(app, template, attempts):
"""This should help developers understand what failed"""
info = ['Locating template "%s":' % template]
total_found = 0
blueprint = None
reqctx = _request_ctx_stack.top
if reqctx is not None and reqctx.request.blueprint is not None:
blueprint = reqctx.request.blueprint
for idx, (loader, srcobj, triple) in enumerate(attempts):
if isinstance(srcobj, Flask):
src_info = 'application "%s"' % srcobj.import_name
elif isinstance(srcobj, Blueprint):
src_info = 'blueprint "%s" (%s)' % (srcobj.name,
srcobj.import_name)
else:
src_info = repr(srcobj)
info.append('% 5d: trying loader of %s' % (
idx + 1, src_info))
for line in _dump_loader_info(loader):
info.append(' %s' % line)
if triple is None:
detail = 'no match'
else:
detail = 'found (%r)' % (triple[1] or '<string>')
total_found += 1
info.append(' -> %s' % detail)
seems_fishy = False
if total_found == 0:
info.append('Error: the template could not be found.')
seems_fishy = True
elif total_found > 1:
info.append('Warning: multiple loaders returned a match for the template.')
seems_fishy = True
if blueprint is not None and seems_fishy:
info.append(' The template was looked up from an endpoint that '
'belongs to the blueprint "%s".' % blueprint)
info.append(' Maybe you did not place a template in the right folder?')
info.append(' See http://flask.pocoo.org/docs/blueprints/#templates')
app.logger.info('\n'.join(info))
|
bsd-3-clause
| -7,867,772,259,524,209,000
| 37.864516
| 83
| 0.586819
| false
| 4.239268
| false
| false
| false
|
stpx/canto-curses
|
canto_curses/main.py
|
1
|
9578
|
# -*- coding: utf-8 -*-
#Canto-curses - ncurses RSS reader
# Copyright (C) 2014 Jack Miller <jack@codezen.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
CANTO_PROTOCOL_COMPATIBLE = 0.9
from canto_next.client import CantoClient
from canto_next.plugins import try_plugins, set_program
from canto_next.rwlock import alllocks
from canto_next.hooks import call_hook
from .config import config, finalize_eval_settings
from .tagcore import tag_updater, alltagcores
from .gui import CantoCursesGui, GraphicalLog
from threading import Thread
from queue import Queue
import logging
logging.basicConfig(
format = "%(asctime)s : %(name)s -> %(message)s",
datefmt = "%H:%M:%S",
level = logging.INFO
)
log = logging.getLogger("CANTO-CURSES")
import traceback
import locale
import getopt
import signal
import errno
import fcntl
import time
import sys
import os
# It's the CantoCurses class' responsibility to provide the subsequent Gui
# object with a solid foundation with other components. This includes parsing
# command line arguments, starting a canto-daemon instance if necessary, signal
# handling, and wrapping the socket communication.
class CantoCurses(CantoClient):
def init(self):
# For good curses behavior.
locale.setlocale(locale.LC_ALL, '')
# Used for GUI-signalled death.
self.pid = os.getpid()
self.done = False
# Whether or not to append pid to logfile
# (debug option)
self.log_fname_pid = False
version = "canto-curses " + VERSION + " " + GIT_HASH
optl = self.common_args('hl', ["help"], version)
if optl == -1:
sys.exit(-1)
if self.args(optl):
sys.exit(-1)
rootlog = logging.getLogger()
rootlog.setLevel(max(rootlog.level - 10 * self.verbosity,0))
self.glog_handler = GraphicalLog()
try:
if self.port < 0:
# If we're running locally, ensure daemon is running
self.start_daemon()
CantoClient.__init__(self, self.socket_path)
else:
CantoClient.__init__(self, None,\
port = self.port, address = self.addr)
except Exception as e:
log.error("Error: %s" % e)
sys.exit(-1)
# __init__ above started one connection, start another
# for priority stuff.
self.connect()
# Make sure we have permissions on the relevant, non-daemon files in
# the target directory (None of these will be used until we set_log)
if self.ensure_paths():
sys.exit(-1)
self.set_log()
log.info(version)
# Evaluate anything in the target /plugins directory.
set_program("canto-curses")
self.plugin_errors = try_plugins(self.conf_dir, self.plugin_default, self.disabled_plugins,
self.enabled_plugins)
def print_help(self):
print("USAGE: canto-curses [options]")
print("\t-h/--help\tThis help")
print("\t-V/--version\tPrint version")
print("\t-v/\t\tVerbose logging (for debug)")
print("\t-D/--dir <dir>\tSet configuration directory.")
print("\t-l\t\tAppend pid to log file name")
print("\nPlugin control\n")
print("\t--noplugins\t\t\t\tDisable plugins")
print("\t--enableplugins 'plugin1 plugin2...'\tEnable single plugins (overrides --noplugins)")
print("\t--disableplugins 'plugin1 plugin2...'\tDisable single plugins")
print("\nNetwork control\n")
print("NOTE: These should be used in conjunction with SSH port forwarding to be secure\n")
print("\t-a/--address <IP>\tConnect to this address")
print("\t-p/--port <port>\tConnect to this port")
def args(self, optlist):
for opt, arg in optlist:
if opt in ["-h", "--help"]:
self.print_help()
return 1
elif opt in ["-l"]:
self.log_fname_pid = True
return 0
def winch(self, a = None, b = None):
if self.gui.alive:
self.gui.winch()
def sigusr1(self, a = None, b = None):
import threading
held_locks = {}
code = {}
curthreads = threading.enumerate()
for threadId, stack in sys._current_frames().items():
name = str(threadId)
for ct in curthreads:
if ct.ident == threadId:
name = ct.name
code[name] = ["NAME: %s" % name]
for filename, lineno, fname, line in traceback.extract_stack(stack):
code[name].append('FILE: "%s", line %d, in %s' % (filename, lineno, fname))
if line:
code[name].append(" %s" % (line.strip()))
held_locks[name] = ""
for lock in alllocks:
if lock.writer_id == threadId:
held_locks[name] += ("%s(w)" % lock.name)
continue
for reader_id, reader_stack in lock.reader_stacks:
if reader_id == threadId:
held_locks[name] += ("%s(r)" % lock.name)
for k in code:
log.info('\n\nLOCKS: %s \n%s' % (held_locks[k], '\n'.join(code[k])))
log.info("\n\nSTACKS:")
for lock in alllocks:
for (reader_id, reader_stack) in lock.reader_stacks:
log.info("Lock %s (%s readers)" % (lock.name, lock.readers))
log.info("Lock reader (thread %s):" % (reader_id,))
log.info(''.join(reader_stack))
for writer_stack in lock.writer_stacks:
log.info("Lock %s (%s readers)" % (lock.name, lock.readers))
log.info("Lock writer (thread %s):" % (lock.writer_id,))
log.info(''.join(writer_stack))
log.info("VARS: %s" % config.vars)
log.info("OPTS: %s" % config.config)
def child(self, a = None, b = None):
try:
while True:
pid, status = os.waitpid(-1, os.WNOHANG)
if pid == 0:
break
log.debug("CHLD %d has died: %d", pid, status)
except Exception as e:
if e.errno == errno.ECHILD:
log.debug("CHLD no children?")
else:
raise
def run(self):
# We want this as early as possible
signal.signal(signal.SIGUSR1, self.sigusr1)
# Get config from daemon
if not config.init(self, CANTO_PROTOCOL_COMPATIBLE):
print("Invalid daemon version")
print("Wanted: %s" % CANTO_PROTOCOL_COMPATIBLE)
print("Got: %s" % config.version)
sys.exit(-1)
else:
log.info("Version check passed: %s" % CANTO_PROTOCOL_COMPATIBLE)
# Create Tags for each TagCore
self.gui = CantoCursesGui(self, self.glog_handler)
tag_updater.init(self)
# Initial signal setup.
signal.signal(signal.SIGWINCH, self.winch)
signal.signal(signal.SIGCHLD, self.child)
finalize_eval_settings()
call_hook("curses_start", [])
if self.plugin_errors:
log.error("The following error occurred loading plugins:\n\n%s" % self.plugin_errors)
while self.gui.alive:
self.gui.tick()
time.sleep(1)
def ensure_paths(self):
if os.path.exists(self.conf_dir):
if not os.path.isdir(self.conf_dir):
log.error("Error: %s is not a directory." % self.conf_dir)
return -1
if not os.access(self.conf_dir, os.R_OK):
log.error("Error: %s is not readable." % self.conf_dir)
return -1
if not os.access(self.conf_dir, os.W_OK):
log.error("Error: %s is not writable." % self.conf_dir)
return -1
else:
try:
os.makedirs(self.conf_dir)
except Exception as e:
log.error("Exception making %s : %s" % (self.conf_dir, e))
return -1
return self.ensure_files()
def ensure_files(self):
logname = "curses-log"
if self.log_fname_pid:
logname += ".%d" % os.getpid()
for f in [ logname ] :
p = self.conf_dir + "/" + f
if os.path.exists(p):
if not os.path.isfile(p):
log.error("Error: %s is not a file." % p)
return -1
if not os.access(p, os.R_OK):
log.error("Error: %s is not readable." % p)
return -1
if not os.access(p, os.W_OK):
log.error("Error: %s is not writable." % p)
return -1
self.log_path = self.conf_dir + "/" + logname
def set_log(self):
f = open(self.log_path, "w")
os.dup2(f.fileno(), sys.stderr.fileno())
def start(self):
try:
self.init()
self.run()
except KeyboardInterrupt:
pass
except Exception as e:
tb = traceback.format_exc()
log.error("Exiting on exception:")
log.error("\n" + "".join(tb))
call_hook("curses_exit", [])
log.info("Exiting.")
sys.exit(0)
def __init__(self):
self.start()
|
gpl-2.0
| -2,279,344,980,120,224,800
| 32.256944
| 102
| 0.546774
| false
| 3.823553
| true
| false
| false
|
voytekresearch/neurodsp
|
neurodsp/tests/aperiodic/test_irasa.py
|
1
|
1456
|
"""Tests for IRASA functions."""
import numpy as np
from neurodsp.tests.settings import FS, N_SECONDS_LONG, EXP1
from neurodsp.sim import sim_combined
from neurodsp.spectral import compute_spectrum, trim_spectrum
from neurodsp.aperiodic.irasa import *
###################################################################################################
###################################################################################################
def test_compute_irasa(tsig_comb):
# Estimate periodic and aperiodic components with IRASA
f_range = [1, 30]
freqs, psd_ap, psd_pe = compute_irasa(tsig_comb, FS, f_range, noverlap=int(2*FS))
assert len(freqs) == len(psd_ap) == len(psd_pe)
# Compute r-squared for the full model, comparing to a standard power spectrum
_, powers = trim_spectrum(*compute_spectrum(tsig_comb, FS, nperseg=int(4*FS)), f_range)
r_sq = np.corrcoef(np.array([powers, psd_ap+psd_pe]))[0][1]
assert r_sq > .95
def test_fit_irasa(tsig_comb):
# Estimate periodic and aperiodic components with IRASA & fit aperiodic
freqs, psd_ap, _ = compute_irasa(tsig_comb, FS, noverlap=int(2*FS))
b0, b1 = fit_irasa(freqs, psd_ap)
assert round(b1) == EXP1
assert np.abs(b0 - np.log10((psd_ap)[0])) < 1
def test_fit_func():
freqs = np.arange(30)
intercept = -2
slope = -2
fit = fit_func(freqs, intercept, slope)
assert (fit == slope * freqs + intercept).all()
|
mit
| 5,145,470,417,897,858,000
| 32.090909
| 99
| 0.581044
| false
| 3.2
| false
| false
| false
|
tkrajina/cartesius
|
cartesius/colors.py
|
1
|
1051
|
# -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
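# Illustrative usage (added commentary, not part of the original module):
#   get_color(0xFF8800) -> (255, 136, 0)  # int is unpacked low-byte-first: blue, green, red
#   get_color((10, 20, 30)) -> (10, 20, 30)  # a 3-tuple passes through unchanged
# Note that get_color(0) returns None, since 0 is falsy and hits the early return.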
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
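# Illustrative usage (added commentary, not part of the original module):
#   get_color_between((0, 0, 0), (255, 255, 255), 0.5) -> (127, 127, 127)
#   brighten((250, 10, 10), 10) -> (4, 20, 20)  # components wrap around modulo 256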
|
apache-2.0
| 6,686,300,613,034,056,000
| 24.634146
| 92
| 0.527117
| false
| 3.091176
| false
| false
| false
|
ehooo/django_mqtt
|
test_web/settings.py
|
1
|
4373
|
"""
Django settings for web project.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#b68qv#(v-g26k3qt_-1ufg-prvsw2p)7@ctea*n!36-w23bv1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
DB_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
'django_mqtt',
'django_mqtt.mosquitto.auth_plugin',
'django_mqtt.publisher',
]
FIXTURE_DIRS = [
os.path.join(BASE_DIR, 'test_web', 'fixtures')
]
MIDDLEWARE = (
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_web.urls'
MQTT_CERTS_ROOT = os.path.join(BASE_DIR, 'private')
MQTT_ACL_ALLOW = False
MQTT_ACL_ALLOW_ANONIMOUS = MQTT_ACL_ALLOW
MQTT_ALLOW_EMPTY_CLIENT_ID = False
MQTT_SESSION_TIMEOUT = 5
WSGI_APPLICATION = 'test_web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:' if DB_DEBUG else os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
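# Note (added): with DB_DEBUG = True the suite runs against an in-memory
# SQLite database that vanishes when the process exits; set DB_DEBUG = False
# to persist data to db.sqlite3 under BASE_DIR instead.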
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
LOGGING_LEVEL = 'DEBUG' if DEBUG else 'INFO'
if 'test' in sys.argv:
LOGGING_LEVEL = 'CRITICAL'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': LOGGING_LEVEL,
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'django': {
'handlers': ['console'],
'propagate': True,
'filters': ['require_debug_true']
}
}
}
|
gpl-2.0
| 6,900,691,633,989,705,000
| 25.664634
| 95
| 0.641207
| false
| 3.435192
| false
| false
| false
|
matllubos/django-reversion-log
|
setup.py
|
1
|
1150
|
from setuptools import setup, find_packages
from reversion_log.version import get_version
setup(
name='django-reversion-log',
version=get_version(),
description="Log build on revisiions.",
keywords='django, reversion',
author='Lubos Matl',
author_email='matllubos@gmail.com',
url='https://github.com/matllubos/django-reversion-log',
license='LGPL',
package_dir={'is_core': 'is_core'},
include_package_data=True,
packages=find_packages(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: GNU LESSER GENERAL PUBLIC LICENSE (LGPL)',
'Natural Language :: Czech',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
install_requires=[
'django>=1.6',
'django-reversion==1.8.7',
],
zip_safe=False
)
|
lgpl-3.0
| -1,228,347,683,448,355,300
| 31.857143
| 78
| 0.618261
| false
| 3.911565
| false
| true
| false
|
MissionCriticalCloud/cosmic
|
cosmic-core/systemvm/patches/centos7/opt/cosmic/startup/setup_cpvm.py
|
1
|
2608
|
import logging
import os
from utils import Utils
def setup_iptable_rules(cmdline):
external_rules = ""
for cidr in cmdline.get('allowedcidrs', '').split(','):
if cidr != '':
external_rules += "-A INPUT -i " + cmdline['publicnic'] + " -s " + cidr.strip() + " -p tcp -m multiport --dports 80,443 -m tcp -j ACCEPT\n"
iptables_rules = """
*nat
:PREROUTING ACCEPT [0:0]
:POSTROUTING ACCEPT [0:0]
:OUTPUT ACCEPT [0:0]
COMMIT
*filter
:INPUT DROP [0:0]
:FORWARD DROP [0:0]
:OUTPUT ACCEPT [0:0]
-A INPUT -i lo -j ACCEPT
-A INPUT -i %s -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -i %s -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -i %s -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -p icmp --icmp-type 13 -j DROP
-A INPUT -p icmp -j ACCEPT
-A INPUT -i %s -p tcp -m state --state NEW -m tcp -s 169.254.0.1/32 --dport 3922 -j ACCEPT
-A INPUT -i %s -p tcp -m state --state NEW -m tcp --dport 8001 -j ACCEPT
-A INPUT -i %s -p tcp -m state --state NEW -m tcp --dport 8001 -j ACCEPT
%s
COMMIT
""" % (
cmdline['controlnic'],
cmdline['mgtnic'],
cmdline['publicnic'],
cmdline['controlnic'],
cmdline['controlnic'],
cmdline['mgtnic'],
external_rules
)
with open("/tmp/iptables-consoleproxy", "w") as f:
f.write(iptables_rules)
os.system("iptables-restore < /tmp/iptables-consoleproxy")
class ConsoleProxyVM:
def __init__(self, cmdline) -> None:
super().__init__()
self.cmdline = cmdline
self.config_dir = "/etc/cosmic/agent/"
def start(self):
logging.info("Setting up configuration for %s" % self.cmdline["type"])
self.setup_agent_config()
setup_iptable_rules(self.cmdline)
if self.cmdline['setrfc1918routes'] == 'true':
logging.info("Setting rfc1918 routes")
Utils(self.cmdline).set_rfc1918_routes()
logging.info("Setting local routes")
Utils(self.cmdline).set_local_routes()
os.system("systemctl start cosmic-agent")
def setup_agent_config(self):
if not os.path.isdir(self.config_dir):
os.makedirs(self.config_dir, 0o644, True)
consoleproxy_properties = """
consoleproxy.tcpListenPort=0
consoleproxy.httpListenPort=80
consoleproxy.httpCmdListenPort=8001
consoleproxy.jarDir=./applet/
consoleproxy.viewerLinger=180
consoleproxy.reconnectMaxRetry=5
"""
with open(self.config_dir + "consoleproxy.properties", "w") as f:
f.write(consoleproxy_properties)
Utils(self.cmdline).setup_agent_properties()
|
apache-2.0
| -2,954,845,786,210,160,600
| 28.977011
| 151
| 0.635353
| false
| 3.272271
| true
| false
| false
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/share/pyshared/orca/structural_navigation.py
|
1
|
153933
|
# Orca
#
# Copyright 2005-2009 Sun Microsystems Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
"""Implements structural navigation. Right now this is only
being implemented by Gecko; however it can be used in any
script providing access to document content."""
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2005-2009 Sun Microsystems Inc."
__license__ = "LGPL"
import pyatspi
import debug
import input_event
import keybindings
import orca
import orca_state
import settings
import speech
from orca_i18n import _
from orca_i18n import ngettext
from orca_i18n import C_
#############################################################################
# #
# MatchCriteria #
# #
#############################################################################
class MatchCriteria:
"""Contains the criteria which will be used to generate a collection
matchRule. We don't want to create the rule until we need it and
are ready to use it. In addition, the creation of an AT-SPI match
rule requires you specify quite a few things (see the __init__),
most of which are irrelevant to the search at hand. This class
makes it possible for the StructuralNavigationObject creator to just
specify the few criteria that actually matter.
"""
def __init__(self,
collection,
states = [],
matchStates = None,
objAttrs = [],
matchObjAttrs = None,
roles = [],
matchRoles = None,
interfaces = "",
matchInterfaces = None,
invert = False,
applyPredicate = False):
"""Creates a new match criteria object.
Arguments:
- collection: the collection interface for the document in
which the accessible objects can be found.
- states: a list of pyatspi states of interest
- matchStates: whether an object must have all of the states
in the states list, any of the states in the list, or none
of the states in the list. Must be one of the collection
interface MatchTypes if provided.
- objAttrs: a list of object attributes (not text attributes)
- matchObjAttrs: whether an object must have all of the
attributes in the objAttrs list, any of the attributes in
the list, or none of the attributes in the list. Must be
one of the collection interface MatchTypes if provided.
- interfaces: (We aren't using this. According to the at-spi
idl, it is a string.)
- matchInterfaces: The collection MatchType for matching by
interface.
- invert: If true the match rule will find objects that don't
match. We always use False.
- applyPredicate: whether or not a predicate should be applied
as an additional check to see if an item is indeed a match.
This is necessary, for instance, when one of the things we
care about is a text attribute, something the collection
interface doesn't include in its criteria.
"""
self.collection = collection
self.matchStates = matchStates or collection.MATCH_ANY
self.objAttrs = objAttrs
self.matchObjAttrs = matchObjAttrs or collection.MATCH_ANY
self.roles = roles
self.matchRoles = matchRoles or collection.MATCH_ANY
self.interfaces = interfaces
self.matchInterfaces = matchInterfaces or collection.MATCH_ALL
self.invert = invert
self.applyPredicate = applyPredicate
self.states = pyatspi.StateSet()
for state in states:
self.states.add(state)
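# Illustrative sketch (added, not part of the original file): a script's
# criteria method could build a MatchCriteria for level-2 headings roughly
# like this, assuming `collection` is the document's collection interface:
#
#     criteria = MatchCriteria(collection,
#                              roles=[pyatspi.ROLE_HEADING],
#                              objAttrs=["level:2"],
#                              matchObjAttrs=collection.MATCH_ANY)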
###########################################################################
# #
# StructuralNavigationObject #
# #
###########################################################################
class StructuralNavigationObject:
"""Represents a document object which has identifiable characteristics
which can be used for the purpose of navigation to and among instances
of that object. These characteristics may be something as simple as a
role and/or a state of interest. Or they may be something more complex
such as character counts, text attributes, and other object attributes.
"""
def __init__(self, structuralNavigation, objType, bindings, predicate,
criteria, presentation):
"""Creates a new structural navigation object.
Arguments:
- structuralNavigation: the StructuralNavigation class associated
with this object.
- objType: the type (e.g. BLOCKQUOTE) associated with this object.
- bindings: a dictionary of all of the possible bindings for this
object. In the case of all but the "atLevel" bindings, each
binding takes the form of [keysymstring, modifiers, description].
The goPreviousAtLevel and goNextAtLevel bindings are each a list
of bindings in that form.
- predicate: the predicate to use to determine if a given accessible
matches this structural navigation object. Used when a search via
collection is not possible or practical.
- criteria: a method which returns a MatchCriteria object which
can in turn be used to locate the next/previous matching accessible
via collection.
- presentation: the method which should be called after performing
the search for the structural navigation object.
"""
self.structuralNavigation = structuralNavigation
self.objType = objType
self.bindings = bindings
self.predicate = predicate
self.criteria = criteria
self.present = presentation
self.inputEventHandlers = {}
self.keyBindings = keybindings.KeyBindings()
self.functions = []
self._setUpHandlersAndBindings()
def _setUpHandlersAndBindings(self):
"""Adds the inputEventHandlers and keyBindings for this object."""
# Set up the basic handlers. These are our traditional goPrevious
# and goNext functions.
#
previousBinding = self.bindings.get("previous")
if previousBinding:
[keysymstring, modifiers, description] = previousBinding
handlerName = "%sGoPrevious" % self.objType
self.inputEventHandlers[handlerName] = \
input_event.InputEventHandler(self.goPrevious, description)
self.keyBindings.add(
keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
self.inputEventHandlers[handlerName]))
self.functions.append(self.goPrevious)
nextBinding = self.bindings.get("next")
if nextBinding:
[keysymstring, modifiers, description] = nextBinding
handlerName = "%sGoNext" % self.objType
self.inputEventHandlers[handlerName] = \
input_event.InputEventHandler(self.goNext, description)
self.keyBindings.add(
keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
self.inputEventHandlers[handlerName]))
self.functions.append(self.goNext)
# Set up the "at level" handlers (e.g. to navigate among headings
# at the specified level).
#
previousAtLevel = self.bindings.get("previousAtLevel") or []
for i, binding in enumerate(previousAtLevel):
level = i + 1
handler = self.goPreviousAtLevelFactory(level)
handlerName = "%sGoPreviousLevel%dHandler" % (self.objType, level)
keysymstring, modifiers, description = binding
self.inputEventHandlers[handlerName] = \
input_event.InputEventHandler(handler, description)
self.keyBindings.add(
keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
self.inputEventHandlers[handlerName]))
self.functions.append(handler)
nextAtLevel = self.bindings.get("nextAtLevel") or []
for i, binding in enumerate(nextAtLevel):
level = i + 1
handler = self.goNextAtLevelFactory(level)
handlerName = "%sGoNextLevel%dHandler" % (self.objType, level)
keysymstring, modifiers, description = binding
self.inputEventHandlers[handlerName] = \
input_event.InputEventHandler(handler, description)
self.keyBindings.add(
keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
self.inputEventHandlers[handlerName]))
self.functions.append(handler)
# Set up the "directional" handlers (e.g. for table cells. Live
# region support has a handler to go to the last live region,
# so we'll handle that here as well).
#
directions = {}
directions["Left"] = self.bindings.get("left")
directions["Right"] = self.bindings.get("right")
directions["Up"] = self.bindings.get("up")
directions["Down"] = self.bindings.get("down")
directions["First"] = self.bindings.get("first")
directions["Last"] = self.bindings.get("last")
for direction in directions:
binding = directions.get(direction)
if not binding:
continue
handler = self.goDirectionFactory(direction)
handlerName = "%sGo%s" % (self.objType, direction)
keysymstring, modifiers, description = binding
self.inputEventHandlers[handlerName] = \
input_event.InputEventHandler(handler, description)
self.keyBindings.add(
keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
self.inputEventHandlers[handlerName]))
self.functions.append(handler)
def addHandlerAndBinding(self, binding, handlerName, function):
"""Adds a custom inputEventHandler and keybinding to the object's
handlers and bindings. Right now this is unused, but here in
case a creator of a StructuralNavigationObject had some other
desired functionality in mind.
Arguments:
- binding: [keysymstring, modifiers, description]
- handlerName: a string uniquely identifying the handler
- function: the function associated with the binding
"""
[keysymstring, modifiers, description] = binding
handler = input_event.InputEventHandler(function, description)
keyBinding = keybindings.KeyBinding(
keysymstring,
settings.defaultModifierMask,
modifiers,
handler)
self.inputEventHandlers[handlerName] = handler
self.structuralNavigation.inputEventHandlers[handlerName] = handler
self.functions.append(function)
self.structuralNavigation.functions.append(function)
self.keyBindings.add(keyBinding)
self.structuralNavigation.keyBindings.add(keyBinding)
def goPrevious(self, script, inputEvent):
"""Go to the previous object."""
self.structuralNavigation.goObject(self, False)
def goNext(self, script, inputEvent):
"""Go to the next object."""
self.structuralNavigation.goObject(self, True)
def goPreviousAtLevelFactory(self, level):
"""Generates a goPrevious method for the specified level. Right
now, this is just for headings, but it may have applicability
        for other objects such as list items (i.e. for level-based
        navigation in an outline or other multi-tiered list).
Arguments:
- level: the desired level of the object as an int.
"""
def goPreviousAtLevel(script, inputEvent):
self.structuralNavigation.goObject(self, False, arg=level)
return goPreviousAtLevel
def goNextAtLevelFactory(self, level):
"""Generates a goNext method for the specified level. Right
now, this is just for headings, but it may have applicability
        for other objects such as list items (i.e. for level-based
        navigation in an outline or other multi-tiered list).
Arguments:
- level: the desired level of the object as an int.
"""
def goNextAtLevel(script, inputEvent):
self.structuralNavigation.goObject(self, True, arg=level)
return goNextAtLevel
def goDirectionFactory(self, direction):
"""Generates the methods for navigation in a particular direction
(i.e. left, right, up, down, first, last). Right now, this is
primarily for table cells, but it may have applicability for other
objects. For example, when navigating in an outline, one might
want the ability to navigate to the next item at a given level,
but then work his/her way up/down in the hierarchy.
Arguments:
- direction: the direction in which to navigate as a string.
"""
def goCell(script, inputEvent):
thisCell = self.structuralNavigation.getCellForObj(\
self.structuralNavigation.getCurrentObject())
currentCoordinates = \
self.structuralNavigation.getCellCoordinates(thisCell)
if direction == "Left":
desiredCoordinates = [currentCoordinates[0],
currentCoordinates[1] - 1]
elif direction == "Right":
desiredCoordinates = [currentCoordinates[0],
currentCoordinates[1] + 1]
elif direction == "Up":
desiredCoordinates = [currentCoordinates[0] - 1,
currentCoordinates[1]]
elif direction == "Down":
desiredCoordinates = [currentCoordinates[0] + 1,
currentCoordinates[1]]
elif direction == "First":
desiredCoordinates = [0, 0]
            else:
                desiredCoordinates = [-1, -1]
                table = self.structuralNavigation.getTableForCell(thisCell)
                if table:
                    iTable = table.queryTable()
                    lastRow = iTable.nRows - 1
                    lastCol = iTable.nColumns - 1
                    desiredCoordinates = [lastRow, lastCol]
self.structuralNavigation.goCell(self,
thisCell,
currentCoordinates,
desiredCoordinates)
def goLastLiveRegion(script, inputEvent):
"""Go to the last liveRegion."""
if settings.inferLiveRegions:
script.liveMngr.goLastLiveRegion()
else:
# Translators: this announces to the user that live region
# support has been turned off.
#
script.presentMessage(_("Live region support is off"))
if self.objType == StructuralNavigation.TABLE_CELL:
return goCell
elif self.objType == StructuralNavigation.LIVE_REGION \
and direction == "Last":
return goLastLiveRegion
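        # Note (added): objType/direction combinations other than table
        # cells and (LIVE_REGION, "Last") fall through here, so the factory
        # implicitly returns None; scripts are expected not to register
        # directional bindings for such objects.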
#############################################################################
# #
# StructuralNavigation #
# #
#############################################################################
class StructuralNavigation:
"""This class implements the structural navigation functionality which
is available to scripts. Scripts interested in implementing structural
navigation need to override getEnabledStructuralNavigationTypes() and
return a list of StructuralNavigation object types which should be
enabled.
"""
# The available object types.
#
# Convenience methods have been put into place whereby one can
# create an object (FOO = "foo"), and then provide the following
# methods: _fooBindings(), _fooPredicate(), _fooCriteria(), and
# _fooPresentation(). With these in place, and with the object
# FOO included among the object types returned by the script's
# getEnabledStructuralNavigationTypes(), the StructuralNavigation
# object should be created and set up automagically. At least that
# is the idea. :-) This hopefully will also enable easy re-definition
# of existing StructuralNavigationObjects on a script-by-script basis.
# For instance, in the soffice script, overriding _blockquotePredicate
# should be all that is needed to implement navigation by blockquote
# in OOo Writer documents.
#
ANCHOR = "anchor"
BLOCKQUOTE = "blockquote"
BUTTON = "button"
CHECK_BOX = "checkBox"
CHUNK = "chunk"
COMBO_BOX = "comboBox"
ENTRY = "entry"
FORM_FIELD = "formField"
HEADING = "heading"
LANDMARK = "landmark"
LIST = "list" # Bulleted/numbered lists
LIST_ITEM = "listItem" # Bulleted/numbered list items
LIVE_REGION = "liveRegion"
PARAGRAPH = "paragraph"
RADIO_BUTTON = "radioButton"
SEPARATOR = "separator"
TABLE = "table"
TABLE_CELL = "tableCell"
UNVISITED_LINK = "unvisitedLink"
VISITED_LINK = "visitedLink"
# Whether or not to attempt to use collection. There's no point
# in bothering if we know that the collection interface has not
# been implemented in a given app (e.g. StarOffice/OOo) so this
# variable can be overridden.
#
collectionEnabled = settings.useCollection
# Roles which are recognized as being a form field. Note that this
# is for the purpose of match rules and predicates and refers to
# AT-SPI roles.
#
FORM_ROLES = [pyatspi.ROLE_CHECK_BOX,
pyatspi.ROLE_RADIO_BUTTON,
pyatspi.ROLE_COMBO_BOX,
pyatspi.ROLE_DOCUMENT_FRAME, # rich text editing
pyatspi.ROLE_LIST,
pyatspi.ROLE_ENTRY,
pyatspi.ROLE_PASSWORD_TEXT,
pyatspi.ROLE_PUSH_BUTTON,
pyatspi.ROLE_SPIN_BUTTON,
pyatspi.ROLE_TEXT]
# Roles which are recognized as being potential "large objects"
# or "chunks." Note that this refers to AT-SPI roles.
#
OBJECT_ROLES = [pyatspi.ROLE_HEADING,
pyatspi.ROLE_LIST,
pyatspi.ROLE_PARAGRAPH,
pyatspi.ROLE_TABLE,
pyatspi.ROLE_TABLE_CELL,
pyatspi.ROLE_TEXT,
pyatspi.ROLE_SECTION,
pyatspi.ROLE_DOCUMENT_FRAME]
def __init__(self, script, enabledTypes, enabled=False):
"""Creates an instance of the StructuralNavigation class.
Arguments:
- script: the script which which this instance is associated.
- enabledTypes: a list of StructuralNavigation object types
which the script is interested in supporting.
- enabled: Whether structural navigation should start out
        enabled. For instance, in Gecko by default we do want it
        enabled; in soffice, we would want to start out with it
disabled and have the user enable it via a keystroke when
desired.
"""
self._script = script
self.enabled = enabled
# Create all of the StructuralNavigationObject's in which the
# script is interested, using the convenience method
#
self.enabledObjects = {}
for objType in enabledTypes:
self.enabledObjects[objType] = \
self.structuralNavigationObjectCreator(objType)
self.functions = []
self.inputEventHandlers = {}
self.setupInputEventHandlers()
self.keyBindings = self.getKeyBindings()
# When navigating in a non-uniform table, one can move to a
# cell which spans multiple rows and/or columns. When moving
# beyond that cell, into a cell that does NOT span multiple
# rows/columns, we want to be sure we land in the right place.
# Therefore, we'll store the coordinates from "our perspective."
#
self.lastTableCell = [-1, -1]
def structuralNavigationObjectCreator(self, name):
"""This convenience method creates a StructuralNavigationObject
        with the specified name and associated characteristics. (See the
        "Objects" section of code near the end of this class.) Creators
        of StructuralNavigationObject's can still do things the
        old-fashioned way should they so choose, by creating the instance
and then adding it via addObject().
Arguments:
- name: the name/objType associated with this object.
"""
# We're going to assume bindings. After all, a structural
        # navigation object is by definition an object which one can
# navigate to using the associated keybindings. For similar
# reasons we'll also assume a predicate and a presentation
# method. (See the Objects section towards the end of this
# class for examples of each.)
#
bindings = eval("self._%sBindings()" % name)
predicate = eval("self._%sPredicate" % name)
presentation = eval("self._%sPresentation" % name)
# We won't make this assumption for match criteria because
# the collection interface might not be implemented (e.g.
# StarOffice/OpenOffice) and/or its use might not be possible
# or practical for a given StructuralNavigationObject (e.g.
# matching by text attributes, spatial navigation within tables).
#
try:
criteria = eval("self._%sCriteria" % name)
except:
criteria = None
return StructuralNavigationObject(self, name, bindings, predicate,
criteria, presentation)
def addObject(self, objType, structuralNavigationObject):
"""Adds structuralNavigationObject to the dictionary of enabled
objects.
Arguments:
- objType: the name/object type of the StructuralNavigationObject.
- structuralNavigationObject: the StructuralNavigationObject to
add.
"""
self.enabledObjects[objType] = structuralNavigationObject
def setupInputEventHandlers(self):
"""Defines InputEventHandler fields for a script."""
if not len(self.enabledObjects):
return
self.inputEventHandlers["toggleStructuralNavigationHandler"] = \
input_event.InputEventHandler(
self.toggleStructuralNavigation,
# Translators: the structural navigation keys are designed
# to move the caret around the document content by object
# type. Thus H moves you to the next heading, Shift H to
# the previous heading, T to the next table, and so on.
# This feature needs to be toggle-able so that it does not
# interfere with normal writing functions.
#
_("Toggles structural navigation keys."))
for structuralNavigationObject in self.enabledObjects.values():
self.inputEventHandlers.update(\
structuralNavigationObject.inputEventHandlers)
self.functions.extend(structuralNavigationObject.functions)
def getKeyBindings(self):
"""Defines the structural navigation key bindings for a script.
Returns: an instance of keybindings.KeyBindings.
"""
keyBindings = keybindings.KeyBindings()
if not len(self.enabledObjects):
return keyBindings
keyBindings.add(
keybindings.KeyBinding(
"z",
settings.defaultModifierMask,
settings.ORCA_MODIFIER_MASK,
self.inputEventHandlers["toggleStructuralNavigationHandler"]))
for structuralNavigationObject in self.enabledObjects.values():
bindings = structuralNavigationObject.keyBindings.keyBindings
for keybinding in bindings:
keyBindings.add(keybinding)
return keyBindings
#########################################################################
# #
# Input Event Handler Methods #
# #
#########################################################################
def toggleStructuralNavigation(self, script, inputEvent):
"""Toggles structural navigation keys."""
self.enabled = not self.enabled
if self.enabled:
# Translators: the structural navigation keys are designed
# to move the caret around document content by object type.
# Thus H moves you to the next heading, Shift H to the
# previous heading, T to the next table, and so on. Some
# users prefer to turn this off to use Firefox's search
# when typing feature. This message is sent to both the
# braille display and the speech synthesizer when the user
# toggles the structural navigation feature of Orca.
# It should be a brief informative message.
#
string = _("Structural navigation keys on.")
else:
# Translators: the structural navigation keys are designed
# to move the caret around document content by object type.
# Thus H moves you to the next heading, Shift H to the
# previous heading, T to the next table, and so on. Some
# users prefer to turn this off to use Firefox's search
# when typing feature. This message is sent to both the
# braille display and the speech synthesizer when the user
# toggles the structural navigation feature of Orca.
# It should be a brief informative message.
#
string = _("Structural navigation keys off.")
debug.println(debug.LEVEL_CONFIGURATION, string)
self._script.presentMessage(string)
#########################################################################
# #
# Methods for Moving to Objects #
# #
#########################################################################
def goCell(self, structuralNavigationObject, thisCell,
currentCoordinates, desiredCoordinates):
"""The method used for navigation among cells in a table.
Arguments:
- structuralNavigationObject: the StructuralNavigationObject which
represents the table cell.
- thisCell: the pyatspi accessible TABLE_CELL we're currently in
- currentCoordinates: the [row, column] of thisCell. Note, we
cannot just get the coordinates because in table cells which
span multiple rows and/or columns, the value returned by
table.getRowAtIndex() is the first row the cell spans. Likewise,
the value returned by table.getColumnAtIndex() is the left-most
column. Therefore, we keep track of the row and column from
our perspective to ensure we stay in the correct row and column.
- desiredCoordinates: the [row, column] where we think we'd like to
be.
"""
table = self.getTableForCell(thisCell)
try:
iTable = table.queryTable()
except:
# Translators: this is for navigating document content by
# moving from table cell to table cell. If the user gives a
# table navigation command but is not in a table, Orca speaks
# this message.
#
self._script.presentMessage(_("Not in a table."))
return None
currentRow, currentCol = currentCoordinates
desiredRow, desiredCol = desiredCoordinates
rowDiff = desiredRow - currentRow
colDiff = desiredCol - currentCol
oldRowHeaders = self._getRowHeaders(thisCell)
oldColHeaders = self._getColumnHeaders(thisCell)
cell = thisCell
while cell:
cell = iTable.getAccessibleAt(desiredRow, desiredCol)
if not cell:
if desiredCol < 0:
# Translators: this is for navigating document
# content by moving from table cell to table cell.
# This is the message spoken when the user attempts
# to move to the left of the current cell and is
# already in the first column.
#
self._script.presentMessage(_("Beginning of row."))
desiredCol = 0
elif desiredCol > iTable.nColumns - 1:
# Translators: this is for navigating document
# content by moving from table cell to table cell.
# This is the message spoken when the user attempts
# to move to the right of the current cell and is
# already in the last column.
#
self._script.presentMessage(_("End of row."))
desiredCol = iTable.nColumns - 1
if desiredRow < 0:
# Translators: this is for navigating document
# content by moving from table cell to table cell.
# This is the message spoken when the user attempts
# to move to the cell above the current cell and is
# already in the first row.
#
self._script.presentMessage(_("Top of column."))
desiredRow = 0
elif desiredRow > iTable.nRows - 1:
# Translators: this is for navigating document
# content by moving from table cell to table cell.
# This is the message spoken when the user attempts
# to move to the cell below the current cell and is
# already in the last row.
#
self._script.presentMessage(_("Bottom of column."))
desiredRow = iTable.nRows - 1
elif self._script.utilities.isSameObject(thisCell, cell) \
or settings.skipBlankCells and self._isBlankCell(cell):
if colDiff < 0:
desiredCol -= 1
elif colDiff > 0:
desiredCol += 1
if rowDiff < 0:
desiredRow -= 1
elif rowDiff > 0:
desiredRow += 1
else:
break
self.lastTableCell = [desiredRow, desiredCol]
if cell:
arg = [rowDiff, colDiff, oldRowHeaders, oldColHeaders]
structuralNavigationObject.present(cell, arg)
def goObject(self, structuralNavigationObject, isNext, obj=None, arg=None):
"""The method used for navigation among StructuralNavigationObjects
which are not table cells.
Arguments:
- structuralNavigationObject: the StructuralNavigationObject which
represents the object of interest.
- isNext: If True, we're interested in the next accessible object
which matches structuralNavigationObject. If False, we're
interested in the previous accessible object which matches.
- obj: the current object (typically the locusOfFocus).
- arg: optional arguments which may need to be passed along to
the predicate, presentation method, etc. For instance, in the
case of navigating amongst headings at a given level, the level
is needed and passed in as arg.
"""
obj = obj or self.getCurrentObject()
# Yelp is seemingly fond of killing children for sport. Better
# check for that.
#
try:
state = obj.getState()
except:
return [None, False]
else:
if state.contains(pyatspi.STATE_DEFUNCT):
#print "goObject: defunct object", obj
debug.printException(debug.LEVEL_SEVERE)
return [None, False]
success = False
wrap = settings.wrappedStructuralNavigation
# Try to find it using Collection first. But don't do this with form
# fields for now. It's a bit faster moving to the next form field,
# but not on pages with huge forms (e.g. bugzilla's advanced search
# page). And due to bug #538680, we definitely don't want to use
# collection to go to the previous chunk or form field.
#
formObjects = [self.BUTTON, self.CHECK_BOX, self.COMBO_BOX,
self.ENTRY, self.FORM_FIELD, self.RADIO_BUTTON]
criteria = None
objType = structuralNavigationObject.objType
if self.collectionEnabled \
and not objType in formObjects \
and (isNext or objType != self.CHUNK):
try:
document = self._getDocument()
collection = document.queryCollection()
if structuralNavigationObject.criteria:
criteria = structuralNavigationObject.criteria(collection,
arg)
except:
debug.printException(debug.LEVEL_SEVERE)
else:
# If the document frame itself contains content and that is
# our current object, querying the collection interface will
# result in our starting at the top when looking for the next
# object rather than the current caret offset. See bug 567984.
#
if isNext \
and self._script.utilities.isSameObject(obj, document):
criteria = None
if criteria:
try:
rule = collection.createMatchRule(criteria.states.raw(),
criteria.matchStates,
criteria.objAttrs,
criteria.matchObjAttrs,
criteria.roles,
criteria.matchRoles,
criteria.interfaces,
criteria.matchInterfaces,
criteria.invert)
if criteria.applyPredicate:
predicate = structuralNavigationObject.predicate
else:
predicate = None
if not isNext:
[obj, wrapped] = self._findPrevByMatchRule(collection,
rule,
wrap,
obj,
predicate)
else:
[obj, wrapped] = self._findNextByMatchRule(collection,
rule,
wrap,
obj,
predicate)
success = True
collection.freeMatchRule(rule)
# print "collection", structuralNavigationObject.objType
except NotImplementedError:
debug.printException(debug.LEVEL_SEVERE)
except:
debug.printException(debug.LEVEL_SEVERE)
collection.freeMatchRule(rule)
# Do it iteratively when Collection failed or is disabled
#
if not success:
pred = structuralNavigationObject.predicate
if not isNext:
[obj, wrapped] = self._findPrevByPredicate(pred, wrap,
obj, arg)
else:
[obj, wrapped] = self._findNextByPredicate(pred, wrap,
obj, arg)
# print "predicate", structuralNavigationObject.objType
if wrapped:
if not isNext:
# Translators: when the user is attempting to locate a
# particular object and the top of a page or list is
# reached without that object being found, we "wrap" to
# the bottom and continue looking upwards. We need to
# inform the user when this is taking place.
#
self._script.presentMessage(_("Wrapping to bottom."))
else:
# Translators: when the user is attempting to locate a
# particular object and the bottom of a page or list is
# reached without that object being found, we "wrap" to the
# top and continue looking downwards. We need to inform the
# user when this is taking place.
#
self._script.presentMessage(_("Wrapping to top."))
structuralNavigationObject.present(obj, arg)
#########################################################################
# #
# Utility Methods for Finding Objects #
# #
#########################################################################
def getCurrentObject(self):
"""Returns the current object. Normally, the locusOfFocus. But
in the case of Gecko, that doesn't always work.
"""
return orca_state.locusOfFocus
def _findPrevByMatchRule(self, collection, matchRule, wrap, currentObj,
predicate=None):
"""Finds the previous object using the given match rule as a
pattern to match or not match.
Arguments:
-collection: the accessible collection interface
-matchRule: the collections match rule to use
-wrap: if True and the bottom of the document is reached, move
to the top and keep looking.
-currentObj: the object from which the search should begin
-predicate: an optional predicate to further test if the item
found via collection is indeed a match.
Returns: [obj, wrapped] where wrapped is a boolean reflecting
whether wrapping took place.
"""
currentObj = currentObj or self.getCurrentObject()
document = self._getDocument()
# If the current object is the document itself, find an actual
# object to use as the starting point. Otherwise we're in
# danger of skipping over the objects in between our present
# location and top of the document.
#
if self._script.utilities.isSameObject(currentObj, document):
currentObj = self._findNextObject(currentObj, document)
ancestors = []
obj = currentObj.parent
if obj.getRole() in [pyatspi.ROLE_LIST, pyatspi.ROLE_TABLE]:
ancestors.append(obj)
else:
while obj:
ancestors.append(obj)
obj = obj.parent
match, wrapped = None, False
results = collection.getMatchesTo(currentObj,
matchRule,
collection.SORT_ORDER_CANONICAL,
collection.TREE_INORDER,
True,
1,
True)
while not match:
if len(results) == 0:
if wrapped or not wrap:
break
elif wrap:
lastObj = self._findLastObject(document)
# Collection does not do an inclusive search, meaning
# that the start object is not part of the search. So
# we need to test the lastobj separately using the given
# matchRule. We don't have this problem for 'Next' because
# the startobj is the doc frame.
#
secondLastObj = self._findPreviousObject(lastObj, document)
results = collection.getMatchesFrom(\
secondLastObj,
matchRule,
collection.SORT_ORDER_CANONICAL,
collection.TREE_INORDER,
1,
True)
wrapped = True
if len(results) > 0 \
and (not predicate or predicate(results[0])):
match = results[0]
else:
results = collection.getMatchesTo(\
lastObj,
matchRule,
collection.SORT_ORDER_CANONICAL,
collection.TREE_INORDER,
True,
1,
True)
elif len(results) > 0:
if results[0] in ancestors \
or predicate and not predicate(results[0]):
results = collection.getMatchesTo(\
results[0],
matchRule,
collection.SORT_ORDER_CANONICAL,
collection.TREE_INORDER,
True,
1,
True)
else:
match = results[0]
return [match, wrapped]
def _findNextByMatchRule(self, collection, matchRule, wrap, currentObj,
predicate=None):
"""Finds the next object using the given match rule as a pattern
to match or not match.
Arguments:
-collection: the accessible collection interface
-matchRule: the collections match rule to use
-wrap: if True and the bottom of the document is reached, move
to the top and keep looking.
-currentObj: the object from which the search should begin
-predicate: an optional predicate to further test if the item
found via collection is indeed a match.
Returns: [obj, wrapped] where wrapped is a boolean reflecting
whether wrapping took place.
"""
currentObj = currentObj or self.getCurrentObject()
ancestors = []
[currentObj, offset] = self._script.getCaretContext()
obj = currentObj.parent
while obj:
ancestors.append(obj)
obj = obj.parent
match, wrapped = None, False
while not match:
results = collection.getMatchesFrom(\
currentObj,
matchRule,
collection.SORT_ORDER_CANONICAL,
collection.TREE_INORDER,
1,
True)
if len(results) > 0 and not results[0] in ancestors:
currentObj = results[0]
if not predicate or predicate(currentObj):
match = currentObj
elif wrap and not wrapped:
wrapped = True
ancestors = [currentObj]
currentObj = self._getDocument()
else:
break
return [match, wrapped]
def _findPrevByPredicate(self, pred, wrap, currentObj=None, arg=None):
"""Finds the caret offset at the beginning of the previous object
using the given predicate as a pattern to match.
Arguments:
-pred: a python callable that takes an accessible argument and
returns true/false based on some match criteria
-wrap: if True and the top of the document is reached, move
to the bottom and keep looking.
-currentObj: the object from which the search should begin
-arg: an additional value to be passed to the predicate
Returns: [obj, wrapped] where wrapped is a boolean reflecting
whether wrapping took place.
"""
currentObj = currentObj or self.getCurrentObject()
document = self._getDocument()
# If the current object is the document itself, find an actual
# object to use as the starting point. Otherwise we're in
# danger of skipping over the objects in between our present
# location and top of the document.
#
if self._script.utilities.isSameObject(currentObj, document):
currentObj = self._findNextObject(currentObj, document)
ancestors = []
nestableRoles = [pyatspi.ROLE_LIST, pyatspi.ROLE_TABLE]
obj = currentObj.parent
while obj:
ancestors.append(obj)
obj = obj.parent
obj = self._findPreviousObject(currentObj, document)
wrapped = obj is None
match = None
if wrapped:
obj = self._findLastObject(document)
while obj and not match:
isNested = (obj != currentObj.parent \
and currentObj.parent.getRole() == obj.getRole() \
and obj.getRole() in nestableRoles)
if (not obj in ancestors or isNested) and pred(obj):
if wrapped \
and self._script.utilities.isSameObject(currentObj, obj):
break
else:
match = obj
else:
obj = self._findPreviousObject(obj, document)
if not obj and wrap and not wrapped:
obj = self._findLastObject(document)
wrapped = True
return [match, wrapped]
def _findNextByPredicate(self, pred, wrap, currentObj=None, arg=None):
"""Finds the caret offset at the beginning of the next object
using the given predicate as a pattern to match or not match.
Arguments:
-pred: a python callable that takes an accessible argument and
returns true/false based on some match criteria
-wrap: if True and the bottom of the document is reached, move
to the top and keep looking.
-currentObj: the object from which the search should begin
-arg: an additional value to be passed to the predicate
Returns: [obj, wrapped] where wrapped is a boolean reflecting
whether wrapping took place.
"""
currentObj = currentObj or self.getCurrentObject()
ancestors = []
obj = currentObj.parent
while obj:
ancestors.append(obj)
obj = obj.parent
document = self._getDocument()
obj = self._findNextObject(currentObj, document)
wrapped = obj is None
match = None
if wrapped:
[obj, offset] = self._getCaretPosition(document)
while obj and not match:
if (not obj in ancestors) and pred(obj, arg):
if wrapped \
and self._script.utilities.isSameObject(currentObj, obj):
break
else:
match = obj
else:
obj = self._findNextObject(obj, document)
if not obj and wrap and not wrapped:
[obj, offset] = self._getCaretPosition(document)
wrapped = True
return [match, wrapped]
def _findPreviousObject(self, obj, stopAncestor):
"""Finds the object prior to this one, where the tree we're
dealing with is a DOM and 'prior' means the previous object
in a linear presentation sense.
Arguments:
-obj: the object where to start.
-stopAncestor: the ancestor at which the search should stop
"""
        # NOTE: This method is based on some initial experimentation
# with OOo structural navigation. It might need refining
# or fixing and is being overridden by the Gecko method
# regardless, so this one can be modified as appropriate.
#
prevObj = None
index = obj.getIndexInParent() - 1
if index >= 0:
prevObj = obj.parent[index]
if prevObj.childCount:
prevObj = prevObj[prevObj.childCount - 1]
elif not self._script.utilities.isSameObject(obj.parent, stopAncestor):
prevObj = obj.parent
return prevObj
def _findNextObject(self, obj, stopAncestor):
"""Finds the object after to this one, where the tree we're
dealing with is a DOM and 'next' means the next object
in a linear presentation sense.
Arguments:
-obj: the object where to start.
-stopAncestor: the ancestor at which the search should stop
"""
        # NOTE: This method is based on some initial experimentation
# with OOo structural navigation. It might need refining
# or fixing and is being overridden by the Gecko method
# regardless, so this one can be modified as appropriate.
#
nextObj = None
if obj and obj.childCount:
nextObj = obj[0]
while obj and obj.parent != obj and not nextObj:
index = obj.getIndexInParent() + 1
if 0 < index < obj.parent.childCount:
nextObj = obj.parent[index]
elif not self._script.utilities.isSameObject(
obj.parent, stopAncestor):
obj = obj.parent
else:
break
return nextObj
def _findLastObject(self, ancestor):
"""Returns the last object in ancestor.
Arguments:
- ancestor: the accessible object whose last (child) object
is sought.
"""
        # NOTE: This method is based on some initial experimentation
# with OOo structural navigation. It might need refining
# or fixing and is being overridden by the Gecko method
# regardless, so this one can be modified as appropriate.
#
if not ancestor or not ancestor.childCount:
return ancestor
lastChild = ancestor[ancestor.childCount - 1]
while lastChild:
lastObj = self._findNextObject(lastChild, ancestor)
if lastObj:
lastChild = lastObj
else:
break
return lastChild
def _getDocument(self):
"""Returns the document or other object in which the object of
interest is contained.
"""
docRoles = [pyatspi.ROLE_DOCUMENT_FRAME]
stopRoles = [pyatspi.ROLE_FRAME, pyatspi.ROLE_SCROLL_PANE]
document = self._script.utilities.ancestorWithRole(
orca_state.locusOfFocus, docRoles, stopRoles)
return document
def _isInDocument(self, obj):
"""Returns True if the accessible object obj is inside of
the document.
Arguments:
-obj: the accessible object of interest.
"""
document = self._getDocument()
while obj and obj.parent:
if self._script.utilities.isSameObject(obj.parent, document):
return True
else:
obj = obj.parent
return False
def _isUselessObject(self, obj):
"""Returns True if the accessible object obj is an object
that doesn't have any meaning associated with it. Individual
scripts should override this method as needed. Gecko does.
Arguments:
- obj: the accessible object of interest.
"""
return False
#########################################################################
# #
# Methods for Presenting Objects #
# #
#########################################################################
def _getTableCaption(self, obj):
"""Returns a string which contains the table caption, or
None if a caption could not be found.
Arguments:
- obj: the accessible table whose caption we want.
"""
caption = obj.queryTable().caption
try:
caption.queryText()
except:
return None
else:
return self._script.utilities.displayedText(caption)
def _getTableDescription(self, obj):
"""Returns a string which describes the table."""
nonUniformString = ""
nonUniform = self._isNonUniformTable(obj)
if nonUniform:
# Translators: a uniform table is one in which each table
# cell occupies one row and one column (i.e. a perfect grid)
# In contrast, a non-uniform table is one in which at least
# one table cell occupies more than one row and/or column.
#
nonUniformString = _("Non-uniform") + " "
table = obj.queryTable()
nRows = table.nRows
nColumns = table.nColumns
# Translators: this represents the number of rows in a table.
#
rowString = ngettext("table with %d row",
"table with %d rows",
nRows) % nRows
# Translators: this represents the number of columns in a table.
#
colString = ngettext("%d column",
"%d columns",
nColumns) % nColumns
return (nonUniformString + rowString + " " + colString)
def _isNonUniformTable(self, obj):
"""Returns True if the obj is a non-uniform table (i.e. a table
where at least one cell spans multiple rows and/or columns).
Arguments:
- obj: the table to examine
"""
try:
table = obj.queryTable()
except:
pass
else:
for i in xrange(obj.childCount):
[isCell, row, col, rowExtents, colExtents, isSelected] = \
table.getRowColumnExtentsAtIndex(i)
if (rowExtents > 1) or (colExtents > 1):
return True
return False
def getCellForObj(self, obj):
"""Looks for a table cell in the ancestry of obj, if obj is not a
table cell.
Arguments:
- obj: the accessible object of interest.
"""
cellRoles = [pyatspi.ROLE_TABLE_CELL, pyatspi.ROLE_COLUMN_HEADER]
if obj and not obj.getRole() in cellRoles:
document = self._getDocument()
obj = self._script.utilities.ancestorWithRole(
obj, cellRoles, [document.getRole()])
return obj
def getTableForCell(self, obj):
"""Looks for a table in the ancestry of obj, if obj is not a table.
Arguments:
- obj: the accessible object of interest.
"""
if obj and obj.getRole() != pyatspi.ROLE_TABLE:
document = self._getDocument()
obj = self._script.utilities.ancestorWithRole(
obj, [pyatspi.ROLE_TABLE], [document.getRole()])
return obj
def _isBlankCell(self, obj):
"""Returns True if the table cell is empty or consists of whitespace.
Arguments:
        - obj: the accessible table cell to examine
"""
if obj and obj.getRole() == pyatspi.ROLE_COLUMN_HEADER and obj.name:
return False
text = self._script.utilities.displayedText(obj)
if text and len(text.strip()) and text != obj.name:
return False
else:
for child in obj:
text = self._script.utilities.displayedText(child)
if text and len(text.strip()) \
or child.getRole() == pyatspi.ROLE_LINK:
return False
return True
def _getCellText(self, obj):
"""Looks at the table cell and tries to get its text.
Arguments:
        - obj: the accessible table cell to examine
"""
text = ""
if obj and not obj.childCount:
text = self._script.utilities.displayedText(obj)
else:
for child in obj:
childText = self._script.utilities.displayedText(child)
text = self._script.utilities.appendString(text, childText)
return text
def _presentCellHeaders(self, cell, oldCellInfo):
"""Speaks the headers of the accessible table cell, cell.
Arguments:
- cell: the accessible table cell whose headers we wish to
present.
- oldCellInfo: [rowDiff, colDiff, oldRowHeaders, oldColHeaders]
"""
if not cell or not oldCellInfo:
return
rowDiff, colDiff, oldRowHeaders, oldColHeaders = oldCellInfo
if not (oldRowHeaders or oldColHeaders):
return
# We only want to speak the header information that has
# changed, and we don't want to speak headers if we're in
# a header row/col.
#
if rowDiff and not self._isInHeaderRow(cell):
rowHeaders = self._getRowHeaders(cell)
for header in rowHeaders:
if not header in oldRowHeaders:
text = self._getCellText(header)
speech.speak(text)
if colDiff and not self._isInHeaderColumn(cell):
colHeaders = self._getColumnHeaders(cell)
for header in colHeaders:
if not header in oldColHeaders:
text = self._getCellText(header)
speech.speak(text)
def _getCellSpanInfo(self, obj):
"""Returns a string reflecting the number of rows and/or columns
spanned by a table cell when multiple rows and/or columns are
spanned.
Arguments:
- obj: the accessible table cell whose cell span we want.
"""
if not obj or (obj.getRole() != pyatspi.ROLE_TABLE_CELL):
return
parentTable = self.getTableForCell(obj)
try:
table = parentTable.queryTable()
except:
return
[row, col] = self.getCellCoordinates(obj)
rowspan = table.getRowExtentAt(row, col)
colspan = table.getColumnExtentAt(row, col)
spanString = ""
if (colspan > 1) and (rowspan > 1):
# Translators: The cell here refers to a cell within a table
# within a document. We need to announce when the cell occupies
# or "spans" more than a single row and/or column.
#
spanString = ngettext("Cell spans %d row",
"Cell spans %d rows",
rowspan) % rowspan
# Translators: this represents the number of columns in a table.
#
spanString += ngettext(" %d column",
" %d columns",
colspan) % colspan
elif (colspan > 1):
# Translators: The cell here refers to a cell within a table
# within a document. We need to announce when the cell occupies
# or "spans" more than a single row and/or column.
#
spanString = ngettext("Cell spans %d column",
"Cell spans %d columns",
colspan) % colspan
elif (rowspan > 1):
# Translators: The cell here refers to a cell within a table
# within a document. We need to announce when the cell occupies
# or "spans" more than a single row and/or column.
#
spanString = ngettext("Cell spans %d row",
"Cell spans %d rows",
rowspan) % rowspan
return spanString
def getCellCoordinates(self, obj):
"""Returns the [row, col] of a ROLE_TABLE_CELL or [-1, -1]
if the coordinates cannot be found.
Arguments:
- obj: the accessible table cell whose coordinates we want.
"""
obj = self.getCellForObj(obj)
parent = self.getTableForCell(obj)
try:
table = parent.queryTable()
except:
pass
else:
# If we're in a cell that spans multiple rows and/or columns,
# thisRow and thisCol will refer to the upper left cell in
# the spanned range(s). We're storing the lastTableCell that
# we're aware of in order to facilitate more linear movement.
# Therefore, if the lastTableCell and this table cell are the
# same cell, we'll go with the stored coordinates.
#
lastRow, lastCol = self.lastTableCell
lastKnownCell = table.getAccessibleAt(lastRow, lastCol)
if self._script.utilities.isSameObject(lastKnownCell, obj):
return [lastRow, lastCol]
else:
index = self._script.utilities.cellIndex(obj)
thisRow = table.getRowAtIndex(index)
thisCol = table.getColumnAtIndex(index)
return [thisRow, thisCol]
return [-1, -1]
def _getRowHeaders(self, obj):
"""Returns a list of table cells that serve as a row header for
the specified TABLE_CELL.
Arguments:
- obj: the accessible table cell whose header(s) we want.
"""
rowHeaders = []
if not obj:
return rowHeaders
parentTable = self.getTableForCell(obj)
try:
table = parentTable.queryTable()
except:
pass
else:
[row, col] = self.getCellCoordinates(obj)
# Theoretically, we should be able to quickly get the text
# of a {row, column}Header via get{Row,Column}Description().
# Gecko doesn't expose the information that way, however.
# get{Row,Column}Header seems to work sometimes.
#
header = table.getRowHeader(row)
if header:
rowHeaders.append(header)
# Headers that are strictly marked up with <th> do not seem
# to be exposed through get{Row, Column}Header.
#
else:
# If our cell spans multiple rows, we want to get all of
# the headers that apply.
#
rowspan = table.getRowExtentAt(row, col)
for r in range(row, row+rowspan):
# We could have multiple headers for a given row, one
# header per column. Presumably all of the headers are
# prior to our present location.
#
for c in range(0, col):
cell = table.getAccessibleAt(r, c)
if self._isHeader(cell) and not cell in rowHeaders:
rowHeaders.append(cell)
return rowHeaders
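    # Worked example (hypothetical table): for a cell at (row=2, col=3)
    # with a rowspan of 2 and no header exposed via getRowHeader(), the
    # fallback loop above scans cells (2,0)..(2,2) and then (3,0)..(3,2),
    # collecting any that _isHeader() recognizes, in row-major order.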
def _getColumnHeaders(self, obj):
"""Returns a list of table cells that serve as a column header for
the specified TABLE_CELL.
Arguments:
- obj: the accessible table cell whose header(s) we want.
"""
columnHeaders = []
if not obj:
return columnHeaders
parentTable = self.getTableForCell(obj)
try:
table = parentTable.queryTable()
except:
pass
else:
[row, col] = self.getCellCoordinates(obj)
# Theoretically, we should be able to quickly get the text
# of a {row, column}Header via get{Row,Column}Description().
# Gecko doesn't expose the information that way, however.
# get{Row,Column}Header seems to work sometimes.
#
header = table.getColumnHeader(col)
if header:
columnHeaders.append(header)
# Headers that are strictly marked up with <th> do not seem
# to be exposed through get{Row, Column}Header.
#
else:
# If our cell spans multiple columns, we want to get all of
# the headers that apply.
#
colspan = table.getColumnExtentAt(row, col)
for c in range(col, col+colspan):
# We could have multiple headers for a given column, one
# header per row. Presumably all of the headers are
# prior to our present location.
#
for r in range(0, row):
cell = table.getAccessibleAt(r, c)
                    if self._isHeader(cell) and cell not in columnHeaders:
columnHeaders.append(cell)
return columnHeaders
def _isInHeaderRow(self, obj):
"""Returns True if all of the cells in the same row as this cell are
headers.
Arguments:
- obj: the accessible table cell whose row is to be examined.
"""
if obj and obj.getRole() == pyatspi.ROLE_TABLE_CELL:
parentTable = self.getTableForCell(obj)
try:
table = parentTable.queryTable()
except:
return True
index = self._script.utilities.cellIndex(obj)
row = table.getRowAtIndex(index)
for col in xrange(table.nColumns):
cell = table.getAccessibleAt(row, col)
if not self._isHeader(cell):
return False
return True
def _isInHeaderColumn(self, obj):
"""Returns True if all of the cells in the same column as this cell
are headers.
Arguments:
- obj: the accessible table cell whose column is to be examined.
"""
if obj and obj.getRole() == pyatspi.ROLE_TABLE_CELL:
parentTable = self.getTableForCell(obj)
try:
table = parentTable.queryTable()
except:
return True
index = self._script.utilities.cellIndex(obj)
col = table.getColumnAtIndex(index)
for row in xrange(table.nRows):
cell = table.getAccessibleAt(row, col)
if not self._isHeader(cell):
return False
return True
def _isHeader(self, obj):
"""Returns True if the table cell is a header.
Arguments:
- obj: the accessible table cell to examine.
"""
if not obj:
return False
elif obj.getRole() in [pyatspi.ROLE_TABLE_COLUMN_HEADER,
pyatspi.ROLE_TABLE_ROW_HEADER,
pyatspi.ROLE_COLUMN_HEADER]:
return True
else:
attributes = obj.getAttributes()
if attributes:
for attribute in attributes:
if attribute == "tag:TH":
return True
return False
def _getHeadingLevel(self, obj):
"""Determines the heading level of the given object. A value
of 0 means there is no heading level.
Arguments:
- obj: the accessible whose heading level we want.
"""
level = 0
if obj is None:
return level
if obj.getRole() == pyatspi.ROLE_HEADING:
attributes = obj.getAttributes()
if attributes is None:
return level
for attribute in attributes:
if attribute.startswith("level:"):
level = int(attribute.split(":")[1])
break
return level
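    # For example (attribute strings are illustrative): an accessible
    # whose getAttributes() returns ["tag:H2", "level:2"] yields 2; any
    # non-heading role, or a heading lacking a "level:" attribute,
    # yields 0.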
def _getCaretPosition(self, obj):
"""Returns the [obj, characterOffset] where the caret should be
positioned. For most scripts, the object should not change and
the offset should be 0. That's not always the case with Gecko.
Arguments:
- obj: the accessible object in which the caret should be
positioned.
"""
return [obj, 0]
def _setCaretPosition(self, obj, characterOffset):
"""Sets the caret at the specified offset within obj.
Arguments:
- obj: the accessible object in which the caret should be
positioned.
- characterOffset: the offset at which to position the caret.
"""
try:
text = obj.queryText()
text.setCaretOffset(characterOffset)
except NotImplementedError:
try:
obj.queryComponent().grabFocus()
except:
debug.printException(debug.LEVEL_SEVERE)
except:
debug.printException(debug.LEVEL_SEVERE)
orca.setLocusOfFocus(None, obj, notifyScript=False)
def _presentLine(self, obj, offset):
"""Presents the first line of the object to the user.
Arguments:
- obj: the accessible object to be presented.
- offset: the character offset within obj.
"""
self._script.updateBraille(obj)
self._script.sayLine(obj)
def _presentObject(self, obj, offset):
"""Presents the entire object to the user.
Arguments:
- obj: the accessible object to be presented.
- offset: the character offset within obj.
"""
self._script.updateBraille(obj)
# [[[TODO: WDW - move the voice selection to formatting.py
# at some point.]]]
#
voices = self._script.voices
if obj.getRole() == pyatspi.ROLE_LINK:
voice = voices[settings.HYPERLINK_VOICE]
else:
voice = voices[settings.DEFAULT_VOICE]
utterances = self._script.speechGenerator.generateSpeech(obj)
speech.speak(utterances, voice)
#########################################################################
# #
# Objects #
# #
#########################################################################
# All structural navigation objects have the following essential
# characteristics:
#
# 1. Keybindings for goPrevious, goNext, and other such methods
# 2. A means of identification (at least a predicate and possibly
# also criteria for generating a collection match rule)
# 3. A definition of how the object should be presented (both
# when another instance of that object is found as well as
# when it is not)
#
# Convenience methods have been put into place whereby one can
# create an object (FOO = "foo"), and then provide the following
# methods: _fooBindings(), _fooPredicate(), _fooCriteria(), and
# _fooPresentation(). With these in place, and with the object
# FOO included among the StructuralNavigation.enabledTypes for
# the script, the structural navigation object should be created
# and set up automagically. At least that is the idea. :-) This
# hopefully will also enable easy re-definition of existing
# objects on a script-by-script basis. For instance, in the
# StarOffice script, overriding the _blockquotePredicate should
# be all that is needed to implement navigation by blockquote
# in OOo Writer documents.
#
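    # As a sketch only (the object name and keybinding below are
    # illustrative and not part of this module), a hypothetical new type
    # SIDEBAR = "sidebar" added to StructuralNavigation.enabledTypes
    # would need roughly the following; _sidebarCriteria() could also be
    # provided if the type is locatable via collection:
    #
    #     def _sidebarBindings(self):
    #         bindings = {}
    #         prevDesc = _("Goes to previous sidebar.")
    #         bindings["previous"] = \
    #             ["z", settings.SHIFT_MODIFIER_MASK, prevDesc]
    #         nextDesc = _("Goes to next sidebar.")
    #         bindings["next"] = ["z", settings.NO_MODIFIER_MASK, nextDesc]
    #         return bindings
    #
    #     def _sidebarPredicate(self, obj, arg=None):
    #         return obj and obj.getRole() == pyatspi.ROLE_PANEL
    #
    #     def _sidebarPresentation(self, obj, arg=None):
    #         if obj:
    #             [obj, characterOffset] = self._getCaretPosition(obj)
    #             self._setCaretPosition(obj, characterOffset)
    #             self._presentObject(obj, characterOffset)
    #         else:
    #             self._script.presentMessage(_("No more sidebars."))
    #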
########################
# #
# Anchors #
# #
########################
def _anchorBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst anchors.
"""
# NOTE: This doesn't handle the case where the anchor is not an
# old-school <a name/id="foo"></a> anchor. For instance on the
# GNOME wiki, an "anchor" is actually an id applied to some other
        # tag (e.g. <h2 id="foo">My Heading</h2>). We'll have to be a
# bit more clever for those. With the old-school anchors, this
# seems to work nicely and provides the user with a way to jump
# among defined areas without having to find a Table of Contents
# group of links (assuming such a thing is even present on the
# page).
bindings = {}
# Translators: this is for navigating among anchors in a document.
# An anchor is a named spot that one can jump to.
#
prevDesc = _("Goes to previous anchor.")
bindings["previous"] = ["a", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among anchors in a document.
# An anchor is a named spot that one can jump to.
#
nextDesc = _("Goes to next anchor.")
bindings["next"] = ["a", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _anchorCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating anchors
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_LINK]
state = [pyatspi.STATE_FOCUSABLE]
stateMatch = collection.MATCH_NONE
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
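    # Note on the criteria above: MATCH_NONE with STATE_FOCUSABLE means
    # "links which are NOT focusable". An old-school named anchor such as
    # <a name="top"></a> has ROLE_LINK but cannot take focus, whereas a
    # real hyperlink is focusable; the predicate below applies the same
    # test object-by-object.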
def _anchorPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is an anchor.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_LINK:
state = obj.getState()
isMatch = not state.contains(pyatspi.STATE_FOCUSABLE)
return isMatch
def _anchorPresentation(self, obj, arg=None):
"""Presents the anchor or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentObject(obj, characterOffset)
else:
# Translators: this is for navigating document content by
# moving from anchor to anchor. (An anchor is a named spot
# that one can jump to.) This is a detailed message which
# will be presented to the user if no more anchors can be found.
#
full = _("No more anchors.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Blockquotes #
# #
########################
def _blockquoteBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating among blockquotes.
"""
bindings = {}
# Translators: this is for navigating among blockquotes in a
# document.
#
prevDesc = _("Goes to previous blockquote.")
bindings["previous"] = ["q", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among blockquotes in a
# document.
#
nextDesc = _("Goes to next blockquote.")
bindings["next"] = ["q", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _blockquoteCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating blockquotes
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
attrs = ['tag:BLOCKQUOTE']
return MatchCriteria(collection, objAttrs=attrs)
def _blockquotePredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a blockquote.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if not obj:
return False
attributes = obj.getAttributes()
if attributes:
for attribute in attributes:
if attribute == "tag:BLOCKQUOTE":
return True
return False
def _blockquotePresentation(self, obj, arg=None):
"""Presents the blockquote or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
# TODO: We currently present the line, so that's kept here.
# But we should probably present the object, which would
# be consistent with the change made recently for headings.
#
self._presentLine(obj, characterOffset)
else:
# Translators: this is for navigating document content by
# moving from blockquote to blockquote. This is a detailed
# message which will be presented to the user if no more
# blockquotes can be found.
#
full = _("No more blockquotes.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Buttons #
# #
########################
def _buttonBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst buttons.
"""
bindings = {}
# Translators: this is for navigating among buttons in a form
# within a document.
#
prevDesc = _("Goes to previous button.")
bindings["previous"] = ["b", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among buttons in a form
# within a document.
#
nextDesc = _("Goes to next button.")
bindings["next"] = ["b", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _buttonCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating buttons
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_PUSH_BUTTON]
state = [pyatspi.STATE_FOCUSABLE, pyatspi.STATE_SENSITIVE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _buttonPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a button.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_PUSH_BUTTON:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE)
return isMatch
def _buttonPresentation(self, obj, arg=None):
"""Presents the button or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by
# moving from push button to push button in a form. This is
# a detailed message which will be presented to the user if
# no more push buttons can be found.
#
full = _("No more buttons.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Check boxes #
# #
########################
def _checkBoxBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst check boxes.
"""
bindings = {}
# Translators: this is for navigating among check boxes in a form
# within a document.
#
prevDesc = _("Goes to previous check box.")
bindings["previous"] = ["x", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among check boxes in a form
# within a document.
#
nextDesc = _("Goes to next check box.")
bindings["next"] = ["x", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _checkBoxCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating check boxes
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_CHECK_BOX]
state = [pyatspi.STATE_FOCUSABLE, pyatspi.STATE_SENSITIVE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _checkBoxPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a check box.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_CHECK_BOX:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE)
return isMatch
def _checkBoxPresentation(self, obj, arg=None):
"""Presents the check box or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by
# moving from checkbox to checkbox in a form. This is a
# detailed message which will be presented to the user if
# no more checkboxes can be found.
#
full = _("No more check boxes.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Chunks/Large Objects #
# #
########################
def _chunkBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst chunks/large objects.
"""
bindings = {}
# Translators: this is for navigating a document in a
# structural manner, where a 'large object' is a logical
# chunk of text, such as a paragraph, a list, a table, etc.
#
prevDesc = _("Goes to previous large object.")
bindings["previous"] = ["o", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating a document in a
# structural manner, where a 'large object' is a logical
# chunk of text, such as a paragraph, a list, a table, etc.
#
nextDesc = _("Goes to next large object.")
bindings["next"] = ["o", settings.NO_MODIFIER_MASK, nextDesc]
# I don't think it makes sense to add support for a list
# of chunks. But one could always change that here.
#
return bindings
def _chunkCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating chunks/
large objects by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = self.OBJECT_ROLES
roleMatch = collection.MATCH_ANY
return MatchCriteria(collection,
roles=role,
matchRoles=roleMatch,
applyPredicate=True)
def _chunkPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a chunk.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() in self.OBJECT_ROLES:
try:
text = obj.queryText()
characterCount = text.characterCount
except:
characterCount = 0
if characterCount > settings.largeObjectTextLength \
and not self._isUselessObject(obj):
isMatch = True
return isMatch
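    # Example (the threshold is configurable; at the time of this
    # writing settings.largeObjectTextLength defaults to 75 characters):
    # a paragraph containing a couple of sentences qualifies as a chunk,
    # while a one-word caption does not.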
def _chunkPresentation(self, obj, arg=None):
"""Presents the chunk or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[newObj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(newObj, characterOffset)
self._presentObject(obj, 0)
else:
# Translators: this is for navigating document content by
# moving from 'large object' to 'large object'. A 'large
# object' is a logical chunk of text, such as a paragraph,
# a list, a table, etc. This is a detailed message which
# will be presented to the user if no more large objects
# can be found.
#
full = _("No more large objects.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Combo Boxes #
# #
########################
def _comboBoxBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst combo boxes.
"""
bindings = {}
# Translators: this is for navigating among combo boxes in a form
# within a document.
#
prevDesc = _("Goes to previous combo box.")
bindings["previous"] = ["c", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among combo boxes in a form
# within a document.
#
nextDesc = _("Goes to next combo box.")
bindings["next"] = ["c", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _comboBoxCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating combo boxes
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_COMBO_BOX]
state = [pyatspi.STATE_FOCUSABLE, pyatspi.STATE_SENSITIVE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _comboBoxPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a combo box.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_COMBO_BOX:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE)
return isMatch
def _comboBoxPresentation(self, obj, arg=None):
"""Presents the combo box or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by
# moving from combo box to combo box in a form. This is a
# detailed message which will be presented to the user if
            # no more combo boxes can be found.
#
full = _("No more combo boxes.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Entries #
# #
########################
def _entryBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst entries.
"""
bindings = {}
# Translators: this is for navigating among text entries in a form
# within a document.
#
prevDesc = _("Goes to previous entry.")
bindings["previous"] = ["e", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among text entries
# in a form.
#
nextDesc = _("Goes to next entry.")
bindings["next"] = ["e", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _entryCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating entries
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_DOCUMENT_FRAME,
pyatspi.ROLE_ENTRY,
pyatspi.ROLE_PASSWORD_TEXT,
pyatspi.ROLE_TEXT]
roleMatch = collection.MATCH_ANY
state = [pyatspi.STATE_FOCUSABLE,
pyatspi.STATE_SENSITIVE,
pyatspi.STATE_EDITABLE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role,
matchRoles=roleMatch,
applyPredicate=True)
def _entryPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is an entry.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() in [pyatspi.ROLE_DOCUMENT_FRAME,
pyatspi.ROLE_ENTRY,
pyatspi.ROLE_PASSWORD_TEXT,
pyatspi.ROLE_TEXT]:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE) \
and state.contains(pyatspi.STATE_EDITABLE)
return isMatch
def _entryPresentation(self, obj, arg=None):
"""Presents the entry or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by
# moving from text entry to text entry in a form. This is
# a detailed message which will be presented to the user if
# no more text entries can be found.
#
full = _("No more entries.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Form Fields #
# #
########################
def _formFieldBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst form fields.
"""
bindings = {}
# Translators: this is for navigating among fields in a form within
# a document.
#
prevDesc = _("Goes to previous form field.")
bindings["previous"] = ["Tab",
settings.ORCA_SHIFT_MODIFIER_MASK,
prevDesc]
# Translators: this is for navigating among fields in a form within
# a document.
#
nextDesc = _("Goes to next form field.")
bindings["next"] = ["Tab", settings.ORCA_MODIFIER_MASK, nextDesc]
return bindings
def _formFieldCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating form fields
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = self.FORM_ROLES
roleMatch = collection.MATCH_ANY
state = [pyatspi.STATE_FOCUSABLE, pyatspi.STATE_SENSITIVE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role,
matchRoles=roleMatch,
applyPredicate=True)
def _formFieldPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a form field.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() in self.FORM_ROLES:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE)
return isMatch
def _formFieldPresentation(self, obj, arg=None):
"""Presents the form field or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
if obj.getRole() in [pyatspi.ROLE_LIST, pyatspi.ROLE_COMBO_BOX]:
obj.queryComponent().grabFocus()
else:
# TODO: I think we should just grab focus on the object
# regardless of the object type. But that's not what we
# do now, and it causes an extra newline character to show
# up in the regression test output for entries, so for the
# purpose of passing the regression tests, I'm not making
# that change yet.
#
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentObject(obj, characterOffset)
else:
# Translators: this is for navigating document content by
            # moving from form field to form field. This is a detailed
            # message which will be presented to the user if no more form
            # fields can be found.
#
full = _("No more form fields.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Headings #
# #
########################
def _headingBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst headings.
"""
bindings = {}
# Translators: this is for navigating in a document by heading.
# (e.g. <h1>)
#
prevDesc = _("Goes to previous heading.")
bindings["previous"] = ["h", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating in a document by heading.
        # (e.g. <h1>)
#
nextDesc = _("Goes to next heading.")
bindings["next"] = ["h", settings.NO_MODIFIER_MASK, nextDesc]
prevAtLevelBindings = []
nextAtLevelBindings = []
minLevel, maxLevel = self._headingLevels()
for i in range(minLevel, maxLevel + 1):
# Translators: this is for navigating in a document by heading.
# (e.g. <h1> is a heading at level 1).
#
prevDesc = _("Goes to previous heading at level %d.") % i
prevAtLevelBindings.append([str(i),
settings.SHIFT_MODIFIER_MASK,
prevDesc])
# Translators: this is for navigating in a document by heading.
# (e.g. <h1> is a heading at level 1).
#
nextDesc = _("Goes to next heading at level %d.") % i
nextAtLevelBindings.append([str(i),
settings.NO_MODIFIER_MASK,
nextDesc])
bindings["previousAtLevel"] = prevAtLevelBindings
bindings["nextAtLevel"] = nextAtLevelBindings
return bindings
def _headingLevels(self):
"""Returns the [minimum heading level, maximum heading level]
which should be navigable via structural navigation.
"""
return [1, 6]
def _headingCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating headings
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_HEADING]
attrs = []
if arg:
attrs.append('level:%d' % arg)
return MatchCriteria(collection,
roles=role,
objAttrs=attrs)
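    # For instance, _headingCriteria(collection, arg=2) produces criteria
    # matching ROLE_HEADING objects whose object attributes include
    # "level:2"; with no arg, headings of any level match.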
def _headingPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a heading.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_HEADING:
if arg:
isMatch = (arg == self._getHeadingLevel(obj))
else:
isMatch = True
return isMatch
def _headingPresentation(self, obj, arg=None):
"""Presents the heading or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentObject(obj, characterOffset)
elif not arg:
# Translators: this is for navigating HTML content by moving from
# heading to heading (e.g. <h1>, <h2>, etc). This string is the
# detailed message which Orca will present if there are no more
# headings found.
#
full = _("No more headings.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
else:
# Translators: this is for navigating HTML content by moving from
# heading to heading at a particular level (i.e. only <h1> or only
# <h2>, etc.) This string is the detailed message which Orca will
# present if there are no more headings found at the desired level.
#
full = _("No more headings at level %d.") % arg
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Landmarks #
# #
########################
def _landmarkBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst landmarks.
"""
bindings = {}
# Translators: this is for navigating to the previous ARIA
# role landmark. ARIA role landmarks are the W3C defined
        # HTML tag attribute 'role' used to identify important parts
        # of a webpage, like banners, main content, search, etc.
#
prevDesc = _("Goes to previous landmark.")
bindings["previous"] = ["m", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating to the next ARIA
# role landmark. ARIA role landmarks are the W3C defined
        # HTML tag attribute 'role' used to identify important parts
        # of a webpage, like banners, main content, search, etc.
#
nextDesc = _("Goes to next landmark.")
bindings["next"] = ["m", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _landmarkCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating landmarks
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
# NOTE: there is a limitation in the AT-SPI Collections interface
# when it comes to an attribute whose value can be a list. For
# example, the xml-roles attribute can be a space-separate list
# of roles. We'd like to make a match if the xml-roles attribute
# has one (or any) of the roles we care about. Instead, we're
# restricted to an exact match. So, the below will only work in
# the cases where the xml-roles attribute value consists solely of a
# single role. In practice, this seems to be the case that we run
# into for the landmark roles.
#
attrs = []
for landmark in settings.ariaLandmarks:
attrs.append('xml-roles:' + landmark)
return MatchCriteria(collection, objAttrs=attrs)
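    # Concretely (attribute values are illustrative): an object exposing
    # 'xml-roles:banner' is matched by the criteria above, but one
    # exposing 'xml-roles:banner search' is not, because Collections
    # performs an exact string comparison on the attribute value. The
    # predicate below does not share that limitation.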
def _landmarkPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a landmark.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj is None:
return False
attrs = dict([attr.split(':', 1) for attr in obj.getAttributes()])
try:
            # The xml-roles value may be a space-separated list of roles,
            # so split it and match if any one of them is a landmark.
            if set(attrs['xml-roles'].split()).intersection(\
               set(settings.ariaLandmarks)):
return True
else:
return False
except KeyError:
return False
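    # Example of the parsing above (attribute strings are illustrative):
    # getAttributes() returning ["tag:DIV", "xml-roles:banner search"]
    # becomes {'tag': 'DIV', 'xml-roles': 'banner search'}; splitting the
    # value gives set(['banner', 'search']), which intersects the
    # landmark set, so the predicate returns True.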
def _landmarkPresentation(self, obj, arg=None):
"""Presents the landmark or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentObject(obj, characterOffset)
else:
# Translators: this is for navigating to the previous ARIA
# role landmark. ARIA role landmarks are the W3C defined
# HTML tag attribute 'role' used to identify important part
# of webpage like banners, main context, search etc. This
# is an indication that one was not found.
#
full = _("No landmark found.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Lists #
# #
########################
def _listBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst (un)ordered lists.
"""
bindings = {}
# Translators: this is for navigating among bulleted/numbered
# lists in a document.
#
prevDesc = _("Goes to previous list.")
bindings["previous"] = ["l", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among bulleted/numbered
# lists in a document.
#
nextDesc = _("Goes to next list.")
bindings["next"] = ["l", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _listCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating (un)ordered
lists by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_LIST]
state = [pyatspi.STATE_FOCUSABLE]
stateMatch = collection.MATCH_NONE
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _listPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is an (un)ordered list.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_LIST:
isMatch = not obj.getState().contains(pyatspi.STATE_FOCUSABLE)
return isMatch
def _listPresentation(self, obj, arg=None):
"""Presents the (un)ordered list or indicates that one was not
found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
# TODO: Ultimately it should be the job of the speech (and braille)
# generator to present things like this.
#
if obj:
nItems = 0
for child in obj:
if child.getRole() == pyatspi.ROLE_LIST_ITEM:
nItems += 1
# Translators: this represents a list in HTML.
#
itemString = ngettext("List with %d item",
"List with %d items",
nItems) % nItems
self._script.presentMessage(itemString)
nestingLevel = 0
parent = obj.parent
while parent.getRole() == pyatspi.ROLE_LIST:
nestingLevel += 1
parent = parent.parent
if nestingLevel:
# Translators: this represents a list item in a document.
# The nesting level is how 'deep' the item is (e.g., a
# level of 2 represents a list item inside a list that's
# inside another list).
#
self._script.presentMessage(_("Nesting level %d") % \
nestingLevel)
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentLine(obj, characterOffset)
else:
# Translators: this is for navigating document content by moving
# from bulleted/numbered list to bulleted/numbered list. This
# string is the detailed message which Orca will present if there
# are no more lists found.
#
full = _("No more lists.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
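    # Sample announcement (values illustrative): landing on a 5-item list
    # nested inside one other list yields "List with 5 items" followed by
    # "Nesting level 1", after which the first line of the list is
    # presented.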
########################
# #
# List Items #
# #
########################
def _listItemBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst items in an (un)ordered list.
"""
bindings = {}
# Translators: this is for navigating among bulleted/numbered list
# items in a document.
#
prevDesc = _("Goes to previous list item.")
bindings["previous"] = ["i", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among bulleted/numbered list
# items in a document.
#
nextDesc = _("Goes to next list item.")
bindings["next"] = ["i", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _listItemCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating items in an
(un)ordered list by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_LIST_ITEM]
state = [pyatspi.STATE_FOCUSABLE]
stateMatch = collection.MATCH_NONE
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _listItemPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is an item in an (un)ordered list.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_LIST_ITEM:
isMatch = not obj.getState().contains(pyatspi.STATE_FOCUSABLE)
return isMatch
def _listItemPresentation(self, obj, arg=None):
"""Presents the (un)ordered list item or indicates that one was not
found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
# TODO: We currently present the line, so that's kept here.
# But we should probably present the object, which would
# be consistent with the change made recently for headings.
#
self._presentLine(obj, characterOffset)
else:
# Translators: this is for navigating document content by
# moving from bulleted/numbered list item to bulleted/
# numbered list item. This string is the detailed message
# which Orca will present if there are no more list items found.
#
full = _("No more list items.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Live Regions #
# #
########################
def _liveRegionBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst live regions.
"""
bindings = {}
# Translators: this is for navigating between live regions
#
prevDesc = _("Goes to previous live region.")
bindings["previous"] = ["d", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating between live regions
#
nextDesc = _("Goes to next live region.")
bindings["next"] = ["d", settings.NO_MODIFIER_MASK, nextDesc]
# Translators: this is for navigating to the last live region
# which made an announcement.
#
desc = _("Goes to the last live region which made an announcement.")
bindings["last"] = ["y", settings.NO_MODIFIER_MASK, desc]
return bindings
def _liveRegionPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a live region.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
regobjs = self._script.liveMngr.getLiveNoneObjects()
if self._script.liveMngr.matchLiveRegion(obj) or obj in regobjs:
isMatch = True
return isMatch
def _liveRegionPresentation(self, obj, arg=None):
"""Presents the live region or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
# TODO: We don't want to move to a list item.
# Is this the best place to handle this?
#
if obj.getRole() == pyatspi.ROLE_LIST:
characterOffset = 0
else:
[obj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(obj, characterOffset)
self._presentObject(obj, characterOffset)
# For debugging
#
self._script.outlineAccessible(obj)
else:
# Translators: this is for navigating HTML in a structural
# manner, where a 'live region' is a location in a web page
            # that is updated without having to refresh the entire page.
#
full = _("No more live regions.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Paragraphs #
# #
########################
def _paragraphBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst paragraphs.
"""
bindings = {}
# Translators: this is for navigating among paragraphs in a document.
#
prevDesc = _("Goes to previous paragraph.")
bindings["previous"] = ["p", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among paragraphs in a document.
#
nextDesc = _("Goes to next paragraph.")
bindings["next"] = ["p", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _paragraphCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating paragraphs
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_PARAGRAPH]
return MatchCriteria(collection, roles=role, applyPredicate=True)
def _paragraphPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a paragraph.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_PARAGRAPH:
try:
text = obj.queryText()
# We're choosing 3 characters as the minimum because some
# paragraphs contain a single image or link and a text
# of length 2: An embedded object character and a space.
# We want to skip these.
#
isMatch = text.characterCount > 2
except:
pass
return isMatch
def _paragraphPresentation(self, obj, arg=None):
"""Presents the paragraph or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[newObj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(newObj, characterOffset)
self._presentObject(obj, 0)
else:
# Translators: this is for navigating document content by moving
# from paragraph to paragraph. This string is the detailed message
# which Orca will present if there are no more paragraphs found.
#
full = _("No more paragraphs.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Radio Buttons #
# #
########################
def _radioButtonBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst radio buttons.
"""
bindings = {}
# Translators: this is for navigating among radio buttons in a
# form within a document.
#
prevDesc = _("Goes to previous radio button.")
bindings["previous"] = ["r", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among radio buttons in a
# form within a document.
#
nextDesc = _("Goes to next radio button.")
bindings["next"] = ["r", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _radioButtonCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating radio buttons
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_RADIO_BUTTON]
state = [pyatspi.STATE_FOCUSABLE, pyatspi.STATE_SENSITIVE]
stateMatch = collection.MATCH_ALL
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _radioButtonPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a radio button.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_RADIO_BUTTON:
state = obj.getState()
isMatch = state.contains(pyatspi.STATE_FOCUSABLE) \
and state.contains(pyatspi.STATE_SENSITIVE)
return isMatch
def _radioButtonPresentation(self, obj, arg=None):
"""Presents the radio button or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating in document content by moving
# from radio button to radio button in a form. This string is the
# detailed message which Orca will present if there are no more
# radio buttons found.
#
full = _("No more radio buttons.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Separators #
# #
########################
def _separatorBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst separators.
"""
bindings = {}
# Translators: this is for navigating among separators, such as the
# <hr> tag, in a document.
#
prevDesc = _("Goes to previous separator.")
bindings["previous"] = ["s", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among separators, such as the
# <hr> tag, in a document.
#
nextDesc = _("Goes to next separator.")
bindings["next"] = ["s", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _separatorCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating separators
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_SEPARATOR]
return MatchCriteria(collection, roles=role, applyPredicate=False)
def _separatorPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a separator.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
return obj and obj.getRole() == pyatspi.ROLE_SEPARATOR
def _separatorPresentation(self, obj, arg=None):
"""Presents the separator or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
[newObj, characterOffset] = self._getCaretPosition(obj)
self._setCaretPosition(newObj, characterOffset)
self._presentObject(obj, 0)
else:
# Translators: this is for navigating document content by moving
# amongst separators (e.g. <hr> tags). This string is the detailed
# message which Orca will present if there are no more separators
# found.
#
full = _("No more separators.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Tables #
# #
########################
def _tableBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst tables.
"""
bindings = {}
# Translators: this is for navigating among tables in a document.
#
prevDesc = _("Goes to previous table.")
bindings["previous"] = ["t", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among tables in a document.
#
nextDesc = _("Goes to next table.")
bindings["next"] = ["t", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _tableCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating tables
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_TABLE]
return MatchCriteria(collection, roles=role, applyPredicate=True)
def _tablePredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a table.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj and obj.childCount and obj.getRole() == pyatspi.ROLE_TABLE:
try:
return obj.queryTable().nRows > 0
            except Exception:
                pass
return False
def _tablePresentation(self, obj, arg=None):
"""Presents the table or indicates that one was not found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
caption = self._getTableCaption(obj)
if caption:
self._script.presentMessage(caption)
self._script.presentMessage(self._getTableDescription(obj))
cell = obj.queryTable().getAccessibleAt(0, 0)
self.lastTableCell = [0, 0]
[cell, characterOffset] = self._getCaretPosition(cell)
self._setCaretPosition(cell, characterOffset)
self._presentObject(cell, characterOffset)
else:
# Translators: this is for navigating document content by moving
# from table to table. This string is the detailed message which
# Orca will present if there are no more tables found.
#
full = _("No more tables.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Table Cells #
# #
########################
def _tableCellBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating spatially amongst table cells.
"""
bindings = {}
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes left one cell.")
bindings["left"] = ["Left", settings.SHIFT_ALT_MODIFIER_MASK, desc]
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes right one cell.")
bindings["right"] = ["Right", settings.SHIFT_ALT_MODIFIER_MASK, desc]
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes up one cell.")
bindings["up"] = ["Up", settings.SHIFT_ALT_MODIFIER_MASK, desc]
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes down one cell.")
bindings["down"] = ["Down", settings.SHIFT_ALT_MODIFIER_MASK, desc]
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes to the first cell in a table.")
bindings["first"] = ["Home", settings.SHIFT_ALT_MODIFIER_MASK, desc]
# Translators: this is for navigating among table cells in a document.
#
desc = _("Goes to the last cell in a table.")
bindings["last"] = ["End", settings.SHIFT_ALT_MODIFIER_MASK, desc]
return bindings
def _tableCellCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating table cells
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_TABLE_CELL, pyatspi.ROLE_COLUMN_HEADER]
return MatchCriteria(collection, roles=role)
def _tableCellPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a table cell.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
return (obj and obj.getRole() in [pyatspi.ROLE_COLUMN_HEADER,
pyatspi.ROLE_TABLE_CELL])
def _tableCellPresentation(self, cell, arg):
"""Presents the table cell or indicates that one was not found.
Arguments:
        - cell: the accessible table cell under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if not cell:
return
if settings.speakCellHeaders:
self._presentCellHeaders(cell, arg)
[obj, characterOffset] = self._getCaretPosition(cell)
self._setCaretPosition(obj, characterOffset)
self._script.updateBraille(obj)
blank = self._isBlankCell(cell)
if not blank:
self._presentObject(cell, 0)
else:
# Translators: "blank" is a short word to mean the
# user has navigated to an empty line.
#
speech.speak(_("blank"))
if settings.speakCellCoordinates:
[row, col] = self.getCellCoordinates(cell)
# Translators: this represents the (row, col) position of
# a cell in a table.
#
self._script.presentMessage(_("Row %(row)d, column %(column)d.") \
% {"row" : row + 1, "column" : col + 1})
spanString = self._getCellSpanInfo(cell)
if spanString and settings.speakCellSpan:
self._script.presentMessage(spanString)
########################
# #
# Unvisited Links #
# #
########################
def _unvisitedLinkBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst unvisited links.
"""
bindings = {}
# Translators: this is for navigating among unvisited links in a
# document.
#
prevDesc = _("Goes to previous unvisited link.")
bindings["previous"] = ["u", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among unvisited links in a
# document.
#
nextDesc = _("Goes to next unvisited link.")
bindings["next"] = ["u", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _unvisitedLinkCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating unvisited links
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_LINK]
state = [pyatspi.STATE_VISITED]
stateMatch = collection.MATCH_NONE
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
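    # Note: matchStates=MATCH_NONE selects links carrying *none* of the
    # listed states, i.e. links lacking STATE_VISITED. _visitedLinkCriteria
    # further below issues the mirror-image query with MATCH_ANY to fetch
    # links that do carry STATE_VISITED.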
def _unvisitedLinkPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is an unvisited link.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_LINK:
isMatch = not obj.getState().contains(pyatspi.STATE_VISITED)
return isMatch
def _unvisitedLinkPresentation(self, obj, arg=None):
"""Presents the unvisited link or indicates that one was not
found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
# We were counting on the Gecko script's setCaretPosition
# to do the focus grab. It turns out that we do not always
# want setCaretPosition to grab focus on a link (e.g. when
# arrowing in the text of a paragraph which is a child of
# a link. Therefore, we need to grab focus here.
#
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by moving
# from unvisited link to unvisited link. This string is the
# detailed message which Orca will present if there are no more
# unvisited links found.
#
full = _("No more unvisited links.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
########################
# #
# Visited Links #
# #
########################
def _visitedLinkBindings(self):
"""Returns a dictionary of [keysymstring, modifiers, description]
lists for navigating amongst visited links.
"""
bindings = {}
# Translators: this is for navigating among visited links in a
# document.
#
prevDesc = _("Goes to previous visited link.")
bindings["previous"] = ["v", settings.SHIFT_MODIFIER_MASK, prevDesc]
# Translators: this is for navigating among visited links in a
# document.
#
nextDesc = _("Goes to next visited link.")
bindings["next"] = ["v", settings.NO_MODIFIER_MASK, nextDesc]
return bindings
def _visitedLinkCriteria(self, collection, arg=None):
"""Returns the MatchCriteria to be used for locating visited links
by collection.
Arguments:
- collection: the collection interface for the document
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
role = [pyatspi.ROLE_LINK]
state = [pyatspi.STATE_VISITED]
stateMatch = collection.MATCH_ANY
return MatchCriteria(collection,
states=state,
matchStates=stateMatch,
roles=role)
def _visitedLinkPredicate(self, obj, arg=None):
"""The predicate to be used for verifying that the object
obj is a visited link.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
isMatch = False
if obj and obj.getRole() == pyatspi.ROLE_LINK:
isMatch = obj.getState().contains(pyatspi.STATE_VISITED)
return isMatch
def _visitedLinkPresentation(self, obj, arg=None):
"""Presents the visited link or indicates that one was not
found.
Arguments:
- obj: the accessible object under consideration.
- arg: an optional argument which may need to be included in
the criteria (e.g. the level of a heading).
"""
if obj:
obj.queryComponent().grabFocus()
else:
# Translators: this is for navigating document content by moving
# from visited link to visited link. This string is the detailed
# message which Orca will present if there are no more visited
# links found.
#
full = _("No more visited links.")
# Translators: Orca has a command that allows the user to move
# to the next structural navigation object. In Orca, "structural
# navigation" refers to quickly moving through a document by
# jumping amongst objects of a given type, such as from link to
# link, or from heading to heading, or from form field to form
# field. This is a brief message which will be presented to the
# user if the desired structural navigation object could not be
# found.
#
brief = C_("structural navigation", "Not found")
self._script.presentMessage(full, brief)
|
gpl-3.0
| 3,199,177,288,072,333,300
| 39.519347
| 80
| 0.565077
| false
| 4.794823
| false
| false
| false
|
kazuoteramoto/alot
|
alot/ui.py
|
1
|
18751
|
# Copyright (C) 2011-2012 Patrick Totzke <patricktotzke@gmail.com>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
import urwid
import logging
from twisted.internet import reactor, defer
from settings import settings
from buffers import BufferlistBuffer
import commands
from commands import commandfactory
from alot.commands import CommandParseError
from alot.helper import string_decode
import widgets
class InputWrap(urwid.WidgetWrap):
"""
This is the topmost widget used in the widget tree.
Its purpose is to capture and interpret keypresses
by instantiating and applying the relevant :class:`Command` objects
or relaying them to the wrapped `rootwidget`.
"""
def __init__(self, ui, rootwidget):
urwid.WidgetWrap.__init__(self, rootwidget)
self.ui = ui
self.rootwidget = rootwidget
self.select_cancel_only = False
def set_root(self, w):
self._w = w
def get_root(self):
return self._w
def allowed_command(self, cmd):
"""sanity check if the given command should be applied.
This is used in :meth:`keypress`"""
        if not self.select_cancel_only:
            return True
        elif isinstance(cmd, commands.globals.SendKeypressCommand):
            return cmd.key in ['select', 'cancel']
        else:
            return False
def keypress(self, size, key):
"""overwrites `urwid.WidgetWrap.keypress`"""
mode = self.ui.mode
if self.select_cancel_only:
mode = 'global'
cmdline = settings.get_keybinding(mode, key)
if cmdline:
try:
cmd = commandfactory(cmdline, mode)
if self.allowed_command(cmd):
self.ui.apply_command(cmd)
return None
            except CommandParseError as e:
self.ui.notify(e.message, priority='error')
return self._w.keypress(size, key)
class UI(object):
"""
This class integrates all components of alot and offers
methods for user interaction like :meth:`prompt`, :meth:`notify` etc.
It handles the urwid widget tree and mainloop (we use twisted) and is
responsible for opening, closing and focussing buffers.
"""
buffers = []
"""list of active buffers"""
current_buffer = None
"""points to currently active :class:`~alot.buffers.Buffer`"""
dbman = None
"""Database manager (:class:`~alot.db.DBManager`)"""
def __init__(self, dbman, initialcmd):
"""
:param dbman: :class:`~alot.db.DBManager`
:param initialcmd: commandline applied after setting up interface
:type initialcmd: str
        (the colour mode, which determines how many colours the terminal
        may use, is read from the 'colourmode' setting: 1, 16 or 256)
"""
self.dbman = dbman
colourmode = int(settings.get('colourmode'))
logging.info('setup gui in %d colours' % colourmode)
global_att = settings.get_theming_attribute('global', 'body')
self.mainframe = urwid.Frame(urwid.SolidFill())
self.mainframe_themed = urwid.AttrMap(self.mainframe, global_att)
self.inputwrap = InputWrap(self, self.mainframe_themed)
self.mainloop = urwid.MainLoop(self.inputwrap,
handle_mouse=False,
event_loop=urwid.TwistedEventLoop(),
                                       unhandled_input=self.unhandled_input)
self.mainloop.screen.set_terminal_properties(colors=colourmode)
self.show_statusbar = settings.get('show_statusbar')
self.notificationbar = None
self.mode = 'global'
self.commandprompthistory = []
logging.debug('fire first command')
self.apply_command(initialcmd)
self.mainloop.run()
    def unhandled_input(self, key):
"""called if a keypress is not handled."""
logging.debug('unhandled input: %s' % key)
def keypress(self, key):
"""relay all keypresses to our `InputWrap`"""
self.inputwrap.keypress((150, 20), key)
def show_as_root_until_keypress(self, w, key, relay_rest=True,
afterwards=None):
def oe():
self.inputwrap.set_root(self.mainframe)
self.inputwrap.select_cancel_only = False
if callable(afterwards):
logging.debug('called')
afterwards()
logging.debug('relay: %s' % relay_rest)
helpwrap = widgets.CatchKeyWidgetWrap(w, key, on_catch=oe,
relay_rest=relay_rest)
self.inputwrap.set_root(helpwrap)
self.inputwrap.select_cancel_only = not relay_rest
def prompt(self, prefix, text=u'', completer=None, tab=0, history=[]):
"""prompt for text input
:param prefix: text to print before the input field
:type prefix: str
:param text: initial content of the input field
:type text: str
:param completer: completion object to use
:type completer: :meth:`alot.completion.Completer`
:param tab: number of tabs to press initially
(to select completion results)
:type tab: int
:param history: history to be used for up/down keys
:type history: list of str
:returns: a :class:`twisted.defer.Deferred`
"""
d = defer.Deferred() # create return deferred
oldroot = self.inputwrap.get_root()
def select_or_cancel(text):
# restore main screen and invoke callback
# (delayed return) with given text
self.inputwrap.set_root(oldroot)
self.inputwrap.select_cancel_only = False
d.callback(text)
prefix = prefix + settings.get('prompt_suffix')
#set up widgets
leftpart = urwid.Text(prefix, align='left')
editpart = widgets.CompleteEdit(completer, on_exit=select_or_cancel,
edit_text=text, history=history)
for i in range(tab): # hit some tabs
editpart.keypress((0,), 'tab')
# build promptwidget
both = urwid.Columns(
[
('fixed', len(prefix), leftpart),
('weight', 1, editpart),
])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.inputwrap.set_root(overlay)
self.inputwrap.select_cancel_only = True
return d # return deferred
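    # A minimal usage sketch (illustrative, not part of the original file):
    # prompt() returns a Deferred, so callers attach a callback that receives
    # the entered text once the user confirms or cancels.
    #
    #   def on_input(text):
    #       if text is not None:
    #           ui.notify('you typed: %s' % text)
    #   ui.prompt('search').addCallback(on_input)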
def exit(self):
"""
shuts down user interface without cleaning up.
Use a :class:`commands.globals.ExitCommand` for a clean shutdown.
"""
exit_msg = None
try:
reactor.stop()
except Exception as e:
exit_msg = 'Could not stop reactor: {}.'.format(e)
logging.error(exit_msg + '\nShutting down anyway..')
def buffer_open(self, buf):
"""register and focus new :class:`~alot.buffers.Buffer`."""
if self.current_buffer is not None:
offset = settings.get('bufferclose_focus_offset') * -1
currentindex = self.buffers.index(self.current_buffer)
self.buffers.insert(currentindex + offset, buf)
else:
self.buffers.append(buf)
self.buffer_focus(buf)
def buffer_close(self, buf):
"""
closes given :class:`~alot.buffers.Buffer`.
        It removes the buffer from the bufferlist and calls its cleanup() method.
"""
buffers = self.buffers
if buf not in buffers:
string = 'tried to close unknown buffer: %s. \n\ni have:%s'
logging.error(string % (buf, self.buffers))
elif self.current_buffer == buf:
logging.info('closing current buffer %s' % buf)
index = buffers.index(buf)
buffers.remove(buf)
offset = settings.get('bufferclose_focus_offset')
nextbuffer = buffers[(index + offset) % len(buffers)]
self.buffer_focus(nextbuffer)
buf.cleanup()
else:
string = 'closing buffer %d:%s'
logging.info(string % (buffers.index(buf), buf))
buffers.remove(buf)
buf.cleanup()
def buffer_focus(self, buf):
"""focus given :class:`~alot.buffers.Buffer`."""
if buf not in self.buffers:
logging.error('tried to focus unknown buffer')
else:
if self.current_buffer != buf:
self.current_buffer = buf
self.inputwrap.set_root(self.mainframe_themed)
self.mode = buf.modename
if isinstance(self.current_buffer, BufferlistBuffer):
self.current_buffer.rebuild()
self.update()
def get_deep_focus(self, startfrom=None):
"""return the bottom most focussed widget of the widget tree"""
if not startfrom:
startfrom = self.current_buffer
if 'get_focus' in dir(startfrom):
focus = startfrom.get_focus()
if isinstance(focus, tuple):
focus = focus[0]
if isinstance(focus, urwid.Widget):
return self.get_deep_focus(startfrom=focus)
return startfrom
def get_buffers_of_type(self, t):
"""
returns currently open buffers for a given subclass of
:class:`alot.buffer.Buffer`
"""
return filter(lambda x: isinstance(x, t), self.buffers)
def clear_notify(self, messages):
"""
        clears notification popups. Call this to get rid of messages that don't
time out.
:param messages: The popups to remove. This should be exactly
what :meth:`notify` returned when creating the popup
"""
newpile = self.notificationbar.widget_list
for l in messages:
if l in newpile:
newpile.remove(l)
if newpile:
self.notificationbar = urwid.Pile(newpile)
else:
self.notificationbar = None
self.update()
def choice(self, message, choices={'y': 'yes', 'n': 'no'},
select=None, cancel=None, msg_position='above'):
"""
prompt user to make a choice
:param message: string to display before list of choices
:type message: unicode
:param choices: dict of possible choices
:type choices: dict: keymap->choice (both str)
:param select: choice to return if enter/return is hit. Ignored if set
to `None`.
:type select: str
:param cancel: choice to return if escape is hit. Ignored if set to
`None`.
:type cancel: str
:param msg_position: determines if `message` is above or left of the
prompt. Must be `above` or `left`.
:type msg_position: str
:returns: a :class:`twisted.defer.Deferred`
"""
assert select in choices.values() + [None]
assert cancel in choices.values() + [None]
assert msg_position in ['left', 'above']
d = defer.Deferred() # create return deferred
oldroot = self.inputwrap.get_root()
def select_or_cancel(text):
self.inputwrap.set_root(oldroot)
self.inputwrap.select_cancel_only = False
d.callback(text)
#set up widgets
msgpart = urwid.Text(message)
choicespart = widgets.ChoiceWidget(choices, callback=select_or_cancel,
select=select, cancel=cancel)
# build widget
if msg_position == 'left':
both = urwid.Columns(
[
('fixed', len(message), msgpart),
('weight', 1, choicespart),
], dividechars=1)
else: # above
both = urwid.Pile([msgpart, choicespart])
att = settings.get_theming_attribute('global', 'prompt')
both = urwid.AttrMap(both, att, att)
# put promptwidget as overlay on main widget
overlay = urwid.Overlay(both, oldroot,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 1),
None)
self.inputwrap.set_root(overlay)
self.inputwrap.select_cancel_only = True
return d # return deferred
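    # Illustrative sketch (assumed caller code): choice() likewise returns a
    # Deferred that fires with the selected value.
    #
    #   d = ui.choice('really quit?', choices={'y': 'yes', 'n': 'no'},
    #                 select='yes', cancel='no')
    #   d.addCallback(lambda answer: ui.exit() if answer == 'yes' else None)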
def notify(self, message, priority='normal', timeout=0, block=False):
"""
opens notification popup
:param message: message to print
:type message: str
:param priority: priority string, used to format the popup: currently,
'normal' and 'error' are defined. If you use 'X' here,
the attribute 'global_notify_X' is used to format the
popup.
:type priority: str
:param timeout: seconds until message disappears. Defaults to the value
of 'notify_timeout' in the general config section.
A negative value means never time out.
:type timeout: int
:param block: this notification blocks until a keypress is made
:type block: bool
:returns: an urwid widget (this notification) that can be handed to
:meth:`clear_notify` for removal
"""
def build_line(msg, prio):
cols = urwid.Columns([urwid.Text(msg)])
att = settings.get_theming_attribute('global', 'notify_' + prio)
return urwid.AttrMap(cols, att)
msgs = [build_line(message, priority)]
if not self.notificationbar:
self.notificationbar = urwid.Pile(msgs)
else:
newpile = self.notificationbar.widget_list + msgs
self.notificationbar = urwid.Pile(newpile)
self.update()
def clear(*args):
self.clear_notify(msgs)
if block:
# put "cancel to continue" widget as overlay on main widget
txt = urwid.Text('(cancel continues)')
overlay = urwid.Overlay(txt, self.mainframe,
('fixed left', 0),
('fixed right', 0),
('fixed bottom', 0),
None)
self.show_as_root_until_keypress(overlay, 'cancel',
relay_rest=False,
afterwards=clear)
else:
if timeout >= 0:
if timeout == 0:
timeout = settings.get('notify_timeout')
self.mainloop.set_alarm_in(timeout, clear)
return msgs[0]
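    # Sketch of typical use (illustrative only): a negative timeout keeps the
    # popup open until it is cleared with the widget notify() returned.
    #
    #   msg = ui.notify('working...', timeout=-1)
    #   # ... long-running work ...
    #   ui.clear_notify([msg])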
def update(self):
"""redraw interface"""
#who needs a header?
#head = urwid.Text('notmuch gui')
#h=urwid.AttrMap(head, 'header')
#self.mainframe.set_header(h)
# body
if self.current_buffer:
self.mainframe.set_body(self.current_buffer)
# footer
lines = []
if self.notificationbar: # .get_text()[0] != ' ':
lines.append(self.notificationbar)
if self.show_statusbar:
lines.append(self.build_statusbar())
if lines:
self.mainframe.set_footer(urwid.Pile(lines))
else:
self.mainframe.set_footer(None)
# force a screen redraw
if self.mainloop.screen.started:
self.mainloop.draw_screen()
def build_statusbar(self):
"""construct and return statusbar widget"""
info = {}
cb = self.current_buffer
btype = None
if cb is not None:
info = cb.get_info()
btype = cb.modename
info['buffer_no'] = self.buffers.index(cb)
info['buffer_type'] = btype
info['total_messages'] = self.dbman.count_messages('*')
info['pending_writes'] = len(self.dbman.writequeue)
lefttxt = righttxt = u''
if cb is not None:
lefttxt, righttxt = settings.get(btype + '_statusbar', (u'', u''))
lefttxt = string_decode(lefttxt, 'UTF-8')
lefttxt = lefttxt.format(**info)
righttxt = string_decode(righttxt, 'UTF-8')
righttxt = righttxt.format(**info)
footerleft = urwid.Text(lefttxt, align='left')
pending_writes = len(self.dbman.writequeue)
if pending_writes > 0:
righttxt = ('|' * pending_writes) + ' ' + righttxt
footerright = urwid.Text(righttxt, align='right')
columns = urwid.Columns([
footerleft,
('fixed', len(righttxt), footerright)])
footer_att = settings.get_theming_attribute('global', 'footer')
return urwid.AttrMap(columns, footer_att)
def apply_command(self, cmd):
"""
applies a command
This calls the pre and post hooks attached to the command,
as well as :meth:`cmd.apply`.
:param cmd: an applicable command
:type cmd: :class:`~alot.commands.Command`
"""
if cmd:
# call pre- hook
if cmd.prehook:
logging.info('calling pre-hook')
try:
cmd.prehook(ui=self, dbm=self.dbman)
except:
logging.exception('prehook failed')
return False
# define (callback) function that invokes post-hook
def call_posthook(retval_from_apply):
if cmd.posthook:
logging.info('calling post-hook')
try:
cmd.posthook(ui=self, dbm=self.dbman)
except:
logging.exception('posthook failed')
# define error handler for Failures/Exceptions
# raised in cmd.apply()
def errorHandler(failure):
logging.error(failure.getTraceback())
msg = "Error: %s,\n(check the log for details)"
self.notify(msg % failure.getErrorMessage(), priority='error')
# call cmd.apply
logging.info('apply command: %s' % cmd)
d = defer.maybeDeferred(cmd.apply, self)
d.addErrback(errorHandler)
d.addCallback(call_posthook)
|
gpl-3.0
| -3,591,981,657,515,845,000
| 36.880808
| 79
| 0.55997
| false
| 4.285943
| false
| false
| false
|
ardoi/datajuicer
|
lsjuicer/ui/widgets/panels/eventpanel.py
|
1
|
3918
|
from PyQt5 import QtWidgets as QW
from PyQt5 import QtCore as QC
from lsjuicer.inout.db.sqla import SyntheticData
from lsjuicer.ui.widgets.fileinfowidget import MyFormLikeLayout
from lsjuicer.ui.widgets.clicktrees import EventClickTree, Events
from actionpanel import ActionPanel
from lsjuicer.ui.widgets.mergewidget import MergeDialog
from lsjuicer.ui.widgets.deletewidget import DeleteDialog
class EventPanel(ActionPanel):
__doc__ = """Event display panel"""
__shortname__ = "Events"
active_events_changed = QC.pyqtSignal()
def setup_ui(self):
layout = QW.QVBoxLayout()
combo_layout = MyFormLikeLayout()
layout.addLayout(combo_layout)
self.setLayout(layout)
self.events = None
region_select = QW.QComboBox()
for i,reg in enumerate(self.analysis.fitregions):
region_select.addItem("{}".format(i))
region_select.currentIndexChanged.connect(self.region_changed)
combo_layout.add_row("Region:", region_select)
result_select = QW.QComboBox()
combo_layout.add_row("Result:", result_select)
self.result_select = result_select
result_select.currentIndexChanged.connect(self.result_changed)
clicktree = EventClickTree(self)
self.clicktree = clicktree
layout.addWidget(clicktree)
region_select.setCurrentIndex(0)
self.region_changed(0)
set_data_pb = QW.QPushButton("Set data")
set_data_pb.clicked.connect(self.set_data)
merge_pb = QW.QPushButton("Merge events")
merge_pb.clicked.connect(self.merge_events)
delete_pb = QW.QPushButton("Delete events")
delete_pb.clicked.connect(self.delete_events)
layout.addWidget(set_data_pb)
layout.addWidget(merge_pb)
layout.addWidget(delete_pb)
def _selected_events(self):
selected_events = []
for event_type in self.events.event_dict:
for i, event in enumerate(self.events.event_dict[event_type]):
status = self.events.status_dict[event_type][i]
if status:
selected_events.append(event.id)
return selected_events
def set_data(self):
events_to_show = self._selected_events()
sdata = SyntheticData(self.result)
new = sdata.get_events(events_to_show)
self.imagedata.replace_channel(new, 2)
self.active_events_changed.emit()
def merge_events(self):
events_to_merge = self._selected_events()
if len(events_to_merge) < 2:
QW.QMessageBox.warning(self,'Not enough events',
"At least two events have to be selected for merging")
return
dialog = MergeDialog(events_to_merge,self)
res = dialog.exec_()
if res:
self.result_changed(self.result_select.currentIndex())
def delete_events(self):
events_to_delete = self._selected_events()
if len(events_to_delete) < 1:
QW.QMessageBox.warning(self,'Not enough events',
"At least one event has to be selected for deletion")
return
dialog = DeleteDialog(events_to_delete,self)
res = dialog.exec_()
if res:
self.result_changed(self.result_select.currentIndex())
def region_changed(self, reg_no):
print "\nREgion changed"
self.region = self.analysis.fitregions[reg_no]
self.result_select.clear()
print reg_no, self.region
for i,res in enumerate(self.region.results):
self.result_select.addItem(str(i))
def result_changed(self, res_no):
print "\nResult changed"
self.result = self.region.results[res_no]
print res_no, self.result
self.events = Events()
for ev in self.result.events:
self.events.add_event(ev)
self.clicktree.set_events(self.events)
|
gpl-3.0
| 376,748,179,821,630,000
| 36.673077
| 74
| 0.639612
| false
| 3.837414
| false
| false
| false
|
nicholas-maltbie/Medina
|
AIPractice/tttTest.py
|
1
|
1416
|
from ttt import *
from tttGameSpec import TicTacToeGameSpec
def play_game(agent1, agent2, name1, name2):
"""Plays a game of tic tac toe with two agents and returns the winner."""
game_spec = TicTacToeGameSpec()
return game_spec.play_game(agent1, agent2)
"""board = make_board()
names = [name1, name2]
players = [agent1, agent2]
pieces = [-1,1]
current = random.randint(0,1)
while check_winner(board) == None:
print(get_board_as_numbers(board, pieces[current], pieces[(current + 1) % 2]))
move = players[current](board, pieces[current])
apply_move(board, move)
current = (current + 1) % 2
win = check_winner(board)
if win == 'o':
return name2
elif win == 'x':
return name1
else:
return 'tie'"""
if __name__ == "__main__":
distrib = {'player1':0, 'player2':0, 'tie':0}
plays = 1
for i in range(plays):
distrib[play_game(make_random_agent(), make_human_agent(), \
            'player1', 'player2')] += 1
print('player1 won ' + str(distrib['player1']) + ' times ' + \
str(int(distrib['player1'] / plays * 100)) + "%")
print('player2 won ' + str(distrib['player2']) + ' times ' + \
str(int(distrib['player2'] / plays * 100)) + "%")
print('tied ' + str(distrib['tie']) + ' times ' + \
str(int(distrib['tie'] / plays * 100)) + "%")
|
mit
| 6,103,644,358,303,188,000
| 36.263158
| 86
| 0.558616
| false
| 3.210884
| false
| false
| false
|
crs4/hl7apy
|
hl7apy/v2_8_2/__init__.py
|
1
|
3070
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2018, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import absolute_import
import importlib
from hl7apy.base_datatypes import WD
from .messages import MESSAGES
from .segments import SEGMENTS
from .fields import FIELDS
from .datatypes import DATATYPES, DATATYPES_STRUCTS
from .groups import GROUPS
from .tables import TABLES
from ..v2_7.base_datatypes import ST, FT, ID, IS, TX, GTS, SNM
from hl7apy.exceptions import ChildNotFound
ELEMENTS = {'Message': MESSAGES, 'Group': GROUPS, 'Segment': SEGMENTS,
'Field': FIELDS, 'Component': DATATYPES, 'SubComponent': DATATYPES,
'Datatypes_Structs': DATATYPES_STRUCTS, 'Table': TABLES}
def get(name, element_type):
try:
return ELEMENTS[element_type][name]
except KeyError:
raise ChildNotFound(name)
def find(name, where):
"""
>>> from hl7apy.core import Segment
>>> from hl7apy import find_reference
>>> find_reference('UNKNOWN', (Segment, ), '2.8.2') # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
ChildNotFound: No child named UNKNOWN
"""
for cls in where:
try:
return {'ref': get(name, cls.__name__), 'name': name, 'cls': cls}
except ChildNotFound:
pass
raise ChildNotFound(name)
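# Illustrative sketch ('PID' is assumed to be present in this version's
# SEGMENTS table):
#
#   from hl7apy.core import Segment
#   ref = find('PID', (Segment,))
#   ref['name'], ref['cls']   # -> ('PID', Segment)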
def is_base_datatype(datatype):
return datatype in BASE_DATATYPES
def get_base_datatypes():
return BASE_DATATYPES
def _load_base_datatypes():
base_dts = ('DT', 'DTM', 'NM', 'SI', 'TM',)
module = importlib.import_module("hl7apy.base_datatypes")
dts = {}
for cls in base_dts:
cls = getattr(module, cls)
dts[cls.__name__] = cls
return dts
BASE_DATATYPES = _load_base_datatypes()
BASE_DATATYPES.update({
'ST': ST,
'FT': FT,
'ID': ID,
'IS': IS,
'TX': TX,
'GTS': GTS,
'SNM': SNM,
'WD': WD,
})
DT = BASE_DATATYPES['DT']
DTM = BASE_DATATYPES['DTM']
NM = BASE_DATATYPES['NM']
SI = BASE_DATATYPES['SI']
TM = BASE_DATATYPES['TM']
|
mit
| 2,264,156,724,461,546,500
| 30.010101
| 92
| 0.684691
| false
| 3.504566
| false
| false
| false
|
DTL-FAIRData/ODEX4all-UseCases
|
EKP/tmp/NIZO2.py
|
1
|
5816
|
# Load the required packages
import EKP
import csv
import os
import datetime
# Knowledge platform URL
url = ''
# User credentials: Please fill in!
username = ''
password = ''
# Set the output directory
os.chdir("NIZO input & Output/")
# Get the user token, required for access
t = EKP.getToken(username, password, url).json()['token']
# Get the semantic types contained in the database, and their codes
Types = EKP.getSemanticTypeDict(url, t)
# Read in the input file
input_file = open("List commensal species Qin et al 19_10_2015.csv", "r")
reader = csv.reader(input_file, delimiter=";")
commensals = []
for line in reader:
commensals.append(line[0])
input_file.close()
input_group = "Bacterium"
input_ids = {}
for c in commensals:
ID = EKP.getID(url, Types, t, c, input_group)
if len(ID) > 0:
input_ids.update({ID[0]['name']: ID[0]['id']})
endpoints = {"Gut dysmotility" : "C1839757",
"bowel/gut problem" : "C1656426",
"Inflammatory Bowel Diseases" : "C0021390",
"Intestinal mucosal permeability" : "C0232645",
"Permeability" : "C0232645",
"body barrier" : "C0682585"
}
intermediate_types = { "Food" : "Objects",
"Organ or Tissue Function" : "Physiology",
#"Gene or Genome" : "Genes & Molecular Sequences",
"Finding" : "Disorders",
"Disease or Syndrome" : "Disorders",
"Chemical Viewed Functionally" : "Chemicals & Drugs",
"Biologically Active Substance" : "Chemicals & Drugs",
"Tissue" : "Anatomy",
"Body Location or Region" : "Anatomy",
"Body Part, Organ, or Organ Component" : "Anatomy",
"Body Space or Junction" : "Anatomy",
"Body System" : "Anatomy",
"Cell" : "Anatomy"
}
# Use all concepts related to the gut as a filter
gut = EKP.getID(url, Types, t, "C0699819")
intestines = EKP.getID(url, Types, t, "C0021853")
endpoint_ids = []
for point in endpoints.values():
endpoint_ids.append(EKP.getID(url, Types, t, point)[0]['id'])
endpoint_ids = list(set(endpoint_ids))
for input in input_ids.values():
print(EKP.getRelationships([input], endpoint_ids, url, t))
indirect_all = []
gut_all = []
intestines_all = []
for key, value in intermediate_types.items():
gut_connected = EKP.getDirectlyConnectedConcepts(Types, t, url, [gut[0]['id']], value, key)
if 'content' in gut_connected.keys() and len(gut_connected['content']) > 0:
for g in gut_connected['content']:
gut_all.append(g['tier1Concept']['gi'])
intestines_connected = EKP.getDirectlyConnectedConcepts(Types, t, url, [intestines[0]['id']], value, key)
if 'content' in intestines_connected.keys() and len(intestines_connected['content']) > 0:
for g in intestines_connected['content']:
intestines_all.append(g['tier1Concept']['gi'])
response = EKP.getIndirectRelationships(list(input_ids.values()), endpoint_ids, Types, url, t, value, key)
print(response)
if 'content' in response.keys():
indirect_all.append(response['content'])
indirect_out = open("indirect_output_" + datetime.datetime.today().strftime("%Y_%m_%d") + ".csv", "w")
iw = csv.writer(indirect_out, delimiter = ";")
iw.writerow(["Starting concept", "Predicate1", "Sources1", "Connecting concept", "Semantic category", "Semantic types", "Found in gut?", "Found in intestines?", "Predicate2", "Sources2", "End concept", "Path weight"])
indirect_all2 = []
for ii in indirect_all:
indirect_all2 = indirect_all2 + ii
for i in indirect_all2:
start = i['tier0Concept']['name']
intermediate = i['tier1Concept']['name']
intermediate_cat = i['tier1Concept']['category']
intermediate_concept = EKP.getConcept(i['tier1Concept']['gi'], url, t)
output_STs = []
for g in intermediate_concept['semanticTypes']:
for key, value in Types[0].items():
if g == value:
output_STs.append(key)
    # Logic to filter on gut & intestines
    if i['tier1Concept']['gi'] in gut_all:
        gut_bool = "gut"
    else:
        gut_bool = "no"
    if i['tier1Concept']['gi'] in intestines_all:
        intestines_bool = "intestines"
    else:
        intestines_bool = "no"
end = i['tier2Concept']['name']
pw = i['pathWeight']
nrows = max([len(i['tier01TripleInformation']), len(i['tier12TripleInformation'])])
pubs1 = []
pubs2 = []
for w in range(0,nrows):
if w <= len(i['tier01TripleInformation']) - 1:
predicate1 = i['tier01TripleInformation'][w]['predicateName']
pub_info = EKP.getPublications(i['tier01TripleInformation'][w]['tripleUuid'], url, t)
for p1 in pub_info['publications']:
if p1['publicationInfo'] is not None and 'url' in p1['publicationInfo'].keys():
pubs1.append(p1['publicationInfo']['url'])
if w <= len(i['tier12TripleInformation']) - 1:
predicate2 = i['tier12TripleInformation'][w]['predicateName']
pub_info2 = EKP.getPublications(i['tier12TripleInformation'][w]['tripleUuid'], url, t)
for p2 in pub_info2['publications']:
if p2['publicationInfo'] is not None and 'url' in p2['publicationInfo'].keys():
pubs2.append(p2['publicationInfo']['url'])
iw.writerow([start, predicate1, pubs1, intermediate, intermediate_cat, output_STs, gut_bool, intestines_bool, predicate2, pubs2, end, pw])
indirect_out.close()
|
mit
| -8,227,296,890,107,231,000
| 39.388889
| 217
| 0.607634
| false
| 3.336776
| true
| false
| false
|
Khurramjaved96/Recursive-CNNs
|
data_augmentor/augmentData.py
|
1
|
2668
|
import os
import cv2
import numpy as np
import utils
def argsProcessor():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--dataPath", help="DataPath")
parser.add_argument("-o", "--outputFiles", help="outputFiles", default="bar")
return parser.parse_args()
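# Typical invocation (paths are placeholders); every input JPG is expected to
# have a matching "<image>.csv" ground-truth file next to it:
#
#   python augmentData.py -i /path/to/labelled/images -o augmented_out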
args = argsProcessor()
output_dir = args.outputFiles
if (not os.path.isdir(output_dir)):
os.mkdir(output_dir)
dir = args.dataPath
import csv
with open(output_dir+"/gt.csv", 'a') as csvfile:
spamwriter_1 = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
for image in os.listdir(dir):
if image.endswith("jpg") or image.endswith("JPG"):
if os.path.isfile(dir+"/"+image+".csv"):
with open(dir+"/"+image+ ".csv", 'r') as csvfile:
spamwriter = csv.reader(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
img = cv2.imread(dir +"/"+ image)
print (image)
                    gt = []
                    for row in spamreader:
                        gt.append(row)
# img = cv2.circle(img, (int(float(row[0])), int(float(row[1]))), 2,(255,0,0),90)
gt =np.array(gt).astype(np.float32)
gt = gt / (img.shape[1], img.shape[0])
gt = gt * (1080, 1080)
img = cv2.resize(img, (1080, 1080))
print (gt)
for angle in range(0,271,90):
img_rotate, gt_rotate = utils.rotate(img, gt, angle)
for random_crop in range(0,16):
img_crop, gt_crop = utils.random_crop(img_rotate, gt_rotate)
mah_size = img_crop.shape
img_crop = cv2.resize(img_crop, (64, 64))
gt_crop = np.array(gt_crop)
# gt_crop = gt_crop*(1.0 / mah_size[1],1.0 / mah_size[0])
# for a in range(0,4):
# no=0
# for a in range(0,4):
# no+=1
# cv2.circle(img_crop, tuple(((gt_crop[a]*64).astype(int))), 2,(255-no*60,no*60,0),9)
# # # cv2.imwrite("asda.jpg", img)
cv2.imwrite(output_dir + "/" +str(angle)+str(random_crop)+ image, img_crop)
spamwriter_1.writerow((str(angle)+str(random_crop)+ image, tuple(list(gt_crop))))
|
apache-2.0
| 4,897,393,317,941,922,000
| 38.820896
| 117
| 0.463268
| false
| 3.789773
| false
| false
| false
|
vsemionov/npamp
|
npamp/output.py
|
1
|
11750
|
# Copyright (C) 2012 Victor Semionov
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import os
import traceback
import numpy as np
import params
import plot
div_line = "=" * 32
status_writing = "generating output"
output_dir = None
models_rel_path = "pumping"
ref_pulse_rel_path = "ref_pulse"
optimization_rel_path = "optimization"
opt_pump_rel_path = os.path.join(optimization_rel_path, "pumping")
opt_geom_rel_path = os.path.join(optimization_rel_path, "geometry")
alt_plot_rel_path = "alt"
x_label = "x [mm]"
y_label = "y [mm]"
z_label = "z [mm]"
rho_label = "r [mm]"
t_amp_label = "t [ns]"
i_label = "pulse num."
norm_t_label = "t/T"
density_rel_label = "rel. photon density"
density_norm_rel_label = "norm. photon density"
upper_rel_label = "rel. upper state population"
lower_rel_label = "rel. lower state population"
inversion_rel_label = "rel. population inversion"
inversion_abs_label = "population inversion [cm^-3]"
t_pump_label = "t [us]"
pump_duration_label = "pump duration [us]"
pump_power_label = "pump power [W]"
eff_power_density_label = "absorbed power density [W/cm^3]"
rate_label = "depopulation rate [cm^-3 s^-1]"
rate_rel_label = "depop. rate / inversion [s^-1]"
gain_label = "small-signal gain"
error_label = "rel. error"
inversion_rdiff_label = "inversion rel. difference [%]"
gain_rdiff_label = "gain rel. difference [%]"
energy_rel_label = "energy gain"
energy_abs_pump_label = "optical pump energy [J]"
energy_abs_stored_label = "stored energy [J]"
energy_abs_pulse_label = "output energy [mJ]"
rel_gain_decrease_label = "rel. gain decrease [%]"
fluence_rel_label = "rel. fluence"
fluence_norm_rel_label = "norm. fluence"
fluence_abs_label_energy = "max. output fluence [J/cm^2]"
medium_radius_label = "medium diameter [mm]"
beam_radius_label = "beam diameter [mm]"
extraction_eff_label = "extraction efficiency [%]"
total_eff_label = "optical to optical efficiency [%]"
lower_lifetime_legend = r"$\tau_1 \, = \, %s$"
lower_lifetime_unit = "ns"
def warn(message):
    print >>sys.stderr, "%s: %s" % ("warning", message)
def print_error(message, hint=None):
print >>sys.stderr, "%s: %s" % ("error", message)
if hint:
print >>sys.stderr, hint
def print_exception():
t, v, _ = sys.exc_info()
fmt = traceback.format_exception_only(t, v)
exc_msg = fmt[-1][:-1]
print >>sys.stderr, exc_msg
def show_status((i, j), (si, sj), done):
def print_status():
if j is not None:
print "%d, %d" % (i, j)
else:
print i
if si != 0:
if done:
print_status()
else:
if i % si == 0:
if j is None:
print_status()
else:
if sj == 0:
if j == 0:
print_status()
else:
if j % sj == 0:
print_status()
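# Illustrative example: show_status((10, None), (5, 0), False) prints "10",
# since i % si == 0 and there is no inner index j; intermediate i values are
# skipped, while the final call with done=True always prints (for si != 0).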
def init_dir(name):
dirname = os.path.join(output_dir, name)
if not os.path.isdir(dirname):
os.makedirs(dirname)
return dirname
def plot_inversion(dirname, inv):
filename = lambda name: os.path.join(dirname, name)
T = inv.T
inversion = inv.inversion
tlim = (T[0], T[-1])
plot.plot_data(filename("inversion_evo"), "Population Inversion Evolution", (T, None, tlim, t_pump_label), (inversion, None, None, inversion_abs_label))
def plot_output(dirname, input_beam, input_pulse, fwhm, amp, fluences, exact_density_out=None, exact_population_final=None):
filename = lambda name: os.path.join(dirname, name)
density = amp.density
population = amp.population
upper = population[0]
lower = population[1]
inversion = upper - lower
Z = amp.Z
T = amp.T
if params.output_rel_time:
T = T / fwhm
TZ, ZT = np.meshgrid(T, Z)
zlim = (Z[0], Z[-1])
tlim = (T[0], T[-1])
ref_density = input_pulse.ref_density
ref_inversion = amp.active_medium.initial_inversion.ref_inversion
out_t_label = norm_t_label if params.output_rel_time else t_amp_label
stride_z = max(len(amp.Z) // params.out_count_z, 1)
stride_t = max(len(amp.T) // params.out_count_t, 1)
plot.plot_data(filename("density_in"), "Input Photon Density", (T, None, tlim, out_t_label), (density[0]/ref_density, None, None, density_rel_label))
plot.plot_data(filename("density_out"), "Output Photon Density", (T, None, tlim, out_t_label), (density[-1]/ref_density, None, None, density_rel_label))
plot.plot_data(filename("densities"), "Input and Output Photon Density", ((T, ) * 2, None, tlim, out_t_label), ((density[0]/ref_density, density[-1]/ref_density), None, None, density_rel_label), ("input pulse", "output pulse"))
plot.plot_data(filename("densities_norm"), "Normalized Input and Output Photon Density", ((T, ) * 2, None, tlim, out_t_label), ((density[0]/ref_density, density[-1]/np.amax(density[-1])), None, None, density_norm_rel_label), ("input pulse", "output pulse"))
plot.plot_data(filename("upper_init"), "Initial Upper State Population", (Z, None, zlim, z_label), (upper.T[0]/ref_inversion, None, None, upper_rel_label))
plot.plot_data(filename("upper_final"), "Final Upper State Population", (Z, None, zlim, z_label), (upper.T[-1]/ref_inversion, None, None, upper_rel_label))
plot.plot_data(filename("lower_init"), "Initial Lower State Population", (Z, None, zlim, z_label), (lower.T[0]/ref_inversion, None, None, lower_rel_label))
plot.plot_data(filename("lower_final"), "Final Lower State Population", (Z, None, zlim, z_label), (lower.T[-1]/ref_inversion, None, None, lower_rel_label))
plot.plot_data(filename("inversion_init"), "Initial Population Inversion", (Z, None, zlim, z_label), (inversion.T[0]/ref_inversion, None, None, inversion_rel_label))
plot.plot_data(filename("inversion_final"), "Final Population Inversion", (Z, None, zlim, z_label), (inversion.T[-1]/ref_inversion, None, None, inversion_rel_label))
plot.plot_projection(filename("density_evo"), "Photon Density Evolution", (ZT, None, z_label), (TZ, None, out_t_label), (density/ref_density, None, density_rel_label), (30, -30), (stride_z, stride_t))
plot.plot_projection(filename("upper_evo"), "Upper State Population Evolution", (ZT, None, z_label), (TZ, None, out_t_label), (upper/ref_inversion, None, upper_rel_label), (30, 30), (stride_z, stride_t))
plot.plot_projection(filename("lower_evo"), "Lower State Population Evolution", (ZT, None, z_label), (TZ, None, out_t_label), (lower/ref_inversion, None, lower_rel_label), (30, 30), (stride_z, stride_t))
plot.plot_projection(filename("inversion_evo"), "Population Inversion Evolution", (ZT, None, z_label), (TZ, None, out_t_label), (inversion/ref_inversion, None, inversion_rel_label), (30, 30), (stride_z, stride_t))
if exact_density_out is not None:
plot.plot_error(filename("density_err"), "Photon Density Relative Error", (T, None, tlim, out_t_label), ((exact_density_out, density[-1]), None, None, error_label))
if exact_population_final is not None:
plot.plot_error(filename("inversion_err"), "Population Inversion Relative Error", (Z, None, zlim, z_label), ((exact_population_final[0] - exact_population_final[1], inversion.T[-1]), None, None, error_label))
if amp.active_medium.doping_agent.lower_lifetime != 0.0:
plot.plot_error(filename("upper_err"), "Upper State Population Relative Error", (Z, None, zlim, z_label), ((exact_population_final[0], upper.T[-1]), None, None, error_label))
plot.plot_error(filename("lower_err"), "Lower State Population Relative Error", (Z, None, zlim, z_label), ((exact_population_final[1], lower.T[-1]), None, None, error_label))
norm_fluences = fluences / input_beam.ref_fluence
plot.plot_data(filename("fluence"), "Fluence Evolution", (Z, None, zlim, z_label), (norm_fluences, None, None, fluence_rel_label))
def plot_train(dirname, input_beam, active_medium, output_photon_counts):
filename = lambda name: os.path.join(dirname, name)
pulse_count = len(output_photon_counts)
pulse_nums = np.arange(1, pulse_count + 1)
nlim = (pulse_nums[0] - 1, pulse_nums[-1] + 1)
extra_args = dict(style="o", vlines=True, grid="y") if pulse_count <= 32 else {}
input_photon_count = input_beam.fluence_integral(active_medium.radius)
plot.plot_data(filename("pulse_energy_gain"), "Pulse Energy Gain", (pulse_nums, None, nlim, i_label), (output_photon_counts/input_photon_count, None, None, energy_rel_label), **extra_args)
def plot_beam(dirname, input_beam, Rho, Phi, ref_output_fluence):
filename = lambda name: os.path.join(dirname, name)
if len(Rho) > 1:
vfluence = np.vectorize(input_beam.fluence)
ref_input_fluence = vfluence(*np.meshgrid(Rho, Phi)).T
norm_input_fluence = ref_input_fluence / input_beam.ref_fluence
norm_output_fluence = ref_output_fluence / input_beam.ref_fluence
max_output_fluence = np.amax(norm_output_fluence)
n_ref = -1
for n, phi in enumerate(Phi):
if n_ref < 0 or abs(phi - input_beam.phi_ref) < abs(Phi[n_ref] - input_beam.phi_ref):
n_ref = n
rholim = (Rho[0], Rho[-1])
plot.plot_data(filename("fluences"), "Input and Output Fluence", ((Rho,)*2, None, rholim, rho_label), ((norm_input_fluence[:, n_ref], norm_output_fluence[:, n_ref]), None, None, fluence_rel_label), ("input beam", "output beam"))
plot.plot_data(filename("fluences_norm"), "Normalized Input and Output Fluence", ((Rho,)*2, None, rholim, rho_label), ((norm_input_fluence[:, n_ref], norm_output_fluence[:, n_ref] / max_output_fluence), None, None, fluence_norm_rel_label), ("input beam", "output beam"))
if len(Phi) > 1:
FR, RF = np.meshgrid(Phi, Rho)
XY, YX = RF * np.cos(FR), RF * np.sin(FR)
stride_rho = max(len(Rho) // params.out_count_rho, 1)
stride_phi = max(len(Phi) // params.out_count_phi, 1)
plot.plot_projection(filename("fluence_in"), "Input Fluence", (XY, None, x_label), (YX, None, y_label), (norm_input_fluence, None, fluence_rel_label), (30, -60), (stride_rho, stride_phi))
plot.plot_projection(filename("fluence_out"), "Output Fluence", (XY, None, x_label), (YX, None, y_label), (norm_output_fluence, None, fluence_rel_label), (30, -60), (stride_rho, stride_phi))
|
bsd-2-clause
| -5,244,082,922,388,500,000
| 49.646552
| 278
| 0.655234
| false
| 3.151824
| false
| false
| false
|
living180/vex
|
vex/remove.py
|
1
|
1329
|
import os
import shutil
from vex import exceptions
def obviously_not_a_virtualenv(path):
include = os.path.join(path, 'include')
bin = os.path.join(path, 'bin')
scripts = os.path.join(path, 'Scripts')
if not os.path.exists(bin) and not os.path.exists(scripts):
return True
if os.path.exists(include) and not any(
filename.startswith('py') for filename in os.listdir(include)
):
return True
return False
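# Illustrative behaviour (path hypothetical): a directory containing neither
# bin/ nor Scripts/, or whose include/ holds no py* entries, is rejected:
#
#   obviously_not_a_virtualenv('/tmp/empty-dir')   # -> True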
def handle_remove(ve_path):
if not os.path.exists(ve_path):
return
if hasattr(os, "geteuid"):
if os.geteuid() == 0 or os.environ.get('USER', '') == 'root':
raise exceptions.VirtualenvNotRemoved(
"not removing any directory as root user")
if ve_path in ("/", "\\"):
raise exceptions.VirtualenvNotRemoved(
"not removing possible root directory {0!r}".format(ve_path))
if ve_path == os.path.expanduser("~"):
raise exceptions.VirtualenvNotRemoved(
"not removing possible home directory {0!r}".format(ve_path))
# last-minute checks
if obviously_not_a_virtualenv(ve_path):
raise exceptions.VirtualenvNotRemoved(
"path {0!r} did not look like a virtualenv".format(ve_path))
print("Removing {0!r}".format(ve_path))
shutil.rmtree(ve_path)
|
mit
| -4,692,091,804,238,745,000
| 34.918919
| 73
| 0.632054
| false
| 3.691667
| false
| false
| false
|
cginternals/glkernel
|
scripts/generate.py
|
1
|
19818
|
import posixpath # instead of os.path, to always use forward slashes
import os
import re
# TODOs:
# (more TODOs in code)
standardTypes = {
"bool",
"char",
"short",
"int",
"long",
"long long",
"unsigned char",
"unsigned short",
"unsigned int",
"unsigned long",
"unsigned long long",
"float",
"double",
"long double",
"size_t",
"glm::uint16"
}
# ------------
# large-scale parsing
def findPairedBrace(code):
nl = 1
for i,c in enumerate(code):
if c == '}': nl -= 1
if c == '{': nl += 1
if nl == 0:
return i
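# Illustrative example: the scan assumes one brace is already open (nl starts
# at 1), so the returned index is that of the brace closing the caller's
# opening brace:
#
#   findPairedBrace("a { b } c } rest")   # -> 10, the index of the last '}'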
def getNamespaces(code):
namespaces = dict()
    namespaceBeginPattern = re.compile(r"^namespace(?:\s+(?P<name>\w+))?\s*\{", re.M | re.S)
lastEnd = 0
for match in namespaceBeginPattern.finditer(code):
# skip inner namespaces
if match.start() < lastEnd:
continue
nsStart = match.end() # behind opening brace
nsEnd = findPairedBrace(code[nsStart:]) + nsStart # index of closing brace
subNamespaces = getNamespaces(code[nsStart:nsEnd])
namespaces[(nsStart,nsEnd)] = (match.group("name") or "<unnamed>", subNamespaces)
# remember end for skipping inner namespaces
lastEnd = nsEnd
return namespaces
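# Illustrative example (child spans are relative to the parent's body, which
# is why namespaceAtPosition subtracts span[0] when recursing):
#
#   src = "namespace outer {\nnamespace inner {\n}\n}"
#   getNamespaces(src)
#   # -> {(17, 38): ('outer', {(18, 19): ('inner', {})})}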
def namespaceAtPosition(namespaces, pos):
for span in namespaces:
if pos in range(*span):
innerNS = namespaceAtPosition(namespaces[span][1], pos - span[0])
return namespaces[span][0] + ("::" + innerNS if innerNS else "")
return ""
# ------------
# small-scale parsing
def removeCVRef(typeString):
return re.sub(r'^(?:const |volatile )*(.*?)(?:\s*&)?$', r'\1', typeString)
def splitParams(paramString):
splitParams = [p.strip() for p in paramString.split(',') if p.strip()]
i = 0
while i < len(splitParams)-1:
if splitParams[i].count('<') != splitParams[i].count('>'):
splitParams[i:i+2] = [splitParams[i] + ", " + splitParams[i+1]]
else:
i += 1
paramDefaults = [(split[0].strip(), split[1].strip() if len(split) > 1 else '') for split in [p.rsplit('=', 1) for p in splitParams]]
paramsSplit = [(l.strip(), r.strip(), d) for l,r,d in [p.rsplit(' ', 1) + [d] for p,d in paramDefaults]]
return paramsSplit
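# Illustrative example: commas inside template argument lists are re-joined
# before the (type, name, default) split:
#
#   splitParams("int a, V<T, P> b = V<T, P>(0)")
#   # -> [('int', 'a', ''), ('V<T, P>', 'b', 'V<T, P>(0)')]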
def removeParamDefaults(params):
return [(p[0], p[1]) for p in params]
def getParamNames(params):
return [p[1] for p in params]
def getParamTypes(params):
return [p[0] for p in params]
def getParamDefaults(params):
return [(p[1], p[2]) for p in params if p[2]]
def possibleTypes(argType, templateList):
if re.match("^\w+$", argType): # argType is just single word, e.g. 'T'
if "std::enable_if<std::is_floating_point<"+argType+">::value>::type" in templateList:
return {"float"}
else:
return {"float", "vec2", "vec3", "vec4"}
genVecMatch = re.match("(\w+)\s*<\s*\w+\s*,\s*\w+\s*>", argType) # general glm vector, e.g. 'V<T, P>'
if genVecMatch:
if re.search("template\s*<\s*(?:typename|class)\s*,\s*glm::precision\s*>\s*(?:typename|class)\s*" + genVecMatch.group(1), templateList):
return {"vec2", "vec3", "vec4"}
specVecMatch = re.match("glm::tvec(\d)<.*?>", argType) # specific glm vector, e.g. 'glm::tcev4<T, P>'
if specVecMatch:
return {"vec"+specVecMatch.group(1)}
return {argType}
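# Illustrative example: a bare template parameter may be a scalar or any glm
# vector unless an enable_if constraint in the template list pins it down:
#
#   possibleTypes("T", "template <typename T>")
#   # -> {'float', 'vec2', 'vec3', 'vec4'}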
def paramTypeFromKernelTypes(kernelTypeString, paramTypeString, templateList, enums):
if possibleTypes(paramTypeString, templateList) == {'float'}:
return "float"
strippedTypeString = removeCVRef(paramTypeString)
if kernelTypeString == strippedTypeString: # e.g. 'V<T, P>' and 'const V<T, P>&'
return "same"
if strippedTypeString in kernelTypeString: # e.g. 'const T&' and 'V<T, P>'
return "float"
if strippedTypeString in [e["name"] for e in enums]:
return strippedTypeString
if strippedTypeString in standardTypes:
return strippedTypeString
print("Unknown Type encountered: " + paramTypeString)
def getEnumValues(valueDefString):
definitions = [d.strip() for d in valueDefString.split(',')]
values = []
i = 0
for d in definitions:
if '=' in d:
_, _, expr = d.partition('=')
i = eval(expr, dict(values))
values.append((d,i))
i += 1
return values
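# Illustrative example: an explicit '= expr' resets the running counter, and
# previously seen names are visible to the eval'd expression:
#
#   getEnumValues("A, B = 5, C")
#   # -> [('A', 0), ('B = 5', 5), ('C', 6)]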
# ------------
# generation
def enumForJS(value, enums):
if "::" not in value:
return value
enumDict = {enum["name"]: {valueName:value for valueName, value in enum["values"]} for enum in enums}
enumName, _, valueName = value.partition("::")
if enumName not in enumDict:
# TODO: Warning?
return value
if valueName not in enumDict[enumName]:
# TODO: Warning?
return value
return enumName + "." + valueName
def jsFuncName(func):
name = func["name"]
if "alternativeNumber" in func:
name += str(func["alternativeNumber"])
return "_".join(func["namespace"].split('::')[1:] + [name])
def jsFunction(func, enums):
assert func["namespace"].startswith("glkernel::"), "function \""+func["name"]+"\" from outside glkernel namespace: " + func["namespace"]
namespaceStack = func["namespace"].split("::")
    namespaceStack.pop(0)  # ignore the outermost namespace, glkernel
defaultChecks = '\n'.join([" {name} = (typeof {name} !== 'undefined') ? {name} : {default};".format(name=name, default=enumForJS(default, enums)) for name, default in getParamDefaults(func["params"])])
if defaultChecks:
defaultChecks = "\n // Defaults\n" + defaultChecks + "\n"
paramString = ', '.join(getParamNames(func["params"]))
    paramStringComma = "" if not paramString else ', ' + paramString
    firstLine = "    {name}: function({params}) {{".format(name = func["name"], params = paramString)
    finalCall = "        _glkernel.{generatedName}(that.kernel{paramsWithComma});".format(generatedName = jsFuncName(func), paramsWithComma = paramStringComma)
jsCode = """{firstLine}{defaultChecks}
{finalCall}
return that;
}}""".format(firstLine = firstLine, defaultChecks = defaultChecks, finalCall = finalCall)
return jsCode
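# Sketch of the emitted JS for an assumed func entry
# {"name": "uniform", "namespace": "glkernel::noise",
#  "params": [("const T&", "range_min", "0.0")]} -- roughly:
#
#   uniform: function(range_min) {
#       // Defaults
#       range_min = (typeof range_min !== 'undefined') ? range_min : 0.0;
#
#       _glkernel.noise_uniform(that.kernel, range_min);
#       return that;
#   }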
def buildJSNamespaces(funcs, enums):
namespaces = dict()
for func in funcs:
if func["namespace"] not in namespaces:
namespaces[func["namespace"]] = []
namespaces[func["namespace"]].append(jsFunction(func, enums))
nsCodes = []
for ns, codes in sorted(namespaces.items()):
name = ns[len("glkernel::"):]
functionsCode = ",\n".join(codes)
nsCode = " this.{name} = {{\n{funcCodes}\n }};".format(name = name, funcCodes = functionsCode)
nsCodes.append(nsCode)
return "\n".join(nsCodes)
def buildJSEnums(enums):
enumCodes = []
for enum in sorted(enums, key=lambda e: e["name"]):
valueLines = []
for name, value in enum["values"]:
valueLines.append(" " + name + ": " + str(value))
valuesCode = ',\n'.join(valueLines)
enumCode = "{name} = {{\n{members}\n}};".format(name = enum["name"], members = valuesCode)
enumCodes.append(enumCode)
return "\n\n".join(enumCodes)
def buildCPPFunctionAdds(funcs):
return '\n'.join([' addFunction("{name}", this, &JSInterface::{name});'.format(name = jsFuncName(func)) for func in funcs])
def buildCPPFunctionForwardDecl(func, enums):
enumNames = [enum["name"] for enum in enums]
funcName = jsFuncName(func)
# Deduce parameter types
kernelTypes = possibleTypes(func["kernelType"], func["template"])
paramTypes = [paramTypeFromKernelTypes(func["kernelType"], param[0], func["template"], enums) for param in func["params"]]
cases = [(kernelType, [kernelType if param == "same" else param for param in paramTypes]) for kernelType in kernelTypes]
if "alternatives" in func:
for alt in func["alternatives"]:
altKernelTypes = possibleTypes(alt["kernelType"], alt["template"])
altParamTypes = [paramTypeFromKernelTypes(alt["kernelType"], param[0], alt["template"], enums) for param in alt["params"]]
cases += [(kernelType, [kernelType if param == "same" else param for param in altParamTypes]) for kernelType in altKernelTypes]
cases.sort()
typesPerParam = [{case[1][i] for case in cases} for i in range(len(cases[0][1]))]
variantNeeded = [len(types) > 1 for types in typesPerParam]
enumParam = [list(types)[0] in enumNames for types in typesPerParam]
paramTypes = ["cppexpose::Object*"] + ["const cppexpose::Variant&" if needVariant else "int" if isEnum else list(types)[0] for types, needVariant, isEnum in zip(typesPerParam, variantNeeded, enumParam)]
paramNames = ["obj"] + [param[1] for param in func["params"]]
paramList = ", ".join(type + " " + name for type,name in zip(paramTypes, paramNames))
return " void " + funcName + "(" + paramList + ");"
def buildCPPFunctionForwardDecls(funcs, enums):
return '\n'.join([buildCPPFunctionForwardDecl(func, enums) for func in funcs])
def buildCPPIncludes(fileNames):
    includeFiles = []
    for f in fileNames:
        if "include/" not in f:
            print("Error: " + f + " is outside include directory!")
            continue
        # Keep only the path below the include/ directory
        f = f[f.index("include/") + len("include/"):]
        includeFiles.append(f)
    return '\n'.join(['#include <' + name + '>' for name in includeFiles])
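# E.g. (path assumed):
#   buildCPPIncludes(["../source/glkernel/include/glkernel/noise.h"])
#   ->  '#include <glkernel/noise.h>'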
def buildCPPImplementation(func, enums):
enumNames = [enum["name"] for enum in enums]
funcName = jsFuncName(func)
# Deduce parameter types
kernelTypes = possibleTypes(func["kernelType"], func["template"])
paramTypes = [paramTypeFromKernelTypes(func["kernelType"], param[0], func["template"], enums) for param in func["params"]]
cases = [(kernelType, [kernelType if param == "same" else param for param in paramTypes]) for kernelType in kernelTypes]
if "alternatives" in func:
for alt in func["alternatives"]:
altKernelTypes = possibleTypes(alt["kernelType"], alt["template"])
altParamTypes = [paramTypeFromKernelTypes(alt["kernelType"], param[0], alt["template"], enums) for param in alt["params"]]
cases += [(kernelType, [kernelType if param == "same" else param for param in altParamTypes]) for kernelType in altKernelTypes]
cases.sort()
typesPerParam = [{case[1][i] for case in cases} for i in range(len(cases[0][1]))]
variantNeeded = [len(types) > 1 for types in typesPerParam]
enumParam = [list(types)[0] in enumNames for types in typesPerParam]
paramTypes = ["cppexpose::Object*"] + ["const cppexpose::Variant&" if needVariant else "int" if isEnum else list(types)[0] for types, needVariant, isEnum in zip(typesPerParam, variantNeeded, enumParam)]
paramNames = ["obj"] + [param[1] for param in func["params"]]
paramList = ", ".join(type + " " + name for type,name in zip(paramTypes, paramNames))
# Parameters with only one possible type may be handled before branching into kernel types
earlyConv = []
for param, enumType in [(name, list(types)[0]) for name, types, isEnum in zip(paramNames[1:], typesPerParam, enumParam) if isEnum]:
enum = [e for e in enums if e["name"] == enumType][0]
earlyConv.append(" const auto {name}_enum = static_cast<{namespace}::{type}>({name});".format(name=param, type=enum["name"], namespace = enum["namespace"]))
earlyConversions = '\n'.join(earlyConv)
if earlyConversions:
earlyConversions += '\n\n'
# Split cases by kernel type
casesByKernelType = dict()
for kernel, params in cases:
if kernel not in casesByKernelType:
casesByKernelType[kernel] = []
casesByKernelType[kernel].append(params)
# Build code for different kernel types
kernelCases = []
for kernelType, cases in sorted(casesByKernelType.items()):
kernelDim = 1 if kernelType == "float" else int(kernelType[-1])
firstLine = " if (auto kernelObj = dynamic_cast<Kernel" + str(kernelDim) + "Object*>(obj))"
neededVariantChecks = False
# Build code for specific parameter type constellations
paramCases = []
for case in cases:
# Check if variants contain acceptable values
variantChecks = []
for name, type, needsVariant in zip(paramNames[1:], case, variantNeeded):
if not needsVariant:
continue
checkFunction = "canBe" + type[0].upper() + type[1:]
variantChecks.append(checkFunction + "(" + name + ")")
neededVariantChecks = True
# Unpack variants to usable values
variantUnpackers = []
for name, type, needsVariant in zip(paramNames[1:], case, variantNeeded):
if not needsVariant:
continue
convFunction = "variantTo" + type[0].upper() + type[1:]
variantUnpackers.append(" const auto {name}_conv = {func}({name});".format(name = name, func = convFunction))
variantUnpackingCode = '\n'.join(variantUnpackers)
if variantUnpackingCode:
variantUnpackingCode += '\n\n'
finalCallParams = ["kernelObj->kernel()"] + [name + ("_enum" if isEnum else "_conv" if needsVariant else "") for name, isEnum, needsVariant in zip(paramNames[1:], enumParam, variantNeeded)]
finalCallParamString = ', '.join(finalCallParams)
finalCallString = " {namespace}::{name}({params});".format(namespace = func["namespace"], name = func["name"], params = finalCallParamString)
innerCode = "{variants}{finalCall}\n return;".format(variants = variantUnpackingCode, finalCall = finalCallString)
caseCode = innerCode
if variantChecks:
variantCheckCode = ' && '.join(variantChecks)
indentedInnerCode = '\n'.join([(" " + line).rstrip() for line in innerCode.split('\n')])
caseCode = " if ({varChecks})\n {{\n{innerCode}\n }}".format(varChecks = variantCheckCode, innerCode = indentedInnerCode)
paramCases.append(caseCode)
if neededVariantChecks:
paramCases.append(" cppassist::error(\"glkernel-JSInterface\") << \"Invalid parameters for " + funcName + "\";\n return;")
paramCasesCode = '\n\n'.join(paramCases)
kernelCaseCode = "{firstLine}\n {{\n{cases}\n }}".format(firstLine = firstLine, cases = paramCasesCode)
kernelCases.append(kernelCaseCode)
kernelCasesCode = '\n\n'.join(kernelCases)
fullCode = """void JSInterface::{funcName}({paramList})
{{
{earlyConv}{cases}
cppassist::error("glkernel-JSInterface") << "Invalid kernel object for {funcName}";
}}""".format(funcName = funcName, paramList = paramList, earlyConv = earlyConversions, cases = kernelCasesCode)
return fullCode
def buildCPPImplementations(funcs, enums):
return '\n\n\n'.join([buildCPPImplementation(func, enums) for func in funcs])
# ------------
# misc
def dedupeFuncs(funcs):
i = 1
while i < len(funcs):
currentFunc = funcs[i]
for otherFunc in funcs[:i]:
if otherFunc["namespace"] != currentFunc["namespace"]:
continue
if otherFunc["name"] != currentFunc["name"]:
continue
if getParamNames(otherFunc["params"]) == getParamNames(currentFunc["params"]):
# identical in JS -> can be safely removed
funcs.remove(currentFunc)
i -= 1
if "alternatives" not in otherFunc:
otherFunc["alternatives"] = []
otherFunc["alternatives"].append(currentFunc)
break
if "renamedAlternatives" not in otherFunc:
otherFunc["renamedAlternatives"] = 0
otherFunc["renamedAlternatives"] += 1
currentFunc["alternativeNumber"] = otherFunc["renamedAlternatives"]
break
i += 1
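# Illustrative: two C++ overloads of an assumed glkernel::noise::uniform whose
# JS signatures (name plus parameter names) coincide are merged into a single
# entry carrying an "alternatives" list; overloads that differ keep their own
# entry but get numbered, so jsFuncName() yields "noise_uniform" and
# "noise_uniform1".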
# ------------
# main
def main(args):
glkernelIncludeDir = "../source/glkernel/include/glkernel"
sourceFiles = [posixpath.join(glkernelIncludeDir, p) for p in os.listdir(glkernelIncludeDir) if p not in ["Kernel.h", "glm_compatability.h"] and p.endswith(".h")]
funcPattern = re.compile(r"^template\s*<(?P<template>.*?)>$\s*^(?P<return>\w+)\s(?P<name>\w+)\(\s*tkernel<(?P<kernelType>.*?)>\s*&\s*\w+\s*(?P<params>(?:,.*?)*)\);$", re.M | re.S)
enumPattern = re.compile(r"^enum(?:\s+class)?\s+(?P<name>\w+)\s*(?::.*?\s*)?\{(?P<content>.*?)\};$", re.M | re.S)
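    # funcPattern is written to match kernel function declarations of the form
    # (illustrative, not an actual header excerpt):
    #   template <typename T, glm::precision P, template <typename, glm::precision> class V>
    #   void uniform(tkernel<V<T, P>> & kernel, const T& range_min, const T& range_max);
    # enumPattern matches plain and scoped enums, e.g. 'enum class GradientType { ... };'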
allFunctions = []
allEnums = []
for f in sourceFiles:
content = ''
with open(f,'r') as file:
content = file.read()
namespaces = getNamespaces(content)
functionMatches = [m for m in funcPattern.finditer(content)]
functions = [{
"name": f.group("name"),
"kernelType": f.group("kernelType"),
"namespace": namespaceAtPosition(namespaces, f.start()),
"params": splitParams(f.group("params")),
"return": f.group("return"),
"template": f.group("template")
} for f in functionMatches]
enumMatches = [m for m in enumPattern.finditer(content)]
enums = [{
"name": e.group("name"),
"values": getEnumValues(e.group("content")),
"namespace": namespaceAtPosition(namespaces, e.start())
} for e in enumMatches]
allFunctions.extend(functions)
allEnums.extend(enums)
dedupeFuncs(allFunctions)
funcsJSCode = buildJSNamespaces(allFunctions, allEnums)
enumJSCode = buildJSEnums(allEnums)
templateDir = args.inDir
cppDestDir = args.cppDir
jsDestDir = args.jsDir
with open(templateDir + "/glkernel.js.template", "r") as templateFile:
with open(jsDestDir + "/glkernel.js", "w") as outFile:
outFile.write(templateFile.read().format(enums=enumJSCode, functions=funcsJSCode))
forwardDecls = buildCPPFunctionForwardDecls(allFunctions, allEnums)
with open(templateDir + "/JSInterface.h.template", "r") as templateFile:
with open(cppDestDir + "/JSInterface.h", "w") as outFile:
outFile.write(templateFile.read().format(functionForwardDecls=forwardDecls))
includes = buildCPPIncludes(sourceFiles)
funcAdds = buildCPPFunctionAdds(allFunctions)
funcImpl = buildCPPImplementations(allFunctions, allEnums)
with open(templateDir + "/JSInterface.cpp.template", "r") as templateFile:
with open(cppDestDir + "/JSInterface.cpp", "w") as outFile:
outFile.write(templateFile.read().format(includes=includes, addFunctionCalls=funcAdds, generatedFunctions=funcImpl))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--templates", "-t", metavar="<dir>", type=str, default=".", dest="inDir", help="directory containing template files")
parser.add_argument("--cpp-dest" , "-c", metavar="<dir>", type=str, default=".", dest="cppDir", help="directory where result .h and .cpp files are written to")
parser.add_argument("--js-dest" , "-j", metavar="<dir>", type=str, default=".", dest="jsDir", help="directory where result .js files are written to")
args = parser.parse_args()
main(args)
|
mit
| 3,420,205,758,163,455,500
| 37.481553
| 220
| 0.612171
| false
| 3.753409
| false
| false
| false
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-monitor/azure/mgmt/monitor/models/rule_action.py
|
1
|
1307
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RuleAction(Model):
"""The action that is performed when the alert rule becomes active, and when
an alert condition is resolved.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: RuleEmailAction, RuleWebhookAction
:param odatatype: Constant filled by server.
:type odatatype: str
"""
_validation = {
'odatatype': {'required': True},
}
_attribute_map = {
'odatatype': {'key': 'odata\\.type', 'type': 'str'},
}
_subtype_map = {
'odatatype': {'Microsoft.Azure.Management.Insights.Models.RuleEmailAction': 'RuleEmailAction', 'Microsoft.Azure.Management.Insights.Models.RuleWebhookAction': 'RuleWebhookAction'}
}
def __init__(self):
self.odatatype = None
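# Illustrative subtype dispatch (payload shape assumed): msrest's
# Model.deserialize selects the concrete class from the 'odata.type' value,
# e.g. a dict with 'odata.type' set to
# 'Microsoft.Azure.Management.Insights.Models.RuleEmailAction' deserializes
# into a RuleEmailAction instance.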
|
mit
| -2,687,682,908,228,144,000
| 32.512821
| 187
| 0.613619
| false
| 4.400673
| false
| false
| false
|
UManPychron/pychron
|
pychron/experiment/experimentor.py
|
1
|
11553
|
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Instance, List, on_trait_change, Bool, Event
from pychron.dvc.dvc_irradiationable import DVCIrradiationable
from pychron.experiment.experiment_executor import ExperimentExecutor
from pychron.experiment.factory import ExperimentFactory
from pychron.experiment.queue.experiment_queue import ExperimentQueue
class Experimentor(DVCIrradiationable):
experiment_factory = Instance(ExperimentFactory)
experiment_queue = Instance(ExperimentQueue)
executor = Instance(ExperimentExecutor)
experiment_queues = List
# stats = Instance(StatsGroup, ())
mode = None
# unique_executor_db = False
save_enabled = Bool
# ===========================================================================
# permissions
# ===========================================================================
# max_allowable_runs = 10000
# can_edit_scripts = True
# _last_ver_time = None
# _ver_timeout = 10
# ===========================================================================
# task events
# ===========================================================================
activate_editor_event = Event
save_event = Event
    def prepare_destroy(self):
if self.executor:
if self.executor.datahub:
self.executor.datahub.prepare_destroy()
if self.experiment_factory:
if self.experiment_factory.run_factory:
if self.experiment_factory.run_factory.datahub:
self.experiment_factory.run_factory.datahub.prepare_destroy()
def load(self):
self.experiment_factory.queue_factory.db_refresh_needed = True
self.experiment_factory.run_factory.db_refresh_needed = True
return True
def reset_run_generator(self):
if self.executor.is_alive():
self.debug('Queue modified. Reset run generator')
# self.executor.queue_modified = True
self.executor.set_queue_modified()
def refresh_executable(self, qs=None):
if qs is None:
qs = self.experiment_queues
if self.executor.is_alive():
qs = (self.executor.experiment_queue,)
self.executor.executable = all([ei.is_executable() for ei in qs])
self.debug('setting executable {}'.format(self.executor.executable))
def update_queues(self):
self._update_queues()
def update_info(self):
try:
self._update()
except BaseException as e:
self.debug_exception()
self.warning_dialog('Failed updating info: Error={}'.format(e))
# ===============================================================================
# info update
# ===============================================================================
def _get_all_automated_runs(self, qs=None):
if qs is None:
qs = self.experiment_queues
return [ai for ei in qs
for ai in ei.automated_runs
if ai.executable]
def _update(self, queues=None):
self.debug('update runs')
if queues is None:
queues = self.experiment_queues
queues = [qi for qi in queues if qi.is_updateable()]
if not queues:
return
self.debug('executor executable {}'.format(self.executor.executable))
self.debug('updating stats, ')
self.executor.stats.calculate()
self.refresh_executable(queues)
self._set_analysis_metadata()
self.debug('info updated')
for qi in queues:
qi.refresh_table_needed = True
def _set_analysis_metadata(self):
cache = dict()
db = self.get_database()
aruns = self._get_all_automated_runs()
with db.session_ctx():
for ai in aruns:
if ai.skip:
continue
ln = ai.labnumber
if ln == 'dg':
continue
# is run in cache
if ln not in cache:
info = db.get_identifier_info(ln)
self.debug('Info for {}={}'.format(ln, info))
if not info:
cache[ln] = dict(identifier_error=True)
else:
info['identifier_error'] = False
cache[ln] = info
ai.trait_set(**cache[ln])
def execute_queues(self, queues):
names = ','.join([e.name for e in queues])
self.debug('queues: n={}, names={}'.format(len(queues), names))
self.executor.trait_set(experiment_queues=queues, experiment_queue=queues[0])
return self.executor.execute()
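    # Typical call site (names assumed): the experiment task hands over all
    # currently open queues, e.g.
    #   experimentor.execute_queues(experimentor.experiment_queues)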
def verify_database_connection(self, inform=True):
db = self.get_database()
if db is not None:
if db.connect(force=True):
return True
elif inform:
self.warning_dialog('No Database available')
def sync_queue(self, queue):
ms = queue.mass_spectrometer
ed = queue.extract_device
db = self.get_database()
with db.session_ctx():
next_pos = None
for i, ai in enumerate(queue.automated_runs):
if ai.skip or ai.is_special():
continue
kw = {'identifier': ai.identifier, 'position': ai.position,
'mass_spectrometer': ms.lower(),
'extract_device': ed}
if ai.is_step_heat():
kw['aliquot'] = ai.aliquot
kw['extract_value'] = ai.extract_value
self.debug('checking {}/{}. attr={}'.format(i, ai.runid, kw))
aa = db.get_analysis_by_attr(**kw)
if aa is None:
self.debug('----- not found')
if next_pos == ai:
i -= 1
break
elif not self.confirmation_dialog('Found analyses up to {}. '
'position={}, extract={}. '
'Continue searching?'.format(ai.runid, ai.extract_value,
ai.position)):
break
next_pos = queue.automated_runs[i + 1]
if i:
if i == len(queue.automated_runs) - 1:
self.information_dialog('All Analyses from this experiment have been run')
else:
queue.automated_runs = queue.automated_runs[i:]
else:
self.information_dialog('No Analyses from this experiment have been run')
# ===============================================================================
# handlers
# ===============================================================================
def _experiment_queue_changed(self, eq):
if eq:
self.experiment_factory.queue = eq
self.experiment_factory.sync_queue_meta()
self.experiment_factory.edit_enabled = True
else:
self.experiment_factory.edit_enabled = False
@on_trait_change('executor:experiment_queue')
def _activate_editor(self, eq):
self.activate_editor_event = id(eq)
@on_trait_change('experiment_queues[]')
def _update_queues(self):
qs = self.experiment_queues
self.executor.stats.experiment_queues = qs
@on_trait_change('experiment_factory:run_factory:changed')
def _queue_dirty(self):
self.experiment_queue.changed = True
@on_trait_change('experiment_queue:dclicked')
def _dclicked_changed(self, new):
self.experiment_factory.run_factory.edit_mode = True
self._set_factory_runs(self.experiment_queue.selected)
@on_trait_change('experiment_factory:run_factory:update_info_needed')
def _refresh3(self):
self.debug('update info needed fired')
self.update_info()
@on_trait_change('executor:queue_modified')
def _refresh5(self, new):
if new:
self.debug('queue modified fired')
self.update_info()
@on_trait_change('experiment_factory:run_factory:refresh_table_needed')
def _refresh4(self):
for qi in self.experiment_queues:
qi.refresh_table_needed = True
@on_trait_change('experiment_factory:save_button')
def _save_update(self):
self.save_event = True
self.update_info()
@on_trait_change('experiment_queue:refresh_info_needed')
def _handle_refresh(self):
self.update_info()
@on_trait_change('experiment_queue:selected')
def _selected_changed(self, new):
ef = self.experiment_factory
rf = ef.run_factory
rf.edit_mode = False
if new:
self._set_factory_runs(new)
# if self.executor.is_alive():
a = new[-1]
if not a.skip:
self.executor.stats.calculate_at(a, at_times=self.executor.is_alive())
# self.stats.calculate()
@on_trait_change('experiment_factory:queue_factory:delay_between_analyses')
def handle_delay_between_analyses(self, new):
if self.executor.is_alive():
self.executor.experiment_queue.delay_between_analyses = new
def _set_factory_runs(self, new):
ef = self.experiment_factory
rf = ef.run_factory
# print 'set runs'
# rf.special_labnumber = 'Special Labnumber'
rf.suppress_update = True
rf.set_selected_runs(new)
rf.suppress_update = False
def _executor_factory(self):
e = ExperimentExecutor(mode=self.mode,
application=self.application)
e.bind_preferences()
return e
# ===============================================================================
# defaults
# ===============================================================================
def _executor_default(self):
return self._executor_factory()
def _experiment_factory_default(self):
dms = 'Spectrometer'
if self.application:
p2 = 'pychron.spectrometer.base_spectrometer_manager.BaseSpectrometerManager'
spec = self.application.get_service(p2)
if spec:
dms = spec.name.capitalize()
e = ExperimentFactory(application=self.application,
dvc=self.dvc,
default_mass_spectrometer=dms)
return e
# ============= EOF =============================================
|
apache-2.0
| -4,886,870,572,820,441,000
| 34.990654
| 110
| 0.521769
| false
| 4.566403
| false
| false
| false
|