code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
#!/usr/bin/env python
import os
import sys
import time
import cv2
import numpy as np
from imutils.video import WebcamVideoStream
import vision.cv_utils as cv_utils
from vision.network_utils import put, flush, table, settings_table
from vision.centroid_tracker import CentroidTracker
from . import args
class Vision:
    """Vision-processing pipeline for robot target tracking.

    Reads frames from a still image or a camera stream, thresholds them in
    HSV to find blobs, pairs adjacent blobs into vision targets ("goals"),
    tracks the goal the robot has locked onto, and publishes distance/angle
    calculations over NetworkTables.
    """

    def __init__(self):
        self.args = args
        self.tracker = CentroidTracker()
        # Whether to start calculating distances/angles to the tracked target.
        # Sent from the robot over NetworkTables.
        self.locked = False
        # Tracker id of the goal currently locked onto (None until assigned).
        self.lock_id = None
        # Lower/upper HSV color values plus min/max area and min/max fullness of blobs.
        self.settings = {
            'lower': np.array(self.args['lower_color']),
            'upper': np.array(self.args['upper_color']),
            'min_area': int(self.args['min_area']),
            'max_area': int(self.args['max_area']),
            'min_full': float(self.args['min_full']),
            'max_full': float(self.args['max_full']),
        }
        # Path to a still image; when set, run() processes it instead of video.
        self.image = self.args['image']
        # Whether to display results of processing in a new window.
        self.display = self.args['display']
        # Whether to print log values to the console.
        self.verbose = self.args['verbose']
        # Path to video source (or camera number if integer).
        self.source = self.args['source']
        if self.source.isdigit():
            self.source = int(self.source)
            if sys.platform == 'win32':
                # Select the DirectShow capture backend on Windows.
                self.source += cv2.CAP_DSHOW
        # Whether to display sliders to adjust settings.
        self.tuning = self.args['tuning']
        # Publish the initial settings on NetworkTables.
        self.put_settings()
        # Set self.locked to the NT value whenever the 'locked' key is updated.
        table.addEntryListener(self.lock_listener, immediateNotify=True, localNotify=True, key='locked')
        # Update self.settings whenever a key on the settings table is updated.
        settings_table.addEntryListener(self.settings_listener, immediateNotify=True, localNotify=True)
        if self.verbose:
            print(self.args)

    def run(self):
        """Dispatch to still-image or video processing based on the args."""
        if self.image is not None:
            self.run_image()
        else:
            self.run_video()

    def do_image(self, im):
        """Process one HSV frame.

        Finds blobs, pairs them into goals, tracks the locked goal and
        publishes its distance/angle data over NetworkTables.

        Returns a tuple of (possibly annotated image, threshold mask).
        """
        # Get all the contours in the image and the mask used to find them
        blobs, mask = cv_utils.get_blobs(im, self.settings['lower'], self.settings['upper'])
        # Don't process any blobs if vision isn't locked
        if not self.locked:
            self.tracker.deregister_all()
            return im, mask
        found_blob = False
        # Create list of rectangles that bound the blobs
        bounding_rects = [cv2.boundingRect(blob) for blob in blobs]
        # Sort blobs by their bounding rect (descending) and pair each with its rect
        sorted_blobs = sorted(zip(bounding_rects, blobs), key=lambda x: x[0], reverse=True)
        goals = []
        prev_target = None
        prev_blob = None
        for bounding_rect, blob in sorted_blobs:
            if blob is not None and mask is not None:
                # Location and size of the upright rectangle bounding the contour
                x, y, w, h = bounding_rect
                # Skip over the blob if its area is too small
                if w * h < self.settings['min_area']:
                    continue
                # Minimum-area (possibly rotated) rectangle bounding the blob
                target = cv2.minAreaRect(blob)
                # Coordinates of the 4 corners of the rectangle bounding the blob
                box = np.int0(cv2.boxPoints(target))
                # Straighten the box to a top-down view and get transformed width/height
                transformed_box = cv_utils.four_point_transform(mask, box)
                width, height = transformed_box.shape
                # Proportion of the transformed box that's filled up by the blob
                full = cv_utils.get_percent_full(transformed_box)
                area = width * height
                # Keep only boxes within the configured area and fullness bounds
                if self.settings['min_area'] <= area <= self.settings['max_area'] and self.settings['min_full'] <= full <= self.settings['max_full']:
                    if self.verbose:
                        print('[Goal] x: %d, y: %d, w: %d, h: %d, area: %d, full: %f, angle: %f' % (x, y, width, height, area, full, target[2]))
                    if self.display:
                        # Draw rectangles around goals and points on the center of the goal
                        im = cv_utils.draw_images(im, target, box)
                    if prev_target is not None:
                        # NOTE(review): 'sum' shadows the builtin; it holds the
                        # difference of the two tapes' absolute angles.
                        sum = abs(prev_target[2]) - abs(target[2])
                        # A negative difference means the two tapes lean toward
                        # each other, i.e. left + right sides of one vision target
                        if sum < 0:
                            goals.append(((prev_target, target), (prev_blob, blob)))
                    # Remember this blob as the candidate left tape for the next pair
                    prev_target = target
                    prev_blob = blob
        if len(goals) > 0:
            goal_centers = None
            try:
                # Robot angle, target angle, x/y distance, distance, centroid per goal
                goal_centers = [cv_utils.process_image(im, goal[0], goal[1]) for goal in goals]
            except:
                # Best effort: a failed calculation leaves goal_centers as None
                pass
            if goal_centers is None:
                return im, mask
            # Sort goals by the combined robot/target angle offset (most head-on first)
            possible_goals = sorted(zip(goal_centers, goals), key=lambda x: abs(x[0][0] + x[0][1]))
            # Update the centroid tracker with each goal's centroid (element 5)
            objects = self.tracker.update([centers[5] for centers, _ in possible_goals])
            centers = None
            goal = None
            # Check if it's the first time locking onto the particular goal
            if self.lock_id is None:
                # Find the goal closest to where the robot is facing
                probable_goal = possible_goals[0]
                centers, goal = probable_goal
                # Adopt the tracker id whose centroid matches this goal
                for index in objects:
                    if centers[5] == objects[index]:
                        self.lock_id = index
                        break
            else:
                try:
                    # Find the tracked goal that corresponds to the lock id
                    centers, goal = next(filter(lambda goal: goal[0][5] == objects[self.lock_id], possible_goals))
                except Exception:
                    print('Exception while finding goal with lock id')
            if centers is not None:
                robot_angle, target_angle, x_distance, y_distance, distance, _ = centers
                # Put calculations on NetworkTables
                put('distance', distance)
                put('x_distance', x_distance)
                put('y_distance', y_distance)
                put('robot_angle', robot_angle)
                put('target_angle', target_angle)
                found_blob = True
        put('found', found_blob)
        # Send the data to NetworkTables
        flush()
        return im, mask

    def run_image(self):
        """Process a single image from disk and optionally display the result."""
        if self.verbose:
            print('Image path specified, reading from %s' % self.image)
        bgr = cv2.imread(self.image)
        im = cv2.cvtColor(bgr, cv2.COLOR_BGR2HSV)
        im, mask = self.do_image(im)
        if self.display:
            # Show the images until a key is pressed
            cv2.imshow('Original', cv2.cvtColor(im, cv2.COLOR_HSV2BGR))
            if mask is not None:
                cv2.imshow('Mask', mask)
            cv2.waitKey(0)
            cv2.destroyAllWindows()

    def run_video(self):
        """Continuously read frames from the camera, process and optionally
        display them; saves tuned HSV thresholds to disk on exit when tuning."""
        if self.verbose:
            print('No image path specified, reading from camera video feed')
        # Start reading images from the camera
        camera = WebcamVideoStream(src=self.source).start()
        # Set stream size -- TODO: figure out why this isn't working
        # camera.stream.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
        # camera.stream.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
        timeout = 0
        # Create trackbars to tune lower and upper HSV values if in tuning mode
        if self.tuning:
            cv2.namedWindow('Settings')
            cv2.resizeWindow('Settings', 700, 350)
            cv2.createTrackbar('Lower H', 'Settings', self.settings['lower'][0], 255,
                               lambda val: self.update_thresh(True, 0, val))
            cv2.createTrackbar('Lower S', 'Settings', self.settings['lower'][1], 255,
                               lambda val: self.update_thresh(True, 1, val))
            cv2.createTrackbar('Lower V', 'Settings', self.settings['lower'][2], 255,
                               lambda val: self.update_thresh(True, 2, val))
            cv2.createTrackbar('Upper H', 'Settings', self.settings['upper'][0], 255,
                               lambda val: self.update_thresh(False, 0, val))
            cv2.createTrackbar('Upper S', 'Settings', self.settings['upper'][1], 255,
                               lambda val: self.update_thresh(False, 1, val))
            cv2.createTrackbar('Upper V', 'Settings', self.settings['upper'][2], 255,
                               lambda val: self.update_thresh(False, 2, val))
        # Buffer for raw camera frames in the BGR color space
        bgr = np.zeros(shape=(360, 640, 3), dtype=np.uint8)
        # Buffer for camera frames converted to the HSV color space
        im = np.zeros(shape=(360, 640, 3), dtype=np.uint8)
        while True:
            # Fetches the most recent frame from the camera stream
            bgr = camera.read()
            if bgr is not None:
                # Resize raw camera frame and convert into HSV
                im = cv2.cvtColor(cv2.resize(bgr, (640, 360), 0, 0), cv2.COLOR_BGR2HSV)
                im, mask = self.do_image(im)
                # Display the original image and the masked image in two separate windows
                if self.display:
                    if im is not None:
                        cv2.imshow('Original', cv2.cvtColor(im, cv2.COLOR_HSV2BGR))
                    if mask is not None:
                        cv2.imshow('Mask', mask)
                    if cv2.waitKey(1) & 0xFF == ord('q'):
                        break
            else:
                # Stop the program after 5 seconds if a camera hasn't been found
                if timeout == 0:
                    timeout = time.time()
                if time.time() - timeout > 5.0:
                    print('Camera search timed out!')
                    break
        if self.tuning:
            setting_names = ['Lower H', 'Lower S', 'Lower V', 'Upper H', 'Upper S', 'Upper V']
            # Create a settings directory if it doesn't exist
            if not os.path.exists('settings'):
                os.makedirs('settings')
            # Save new HSV values to a file in the settings directory
            with open('settings/save-{}.thr'.format(round(time.time() * 1000)), 'w') as thresh_file:
                values = enumerate(self.settings['lower'].tolist() + self.settings['upper'].tolist())
                # NOTE(review): value[0] indexes into each threshold entry, which
                # only works if lower/upper are nested (e.g. [[h], [s], [v]]);
                # confirm against the 'lower_color' argument format.
                thresh_file.writelines(['{}: {}\n'.format(setting_names[num], value[0])
                                        for num, value in values])
        camera.stop()
        cv2.destroyAllWindows()

    # Adjusts one component of the lower or upper HSV threshold
    def update_thresh(self, lower, index, value):
        if lower:
            self.settings['lower'][index] = value
        else:
            self.settings['upper'][index] = value
        # Re-publish so other clients see the tuned value
        self.put_settings()

    # NT listener: mirrors the 'locked' key and resets the lock id on unlock
    def lock_listener(self, source, key, value, is_new):
        self.locked = value
        if not value:
            self.lock_id = None

    # Updates settings with values sent over NetworkTables
    def settings_listener(self, source, key, value, is_new):
        key_parts = key.split('_')
        if key_parts[0] == 'lower' or key_parts[0] == 'upper':
            # NOTE(review): "key_parts[1] == 'S' in key" is a chained comparison
            # ((key_parts[1] == 'S') and ('S' in key)); it happens to work for
            # keys like 'lower_S' but probably meant key_parts[1] == 'S'.
            index = 0 if key_parts[1] == 'H' else 1 if key_parts[1] == 'S' in key else 2
            self.settings[key_parts[0]][index] = value
        else:
            self.settings[key] = value

    # Updates settings on NetworkTables
    def put_settings(self):
        for setting in self.settings:
            if setting == 'lower' or setting == 'upper':
                # NOTE(review): double indexing ([0][0] etc.) implies the HSV
                # arrays are nested; confirm this matches np.array(lower_color).
                settings_table.putValue('{}_H'.format(setting), int(self.settings[setting][0][0]))
                settings_table.putValue('{}_S'.format(setting), int(self.settings[setting][1][0]))
                settings_table.putValue('{}_V'.format(setting), int(self.settings[setting][2][0]))
            else:
                settings_table.putValue(setting, self.settings[setting])
|
SouthEugeneRoboticsTeam/vision
|
vision/app.py
|
Python
|
gpl-3.0
| 13,081
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""pytest helper to monkeypatch the message module."""
import logging
import attr
import pytest
from qutebrowser.utils import usertypes, message, objreg
@attr.s
class Message:

    """Information about a shown message (its level and text)."""

    level = attr.ib()  # a usertypes.MessageLevel value
    text = attr.ib()   # the message string that was shown
class MessageMock:

    """Helper object for message_mock.

    Records every message emitted through message.global_bridge while
    patched, and re-logs it under the 'messagemock' logger.

    Attributes:
        Message: A object representing a message.
        messages: A list of Message objects.
    """

    def __init__(self):
        self.messages = []

    def _record_message(self, level, text):
        """Slot connected to show_message: log and store the message."""
        # Map message levels onto stdlib logging levels for re-logging.
        log_levels = {
            usertypes.MessageLevel.error: logging.ERROR,
            usertypes.MessageLevel.info: logging.INFO,
            usertypes.MessageLevel.warning: logging.WARNING,
        }
        log_level = log_levels[level]
        logging.getLogger('messagemock').log(log_level, text)
        self.messages.append(Message(level, text))

    def getmsg(self, level=None):
        """Get the only message in self.messages.

        Asserts (rather than raises ValueError) if there is not exactly
        one recorded message.

        Args:
            level: The message level to check against, or None.
        """
        assert len(self.messages) == 1
        msg = self.messages[0]
        if level is not None:
            assert msg.level == level
        return msg

    def patch(self):
        """Start recording messages."""
        message.global_bridge.show_message.connect(self._record_message)
        # Pretend a real UI is connected so messages are dispatched.
        message.global_bridge._connected = True

    def unpatch(self):
        """Stop recording messages."""
        message.global_bridge.show_message.disconnect(self._record_message)
@pytest.fixture
def message_mock():
    """Fixture to get a MessageMock that records messages for one test."""
    mmock = MessageMock()
    mmock.patch()
    yield mmock
    # Teardown: disconnect from the bridge so later tests are unaffected.
    mmock.unpatch()
@pytest.fixture
def message_bridge(win_registry):
    """Fixture to get a MessageBridge registered for window 0."""
    bridge = message.MessageBridge()
    objreg.register('message-bridge', bridge, scope='window', window=0)
    yield bridge
    # Teardown: remove the registration again.
    objreg.delete('message-bridge', scope='window', window=0)
|
t-wissmann/qutebrowser
|
tests/helpers/messagemock.py
|
Python
|
gpl-3.0
| 2,866
|
# This file is part of Sibyl.
# Copyright 2014 Camille MOUGEY <camille.mougey@cea.fr>
#
# Sibyl is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sibyl is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sibyl. If not, see <http://www.gnu.org/licenses/>.
from sibyl.test.test import Test, TestSetTest
class TestAbs(Test):
    """Sibyl test case for the C stdlib ``abs`` function."""

    value = 42

    # Test 1: abs of a positive argument is the identity.
    def init1(self):
        self._add_arg(0, self.value + 1)

    def check1(self):
        result = self._get_result()
        return result == (self.value + 1)

    # Test 2: abs of a negative argument is its negation.
    def init2(self):
        self._add_arg(0, self._as_int(-1 * self.value))

    def check2(self):
        result = self._get_result()
        return result == self.value

    # Properties: both sub-tests must pass for a candidate to match ``abs``.
    func = "abs"
    tests = TestSetTest(init1, check1) & TestSetTest(init2, check2)
class TestA64l(Test):
    """Sibyl test case for the C stdlib ``a64l`` function.

    "v/" is the radix-64 encoding of 123; the input string must also be
    left untouched in memory.
    """

    my_string = "v/"
    value = 123

    # Test: decode the string placed in allocated memory.
    def init(self):
        self.my_addr = self._alloc_string(self.my_string)
        self._add_arg(0, self.my_addr)

    def check(self):
        result = self._get_result()
        return all([result == self.value,
                    self._ensure_mem(self.my_addr, self.my_string)])

    # Properties
    func = "a64l"
    tests = TestSetTest(init, check)
class TestAtoi(Test):
    """Sibyl test case for the C stdlib ``atoi`` function.

    Checks a plain number and a string with a non-numeric tail
    ("127.0.0.1" must parse as 127), without modifying the input.
    """

    my_string = "44"
    my_string2 = "127.0.0.1"

    # Shared helpers: place the string in memory and compare the result
    # against the leading integer part.
    def my_init(self, string):
        self.my_addr = self._alloc_string(string)
        self._add_arg(0, self.my_addr)

    def my_check(self, string):
        result = self._get_result()
        return all([result == int(string.split(".")[0]),
                    self._ensure_mem(self.my_addr, string)])

    # Test 1
    def init1(self):
        return self.my_init(self.my_string)

    def check1(self):
        return self.my_check(self.my_string)

    # Test 2
    def init2(self):
        return self.my_init(self.my_string2)

    def check2(self):
        return self.my_check(self.my_string2)

    # Properties
    func = "atoi"
    tests = TestSetTest(init1, check1) & TestSetTest(init2, check2)
# Test classes exported to the Sibyl test loader.
TESTS = [TestAbs, TestA64l, TestAtoi]
|
cea-sec/Sibyl
|
sibyl/test/stdlib.py
|
Python
|
gpl-3.0
| 2,517
|
'''
Manager of text selection controls.
Many text selections can be displayed.
One-to-one with text glyphs.
Only one active: receiving keyboard events.
Generally, when the pointer is in a TextMorph.
Note distinction between TextMorph and TextGlyph
'''
'''
Copyright 2010, 2011 Lloyd Konneker
This file is part of Pensool.
Pensool is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
'''
from decorators import *
# Module-level registry: TextGlyph -> TextSelectControl (one control per glyph).
text_select = {}
# The single control currently receiving keyboard events, or None.
active_text_select = None
@dump_event
def activate_select_for_text(direction, text = None):
  '''
  Activate (direction truthy) the selection control registered for *text*,
  or deactivate the current one (direction falsy).
  '''
  global active_text_select
  if direction:
    try:
      active_text_select = text_select[text]
    except KeyError:
      # Python 2 print statement -- this module predates Python 3.
      print "Text glyph without a text select?"
  else:
    active_text_select = None
def get_active_select():
  '''
  Return the selection control currently receiving keyboard events (or None).
  '''
  return active_text_select
def new_select(control, text, index):
  '''
  Register *control* as the selection control for *text*.
  (The *index* position is accepted but not used here.)
  '''
  text_select[text] = control
|
bootchk/pensool
|
source/gui/manager/textselect.py
|
Python
|
gpl-3.0
| 1,169
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import datetime
import getpass
import os
import random
import sys
import time
import win32gui
from json import load
from platform import uname
from time import sleep
try:
# For Python 3.0 and later
from urllib.request import urlopen
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen
import pafy
import pyautogui
import selenium.webdriver.support.ui as ui
import win32con
from colorama import init, Fore, Back, Style
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
import rasdial
from list_timezone import LIST_TIME_ZONE
from config import SCREEN_RESOLUTION # config.py
from config import USER_PASS
from config import PURE_VPN_NAME
from config import PIA_VPN_NAME
from screen_resolution import ScreenRes
import subprocess
import shutil
import errno
import smtplib
# Initialize colorama so ANSI color codes work on the Windows console.
init()
def send_email_alert():
    """Best-effort email alert that this machine could not connect.

    Failures are deliberately swallowed: alerting must never crash the bot.
    SECURITY(review): SMTP credentials are hard-coded in source -- move them
    to configuration / environment variables.
    """
    try:
        fromaddr = 'vu.nomos@gmail.com'
        toaddrs = 'vunguyen.xbt@gmail.com'
        # uname()[1] is the local hostname, used to identify the machine.
        text = uname()[1] + ' could not connect!!!'
        msg = 'Subject: {}\n\n{}'.format('AutoClicker', text)
        username = 'vu.nomos@gmail.com'
        password = 'Params$&#!'
        server = smtplib.SMTP('smtp.gmail.com:587')
        server.ehlo()
        server.starttls()
        server.login(username, password)
        server.sendmail(fromaddr, toaddrs, msg)
        server.close()
    except:
        pass
def copyanything(src, dst):
    """Copy *src* to *dst*.

    Directory trees go through copytree (skipping Firefox lock files);
    when *src* is a plain file (ENOTDIR), fall back to a single-file copy.
    Any other OSError is silently ignored (best effort).
    """
    ignore_locks = shutil.ignore_patterns("parent.lock", "lock", ".parentlock")
    try:
        shutil.copytree(src, dst, ignore=ignore_locks)
    except OSError as exc:
        if exc.errno != errno.ENOTDIR:
            return
        shutil.copy(src, dst)
def restore_profile():
    """Replace the current Firefox profile with a random saved one.

    Picks a random profile folder from ressources\\Profiles\\, wipes the
    first profile under the user's AppData Mozilla directory, then copies
    the saved profile in its place. Windows-only paths; per-file delete
    errors are swallowed (files may be locked by Firefox).
    """
    # Random index into the saved profiles directory (0-based folder names).
    profile_number = str(random.randint(0, len(os.listdir('ressources\Profiles\\')) - 1))
    user_name = getpass.getuser()
    path_profil = 'C:\Users\\' + user_name + '\AppData\Roaming\Mozilla\Firefox\Profiles\\'
    # NOTE(review): assumes exactly one profile folder exists; [0] picks the first.
    profil_name = os.listdir(path_profil)[0]
    folder = path_profil + profil_name
    # Delete the profile contents file by file, ignoring locked entries.
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except:
            pass
    # Then try to remove the (hopefully now empty) folder itself.
    try:
        shutil.rmtree(folder)
    except:
        pass
    print(Back.BLACK + Fore.LIGHTYELLOW_EX + Style.BRIGHT + 'Profile: ' + profile_number + Style.RESET_ALL)
    # Only restore when the old folder is really gone.
    if not os.path.exists(folder):
        copyanything('ressources\Profiles\\' + profile_number, folder)
def get_title_clip(channel):
    """Pick a random, non-empty video title for *channel* and return a
    YouTube search URL for it.

    Reads ressources/TitlesYoutube/<channel>.txt; retries until a usable
    line is found (sets the TITLE_YOUTUBE global as a side effect).
    NOTE(review): if the file is missing or has no usable line this loops
    forever -- consider a retry cap.
    """
    global TITLE_YOUTUBE
    load_result = False
    search_link = 'https://www.youtube.com/results?search_query='
    while load_result is False:
        try:
            links_tinyurl = tuple(open('ressources/TitlesYoutube/' + str(channel) + '.txt', 'r'))
            # Skip line 0 (randint starts at 1) -- presumably a header; confirm.
            random_int = random.randint(1, len(links_tinyurl) - 1)
            if 'undefined' not in links_tinyurl[random_int].strip() and links_tinyurl[random_int].strip() is not None \
                    and links_tinyurl[random_int].strip() != '':
                TITLE_YOUTUBE = links_tinyurl[random_int].strip()
                load_result = True
        except:
            pass
    return search_link + TITLE_YOUTUBE
def get_random_vpn(name):
    """Return a random server from *name*, a dict keyed 1..len(name)."""
    choice = random.randint(1, len(name))
    return name.get(choice)
def check_ping_is_ok():
    """Ping bing.com once and return True when it answers.

    NOTE(review): when the ping fails (non-zero exit) this falls through and
    returns None, not False; and os.system rarely raises, so the except
    branch (VPN reconnect) is effectively dead. Also PURE_VPN_NAME is a dict
    imported from config -- comparing it to 0 looks wrong; confirm intent.
    """
    print('Check PING...')
    hostname = 'bing.com'
    try:
        # 'ping -n 1' is the Windows single-ping form.
        response = os.system("ping -n 1 " + hostname)
        if response == 0:
            return True
    except:
        if PURE_VPN_NAME == 0:
            connect_openvpn()
        else:
            connect_purevpn()
def connect_purevpn():
    """Connect to a PureVPN (or PIA) server via Windows rasdial.

    Chooses server and credentials from the machine number and config,
    retries up to 4 times (emailing an alert from attempt 3 on), and only
    succeeds once ping and timezone sync both pass. No-op unless the
    PUREVPN config flag is 1.
    """
    if PUREVPN == 1:
        load_result = False
        rasdial.disconnect()
        # Machines are split into len(USER_PASS) groups of roughly equal size.
        division = round(float(TOTAL_CHANNEL) / len(USER_PASS))
        print('Current VPN: ' + str(rasdial.get_current_vpn()))
        counter_connect = 0
        while load_result is False and counter_connect < 4:
            if counter_connect >= 2:
                send_email_alert()
            counter_connect += 1
            rasdial.disconnect()
            sleep(1)
            # Credential/account selection depends on which user config and
            # machine-number range this instance falls into.
            if USER_CONFIG == 'VUNPA' and NUMBER_MACHINE <= TOTAL_CHANNEL and ADS_BOTTOM == 1 and NUMBER_MACHINE <= 15:
                server = get_random_vpn(PURE_VPN_NAME)
                if NUMBER_MACHINE <= division:
                    value = 1
                elif division < NUMBER_MACHINE <= TOTAL_CHANNEL - (TOTAL_CHANNEL / len(USER_PASS)):
                    value = 2
                else:
                    value = 3
                user = USER_PASS.get(value)[0]
                password = USER_PASS.get(value)[1]
            elif USER_CONFIG == 'VUNPA' and NUMBER_MACHINE >= 20:
                # High machine numbers rotate accounts by NUMBER_MACHINE mod 20.
                division = NUMBER_MACHINE % 20
                if division == 0:
                    value = 1
                elif division == 1:
                    value = 2
                elif division == 2:
                    value = 3
                else:
                    value = random.randint(1, 3)
                server = get_random_vpn(PURE_VPN_NAME)
                user = USER_PASS.get(value)[0]
                password = USER_PASS.get(value)[1]
            elif USER_CONFIG != 'VUNPA' or (USER_CONFIG == 'VUNPA' and ADS_BOTTOM == 1) or ADS_BOTTOM == 0:
                # Fallback: PIA with hard-coded credentials.
                # SECURITY(review): credentials in source; move to config.
                server = get_random_vpn(PIA_VPN_NAME)
                user = 'x3569491'
                password = 'rUTPQnvnv7'
                # server = 'HMA'
                # user = 'avestergrd'
                # password = 'vESsRzDB'
            rasdial.connect(server, user, password)  # connect to a vpn
            sleep(1)
            if check_ping_is_ok() is True:
                # if check_country_is_ok() is True:
                if set_zone() is True:
                    load_result = True
def connect_openvpn_purevpn():
    """Connect to a random PureVPN endpoint through the OpenVPN client.

    Kills any running OpenVPN/rasdial session, launches openvpn.exe with a
    random server from CONFIG_IP_PURE, then loops until ping, country and
    timezone checks all pass. No-op unless the OPENVPN config flag is 1.
    """
    if OPENVPN == 1:
        load_result = False
        while load_result is False:
            try:
                print('Try to Disconnect OpenVPN')
                rasdial.disconnect()  # Disconnect params_PureVPN first
                subprocess.check_output("taskkill /im openvpn.exe /F", shell=True)
            except:
                pass
            print('Connect OpenVPN')
            cmd = '"C:/Program Files/OpenVPN/bin/openvpn.exe"'
            value = random.randint(0, len(CONFIG_IP_PURE) - 1)
            print('Random Server: ' + CONFIG_IP_PURE[value].strip())
            # Only 'pointtoserver' (PureVPN) hostnames get a command line built;
            # other entries fall through and retry with a new random pick.
            if 'pointtoserver' in CONFIG_IP_PURE[value].strip():
                parameters = ' --client --dev tun --remote ' + CONFIG_IP_PURE[value].strip() + ' --port 53' + \
                             ' --proto udp --nobind --persist-key --persist-tun ' \
                             '--tls-auth ressources/params_PureVPN/Wdc.key 1 --ca ressources/params_PureVPN/ca.crt' + \
                             ' --cipher AES-256-CBC --comp-lzo --verb 1 --mute 20 --float --route-method exe' + \
                             ' --route-delay 2 --auth-user-pass ressources/params_PureVPN/auth.txt ' + \
                             '--auth-retry interact' + \
                             ' --explicit-exit-notify 2 --ifconfig-nowarn --auth-nocache '
                cmd += parameters
                try:
                    subprocess.Popen(cmd)
                    print('Please wait to connect to OpenVPN...')
                    # Give the tunnel ~8 seconds to come up.
                    countdown(8)
                except:
                    pass
                if check_ping_is_ok() is True:
                    if check_country_is_ok() is True:
                        if set_zone() is True:
                            load_result = True
def connect_openvpn():
    """Connect to a random PIA endpoint through the OpenVPN client.

    Retries up to 4 times (emailing an alert from attempt 3 on). Picks the
    server list from CONFIG_IP or CONFIG_IP_VIEW depending on ADS_BOTTOM,
    builds a PIA-specific OpenVPN command line, and succeeds once ping and
    timezone sync pass.
    """
    if OPENVPN == 1 or ADS_BOTTOM == 0:
        # if NUMBER_MACHINE > TOTAL_CHANNEL or ADS_BOTTOM == 0 or PUREVPN == 0:
        load_result = False
        counter_connect = 0
        while load_result is False and counter_connect < 4:
            if counter_connect >= 2:
                send_email_alert()
            counter_connect += 1
            if sys.platform == 'win32':
                try:
                    print('Try to Disconnect OpenVPN')
                    rasdial.disconnect()  # Disconnect params_PureVPN first
                    subprocess.check_output("taskkill /im openvpn.exe /F", shell=True)
                except:
                    pass
                # Refresh the DHCP lease so routes are clean before reconnecting.
                subprocess.check_output('ipconfig /release', shell=True)
                subprocess.check_output('ipconfig /renew', shell=True)
            print('Connect OpenVPN')
            if sys.platform == 'win32':
                cmd = '"C:/Program Files/OpenVPN/bin/openvpn.exe"'
            else:
                cmd = '/etc/openvpn/openvpn'
            # View-only mode uses the dedicated server list.
            if ADS_BOTTOM == 0:
                USE_IP = CONFIG_IP_VIEW
            else:
                USE_IP = CONFIG_IP
            value = random.randint(0, len(USE_IP) - 1)
            print('Random Server: ' + USE_IP[value].strip())
            # PIA public endpoints and private endpoints need different flags.
            if 'privateinternetaccess' in USE_IP[value].strip():
                parameters = ' --client --dev tun --proto udp --remote ' \
                             + USE_IP[value].strip() + \
                             ' --port 1198 --resolv-retry infinite --nobind --persist-key --persist-tun' \
                             ' --cipher aes-128-cbc --auth sha1 --tls-client --remote-cert-tls server' \
                             ' --auth-user-pass ressources/params_PIA/data/auth.txt ' \
                             '--comp-lzo --verb 1 --reneg-sec 0' \
                             ' --crl-verify ressources/params_PIA/data/crl.rsa.2048.pem' \
                             ' --auth-nocache' \
                             ' --ca ressources/params_PIA/data/ca.rsa.2048.crt' \
                             # ' --block-outside-dns'
            else:
                parameters = ' --tls-client --client --dev tun --link-mtu 1500' \
                             ' --remote ' + USE_IP[value].strip() + \
                             ' --proto udp --port 1197' \
                             ' --lport 53 --persist-key --persist-tun --ca ressources/params_PIA/data/ca.crt ' \
                             '--comp-lzo --mute 3' \
                             ' --auth-user-pass ressources/params_PIA/data/auth.txt' \
                             ' --reneg-sec 0 --route-method exe --route-delay 2' \
                             ' --verb 3 --log c:/log.txt --status c:/stat.db 1 --auth-nocache' \
                             ' --crl-verify ressources/params_PIA/data/crl.pem ' \
                             '--remote-cert-tls server --block-outside-dns' \
                             ' --cipher aes-256-cbc --auth sha256'
            cmd += parameters
            try:
                subprocess.Popen(cmd)
                print('Please wait to connect to OpenVPN...')
                # Give the tunnel ~8 seconds to come up.
                countdown(8)
            except:
                pass
            if check_ping_is_ok() is True:
                # if check_country_is_ok() is True:
                if set_zone() is True:
                    load_result = True
def get_random_resolution():
    """Return a random (width, height) pair from SCREEN_RESOLUTION (keyed 1..n)."""
    choice = random.randint(1, len(SCREEN_RESOLUTION))
    entry = SCREEN_RESOLUTION.get(choice)
    return entry[0], entry[1]
def get_recalcul_xy(x, y):
    """Scale a point from the configured reference resolution (X_SCREEN x
    Y_SCREEN) to the actual screen (X_SCREEN_SET x Y_SCREEN_SET)."""
    scaled_x = x * X_SCREEN_SET / X_SCREEN
    scaled_y = y * Y_SCREEN_SET / Y_SCREEN
    return scaled_x, scaled_y
def get_info_length_youtube(url_real_youtube):
    """Return the duration (in seconds) of the YouTube video at the given
    URL, as reported by pafy."""
    video = pafy.new(url_real_youtube)
    return video.length
def set_screen_resolution():
    """Print the current primary screen resolution, then switch it to a
    randomly chosen resolution from the configured list."""
    print('Primary screen resolution: {}x{}'.format(
        *ScreenRes.get()
    ))
    width, height = get_random_resolution()
    ScreenRes.set(width, height)
def search_youtube(url):
    """Open the YouTube search *url* in BROWSER and click the result whose
    title matches TITLE_YOUTUBE.

    Tries up to 2 times; returns True when the result link was found
    (the click itself is best-effort).
    """
    load_result = False
    count = 0
    while load_result is False and count <= 1:
        count += 1
        try:
            BROWSER.get(url)
            print(Back.BLACK + Fore.LIGHTGREEN_EX + Style.BRIGHT + TITLE_YOUTUBE + Style.RESET_ALL)
            # NOTE(review): TITLE_YOUTUBE is interpolated into the XPath
            # unescaped -- titles containing a quote will break the locator.
            xpath_search = "//a[@title=" + "'" + TITLE_YOUTUBE + "']"
            first_link = ui.WebDriverWait(BROWSER, 5).until(lambda BROWSER: BROWSER.find_element_by_xpath(xpath_search))
            try:
                first_link.click()
                print('**** Click is done! ***')
            except:
                pass
            # first_link.send_keys(Keys.RETURN)
            # print('**** Click is done! ***')
            load_result = True
        except:
            pass
    return load_result
def click_button_skipads():
    """Best-effort click on YouTube's 'Skip Ads' button if it appears
    within 3 seconds; all failures are swallowed."""
    try:
        first_result = ui.WebDriverWait(BROWSER, 3).until(
            lambda BROWSER: BROWSER.find_element_by_class_name('videoAdUiSkipButton'))
        # NOTE(review): these coordinates are computed but never used.
        x, y = get_recalcul_xy(980, 559)
        try:
            first_result.click()
        except:
            pass
    except:
        pass
def random_sleep():
    """Pause for a random 3-5 second interval (human-like delay)."""
    sleep(random.randint(3, 5))
def random_small_sleep():
    """Pause for a random 1-2 second interval (short human-like delay)."""
    sleep(random.randint(1, 2))
def set_zone():
    """Geolocate the current public IP and sync the Windows timezone to it.

    Looks up latitude/longitude via freegeoip, resolves the timezone through
    the Google Time Zone API, and applies it with ``tzutil /s``.

    Returns:
        True when the timezone was applied, False on any failure (network
        error, unknown zone id, tzutil failure).
    """
    try:
        link = 'http://freegeoip.net/json/'
        latitude = load(urlopen(link))['latitude']
        print(Back.BLACK + Fore.LIGHTGREEN_EX + Style.BRIGHT + '[Latitude] => ' + str(latitude) + Style.RESET_ALL)
        longitude = load(urlopen(link))['longitude']
        print(Back.BLACK + Fore.LIGHTWHITE_EX + Style.BRIGHT + '[Longitude] => ' + str(longitude) + Style.RESET_ALL)
        timestamp = str(time.time())
        # Public IP & DateTime
        ip = urlopen('http://ip.42.pl/raw').read()
        print(Back.BLACK + Fore.LIGHTGREEN_EX + Style.BRIGHT + '[IP] => ' + ip + Style.RESET_ALL)
        # Google Time Zone API. BUG FIX: the query string previously contained
        # the mojibake '×tamp=' (an HTML-entity-decoded '&times'), so the
        # API's required '&timestamp=' parameter was never sent.
        # SECURITY(review): the API key is hard-coded; move it to config.
        link = 'https://maps.googleapis.com/maps/api/timezone/json?location=' + str(latitude) + ',' + \
               str(longitude) + '&timestamp=' + timestamp + '&key=AIzaSyAC2ESW2jOFDdABT6hZ4AKfL7U8jQRSOKA'
        timeZoneId = load(urlopen(link))['timeZoneId']
        zone_to_set = LIST_TIME_ZONE.get(timeZoneId)
        print(Back.BLACK + Fore.LIGHTCYAN_EX + Style.BRIGHT + 'Synchronize ' + zone_to_set + Style.RESET_ALL)
        if zone_to_set.strip() != '':
            subprocess.check_output("tzutil /s " + '"' + zone_to_set + '" ', shell=True)
        return True
    except:
        return False
def countdown(timing):
    """Block for *timing* + 1 seconds, printing a mm:ss countdown in place
    (carriage-return overwrite, no newline)."""
    remaining = timing
    while remaining >= 0:
        mins, secs = divmod(remaining, 60)
        timeformat = '{:02d}:{:02d}'.format(mins, secs)
        time.sleep(1)
        remaining -= 1
        print(Fore.LIGHTCYAN_EX + Back.BLACK + 'Please wait...' + timeformat + Style.RESET_ALL, end='\r')
def get_params(param):
    """Look up *param* in the first 'DEFAULT' section of the loaded JSON config."""
    default_section = CONFIG_JSON['DEFAULT'][0]
    return default_section[param]
def main(optional):
    """One full run of the clicker: load config, connect a VPN, then for
    each channel open PhantomJS, search/watch a handful of videos.

    Args:
        optional: 1 enables the ADS_BOTTOM config value; 0 forces
            view-only mode (ADS_BOTTOM = 0, OpenVPN).
    """
    global BROWSER
    global ADS_BOTTOM
    global ADS_RIGHT
    global TOTAL_CHANNEL
    global PUREVPN
    global OPENVPN
    global X_SCREEN_SET
    global Y_SCREEN_SET
    global X_SCREEN
    global Y_SCREEN
    global KEYWORDS
    global CONFIG_IP
    global CONFIG_IP_VIEW
    global CONFIG_IP_PURE
    global CONFIG_JSON
    global USER_CONFIG
    global COUNTER_TOURS
    # Load the JSON configuration used by get_params().
    with open('config_auto_clicker.json') as data_file:
        CONFIG_JSON = load(data_file)
    USER_CONFIG = get_params('USER_CONFIG')
    if optional == 1:
        ADS_BOTTOM = int(get_params('ADS_BOTTOM'))
    else:
        ADS_BOTTOM = 0
    ADS_RIGHT = int(get_params('ADS_RIGHT'))
    TOTAL_CHANNEL = int(get_params('TOTAL_CHANNEL'))
    PUREVPN = int(get_params('PureVPN'))
    OPENVPN = int(get_params('OpenVPN'))
    if ADS_BOTTOM == 1:
        BOUCLE_SUPER_VIP = int(get_params('BOUCLE_SUPER_VIP'))
    else:
        # View-only mode: force OpenVPN and a single outer loop.
        PUREVPN = 0
        OPENVPN = 1
        BOUCLE_SUPER_VIP = 1
    X_SCREEN = int(get_params('WIDTH'))
    Y_SCREEN = int(get_params('HEIGHT'))
    # Actual screen size, for coordinate rescaling in get_recalcul_xy().
    X_SCREEN_SET, Y_SCREEN_SET = pyautogui.size()
    CONFIG_IP = tuple(open('ressources/params_PIA/list_PIA.txt', 'r'))
    CONFIG_IP_VIEW = tuple(open('ressources/params_PIA/list_PIA_VIEW.txt', 'r'))
    KEYWORDS = tuple(open('ressources/keyword.txt', 'r'))
    # Resize Screen and set Always on TOP
    set_screen_resolution()
    print(Back.BLACK + Fore.LIGHTBLUE_EX + Style.NORMAL + '=' * 37 + Style.RESET_ALL)
    print(Fore.LIGHTWHITE_EX + '=' * 8 + ' ' + 'Phantom Viewer [AVU]' + ' ' + '=' * 7 + Style.RESET_ALL)
    print(Back.BLACK + Fore.LIGHTRED_EX + Style.NORMAL + '=' * 37 + Style.RESET_ALL)
    print(
        Back.BLACK + Fore.LIGHTCYAN_EX + Style.BRIGHT + "Number Machine: " + str(NUMBER_MACHINE) + '' + Style.RESET_ALL)
    print(Back.BLACK + Fore.LIGHTCYAN_EX + Style.BRIGHT + "Total Channel: " +
          str(TOTAL_CHANNEL) + '' + Style.RESET_ALL)
    print(Back.BLACK + Fore.LIGHTRED_EX + Style.BRIGHT + '-----------[MODE] VIEW ONLY----------' +
          Style.RESET_ALL)
    for z in range(BOUCLE_SUPER_VIP):
        # Pick the VPN backend for this pass.
        if (NUMBER_MACHINE > TOTAL_CHANNEL and PUREVPN != 1) or ADS_BOTTOM == 0 or PUREVPN == 0 or optional == 0:
            connect_openvpn()  # OpenVPN
        else:
            connect_purevpn()  # params_PureVPN
        for i in range(NUMBER_MACHINE, TOTAL_CHANNEL + NUMBER_MACHINE):
            # Map the running index back into 1..TOTAL_CHANNEL (wrap-around).
            file_channel = i
            if i <= TOTAL_CHANNEL:
                file_channel = i
            else:
                file_channel = (i + TOTAL_CHANNEL) % TOTAL_CHANNEL
                if file_channel == 0:
                    file_channel = TOTAL_CHANNEL
            # Open PhantomJS with custom headers (Windows only).
            if sys.platform == 'win32':
                headers = {'Accept': '*/*',
                           'Accept-Encoding': 'gzip, deflate, sdch',
                           'Accept-Language': 'en-US,en;q=0.8',
                           'Cache-Control': 'max-age=0',
                           'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36'
                           }
                # NOTE(review): enumerate over a dict yields (index, key-name),
                # so this sets customHeaders.0 = 'Accept' etc. -- probably
                # meant headers.items(); confirm before fixing.
                for key, value in enumerate(headers):
                    webdriver.DesiredCapabilities.PHANTOMJS['phantomjs.page.customHeaders.{}'.format(key)] = value
                BROWSER = webdriver.PhantomJS()
                BROWSER.set_window_size(1920, 1080)
            print(Fore.LIGHTWHITE_EX + '.' * 37 + Style.RESET_ALL)
            print(Back.BLACK + Fore.LIGHTGREEN_EX + Style.BRIGHT + ' ' * 9 + ' Tours -> ' +
                  Style.RESET_ALL + Back.BLACK + Fore.LIGHTYELLOW_EX + str(COUNTER_TOURS) + '' + Style.RESET_ALL)
            print(Fore.LIGHTWHITE_EX + '.' * 37 + Style.RESET_ALL)
            # Watch 5-7 random videos from this channel's title list.
            total_key = random.randint(5, 7)
            for j in range(total_key):
                COUNTER_TOURS += 1
                try:
                    url = get_title_clip(str(file_channel))
                    result_search_youtube = search_youtube(url)
                    countdown(10)
                    try:
                        BROWSER.get_screenshot_as_file("1-screen.png")
                    except:
                        print('Error Screenshot')
                        pass
                    print(Back.BLACK + Fore.LIGHTYELLOW_EX + Style.BRIGHT + 'URL VIEW: ' + str(j) + ' >> ' +
                          Style.RESET_ALL + Back.BLACK + Fore.LIGHTWHITE_EX + url + '' + Style.RESET_ALL)
                    click_button_skipads()
                    # Simulated watch time: 20-45 seconds.
                    countdown(random.randint(20, 45))
                    try:
                        BROWSER.get_screenshot_as_file("2-screen.png")
                    except:
                        print('Error Screenshot')
                        pass
                except:
                    pass
            # Always try to close the browser before the next channel.
            try:
                BROWSER.quit()
            except:
                pass
########################################################################################################################
# Main Program #
# Arguments: #
# argv[1]: NUMBER_MACHINE #
# #
########################################################################################################################
if __name__ == "__main__":
    # NOTE(review): `global` at module scope is a no-op; NUMBER_MACHINE is a
    # module-level name either way.
    global NUMBER_MACHINE
    COUNTER_TOURS = 0
    # Machine number comes from argv[1] or an interactive prompt.
    if len(sys.argv) > 1:
        NUMBER_MACHINE = int(sys.argv[1])
    else:
        print(Back.BLACK + Fore.LIGHTWHITE_EX + ' ' * 3 + '[ Please enter the Machine Number: ]' +
              Back.LIGHTRED_EX + Fore.LIGHTWHITE_EX)
        print(Style.RESET_ALL)
        # raw_input: this script targets Python 2.
        NUMBER_MACHINE = int(raw_input())
    # Run (effectively) forever in view-only mode.
    for i in range(0, 10000):
        main(0)
|
pavnguyen/Auto-Clicker
|
phantom_views.py
|
Python
|
gpl-3.0
| 21,001
|
from django.conf.urls import patterns, url
from publications import views
from django.contrib.auth.decorators import login_required
# URL routes for the publications app (articles and authors CRUD).
# NOTE(review): patterns('') is deprecated since Django 1.8 and removed in
# 1.10 -- a plain list of url()/path() entries is the modern form.
urlpatterns = patterns('',
    url(r'^articles/$', views.IndexView.as_view(), name='articleindex'),
    # ex: /publications/articles/5/
    url(r'^articles/(?P<pk>\d+)/$', views.ArticleDetailView.as_view(), name='article_detail'),
    #url(r'^articles/add/$', views.ArticleCreate.as_view(), name='article_add'),
    # Multi-step article creation wizard; login required.
    url(r'^articles/add/$',
        login_required(views.ArticleWizard.as_view(views.ARTICLE_WIZARD_FORMS)),
        name='article_add'),
    url(r'^articles/(?P<pk>\d+)/update/$', views.ArticleUpdate.as_view(), name='article_update'),
    url(r'^articles/(?P<pk>\d+)/delete/$', views.ArticleDelete.as_view(), name='article_delete'),
    url(r'^authors/$', views.AuthorList.as_view(), name='authorindex'),
    url(r'^authors/(?P<pk>\d+)/$', views.AuthorDetailView.as_view(), name='author_detail'),
    #url(r'^authors/add/$', views.AuthorCreate.as_view(), name='author_add'),
    # NOTE(review): AuthorCreate is passed without .as_view() -- either it is
    # a function-based view or the call is missing; confirm in views.py.
    url(r'^authors/add/$', views.AuthorCreate, name='author_add'),
    url(r'^authors/(?P<pk>\d+)/update/$', views.AuthorUpdate.as_view(), name='author_update'),
    url(r'^authors/(?P<pk>\d+)/delete/$', views.AuthorDelete.as_view(), name='author_delete'),
)
|
andreynech/salutem
|
publications/urls.py
|
Python
|
gpl-3.0
| 1,289
|
# -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Jacob Mendt
Created on 04.08.15
@author: mendt
'''
import os
import uuid
from mapscript import MS_IMAGEMODE_RGB, MS_OFF, MS_PIXELS, MS_LAYER_RASTER, layerObj, mapObj, MS_ON, outputFormatObj
from georeference.utils.exceptions import MapfileBindingInitalizationException
# Default JPEG output format definition passed to mapscript's outputFormatObj.
OutputFormat_JPEG = {"NAME":"jpeg","MIMETYPE":"image/jpeg","DRIVER":"AGG/JPEG","EXTENSION":"jpg",
                 "IMAGEMODE":MS_IMAGEMODE_RGB,"TRANSPARENT":MS_OFF}
# Default WMS metadata applied to generated mapfiles.
# NOTE(review): "wms_titel" looks like a typo for "wms_title" -- the key is
# written verbatim into the mapfile, so verify consumers before renaming.
Metadata = {"wms_srs":"EPSG:4326","wms_onlineresource":"http://localhost/cgi-bin/mapserv?",
            "wms_enable_request":"*","wms_titel":"Temporary Messtischblatt WMS"}
def createMapfile(layername, datapath, georefTargetSRS, mapfileTemplate, mapfileDir, mapfileParams):
    """ Function creates a temporary mapfile and returns its WMS online resource URL.

    :type layername: str
    :type datapath: str
    :type georefTargetSRS: int
    :type mapfileTemplate: str
    :type mapfileDir: str
    :type mapfileParams: dict
    :raises MapfileBindingInitalizationException: if mapfileParams is empty """
    # The previous ``try: ... except: raise`` wrapper was a no-op (exceptions
    # propagate anyway), so it has been removed.
    mapfile = MapfileBinding(mapfileTemplate, mapfileDir, **mapfileParams)
    mapfile.addLayerToMapfile(datapath, layername, georefTargetSRS)
    return mapfile.saveMapfile()
class MapfileBinding:
    """Wrapper around a mapscript ``mapObj`` for building temporary WMS mapfiles.

    A new mapfile is copied from a template, given a unique name, and then
    populated with metadata, an optional output format, and raster layers."""

    def __init__(self, src_mapfilePath, dest_mapfileFolder, **kwargs):
        # init wms service name -- unique per instance
        self.servicename= "wms_%s.map"%uuid.uuid4()
        # init the mapfile based on a template file
        self.mapfilepath = os.path.join(dest_mapfileFolder, self.servicename)
        self.__initMapfile__(src_mapfilePath, self.mapfilepath)
        if len(kwargs) > 0:
            self.__initMapfileParameter__(kwargs)
        else:
            raise MapfileBindingInitalizationException("Missing mapfile information!")

    def __initMapfile__(self, src_mapfilePath, dest_mapfilePath):
        """Copy the template mapfile to the destination path and reload it."""
        mapfile = mapObj(src_mapfilePath)
        self.saveMapfile(mapfile)
        self.mapfile = mapObj(self.mapfilepath)

    def __initMapfileParameter__(self, kwargs):
        """
        Set the option parameter for the map element
        """
        # generic mapfile options
        self.mapfile.units = MS_PIXELS
        self.mapfile.status = MS_ON
        #if "OUTPUTFORMAT" in kwargs:
        #    self.__addOutputFormat__(kwargs["OUTPUTFORMAT"])
        if "METADATA" in kwargs:
            self.__addMetadata__(kwargs["METADATA"])

    def __addMetadata__(self, dictMD):
        """Copy WMS metadata into the mapfile; the online resource is rewritten
        to point at this instance's mapfile path."""
        self.wms_url = dictMD["wms_onlineresource"]+"map=%s"%self.mapfilepath
        for key in dictMD:
            # BUG FIX: this previously compared with ``is``, which only worked
            # by accident of CPython string interning; use ``==`` for values.
            if key == "wms_onlineresource":
                self.mapfile.web.metadata.set(key,self.wms_url)
            else:
                self.mapfile.web.metadata.set(key,dictMD[key])

    def __addOutputFormat__(self, dictOutFormat):
        """
        Function adds a outputformat object to the mapfile.
        @param dictOutFormat: Represents a dictionary with the outputformat arguments. It should
            contains the keys:
            @param NAME:
            @param MIMETYPE:
            @param DRIVER:
            @param EXTENSION:
            @param IMAGEMODE:
            @param TRANSPARENT:
        @raise MapfileBindingInitalizationException: if DRIVER is missing
        """
        # creates a OutputFormatObject and adds the parameter to it
        if "DRIVER" in dictOutFormat:
            outFormatObj = outputFormatObj(dictOutFormat["DRIVER"])
        else:
            raise MapfileBindingInitalizationException("Missing Driver for OutputFormat Element")
        if "NAME" in dictOutFormat:
            outFormatObj.name = dictOutFormat["NAME"]
        if "MIMETYPE" in dictOutFormat:
            outFormatObj.mimetype = dictOutFormat["MIMETYPE"]
        if "EXTENSION" in dictOutFormat:
            outFormatObj.extension = dictOutFormat["EXTENSION"]
        if "IMAGEMODE" in dictOutFormat:
            outFormatObj.imagemode = dictOutFormat["IMAGEMODE"]
        if "TRANSPARENT" in dictOutFormat:
            outFormatObj.transparent = dictOutFormat["TRANSPARENT"]
        # adds the OutputFormatObject to the mapfile
        self.mapfile.appendOutputFormat(outFormatObj)

    def saveMapfile(self, mapfile = None):
        """Persist a mapfile to disk.

        With an explicit ``mapObj`` argument, save it and return None; otherwise
        save this instance's mapfile and return its WMS online resource URL."""
        # ``isinstance`` already rejects None, so the previous ``!= None``
        # check was redundant.
        if isinstance(mapfile, mapObj):
            mapfile.save(self.mapfilepath)
            return None
        else:
            self.mapfile.save(self.mapfilepath)
            return self.mapfile.getMetaData("wms_onlineresource")

    def addLayerToMapfile(self, dataPath, layerName,georefTargetSRS):
        """ Function adds a raster layer to the mapfile
        :type dataPath: str
        :type layerName: str
        :type georefTargetSRS: int """
        layer = layerObj()
        layer.data = dataPath
        layer.type = MS_LAYER_RASTER
        layer.name = layerName
        layer.units = MS_PIXELS
        layer.status = MS_OFF
        layer.setProjection("init=epsg:%s"%georefTargetSRS)
        self.mapfile.insertLayer(layer)
|
slub/vk2-georeference
|
georeference/utils/process/mapfile.py
|
Python
|
gpl-3.0
| 4,870
|
import RPi.GPIO as GPIO
import os
import time
def alert(channel):
    """GPIO edge-detect callback: send a rate-limited SOS SMS burst.

    channel: BCM pin number that triggered (passed by RPi.GPIO; unused here).
    """
    global start1
    # Debounce: allow at most one alert burst every 30 seconds.
    if time.time()-start1 > 30:
        start1 = time.time()
        print('Motor Failure Detected: Imminent Threat to: Laundry')
        # Fire-and-forget SMS to three hard-coded numbers via textbelt.com.
        os.system("curl -X POST http://textbelt.com/text -d number=4158899309 -d message=\"SOS SMS: Zombie Apocalypse\nYour laundry is soggy!\"")
        os.system("curl -X POST http://textbelt.com/text -d number=4806289102 -d message=\"SOS SMS: Zombie Apocalypse\nYour laundry is soggy!\"")
        os.system("curl -X POST http://textbelt.com/text -d number=5038161171 -d message=\"SOS SMS: Zombie Apocalypse\nYour laundry is soggy!\"")
# Configure two input pins (BCM 23/24) with pull-downs and arm rising-edge
# detection; the alert() callback rate-limits itself via start1.
GPIO.setmode(GPIO.BCM)
GPIO.setup(23,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(24,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
start1 = 0.0
start2 = 0.0  # NOTE(review): start2 is never read -- candidate for removal
# BUG FIX: this was the Python 2-only statement form ``print"system armed\n"``,
# a SyntaxError on Python 3; the rest of the file already uses print().
print("system armed\n")
GPIO.add_event_detect(23,GPIO.RISING,callback = alert)
GPIO.add_event_detect(24,GPIO.RISING,callback = alert)
# Idle forever; edge callbacks run on GPIO's background thread.
while True:
    #raw_input("press enter to exit")
    time.sleep(1)
|
pdxfrog/imminentthreat
|
xxalert.py
|
Python
|
gpl-3.0
| 1,015
|
"""
This file contains the map page sub-class
"""
#####################################
# Imports
#####################################
# Python native imports
from PyQt5 import QtCore, QtWidgets, QtGui, QtWebEngine
from PyQt5.QtCore import QUrl
import logging
#####################################
# About Class Definition
#####################################
class Map(QtCore.QObject):
    """Wires the main window's map widget to a QML (Leaflet) map view."""
    def __init__(self, main_window):
        super(Map, self).__init__()
        # ########## Reference to top level window ##########
        self.main_window = main_window # type: QtWidgets.QMainWindow
        # ########## Get the settings instance ##########
        self.settings = QtCore.QSettings()
        # ########## Get the RoverBaseStation instance of the logger ##########
        self.logger = logging.getLogger("RoverBaseStation")
        # ########## References to GUI Elements ##########
        self.maps_view = self.main_window.map_label # type: QtWidgets.QLabel
        # ########## Class Variables ##########
        # Old static-pixmap approach, kept for reference:
        # self.map_view_pixmap = QtGui.QPixmap("Resources/Maps/mars_testing_site.png")
        #
        # self.maps_view.setPixmap(self.map_view_pixmap)
        # QtWebEngine must be initialized before any QML web content loads.
        QtWebEngine.QtWebEngine.initialize()
        # NOTE(review): type comment says QQuickView, which lives in QtQuick,
        # not QtWidgets -- confirm the actual widget type in the .ui file.
        self.map_view = self.main_window.leaflet_map # type: QtWidgets.QQuickView
        self.map_view.setSource(QUrl("Resources/UI/map_view.qml"))
        # ########## Set defaults on GUI Elements ##########
        self.__load_settings()
    def __load_settings(self):
        # No persisted settings yet; just log that configuration completed.
        self.logger.info("Map Interface Configured")
|
caperren/Archives
|
OSU Robotics Club/Mars Rover 2016-2017/ground_station/Interface/Map/MapCore.py
|
Python
|
gpl-3.0
| 1,606
|
from __future__ import print_function

# Print the two greeting lines in order.
for _line in ('Whazzup Mundo!', '8=====D~~~~'):
    print(_line)
|
mehese/PropertyCheck
|
helloworld.py
|
Python
|
gpl-3.0
| 84
|
# Django
from django.utils.translation import ugettext_lazy as _
# (value, label) choices for an icon selection field. Values correspond to
# icon asset names (dog/owner themed plus social-media squares); labels are
# the same strings, wrapped for translation.
ICON_CHOICE = [
    ("", _("Select an icon")),
    ("dog-and-a-man", _("dog-and-a-man")),
    ("dog-and-pets-house", _("dog-and-pets-house")),
    ("dog-bitting-a-stick-playing-with-a-man", _("dog-bitting-a-stick-playing-with-a-man")),
    ("dog-checked-by-a-veterinary", _("dog-checked-by-a-veterinary")),
    ("dog-having-a-bubbles-bath", _("dog-having-a-bubbles-bath")),
    ("dog-in-front-of-a-man", _("dog-in-front-of-a-man")),
    ("dog-learning-man-instructions", _("dog-learning-man-instructions")),
    ("dog-playing-with-a-man", _("dog-playing-with-a-man")),
    ("dog-pooping", _("dog-pooping")),
    (
        "dog-puppy-and-his-owner-looking-to-opposite-directions",
        _("dog-puppy-and-his-owner-looking-to-opposite-directions"),
    ),
    ("dog-puppy-sitting-in-front-of-his-man", _("dog-puppy-sitting-in-front-of-his-man")),
    ("dog-smelling-a-bone", _("dog-smelling-a-bone")),
    ("dog-with-belt-walking-with-a-man", _("dog-with-belt-walking-with-a-man")),
    ("dog-with-first-aid-kit-bag", _("dog-with-first-aid-kit-bag")),
    ("dog-with-owner", _("dog-with-owner")),
    ("man-carrying-a-dog-with-a-belt-to-walk", _("man-carrying-a-dog-with-a-belt-to-walk")),
    ("man-combing-a-dog", _("man-combing-a-dog")),
    ("man-on-his-knees-to-cuddle-his-dog", _("man-on-his-knees-to-cuddle-his-dog")),
    ("man-throwing-a-disc-and-dog-jumping-to-catch-it", _("man-throwing-a-disc-and-dog-jumping-to-catch-it")),
    ("paws-up-dog-playing-with-a-man", _("paws-up-dog-playing-with-a-man")),
    ("facebook-square", _("facebook-square")),
    ("google-plus-square", _("google-plus-square")),
    ("pinterest-square", _("pinterest-square")),
    ("twitter-square", _("twitter-square")),
]
|
bengosney/romrescue.org
|
icons/icons.py
|
Python
|
gpl-3.0
| 1,776
|
from kivy.uix.label import Label
from kivy.properties import DictProperty, StringProperty
from kivy.clock import Clock
from kivy.uix.screenmanager import Screen
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.scrollview import ScrollView
from kivy.logger import Logger
import requests
import time
from datetime import datetime
import dateutil.parser
from dateutil import tz
import pytz
import json
import locale
# Refresh from the API once the cached list of extremes shrinks to this size.
MIN_TIDES = 7
# Localised short labels for the two tide extremes (High/Low water).
TYPES_MAP = {"english": {"High": "HW", "Low": "LW"}, "french": { "High": "HM", "Low": "BM" }}
class TideException(Exception):
    """
    Generic class for Tide exception
    """

    def __init__(self, value):
        # Keep the raw payload; __str__ renders it on demand.
        self.value = value

    def __str__(self):
        return str(self.value)
class Tide(BoxLayout):
    """Widget showing a single tide extreme as '<type>\\n<weekday, HH:MM>'."""
    # Rendered text, bound into the kv layout.
    desc = StringProperty("")
    def __init__(self, **kwargs):
        super(Tide, self).__init__(**kwargs)
        # "english" or "french"; selects labels from TYPES_MAP.
        self.language = kwargs["language"]
        self.buildText(kwargs["summary"])
    def buildText(self, summary):
        """Format one extreme dict (keys 'date' ISO string, 'type') into desc."""
        # Convert the ISO timestamp to local time, e.g. "Monday, 14:05".
        summary["ldate"] = dateutil.parser.parse(summary["date"]).astimezone(tz.tzlocal()).strftime("%A, %H:%M")
        summary["type_i18n"] = TYPES_MAP[self.language][summary["type"]]
        self.desc = ("{type_i18n:s}\n{ldate:s}").format(**summary)
class TidesSummary(Screen):
    """Kivy screen showing the previous/next tide extremes around 'now',
    fetched from the worldtides.info API."""

    # API template: 48 h (172800 s) of extremes around the given position.
    tidesurl = "https://www.worldtides.info/api?extremes&lat={lat}&lon={lon}&length=172800&key={key}"
    timedata = DictProperty(None)
    next_t = DictProperty(None)
    prev_t = DictProperty(None)
    location = DictProperty(None)

    def __init__(self, **kwargs):
        # Init data by calling the API, then derive clock / next-extreme state.
        self.location = kwargs["location"]
        self.key = kwargs["key"]
        self.language = kwargs["language"]
        if self.language == "french":
            locale.setlocale(locale.LC_ALL, 'fr_FR.UTF-8')
        self.get_data()
        self.get_time()
        self.get_next()
        super(TidesSummary, self).__init__(**kwargs)
        self.timer = None
        self.tides_list = self.ids.tides_list
        self.build_tides_list()

    def buildURL(self, location):
        """Fill the API URL template from a location dict with 'coords'."""
        lon = location['coords']['lon']
        lat = location['coords']['lat']
        return self.tidesurl.format(key=self.key, lon=lon, lat=lat)

    def get_data(self):
        """Fetch tide extremes; raise TideException on API failure."""
        self.url_tides = self.buildURL(self.location)
        #with open('screens/tides/result.json') as data_file:
        # self.tides = json.load(data_file)
        self.tides = requests.get(self.url_tides).json()
        # BUG FIX: dict.has_key() was removed in Python 3; use the ``in``
        # operator (also valid on Python 2). Identity check for None.
        if self.tides is None or 'status' not in self.tides:
            raise TideException("Unknown error")
        if self.tides['status'] != 200:
            if 'error' in self.tides:
                raise TideException(self.tides['error'])
            else:
                raise TideException("Unknown error")
        return True

    def get_time(self):
        """Sets self.timedata to current time."""
        n = datetime.now()
        self.timedata["h"] = n.hour
        self.timedata["m"] = n.minute
        self.timedata["s"] = n.second
        # Re-derive prev/next once the current extreme has passed (UTC compare).
        n = datetime.utcnow()
        if hasattr(self, "next_extreme") and n >= self.next_extreme:
            self.get_next()

    def get_next(self):
        """Locate the previous and next extremes around now; prune stale entries."""
        if self.tides is None or self.tides['status'] != 200:
            self.prev_t = {}
            self.next_t = {}
            return False
        prev = None
        oldentries = []
        for extreme in self.tides['extremes']:
            date = dateutil.parser.parse(extreme['date'])
            if date > datetime.now(tz = tz.tzutc()):
                # Renamed from ``next`` to avoid shadowing the builtin.
                upcoming = extreme
                date = date.astimezone(tz.tzlocal())
                upcoming["h"] = date.hour
                upcoming["m"] = date.minute
                upcoming["s"] = date.second
                upcoming["type_i18n"] = TYPES_MAP[self.language][upcoming["type"]]
                self.next_extreme = dateutil.parser.parse(extreme['date']).replace(tzinfo=None)
                if prev is not None:
                    date = dateutil.parser.parse(prev['date'])
                    date = date.astimezone(tz.tzlocal())
                    prev["h"] = date.hour
                    prev["m"] = date.minute
                    prev["s"] = date.second
                    prev["type_i18n"] = TYPES_MAP[self.language][prev["type"]]
                    self.prev_t = prev
                else:
                    # ROBUSTNESS FIX: the very first listed extreme may already
                    # be in the future, in which case there is no previous one
                    # and the old code crashed on ``prev['date']``.
                    self.prev_t = {}
                self.next_t = upcoming
                break
            else:
                if prev:
                    oldentries.append(prev)
                prev = extreme
        # clean up old entries
        self.tides['extremes'] = [x for x in self.tides['extremes'] if x not in oldentries]
        # fetch new one if our set is small
        if len(self.tides['extremes']) <= MIN_TIDES:
            try:
                self.get_data()
            except Exception:
                # Best-effort refresh: keep showing the cached extremes.
                pass
            if hasattr(self, "tides_list"):
                self.build_tides_list()
        return True

    def build_tides_list(self):
        """Rebuild the horizontal scroll list of upcoming Tide widgets."""
        if self.tides is None:
            return
        self.tides_list.clear_widgets()
        w = (len(self.tides['extremes']) - 1) * 150
        tl = BoxLayout(orientation="horizontal", size=(w, 60),
                       size_hint=(None, 1), spacing=5)
        sv = ScrollView(size_hint=(1, 1.1), bar_margin = -5, do_scroll_y = False)
        sv.add_widget(tl)
        for tide in self.tides['extremes']:
            # NOTE(review): assumes every extreme dict carries a 'dt' sort key
            # from the API -- confirm against the worldtides response schema.
            if self.next_t["dt"] < tide["dt"]:
                uptide = Tide(summary = tide, language = self.language)
                tl.add_widget(uptide)
        self.tides_list.add_widget(sv)

    def update(self, dt):
        self.get_time()

    def on_enter(self):
        # We only need to update the clock every second.
        self.timer = Clock.schedule_interval(self.update, 1)

    def on_pre_enter(self):
        self.get_time()

    def on_pre_leave(self):
        # Save resource by unscheduling the updates.
        Clock.unschedule(self.timer)

    def is_setup(self):
        """True once tide data has been fetched successfully."""
        if self.tides:
            return True
        return False
class TidesScreen(Screen):
    """Outer screen: lazily builds a TidesSummary on first entry and forwards
    enter events to it afterwards."""
    def __init__(self, **kwargs):
        super(TidesScreen, self).__init__(**kwargs)
        # Becomes True once the TidesSummary child was created successfully.
        self.running = False
        self.location = kwargs["params"]["location"]
        self.flt = self.ids.tides_float
        self.key = kwargs["params"]["key"]
        self.language = kwargs["params"]["language"]
        self.scrmgr = self.ids.tides_scrmgr
    def on_enter(self):
        if not self.running:
            try:
                self.ids.tides_lbl_load.text = "Loading tides"
                # TidesSummary fetches from the network in its constructor.
                ts = TidesSummary(location = self.location,
                                  key = self.key,
                                  language = self.language
                                  )
                if ts == None or not ts.is_setup():
                    self.ids.tides_lbl_load.text = "Unable to load tides"
                    return
                # and add to our screen manager.
                self.scrmgr.add_widget(ts)
                self.running = True
                # Remove the "loading" placeholder once content is shown.
                self.flt.remove_widget(self.ids.tides_base_box)
            except IOError as err:
                self.ids.tides_lbl_load.text = "Error: " + str(err)
                pass
            except TideException as err:
                self.ids.tides_lbl_load.text = "Error: " + str(err)
                pass
        else:
            # Fixes bug where nested screens don't have "on_enter" or
            # "on_leave" methods called.
            for c in self.scrmgr.children:
                if c.name == self.scrmgr.current:
                    c.on_enter()
|
elParaguayo/RPi-InfoScreen-Kivy
|
screens/tides/screen.py
|
Python
|
gpl-3.0
| 7,469
|
# -*- coding: utf-8 -*-
# Package metadata for python_roshambo_runner.
__author__ = 'Jeffery Tillotson'
__email__ = 'jpt@jeffx.com'
__version__ = '0.1.0'
# Re-export the main runner class at package level.
from .python_roshambo_runner import RoShamBo
|
jeffx/python_roshambo_runner
|
python_roshambo_runner/__init__.py
|
Python
|
gpl-3.0
| 154
|
# MongoDB connection + job settings for the average-ratio-event step of the
# SciWonC dataflow experiment.
# SECURITY NOTE(review): HOST embeds what appear to be plain-text
# user:password credentials and an authSource query; move them into
# USER/PASSWORD (or environment variables) and rotate the exposed secret.
HOST = "wfSciwoncGW:enw1989@172.31.2.76:27001/?authSource=admin"
PORT = ""
USER = ""
PASSWORD = ""
DATABASE = "googlew"
READ_PREFERENCE = "secondary"
WRITE_CONCERN = "majority"
COLLECTION_INPUT = "ratio"
COLLECTION_OUTPUT = "average_ratioevent"
# Column-name prefix applied to generated output fields.
PREFIX_COLUMN = "g_"
ATTRIBUTES = ["event type", "sds from all avg ratio"]
SORT = ["_id.filepath", "_id.numline"]
OPERATION_TYPE = "GROUP_BY_COLUMN"
COLUMN = "event type"
VALUE = ["2","3","4","6"]
INPUT_FILE = "ratio_cpu_memory.csv"
OUTPUT_FILE = "mean_ratio_cpu_memory_0.csv"
|
elainenaomi/sciwonc-dataflow-examples
|
dissertation2017/Experiment 1B/instances/7_2_workflow_full_10files_secondary_wmj_1sh_3rs_noannot_with_proj_3s/averageratioevent_0/ConfigDB_AverageRatioEvent_0.py
|
Python
|
gpl-3.0
| 524
|
#!/usr/bin/env python
'Unit test for trepan.lib.display'
import inspect, unittest
from trepan.lib import display as Mdisplay
class TestLibDisplay(unittest.TestCase):
    """Exercises trepan.lib.display.DisplayMgr add/delete bookkeeping."""
    def test_display(self):
        mgr = Mdisplay.DisplayMgr()
        self.assertEqual(mgr.list, [], "Initial list empty")
        x = 1 # NOQA
        # Displays are bound to a live frame; use this test's own frame.
        frame = inspect.currentframe()
        disp = mgr.add(frame, 'x > 1')
        self.assertEqual(disp.__class__, Mdisplay.Display,
                         "mgr.add should return display")
        self.assertEqual(len(mgr.list), 1, "display list with one item")
        disp = mgr.add(frame, 'x')
        self.assertEqual(disp.__class__, Mdisplay.Display,
                         "mgr.add should return another display")
        self.assertEqual(len(mgr.list), 2, "display list with two items")
        # delete_index is 1-based here; second delete of same index must fail.
        self.assertEqual(mgr.delete_index(1), True, "return True on ok delete")
        self.assertEqual(mgr.delete_index(1), False,
                         "return False on no delete")
        self.assertEqual(len(mgr.list), 1, "display list again with one item")
        return
    pass
if __name__ == '__main__':
    # Run the tests when this file is executed directly.
    unittest.main()
|
rocky/python2-trepan
|
test/unit/test-lib-display.py
|
Python
|
gpl-3.0
| 1,169
|
# Import required modules
from random import randint
from time import sleep as wait

# Number-guessing game: outer loop generates a fresh target, inner loop reads
# guesses until the player hits it, then offers a replay or exits.
done = 0
while done == 0:
    # Generate random number between 1 & 100
    answer = randint(1, 100)
    print("Generated new number")
    correct = 0
    guess = 0
    again = "no"
    while correct == 0:
        guess = int(input("Please guess a number... \n"))
        if guess == answer:
            print("Spot on")
            again = input("Would you like to play again?")
            if again.lower() == "no":
                print("Ok, goodbye.")
                wait(2)
                exit()
            # BUG FIX: previously ``correct`` was never set, so after choosing
            # to play again the inner loop kept asking about the SAME number
            # instead of letting the outer loop generate a new one.
            correct = 1
        elif guess < answer:
            print("Too Low")
        elif guess > answer:
            print("Too High")
|
UTC-Coding/Benji-s-Python
|
Guessing Game.py
|
Python
|
gpl-3.0
| 701
|
#!/usr/bin/env python
import configparser
import os
from slackclient import SlackClient
# Bot account whose user id we want to look up.
BOT_NAME = 'isitupbot'
config = configparser.ConfigParser()
# CUR_DIR = os.path.abspath('.')
# PAR_DIR = os.path.dirname(CUR_DIR)
# SETTINGS_FILE = os.path.join(PAR_DIR, 'settings.ini')
# Token is read from settings.ini in the current working directory.
SETTINGS_FILE = 'settings.ini'
config.read(SETTINGS_FILE)
sc = SlackClient(config.get('slack', 'SLACK_BOT_TOKEN'))
def bot_id():
    """Print the authenticated bot's user id via Slack's auth.test call."""
    response = sc.api_call("auth.test")
    if not response.get('ok'):
        # auth.test failed (bad/expired token, network issue, ...).
        print('could not find a user named : ' + BOT_NAME)
        return
    print(response['user_id'])
|
prodicus/margo
|
scripts/print_bot_id.py
|
Python
|
gpl-3.0
| 581
|
#-----------------------------------------------------------------------------
# Copyright (c) 2014-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
#
# ***************************************************
# hook-astriod.py - PyInstaller hook file for astriod
# ***************************************************
# The astriod package, in __pkginfo__.py, is version 1.1.1. Looking at its
# source:
#
# From __init__.py, starting at line 111::
#
# BRAIN_MODULES_DIR = join(dirname(__file__), 'brain')
# if BRAIN_MODULES_DIR not in sys.path:
# # add it to the end of the list so user path take precedence
# sys.path.append(BRAIN_MODULES_DIR)
# # load modules in this directory
# for module in listdir(BRAIN_MODULES_DIR):
# if module.endswith('.py'):
# __import__(module[:-3])
#
# So, we need all the Python source in the ``brain/`` subdirectory,
# since this is run-time discovered and loaded. Therefore, these
# files are all data files.
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, \
is_module_or_submodule
# Note that brain/ isn't a module (it lacks an __init__.py, so it can't be
# referred to as astroid.brain; instead, locate it as package astriod,
# subdirectory brain/.
# All .py files under astroid/brain are loaded at run time by scanning the
# directory, so they must ship as data files.
datas = collect_data_files('astroid', True, 'brain')
# Update: in astroid v 1.4.1, the brain/ module import parts of astroid. Since
# everything in brain/ is dynamically imported, these are hidden imports. For
# simplicity, include everything in astroid. Exclude all the test/ subpackage
# contents and the test_util module.
hiddenimports = ['six'] + collect_submodules('astroid',
    lambda name: (not is_module_or_submodule(name, 'astroid.tests')) and
    (not name == 'test_util'))
|
etherkit/OpenBeacon2
|
client/linux-arm/venv/lib/python3.6/site-packages/PyInstaller/hooks/hook-astroid.py
|
Python
|
gpl-3.0
| 2,110
|
#!/usr/bin/python3
import random, sys, os
from random import *
from math import pi, floor
seed(floor(pi*1000))
def gen():
    """Emit one random CNF instance (with extra XOR clauses) on stdout.

    Output format is DIMACS-like: a "p cnf" header, plain clauses terminated
    by 0, then xor clauses prefixed with 'x'.
    """
    num_vars = randint(10, 28)
    num_clauses = randint(1, 10)
    num_xclauses = randint(1, num_clauses)
    # Positive literals 1..num_vars-1 and their negations.
    positives = list(range(1, num_vars))
    literals = positives + [-v for v in range(1, num_vars)]

    def fmt(seq):
        return ' '.join(str(x) for x in seq)

    print("p cnf %d %d" % (num_vars, num_clauses))
    for _ in range(num_clauses):
        clause = list(sample(literals, randint(1, num_vars - 1)))
        print(fmt(clause), '0')
    for _ in range(num_xclauses):
        clause = list(sample(positives, randint(1, num_vars - 1)))
        # Randomly negate the leading literal of the xor clause.
        if randint(0, 1):
            clause[0] *= -1
        print('x', fmt(clause), '0')
# Write 999 generated CNF instances to numbered files by redirecting the
# print() calls inside gen() to each file in turn.
for i in range(1,1000):
    with open("test_%03d.cnf" % i, 'w') as fp:
        sys.stdout = fp
        gen()
# BUG FIX: restore the real stdout. Previously sys.stdout stayed bound to the
# last (closed) file, so any later print would raise ValueError.
sys.stdout = sys.__stdout__
|
wadoon/xorblast
|
test/gen_test.py
|
Python
|
gpl-3.0
| 779
|
#!/usr/bin/python
from review.Asymmetries import *
from ucnacore.PyxUtils import *
def plot_bgSubtrHist(basedir,depth=2):
    """Plot residual background rate vs. time for octet division ``depth``.

    basedir: directory of octet asymmetry files
    depth: octet subdivision level passed to collectOctetFiles
    Writes <basedir>/BGResid_<depth>.pdf.
    """
    print "--------------------- Division",depth,"-------------------------"
    bgs = {}
    n = 0
    conn = open_connection()
    # collect (run start time, bg fit, source file) per (side, afp, type) key
    for af in collectOctetFiles(basedir,depth):
        for k in af.bgs:
            bgs.setdefault(k,[]).append((getRunStartTime(conn,af.getRuns()[0]),af.bgs[k],af))
        n += 1
    gBgs=graph.graphxy(width=25,height=8,
            #x=graph.axis.lin(title=unitNames[depth],min=0,max=n-1),
            x=graph.axis.lin(title="Time [days]"),
            y=graph.axis.lin(title="Residual Background [$\\mu$Hz/keV]"),
            key = graph.key.key(pos="bl"))
    setTexrunner(gBgs)
    sideCols = {'E':rgb.red,'W':rgb.blue}
    segCols = {0:rgb.red,1:rgb.blue}
    segNames = {0:"BG Before",1:"BG After"}
    # East side only; even/odd segments separate before/after backgrounds.
    s = 'E'
    LF = LinearFitter(terms=[polyterm(0)])
    for seg in [0,1]:
        a0 = [b for b in bgs[(s,"Off","0")] if b[-1].whichSegment(depth)%2 == seg ]
        a1 = [b for b in bgs[(s,"On","0")] if b[-1].whichSegment(depth)%2 == seg ]
        t0 = a0[0][0]
        # x: days since first run; y: AFP off+on rate sum scaled by 5e5
        gdat = [ (n,(b[0]-t0)/(24*3600.),(b[1].nrate+a1[n][1].nrate)*5e5,sqrt(b[1].d_nrate**2+a1[n][1].d_nrate**2)*5e5) for (n,b) in enumerate(a0)]
        LF.fit(gdat,cols=(0,2,3),errorbarWeights=True)
        err = 1.0/sqrt(LF.sumWeights())
        gBgs.plot(graph.data.points(gdat,x=2,y=3,dy=4,title="%s: $%.1f \\pm %.1f$"%(segNames[seg],LF.coeffs[0],err)),
            [graph.style.symbol(symbol.circle,size=0.2,symbolattrs=[segCols[seg],]),
            graph.style.errorbar(errorbarattrs=[segCols[seg],])])
    gBgs.writetofile(basedir+"/BGResid_%i.pdf"%depth)
def plot_TypeIV_resid(basedir,depth=2):
    """Plot excess ("ExcessTheta") event rates per octet division, with a
    constant fit per side/AFP state. Writes <basedir>/Rt_ExcessTheta_<depth>.pdf."""
    print "--------------------- Division",depth,"-------------------------"
    rts = {}
    n = -1
    hname = "ExcessTheta"
    for af in collectOctetFiles(basedir,depth):
        n += 1
        # skip divisions containing known-bad runs
        if [badrun for badrun in [14166,14888,15518] if badrun in af.getRuns()]:
            continue
        for s in ["E","W"]:
            for afp in ["On","Off"]:
                rts.setdefault((s,afp),[]).append((n,af.getRate(s,afp,"1",hname+"_")))
    gRts=graph.graphxy(width=30,height=10,
            x=graph.axis.lin(title=unitNames[depth],min=0,max=n),
            y=graph.axis.lin(title="Excess Rate [Hz]"),
            key = graph.key.key(pos="bc",columns=2))
    setTexrunner(gRts)
    scols = {"E":rgb.red,"W":rgb.blue}
    LF = LinearFitter(terms=[polyterm(0)])
    for s in scols:
        for afp in afpSymbs:
            gdat = [ [n,r.rate,r.d_rate] for (n,r) in rts[(s,afp)] ]
            # constant fit weighted by rate uncertainties
            LF.fit(gdat,cols=(0,1,2),errorbarWeights=True)
            err = 1.0/sqrt(LF.sumWeights())
            chi2 = LF.chisquared()
            ndf = LF.nu()
            gtitle = "%s %s: $%.3f \\pm %.3f$, $\\chi^2/\\nu = %.1f/%i$"%(s,afp,LF.coeffs[0],err,chi2,ndf)
            print gtitle
            gRts.plot(graph.data.points(gdat,x=1,y=2,dy=3,title=gtitle),
                [graph.style.symbol(afpSymbs[afp],size=0.2,symbolattrs=[scols[s],]),
                graph.style.errorbar(errorbarattrs=[scols[s]])])
    gRts.writetofile(basedir+"/Rt_%s_%i.pdf"%(hname,depth))
class fltErr:
    """A value with a 1-sigma uncertainty; uncertainties add in quadrature."""
    def __init__(self,x,dx):
        self.x = x    # central value
        self.dx = dx  # uncertainty
    def __repr__(self):
        return "%g~%g"%(self.x,self.dx)
    def __add__(self,other):
        # BUG FIX: use isinstance instead of exact type comparison so that
        # subclasses of fltErr are also combined in quadrature.
        if isinstance(other, fltErr):
            return fltErr(self.x+other.x,sqrt(self.dx**2+other.dx**2))
        # adding a plain number shifts the center, leaving the error unchanged
        return fltErr(self.x+other,self.dx)
    # Generalization: support number + fltErr as well as fltErr + number
    # (addition is commutative here).
    __radd__ = __add__
class bgSubtr(KVMap):
    """Background-subtraction record parsed from one KVMap entry."""
    def __init__(self,m):
        self.dat = m.dat
        # numeric fields: BG counts, excess, and the energy window bounds
        self.loadFloats(["nBG","d_nBG","xs","d_xs","eMin","eMax"])
        # fold value/uncertainty pairs into fltErr objects
        self.nBG = fltErr(self.nBG,self.d_nBG)
        self.xs = fltErr(self.xs,self.d_xs)
        self.loadStrings(["side","afp","name"])
class XSFile(AsymmetryFile):
    """Asymmetry file with background-subtraction fits and excess counts indexed
    for lookup by (side, afp, ...) keys."""
    def __init__(self,fname):
        AsymmetryFile.__init__(self,fname)
        # fits keyed by (side, afp, type)
        self.bgs = dict([((b.side,b.afp,b.type),b) for b in [bgSubtr(m) for m in self.dat.get("bg_subtr_fit",[])]])
        # excess counts keyed by (side, afp, name, eMin)
        self.XS = dict([((b.side,b.afp,b.name,b.eMin),b) for b in [bgSubtr(b) for b in self.dat["bg_subtr_xs"]]])
    def afpSumXS(self,side,name,emin):
        """Excess summed over both AFP states (errors add in quadrature)."""
        return self.XS[(side,"Off",name,emin)].xs+self.XS[(side,"On",name,emin)].xs
def excess_table(dset,ltxt="",dscale=1.0):
    """Print a LaTeX table of excess event counts (and subtracted BG sizes).

    dset: path under $UCNA_ANA_PLOTS to the XS file
    ltxt: if non-empty, a single labeled row (AFP-Off only, no BG rows)
    dscale: scale factor applied to excess values
    """
    print "\n------------",dset,"--------------\n"
    XS = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/"+dset)
    # (column label, histogram name, energy window lower edge)
    cols = [("Type 0","hEnergy_Type_0",1000), ("Type I","hEnergy_Type_1",1000), ("Type II/III","hEnergy_Type_2",1000),
            ("$\\beta$ 1--2.2MeV","ExcessE",1000), ("$\\beta > 2.2$MeV","ExcessE",2200),
            ("$\\gamma$ 0.2--1MeV","ExcessGamma",200), ("$\\gamma$ 1--2.2MeV","ExcessGamma",1000) ,("$\\gamma >2.2$MeV","ExcessGamma",2200)]
    # build the tabular header
    tbl = "\\begin{table} \\centering \\begin{tabular}{| c ||"
    for c in cols:
        tbl += " c |"
    tbl += "}\\hline\n "
    for c in cols:
        tbl += "& %s\t"%c[0]
    tbl += "\\\\ \\hline \hline\n"
    for s in ["E","W"]:
        for afp in ["Off","On"]:
            if ltxt:
                # labeled mode: one row, AFP Off only
                if afp == "On":
                    continue
                tbl += ltxt+" "
            else:
                tbl += "%s %s "%(s,afp)
            for c in cols:
                bxs = XS.XS[(s,afp,c[1],c[2])]
                tbl += "& $%i \\pm %i$\t"%(bxs.xs.x*dscale,bxs.xs.dx*dscale)
            if ltxt:
                tbl += "\\\\ \\hline\n"
                continue
            # second line per row: size of the subtracted background
            tbl += "\\\\\nBG "
            for c in cols:
                bxs = XS.XS[(s,afp,c[1],c[2])]
                tbl += "& %i\t\t"%(bxs.nBG.x)
            tbl += "\\\\ \\hline\n"
    tbl += "\\end{tabular}\\caption{Excess event counts after BG subtraction and size of subtracted BG}\\end{table}"
    print
    print tbl
def NGBG_combo_plot():
    """Plot neutron-generated-background MC attributes and compare the combined
    MC against East detector data. Writes two PDFs under $UCNA_ANA_PLOTS/NGBG."""
    # comparison categories
    cats = [("Type 0 $(r<50)$","hEnergy_Type_0",1000), ("Type I $(r<50)$","hEnergy_Type_1",1000),
            ("$\\beta$ 1--2.2MeV","ExcessE",1000), ("$\\beta > 2.2$MeV","ExcessE",2200),
            ("$\\gamma$ 1--2.2MeV","ExcessGamma",1000)]
    myticks = [ graph.axis.tick.tick(n,label=c[0]) for (n,c) in enumerate(cats) ]
    catAxis = graph.axis.lin(title=None,min=-0.5,max=len(cats)-0.5,parter=None,manualticks=myticks,painter=graph.axis.painter.regular(labeldist=0.1,labeldirection=graph.axis.painter.rotatetext(135)))
    #
    # individual MC attributes
    #
    XSalb = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/DetAl_nCaptAl/DetAl_nCaptAl.txt")
    XSalg = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/DetAl_nCaptAlGamma/DetAl_nCaptAlGamma.txt")
    XSsch = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/ScintFace_nCaptH/ScintFace_nCaptH.txt")
    sims = [(XSalb,symbol.triangle,"Al $\\beta+\\gamma$"),(XSalg,symbol.square,"Al $\\gamma$"),(XSsch,symbol.circle,"H $\\gamma$")]
    gXS=graph.graphxy(width=10,height=7,
            x=catAxis,
            y=graph.axis.lin(title="events per $10^6$ captures",min=0),
            key = graph.key.key(pos="tl"))
    setTexrunner(gXS)
    for (sm,ssymb,sname) in sims:
        # 0.1 factor rescales MC counts to "per 10^6 captures" units
        gdat = [ (n,sm.afpSumXS("E",c[1],c[2]).x*0.1) for (n,c) in enumerate(cats)]
        print gdat
        gXS.plot(graph.data.points(gdat,x=1,y=2,title=sname),[graph.style.symbol(ssymb),graph.style.line([style.linestyle.dotted])])
    gXS.writetofile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/NGBGSimAttrs.pdf")
    #
    # compare combined MC and data
    #
    # East data counts for each category (hard-coded from prior analysis)
    #XSdat = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/OctetAsym_Offic/OctetAsym_Offic.txt")
    Eoff = [fltErr(2157,503),fltErr(1213,454),fltErr(7435,781),fltErr(1318,478),fltErr(3522,1767)]
    Eon = [fltErr(1237,567),fltErr(876,435),fltErr(3636,879),fltErr(456,476),fltErr(5791,1930)]
    Ecomb = [Eoff[n]+Eon[n] for n in range(len(Eoff))]
    # combined MC
    XSComb = XSFile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/Combined/Combined.txt")
    gComp=graph.graphxy(width=10,height=7,
            x=catAxis,
            y=graph.axis.lin(title="excess counts ($\\times 10^3$)",min=0),
            key = graph.key.key(pos="tl"))
    setTexrunner(gComp)
    # data and MC points offset by +/-0.1 so the symbols don't overlap
    gdat = [(n-0.1,Ecomb[n].x/1000.,Ecomb[n].dx/1000.) for (n,c) in enumerate(cats)]
    print gdat
    gComp.plot(graph.data.points(gdat,x=1,y=2,dy=3,title="East data"),[graph.style.symbol(symbol.circle),graph.style.errorbar()])
    gdat = [ (n+0.1,XSComb.afpSumXS("E",c[1],c[2]).x*0.001) for (n,c) in enumerate(cats)]
    print gdat
    gComp.plot(graph.data.points(gdat,x=1,y=2,title="Combo MC"),[graph.style.symbol(symbol.triangle,symbolattrs=[deco.filled])])
    gComp.writetofile(os.environ["UCNA_ANA_PLOTS"]+"/NGBG/NGBGDatSimCompare.pdf")
if __name__=="__main__":
    # Active analysis selection; alternate calls are kept disabled for reference.
    #NGBG_combo_plot()
    #exit(0)
    excess_table("OctetAsym_Offic/OctetAsym_Offic.txt")
    #excess_table("NGBG/ScintFace_nCaptH/ScintFace_nCaptH.txt","Scintillator",0.1)
    #excess_table("NGBG/DetAl_nCaptAl/DetAl_nCaptAl.txt","Al $\\beta+\gamma$",0.1)
    #excess_table("NGBG/DetAl_nCaptAlGamma/DetAl_nCaptAlGamma.txt","Al $\\gamma$",0.1)
    #excess_table("NGBG/Combined/Combined.txt","Combined",1.0)
    exit(0)
    # NOTE: everything below is unreachable due to exit(0) above.
    #plot_bgSubtrHist(os.environ["UCNA_ANA_PLOTS"]+"/OctetAsym_Annulus/")
    for i in range(3):
        plot_TypeIV_resid(os.environ["UCNA_ANA_PLOTS"]+"/OctetAsym_Offic/",i)
|
UCNA/main
|
Scripts/plotters/BGSubtr.py
|
Python
|
gpl-3.0
| 8,434
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class ResCompany(models.Model):
_inherit = 'res.company'
# -------------------------------------------------------------------------
# Sequences
# -------------------------------------------------------------------------
def _create_dropship_sequence(self):
dropship_vals = []
for company in self:
dropship_vals.append({
'name': 'Dropship (%s)' % company.name,
'code': 'stock.dropshipping',
'company_id': company.id,
'prefix': 'DS/',
'padding': 5,
})
if dropship_vals:
self.env['ir.sequence'].create(dropship_vals)
@api.model
def create_missing_dropship_sequence(self):
company_ids = self.env['res.company'].search([])
company_has_dropship_seq = self.env['ir.sequence'].search([('code', '=', 'stock.dropshipping')]).mapped('company_id')
company_todo_sequence = company_ids - company_has_dropship_seq
company_todo_sequence._create_dropship_sequence()
def _create_per_company_sequences(self):
super(ResCompany, self)._create_per_company_sequences()
self._create_dropship_sequence()
# -------------------------------------------------------------------------
# Picking types
# -------------------------------------------------------------------------
def _create_dropship_picking_type(self):
dropship_vals = []
for company in self:
sequence = self.env['ir.sequence'].search([
('code', '=', 'stock.dropshipping'),
('company_id', '=', company.id),
])
dropship_vals.append({
'name': 'Dropship',
'company_id': company.id,
'warehouse_id': False,
'sequence_id': sequence.id,
'code': 'incoming',
'default_location_src_id': self.env.ref('stock.stock_location_suppliers').id,
'default_location_dest_id': self.env.ref('stock.stock_location_customers').id,
'sequence_code': 'DS',
'use_existing_lots': False,
})
if dropship_vals:
self.env['stock.picking.type'].create(dropship_vals)
@api.model
def create_missing_dropship_picking_type(self):
company_ids = self.env['res.company'].search([])
company_has_dropship_picking_type = (
self.env['stock.picking.type']
.search([
('default_location_src_id.usage', '=', 'supplier'),
('default_location_dest_id.usage', '=', 'customer'),
])
.mapped('company_id')
)
company_todo_picking_type = company_ids - company_has_dropship_picking_type
company_todo_picking_type._create_dropship_picking_type()
def _create_per_company_picking_types(self):
super(ResCompany, self)._create_per_company_picking_types()
self._create_dropship_picking_type()
# -------------------------------------------------------------------------
# Stock rules
# -------------------------------------------------------------------------
def _create_dropship_rule(self):
    """Create the 'buy' stock rule (supplier -> customer) on the
    dropshipping route for every company in the recordset."""
    route = self.env.ref('stock_dropshipping.route_drop_shipping')
    supplier_location = self.env.ref('stock.stock_location_suppliers')
    customer_location = self.env.ref('stock.stock_location_customers')
    rule_name = '%s → %s' % (supplier_location.name, customer_location.name)
    vals_list = []
    for company in self:
        # Pick the company's dropship picking type (lowest sequence wins).
        picking_type = self.env['stock.picking.type'].search([
            ('company_id', '=', company.id),
            ('default_location_src_id.usage', '=', 'supplier'),
            ('default_location_dest_id.usage', '=', 'customer'),
        ], limit=1, order='sequence')
        vals_list.append({
            'name': rule_name,
            'action': 'buy',
            'location_dest_id': customer_location.id,
            'location_src_id': supplier_location.id,
            'procure_method': 'make_to_stock',
            'route_id': route.id,
            'picking_type_id': picking_type.id,
            'company_id': company.id,
        })
    if vals_list:
        self.env['stock.rule'].create(vals_list)
@api.model
def create_missing_dropship_rule(self):
    """Backfill the dropshipping stock rule for companies that do not have
    a rule on the dropshipping route yet."""
    route = self.env.ref('stock_dropshipping.route_drop_shipping')
    all_companies = self.env['res.company'].search([])
    covered = self.env['stock.rule'].search(
        [('route_id', '=', route.id)],
    ).mapped('company_id')
    (all_companies - covered)._create_dropship_rule()
def _create_per_company_rules(self):
    """Hook override: after the standard per-company rules are created,
    also create the dropshipping stock rule."""
    super(ResCompany, self)._create_per_company_rules()
    self._create_dropship_rule()
|
jeremiahyan/odoo
|
addons/stock_dropshipping/models/res_company.py
|
Python
|
gpl-3.0
| 5,103
|
#!/usr/bin/env python3
# Author: PabstMirror
# Uploads ace relases to workshop
# Will slice up compats to their own folders
import sys
# Abort immediately under Python 2 — the rest of the script is Python-3 only.
if sys.version_info[0] == 2:
    print("Python 3 is required.")
    sys.exit(1)
import os
import os.path
import shutil
import platform
import glob
import subprocess
import hashlib
import configparser
import json
import traceback
import time
import timeit
import re
import fnmatch
import argparse
import psutil
import pysftp
import tempfile
from uritemplate import URITemplate, expand
if sys.platform == "win32":
import winreg
def get_project_version(version_file):
    """Extract the project version from a script_version.hpp file.

    Parses the ``#define MAJOR/MINOR/PATCHLVL/BUILD`` lines and returns them
    as a list of strings ``[major, minor, patch, build]``.  An existing but
    empty file yields four empty strings; a missing or malformed file raises
    ``Exception``.
    """
    majorText = ""
    minorText = ""
    patchText = ""
    buildText = ""
    try:
        if os.path.isfile(version_file):
            # Context manager so the handle is closed even on a parse error.
            with open(version_file, "r") as f:
                hpptext = f.read()
            if hpptext:
                majorText = re.search(r"#define MAJOR (.*\b)", hpptext).group(1)
                minorText = re.search(r"#define MINOR (.*\b)", hpptext).group(1)
                patchText = re.search(r"#define PATCHLVL (.*\b)", hpptext).group(1)
                buildText = re.search(r"#define BUILD (.*\b)", hpptext).group(1)
        else:
            # The original called an undefined print_error() here; the
            # resulting NameError was masked by the broad except below.
            print("A Critical file seems to be missing or inaccessible: {}".format(version_file))
            raise FileNotFoundError("File Not Found: {}".format(version_file))
    except Exception as e:
        # Chain the original cause instead of silently discarding it.
        raise Exception("Check the integrity of the file: {}".format(version_file)) from e
    return [majorText, minorText, patchText, buildText]
def find_steam_exe():
    """Return the path to Steam.exe (backslash form) from the user registry.

    Raises Exception("BadSteam", ...) when the registry entry is missing.
    """
    reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
    try:
        # Context manager guarantees the key is closed even on failure
        # (the original leaked it when QueryValueEx raised).
        with winreg.OpenKey(reg, r"Software\Valve\Steam") as k:
            steam_path = winreg.QueryValueEx(k, "SteamExe")[0]
        return steam_path.replace("/", "\\")
    except OSError:
        # Narrowed from a bare `except:` so unrelated bugs are not swallowed;
        # registry failures surface as OSError.
        raise Exception("BadSteam", "Steam path could not be located! Please make sure that Steam is properly installed.")
def start_steam_with_user(username, password):
    """Shut down any running Steam instance, then start Steam logged in as
    *username*.

    NOTE(security): the credentials are interpolated into an os.system
    command line, so they are visible to other local processes; acceptable
    only on a trusted build machine.
    """
    PROCNAME = "Steam.exe"
    steam_path = find_steam_exe()
    for proc in psutil.process_iter():
        if proc.name().lower() == PROCNAME.lower():
            print(proc.exe())
            # Prefer the exe of the running instance over the registry path.
            steam_path = proc.exe()
    print("Shutting down Steam...")
    subprocess.call([steam_path, "-shutdown"])
    steam_running = True
    while steam_running:
        steam_running = False
        for proc in psutil.process_iter():
            # Compare case-insensitively, consistent with the scan above;
            # the original used an exact-case compare here and could report
            # shutdown while Steam was still running.
            if proc.name().lower() == PROCNAME.lower():
                steam_running = True
    print("Steam shutdown.")
    print("Starting Steam...")
    print(steam_path)
    os.system("start \"\" \"{}\" -silent -noverifyfiles -login {} {}".format(steam_path, username, password))
def close_steam():
    """Ask a running Steam instance to shut down and block until it exits."""
    PROCNAME = "Steam.exe"
    steam_path = find_steam_exe()
    for proc in psutil.process_iter():
        if proc.name().lower() == PROCNAME.lower():
            print(proc.exe())
            # Prefer the exe of the running instance over the registry path.
            steam_path = proc.exe()
    print("Shutting down Steam...")
    subprocess.call([steam_path, "-shutdown"])
    steam_running = True
    while steam_running:
        steam_running = False
        for proc in psutil.process_iter():
            # Compare case-insensitively, consistent with the scan above;
            # the original used an exact-case compare here and could report
            # shutdown while Steam was still running.
            if proc.name().lower() == PROCNAME.lower():
                steam_running = True
    print("Steam shutdown.")
def find_bi_publisher():
    """Locate PublisherCmd.exe via the Arma 3 Tools registry entry.

    Raises Exception("BadTools", ...) when the tools are missing or
    incomplete.
    """
    bad_tools = Exception(
        "BadTools",
        "Arma 3 Tools are not installed correctly or the P: drive needs to be created.")
    reg = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
    try:
        # Context manager closes the key even on failure (original leaked it).
        with winreg.OpenKey(reg, r"Software\bohemia interactive\arma 3 tools") as k:
            arma3tools_path = winreg.QueryValueEx(k, "path")[0]
    except OSError:
        # Narrowed from a bare `except:` so unrelated bugs are not swallowed.
        raise bad_tools
    publisher_path = os.path.join(arma3tools_path, "Publisher", "PublisherCmd.exe")
    if os.path.isfile(publisher_path):
        return publisher_path
    raise bad_tools
def steam_publish_folder(folder, mod_id, version, steam_changelog):
    """Publish *folder* to the Steam Workshop item *mod_id*.

    *version* is the [major, minor, patch, build] list used to fill the
    placeholders in the *steam_changelog* template.  Raises Exception when
    the publisher fails with a non-retryable exit code.
    """
    change_notes = steam_changelog.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
    steam_changelog_filepath = "steam_changelog.txt"
    # Write the formatted change notes where PublisherCmd can read them;
    # `with` guarantees the handle is flushed/closed before the call below.
    with open(steam_changelog_filepath, "w") as steam_changelog_file:
        steam_changelog_file.write(change_notes)
    cmd = [find_bi_publisher(), "update", "/id:{}".format(mod_id), "/changeNoteFile:{}".format(steam_changelog_filepath), "/path:{}".format(folder)]
    print("running: {}".format(" ".join(cmd)))
    print("")
    print("Publishing folder {} to workshop ID {}".format(folder, mod_id))
    print("")
    # Exit codes 17/19 are treated as "Steam not ready yet": wait and retry.
    # Any other non-zero code aborts.  The initial ret=17 forces a 30 s wait
    # before the first publisher call.
    ret = 17
    while ret != 0:
        if ret == 17 or ret == 19:
            print("Waiting for Steam to initialize...")
            time.sleep(30)
        else:
            print("publisher failed with code {}".format(ret))
            raise Exception("Publisher", "Publisher had problems")
        ret = subprocess.call(cmd)
    print("Publisher Status: {}".format(ret))
    os.remove(steam_changelog_filepath)
def main(argv):
    """Entry point: read the publish manifest and push the release to every
    configured destination (steam / sftp / github).

    Any exception is printed and converted to exit status 1.
    """
    try:
        parser = argparse.ArgumentParser(description="Arma Automatic Publishing Script")
        parser.add_argument('manifest', type=argparse.FileType('r'), help='manifest json file')
        parser.add_argument('-r', '--release_target', type=str, help="the name of the release target in the manifest file.", default="release")
        # Note: parse_args() reads sys.argv itself; the argv parameter is unused.
        args = parser.parse_args()
        manifest_file = args.manifest
        release_target = args.release_target
        manifest = json.load(manifest_file)
        # NOTE(review): "\\\m" collapses to a doubled backslash before "main";
        # Windows tolerates the resulting path, but the literal looks
        # accidental — confirm.
        version = get_project_version("..\\addons\\\main\\script_version.hpp")
        if(not "CBA_PUBLISH_CREDENTIALS_PATH" in os.environ):
            raise Exception("CBA_PUBLISH_CREDENTIALS_PATH is not set in the environment")
        credentials_path = os.environ["CBA_PUBLISH_CREDENTIALS_PATH"]
        for destination in manifest['publish'][release_target]['destinations']:
            # --- Steam Workshop destination --------------------------------
            if(destination["type"] == "steam"):
                cred_file = json.load(open(os.path.join(credentials_path, destination["cred_file"])))
                if("username" in cred_file and "password" in cred_file):
                    steam_username = cred_file["username"]
                    steam_password = cred_file["password"]
                    start_steam_with_user(steam_username, steam_password)
                else:
                    raise Exception("Credentials file did not specify a username and password for Steam login")
                if(not "project_id" in destination):
                    raise Exception("Steam Publish","No project ID defined in manifest for Steam publish")
                project_id = destination["project_id"]
                if(not "release_dir" in destination):
                    raise Exception("Steam Publish","No release directory defined in manifest for Steam publish")
                release_dir = destination["release_dir"]
                if(not "steam_changelog" in destination):
                    raise Exception("Steam Publish","No steam changelog defined in manifest for Steam publish")
                steam_changelog = destination["steam_changelog"]
                steam_publish_folder(release_dir, project_id, version, steam_changelog)
                close_steam()
            # --- SFTP destination ------------------------------------------
            if(destination["type"] == "sftp"):
                cred_file = json.load(open(os.path.join(credentials_path, destination["cred_file"])))
                if("username" in cred_file and "password" in cred_file):
                    sftp_username = cred_file["username"]
                    sftp_password = cred_file["password"]
                else:
                    raise Exception("Credentials file did not specify a username and password for SFTP login")
                if(not "hostname" in destination):
                    raise Exception("SFTP Publish","No hostname was defined for the SFTP site.")
                hostname = destination["hostname"]
                if(not "local_path" in destination):
                    raise Exception("SFTP Publish","No local path was defined for the SFTP upload.")
                local_path = destination["local_path"]
                if(not "remote_path" in destination):
                    raise Exception("SFTP Publish","No remote path was defined for the SFTP upload.")
                remote_path = destination["remote_path"]
                cnopts = pysftp.CnOpts()
                # NOTE(security/review): host-key verification is disabled,
                # which exposes the upload to MITM — confirm this is
                # acceptable for the build environment.
                cnopts.hostkeys = None
                sftp = pysftp.Connection(host=hostname, username=sftp_username, password=sftp_password, cnopts=cnopts)
                # Both paths may contain {major}/{minor}/{patch}/{build}
                # placeholders filled from the parsed version.
                local_path = local_path.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                remote_path = remote_path.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                print("SFTP: Publishing {} to remote {}:{}".format(local_path, hostname, remote_path))
                sftp.put(local_path, remotepath=remote_path)
                print("SFTP: Upload Complete!")
            # --- GitHub release destination --------------------------------
            if(destination["type"] == "github"):
                account = destination["account"]
                tag_name = destination["tag_name"]
                branch = destination["branch"]
                name = destination["name"]
                body_file = destination["body_file"]
                local_path = destination["local_path"]
                prerelease = destination["prerelease"]
                asset_name = destination["asset_name"]
                # Expand version placeholders in the templated fields.
                tag_name = tag_name.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                name = name.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                asset_name = asset_name.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                local_path = local_path.format(major=version[0], minor=version[1], patch=version[2], build=version[3])
                release_text_file = open(body_file, mode='r')
                release_text = release_text_file.read()
                release_text_file.close()
                create_request = {
                    "tag_name": tag_name,
                    "target_commitish": branch,
                    "name": name,
                    "body": release_text,
                    "draft": False,
                    "prerelease": prerelease
                }
                github_token = os.environ["IDI_GITHUB_TOKEN"]
                release_string = json.dumps(create_request, separators=(',',':'))
                # The JSON payload is handed to curl via a temp file (@file).
                temp_dir = tempfile.mkdtemp()
                tmpname = os.path.join(temp_dir,"jsonpost")
                temp_file = open(tmpname, 'w')
                temp_file.write(release_string)
                temp_file.close()
                curl_string = ' '.join(["curl", '-s', '-H "Authorization: token {}"'.format(github_token), '-H "Content-Type: application/json"', "--request POST", "--data", '"@{}"'.format(tmpname).replace('\\','\\\\'), "https://api.github.com/repos/{}/releases".format(account)])
                print("Creating Github Release...")
                response = subprocess.check_output(curl_string)
                response_json = json.loads(response.decode("ascii"))
                shutil.rmtree(temp_dir)
                if("id" in response_json):
                    print("Github Release Created @ {}".format(response_json["url"]))
                    release_id = response_json["id"]
                    upload_url = response_json["upload_url"]
                    # upload_url is a URI template; expand it with the asset
                    # name before attaching the zip.
                    t = URITemplate(upload_url)
                    upload_url = t.expand(name=asset_name)
                    curl_string = ' '.join(["curl", '-s', '-H "Authorization: token {}"'.format(github_token),
                                            '-H "Content-Type: application/zip"',
                                            "--data-binary",
                                            '"@{}"'.format(local_path),
                                            upload_url])
                    print("Attaching Asset...")
                    response = subprocess.check_output(curl_string)
                    response_json = json.loads(response.decode("ascii"))
                    if("browser_download_url" in response_json):
                        print("Asset Attached @ {}".format(response_json["browser_download_url"]))
                    else:
                        print(response_json)
                        raise Exception("Github Publish","Failed to Attach Asset")
                else:
                    print(response_json)
                    raise Exception("Github Publish","Failed to Create Release")
    except Exception as e:
        # Report the failure and exit non-zero so CI notices.
        print(e)
        sys.exit(1)
# Script entry point (main() re-parses sys.argv via argparse itself).
if __name__ == "__main__":
    main(sys.argv)
|
Raspu86/acre2
|
tools/publish.py
|
Python
|
gpl-3.0
| 13,053
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-07-05 01:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter the 'normas' many-to-many field on
    the 'diariooficial' model to the definition below."""

    dependencies = [
        ('diarios', '0002_auto_20190618_1523'),
    ]

    operations = [
        migrations.AlterField(
            model_name='diariooficial',
            name='normas',
            field=models.ManyToManyField(to='norma.NormaJuridica', verbose_name='Normas Publicadas no Diário'),
        ),
    ]
|
cmjatai/cmj
|
cmj/diarios/migrations/0003_auto_20190704_2232.py
|
Python
|
gpl-3.0
| 521
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2018 David Arroyo Menéndez
# Author: David Arroyo Menéndez <davidam@gnu.org>
# Maintainer: David Arroyo Menéndez <davidam@gnu.org>
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GNU Emacs; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA,
import os, re

# (label, pattern) pairs: each anchored pattern matches exactly the decimal
# integers in the labelled range (no sign, no surrounding text).
RANGES = [
    ("1 to 12", r'^(1[0-2]|[1-9])$'),
    ("1 to 24", r'^(2[0-4]|1[0-9]|[1-9])$'),
    ("1 to 31", r'^(3[01]|[12][0-9]|[1-9])$'),
    ("1 to 53", r'^(5[0123]|[1-4][0-9]|[1-9])$'),
    ("0 to 59", r'^([1-5][0-9]|[0-9])$'),
    # Fixed: the original pattern ^([1]?[0-9]?[0-9])$ also matched 101-199.
    ("0 to 100", r'^(100|[1-9][0-9]|[0-9])$'),
    ("1 to 100", r'^(100|[1-9][0-9]|[1-9])$'),
    ("0 to 127", r'^(12[0-7]|1[0-1][0-9]|[0-9][0-9]|[0-9])$'),
    ("32 to 126", r'^(12[0-6]|1[0-1][0-9]|[4-9][0-9]|3[2-9])$'),
]


def matches_range(num, pattern):
    """Return True when the string *num* matches *pattern*."""
    return re.search(pattern, num) is not None


def main():
    """Ask for a number and report which of the ranges above it falls in."""
    num = input("Write a number: ")
    for label, pattern in RANGES:
        print("Is it your number from {}? ".format(label))
        if matches_range(num, pattern):
            print("Yes, it's")
        else:
            print("Not, it isn't")


# Guarded so importing the module no longer blocks on input().
if __name__ == "__main__":
    main()
|
davidam/python-examples
|
regex/number.py
|
Python
|
gpl-3.0
| 3,258
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import math
import logging
import urlparse
from itertools import chain
from guardian.shortcuts import get_perms
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotAllowed, HttpResponseServerError, Http404
from django.shortcuts import render, get_object_or_404
from django.conf import settings
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_http_methods
try:
# Django >= 1.7
import json
except ImportError:
# Django <= 1.6 backwards compatibility
from django.utils import simplejson as json
from django.utils.html import strip_tags
from django.db.models import F
from django.views.decorators.clickjacking import (xframe_options_exempt,
xframe_options_sameorigin)
from geonode.layers.models import Layer
from geonode.maps.models import Map, MapLayer, MapSnapshot
from geonode.layers.views import _resolve_layer
from geonode.utils import (DEFAULT_TITLE,
DEFAULT_ABSTRACT,
num_encode, num_decode,
build_social_links,
http_client,
forward_mercator,
llbbox_to_mercator,
bbox_to_projection,
default_map_config,
resolve_object,
layer_from_viewer_config,
check_ogc_backend)
from geonode.maps.forms import MapForm
from geonode.security.views import _perms_info_json
from geonode.base.forms import CategoryForm
from geonode.base.models import TopicCategory
from geonode import geoserver, qgis_server
from geonode.groups.models import GroupProfile
from geonode.documents.models import get_related_documents
from geonode.people.forms import ProfileForm
from geonode.base.views import batch_modify
from .tasks import delete_map
from requests.compat import urljoin
if check_ogc_backend(geoserver.BACKEND_PACKAGE):
# FIXME: The post service providing the map_status object
# should be moved to geonode.geoserver.
from geonode.geoserver.helpers import ogc_server_settings
from geonode.geoserver.helpers import (_render_thumbnail,
_prepare_thumbnail_body_from_opts)
elif check_ogc_backend(qgis_server.BACKEND_PACKAGE):
from geonode.qgis_server.helpers import ogc_server_settings
# Module-level logger for the maps views.
logger = logging.getLogger("geonode.maps.views")

# Page-size limits used when batching map search results.
DEFAULT_MAPS_SEARCH_BATCH_SIZE = 10
MAX_MAPS_SEARCH_BATCH_SIZE = 25

# Translated user-facing messages for the various permission failures.
_PERMISSION_MSG_DELETE = _("You are not permitted to delete this map.")
_PERMISSION_MSG_GENERIC = _('You do not have permissions for this map.')
_PERMISSION_MSG_LOGIN = _("You must be logged in to save this map")
_PERMISSION_MSG_SAVE = _("You are not permitted to save or edit this map.")
_PERMISSION_MSG_METADATA = _(
    "You are not allowed to modify this map's metadata.")
_PERMISSION_MSG_VIEW = _("You are not allowed to view this map.")
_PERMISSION_MSG_UNKNOWN = _('An unknown error has occured.')
def _resolve_map(request, id, permission='base.change_resourcebase',
                 msg=_PERMISSION_MSG_GENERIC, **kwargs):
    """Look up a Map by url suffix or primary key and enforce *permission*."""
    # A map may be addressed by a custom urlsuffix; fall back to the pk.
    lookup_field = 'urlsuffix' if Map.objects.filter(urlsuffix=id).count() > 0 else 'pk'
    return resolve_object(request, Map, {lookup_field: id},
                          permission=permission, permission_msg=msg, **kwargs)
# BASIC MAP VIEWS #
def map_detail(request, mapid, snapshot=None, template='maps/map_detail.html'):
    """Render the detail page of a single map."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    # Bump the popularity counter, except for the owner and superusers.
    if request.user != map_obj.owner and not request.user.is_superuser:
        Map.objects.filter(id=map_obj.id).update(
            popular_count=F('popular_count') + 1)
    if snapshot is None:
        config = map_obj.viewer_json(request)
    else:
        config = snapshot_config(snapshot, map_obj, request)
    group = None
    if map_obj.group:
        try:
            group = GroupProfile.objects.get(slug=map_obj.group.name)
        except GroupProfile.DoesNotExist:
            group = None
    context_dict = {
        'config': json.dumps(config),
        'resource': map_obj,
        'group': group,
        'layers': MapLayer.objects.filter(map=map_obj.id),
        'perms_list': get_perms(request.user, map_obj.get_self_resource()) +
        get_perms(request.user, map_obj),
        'permissions_json': _perms_info_json(map_obj),
        "documents": get_related_documents(map_obj),
        'links': map_obj.link_set.download(),
        'preview': getattr(
            settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'geoext'),
        'crs': getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857'),
    }
    if settings.SOCIAL_ORIGINS:
        context_dict["social_links"] = build_social_links(request, map_obj)
    if request.user.is_authenticated():
        if getattr(settings, 'FAVORITE_ENABLED', False):
            from geonode.favorite.utils import get_favorite_info
            context_dict["favorite_info"] = get_favorite_info(request.user, map_obj)
    return render(request, template, context=context_dict)
@login_required
def map_metadata(
        request,
        mapid,
        template='maps/map_metadata.html',
        ajax=True):
    """Edit a map's metadata (title, abstract, keywords, regions, category,
    point of contact and metadata author).

    A valid POST saves the metadata; otherwise the edit form is rendered.
    When *ajax* is true the POST answer is a small JSON payload with the map
    id instead of a redirect to the detail page.
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.change_resourcebase_metadata',
        _PERMISSION_MSG_VIEW)
    poc = map_obj.poc
    metadata_author = map_obj.metadata_author
    topic_category = map_obj.category
    if request.method == "POST":
        map_form = MapForm(request.POST, instance=map_obj, prefix="resource")
        category_form = CategoryForm(request.POST, prefix="category_choice_field", initial=int(
            request.POST["category_choice_field"]) if "category_choice_field" in request.POST and
            request.POST["category_choice_field"] else None)
    else:
        map_form = MapForm(instance=map_obj, prefix="resource")
        category_form = CategoryForm(
            prefix="category_choice_field",
            initial=topic_category.id if topic_category else None)
    if request.method == "POST" and map_form.is_valid(
    ) and category_form.is_valid():
        new_poc = map_form.cleaned_data['poc']
        new_author = map_form.cleaned_data['metadata_author']
        new_keywords = map_form.cleaned_data['keywords']
        new_regions = map_form.cleaned_data['regions']
        new_title = strip_tags(map_form.cleaned_data['title'])
        new_abstract = strip_tags(map_form.cleaned_data['abstract'])
        new_category = None
        if category_form and 'category_choice_field' in category_form.cleaned_data and\
                category_form.cleaned_data['category_choice_field']:
            new_category = TopicCategory.objects.get(
                id=int(category_form.cleaned_data['category_choice_field']))
        # No poc chosen in the form: try the inline-posted profile form.
        if new_poc is None:
            if poc is None:
                # NOTE(review): instance=poc is always None on this branch,
                # so both branches build the same form — looks like dead code.
                poc_form = ProfileForm(
                    request.POST,
                    prefix="poc",
                    instance=poc)
            else:
                poc_form = ProfileForm(request.POST, prefix="poc")
            # NOTE(review): has_changed is not called (missing parentheses) —
            # the bound method is always truthy, so only is_valid() matters.
            if poc_form.has_changed and poc_form.is_valid():
                new_poc = poc_form.save()
        if new_author is None:
            if metadata_author is None:
                # NOTE(review): same always-None instance pattern as above.
                author_form = ProfileForm(request.POST, prefix="author",
                                          instance=metadata_author)
            else:
                author_form = ProfileForm(request.POST, prefix="author")
            if author_form.has_changed and author_form.is_valid():
                new_author = author_form.save()
        if new_poc is not None and new_author is not None:
            map_obj.poc = new_poc
            map_obj.metadata_author = new_author
        map_obj.title = new_title
        map_obj.abstract = new_abstract
        if new_keywords:
            map_obj.keywords.clear()
            map_obj.keywords.add(*new_keywords)
        if new_regions:
            map_obj.regions.clear()
            map_obj.regions.add(*new_regions)
        map_obj.category = new_category
        map_obj.save()
        if not ajax:
            return HttpResponseRedirect(
                reverse(
                    'map_detail',
                    args=(
                        map_obj.id,
                    )))
        message = map_obj.id
        return HttpResponse(json.dumps({'message': message}))
    # - POST Request Ends here -
    # Request.GET
    if poc is None:
        poc_form = ProfileForm(request.POST, prefix="poc")
    else:
        # NOTE(review): the inner `if poc is None` can never be true here —
        # only the innermost else branch executes.
        if poc is None:
            poc_form = ProfileForm(instance=poc, prefix="poc")
        else:
            map_form.fields['poc'].initial = poc.id
            poc_form = ProfileForm(prefix="poc")
            poc_form.hidden = True
    if metadata_author is None:
        author_form = ProfileForm(request.POST, prefix="author")
    else:
        # NOTE(review): same unreachable inner check as for poc above.
        if metadata_author is None:
            author_form = ProfileForm(
                instance=metadata_author,
                prefix="author")
        else:
            map_form.fields['metadata_author'].initial = metadata_author.id
            author_form = ProfileForm(prefix="author")
            author_form.hidden = True
    config = map_obj.viewer_json(request)
    layers = MapLayer.objects.filter(map=map_obj.id)
    metadata_author_groups = []
    if request.user.is_superuser or request.user.is_staff:
        metadata_author_groups = GroupProfile.objects.all()
    else:
        try:
            all_metadata_author_groups = chain(
                request.user.group_list_all(),
                GroupProfile.objects.exclude(
                    access="private").exclude(access="public-invite"))
        except BaseException:
            all_metadata_author_groups = GroupProfile.objects.exclude(
                access="private").exclude(access="public-invite")
        # De-duplicate while preserving order.
        [metadata_author_groups.append(item) for item in all_metadata_author_groups
         if item not in metadata_author_groups]
    if settings.ADMIN_MODERATE_UPLOADS:
        if not request.user.is_superuser:
            # Non-superusers cannot toggle publication directly.
            map_form.fields['is_published'].widget.attrs.update(
                {'disabled': 'true'})
            can_change_metadata = request.user.has_perm(
                'change_resourcebase_metadata',
                map_obj.get_self_resource())
            try:
                is_manager = request.user.groupmember_set.all().filter(role='manager').exists()
            except BaseException:
                is_manager = False
            if not is_manager or not can_change_metadata:
                map_form.fields['is_approved'].widget.attrs.update(
                    {'disabled': 'true'})
    return render(request, template, context={
        "config": json.dumps(config),
        "resource": map_obj,
        "map": map_obj,
        "map_form": map_form,
        "poc_form": poc_form,
        "author_form": author_form,
        "category_form": category_form,
        "layers": layers,
        "preview": getattr(settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'geoext'),
        "crs": getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857'),
        "metadata_author_groups": metadata_author_groups,
        "TOPICCATEGORY_MANDATORY": getattr(settings, 'TOPICCATEGORY_MANDATORY', False),
        "GROUP_MANDATORY_RESOURCES": getattr(settings, 'GROUP_MANDATORY_RESOURCES', False),
    })
@login_required
def map_metadata_advanced(request, mapid):
    """Same as map_metadata, rendered with the advanced template."""
    return map_metadata(
        request, mapid, template='maps/map_metadata_advanced.html')
@login_required
def map_remove(request, mapid, template='maps/map_remove.html'):
    """Delete a map, and its constituent layers.

    GET renders a confirmation page; POST schedules the asynchronous
    deletion and redirects to the map list.
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.delete_resourcebase',
        _PERMISSION_MSG_VIEW)
    if request.method == 'GET':
        return render(request, template, context={
            "map": map_obj
        })
    elif request.method == 'POST':
        # Deletion is performed asynchronously by a celery task.
        delete_map.delay(object_id=map_obj.id)
        return HttpResponseRedirect(reverse("maps_browse"))
    # Previously other verbs fell through and returned None (a server
    # error); answer them with an explicit 405 instead.
    return HttpResponseNotAllowed(['GET', 'POST'])
@xframe_options_exempt
def map_embed(
        request,
        mapid=None,
        snapshot=None,
        template='maps/map_embed.html'):
    """Render the embeddable viewer for a map, or the default config when
    no map id is given."""
    if mapid is None:
        config = default_map_config(request)[0]
    else:
        map_obj = _resolve_map(
            request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
        config = (map_obj.viewer_json(request) if snapshot is None
                  else snapshot_config(snapshot, map_obj, request))
    return render(request, template,
                  context={'config': json.dumps(config)})
def map_embed_widget(request, mapid,
                     template='leaflet/maps/map_embed_widget.html'):
    """Display code snippet for embedding widget.

    :param request: The request from the frontend.
    :type request: HttpRequest
    :param mapid: The id of the map.
    :type mapid: String

    :return: formatted code.
    """
    map_obj = _resolve_map(request,
                           mapid,
                           'base.view_resourcebase',
                           _PERMISSION_MSG_VIEW)
    map_bbox = map_obj.bbox_string.split(',')
    # Sanity Checks
    for coord in map_bbox:
        if not coord:
            # NOTE(review): a bare `return` hands None back to Django and
            # triggers a server error — probably meant to be a 4xx response.
            return
    map_layers = MapLayer.objects.filter(
        map_id=mapid).order_by('stack_order')
    # Keep only the data layers; background layers are not embedded.
    layers = []
    for layer in map_layers:
        if layer.group != 'background':
            layers.append(layer)
    if map_obj.srid != 'EPSG:3857':
        map_bbox = [float(coord) for coord in map_bbox]
    else:
        # assumes bbox_string holds lon/lat coords when srid is EPSG:3857,
        # hence the reprojection to mercator — TODO confirm.
        map_bbox = llbbox_to_mercator([float(coord) for coord in map_bbox])
    if map_bbox and len(map_bbox) >= 4:
        minx, miny, maxx, maxy = [float(coord) for coord in map_bbox]
        # Center of the bbox, reprojected when the display CRS is not 3857.
        x = (minx + maxx) / 2
        y = (miny + maxy) / 2
        if getattr(settings, 'DEFAULT_MAP_CRS') == "EPSG:3857":
            center = list((x, y))
        else:
            center = list(forward_mercator((x, y)))
        if center[1] == float('-inf'):
            center[1] = 0
        BBOX_DIFFERENCE_THRESHOLD = 1e-5
        # Check if the bbox is invalid
        valid_x = (maxx - minx) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        valid_y = (maxy - miny) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        # Derive a zoom level that fits each bbox dimension; 15 is the
        # fallback when the dimension is degenerate.
        width_zoom = 15
        if valid_x:
            try:
                width_zoom = math.log(360 / abs(maxx - minx), 2)
            except BaseException:
                pass
        height_zoom = 15
        if valid_y:
            try:
                height_zoom = math.log(360 / abs(maxy - miny), 2)
            except BaseException:
                pass
        map_obj.center_x = center[0]
        map_obj.center_y = center[1]
        map_obj.zoom = math.ceil(min(width_zoom, height_zoom))
    context = {
        'resource': map_obj,
        'map_bbox': map_bbox,
        'map_layers': layers
    }
    message = render(request, template, context)
    return HttpResponse(message)
# MAPS VIEWER #
@require_http_methods(["GET", ])
def add_layer(request):
    """Open the map composer on the requested map with one extra layer
    stacked on top of it."""
    map_obj = _resolve_map(
        request,
        request.GET.get('map_id'),
        'base.view_resourcebase',
        _PERMISSION_MSG_VIEW)
    return map_view(request, str(map_obj.id),
                    layer_name=request.GET.get('layer_name'))
@xframe_options_sameorigin
def map_view(request, mapid, snapshot=None, layer_name=None,
             template='maps/map_view.html'):
    """Return the map composer opened on the map with the given id."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    config = (map_obj.viewer_json(request) if snapshot is None
              else snapshot_config(snapshot, map_obj, request))
    if layer_name:
        # The extra layer replaces the config computed above entirely.
        config = add_layers_to_map_config(
            request, map_obj, (layer_name, ), False)
    return render(request, template, context={
        'config': json.dumps(config),
        'map': map_obj,
        'preview': getattr(
            settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'geoext'),
    })
def map_view_js(request, mapid):
    """Serve the map's viewer configuration as a JavaScript response."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    return HttpResponse(json.dumps(map_obj.viewer_json(request)),
                        content_type="application/javascript")
def map_json(request, mapid, snapshot=None):
    """GET: return the map's viewer config as JSON.
    PUT: replace the map from the posted viewer config and snapshot it.

    The *snapshot* parameter is accepted but unused in this view.
    NOTE(review): verbs other than GET/PUT fall through and return None.
    """
    if request.method == 'GET':
        map_obj = _resolve_map(
            request,
            mapid,
            'base.view_resourcebase',
            _PERMISSION_MSG_VIEW)
        return HttpResponse(
            json.dumps(
                map_obj.viewer_json(request)))
    elif request.method == 'PUT':
        if not request.user.is_authenticated():
            return HttpResponse(
                _PERMISSION_MSG_LOGIN,
                status=401,
                content_type="text/plain"
            )
        map_obj = Map.objects.get(id=mapid)
        if not request.user.has_perm(
                'change_resourcebase',
                map_obj.get_self_resource()):
            return HttpResponse(
                _PERMISSION_MSG_SAVE,
                status=401,
                content_type="text/plain"
            )
        try:
            map_obj.update_from_viewer(request.body, context={'request': request, 'mapId': mapid, 'map': map_obj})
            # Keep a snapshot of the (cleaned) config for history.
            MapSnapshot.objects.create(
                config=clean_config(
                    request.body),
                map=map_obj,
                user=request.user)
            return HttpResponse(
                json.dumps(
                    map_obj.viewer_json(request)))
        except ValueError as e:
            return HttpResponse(
                "The server could not understand the request." + str(e),
                content_type="text/plain",
                status=400
            )
@xframe_options_sameorigin
def map_edit(request, mapid, snapshot=None, template='maps/map_edit.html'):
    """Open the map composer for editing the map with the given id."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    config = (map_obj.viewer_json(request) if snapshot is None
              else snapshot_config(snapshot, map_obj, request))
    context = {
        'mapId': mapid,
        'config': json.dumps(config),
        'map': map_obj,
        'preview': getattr(
            settings, 'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY', 'geoext'),
    }
    return render(request, template, context=context)
# NEW MAPS #
def clean_config(conf):
    """Strip server-private keys from a map viewer configuration.

    *conf* may be a JSON string, in which case a cleaned JSON string is
    returned; any other value is returned unchanged.
    """
    if isinstance(conf, basestring):
        config = json.loads(conf)
        # Keys that must never be echoed back to clients or snapshots.
        config_extras = [
            "tools",
            "rest",
            "homeUrl",
            "localGeoServerBaseUrl",
            "localCSWBaseUrl",
            "csrfToken",
            "db_datastore",
            "authorizedRoles"]
        for config_item in config_extras:
            if config_item in config:
                del config[config_item]
            # Guard: a config without a "map" section used to raise KeyError.
            if "map" in config and config_item in config["map"]:
                del config["map"][config_item]
        return json.dumps(config)
    else:
        return conf
def new_map(request, template='maps/map_new.html'):
    """Render the map composer for a brand-new (or copied) map.

    ``new_map_config`` may hand back an ``HttpResponse`` (e.g. an error
    status); in that case it is returned untouched instead of rendering
    *template*.
    """
    map_obj, config = new_map_config(request)
    if isinstance(config, HttpResponse):
        return config
    context_dict = {
        'config': config,
        'map': map_obj,
        'preview': getattr(
            settings,
            'GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY',
            'geoext'),
    }
    return render(
        request,
        template,
        context=context_dict)
def new_map_json(request):
    """GET: return the JSON config for a new (or copied) map.

    POST: create a new map from the posted viewer config and answer with
    its id; requires an authenticated user.  Any other method gets 405.
    """
    if request.method == 'GET':
        map_obj, config = new_map_config(request)
        if isinstance(config, HttpResponse):
            # new_map_config signalled an error response; pass it through.
            return config
        else:
            return HttpResponse(config)
    elif request.method == 'POST':
        if not request.user.is_authenticated():
            return HttpResponse(
                'You must be logged in to save new maps',
                content_type="text/plain",
                status=401
            )
        # Persist a bare map first so it has an id the viewer config can
        # be attached to.
        map_obj = Map(owner=request.user, zoom=0,
                      center_x=0, center_y=0)
        map_obj.save()
        map_obj.set_default_permissions()
        map_obj.handle_moderated_uploads()
        # If the body has been read already, use an empty string.
        # See https://github.com/django/django/commit/58d555caf527d6f1bdfeab14527484e4cca68648
        # for a better exception to catch when we move to Django 1.7.
        try:
            body = request.body
        except Exception:
            body = ''
        try:
            map_obj.update_from_viewer(body, context={'request': request, 'mapId': map_obj.id, 'map': map_obj})
            MapSnapshot.objects.create(
                config=clean_config(body),
                map=map_obj,
                user=request.user)
        except ValueError as e:
            return HttpResponse(str(e), status=400)
        else:
            return HttpResponse(
                json.dumps({'id': map_obj.id}),
                status=200,
                content_type='application/json'
            )
    else:
        return HttpResponse(status=405)
def new_map_config(request):
    '''
    View that creates a new map.

    If the query argument 'copy' is given, the initial map is
    a copy of the map with the id specified, otherwise the
    default map configuration is used.  If copy is specified
    and the map specified does not exist a 404 is returned.

    Returns ``(map_obj_or_None, json_config_string)``.

    NOTE(review): the 405 branch returns a single HttpResponse while
    callers unpack two values (``map_obj, config = ...``) - confirm that
    path is actually reachable through the URLconf.
    '''
    DEFAULT_MAP_CONFIG, DEFAULT_BASE_LAYERS = default_map_config(request)
    map_obj = None
    if request.method == 'GET' and 'copy' in request.GET:
        # Copy an existing map: reset title/abstract and, for logged-in
        # users, make them the owner of the copy.
        mapid = request.GET['copy']
        map_obj = _resolve_map(request, mapid, 'base.view_resourcebase')
        map_obj.abstract = DEFAULT_ABSTRACT
        map_obj.title = DEFAULT_TITLE
        if request.user.is_authenticated():
            map_obj.owner = request.user
        config = map_obj.viewer_json(request)
        map_obj.handle_moderated_uploads()
        # The copy must not carry the original map's id.
        del config['id']
    else:
        if request.method == 'GET':
            params = request.GET
        elif request.method == 'POST':
            params = request.POST
        else:
            return HttpResponse(status=405)
        if 'layer' in params:
            map_obj = Map(projection=getattr(settings, 'DEFAULT_MAP_CRS',
                                             'EPSG:3857'))
            config = add_layers_to_map_config(
                request, map_obj, params.getlist('layer'))
        else:
            config = DEFAULT_MAP_CONFIG
    return map_obj, json.dumps(config)
def add_layers_to_map_config(
        request, map_obj, layer_names, add_base_layers=True):
    """Build a GXP viewer config for *map_obj* containing *layer_names*.

    Layers that cannot be resolved, or that the requesting user may not
    view, are silently skipped.  As a side effect the map's center and
    zoom are derived from the bbox of the last processed layer.  Returns
    the viewer config dict with ``fromLayer`` set to True.
    """
    DEFAULT_MAP_CONFIG, DEFAULT_BASE_LAYERS = default_map_config(request)
    bbox = []
    layers = []
    for layer_name in layer_names:
        try:
            layer = _resolve_layer(request, layer_name)
        except ObjectDoesNotExist:
            # bad layer, skip
            continue
        except Http404:
            # can't find the layer, skip it.
            continue
        if not request.user.has_perm(
                'view_resourcebase',
                obj=layer.get_self_resource()):
            # invisible layer, skip inclusion
            continue
        # Swap elements 1 and 2 of the stored bbox - presumably converting
        # between [x0, x1, y0, y1] and [x0, y0, x1, y1] layouts; confirm
        # against the Layer.bbox property.
        layer_bbox = layer.bbox[0:4]
        bbox = layer_bbox[:]
        bbox[0] = layer_bbox[0]
        bbox[1] = layer_bbox[2]
        bbox[2] = layer_bbox[1]
        bbox[3] = layer_bbox[3]
        # assert False, str(layer_bbox)

        def decimal_encode(bbox):
            # Rearrange a bbox into the viewer's expected ordering.
            # NOTE(review): the Decimal branch below is dead code - the
            # comprehension has already converted every value to float, so
            # isinstance(o, decimal.Decimal) is never true.
            import decimal
            _bbox = []
            for o in [float(coord) for coord in bbox]:
                if isinstance(o, decimal.Decimal):
                    o = (str(o) for o in [o])
                _bbox.append(o)
            # Must be in the form : [x0, x1, y0, y1
            return [_bbox[0], _bbox[2], _bbox[1], _bbox[3]]

        def sld_definition(style):
            # Build the GXP style entry with a GetLegendGraphic URL.
            # urllib.quote: Python 2 import path.
            from urllib import quote
            _sld = {
                "title": style.sld_title or style.name,
                "legend": {
                    "height": "40",
                    "width": "22",
                    "href": layer.ows_url +
                    "?service=wms&request=GetLegendGraphic&format=image%2Fpng&width=20&height=20&layer=" +
                    quote(layer.service_typename, safe=''),
                    "format": "image/png"
                },
                "name": style.name
            }
            return _sld

        config = layer.attribute_config()
        if hasattr(layer, 'srid'):
            config['crs'] = {
                'type': 'name',
                'properties': layer.srid
            }
        # Add required parameters for GXP lazy-loading
        attribution = "%s %s" % (layer.owner.first_name,
                                 layer.owner.last_name) if layer.owner.first_name or layer.owner.last_name else str(
            layer.owner)
        srs = getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:3857')
        srs_srid = int(srs.split(":")[1]) if srs != "EPSG:900913" else 3857
        config["attribution"] = "<span class='gx-attribution-title'>%s</span>" % attribution
        config["format"] = getattr(
            settings, 'DEFAULT_LAYER_FORMAT', 'image/png')
        config["title"] = layer.title
        config["wrapDateLine"] = True
        config["visibility"] = True
        config["srs"] = srs
        config["bbox"] = decimal_encode(
            bbox_to_projection([float(coord) for coord in layer_bbox] + [layer.srid, ],
                               target_srid=int(srs.split(":")[1]))[:4])
        # Capability block advertises the bbox in the layer's own srid,
        # the map srs, EPSG:4326 and EPSG:900913.
        config["capability"] = {
            "abstract": layer.abstract,
            "name": layer.alternate,
            "title": layer.title,
            "queryable": True,
            "storeType": layer.storeType,
            "bbox": {
                layer.srid: {
                    "srs": layer.srid,
                    "bbox": decimal_encode(bbox)
                },
                srs: {
                    "srs": srs,
                    "bbox": decimal_encode(
                        bbox_to_projection([float(coord) for coord in layer_bbox] + [layer.srid, ],
                                           target_srid=srs_srid)[:4])
                },
                "EPSG:4326": {
                    "srs": "EPSG:4326",
                    "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
                    decimal_encode(bbox_to_projection(
                        [float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=4326)[:4])
                },
                "EPSG:900913": {
                    "srs": "EPSG:900913",
                    "bbox": decimal_encode(bbox) if layer.srid == 'EPSG:900913' else
                    decimal_encode(bbox_to_projection(
                        [float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=3857)[:4])
                }
            },
            "srs": {
                srs: True
            },
            "formats": ["image/png", "application/atom xml", "application/atom+xml", "application/json;type=utfgrid",
                        "application/openlayers", "application/pdf", "application/rss xml", "application/rss+xml",
                        "application/vnd.google-earth.kml", "application/vnd.google-earth.kml xml",
                        "application/vnd.google-earth.kml+xml", "application/vnd.google-earth.kml+xml;mode=networklink",
                        "application/vnd.google-earth.kmz", "application/vnd.google-earth.kmz xml",
                        "application/vnd.google-earth.kmz+xml", "application/vnd.google-earth.kmz;mode=networklink",
                        "atom", "image/geotiff", "image/geotiff8", "image/gif", "image/gif;subtype=animated",
                        "image/jpeg", "image/png8", "image/png; mode=8bit", "image/svg", "image/svg xml",
                        "image/svg+xml", "image/tiff", "image/tiff8", "image/vnd.jpeg-png",
                        "kml", "kmz", "openlayers", "rss", "text/html; subtype=openlayers", "utfgrid"],
            "attribution": {
                "title": attribution
            },
            "infoFormats": ["text/plain", "application/vnd.ogc.gml", "text/xml", "application/vnd.ogc.gml/3.1.1",
                            "text/xml; subtype=gml/3.1.1", "text/html", "application/json"],
            "styles": [sld_definition(s) for s in layer.styles.all()],
            "prefix": layer.alternate.split(":")[0] if ":" in layer.alternate else "",
            "keywords": [k.name for k in layer.keywords.all()] if layer.keywords else [],
            "llbbox": decimal_encode(bbox) if layer.srid == 'EPSG:4326' else
            decimal_encode(bbox_to_projection(
                [float(coord) for coord in layer_bbox] + [layer.srid, ], target_srid=4326)[:4])
        }
        # For the geoserver backend, probe the (cached) WMS capabilities
        # for a time dimension on this layer.
        all_times = None
        if check_ogc_backend(geoserver.BACKEND_PACKAGE):
            from geonode.geoserver.views import get_capabilities
            workspace, layername = layer.alternate.split(
                ":") if ":" in layer.alternate else (None, layer.alternate)
            # WARNING Please make sure to have enabled DJANGO CACHE as per
            # https://docs.djangoproject.com/en/2.0/topics/cache/#filesystem-caching
            wms_capabilities_resp = get_capabilities(
                request, layer.id, tolerant=True)
            if wms_capabilities_resp.status_code >= 200 and wms_capabilities_resp.status_code < 400:
                wms_capabilities = wms_capabilities_resp.getvalue()
                if wms_capabilities:
                    import xml.etree.ElementTree as ET
                    namespaces = {'wms': 'http://www.opengis.net/wms',
                                  'xlink': 'http://www.w3.org/1999/xlink',
                                  'xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
                    e = ET.fromstring(wms_capabilities)
                    for atype in e.findall(
                            "./[wms:Name='%s']/wms:Dimension[@name='time']" % (layer.alternate), namespaces):
                        dim_name = atype.get('name')
                        if dim_name:
                            dim_name = str(dim_name).lower()
                            if dim_name == 'time':
                                dim_values = atype.text
                                if dim_values:
                                    all_times = dim_values.split(",")
                                    break
            if all_times:
                config["capability"]["dimensions"] = {
                    "time": {
                        "name": "time",
                        "units": "ISO8601",
                        "unitsymbol": None,
                        "nearestVal": False,
                        "multipleVal": False,
                        "current": False,
                        "default": "current",
                        "values": all_times
                    }
                }
        if layer.storeType == "remoteStore":
            # Remote service layer: point the MapLayer at the remote OWS.
            service = layer.remote_service
            source_params = {}
            if service.type in ('REST_MAP', 'REST_IMG'):
                source_params = {
                    "ptype": service.ptype,
                    "remote": True,
                    "url": service.service_url,
                    "name": service.name,
                    "title": "[R] %s" % service.title}
            maplayer = MapLayer(map=map_obj,
                                name=layer.alternate,
                                ows_url=layer.ows_url,
                                layer_params=json.dumps(config),
                                visibility=True,
                                source_params=json.dumps(source_params)
                                )
        else:
            # Local layer: append the session's access token to the OWS
            # URL when it points at our own GeoServer.
            ogc_server_url = urlparse.urlsplit(
                ogc_server_settings.PUBLIC_LOCATION).netloc
            layer_url = urlparse.urlsplit(layer.ows_url).netloc
            access_token = request.session['access_token'] if request and 'access_token' in request.session else None
            if access_token and ogc_server_url == layer_url and 'access_token' not in layer.ows_url:
                url = '%s?access_token=%s' % (layer.ows_url, access_token)
            else:
                url = layer.ows_url
            maplayer = MapLayer(
                map=map_obj,
                name=layer.alternate,
                ows_url=url,
                # use DjangoJSONEncoder to handle Decimal values
                layer_params=json.dumps(config, cls=DjangoJSONEncoder),
                visibility=True
            )
        layers.append(maplayer)
    if bbox and len(bbox) >= 4:
        # Center/zoom the map on the bbox of the last processed layer.
        minx, maxx, miny, maxy = [float(coord) for coord in bbox]
        x = (minx + maxx) / 2
        y = (miny + maxy) / 2
        if getattr(
                settings,
                'DEFAULT_MAP_CRS',
                'EPSG:3857') == "EPSG:4326":
            center = list((x, y))
        else:
            center = list(forward_mercator((x, y)))
        if center[1] == float('-inf'):
            center[1] = 0
        BBOX_DIFFERENCE_THRESHOLD = 1e-5
        # Check if the bbox is invalid
        valid_x = (maxx - minx) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        valid_y = (maxy - miny) ** 2 > BBOX_DIFFERENCE_THRESHOLD
        if valid_x:
            width_zoom = math.log(360 / abs(maxx - minx), 2)
        else:
            width_zoom = 15
        if valid_y:
            height_zoom = math.log(360 / abs(maxy - miny), 2)
        else:
            height_zoom = 15
        map_obj.center_x = center[0]
        map_obj.center_y = center[1]
        map_obj.zoom = math.ceil(min(width_zoom, height_zoom))
    map_obj.handle_moderated_uploads()
    if add_base_layers:
        layers_to_add = DEFAULT_BASE_LAYERS + layers
    else:
        layers_to_add = layers
    config = map_obj.viewer_json(
        request, *layers_to_add)
    config['fromLayer'] = True
    return config
# MAPS DOWNLOAD #
def map_download(request, mapid, template='maps/map_download.html'):
    """
    Download all the layers of a map as a batch
    XXX To do, remove layer status once progress id done
    This should be fix because
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.download_resourcebase',
        _PERMISSION_MSG_VIEW)
    map_status = dict()
    if request.method == 'POST':

        def perm_filter(layer):
            # Only include layers the requesting user may view.
            return request.user.has_perm(
                'base.view_resourcebase',
                obj=layer.get_self_resource())

        mapJson = map_obj.json(perm_filter)
        # we need to remove duplicate layers
        j_map = json.loads(mapJson)
        j_layers = j_map["layers"]
        # NOTE(review): removing items from j_layers while iterating over
        # it can skip elements; iterate over a copy if dedup misbehaves.
        for j_layer in j_layers:
            if j_layer["service"] is None:
                j_layers.remove(j_layer)
                continue
            if (len([_l for _l in j_layers if _l == j_layer])) > 1:
                j_layers.remove(j_layer)
        mapJson = json.dumps(j_map)
        if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
            url = urljoin(settings.SITEURL,
                          reverse("qgis_server:download-map", kwargs={'mapid': mapid}))
            # qgis-server backend stop here, continue on qgis_server/views.py
            return redirect(url)
        # the path to geoserver backend continue here
        # NOTE(review): 'url' is only assigned in the qgis-server branch
        # above, which returns - so this line raises NameError on the
        # geoserver path.  The geoserver batch-download endpoint URL
        # appears to have been lost; confirm and restore it.
        resp, content = http_client.request(url, 'POST', body=mapJson)
        status = int(resp.status_code)
        if status == 200:
            map_status = json.loads(content)
            request.session["map_status"] = map_status
        else:
            raise Exception(
                'Could not start the download of %s. Error was: %s' %
                (map_obj.title, content))
    # Classify the map's layers for the download page: remote layers,
    # layers the user may not download, and downloadable (deduped) ones.
    locked_layers = []
    remote_layers = []
    downloadable_layers = []
    for lyr in map_obj.layer_set.all():
        if lyr.group != "background":
            if not lyr.local:
                remote_layers.append(lyr)
            else:
                ownable_layer = Layer.objects.get(alternate=lyr.name)
                if not request.user.has_perm(
                        'download_resourcebase',
                        obj=ownable_layer.get_self_resource()):
                    locked_layers.append(lyr)
                else:
                    # we need to add the layer only once
                    if len(
                            [_l for _l in downloadable_layers if _l.name == lyr.name]) == 0:
                        downloadable_layers.append(lyr)
    site_url = settings.SITEURL.rstrip('/') if settings.SITEURL.startswith('http') else settings.SITEURL
    return render(request, template, context={
        "geoserver": ogc_server_settings.PUBLIC_LOCATION,
        "map_status": map_status,
        "map": map_obj,
        "locked_layers": locked_layers,
        "remote_layers": remote_layers,
        "downloadable_layers": downloadable_layers,
        "site": site_url
    })
def map_wmc(request, mapid, template="maps/wmc.xml"):
    """Serialize an OGC Web Map Context Document (WMC) 1.1."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    if settings.SITEURL.startswith('http'):
        site_url = settings.SITEURL.rstrip('/')
    else:
        site_url = settings.SITEURL
    context = {
        'map': map_obj,
        'siteurl': site_url,
    }
    return render(request, template, context=context, content_type='text/xml')
def map_wms(request, mapid):
    """
    Publish local map layers as group layer in local OWS.

    /maps/:id/wms

    GET: return endpoint information for group layer,
    PUT: update existing or create new group layer.
    Other methods get a 405 via HttpResponseNotAllowed.
    """
    map_obj = _resolve_map(
        request,
        mapid,
        'base.view_resourcebase',
        _PERMISSION_MSG_VIEW)
    if request.method == 'PUT':
        try:
            layerGroupName = map_obj.publish_layer_group()
            response = dict(
                layerGroupName=layerGroupName,
                ows=getattr(ogc_server_settings, 'ows', ''),
            )
            return HttpResponse(
                json.dumps(response),
                content_type="application/json")
        except Exception:
            # Exception (not BaseException) so KeyboardInterrupt and
            # SystemExit still propagate instead of becoming a 500.
            return HttpResponseServerError()
    if request.method == 'GET':
        response = dict(
            layerGroupName=getattr(map_obj.layer_group, 'name', ''),
            ows=getattr(ogc_server_settings, 'ows', ''),
        )
        return HttpResponse(
            json.dumps(response),
            content_type="application/json")
    return HttpResponseNotAllowed(['PUT', 'GET'])
def maplayer_attributes(request, layername):
    """Return a layer's custom attribute labels/order as JSON."""
    layer = Layer.objects.get(alternate=layername)
    payload = json.dumps(layer.attribute_config())
    return HttpResponse(payload, content_type="application/json")
def snapshot_config(snapshot, map_obj, request):
    """
    Get the snapshot map configuration - look up WMS parameters (bunding box)
    for local GeoNode layers
    """
    # Match up the layer with it's source
    def snapsource_lookup(source, sources):
        # Return the key of the sources entry with a matching id, or None.
        # iteritems(): this module is Python 2.
        for k, v in sources.iteritems():
            if v.get("id") == source.get("id"):
                return k
        return None

    # Set up the proper layer configuration
    def snaplayer_config(layer, sources, request):
        cfg = layer.layer_config()
        src_cfg = layer.source_config()
        source = snapsource_lookup(src_cfg, sources)
        if source:
            cfg["source"] = source
        # NOTE(review): given these defaults, the condition is true when
        # ptype is missing, "gxp_wmscsource" or "gxp_gnsource" - confirm
        # the intended buffer semantics.
        if src_cfg.get(
                "ptype",
                "gxp_wmscsource") == "gxp_wmscsource" or src_cfg.get(
                "ptype",
                "gxp_gnsource") == "gxp_gnsource":
            cfg["buffer"] = 0
        return cfg

    decodedid = num_decode(snapshot)
    snapshot = get_object_or_404(MapSnapshot, pk=decodedid)
    if snapshot.map == map_obj.map:
        # Rebuild MapLayer objects from the stored snapshot config and
        # re-derive each layer's viewer configuration.
        config = json.loads(clean_config(snapshot.config))
        layers = [_l for _l in config["map"]["layers"]]
        sources = config["sources"]
        maplayers = []
        for ordering, layer in enumerate(layers):
            maplayers.append(
                layer_from_viewer_config(
                    map_obj.id,
                    MapLayer,
                    layer,
                    config["sources"][
                        layer["source"]],
                    ordering))
        # map_obj.map.layer_set.from_viewer_config(
        # map_obj, layer, config["sources"][layer["source"]], ordering))
        config['map']['layers'] = [
            snaplayer_config(
                _l,
                sources,
                request) for _l in maplayers]
    else:
        # Snapshot belongs to a different map: fall back to the live map.
        config = map_obj.viewer_json(request)
    return config
def get_suffix_if_custom(map):
    """Return the map's URL suffix when it uses a custom template.

    ``featuredurl`` wins over ``urlsuffix``; returns None when the map
    does not use a custom template or has neither value set.
    """
    if not map.use_custom_template:
        return None
    if map.featuredurl:
        return map.featuredurl
    if map.urlsuffix:
        return map.urlsuffix
    return None
def featured_map(request, site):
    """
    The view that returns the map composer opened to
    the map with the given official site url.
    """
    lookup = {'featuredurl': site}
    map_obj = resolve_object(request, Map, lookup,
                             permission='base.view_resourcebase',
                             permission_msg=_PERMISSION_MSG_VIEW)
    return map_view(request, str(map_obj.id))
def featured_map_info(request, site):
    '''
    main view for map resources, dispatches to correct
    view based on method and query args.
    '''
    lookup = {'featuredurl': site}
    map_obj = resolve_object(request, Map, lookup,
                             permission='base.view_resourcebase',
                             permission_msg=_PERMISSION_MSG_VIEW)
    return map_detail(request, str(map_obj.id))
def snapshot_create(request):
    """
    Create a permalinked map
    """
    conf = request.body
    # basestring: Python 2 - the body may be str or unicode.
    if isinstance(conf, basestring):
        config = json.loads(conf)
        snapshot = MapSnapshot.objects.create(
            config=clean_config(conf),
            map=Map.objects.get(
                id=config['id']))
        # Respond with the obfuscated snapshot id used in permalinks.
        return HttpResponse(num_encode(snapshot.id), content_type="text/plain")
    else:
        # NOTE(review): a malformed request is a client error - status 400
        # would be more appropriate than 500; confirm no caller depends on
        # the current code before changing it.
        return HttpResponse(
            "Invalid JSON",
            content_type="text/plain",
            status=500)
def ajax_snapshot_history(request, mapid):
    """Return the JSON-serialized snapshot history of a map."""
    map_obj = _resolve_map(
        request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW)
    history = []
    for snap in map_obj.snapshots:
        history.append(snap.json())
    return HttpResponse(json.dumps(history), content_type="text/plain")
def ajax_url_lookup(request):
    """Suggest map URL suffixes matching a POSTed "query" prefix.

    Expects a POST with a "query" field and, optionally, "mapid" (a map
    to exclude from the results, e.g. the one currently being edited).
    Responds with ``{"urls": [...], "count": N}``.
    """
    if request.method != 'POST':
        return HttpResponse(
            content='ajax user lookup requires HTTP POST',
            status=405,
            content_type='text/plain'
        )
    elif 'query' not in request.POST:
        return HttpResponse(
            content='use a field named "query" to specify a prefix to filter urls',
            content_type='text/plain')
    if request.POST['query'] != '':
        maps = Map.objects.filter(urlsuffix__startswith=request.POST['query'])
        # Use .get() so a request without a "mapid" field no longer raises
        # MultiValueDictKeyError.
        if request.POST.get('mapid', '') != '':
            maps = maps.exclude(id=request.POST['mapid'])
        json_dict = {
            'urls': [({'url': m.urlsuffix}) for m in maps],
            'count': maps.count(),
        }
    else:
        json_dict = {
            'urls': [],
            'count': 0,
        }
    return HttpResponse(
        content=json.dumps(json_dict),
        content_type='text/plain'
    )
@require_http_methods(["POST"])
def map_thumbnail(request, mapid):
    """Generate and store a thumbnail for the map from the POSTed spec.

    Tries the options-based generator first, falling back to rendering
    the raw request body.  Responds 500 with a plain-text message when no
    image could be produced or saving fails.
    """
    map_obj = _resolve_map(request, mapid)
    try:
        image = None
        try:
            image = _prepare_thumbnail_body_from_opts(
                request.body, request=request)
        except Exception:
            # Fall back to the legacy renderer.  Exception, not
            # BaseException, so KeyboardInterrupt/SystemExit propagate.
            image = _render_thumbnail(request.body)
        if not image:
            return HttpResponse(
                content=_('couldn\'t generate thumbnail'),
                status=500,
                content_type='text/plain'
            )
        filename = "map-%s-thumb.png" % map_obj.uuid
        map_obj.save_thumbnail(filename, image)
        return HttpResponse(_('Thumbnail saved'))
    except Exception:
        return HttpResponse(
            content=_('error saving thumbnail'),
            status=500,
            content_type='text/plain'
        )
def map_metadata_detail(
        request,
        mapid,
        template='maps/map_metadata_detail.html'):
    """Render the read-only metadata detail page for a map."""
    map_obj = _resolve_map(request, mapid, 'view_resourcebase')
    group = None
    if map_obj.group:
        try:
            group = GroupProfile.objects.get(slug=map_obj.group.name)
        except GroupProfile.DoesNotExist:
            group = None
    site_url = settings.SITEURL
    if site_url.startswith('http'):
        site_url = site_url.rstrip('/')
    context = {
        "resource": map_obj,
        "group": group,
        'SITEURL': site_url,
    }
    return render(request, template, context=context)
@login_required
def map_batch_metadata(request, ids):
    """Batch-edit metadata for the maps whose ids are listed in *ids*."""
    return batch_modify(request, ids, 'Map')
|
mcldev/geonode
|
geonode/maps/views.py
|
Python
|
gpl-3.0
| 47,567
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Questo script è stato realizzato per l'estrazione dei premi messi in
palio dall'associazione PerugiaGNULug durante lo svolgimento
dell'evento Linux Day 2015 svoltosi a Magione.
L'elenco dei partecipanti all'estrazione comprende tutti i
partecipanti all'evento registrati in www.eventbrite.it.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Marco Rufinelli'
__date__ = '24/10/2015'
__version__ = '0.3'
__licence__ = 'GPLv3'
__copyright__ = 'Copyright 2015, Associazione PerugiaGNULug'
from os import system
from argparse import ArgumentParser
from random import randint
from csv import reader
def _get_parameters():
"""
Funzione che recupera dalla riga di comando
i parametri utilizzati dallo script
"""
parser = ArgumentParser(description='Riffa dello GNULugPerugia')
parser.add_argument('filename',
help='Nomefile CSV')
parser.add_argument('--ticket_col',
type=int,
action='store',
default=1,
help='Colonna Biglietto nel file CVS')
parser.add_argument('--name_col',
type=int,
action='store',
default=13,
help='Colonna Nome nel file CVS')
parser.add_argument('--surname_col',
type=int,
action='store',
default=14,
help='Colonna Cognome nel file CVS')
parser.add_argument('--max_draws',
type=int,
action='store',
default=0,
help='Numero massimo di estrazioni')
return parser.parse_args()
def _show_header(show_best_wishes):
    """Clear the screen and print the raffle banner.

    When *show_best_wishes* is true, also print a good-luck message and
    wait for Enter (``raw_input`` - this script is Python 2).
    """
    system('clear')
    print('\n' * 2)
    print('L I N U X D A Y 2 0 1 5'.center(80))
    print('\n' * 2)
    print('E S T R A Z I O N E B I G L I E T T I'.center(80))
    print('\n' * 3)
    if show_best_wishes:
        print('\n' * 8)
        print('Buona fortuna! ' .rjust(80))
        raw_input()
    return
def _show_ticket(counter, ticket):
"""
Funzione che mostra il ticket
"""
print(' ' * 16 +
'{:2}° '.format(counter) +
'estratto: {0}'.format(ticket[0]) +
' - {0}'.format(ticket[1].title()))
return
def load_tickets(filename, number_col, name_col, surname_col):
    """Load raffle entries from a CSV export.

    Returns a list of ``(ticket_number, "Name Surname")`` tuples.  Rows
    whose ticket column is not purely numeric (e.g. the header row) are
    skipped.

    The file is opened with ``with`` so it is closed even when a
    malformed row makes the column indexing raise.
    """
    tickets = []
    with open(filename, 'r') as csv_file:
        for row in reader(csv_file, delimiter=',', quotechar='"'):
            if row[number_col].isdigit():
                tickets.append((row[number_col],
                                "{0} {1}".format(row[name_col], row[surname_col])))
    return tickets
def draw(tickets):
    """Pick one random ticket, remove it from the pool, and return it.

    Raises IndexError when the pool is empty.
    """
    if not tickets:
        raise IndexError('list parameter is empty')
    chosen = tickets[randint(0, len(tickets) - 1)]
    tickets.remove(chosen)
    return chosen
# Script entry point: show the banner, load the attendee CSV, then draw
# tickets one at a time (Enter advances to the next draw; this script is
# Python 2, hence raw_input).
if __name__ == '__main__':
    args = _get_parameters()
    _show_header(True)
    ticket_list = load_tickets(filename = args.filename,
                               number_col = args.ticket_col,
                               name_col = args.name_col,
                               surname_col = args.surname_col)
    _show_header(False)
    counter = 1
    # max_draws == 0 means "draw until the pool is exhausted".
    while(len(ticket_list) > 0
          and (counter <= args.max_draws
               or args.max_draws == 0)):
        _show_ticket(counter, draw(ticket_list))
        raw_input()
        counter += 1
|
glugpg/riffa
|
riffa.py
|
Python
|
gpl-3.0
| 4,447
|
import yadtminion
import os
import sys
def load_yadt_defaults():
    """Load yadt default settings, refusing the legacy /etc/yadt.services.

    Exits with status 1 (after a migration hint on stderr) when the
    unsupported legacy file is present.
    """
    if not os.path.isfile('/etc/yadt.services'):
        return load_yadt_defaults_newstyle()
    sys.stderr.write(
        "/etc/yadt.services is unsupported, please migrate to "
        "/etc/yadt.conf.d : "
        "https://github.com/yadt/yadtshell/wiki/Host-Configuration\n")
    sys.exit(1)
def load_yadt_defaults_newstyle():
    """Merge the YAML files in /etc/yadt.conf.d/ and return 'defaults'."""
    merged = yadtminion.yaml_merger.merge_yaml_files('/etc/yadt.conf.d/')
    return merged.get('defaults', {})
|
yadt/yadt-minion
|
src/main/python/yadtminion/configuration.py
|
Python
|
gpl-3.0
| 543
|
# coding = "utf-8"
"""
通过命令查询火车票
Usage:
tickets <from> <to> <date>
Examples:
tickets beijing shanghai 2016-09-21
Options:
-h 显示帮助菜单
(-g 高铁
-d 动车
-t 特快
-k 快速
-z 直达)(未实现)
"""
import requests
from docopt import docopt
from stations import stations
from prettytable import PrettyTable
def addColor(color, text):
    """Return *text* unchanged; *color* is currently ignored.

    ANSI coloring (for Unix terminals) had been disabled with an early
    ``return text``, leaving the escape-code table below it unreachable -
    that dead code has been removed.  Re-introduce a color table here
    (e.g. '\033[91m' red / '\033[92m' green / '\033[0m' reset) if
    terminal coloring is wanted again.
    """
    return text
class TrColl(object):
    """Parse raw 12306 train rows and pretty-print them as a table."""

    # Column titles: train no., stations, times, duration, then seat classes.
    headers = '车次 车站 时间 时长 商务座 一等座 二等座 软卧 硬卧 硬座'.split(' ')

    def __init__(self, rows):
        self.rows = rows

    def _getDur(self, rows):
        """Return one row's trip duration, e.g. '1h30m' or '45m'.

        The raw 'lishi' field is 'HH:MM'.  A zero hour is stripped
        entirely ('00:45' -> '45m'); a single leading zero is dropped
        ('01:30' -> '1h30m').
        """
        dur = rows.get('lishi').replace(':', 'h') + 'm'
        if dur.startswith('00'):
            # Strip the whole '00h' prefix (3 chars).  The previous
            # dur[4:] also swallowed the first digit of the minutes,
            # turning '00:45' into '5m'.
            dur = dur[3:]
        elif dur.startswith('0'):
            dur = dur[1:]
        return dur

    @property
    def trains(self):
        """Yield one display row (a list of cells) per train."""
        for row in self.rows:
            train = [
                str(row['station_train_code']) + '\n\n',  # train number
                addColor('green', str(row['from_station_name'])) + '\n' + addColor('red', str(row['to_station_name'])),  # from / to stations
                addColor('green', str(row['start_time'])) + '\n' + addColor('red', str(row['arrive_time'])),  # departure / arrival times
                self._getDur(row),  # trip duration
                row['swz_num'],  # business class
                row['zy_num'],  # first class
                row['ze_num'],  # second class
                row['rw_num'],  # soft sleeper
                row['yw_num'],  # hard sleeper
                row['yz_num']  # hard seat
            ]
            yield train

    def prettyPrint(self):
        """Render all trains as a MySQL-style ASCII table."""
        pt = PrettyTable()
        # Use the public field_names attribute instead of the private
        # _set_field_names helper, which newer prettytable releases
        # no longer expose.
        pt.field_names = self.headers
        for train in self.trains:
            pt.add_row(train)
        print(pt)
def cli():
    """Command-line entry point: query 12306 and print the result table."""
    args = docopt(__doc__)
    print(args)
    headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:47.0) Gecko/20100101 Firefox/47.0"}
    date = args['<date>']
    # Map human-readable station names to 12306 telecode via the
    # stations table.
    fromSt = stations.get(args['<from>'])
    toSt = stations.get(args['<to>'])
    url = 'https://kyfw.12306.cn/otn/lcxxcx/query?purpose_codes=ADULT&queryDate={0}&from_station={1}&to_station={2}'.format(date, fromSt, toSt)
    # NOTE(review): verify=False disables TLS certificate validation -
    # presumably to work around 12306's certificate chain, but it allows
    # man-in-the-middle interception; confirm it is still necessary.
    r = requests.get(url, headers=headers, verify=False)
    print('\n网址\t', r.url)
    print('响应状态码\t', r.status_code, '\n')
    rows = r.json()['data']['datas']
    trains = TrColl(rows)
    trains.prettyPrint()
# Script entry point: run the CLI when executed directly.
if __name__ == "__main__":
    cli()
|
Ginkgo-Biloba/Misc-Python
|
tickets/tickets.py
|
Python
|
gpl-3.0
| 2,550
|
import urllib2
url = "http://www.ask.com/web?q="
phrase = "butterfly"
w_url = url + phrase
data = urllib2.urlopen(w_url)
html = data.read()
print html
|
Duroktar/cookbook
|
Python/Web Request/request.py
|
Python
|
gpl-3.0
| 169
|
#!/usr/bin/env python
import sys
import os
from os.path import dirname, join
import subprocess
from datetime import date, datetime, timedelta
import calendar
import argparse
import math
import re
from input import load_from_csv
import output
import defaults
from daterange import get_date_range, in_range
from invoice import *
from settings import *
from project_settings import PROJECTS
# Command-line interface for the project/invoice overview tool.
parser = argparse.ArgumentParser(description='Get overview of your projects '
                                 'and generate invoices')
parser.add_argument(metavar='filename', help='specify input timesheet',
                    dest='inputfile')
group = parser.add_mutually_exclusive_group()
# NOTE(review): argparse argument groups created from a mutually
# exclusive group do not enforce exclusivity between -m/-y/-d and the
# flags added directly to `group` below - confirm the intended CLI
# constraints actually hold.
date_group = group.add_argument_group('Date filter')
date_group.add_argument('-m', metavar='month', help='filter by month',
                        default=None, dest='month', type=int)
date_group.add_argument('-y', metavar='year', help='filter by year',
                        default=None, dest='year', type=int)
date_group.add_argument('-d', metavar='day', help='filter by day',
                        default=None, dest='day', type=int)
# TODO Today/Week invalid with day, month, year set
group.add_argument('-t', '--today', action='store_true',
                   help='show today')
group.add_argument('--week', action='store_true',
                   help='show current week')
group.add_argument('--month', action='store_true', dest='cmonth',
                   help='show current month')
parser.add_argument('-p', metavar='project', help='filter by project',
                    default=None, dest='project')
parser.add_argument('-i', '--invoice', action='store_true',
                    help='generate invoice splitted per project')
parser.add_argument('-r', '--round', action="store_true",
                    help='round hours')
parser.add_argument('-u', '--uncleared', action='store_true',
                    help='Show only uncleared hours')
parser.add_argument('-w', '--hourly_rate', type=float,
                    help='overwrite hourly wage rate')
parser.add_argument('-c', '--charts', action='store_true',
                    help='Render charts in browser')
parser.add_argument('-o', '--overtime', action='store_true',
                    help='Show overtime')
parser.add_argument('--since', help='show hours starting with date')
parser.add_argument('--until', help='show hours until date')
args = parser.parse_args()
hours = {}  # projectname: hours
lst = []  # raw timesheet rows loaded from CSV
# Optional project filter from the command line.
if args.project:
    project = args.project
else:
    project = None
# Invoice output locations (generated PDFs and their LaTeX sources).
invoicedirectory = join(dirname(__file__), 'invoices')
pdfdir = join(invoicedirectory, 'pdf')
texdir = join(invoicedirectory, 'tex')
# TODO move invoicestuff to module
def get_invoicenumber(dte):
    """Return the next invoice number for date *dte* as 'YYMMNN'.

    NN is one more than the highest sequence number among the existing
    'YYMMNN.pdf' files in *pdfdir* for that month, starting at 1.
    """
    dt = datetime.strftime(dte, '%y%m')
    # Raw string with an escaped dot and end anchor: the previous pattern
    # "\d\d.pdf" let '.' match any character and triggered
    # invalid-escape-sequence warnings on modern Python.
    pattern = re.compile(dt + r"\d\d\.pdf$")
    c = sorted(f for f in os.listdir(pdfdir) if pattern.match(f))
    if c:
        inum = int(c[-1][4:-4]) + 1
    else:
        inum = 1
    return '%s%s' % (dt, str(inum).rjust(2, '0'))
today = date.today()


def project_or_default_settings(project, item):
    """Return *item* from the project's settings, falling back to defaults.

    Unknown projects (or a missing key) resolve against the module-wide
    `defaults`.
    """
    if project is None or not project.lower() in PROJECTS:
        return getattr(defaults, item)
    proj_settings = PROJECTS[project.lower()]
    return proj_settings.get(item, getattr(defaults, item, None))

# Assemble the invoice header fields from per-project or default settings.
vals = ['address', 'name', 'recipient', 'greeting', 'closing', 'currency',
        'vat', 'iban', 'bic']
settings = {v: project_or_default_settings(project, v) for v in vals}
settings.update(date=today, number=get_invoicenumber(today))
invoice = Invoice(**settings)
# load csv
try:
    lst = load_from_csv(args.inputfile)
except ValueError as e:
    print(e)
    sys.exit(1)
if len(lst) == 0:
    print('Nothing found in file')
    sys.exit(1)
# Derive the reporting window from the first entry plus the CLI filters.
first_date = lst[0]['date']
date_range = get_date_range(first_date, **vars(args))
if date_range:
    print('From %s to %s' % date_range)
rnd = args.round
uncleared = args.uncleared
rate = None
if args.hourly_rate:
    rate = args.hourly_rate
charts = args.charts
# Aggregate hours per project, honoring the date/project filters.
for w in lst:
    prj = w['project']
    if prj == 'CLEARED':
        # A 'CLEARED' marker row resets the accumulators when only
        # uncleared hours were requested.
        if uncleared:
            hours = {}
            invoice.projects = []
        continue
    if (not date_range or in_range(w['date'], date_range)) and \
            (not project or project.lower() == prj.lower()):
        h = float(w['hours'])
        if prj in hours:
            h += hours[prj]
        hours[prj] = h
if rnd:
    # Round each project's total up to full hours.
    for key, value in hours.items():
        hours[key] = math.ceil(value)
# Turn the per-project totals into invoice positions at the applicable
# hourly rate (CLI override wins over per-project settings).
for proj, h in hours.items():
    p = Project(proj)
    if rate is None:
        c_rate = project_or_default_settings(proj, 'rate')
    else:
        c_rate = rate
    p.add_fee(Fee('Development', c_rate, h))
    invoice.add_project(p)
if charts:
    for chart in output.charts(invoice):
        chart.render_in_browser()
if args.invoice:
    pdf = output.pdf(invoice)
    subprocess.call(('xdg-open', pdf))
if args.overtime:
    output.console(invoice, date_range=date_range)
else:
    output.console(invoice)
|
kelvan/project_tracker
|
hours_per_project.py
|
Python
|
gpl-3.0
| 4,990
|
#
# This file is part of vaisalad
#
# vaisalad is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# vaisalad is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vaisalad. If not, see <http://www.gnu.org/licenses/>.
"""roomalertd common code"""
from .config import Config
|
warwick-one-metre/vaisalad
|
warwick/observatory/vaisala/__init__.py
|
Python
|
gpl-3.0
| 716
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Web server for end2end tests.
This script gets called as a QProcess from end2end/conftest.py.
Some of the handlers here are inspired by the server project, but simplified
for qutebrowser's needs. Note that it probably doesn't handle e.g. multiple
parameters or headers with the same name properly.
"""
import sys
import json
import time
import signal
import os
import threading
from http import HTTPStatus
import cheroot.wsgi
import flask
# The Flask application serving every test endpoint below.
app = flask.Flask(__name__)
# Event used by /redirect-later with delay=-1; /redirect-later-continue
# sets it to release the waiting request.
_redirect_later_event = None
@app.route('/')
def root():
    """Serve a minimal landing page linking to /user-agent."""
    body = (b'qutebrowser test webserver, '
            b'<a href="/user-agent">user agent</a>')
    return flask.Response(body)
@app.route('/data/<path:path>')
@app.route('/data2/<path:path>')  # for per-URL settings
def send_data(path):
    """Send a given data file to qutebrowser.

    If a directory is requested, its index.html is sent instead.
    """
    if hasattr(sys, 'frozen'):
        base = os.path.realpath(os.path.dirname(sys.executable))
        data_dir = os.path.join(base, 'end2end', 'data')
    else:
        base = os.path.join(os.path.realpath(os.path.dirname(__file__)), '..')
        data_dir = os.path.join(base, 'data')
    print(base)
    if os.path.isdir(os.path.join(data_dir, path)):
        path += '/index.html'
    return flask.send_from_directory(data_dir, path)
@app.route('/redirect-later')
def redirect_later():
    """302 redirect to / after the given delay.

    If delay is -1, wait until a request on redirect-later-continue is done.
    """
    global _redirect_later_event
    delay = float(flask.request.args.get('delay', '1'))
    if delay == -1:
        _redirect_later_event = threading.Event()
        # Bug fix: threading.Event.wait() takes SECONDS; the original
        # timeout=30 * 1000 waited over eight hours instead of 30s.
        ok = _redirect_later_event.wait(timeout=30)
        assert ok
        _redirect_later_event = None
    else:
        time.sleep(delay)
    return flask.redirect('/')
@app.route('/redirect-later-continue')
def redirect_later_continue():
    """Continue a pending redirect-later request."""
    event = _redirect_later_event
    if event is None:
        return flask.Response(b'Timed out or no redirect pending.')
    event.set()
    return flask.Response(b'Continued redirect.')
@app.route('/redirect-self')
def redirect_self():
    """302 Redirects to itself."""
    target = flask.url_for('redirect_self')
    return app.make_response(flask.redirect(target))
@app.route('/redirect/<int:n>')
def redirect_n_times(n):
    """302 Redirects n times.

    NOTE(review): the chain's final hop targets /redirect/0, where the
    assert below fails (HTTP 500) -- presumably the intended terminator
    for the end2end tests; confirm before changing.
    """
    assert n > 0
    next_url = flask.url_for('redirect_n_times', n=n - 1)
    return flask.redirect(next_url)
@app.route('/relative-redirect')
def relative_redirect():
    """302 Redirect once, using a relative Location value."""
    r = app.make_response('')
    r.status_code = HTTPStatus.FOUND
    r.headers['Location'] = flask.url_for('root')
    return r
@app.route('/absolute-redirect')
def absolute_redirect():
    """302 Redirect once, using an absolute Location value."""
    r = app.make_response('')
    r.status_code = HTTPStatus.FOUND
    r.headers['Location'] = flask.url_for('root', _external=True)
    return r
@app.route('/redirect-to')
def redirect_to():
    """302/3XX Redirects to the given URL.

    The response is assembled by hand and the Location value encoded to
    UTF-8 so werkzeug doesn't "fix" the URL: the header must carry the
    exact string supplied in the query.
    """
    r = app.make_response('')
    r.status_code = HTTPStatus.FOUND
    r.headers['Location'] = flask.request.args['url'].encode('utf-8')
    return r
@app.route('/content-size')
def content_size():
    """Send two bytes of data without a content-size."""
    def _produce():
        yield b'*'
        time.sleep(0.2)
        yield b'*'
    r = flask.Response(_produce(), headers={
        "Content-Type": "application/octet-stream",
    })
    r.status_code = HTTPStatus.OK
    return r
@app.route('/twenty-mb')
def twenty_mb():
    """Send 20MB of data."""
    size = 20 * 1024 * 1024
    def _produce():
        yield b'*' * size
    r = flask.Response(_produce(), headers={
        "Content-Type": "application/octet-stream",
        "Content-Length": str(size),
    })
    r.status_code = HTTPStatus.OK
    return r
@app.route('/500-inline')
def internal_error_attachment():
    """A 500 error with Content-Disposition: inline."""
    hdrs = {
        "Content-Type": "application/octet-stream",
        "Content-Disposition": 'inline; filename="attachment.jpg"',
    }
    r = flask.Response(b"", headers=hdrs)
    r.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
    return r
@app.route('/500')
def internal_error():
    """A normal 500 error."""
    response = flask.make_response()
    response.status_code = HTTPStatus.INTERNAL_SERVER_ERROR
    return response
@app.route('/cookies')
def view_cookies():
    """Return the request cookies as JSON."""
    jar = flask.request.cookies
    return flask.jsonify(cookies=jar)
@app.route('/cookies/set')
def set_cookies():
    """Set cookie(s) from the query string, then redirect to /cookies."""
    response = app.make_response(flask.redirect(flask.url_for('view_cookies')))
    for name, value in flask.request.args.items():
        response.set_cookie(key=name, value=value)
    return response
@app.route('/basic-auth/<user>/<passwd>')
def basic_auth(user='user', passwd='passwd'):
    """Prompt the user for authorization using HTTP Basic Auth."""
    auth = flask.request.authorization
    authorized = bool(auth) and auth.username == user and auth.password == passwd
    if not authorized:
        response = flask.make_response()
        response.status_code = HTTPStatus.UNAUTHORIZED
        response.headers = {'WWW-Authenticate': 'Basic realm="Fake Realm"'}
        return response
    return flask.jsonify(authenticated=True, user=user)
@app.route('/drip')
def drip():
    """Drip data over a duration."""
    args = flask.request.args
    duration = float(args.get('duration'))
    numbytes = int(args.get('numbytes'))
    pause = duration / numbytes
    def _produce():
        for _ in range(numbytes):
            yield b'*'
            time.sleep(pause)
    response = flask.Response(_produce(), headers={
        "Content-Type": "application/octet-stream",
        "Content-Length": str(numbytes),
    })
    response.status_code = HTTPStatus.OK
    return response
@app.route('/404')
def status_404():
    """Plain 404 response with an empty body."""
    response = flask.make_response()
    response.status_code = HTTPStatus.NOT_FOUND
    return response
@app.route('/headers')
def view_headers():
    """Return HTTP headers."""
    hdrs = dict(flask.request.headers)
    return flask.jsonify(headers=hdrs)
@app.route('/response-headers')
def response_headers():
    """Return a set of response headers from the query string."""
    headers = flask.request.args
    # First pass: JSON body of the requested headers, with those headers
    # also attached to the response.
    response = flask.jsonify(headers)
    response.headers.extend(headers)
    # Second pass: re-serialize so the body also reflects the headers
    # flask itself added (Content-Type, Content-Length, ...), then attach
    # the requested headers to the fresh response again.
    response = flask.jsonify(dict(response.headers))
    response.headers.extend(headers)
    return response
@app.route('/query')
def query():
    """Echo the query arguments back as JSON."""
    args = flask.request.args
    return flask.jsonify(args)
@app.route('/user-agent')
def view_user_agent():
    """Return User-Agent."""
    agent = flask.request.headers['user-agent']
    return flask.jsonify({'user-agent': agent})
@app.route('/favicon.ico')
def favicon():
    """Serve the qutebrowser icon from the repository's icons directory."""
    repo_root = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                             '..', '..', '..')
    icon_dir = os.path.join(repo_root, 'icons')
    return flask.send_from_directory(icon_dir, 'qutebrowser.ico',
                                     mimetype='image/vnd.microsoft.icon')
@app.after_request
def log_request(response):
    """Log a webserver request as a JSON line on stderr."""
    req = flask.request
    path = req.full_path if req.query_string else req.path
    entry = {
        'verb': req.method,
        'path': path,
        'status': response.status_code,
    }
    print(json.dumps(entry), file=sys.stderr, flush=True)
    return response
class WSGIServer(cheroot.wsgi.Server):
    """A custom WSGIServer that prints a line on stderr when it's ready.

    Attributes:
        _ready: Internal state for the 'ready' property.
        _printed_ready: Whether the initial ready message was printed.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._ready = False
        self._printed_ready = False

    @property
    def ready(self):
        return self._ready

    @ready.setter
    def ready(self, value):
        # Announce readiness exactly once, mimicking Flask's dev server
        # banner so conftest.py can detect startup.
        if value and not self._printed_ready:
            msg = (' * Running on http://127.0.0.1:{}/ '
                   '(Press CTRL+C to quit)'.format(self.bind_addr[1]))
            print(msg, file=sys.stderr, flush=True)
            self._printed_ready = True
        self._ready = value
def main():
    """Start the test webserver on the port given as the first argument."""
    if hasattr(sys, 'frozen'):
        base = os.path.realpath(os.path.dirname(sys.executable))
        app.template_folder = os.path.join(base, 'end2end', 'templates')
    server = WSGIServer(('127.0.0.1', int(sys.argv[1])), app)
    # Stop cleanly on SIGTERM (sent by conftest.py) or Ctrl+C.
    signal.signal(signal.SIGTERM, lambda *args: server.stop())
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()


if __name__ == '__main__':
    main()
|
The-Compiler/qutebrowser
|
tests/end2end/fixtures/webserver_sub.py
|
Python
|
gpl-3.0
| 9,922
|
import requests
import os
import re
class ClAd:
    """A single classified ad: description text plus its link."""

    def __init__(self, desc="", link=""):
        self.desc = desc
        self.link = link

    def __lt__(self, other):
        # Ads order alphabetically by their description.
        return self.desc < other.desc
class ClPage:
    """One results page: its ads, the next-page link, and the page title."""

    def __init__(self, adVec, nextPage, title):
        self.adVec = adVec
        self.nextPage = nextPage
        self.title = title
def NextPage(text):
    """Extract the href target of the first 'next' button anchor, or None."""
    match = re.search('<a href=.*? class=\"button next\" .*?>.*?<\/a>', text)
    if match is None:
        return None
    anchor = match.group(0)
    anchor = anchor[anchor.find('href=') + len('href='):]
    return anchor[:anchor.find(' ')]
def Title(text):
    """Return the contents of the first <title> element in text."""
    start = text.find('<title>') + len('<title>')
    end = text.find('</title>')
    return text[start:end]
def ParseAds(text, sort=True, debug=False):
    """Parse a Craigslist results page into a ClPage.

    Collects every anchor in the content section whose inner text is
    non-empty, wraps each in a ClAd, and returns them with the page
    title and next-page link.
    """
    title = Title(text)
    nextp = NextPage(text)
    # Bug fix: a leftover debug 'print title,nextp' / quit() aborted the
    # whole process here, making all the parsing below unreachable.
    contentBegin = text.find('<div class="content">')
    contentEnd = text.find('<section class="blurbs">')
    cstr = text[contentBegin:contentEnd]
    off = len('<a href=\"')
    adVec = []
    for m in re.finditer('<a href=\".*?\">.*?<\/a>', cstr):
        pstr = cstr[m.start() + off:m.end()]
        l0 = pstr.find('"')   # end of the href value
        p0 = pstr.find('>')   # end of the opening tag
        p1 = pstr.find('<')   # start of the closing tag
        if debug:
            print('------------------')
            print(pstr)
            print('%d %s' % (l0, pstr[l0:l0 + 5]))
            print('%d %s' % (p0, pstr[p0:p0 + 5]))
            print('%d %s' % (p1, pstr[p1:p1 + 5]))
        # Keep only anchors with visible text between '>' and '<'.
        if p1 - p0 > 1:
            descr = pstr[p0 + 1:p1]
            link = pstr[:l0]
            adVec.append(ClAd(descr, link))
            if debug:
                print('appending %s %s' % (link, descr))
    # Bug fix: the original tested 'if sort == False', sorting only when
    # the caller asked NOT to sort; the sort=True default never sorted.
    if sort:
        adVec = sorted(adVec)
    return ClPage(adVec, nextp, title)
def GetWebPage(link, file=None):
    """HTTP GET the given link; optionally save the body to 'file'.

    Returns the requests Response object. (The parameter name 'file'
    shadows a builtin but is kept for interface compatibility.)
    """
    req = requests.get(link)
    if file is not None:
        # Bug fix: the handle was opened but never closed, so buffered
        # text could be lost; 'with' guarantees flush + close.
        with open(file, 'w') as handle:
            handle.write(req.text)
    return req
|
jrrpanix/master
|
examples/python/web/webCraigslistExample.py
|
Python
|
gpl-3.0
| 2,037
|
import sys

if len(sys.argv) != 2:
    print("Usage: python3 " + sys.argv[0] + " [freq_file]")
    exit(1)


def decode_byte(freqs, lo, hi):
    """Decode one byte from detected frequency bins [lo, hi).

    Bit i is set iff frequency (lo + i) was detected; the string is
    reversed so the highest frequency becomes the most significant bit,
    matching the original encoding.
    """
    bits = ""
    for freq in range(lo, hi):
        bits += "1" if freq in freqs else "0"
    return bits[::-1]


with open(sys.argv[1]) as f:
    # First line holds the originally transmitted string; the rest are
    # space-separated detected-frequency lists, one line per symbol pair.
    correct_string = f.readline()
    correct_binary = ""
    received_string = ""
    received_binary = ""
    for line in f:
        # Bug fix: the original inner loops reused the name 'f' as the
        # loop variable, rebinding the open file handle to an int while
        # the file was still being iterated.
        freqs = list(map(int, line.strip().split(" ")))
        for lo, hi in ((195, 203), (203, 211)):
            byte_bits = decode_byte(freqs, lo, hi)
            received_binary += byte_bits + " "
            received_string += chr(int(byte_bits, 2)) + " "
    for c in correct_string.strip():
        correct_binary += bin(ord(c))[2:].zfill(8) + " "
        sys.stdout.write(c + " ")
    sys.stdout.write("\n")
    print(correct_binary)
    print(received_binary)
    print(received_string)
|
Sound-drop/SoundDrop
|
test.py
|
Python
|
gpl-3.0
| 911
|
# Copyright (C) 2014 Vangelis Tasoulas <vangelis@tasoulas.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import logging
import ConfigParser
import argparse
import traceback
import datetime
from libs import globalvars
import helperfuncs
# Logger that writes to both the log file and the console.
LOG = logging.getLogger('default.' + __name__)
# Logger that writes to the console only.
LOG_CONSOLE = logging.getLogger('console.' + __name__)
# Module-wide quiet flag; updated from the CLI options in _set_logging().
_quiet = False
def parse_all_conf():
    """Parse CLI options, set up logging, and read the config file.

    Returns the parsed options namespace. Validation runs last so that
    command line options can override values read from the file.
    """
    options = _command_Line_Options()
    _set_logging(options)
    _read_config_file(options)
    _validate_command_Line_Options(options)
    return options
def _command_Line_Options():
    """
    Define the accepted command line arguments in this function
    Read the documentation of argparse for more advanced command line
    argument parsing examples
    http://docs.python.org/2/library/argparse.html
    """
    ########################################
    #### Add user defined options here #####
    ########################################
    parser = argparse.ArgumentParser(description=globalvars.PROGRAM_NAME + " version " + globalvars.VERSION)
    parser.add_argument("-v", "--version",
                        action="version", default=argparse.SUPPRESS,
                        version=globalvars.VERSION,
                        help="show program's version number and exit")
    parser.add_argument("-a", "--append-file",
                        action="store_true",
                        default=False,
                        dest="append_file",
                        help="If the FILE exists, append to the end of the file. Otherwise, create a new one with an extended filename.")
    parser.add_argument("-b", "--output-file",
                        action="store",
                        dest="output_file",
                        metavar="FILE",
                        help="Filename to save the collected data file")
    parser.add_argument("-c", "--delimiter",
                        action="store",
                        dest="delimiter",
                        metavar="CHAR",
                        help="CHAR is a single character to be used for field separation in the output FILE")
    parser.add_argument("-d", "--list-available-plugins",
                        action="store_true",
                        default=False,
                        dest="list_available_plugins",
                        help="Prints a list of the available plugins and exit")
    parser.add_argument("-e", "--list-active-plugins",
                        action="store_true",
                        default=False,
                        dest="list_active_plugins",
                        help="Prints a list of the active plugins located under the chosen active-directory and exit")
    parser.add_argument("-f", "--active-plugins-dir",
                        action="store",
                        dest="active_plugins_dir",
                        metavar="DIR_ACTIVE",
                        help="Define the active-directory to load plugins from for this experiment.")
    parser.add_argument("-g", "--custom-plugins-dir",
                        action="store",
                        dest="custom_plugins_dir",
                        metavar="DIR_PLUGINS",
                        help="Define a directory containing more plugins to be loaded.")
    # Don't use -h
    # it is used by --help
    parser.add_argument("-i", "--interval-between-sampling",
                        action="store",
                        type=float,
                        dest="intervalBetweenSamples",
                        metavar="FLOAT",
                        help="A FLOAT number which is the sleeping time given in seconds between sampling. If the value is 0 or negative, instant sampling will be initiated after each previous one")
    parser.add_argument("-j", "--only-print-samples",
                        action="store_true",
                        dest="only_print_samples",
                        help="Enabling this flag, will disable saving samples in a file. They will only be printed instead.")
    parser.add_argument("-k", "--test-plugin",
                        action="store",
                        dest="test_plugin_id_name",
                        metavar="PLUGIN_IDENTIFIER_NAME",
                        help="Use this option for debugging newly created plugins. Get the plugin identified name by using the '--list-available-plugins' option")
    ########################################
    #### End user defined options here #####
    ########################################
    parser.add_argument("-C", "--conf-file",
                        action="store",
                        default=globalvars.conf_file,
                        dest="conffile",
                        metavar="CONF_FILE",
                        help="CONF_FILE where the configuration will be read from (Default: will search for file '" +
                        globalvars.DEFAULT_CONFIG_FILENAME +
                        "' in the known predefined locations")
    parser.add_argument("-D", "--daemon",
                        action="store_true",
                        default=globalvars.daemonMode,
                        dest="isDaemon",
                        help="run in daemon mode")
    ### Add logging options in a different options group
    loggingGroupOpts = parser.add_argument_group('Logging Options', 'List of optional logging options')
    loggingGroupOpts.add_argument("-Q", "--quiet",
                                  action="store_true",
                                  default=_quiet,
                                  dest="isQuiet",
                                  help="Disable logging in the console but still keep logs in a file. This options is forced when run in daemon mode.")
    # NOTE(review): the help text says "(Default: INFO)" but the actual
    # default below is NOTSET -- confirm which is intended.
    loggingGroupOpts.add_argument("-L", "--loglevel",
                                  action="store",
                                  default="NOTSET",
                                  dest="loglevel",
                                  metavar="LOG_LEVEL",
                                  help="LOG_LEVEL might be set to: CRITICAL, ERROR, WARNING, INFO, DEBUG. (Default: INFO)")
    loggingGroupOpts.add_argument("-F", "--logfile",
                                  action="store",
                                  default=globalvars.log_file,
                                  dest="logfile",
                                  metavar="LOG_FILE",
                                  help="LOG_FILE where the logs will be stored. If the file exists, text will be appended," +
                                  " otherwise the file will be created (Default: " + globalvars.log_file + ")")
    # NOTE(review): trailing semicolon is a harmless Python-ism leftover.
    return parser.parse_args();
def replaceVariablesInConfStrings(string):
    """Replace variables in configuration strings.

    Acceptable conf variables are:
       %{ts}        Replaced with the current Unix timestamp
       %{datetime}  Replaced with current date and time in
                    this format YYYYmmDD_HHMMSS
    """
    current_time = datetime.datetime.now()
    # Bug fix: the original if/elif returned after the first match, so a
    # string containing BOTH variables only had %{ts} expanded.
    if(string.find('%{ts}') != -1):
        # NOTE(review): strftime('%s') is a platform-specific extension
        # (glibc); confirm if Windows portability matters.
        string = string.replace('%{ts}', current_time.strftime('%s'))
    if(string.find('%{datetime}') != -1):
        string = string.replace('%{datetime}',
                                current_time.strftime('%Y%m%d_%H%M%S'))
    return string
def _validate_command_Line_Options(opts):
    """
    Validate the passed arguments if needed
    """
    # Deamon, Quiet and Only Print Samples options are validated in _set_logging
    globalvars.list_available_plugins = opts.list_available_plugins
    globalvars.list_active_plugins = opts.list_active_plugins
    globalvars.append_file = opts.append_file
    # --only-print-samples is mutually exclusive with any file output.
    if(globalvars.only_print_samples and globalvars.append_file):
        print("ERROR: You cannot combine '--only-print-samples' and '--append-file' switches. Please choose only one of them.")
        exit(globalvars.exitCode.INCORRECT_USAGE)
    elif(globalvars.only_print_samples and opts.output_file):
        print("ERROR: You cannot combine '--only-print-samples' and '--output-file' switches. Please choose only one of them.")
        exit(globalvars.exitCode.INCORRECT_USAGE)
    if(opts.test_plugin_id_name):
        globalvars.test_plugin = opts.test_plugin_id_name
    if(opts.output_file):
        # Expand %{ts}/%{datetime} variables in the output filename.
        globalvars.output_file = replaceVariablesInConfStrings(opts.output_file)
    if(opts.delimiter):
        # NOTE(review): 'string-escape' is a Python 2-only codec; under
        # Python 3, str has no .decode() -- confirm target interpreter.
        escaped_string = opts.delimiter.decode('string-escape')
        if(len(escaped_string) > 1):
            LOG.error("Delimiter must be a single character.")
            exit(globalvars.exitCode.INCORRECT_USAGE)
        globalvars.delimiter = escaped_string
    if(opts.active_plugins_dir):
        globalvars.active_plugins_dir = opts.active_plugins_dir
    if(opts.custom_plugins_dir):
        # Custom plugin directories take priority in the search order.
        globalvars.plugin_directories.insert(0, opts.custom_plugins_dir)
    if(helperfuncs.is_number(opts.intervalBetweenSamples)):
        # Zero or negative interval means back-to-back sampling.
        if(opts.intervalBetweenSamples > 0):
            globalvars.intervalBetweenSamples = opts.intervalBetweenSamples
        else:
            globalvars.intervalBetweenSamples = 0
def _set_logging(opts):
    """Apply daemon/quiet/print-only flags, then configure logging."""
    global _quiet
    globalvars.daemonMode = opts.isDaemon
    globalvars.only_print_samples = opts.only_print_samples
    # Daemon mode forces quiet: there is no console to log to.
    if globalvars.daemonMode:
        _quiet = True
    else:
        _quiet = opts.isQuiet
    if(globalvars.only_print_samples and globalvars.daemonMode):
        print("ERROR: You cannot combine '--only-print-samples' and '--daemon' switches. Please choose only one of them.")
        exit(globalvars.exitCode.INCORRECT_USAGE)
    elif(globalvars.only_print_samples and _quiet):
        print("ERROR: You cannot combine '--only-print-samples' and '--quiet' switches. Please choose only one of them.")
        exit(globalvars.exitCode.INCORRECT_USAGE)
    _set_log_file(opts.logfile)
    _set_log_level(opts.loglevel)
def _set_log_level(loglevel):
    """Validate the user-supplied log level name and apply it.

    Exits with INCORRECT_USAGE when the name is not a valid logging level.
    """
    # getattr returns None when the name is not a logging attribute.
    numeric_log_level = getattr(logging, loglevel.upper(), None)
    # Simplified: the original raised ValueError inside a try block only
    # to catch it on the very next line; a plain condition is equivalent
    # (isinstance cannot raise here).
    if not isinstance(numeric_log_level, int):
        LOG.error('Invalid log level: %s' % loglevel)
        LOG.info('\tLog level must be set to one of the following:')
        LOG.info('\t CRITICAL <- Least verbose')
        LOG.info('\t ERROR')
        LOG.info('\t WARNING')
        LOG.info('\t INFO')
        LOG.info('\t DEBUG <- Most verbose')
        exit(globalvars.exitCode.INCORRECT_USAGE)
    if(numeric_log_level != logging.NOTSET):
        # If logging is set from the command line
        # define the logging policy
        globalvars.FileLogLevel = numeric_log_level
        _configureLogging()
    if(globalvars.FileLogLevel == logging.DEBUG):
        LOG.info("Debug level logging is enabled: Very Verbose")
def _set_log_file(logfile):
    """Resolve the log file path, validate it, and configure logging."""
    globalvars.log_file = os.path.abspath(logfile)
    path = globalvars.log_file
    # Reject paths that exist but are not regular files (e.g. directories).
    if(os.path.exists(path) and not os.path.isfile(path)):
        # print() is used because the LOG file handler cannot write yet.
        print("ERROR: " + globalvars.log_file + " exists but it is not a file.")
        exit(globalvars.exitCode.INCORRECT_USAGE)
    _configureLogging()
def _read_config_file(opts):
    """Locate and read the configuration file, updating globalvars.

    The file named by opts.conffile is used when provided; otherwise the
    predefined CONFIG_FILE_LOCATIONS are searched for the default
    filename. A missing configuration file is not an error -- defaults
    simply remain in effect.
    """
    conffile = opts.conffile
    if(conffile != ""):
        # Get the absolute path
        globalvars.conf_file = os.path.abspath(conffile)
        # If the path exists and it is NOT a file, exit with an error message
        if(os.path.exists(globalvars.conf_file) and not os.path.isfile(globalvars.conf_file)):
            LOG.error(globalvars.conf_file + " exists but it is not a file.")
            exit(globalvars.exitCode.INCORRECT_USAGE)
    else:
        for confpath in globalvars.CONFIG_FILE_LOCATIONS:
            globalvars.conf_file = "{0}/{1}".format(confpath,globalvars.DEFAULT_CONFIG_FILENAME)
            if(os.path.isfile(globalvars.conf_file)):
                break
            else:
                globalvars.conf_file = ""
                LOG.debug("No configuration file found in: " + globalvars.conf_file)
    # If globalvars.conf_file var is still "" in this point, no configuration file is defined
    if(globalvars.conf_file == ""):
        LOG.debug("No configuration files found in the known paths")
        return
    try:
        with open(globalvars.conf_file):
            LOG.debug("Reading configuration from file " + globalvars.conf_file)
            config = ConfigParser.ConfigParser()
            config.read(globalvars.conf_file)
            ############### Add your code to read the configuration file here ################
            CurrentSection = "Default"
            if(config.has_section(CurrentSection)):
                if(config.has_option(CurrentSection, "output_file")):
                    globalvars.output_file = replaceVariablesInConfStrings(config.get(CurrentSection, "output_file"))
                    LOG.debug("output_file = " + globalvars.output_file)
                if(config.has_option(CurrentSection, "append_file")):
                    globalvars.append_file = config.getboolean(CurrentSection, "append_file")
                    LOG.debug("append_file = " + str(globalvars.append_file))
                if(config.has_option(CurrentSection, "delimiter")):
                    # NOTE(review): 'string-escape' is a Python 2-only codec.
                    globalvars.delimiter = config.get(CurrentSection, "delimiter").decode('string-escape')
                    if(len(globalvars.delimiter) > 1):
                        LOG.error("Delimiter must be a single character. Make sure that you do not enclose the character in quotes.")
                        exit(globalvars.exitCode.INCORRECT_USAGE)
                    LOG.debug("delimiter = " + str(globalvars.delimiter))
                if(config.has_option(CurrentSection, "active_plugins_dir")):
                    globalvars.active_plugins_dir = config.get(CurrentSection, "active_plugins_dir")
                    LOG.debug("active_plugins_dir = " + globalvars.active_plugins_dir)
                if(config.has_option(CurrentSection, "custom_plugins_dir")):
                    globalvars.custom_plugins_dir = config.get(CurrentSection, "custom_plugins_dir")
                    LOG.debug("custom_plugins_dir = " + globalvars.custom_plugins_dir)
                if(config.has_option(CurrentSection, "intervalBetweenSamples")):
                    globalvars.intervalBetweenSamples = config.getfloat(CurrentSection, "intervalBetweenSamples")
                    LOG.debug("intervalBetweenSamples = " + str(globalvars.intervalBetweenSamples))
            LOG.debug("Finished reading configuration from file " + globalvars.conf_file)
            return
    except Exception:
        # Bug fix: the original bare 'except:' also caught the SystemExit
        # raised by exit() above, replacing the intended INCORRECT_USAGE
        # exit code with a spurious traceback and a FAILURE exit.
        LOG.error("\n" + traceback.format_exc())
        exit(globalvars.exitCode.FAILURE)
def _configureLogging():
    """(Re)build the 'default' and 'console' loggers and their handlers."""
    log_default = logging.getLogger('default')
    log_console = logging.getLogger('console')
    # File output carries timestamps with microsecond precision.
    file_fmt = DefaultLoggingFormatter("%(asctime)s, [%(levelname)8s], [%(module)18s:%(lineno)-5d] %(message)s", "%Y-%m-%d %H:%M:%S.%f, %s%f")
    console_fmt = VisualFormatter()
    log_default.setLevel(logging.DEBUG)
    log_console.setLevel(logging.INFO)
    # File handler for the default logger.
    file_handler = logging.FileHandler(globalvars.log_file)
    file_handler.setLevel(globalvars.FileLogLevel)
    file_handler.setFormatter(file_fmt)
    # Console handlers for both loggers.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(globalvars.CONSOLE_LOG_LEVEL)
    console_handler.setFormatter(console_fmt)
    console_only_handler = logging.StreamHandler()
    console_only_handler.setLevel(globalvars.CONSOLE_LOG_LEVEL)
    console_only_handler.setFormatter(console_fmt)
    # Drop handlers from any previous call so messages aren't duplicated.
    log_default.handlers = []
    log_console.handlers = []
    if(globalvars.only_print_samples):
        # Only ERROR (40) and above may reach the console in this mode.
        console_handler.setLevel(40)
    if(_quiet):
        # A level far above CRITICAL effectively mutes the console.
        console_handler.setLevel(1000)
        console_only_handler.setLevel(1000)
    log_default.addHandler(file_handler)
    log_default.addHandler(console_handler)
    log_console.addHandler(console_only_handler)
class DefaultLoggingFormatter(logging.Formatter):
    """logging.Formatter with microsecond-capable timestamps.

    The stock Formatter goes through struct_time, which cannot express
    microseconds (%f). Converting record.created with datetime instead
    makes %f usable in date format strings.
    """
    converter = datetime.datetime.fromtimestamp

    def formatTime(self, record, datefmt=None):
        stamp = self.converter(record.created)
        if datefmt:
            return stamp.strftime(datefmt)
        base = stamp.strftime("%Y-%m-%d %H:%M:%S")
        return "%s,%03d" % (base, record.msecs)
class VisualFormatter(DefaultLoggingFormatter):
    """
    This visual formatter allows the user to
    Define different formats and date formats
    for different Log Levels
    fmt sets the global format
    datefmt sets the global date format
    xxx_fmt sets the format for each xxx level
    xxx_datefmt set the date format for each xxx level
    """
    # NOTE(review): format() swaps self._fmt per level, which works on
    # Python 2's logging.Formatter; Python 3 formats through
    # self._style._fmt instead -- confirm the target interpreter.
    def __init__(self, fmt=None, datefmt=None,
                 dbg_fmt=None, dbg_datefmt=None,
                 info_fmt=None, info_datefmt=None,
                 warn_fmt=None, warn_datefmt=None,
                 err_fmt=None, err_datefmt=None,
                 crit_fmt=None, crit_datefmt=None):
        # If fmt is set, instantiate the format
        # for each level to this of fmt
        # Otherwise set the default values
        if(fmt is not None):
            self._dbg_fmt = fmt
            self._info_fmt = fmt
            self._warn_fmt = fmt
            self._err_fmt = fmt
            self._crit_fmt = fmt
        else:
            # Defaults: INFO is bare; every other level is prefixed with
            # its (right-aligned) level name.
            self._dbg_fmt = "[{0}] {1}".format("%(levelname)8s", "%(message)s")
            self._info_fmt = "%(message)s"
            self._warn_fmt = self._dbg_fmt
            self._err_fmt = self._dbg_fmt
            self._crit_fmt = self._dbg_fmt
        # If each individual format has been set
        # then choose this one for each specific level
        if(dbg_fmt):
            self._dbg_fmt = dbg_fmt
        if(info_fmt):
            self._info_fmt = info_fmt
        if(warn_fmt):
            self._warn_fmt = warn_fmt
        if(err_fmt):
            self._err_fmt = err_fmt
        if(crit_fmt):
            self._crit_fmt = crit_fmt
        # instantiate the date format for each level
        # to this of datefmt
        self._dbg_datefmt = datefmt
        self._info_datefmt = datefmt
        self._warn_datefmt = datefmt
        self._err_datefmt = datefmt
        self._crit_datefmt = datefmt
        # If each individual date format has been set
        # then choose this one for each specific level
        if(dbg_datefmt):
            self._dbg_datefmt = dbg_datefmt
        if(info_datefmt):
            self._info_datefmt = info_datefmt
        if(warn_datefmt):
            self._warn_datefmt = warn_datefmt
        if(err_datefmt):
            self._err_datefmt = err_datefmt
        if(crit_datefmt):
            self._crit_datefmt = crit_datefmt

    def format(self, record):
        # Replace the original format with one customized by logging level
        if record.levelno == logging.DEBUG:
            self.datefmt = self._dbg_datefmt
            self._fmt = self._dbg_fmt
        elif record.levelno == logging.INFO:
            self.datefmt = self._info_datefmt
            self._fmt = self._info_fmt
        elif record.levelno == logging.WARNING:
            self.datefmt = self._warn_datefmt
            self._fmt = self._warn_fmt
        elif record.levelno == logging.ERROR:
            self.datefmt = self._err_datefmt
            self._fmt = self._err_fmt
        elif record.levelno == logging.CRITICAL:
            self.datefmt = self._crit_datefmt
            self._fmt = self._crit_fmt
        # Call the original formatter class to do the grunt work
        result = logging.Formatter.format(self, record)
        return result
|
cyberang3l/sysdata-collector
|
libs/parseoptions.py
|
Python
|
gpl-3.0
| 23,353
|
# Copyright 2012 VPAC
#
# This file is part of django-placard.
#
# django-placard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# django-placard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django-placard If not, see <http://www.gnu.org/licenses/>.
from tldap.schemas import rfc
import tldap.methods as base
import tldap.methods.common as common
import tldap.methods.pwdpolicy as pwdpolicy
import tldap.manager
#######
# rfc #
#######
class rfc_account(base.baseMixin):
    """LDAP user-account entry built from RFC person + POSIX schemas."""
    # LDAP object-class schemas merged into each entry.
    schema_list = [
        rfc.person, rfc.organizationalPerson, rfc.inetOrgPerson, rfc.pwdPolicy,
        rfc.posixAccount, rfc.shadowAccount]
    # Behaviour mixins: person fields, password policy, account + shadow logic.
    mixin_list = [common.personMixin, pwdpolicy.pwdPolicyMixin, common.accountMixin, common.shadowMixin]
    class Meta:
        # Search base comes from the LDAP_ACCOUNT_BASE setting; entries
        # are matched/created as posixAccount, keyed by uid.
        base_dn_setting = "LDAP_ACCOUNT_BASE"
        object_classes = set(['top'])
        search_classes = set(['posixAccount'])
        pk = 'uid'
    # manager attribute links accounts to their manager (by dn), and the
    # reverse relation lists everyone an account manages.
    managed_by = tldap.manager.ManyToOneDescriptor(this_key='manager', linked_cls='placard.test.schemas.rfc_account', linked_key='dn')
    manager_of = tldap.manager.OneToManyDescriptor(this_key='dn', linked_cls='placard.test.schemas.rfc_account', linked_key='manager')
    # Alternate name for the homeDirectory attribute.
    unixHomeDirectory = tldap.manager.AliasDescriptor("homeDirectory")
class rfc_group(base.baseMixin):
    """LDAP posixGroup entry with primary/secondary member relations."""
    schema_list = [rfc.posixGroup]
    mixin_list = [common.groupMixin]
    class Meta:
        # Search base comes from the LDAP_GROUP_BASE setting; entries are
        # matched/created as posixGroup, keyed by cn.
        base_dn_setting = "LDAP_GROUP_BASE"
        object_classes = set(['top'])
        search_classes = set(['posixGroup'])
        pk = 'cn'
    # accounts
    # Accounts whose gidNumber matches this group (primary membership),
    # and accounts listed in memberUid (secondary membership).
    primary_accounts = tldap.manager.OneToManyDescriptor(this_key='gidNumber', linked_cls=rfc_account, linked_key='gidNumber', related_name="primary_group")
    secondary_accounts = tldap.manager.ManyToManyDescriptor(this_key='memberUid', linked_cls=rfc_account, linked_key='uid', linked_is_p=False, related_name="secondary_groups")
|
VPAC/django-placard
|
placard/test/schemas.py
|
Python
|
gpl-3.0
| 2,306
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
# =============================================================================
#
# Dialog implementation generated from a XDL file.
#
# Created: Sat Jul 9 15:06:44 2016
# by: unodit 0.5
#
# WARNING! All changes made in this file will be overwritten
# if the file is generated again!
#
# =============================================================================
import uno
import unohelper
from com.sun.star.awt import XActionListener
from com.sun.star.task import XJobExecutor
class Test_convert_UI(unohelper.Base, XActionListener, XJobExecutor):
    """
    Auto-generated UNO dialog wrapper (produced by unodit from an XDL file).

    __init__ builds the dialog model and populates it with one of each
    common UNO control (buttons, labels, list/combo boxes, date/time/
    numeric/currency/pattern fields, tree, progress bar, ...), then wires
    the command buttons' action events to actionPerformed().

    NOTE(review): generated code — any edit here is lost if the dialog is
    regenerated from the XDL source, so the control-creation sequence is
    left exactly as emitted by the generator.
    """
    def __init__(self):
        # Bootstrap the UNO component context, service manager and toolkit.
        self.LocalContext = uno.getComponentContext()
        self.ServiceManager = self.LocalContext.ServiceManager
        self.Toolkit = self.ServiceManager.createInstanceWithContext("com.sun.star.awt.ExtToolkit", self.LocalContext)
        # -----------------------------------------------------------
        #               Create dialog and insert controls
        # -----------------------------------------------------------
        # --------------create dialog container and set model and properties
        self.DialogContainer = self.ServiceManager.createInstanceWithContext("com.sun.star.awt.UnoControlDialog", self.LocalContext)
        self.DialogModel = self.ServiceManager.createInstance("com.sun.star.awt.UnoControlDialogModel")
        self.DialogContainer.setModel(self.DialogModel)
        self.DialogModel.Closeable = True
        self.DialogModel.Moveable = True
        self.DialogModel.Name = "Default"
        self.DialogModel.Height = 220
        self.DialogModel.PositionX = "60"
        self.DialogModel.Width = 300
        self.DialogModel.PositionY = "60"
        # --------- create an instance of TimeField control, set properties ---
        self.TimeField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlTimeFieldModel")
        self.TimeField1.TimeMax = uno.createUnoStruct("com.sun.star.util.Time", Hours = 22, Minutes = 59, Seconds = 0, NanoSeconds = 0, IsUTC = True)
        self.TimeField1.PositionX = "160"
        self.TimeField1.TimeMin = uno.createUnoStruct("com.sun.star.util.Time", Hours = 10, Minutes = 0, Seconds = 0, NanoSeconds = 0, IsUTC = True)
        self.TimeField1.TabIndex = 13
        self.TimeField1.Name = "TimeField1"
        self.TimeField1.Time = uno.createUnoStruct("com.sun.star.util.Time", Hours = 14, Minutes = 5, Seconds = 3, NanoSeconds = 0, IsUTC = True)
        self.TimeField1.Text = "Set Time"
        self.TimeField1.PositionY = "52"
        self.TimeField1.Width = 60
        self.TimeField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("TimeField1", self.TimeField1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label7 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label7.PositionX = "158"
        self.Label7.Label = "PatternField"
        self.Label7.TabIndex = 28
        self.Label7.Name = "Label7"
        self.Label7.PositionY = "185"
        self.Label7.Width = 60
        self.Label7.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label7", self.Label7)
        # --------- create an instance of FixedText control, set properties ---
        self.Label1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label1.PositionX = "83"
        self.Label1.Label = "New"
        self.Label1.TabIndex = 7
        self.Label1.Name = "Label1"
        self.Label1.PositionY = "8"
        self.Label1.Width = 20
        self.Label1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label1", self.Label1)
        # --------- create an instance of FixedLine control, set properties ---
        self.FixedLine2 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedLineModel")
        self.FixedLine2.Orientation = 1
        self.FixedLine2.TabIndex = 20
        self.FixedLine2.Name = "FixedLine2"
        self.FixedLine2.Height = 210
        self.FixedLine2.PositionX = "150"
        self.FixedLine2.Width = 4
        self.FixedLine2.PositionY = "5"
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FixedLine2", self.FixedLine2)
        # --------- create an instance of ListBox control, set properties ---
        self.ListBox1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlListBoxModel")
        self.ListBox1.PositionX = "83"
        self.ListBox1.Align = 1
        self.ListBox1.TabIndex = 9
        self.ListBox1.MultiSelection = True
        self.ListBox1.StringItemList = ('one', 'two')
        self.ListBox1.Name = "ListBox1"
        self.ListBox1.PositionY = "45"
        self.ListBox1.Width = 60
        self.ListBox1.Height = 82
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("ListBox1", self.ListBox1)
        # --------- create an instance of Button control, set properties ---
        self.CommandButton3 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlButtonModel")
        self.CommandButton3.PositionX = "43"
        self.CommandButton3.Label = "CommandButton3"
        self.CommandButton3.TabIndex = 2
        self.CommandButton3.EnableVisible = False
        self.CommandButton3.Name = "CommandButton3"
        self.CommandButton3.PositionY = "33"
        self.CommandButton3.Width = 26
        self.CommandButton3.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("CommandButton3", self.CommandButton3)
        # add the action listener
        self.DialogContainer.getControl('CommandButton3').addActionListener(self)
        self.DialogContainer.getControl('CommandButton3').setActionCommand('CommandButton3_OnClick')
        # --------- create an instance of FixedText control, set properties ---
        self.Label13 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label13.PositionX = "235"
        self.Label13.Label = "SpinButton"
        self.Label13.TabIndex = 34
        self.Label13.Name = "Label13"
        self.Label13.PositionY = "156"
        self.Label13.Width = 60
        self.Label13.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label13", self.Label13)
        # --------- create an instance of FixedText control, set properties ---
        self.Label6 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label6.PositionX = "158"
        self.Label6.Label = "FormattedField"
        self.Label6.TabIndex = 27
        self.Label6.Name = "Label6"
        self.Label6.PositionY = "150"
        self.Label6.Width = 60
        self.Label6.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label6", self.Label6)
        # --------- create an instance of Edit control, set properties ---
        self.TextField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlEditModel")
        self.TextField1.PositionX = "103"
        self.TextField1.TabIndex = 8
        self.TextField1.Name = "TextField1"
        self.TextField1.Text = "New Text"
        self.TextField1.PositionY = "8"
        self.TextField1.Width = 40
        self.TextField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("TextField1", self.TextField1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label8 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label8.PositionX = "83"
        self.Label8.Label = "ProgressBar"
        self.Label8.TabIndex = 29
        self.Label8.Name = "Label8"
        self.Label8.PositionY = "170"
        self.Label8.Width = 60
        self.Label8.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label8", self.Label8)
        # --------- create an instance of .tree.TreeControl control, set properties ---
        self.TreeControl1 = self.DialogModel.createInstance("com.sun.star.awt.tree.TreeControlModel")
        self.TreeControl1.PositionX = "235"
        self.TreeControl1.TabIndex = 35
        self.TreeControl1.Name = "TreeControl1"
        self.TreeControl1.PositionY = "53"
        self.TreeControl1.Width = 59
        self.TreeControl1.Height = 100
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("TreeControl1", self.TreeControl1)
        # --------- create an instance of SpinButton control, set properties ---
        self.SpinButton1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlSpinButtonModel")
        self.SpinButton1.PositionX = "235"
        self.SpinButton1.TabIndex = 22
        self.SpinButton1.Name = "SpinButton1"
        self.SpinButton1.PositionY = "167"
        self.SpinButton1.Width = 60
        self.SpinButton1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("SpinButton1", self.SpinButton1)
        # --------- create an instance of FileControl control, set properties ---
        self.FileControl1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFileControlModel")
        self.FileControl1.PositionX = "235"
        self.FileControl1.TabIndex = 18
        self.FileControl1.Name = "FileControl1"
        self.FileControl1.Text = "/home/sasa"
        self.FileControl1.PositionY = "17"
        self.FileControl1.Width = 60
        self.FileControl1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FileControl1", self.FileControl1)
        # --------- create an instance of ComboBox control, set properties ---
        self.ComboBox1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlComboBoxModel")
        self.ComboBox1.PositionX = "83"
        self.ComboBox1.TabIndex = 10
        self.ComboBox1.StringItemList = ('one', 'two')
        self.ComboBox1.Name = "ComboBox1"
        self.ComboBox1.Dropdown = True
        self.ComboBox1.PositionY = "143"
        self.ComboBox1.Width = 60
        self.ComboBox1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("ComboBox1", self.ComboBox1)
        # --------- create an instance of FixedLine control, set properties ---
        self.FixedLine1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedLineModel")
        self.FixedLine1.Orientation = 1
        self.FixedLine1.TabIndex = 19
        self.FixedLine1.Name = "FixedLine1"
        self.FixedLine1.Height = 210
        self.FixedLine1.PositionX = "75"
        self.FixedLine1.Width = 4
        self.FixedLine1.PositionY = "5"
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FixedLine1", self.FixedLine1)
        # --------- create an instance of NumericField control, set properties ---
        self.NumericField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlNumericFieldModel")
        self.NumericField1.PositionX = "158"
        self.NumericField1.Value = 55555
        self.NumericField1.TabIndex = 14
        self.NumericField1.Name = "NumericField1"
        self.NumericField1.PositionY = "87"
        self.NumericField1.Width = 60
        self.NumericField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("NumericField1", self.NumericField1)
        # --------- create an instance of GroupBox control, set properties ---
        self.FrameControl1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlGroupBoxModel")
        self.FrameControl1.PositionX = "9"
        self.FrameControl1.Label = "FrameControl1"
        self.FrameControl1.TabIndex = 36
        self.FrameControl1.Name = "FrameControl1"
        self.FrameControl1.PositionY = "147"
        self.FrameControl1.Width = 60
        self.FrameControl1.Height = 65
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FrameControl1", self.FrameControl1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label11 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label11.PositionX = "235"
        self.Label11.Label = "FileControl"
        self.Label11.TabIndex = 32
        self.Label11.Name = "Label11"
        self.Label11.PositionY = "6"
        self.Label11.Width = 60
        self.Label11.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label11", self.Label11)
        # --------- create an instance of FixedText control, set properties ---
        self.Label2 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label2.PositionX = "158"
        self.Label2.Label = "DateField"
        self.Label2.TabIndex = 23
        self.Label2.Name = "Label2"
        self.Label2.PositionY = "6"
        self.Label2.Width = 60
        self.Label2.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label2", self.Label2)
        # --------- create an instance of FormattedField control, set properties ---
        self.FormattedField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFormattedFieldModel")
        self.FormattedField1.EffectiveValue = 2000
        self.FormattedField1.EffectiveMin = 1000
        self.FormattedField1.EffectiveMax = 5000
        self.FormattedField1.PositionX = "158"
        self.FormattedField1.TabIndex = 16
        self.FormattedField1.Name = "FormattedField1"
        self.FormattedField1.Text = "2,000"
        self.FormattedField1.PositionY = "160"
        self.FormattedField1.Width = 60
        self.FormattedField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FormattedField1", self.FormattedField1)
        # --------- create an instance of ProgressBar control, set properties ---
        self.ProgressBar1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlProgressBarModel")
        self.ProgressBar1.PositionX = "85"
        self.ProgressBar1.TabIndex = 11
        self.ProgressBar1.Name = "ProgressBar1"
        self.ProgressBar1.ProgressValue = 50
        self.ProgressBar1.PositionY = "184"
        self.ProgressBar1.Width = 60
        self.ProgressBar1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("ProgressBar1", self.ProgressBar1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label5 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label5.PositionX = "158"
        self.Label5.Label = "CurrencyField"
        self.Label5.TabIndex = 26
        self.Label5.Name = "Label5"
        self.Label5.PositionY = "114"
        self.Label5.Width = 60
        self.Label5.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label5", self.Label5)
        # --------- create an instance of DateField control, set properties ---
        self.DateField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlDateFieldModel")
        self.DateField1.RepeatDelay = 50
        self.DateField1.DateMin = uno.createUnoStruct("com.sun.star.util.Date", Year = 1820, Month = 1, Day = 1)
        self.DateField1.Date = uno.createUnoStruct("com.sun.star.util.Date", Year = 2015, Month = 3, Day = 26)
        self.DateField1.Dropdown = True
        self.DateField1.DateMax = uno.createUnoStruct("com.sun.star.util.Date", Year = 2020, Month = 1, Day = 1)
        self.DateField1.PositionX = "158"
        self.DateField1.TabIndex = 12
        self.DateField1.Name = "DateField1"
        self.DateField1.DateFormat = 9
        self.DateField1.Text = "Set Date"
        self.DateField1.PositionY = "17"
        self.DateField1.Width = 60
        self.DateField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("DateField1", self.DateField1)
        # --------- create an instance of CheckBox control, set properties ---
        self.CheckBox1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlCheckBoxModel")
        self.CheckBox1.State = True
        self.CheckBox1.TabIndex = 4
        self.CheckBox1.Name = "CheckBox1"
        self.CheckBox1.Label = "CheckBox1"
        self.CheckBox1.Height = 20
        self.CheckBox1.TriState = True
        self.CheckBox1.PositionY = "121"
        self.CheckBox1.Width = 60
        self.CheckBox1.PositionX = "9"
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("CheckBox1", self.CheckBox1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label12 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label12.PositionX = "235"
        self.Label12.Label = "TreeControl"
        self.Label12.TabIndex = 33
        self.Label12.Name = "Label12"
        self.Label12.PositionY = "41"
        self.Label12.Width = 60
        self.Label12.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label12", self.Label12)
        # --------- create an instance of FixedLine control, set properties ---
        self.FixedLine3 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedLineModel")
        self.FixedLine3.Orientation = 1
        self.FixedLine3.TabIndex = 21
        self.FixedLine3.Name = "FixedLine3"
        self.FixedLine3.Height = 210
        self.FixedLine3.PositionX = "225"
        self.FixedLine3.Width = 4
        self.FixedLine3.PositionY = "5"
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("FixedLine3", self.FixedLine3)
        # --------- create an instance of RadioButton control, set properties ---
        self.OptionButton1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlRadioButtonModel")
        self.OptionButton1.PositionX = "14"
        self.OptionButton1.State = True
        self.OptionButton1.TabIndex = 5
        self.OptionButton1.Label = "OptionButton1"
        self.OptionButton1.Name = "OptionButton1"
        self.OptionButton1.PositionY = "162"
        self.OptionButton1.Width = 50
        self.OptionButton1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("OptionButton1", self.OptionButton1)
        # --------- create an instance of PatternField control, set properties ---
        self.PatternField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlPatternFieldModel")
        self.PatternField1.PositionX = "158"
        self.PatternField1.EditMask = "NNLNNLLLLL"
        self.PatternField1.TabIndex = 17
        self.PatternField1.Name = "PatternField1"
        self.PatternField1.LiteralMask = "__.__.2015"
        self.PatternField1.StrictFormat = True
        self.PatternField1.Text = "Pattern Field Text"
        self.PatternField1.PositionY = "194"
        self.PatternField1.Width = 60
        self.PatternField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("PatternField1", self.PatternField1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label4 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label4.PositionX = "158"
        self.Label4.Label = "NumericField"
        self.Label4.TabIndex = 25
        self.Label4.Name = "Label4"
        self.Label4.PositionY = "76"
        self.Label4.Width = 60
        self.Label4.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label4", self.Label4)
        # --------- create an instance of FixedText control, set properties ---
        self.Label3 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label3.PositionX = "158"
        self.Label3.Label = "TimeField"
        self.Label3.TabIndex = 24
        self.Label3.Name = "Label3"
        self.Label3.PositionY = "42"
        self.Label3.Width = 60
        self.Label3.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label3", self.Label3)
        # --------- create an instance of FixedText control, set properties ---
        self.Label10 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label10.PositionX = "83"
        self.Label10.Label = "ComboBox"
        self.Label10.TabIndex = 31
        self.Label10.Name = "Label10"
        self.Label10.PositionY = "133"
        self.Label10.Width = 60
        self.Label10.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label10", self.Label10)
        # --------- create an instance of ImageControl control, set properties ---
        self.ImageControl1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlImageControlModel")
        self.ImageControl1.PositionX = "9"
        self.ImageControl1.TabIndex = 3
        # NOTE(review): hard-coded absolute path from the generating machine.
        self.ImageControl1.ImageURL = uno.fileUrlToSystemPath("file:///home/sasa/Pictures/coquette-icons-set/png/32x32/add_home.png")
        self.ImageControl1.Name = "ImageControl1"
        self.ImageControl1.PositionY = "56"
        self.ImageControl1.Width = 60
        self.ImageControl1.Height = 60
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("ImageControl1", self.ImageControl1)
        # --------- create an instance of CurrencyField control, set properties ---
        self.CurrencyField1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlCurrencyFieldModel")
        self.CurrencyField1.PositionX = "158"
        self.CurrencyField1.ShowThousandsSeparator = True
        self.CurrencyField1.TabIndex = 15
        self.CurrencyField1.Value = 5555
        self.CurrencyField1.Name = "CurrencyField1"
        self.CurrencyField1.PositionY = "124"
        self.CurrencyField1.Width = 60
        self.CurrencyField1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("CurrencyField1", self.CurrencyField1)
        # --------- create an instance of FixedText control, set properties ---
        self.Label9 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlFixedTextModel")
        self.Label9.PositionX = "85"
        self.Label9.Label = "ListBox"
        self.Label9.TabIndex = 30
        self.Label9.Name = "Label9"
        self.Label9.PositionY = "35"
        self.Label9.Width = 60
        self.Label9.Height = 10
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("Label9", self.Label9)
        # --------- create an instance of RadioButton control, set properties ---
        self.OptionButton2 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlRadioButtonModel")
        self.OptionButton2.PositionX = "14"
        self.OptionButton2.Label = "OptionButton2"
        self.OptionButton2.TabIndex = 6
        self.OptionButton2.Name = "OptionButton2"
        self.OptionButton2.PositionY = "187"
        self.OptionButton2.Width = 50
        self.OptionButton2.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("OptionButton2", self.OptionButton2)
        # --------- create an instance of Button control, set properties ---
        self.CommandButton1 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlButtonModel")
        self.CommandButton1.PositionX = "9"
        self.CommandButton1.Align = 0
        self.CommandButton1.TabIndex = 0
        self.CommandButton1.Label = "CommandButton1"
        self.CommandButton1.Name = "CommandButton1"
        self.CommandButton1.Toggle = 1
        self.CommandButton1.PositionY = "8"
        self.CommandButton1.Width = 60
        self.CommandButton1.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("CommandButton1", self.CommandButton1)
        # add the action listener
        self.DialogContainer.getControl('CommandButton1').addActionListener(self)
        self.DialogContainer.getControl('CommandButton1').setActionCommand('CommandButton1_OnClick')
        # --------- create an instance of Button control, set properties ---
        self.CommandButton2 = self.DialogModel.createInstance("com.sun.star.awt.UnoControlButtonModel")
        self.CommandButton2.PositionX = "9"
        self.CommandButton2.Label = "CommandButton2"
        self.CommandButton2.TabIndex = 1
        self.CommandButton2.Enabled = True
        self.CommandButton2.Name = "CommandButton2"
        self.CommandButton2.PositionY = "33"
        self.CommandButton2.Width = 29
        self.CommandButton2.Height = 20
        # inserts the control model into the dialog model
        self.DialogModel.insertByName("CommandButton2", self.CommandButton2)
        # add the action listener
        self.DialogContainer.getControl('CommandButton2').addActionListener(self)
        self.DialogContainer.getControl('CommandButton2').setActionCommand('CommandButton2_OnClick')
    # -----------------------------------------------------------
    #               Action events
    # -----------------------------------------------------------
    def actionPerformed(self, oActionEvent):
        """
        XActionListener callback: dispatch a button click to the matching
        CommandButtonN_OnClick handler based on the action command set in
        __init__. Handlers are expected to be defined in a subclass or in
        the companion generated module.
        """
        if oActionEvent.ActionCommand == 'CommandButton3_OnClick':
            self.CommandButton3_OnClick()
        if oActionEvent.ActionCommand == 'CommandButton1_OnClick':
            self.CommandButton1_OnClick()
        if oActionEvent.ActionCommand == 'CommandButton2_OnClick':
            self.CommandButton2_OnClick()
# ----------------- END GENERATED CODE ----------------------------------------
|
kelsa-pi/unodit
|
examples/convert dialog/src/pythonpath/Test_convert_UI.py
|
Python
|
gpl-3.0
| 26,618
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2008-2010 Francisco José Rodríguez Bogado #
# <frbogado@novaweb.es> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
'''
Created on 23/08/2011
@author: bogado
Clase heredable que construye automáticamente los controles para los
formularios de consulta y listado de datos.
'''
NUMERO_ARBITRARIAMENTE_GRANDE = 1000    # If the number of results to show
# times the number of columns is greater than this, the TreeView is updated
# "off-line", i.e. without refreshing on screen until all the data has been
# fetched.
MAX_DATA_GRAFICA = 5    # Maximum number of bars in the chart, to keep it
# from looking cramped and ugly with lots of data.
import pygtk
pygtk.require('2.0')
import gtk, gtk.glade
from ventana import Ventana
from framework import pclases
import utils
from formularios.ventana_progreso import VentanaProgreso
import datetime, os
from gettext import gettext as _
from gettext import bindtextdomain, textdomain
bindtextdomain("cican",
os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "l10n")))
textdomain("cican")
from gobject import TYPE_INT64 as ENTERO
from gobject import TYPE_STRING as CADENA
from ventana_generica import camelize, humanizar, \
_abrir_en_ventana_nueva, GALACTUS, \
build_widget_valor, labelize
from formularios.graficas import charting
class VentanaConsulta(Ventana):
"""
Ventana de consulta que se construye dinámicamente dependiendo de la
clase donde buscar los resultados.
"""
    def __init__(self, clase = None, objeto = None, usuario = None,
                 run = True, ventana_marco = 'ventana_consulta.glade',
                 campos = None, filtros = [], filtros_defecto = {},
                 agrupar_por = None):
        """
        Receive the base class the window is built around.
        Optionally receive an object to show in it and the user logged
        into the system.
        Build the window and start the Gtk loop if `run` is True.
        @param clase: pclases class. May be omitted and derived from
                      `objeto`.
        @param objeto: instantiated pclases object. May be omitted when
                       `clase` is given.
        @param usuario: application user object, or None.
        @param run: if True start Gtk; otherwise only build the window in
                    memory.
        @param ventana_marco: glade file with the controls to show.
        @param campos: list or tuple of fields used to build the columns
                       of the results TreeView.
        @param filtros: list of filters inserted automatically. They must
                        be fields (as strings, like `campos`) of the main
                        table. The filter widget shown depends on the
                        field type.
        @param filtros_defecto: dict mapping fields to the default values
                                their filters will take. Every key must be
                                in the filters list or it is ignored.
        @param agrupar_por: field the results are grouped by in the
                            TreeView, or None to show them "flat".

        NOTE(review): the mutable default arguments (filtros=[],
        filtros_defecto={}) are shared across calls; kept as-is to
        preserve the original behaviour.
        """
        # First of all, if the arguments arrive as PUIDs/strings because we
        # were invoked from the command line or similar, turn them into
        # in-memory objects:
        if isinstance(clase, str):
            if not clase.startswith("pclases"):
                clase = "pclases.%s" % clase
            # NOTE(review): eval on a string constrained to "pclases.X";
            # still unsafe if the string comes from an untrusted source.
            clase = eval(clase)
        if hasattr(objeto, "isdigit") and objeto.isdigit():
            objeto = int(objeto)
        if isinstance(objeto, str):
            objeto = pclases.getObjetoPUID(objeto)
        elif isinstance(objeto, int) and clase:
            objeto = clase.get(objeto)
        if isinstance(usuario, str):
            try:
                usuario = pclases.getObjetoPUID(usuario)
            except ValueError:
                # Not a PUID: try numeric id first, then user name.
                if hasattr(usuario, "isdigit") and usuario.isdigit():
                    usuario = pclases.Usuario.selectBy(id = int(usuario))[0]
                else:
                    usuario = pclases.Usuario.selectBy(usuario = usuario)[0]
        elif isinstance(usuario, int):
            usuario = pclases.Usuario.get(usuario)
        # And now on with the actual construction
        if not clase and not objeto:
            raise ValueError, _("Debe pasar al menos la clase o un objeto.")
        self.__usuario = usuario
        if isinstance(clase, pclases.SQLObject):
            # The first positional argument was actually an object.
            clase, objeto = None, clase
        if not clase:
            self.clase = objeto.__class__
        else:
            if isinstance(clase, str):
                clase = getattr(pclases, clase)
            self.clase = clase
        self.objeto = objeto
        self.resultados = pclases.SQLtuple(())
        Ventana.__init__(self, ventana_marco, objeto, usuario)
        self.columnas = campos
        self.filtros = filtros
        self.filtros_defecto = filtros_defecto
        self.agrupar_por = agrupar_por
        self.inicializar_ventana()
        titulo_ventana = self.wids['ventana'].get_title()
        if (not titulo_ventana or titulo_ventana.strip() == ""
            or titulo_ventana == "Oh, the humanity!"):
            self.wids['ventana'].set_title(self.clase.__name__)
        # Generic buttons:
        connections = {'b_salir/clicked': self.salir,
                       'b_actualizar/clicked': self.actualizar_ventana,
                       'b_exportar/clicked': self.exportar,
                       'b_imprimir/clicked': self.imprimir,
                       'b_fini/clicked': self.set_fecha_ini,
                       'b_ffin/clicked': self.set_fecha_fin,
                       'e_fini/focus-out-event': self.show_fecha,
                       'e_ffin/focus-out-event': self.show_fecha,
                      }
        try:
            self.wids['e_fini'].set_text("")
        except KeyError:    # It has no date filters
            connections.pop("e_fini/focus-out-event")
            connections.pop("b_fini/clicked")
        #self.wids['e_ffin'].set_text(
        #    utils.fecha.str_fecha(datetime.date.today()))
        try:
            self.wids['e_ffin'].set_text("")
        except KeyError:    # It has no date filters
            connections.pop("e_ffin/focus-out-event")
            connections.pop("b_ffin/clicked")
        self.add_connections(connections)
        self._funcs_actualizacion_extra = []    # Functions that will be
            # called whenever the window is refreshed.
        if run:
            gtk.main()
def set_fecha_ini(self, boton):
utils.ui.set_fecha(self.wids['e_fini'])
def set_fecha_fin(self, boton):
utils.ui.set_fecha(self.wids['e_ffin'])
def show_fecha(self, entry, event):
"""
Muestra la fecha en modo texto después de parsearla.
"""
if entry.get_text():
try:
entry.set_text(utils.fecha.str_fecha(utils.fecha.parse_fecha(
entry.get_text())))
except (ValueError, TypeError):
entry.set_text(utils.fecha.str_fecha(datetime.date.today()))
def get_usuario(self):
return self.__usuario
# Funciones estándar "de facto":
    def actualizar_ventana(self, boton = None):
        """
        Refresh the window contents.

        Shows the busy (watch) cursor, flushes pending Gtk events so the
        cursor change becomes visible, repopulates all widgets and finally
        restores the default cursor. `boton` is the clicked button; unused.
        """
        cursor_reloj = gtk.gdk.Cursor(gtk.gdk.WATCH)
        self.wids['ventana'].window.set_cursor(cursor_reloj)
        # Clear any urgency hint set on the window.
        utils.ui.set_unset_urgency_hint(self.wids['ventana'], False)
        while gtk.events_pending(): gtk.main_iteration(False)
        self.rellenar_widgets()
        self.wids['ventana'].window.set_cursor(None)
    def inicializar_ventana(self):
        """
        Initialise the window controls, building the models for the
        TreeViews, etc.

        If `columnas` was given, it must be a list of column names in the
        order they should appear in the window; both plain columns and
        relations (foreign and multiple keys) are accepted. When omitted,
        all columns of the class are used.
        """
        if self.columnas is None:
            self.columnas = [c.name for c in self.clase.sqlmeta.columnList]
        cols = []
        # Per column: name, data type, editable, sortable, searchable,
        # edit callback.
        for nombre_col in self.columnas:
            sqlcol = self.clase.search_col_by_name(nombre_col)
            tipo_gobject = utils.ui.sqltype2gobject(sqlcol)
            col=(labelize(nombre_col), tipo_gobject, False, True, False, None)
            cols.append(col)
        # Hidden trailing column holding the PUID of each row's object.
        cols.append(("PUID", "gobject.TYPE_STRING", False, False, False, None))
        try:
            tv = self.wids['tv_datos']
        except KeyError:
            tv = None   # Not "standard": the window has its own TreeView.
        if tv:
            utils.ui.preparar_treeview(tv, cols)
            # Double-click opens the row's object in a new window.
            tv.connect("row-activated", _abrir_en_ventana_nueva,
                       self.__usuario, GALACTUS, None, self.clase)
        self.build_totales()
        self.build_filtros()
        self.build_agrupar_por()
        self.build_widget_grafico()
        try:
            self.wids['label_notas'].set_text("\nIntroduzca filtro en la "
                "parte superior y pulse actualizar.\n")
        except KeyError:
            pass    # It has no notes label either...
    def build_widget_grafico(self):
        """
        Create (once) the chart widget inside the window's eventbox.

        The lookup of self.wids['grafica'] is only an existence check: on
        the first call it raises KeyError and the alignment container, the
        charting.Chart instance and their packing are created; subsequent
        calls are no-ops.
        """
        try:
            wgrafica = self.wids['grafica']     # existence check; value unused
        except KeyError:    # Indeed, it is not created yet.
            self.wids['grafica'] = gtk.Alignment(0.5, 0.5, 0.9, 0.9)
            self.wids['grafica'].set_property("visible", True)
            self.wids['eventbox_chart'].add(self.wids['grafica'])
            self.grafica = charting.Chart(value_format = "%.1f",
                                          #max_bar_width = 20,
                                          #legend_width = 70,
                                          interactive = False)
            self.wids['grafica'].add(self.grafica)
            self.wids['eventbox_chart'].show_all()
    def actualizar_grafica(self, keys, data):
        """
        Update (or create) the chart held in the window's eventbox.

        EAFP: if self.grafica does not exist yet, the AttributeError
        triggers its construction and the plot is retried.
        """
        try:
            self.grafica.plot(keys, data)
        except AttributeError:  # The widget has not been created yet.
            self.build_widget_grafico()
            self.grafica.plot(keys, data)
    def build_agrupar_por(self):
        """
        Build a combo to group the results by a specific field. It takes
        the value passed to the constructor as its default.
        """
        c = self.wids['vbox_filtros']
        cb_agrupar_por = gtk.ComboBox()
        self.wids['cb_agrupar_por'] = cb_agrupar_por
        # Fill with (index, human label) pairs for every column.
        utils.ui.rellenar_lista(cb_agrupar_por,
            enumerate([labelize(i) for i in self.columnas]))
        if self.agrupar_por:    # Default value
            for i, col in enumerate(self.columnas):
                if col == self.agrupar_por:
                    utils.ui.combo_set_from_db(self.wids['cb_agrupar_por'], i)
        cb_agrupar_por.connect("changed", self.__store_agrupar)
        box_agrupar = gtk.HBox()
        box_agrupar.pack_start(gtk.Label("Agrupar resultados por"))
        box_agrupar.pack_start(cb_agrupar_por)
        box_agrupar.show_all()
        c.add(box_agrupar)
def __store_agrupar(self, combo):
indice = utils.ui.combo_get_value(combo)
self.agrupar_por = self.columnas[indice]
# No merece la pena actualizar. Que lo haga el usuario.
    def build_filtros(self):
        """Build one filter widget per column name listed in ``self.filtros``."""
        c = self.wids['vbox_filtros']
        for campo in self.filtros:
            sqlcampo = self.clase.sqlmeta.columns[campo]
            wfiltro, contenedor = build_filtro(sqlcampo)
            inner = gtk.HBox()
            inner.add(gtk.Label(labelize(campo)))
            try:
                inner.add(contenedor)
            except TypeError:   # No container: pack the bare widget instead.
                inner.add(wfiltro)
            self.wids[wfiltro.name] = wfiltro
            try:    # Apply the default value for this filter, if any.
                escribir_valor(sqlcampo,
                               self.filtros_defecto[wfiltro.name],
                               self.wids[wfiltro.name])
            except KeyError:    # No default value for this filter.
                pass
            c.add(inner)
        c.show_all()
    def activar_widgets(self, activo = True):
        """
        Enable or disable the window widgets that depend on the object being
        shown (usually everything except the new/quit/search buttons).

        Intentionally a no-op in query windows: no single object is edited.
        """
        pass
    def es_diferente(self):
        """
        Return True if any on-screen value differs from the object's.

        Query windows never edit objects, so this is always False.
        """
        return False
def rellenar_widgets(self):
"""
Muestra los valores de cada atributo en el widget
del campo correspondiente.
"""
try:
fechaini = self.wids['e_fini'].get_text().strip()
except KeyError: # No es una ventana de consulta "estándar".
return
if fechaini:
try:
fechaini = utils.fecha.parse_fecha(fechaini)
except (ValueError, TypeError):
utils.dialogo_info(titulo = "ERROR EN FECHA INICIAL",
texto = "El texto «%s» no es una fecha correcta." % fechaini,
padre = self.wids['ventana'])
fechaini = None
fechafin = self.wids['e_ffin'].get_text().strip()
if fechafin:
try:
fechafin = utils.fecha.parse_fecha(fechafin)
except (ValueError, TypeError):
utils.ui.dialogo_info(titulo = "ERROR EN FECHA FINAL",
texto = "El texto «%s» no es una fecha correcta." % fechafin,
padre = self.wids['ventana'])
fechafin = None
if fechaini and fechafin and fechafin < fechaini:
fechaini, fechafin = fechafin, fechaini
self.wids['e_fini'].set_text(utils.fecha.str_fecha(fechaini))
self.wids['e_ffin'].set_text(utils.fecha.str_fecha(fechafin))
criterios = []
try:
campofecha = getattr(self.clase.q,
buscar_campos_fecha(self.clase)[0].name)
except IndexError:
pass # No puedo filtrar por fecha.
else:
if fechaini:
criterios.append(campofecha >= fechaini)
if fechafin:
criterios.append(campofecha <= fechafin)
# Más filtros:
# PLAN: No es demasiado útil tal y como está ahora. Debería permitir
# definir rangos y operadores >=, <>, etc.
for filtro in self.filtros:
sqlcolumn = self.clase.sqlmeta.columns[filtro]
valor = leer_valor(sqlcolumn,
self.wids[filtro])
if isinstance(sqlcolumn, pclases.SOForeignKey) and valor == -1:
criterios.append(getattr(self.clase.q, filtro) == None)
elif valor: # Si no especifica, es que no lo quiere usar. ¿No?
criterios.append(getattr(self.clase.q, filtro) == valor)
self.resultados = self.clase.select(pclases.AND(*criterios))
self.rellenar_resultados()
try:
self.actualizar_total(self.wids['cb_total_totalizador'],
self.wids['e_total_totalizador'])
except KeyError:
pass # No hay totalizador.
def rellenar_resultados(self):
vpro = VentanaProgreso(padre = self.wids['ventana'])
txtpro = self.clase.__name__
vpro.set_valor(0.0, txtpro)
vpro.mostrar()
model = self.wids['tv_datos'].get_model()
try:
tot = len(self.resultados)
except TypeError:
tot = self.resultados.count()
if tot*len(self.columnas) > NUMERO_ARBITRARIAMENTE_GRANDE:
self.wids['tv_datos'].freeze_child_notify()
self.wids['tv_datos'].set_model(None)
model.clear()
i = 0.0
padres = {}
for registro in self.resultados:
vpro.set_valor(i / tot, "[%d/%d] %s (%s)" % (
i, tot, txtpro, registro.get_puid()))
i += 1
fila = []
for columna in self.columnas:
valor = getattr(registro, columna)
valor = humanizar(valor, registro.sqlmeta.columns[columna])
fila.append(valor)
fila.append(registro.get_puid())
if not self.agrupar_por:
model.append(None, (fila)) # Es un treeview plano que usaré
# como listview. Por eso el nodo padre es None.
else:
try:
padre = padres[getattr(registro, self.agrupar_por)]
except KeyError: # Primera vez que aparece este padre.
valor_cabecera = getattr(registro, self.agrupar_por)
puid = None
if isinstance(self.clase.sqlmeta.columns[self.agrupar_por],
pclases.SOForeignKey):
valor_cabecera = getattr(registro,
self.agrupar_por.replace("ID", "")) # CHAPU
try:
puid = valor_cabecera.get_puid()
valor_cabecera = valor_cabecera.get_info()
except AttributeError: # Es None
valor_cabecera = "Sin %s" % (
labelize(self.agrupar_por))
puid = None
fila_padre = [valor_cabecera]
for i in range(len(self.columnas) - 1):
fila_padre.append("")
fila_padre.append(puid)
padre = padres[getattr(registro, self.agrupar_por)] \
= model.append(None, (fila_padre))
model.append(padre, (fila))
# Actualizo totales numéricos del padre:
ncell = 0
for columna in self.columnas:
valor_hijo_columna = getattr(registro, columna)
sqlcolumna = registro.sqlmeta.columns[columna]
if (isinstance(valor_hijo_columna, (int, float)) and
not isinstance(sqlcolumna, pclases.SOForeignKey)):
try:
nuevo_valor_padre = utils.numero._float(
model[padre][ncell])
except ValueError:
nuevo_valor_padre = 0
nuevo_valor_padre += valor_hijo_columna
nuevo_valor_padre = utils.numero.float2str(
nuevo_valor_padre)
model[padre][ncell] = nuevo_valor_padre
ncell += 1
self.actualizar_totales()
if tot*len(self.columnas) > NUMERO_ARBITRARIAMENTE_GRANDE:
self.wids['tv_datos'].set_model(model)
self.wids['tv_datos'].thaw_child_notify()
vpro.ocultar()
def actualizar_totales(self):
"""
Actualiza el grid de totales.
"""
self.wids['e_total_total'].set_text(str(self.clase.select().count()))
self.wids['e_total_listed'].set_text(str(self.resultados.count()))
    def buscar(self, boton):
        """
        Ask the user for a text to search and activate the result.

        Intentionally a no-op here: query windows filter through their
        filter widgets instead of a free-text search.
        """
        pass
    def build_totales(self):
        """
        Build the totals grid: total/listed counters plus the optional
        "totalizer" (a combo to pick a numeric column and an entry showing
        its sum) returned by ``build_totalizador``.
        """
        gt = self.wids['grid_totales']
        self.wids['e_total_total'] = gtk.Entry()
        self.wids['e_total_listed'] = gtk.Entry()
        filas = [("Número de elementos totales: ", self.wids['e_total_total']),
                 ("Número de elementos listados", self.wids['e_total_listed'])]
        filas.append(self.build_totalizador())
        numfila = 0
        for etiqueta, entry in filas:
            if etiqueta and entry:  # build_totalizador may give (None, None).
                numfila += 1
                gt.resize(numfila, 2)
                if isinstance(etiqueta, str):
                    label = gtk.Label(etiqueta)
                else:
                    label = etiqueta    # Must be the totalizer's combo box.
                gt.attach(label, 0, 1, numfila - 1, numfila)
                entry.set_property("editable", False)
                entry.set_property("has-frame", False)
                entry.set_alignment(1.0)
                gt.attach(entry, 1, 2, numfila - 1, numfila)
        gt.show_all()
    def build_totalizador(self):
        """
        Return a (container, entry) pair: a ComboBox whose «changed» event
        sums the selected column into the non-editable Entry.
        Only Float/Int-like columns are offered; summing text makes no sense.
        Returns (None, None) when the class has no numeric columns.
        """
        # TODO: PLAN: also build totals for time values
        # (datetime.timedelta = SOTimeCol; TimestampCol are dates with
        # a time part).
        clases_numericas = (pclases.SOBigIntCol, pclases.SOCurrencyCol,
                            pclases.SODecimalCol, pclases.SOFloatCol,
                            pclases.SOIntCol, pclases.SOMediumIntCol,
                            pclases.SOSmallIntCol, pclases.SOTinyIntCol)
        nombres_colsnumericas = []
        # NOTE(review): a column matching several of the classes above would
        # be appended more than once — confirm whether duplicates can occur.
        for col in self.columnas:
            for clase_numerica in clases_numericas:
                sqlcol = self.clase.sqlmeta.columns[col]
                if isinstance(sqlcol, clase_numerica):
                    nombres_colsnumericas.append(col)
        if nombres_colsnumericas:
            nombres_colsnumericas.sort()
            combo_total = gtk.combo_box_new_text()
            for nombrecol in nombres_colsnumericas:
                combo_total.append_text(nombrecol)
            entry_total = gtk.Entry()
            entry_total.set_has_frame(False)
            entry_total.set_editable(False)
            combo_total.connect("changed", self.actualizar_total,
                                entry_total)
            totalizador = gtk.HBox()
            labeler = gtk.Label("Totalizar ")
            labeler.set_alignment(1.0, labeler.get_alignment()[1])
            totalizador.pack_start(labeler)
            # Wrap the combo in a VBox so it does not expand vertically.
            not_expansor = gtk.VBox()
            not_expansor.pack_start(combo_total, fill = False)
            totalizador.pack_start(not_expansor)
            #totalizador.pack_start(entry_total)
            # Name the widgets and register them in self.wids for later use.
            entry_total.set_property("name",
                                     "e_total_totalizador")
            self.wids[entry_total.name] = entry_total
            combo_total.set_property("name",
                                     "cb_total_totalizador")
            self.wids[combo_total.name] = combo_total
            labeler.set_property("name", "l_total_totalizador")
            self.wids[labeler.name] = labeler
            totalizador.set_property("name",
                                     "totalizador_container")
            self.wids[totalizador.name] = totalizador
        else:
            totalizador = entry_total = None
        return totalizador, entry_total
    def actualizar_total(self, combo, e_total):
        """
        Sum the column selected in ``combo`` over the current results,
        write it to the totalizer entry and refresh the chart.

        NOTE(review): the ``e_total`` parameter is not used here; the entry
        is looked up through self.wids instead — confirm it can be dropped.
        """
        coltotal = combo.get_active_text()
        if coltotal:    # A column was actually selected for the sum.
            total = sum([getattr(i, coltotal) for i in self.resultados])
            self.wids['e_total_totalizador'].set_text(str(total))
            # Now feed the chart with the same column, one bar per record.
            # NOTE(review): keys come from get_info(); duplicated infos
            # would overwrite each other in this dict.
            data = {}
            for r in self.resultados:
                data[r.get_info()] = getattr(r, coltotal)
            #keys = sorted(data.keys(), key = lambda x: x[0])
            #values = [data[key] for key in keys]
            # Sort by value (descending) instead:
            import operator
            keys = []
            values = []
            sorted_by_value = sorted(data.iteritems(),
                                     key = operator.itemgetter(1),
                                     reverse = True)
            for k, v in sorted_by_value:
                keys.append(k)
                values.append(v)
            # Collapse the tail into a single "Resto" bar to keep the
            # chart readable.
            if len(keys) > MAX_DATA_GRAFICA:
                resto = sum(values[MAX_DATA_GRAFICA:])
                keys = keys[:MAX_DATA_GRAFICA]
                keys.append("Resto")
                values = values[:MAX_DATA_GRAFICA]
                values.append(resto)
            self.actualizar_grafica(keys, values)
    def exportar(self, boton):
        """
        Export the TreeView contents to a CSV file and open it.
        """
        try:
            tv = self.wids['tv_datos']
        except KeyError:
            return  # Not a standard window; subclasses should override this.
        from utils.treeview2csv import treeview2csv
        from utils.informes import abrir_csv
        abrir_csv(treeview2csv(tv))
def imprimir(self, boton):
"""
Genera un PDF con los datos en pantalla.
"""
try:
tv = self.wids['tv_datos']
except KeyError:
return # No es ventana estándar. Quien me herede, que redefina.
titulo = self.wids['ventana'].get_title()
from utils.treeview2pdf import treeview2pdf
from utils.informes import abrir_pdf
strfini = self.wids['e_fini'].get_text()
strffin = self.wids['e_ffin'].get_text()
if strfini and strfin:
strfecha = "%s - %s" % (strfini, strffin)
elif not strfini and strffin:
strfecha = "Hasta %s" % (strffin)
elif strfini and not strffin:
strfecha = "Desde %s" % (strfini)
else:
strfecha = None
nomarchivo = treeview2pdf(tv,
titulo = titulo,
fecha = strfecha)
abrir_pdf(nomarchivo)
def buscar_campos_fecha(clase):
    """
    Return the date/datetime columns of a pclases class.
    @param clase: pclases class to inspect.
    """
    tipos_fecha = (pclases.SODateCol, pclases.SODateTimeCol)
    return [columna for columna in clase.sqlmeta.columnList
            if isinstance(columna, tipos_fecha)]
def build_filtro(sqlcampo):
    """
    Build and return a (widget, container) pair to filter on ``sqlcampo``.

    For foreign keys two synthetic entries are prepended so the user can
    pick "Todos" (no filter, value 0) or "Ninguno" (IS NULL, value -1);
    they must be handled specially by the caller.
    """
    wvalor, contenedor = build_widget_valor(sqlcampo, sqlcampo.name,
                                            let_edit = False,
                                            let_create = False)
    if isinstance(sqlcampo, pclases.SOForeignKey):
        model = wvalor.get_model()
        model.insert(0, (0, "Todos"))
        model.insert(1, (-1, "Ninguno"))
    return wvalor, contenedor
def leer_valor(col, widget):
    """
    Read the on-screen value for column ``col`` from ``widget`` and convert
    it according to the column type (e.g. to float for SOFloatCol) before
    returning it.
    Conversion failures are logged when DEBUG is on and yield None instead
    of propagating (see the commented-out ``raise`` lines).
    """
    if isinstance(col, pclases.SOStringCol):
        try:
            valor = widget.get_text()
        except AttributeError:  # The widget may be a TextView.
            buffer = widget.get_buffer()
            valor = buffer.get_text(buffer.get_start_iter(),
                                    buffer.get_end_iter())
    elif isinstance(col, pclases.SOIntCol):
        if isinstance(widget, gtk.SpinButton):
            valor = widget.get_value()
            valor = int(valor)
        else:
            valor = widget.get_text()
            # The text may contain a formula; evaluate it first.
            valor = utils.numero.parse_formula(valor, int)
            try:
                valor = int(valor)
            except Exception, e:
                if pclases.DEBUG and pclases.VERBOSE:
                    ttt = "ventana_consulta::leer_valor -> "\
                          "Excepción {0} capturada "\
                          "al convertir «{1}» a entero."\
                          " Devuelvo None.".format(e, valor)
                    print ttt
                # raise e
                valor = None
    elif isinstance(col, pclases.SOFloatCol):
        valor = widget.get_text()
        # NOTE(review): the formula is parsed with ``int`` even though the
        # column is a float — confirm whether this is intentional.
        valor = utils.numero.parse_formula(valor, int)
        try:
            valor = utils.numero._float(valor)
        except Exception, e:
            # Fallback: try to parse it as a currency amount.
            try:
                valor = utils.numero.parse_euro(valor)
            except Exception, e:
                # Fallback: try to parse it as a percentage.
                try:
                    valor = utils.numero.parse_porcentaje(valor)
                except Exception, e:
                    if pclases.DEBUG and pclases.VERBOSE:
                        ttt = "ventana_consulta::leer_valor -> "\
                              "Excepción {0} "\
                              "capturada al convertir «{1}» a flotante. "\
                              "Devuelvo None.".format(e, valor)
                        print ttt
                    #raise e
                    valor = None
    elif isinstance(col, pclases.SOBoolCol):
        valor = widget.get_active()
    elif isinstance(col, pclases.SODateCol):
        valor = widget.get_text()
        try:
            valor = utils.fecha.parse_fecha(valor)
        except Exception, e:
            if pclases.DEBUG and pclases.VERBOSE:
                ttt = "ventana_consulta::leer_valor -> Excepción %s "\
                      "capturada al convertir «%s» a fecha. "\
                      "Devuelvo None." % (e, valor)
                print ttt
            #raise e
            valor = None
    elif isinstance(col, pclases.SOForeignKey):
        valor = utils.ui.combo_get_value(widget)
    else:
        # Best effort for unknown widget types; a TextView case might
        # still be missing here.
        if hasattr(widget, "child"):
            valor = widget.child.get_text()
        else:
            valor = widget.get_text()
    return valor
def escribir_valor(col, valor, widget):
    """
    Show ``valor`` in the widget matching column ``col``, applying the
    conversions required by the column type.
    Convention for NULL (None in Python): shown as the empty string, or
    as -1 for foreign keys.
    """
    if isinstance(col, pclases.SOStringCol):
        if valor is None:
            valor = "" # Show an empty string; if the user saves it,
                       # "" will be stored instead of None — their choice.
        try:
            widget.set_text(valor)
        except AttributeError:  # The widget may be a TextView.
            widget.get_buffer().set_text(valor)
        except TypeError:
            # NOTE(review): the «%s» placeholder is never interpolated
            # before raising — confirm this should be ``terr % valor``.
            terr = "ventana_generica::escribir_valor -> El valor «%s» "\
                   "no es válido."
            raise TypeError, terr
    elif isinstance(col, pclases.SOIntCol):
        if isinstance(widget, gtk.SpinButton):
            widget.set_value(valor)
        else:
            try:
                if valor != None:
                    valor = str(valor)
                else:
                    valor = ""
            except Exception, e:
                if pclases.DEBUG:
                    print "Excepción %s capturada al convertir %s de "\
                          "entero a cadena." % (e, valor)
                raise e
            widget.set_text(valor)
    elif isinstance(col, pclases.SOFloatCol):
        try:
            # precision = 6 is for GPS (lat, lon) coordinates.
            valor = utils.numero.float2str(valor, precision = 6,
                                           autodec = True)
        except Exception, e:
            if pclases.DEBUG and pclases.VERBOSE:
                terr = "Excepción %s capturada al convertir «%s» de"\
                       " flotante a cadena." % (e, valor)
                print terr
            #raise e
            valor = ''
        widget.set_text(valor)
    elif isinstance(col, pclases.SOBoolCol):
        widget.set_active(valor)
    elif isinstance(col, pclases.SODateCol):
        try:
            valor = utils.fecha.str_fecha(valor)
        except Exception, e:
            if pclases.DEBUG and pclases.VERBOSE:
                txterr = "Excepción %s capturada al convertir «%s» "\
                         "de fecha a cadena." % (e, valor)
                print txterr
            #raise e
            valor = ""
        widget.set_text(valor)
    elif isinstance(col, pclases.SOForeignKey):
        utils.ui.combo_set_from_db(widget, valor)
    else:
        # Best effort for unknown widget types; a TextView case might
        # still be missing here.
        if hasattr(widget, "child"):
            widget.child.set_text(`valor`)
        else:
            widget.set_text(`valor`)
if __name__ == "__main__":
    # Ad-hoc launcher: ``python ventana_consulta.py [ClassName] [glade]``.
    import sys
    try:
        glade = sys.argv[2]
    except IndexError:
        glade = "ventana_consulta.glade"
    if not glade.endswith(".glade"):
        glade += ".glade"
    try:
        clase = sys.argv[1]
    except IndexError:
        # Derive the class name from the glade file name.
        clase = camelize(glade.replace(".glade", "").title())
    VentanaConsulta(clase = clase, run = True, ventana_marco = glade,
                    filtros = ["iva", "serieNumericaID"])
|
pacoqueen/cican
|
formularios/ventana_consulta.py
|
Python
|
gpl-3.0
| 35,311
|
# Copyright (C) 2013 Dennis Gosnell
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module deals with getting information from dictionaries.
A dictionary can get information from any resource, such as
a website, an epwing dictionary, a database, etc.
"""
from .epwing import *
from .web import *
|
cdepillabout/semiautocards
|
semiauto/dictionary/__init__.py
|
Python
|
gpl-3.0
| 897
|
# -*- coding: utf-8 -*-
"""Caliopen disccions index classes.
Discussions are not really indexed, they are result of messages aggregations.
So there is not direct document mapping, only helpers to find discussions
and build a suitable representation for displaying.
"""
from __future__ import absolute_import, print_function, unicode_literals
import logging
from elasticsearch_dsl import A
from caliopen_storage.store.model import BaseIndexDocument
from caliopen_main.message.store.message_index import IndexedMessage
log = logging.getLogger(__name__)
class DiscussionIndex(object):
    """Index-derived information about a single discussion.

    Plain value holder: counters start at zero and the last message is
    filled in later by the manager.
    """

    # Class-level defaults, overridden per instance by the manager.
    total_count = 0
    unread_count = 0
    attachment_count = 0
    last_message = None

    def __init__(self, id):
        # ``id`` is the discussion identifier coming from the index.
        self.discussion_id = id
class DiscussionIndexManager(object):
    """Manager for building discussions from the index storage layer."""

    def __init__(self, user):
        # NOTE(review): the index name is the user's shard, presumably
        # shared between users; every query below re-filters on user_id.
        self.index = user.shard_id
        self.user_id = user.user_id
        self.proxy = BaseIndexDocument.client()

    def _prepare_search(self):
        """Prepare a dsl.Search object on the current index, scoped to the user."""
        search = IndexedMessage.search(using=self.proxy,
                                       index=self.index)
        search = search.filter('term', user_id=self.user_id)
        return search

    def __search_ids(self, limit, offset, min_pi, max_pi, min_il, max_il):
        """Search discussion ids as a bucket aggregation.

        Returns a (buckets, total) pair.  ``min_pi``/``max_pi`` are
        currently unused here; only the importance-level range is applied.
        """
        # TODO : search on participants_hash instead
        search = self._prepare_search(). \
            filter("range", importance_level={'gte': min_il, 'lte': max_il})
        # Bucket term aggregation sorted by last message date. Oversized
        # (offset + limit*2) because pagination happens in Python below.
        size = offset + (limit * 2)
        agg = A('terms', field='discussion_id',
                order={'last_message': 'desc'}, size=size, shard_size=size)
        search.aggs.bucket('discussions', agg) \
            .metric('last_message', 'max', field='date_sort') \
            .bucket("unread", "filter", term={"is_unread": True})
        result = search.source(exclude=["*"]).execute()
        if hasattr(result, 'aggregations'):
            # Something was found.
            buckets = result.aggregations.discussions.buckets
            # XXX Ugly but don't find a way to paginate on bucket aggregation
            buckets = buckets[offset:offset + limit]
            total = result.aggregations.discussions.sum_other_doc_count
            # Remove last_message for now as it doesn't have relevant attrs.
            for discussion in buckets:
                del discussion["last_message"]
            return buckets, total
        log.debug('No result found on index {}'.format(self.index))
        return {}, 0

    def get_last_message(self, discussion_id, min_il, max_il, include_draft):
        """Get the most recent message of a given discussion."""
        search = self._prepare_search() \
            .filter("match", discussion_id=discussion_id) \
            .filter("range", importance_level={'gte': min_il, 'lte': max_il})
        if not include_draft:
            search = search.filter("match", is_draft=False)
        result = search.sort('-date_sort')[0:1].execute()
        if not result.hits:
            # XXX what to do better if not found ?
            return {}
        return result.hits[0]

    def list_discussions(self, limit=10, offset=0, min_pi=0, max_pi=0,
                         min_il=-10, max_il=10):
        """Build a list of a limited number of discussions."""
        buckets, total = self.__search_ids(limit, offset, min_pi, max_pi,
                                           min_il,
                                           max_il)
        discussions = []
        for bucket in buckets:
            # TODO: buckets will become participant hashes, so build the
            # discussion_id list first and iterate over that instead.
            message = self.get_last_message(bucket['key'],
                                            min_il, max_il,
                                            True)
            discussion = DiscussionIndex(bucket['key'])
            discussion.total_count = bucket['doc_count']
            discussion.unread_count = bucket['unread']['doc_count']
            discussion.last_message = message
            # XXX build other values from the index.
            discussions.append(discussion)
        # XXX total does not work completely, hack a bit.
        return discussions, total + len(discussions)

    def message_belongs_to(self, discussion_id, message_id):
        """Return True if the message belongs to the discussion."""
        msg = IndexedMessage.get(message_id, using=self.proxy, index=self.index)
        return str(msg.discussion_id) == str(discussion_id)

    def get_by_id(self, discussion_id, min_il=0, max_il=100):
        """Return a single DiscussionIndex by discussion_id, or None."""
        # TODO : search by multiple discussion_id because they are hashes now
        search = self._prepare_search() \
            .filter("match", discussion_id=discussion_id)
        search.aggs.bucket('discussions', A('terms', field='discussion_id')) \
            .bucket("unread", "filter", term={"is_unread": True})
        result = search.execute()
        if not result.hits or len(result.hits) < 1:
            return None
        message = self.get_last_message(discussion_id, min_il, max_il, True)
        discussion = DiscussionIndex(discussion_id)
        discussion.total_count = result.hits.total
        discussion.last_message = message
        discussion.unread_count = result.aggregations.discussions.buckets[
            0].unread.doc_count
        return discussion

    def get_by_uris(self, uris_hashes, min_il=0, max_il=100):
        """
        :param uris_hashes: an array of uris hashes
        :param min_il: lower bound on importance level (inclusive)
        :param max_il: upper bound on importance level (inclusive)
        :return: the raw search result, or None when nothing matched
        """
        search = self._prepare_search(). \
            filter("terms", discussion_id=uris_hashes). \
            filter("range", importance_level={'gte': min_il, 'lte': max_il})
        agg = A('terms', field='discussion_id',
                order={'last_message': 'desc'})
        search.aggs.bucket('discussions', agg). \
            metric('last_message', 'max', field='date_sort'). \
            bucket("unread", "filter", term={"is_unread": True})
        result = search.execute()
        if not result.hits or len(result.hits) < 1:
            return None
        return result
|
CaliOpen/CaliOpen
|
src/backend/main/py.main/caliopen_main/discussion/store/discussion_index.py
|
Python
|
gpl-3.0
| 6,446
|
# Copyright © 2020, Joseph Berry, Rico Tabor (opendrop.dev@gmail.com)
# OpenDrop is released under the GNU GPL License. You are free to
# modify and distribute the code, but always under the same license
#
# If you use this software in your research, please cite the following
# journal articles:
#
# J. D. Berry, M. J. Neeson, R. R. Dagastine, D. Y. C. Chan and
# R. F. Tabor, Measurement of surface and interfacial tension using
# pendant drop tensiometry. Journal of Colloid and Interface Science 454
# (2015) 226–237. https://doi.org/10.1016/j.jcis.2015.05.012
#
# E. Huang, T. Denning, A. Skoufis, J. Qi, R. R. Dagastine, R. F. Tabor
# and J. D. Berry, OpenDrop: Open-source software for pendant drop
# tensiometry & contact angle measurements, submitted to the Journal of
# Open Source Software
#
# These citations help us not only to understand who is using and
# developing OpenDrop, and for what purpose, but also to justify
# continued development of this code and other open source resources.
#
# OpenDrop is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this software. If not, see <https://www.gnu.org/licenses/>.
from typing import Optional, Sequence, Iterable
from gi.repository import GObject, Gtk
from opendrop.app.ift.services.analysis import PendantAnalysisJob
from opendrop.appfw import Presenter, component, install
@component(
    template_path='./overview.ui',
)
class IFTReportOverviewPresenter(Presenter[Gtk.Paned]):
    """Presenter for the IFT report overview pane."""

    # Backing fields for the GObject properties below.
    _analyses = ()
    _selection = None

    @install
    @GObject.Property
    def analyses(self) -> Sequence[PendantAnalysisJob]:
        """Analyses shown in the overview, stored as an immutable tuple."""
        return self._analyses

    @analyses.setter
    def analyses(self, analyses: Iterable[PendantAnalysisJob]) -> None:
        # Copy into a tuple so callers can't mutate our state afterwards.
        self._analyses = tuple(analyses)

    @GObject.Property
    def selection(self) -> Optional[PendantAnalysisJob]:
        """Currently selected analysis, or None when nothing is selected."""
        return self._selection

    @selection.setter
    def selection(self, selection: Optional[PendantAnalysisJob]) -> None:
        self._selection = selection
|
jdber1/opendrop
|
opendrop/app/ift/report/overview/overview.py
|
Python
|
gpl-3.0
| 2,211
|
from random import choice
# If the code is not Cython-compiled, we need to add some imports.
from cython import compiled
if not compiled:
from mazelib.solve.MazeSolveAlgo import MazeSolveAlgo
class RandomMouse(MazeSolveAlgo):
    """This mouse just randomly wanders around the maze until it finds the cheese."""

    def _solve(self):
        """Solve a maze as stupidly as possible: wander randomly until the end.

        This should be basically optimally slow, producing absurdly long
        solutions with lots of double-backs.

        Returns:
            list: solution to the maze (a single path, wrapped in a list)
        """
        solution = []
        # A first move has to be made: step off the edge if we start on one.
        current = self.start
        if self._on_edge(self.start):
            current = self._push_edge(self.start)
        solution.append(current)
        # Pick a random neighbor and travel to it, until we reach the end.
        while not self._within_one(solution[-1], self.end):
            ns = self._find_unblocked_neighbors(solution[-1])
            nxt = choice(ns)
            # Record both the midpoint and the cell, keeping a full path.
            solution.append(self._midpoint(solution[-1], nxt))
            solution.append(nxt)
        return [solution]
|
theJollySin/mazelib
|
mazelib/solve/RandomMouse.py
|
Python
|
gpl-3.0
| 1,197
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# res_core_data_NOTES.py
import unittest
import sys
import os
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + "aikif" )
sys.path.append(root_folder)
import core_data as mod_core
import res_core_data_mthd1
import res_core_data_mthd2
fname = 'res_core_data.rst'
data_files = ['sample_raw_data1.csv', 'sample_raw_data2.csv']
def main():
    """
    Generate the research document from the results of the two comparison
    methods, wrapping them with imported RST header and footer sections.
    """
    print("Generating research notes...")
    # Start from a clean output file.
    if os.path.exists(fname):
        os.remove(fname)
    append_rst('================================================\n')
    append_rst('Comparison of Information Aggregation Techniques\n')
    append_rst('================================================\n\n')
    append_rst('.. contents::\n\n')
    # Import the header section. FIX: use context managers so file
    # handles are closed deterministically (they used to leak).
    with open('res_core_data_HEADER.rst', 'r') as header_file:
        append_rst(header_file.read())
    append_rst(res_core_data_mthd1.get_method())
    append_rst(res_core_data_mthd2.get_method())
    # Run both methods over every sample data file.
    append_rst('Results\n')
    append_rst('=====================================\n')
    for dat in data_files:
        append_rst('\nData File : ' + dat + '\n---------------------------------------\n\n')
        res_core_data_mthd1.get_results(fname, dat)
        res_core_data_mthd2.get_results(fname, dat)
    # Import the footer section.
    with open('res_core_data_FOOTER.rst', 'r') as footer_file:
        append_rst(footer_file.read())
    print("Done!")
def append_rst(txt, path=None):
    """Append *txt* to the research notes file.

    Args:
        txt: text to append.
        path: optional target file; defaults to the module-level ``fname``
            so existing callers keep working unchanged.
    """
    with open(path if path is not None else fname, 'a') as f:
        f.write(txt)
if __name__ == "__main__":
    # FIX: guard the entry point so importing this module (e.g. from a
    # test) no longer triggers a full document generation.
    main()
|
acutesoftware/AIKIF
|
scripts/res_core_data_NOTES.py
|
Python
|
gpl-3.0
| 1,670
|
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import mimetypes
from flask import render_template
from pygments import highlight
from pygments.lexers import get_lexer_for_mimetype, CppLexer, PhpLexer, JavaLexer, RubyLexer
from pygments.formatters import HtmlFormatter
from indico.core import signals
from indico.core.plugins import IndicoPlugin
from indico.modules.attachments.preview import Previewer
def register_custom_mimetypes():
    """Teach the mimetypes module about extensions missing from its database."""
    # C# sources are not part of the stock mimetypes mapping.
    mimetypes.add_type(b'text/x-csharp', b'.cs')


# Register immediately at import time so previews work right away.
register_custom_mimetypes()
class PygmentsPreviewer(Previewer):
    """Attachment previewer rendering source files as highlighted HTML."""

    # All supported MIME types.
    # NOTE(review): 'text/x-c' appears twice in this tuple — harmless for
    # membership tests, but probably a copy/paste slip.
    MIMETYPES = ('text/css', 'text/x-python', 'text/x-ruby-script', 'text/x-java-source', 'text/x-c',
                 'application/javascript', 'text/x-c', 'text/x-fortran', 'text/x-csharp', 'text/php',
                 'text/x-php')
    # Python's mimetypes lib and Pygments do not quite agree on some MIME types.
    CUSTOM_LEXERS = {
        'text/x-c': CppLexer(),
        'text/x-java-source': JavaLexer(),
        'text/x-ruby-script': RubyLexer(),
        'text/php': PhpLexer()
    }

    @classmethod
    def can_preview(cls, attachment_file):
        # True when the attachment's MIME type is one we can highlight.
        return attachment_file.content_type in cls.MIMETYPES

    @classmethod
    def generate_content(cls, attachment):
        """Render the attachment as highlighted HTML plus its stylesheet."""
        mime_type = attachment.file.content_type
        # Prefer the explicit lexer overrides; fall back to Pygments' lookup.
        lexer = cls.CUSTOM_LEXERS.get(mime_type)
        if lexer is None:
            lexer = get_lexer_for_mimetype(mime_type)
        with attachment.file.open() as f:
            html_formatter = HtmlFormatter(style='tango', linenos='inline', prestyles='mono')
            html_code = highlight(f.read(), lexer, html_formatter)
            css_code = html_formatter.get_style_defs('.highlight')
        return render_template('previewer_code:pygments_preview.html', attachment=attachment,
                               html_code=html_code, css_code=css_code)
class CodePreviewerPlugin(IndicoPlugin):
    """Syntax highlighter (Pygments)"""

    # No plugin settings: there is nothing to configure.
    configurable = False

    def init(self):
        """Hook the previewer into Indico's attachment-preview signal."""
        super(CodePreviewerPlugin, self).init()
        self.connect(signals.attachments.get_file_previewers, self._get_file_previewers)

    def _get_file_previewers(self, sender, **kwargs):
        # Signal handler: advertise our previewer class.
        yield PygmentsPreviewer
|
nop33/indico-plugins
|
previewer_code/indico_previewer_code/plugin.py
|
Python
|
gpl-3.0
| 2,952
|
from openerp import api, fields, models
class BusinessType(models.Model):
    """CRM lookup model holding business type labels."""

    _name = "crm.business.type"
    # Records display their business_type value as their record name.
    _rec_name = "business_type"

    business_type = fields.Char("Business Type", required=True)
|
sumihai-tekindo/helpdesk_sicepat
|
crm_sicepat/models/business_type.py
|
Python
|
gpl-3.0
| 219
|
#!/usr/bin/python
#coding=utf8
"""
# Author: Bill
# Created Time : 2016-07-27 23:18:24
# File Name: mdb_sstat.py
# Description:
"""
import sys
import os
from optparse import OptionParser
import re
import time
import platform
import json
import types
#######Log
import logging
from logging.handlers import RotatingFileHandler
class ColoredFormatter(logging.Formatter):
    """Formatter that wraps the rendered record in ANSI color codes per level."""

    def __init__(self, fmt=None, datefmt=None):
        logging.Formatter.__init__(self, fmt, datefmt)
        # ANSI escape sequences for terminal colors.
        red = '\033[1;31m'
        green = '\033[1;32m'
        yellow = '\033[1;33m'
        reset = '\033[1;0m'
        # Template per level name; DEBUG stays uncolored.
        self.LOG_COLORS = {
            'DEBUG': '%s',
            'INFO': green + '%s' + reset,
            'WARNING': yellow + '%s' + reset,
            'ERROR': red + '%s' + reset,
            'CRITICAL': red + '%s' + reset,
            'EXCEPTION': red + '%s' + reset,
        }

    def format(self, record):
        """Render the record normally, then wrap it in its level's color."""
        rendered = logging.Formatter.format(self, record)
        template = self.LOG_COLORS.get(record.levelname, '%s')
        return template % rendered
class Log(object):
    '''
    Rotating-file logger with an optional colored console echo on stderr.
    '''
    def __init__(self, filename, level="debug", logid="qiueer", mbs=20, count=10, is_console=True):
        '''
        filename: path of the log file.
        level: minimum level name ("debug", "info", "warn", ...).
        logid: logger name; handlers are attached only once per id.
        mbs: how many MB each log file may grow before rotating.
        count: the count of rotated backup files to remain.
        is_console: also echo colored records to stderr when True.
        '''
        try:
            self._level = level
            #print "init,level:",level,"\t","get_map_level:",self._level
            self._filename = filename
            self._logid = logid
            self._logger = logging.getLogger(self._logid)
            # Guard against duplicated handlers when the same id is reused.
            if not len(self._logger.handlers):
                self._logger.setLevel(self.get_map_level(self._level))
                fmt = '[%(asctime)s] %(levelname)s\n%(message)s'
                datefmt = '%Y-%m-%d %H:%M:%S'
                formatter = logging.Formatter(fmt, datefmt)
                maxBytes = int(mbs) * 1024 * 1024
                file_handler = RotatingFileHandler(self._filename, mode='a',maxBytes=maxBytes,backupCount=count)
                self._logger.setLevel(self.get_map_level(self._level))
                file_handler.setFormatter(formatter)
                self._logger.addHandler(file_handler)
                if is_console == True:
                    stream_handler = logging.StreamHandler(sys.stderr)
                    console_formatter = ColoredFormatter(fmt, datefmt)
                    stream_handler.setFormatter(console_formatter)
                    self._logger.addHandler(stream_handler)
        except Exception as expt:
            print expt

    def tolog(self, msg, level=None):
        '''Write ``msg`` at ``level`` (defaults to the constructor level).'''
        try:
            level = level if level else self._level
            level = str(level).lower()
            level = self.get_map_level(level)
            if level == logging.DEBUG:
                self._logger.debug(msg)
            if level == logging.INFO:
                self._logger.info(msg)
            if level == logging.WARN:
                self._logger.warn(msg)
            if level == logging.ERROR:
                self._logger.error(msg)
            if level == logging.CRITICAL:
                self._logger.critical(msg)
        except Exception as expt:
            print expt

    # Convenience wrappers, one per level.
    def debug(self,msg):
        self.tolog(msg, level="debug")

    def info(self,msg):
        self.tolog(msg, level="info")

    def warn(self,msg):
        self.tolog(msg, level="warn")

    def error(self,msg):
        self.tolog(msg, level="error")

    def critical(self,msg):
        self.tolog(msg, level="critical")

    def get_map_level(self,level="debug"):
        '''Map a level name to the logging module's numeric constant.

        NOTE(review): returns None for unknown names — callers appear to
        pass only the five known names; confirm before relying on it.
        '''
        level = str(level).lower()
        #print "get_map_level:",level
        if level == "debug":
            return logging.DEBUG
        if level == "info":
            return logging.INFO
        if level == "warn":
            return logging.WARN
        if level == "error":
            return logging.ERROR
        if level == "critical":
            return logging.CRITICAL
def docmd(command,timeout=300, raw=False):
    '''
    Run a shell command with a timeout.

    Parameters:
        command: the command line (with its options/arguments) to execute
        timeout: maximum run time in seconds; the process is sent SIGINT
                 when the timeout is exceeded
        raw:     True  -> only the trailing newline is stripped from each
                          output line
                 False -> surrounding whitespace (spaces, tabs, newlines)
                          is stripped from each line (default)

    Returns:
        A 3-tuple (stdout_lines, stderr_lines, returncode); on timeout it
        is (None, None, -1).

    Example:
        docmd("ls -alt")
    '''
    import subprocess, datetime, os, time, signal, platform
    start = datetime.datetime.now()
    retcode = 0
    # shell=True only on Linux so shell built-ins and pipes work there;
    # universal_newlines gives text (not bytes) output on Python 2 and 3.
    use_shell = (platform.system() == "Linux")
    ps = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                          shell=use_shell, universal_newlines=True)
    while ps.poll() is None:
        time.sleep(0.2)
        now = datetime.datetime.now()
        if (now - start).seconds > timeout:
            os.kill(ps.pid, signal.SIGINT)
            retcode = -1
            return (None,None,retcode)
    stdo = ps.stdout.readlines()
    stde = ps.stderr.readlines()
    # BUG FIX: the original did `if not ps.returncode: retcode = ps.returncode`,
    # which kept retcode at 0 whenever the command failed — non-zero exit
    # codes were silently reported as success.
    retcode = ps.returncode
    if raw == True:    # strip only the trailing newline of each line
        stdo = [line.strip("\n") for line in stdo]
        stde = [line.strip("\n") for line in stde]
    if raw == False:   # strip surrounding whitespace/tabs/newlines
        stdo = [line.strip() for line in stdo]
        stde = [line.strip() for line in stde]
    return (stdo,stde,retcode)
def get_logstr(list_dict, max_key_len=16, join_str="\n"):
    """Render a list of dicts as aligned "key: value" lines.

    Each key is left-justified to *max_key_len* characters, entries are
    separated by *join_str*, and surrounding whitespace is stripped from
    the final string (which removes the trailing newline separator).
    """
    log_str = ""
    for conf in list_dict:
        # .items() replaces the Python-2-only .iteritems(), so the helper
        # also runs under Python 3; behavior is otherwise unchanged.
        for (key,val) in dict(conf).items():
            log_str = log_str + str(key).ljust(max_key_len) + ": " + str(val) + join_str
    log_str = log_str.strip() # drop the trailing separator whitespace
    return log_str
def get_user_passwd_by_port(conffile, port):
    """Look up the (username, password) pair for *port* in a config file.

    Each non-comment line is expected to hold at least three fields
    (port, user, password) separated by spaces, commas or semicolons.
    Returns (None, None) when the file or the port entry is missing.
    """
    if not os.path.exists(conffile):
        return (None, None)
    with open(conffile,'r') as fd:
        for raw_line in fd.readlines():
            entry = str(raw_line).strip()
            # Skip blank lines and comments.
            if not entry or entry.startswith("#"):
                continue
            fields = re.split('[ ,;]+', entry)
            if len(fields) < 3:
                continue
            if str(port) == fields[0]:
                return (fields[1], fields[2])
    return (None, None)
class MGdb(object):
    # Collects MongoDB serverStatus() metrics for Zabbix by shelling out
    # to the `mongo` client, with a short-lived /tmp file cache so repeated
    # Zabbix item polls don't hammer the server.
    def __init__(self, iphost="127.0.0.1", port=27017, username=None, password=None, force=False, debug=True):
        # force=True bypasses the cache file; debug=True echoes log records
        # to the console in addition to the log file.
        self._iphost = iphost
        self._port = port
        self._username = username
        self._password = password
        self._force = force
        self._logpath = "/tmp/zabbix_mongodb.log"
        # One cache file per monitored port so several mongod instances on
        # one host don't clobber each other's cached output.
        self._cache_file = "/tmp/zabbix_mongodb_cache_%s.txt" %(port)
        if not port:
            self._cache_file = "/tmp/zabbix_mongodb_cache.txt"
        self._logger = Log(self._logpath,level="error",is_console=debug, mbs=5, count=5)
    def get_logger(self):
        # Expose the internal logger so callers (see main()) can log too.
        return self._logger
    def get_port_list(self,port_list=""):
        # Build the Zabbix low-level-discovery JSON listing mongod ports.
        # When no explicit port_list is given, ports are discovered with
        # netstat, which requires sudo privileges, e.g.:
        # [root@localhost ~]# tail -n 2 /etc/sudoers
        # Defaults:zabbix !requiretty
        # zabbix ALL=(root) NOPASSWD:/bin/netstat
        data = list()
        if port_list:
            for port in port_list:
                data.append({"{#MONGODB_PORT}": port})
        else:
            binname = "mongod"
            # Extract the local port numbers of listening mongod sockets.
            cmdstr = "netstat -nlpt | grep '%s' | awk '{print $4}'|awk -F: '{print $2}'|uniq" % (binname)
            disk_space_info = []
            (stdo_list, stde_list, retcode) = docmd(cmdstr, timeout=3, raw = False)
            log_da = [{"cmdstr": cmdstr},{"ret": retcode},{"stdo": "".join(stdo_list)}, {"stde": "".join(stde_list)}]
            logstr = get_logstr(log_da, max_key_len=10)
            if retcode !=0:
                self._logger.error(logstr)
                # NOTE(review): failure returns a dict while success returns
                # a JSON string — callers just print the value either way.
                return {}
            else:
                self._logger.info(logstr)
                for port in stdo_list:
                    port = int(str(port).strip())
                    data.append({"{#MONGODB_PORT}": port})
        import json
        return json.dumps({'data': data}, sort_keys=True, indent=7, separators=(",",":"))
    def _get_result(self, iphost=None, port=None, username=None, password=None):
        # Fetch db.serverStatus() as a dict, via the cache when it is fresh
        # enough, otherwise by invoking the `mongo` shell client.
        # Returns None on failure (no client binary, command error, ...).
        try:
            hostname= iphost if iphost else self._iphost
            port = port if port else self._port
            username = username if username else self._username
            password = password if password else self._password
            resobj = None
            if self._force == False:
                # Cache file layout: line 1 = unix timestamp of the write,
                # line 2 = the serverStatus() JSON document.
                if os.path.exists(self._cache_file):
                    with open(self._cache_file, "r") as fd:
                        alllines = fd.readlines()
                        fd.close()
                    if alllines and len(alllines)>1:
                        old_unixtime = int(str(alllines[0]).strip())
                        now_unixtime = int(time.time())
                        if (now_unixtime - old_unixtime) <= 60: ## reuse the cache when it is under 1 minute old
                            resobj = str(alllines[1]).strip()
                            # NOTE(review): relies on a module-level `json`
                            # import not visible in this chunk — verify.
                            resobj = json.loads(resobj)
            if resobj:
                log_da = [{"msg": "Get From Cache File"}, {"content": str(resobj)}]
                logstr = get_logstr(log_da, max_key_len=10)
                self._logger.info(logstr)
                return resobj
            # Cache miss/stale: locate the mongo client among known installs.
            pbinpaths = [
                "/usr/local/mongodb/bin/mongo",
                "/opt/mongodb/bin/mongo",
                "/home/mongodb/mongodb/bin/mongo",
            ]
            cmdstr = None
            for bp in pbinpaths:
                if os.path.exists(bp):
                    if username and password:
                        cmdstr = "echo 'db.serverStatus()' | %s admin --host '%s' --port %s -u %s -p %s --quiet" % (bp, hostname, port, username, password)
                    else:
                        cmdstr = "echo 'db.serverStatus()' | %s admin --host '%s' --port %s --quiet" % (bp, hostname, port)
                    break
            if not cmdstr:
                print "the mongo not find"
                return None
            (stdo_list, stde_list, retcode) = docmd(cmdstr, timeout=3, raw = False)
            log_da = [{"cmdstr": cmdstr},{"ret": retcode},{"stdo": None if not stdo_list else "".join(stdo_list)}, {"stde": None if not stde_list else"".join(stde_list)}]
            logstr = get_logstr(log_da, max_key_len=10)
            if retcode !=0:
                self._logger.error(logstr)
                return None
            else:
                self._logger.info(logstr)
            stdo_str = "".join(stdo_list)
            # Strip the mongo shell's non-JSON wrappers (NumberLong(...),
            # ISODate(...), ObjectId(...)) so json.loads() can parse it.
            stdo_str = stdo_str.replace("NumberLong(", "").replace(")","").replace("ISODate(","").replace("ObjectId(","")
            resobj = json.loads(stdo_str)
            # Refresh the cache: timestamp on line 1, document on line 2.
            now_unixtime = int(time.time())
            with open(self._cache_file, "w") as fd:
                fd.write(str(now_unixtime)+"\n")
                fd.write(stdo_str)
                fd.close()
            return resobj
        except Exception as expt:
            import traceback
            tb = traceback.format_exc()
            self._logger.error(tb)
    def get_item_val(self, *items):
        # Walk nested dict keys, e.g. get_item_val("mem", "resident").
        # Returns 0 when the path is missing or no key matched at all.
        resobj = self._get_result()
        src_res = resobj
        for item in items:
            # Python-2 style checks (types.DictType / has_key) kept as-is.
            if resobj and type(resobj) == types.DictType and resobj.has_key(item):
                resobj = resobj[item]
        if resobj == None or resobj == src_res:
            resobj = 0
        return resobj
    def get_result(self):
        # Public accessor for the full serverStatus() document.
        return self._get_result()
    def get_item_tval(self, items, val_type="int"):
        # Like get_item_val() but coerces the value to int/float/str;
        # floats are rounded to two decimal places.
        val = self.get_item_val(*items)
        if val == None:return None # note: 0 also fails this check upstream
        if val_type == "int":
            return int(val)
        if val_type == "float":
            fval = "%.2f" % (val)
            return float(fval)
        if val_type == "str":
            return str(val)
        return int(val)
    def print_all_key_val(self):
        # Debug helper: dump the whole serverStatus() document as JSON.
        resobj = self._get_result()
        print json.dumps(resobj, indent=4)
def main():
    """Command-line entry point: parse options, then either list mongod
    ports (Zabbix discovery), dump all metrics, or print a single item."""
    usage = "usage: %prog [options]\n Fetch mongodb status"
    # NOTE(review): OptionParser/re must come from module-level imports not
    # visible in this chunk — verify.
    parser = OptionParser(usage)
    parser.add_option("-l", "--list",
                      action="store_true", dest="is_list", default=False,
                      help="if list all port")
    parser.add_option("-H", "--host", action="store", dest="host", type="string", default='localhost', help="Connect to mongod host.")
    parser.add_option("-p",
                      "--port",
                      action="store",
                      dest="port",
                      type="int",
                      default=27017,
                      help="the port for mongodb, for example: 27017")
    parser.add_option("-u",
                      "--user",
                      action="store",
                      dest="username",
                      type="string",
                      default=None,
                      help="username")
    parser.add_option("-P",
                      "--password",
                      action="store",
                      dest="password",
                      type="string",
                      default=None,
                      help="password")
    parser.add_option("-i",
                      "--item",
                      dest="item",
                      action="store",
                      type="string",
                      default=None,
                      help="which item to fetch")
    parser.add_option("-f", "--force",
                      action="store_true", dest="force", default=False,
                      help="if get from cache")
    parser.add_option("-d", "--debug",
                      action="store_true", dest="debug", default=False,
                      help="if open debug mode")
    parser.add_option("-a", "--all",
                      action="store_true",dest="all", default=False,
                      help="output all info")
    (options, args) = parser.parse_args()
    # Called with no arguments at all: show usage and bail out.
    if 1 >= len(sys.argv):
        parser.print_help()
        return
    hostname = options.host
    port = options.port
    conffile = "/usr/local/public-ops/conf/.mongodb.passwd"
    username = options.username
    password = options.password
    # Fall back to the per-port credentials file when either credential is
    # missing on the command line.
    if password == None or username == None:
        (username, password) = get_user_passwd_by_port(conffile, port)
        #print "Get (username=%s,password=%s) From Config File By port:%s" % (username, password, port)
    monitor_obj = MGdb(iphost=hostname, port=port, username=username, password=password, debug=options.debug, force=options.force)
    #################start
    port_list=[27017]
    if options.is_list == True:
        # Zabbix low-level discovery: print the port list JSON and exit.
        print monitor_obj.get_port_list(port_list)
        return
    if options.all == True:
        # Dump the full serverStatus() document (fresh MGdb, console quiet).
        mg = MGdb(debug=False)
        mg.print_all_key_val()
        sys.exit(0)
    try:
        # Default mode: fetch one dotted item, e.g. "-i mem.resident".
        item = options.item
        item_ary = re.split("\.", item)
        print monitor_obj.get_item_tval(item_ary)
    except Exception as expt:
        import traceback
        tb = traceback.format_exc()
        monitor_obj.get_logger().error(tb)
# Script entry point: run main() only when executed directly.
if __name__ == '__main__':
    main()
|
BillWang139967/zabbix_templates
|
template_mongodb/install/conf/mdb_sstat.py
|
Python
|
gpl-3.0
| 16,439
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import subprocess
import sys
import time
import traceback
from ansible.compat.six import iteritems, string_types, binary_type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure
from ansible.executor.task_result import TaskResult
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.template import Templar
from ansible.utils.encrypt import key_for_hostname
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode, to_bytes
from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskExecutor']
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
# Modules that we optimize by squashing loop items into a single call to
# the module
SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, rslt_q):
    """Stash the per-task execution context on the instance.

    Every constructor argument is kept as a private attribute for the
    other methods to use; the connection itself is created lazily in
    _execute(), so it starts out as None.
    """
    # The target host and the task to run on it.
    self._host = host
    self._task = task
    # Variables and play-level context used for templating/validation.
    self._job_vars = job_vars
    self._play_context = play_context
    # Plumbing: replacement stdin, loaders, and the shared result queue.
    self._new_stdin = new_stdin
    self._loader = loader
    self._shared_loader_obj = shared_loader_obj
    self._rslt_q = rslt_q
    # No connection yet; it is established on first use.
    self._connection = None
def run(self):
    '''
    The main executor entrypoint, where we determine if the specified
    task requires looping and either runs the task with self._run_loop()
    or self._execute(). After that, the returned results are parsed and
    returned as a dict.
    '''
    display.debug("in run()")
    try:
        # lookup plugins need to know if this task is executing from
        # a role, so that it can properly find files/templates/etc.
        roledir = None
        if self._task._role:
            roledir = self._task._role._role_path
        self._job_vars['roledir'] = roledir
        # items is None when the task has no with_* loop at all; an empty
        # list means a loop that produced nothing.
        items = self._get_loop_items()
        if items is not None:
            if len(items) > 0:
                item_results = self._run_loop(items)
                # loop through the item results, and remember the changed/failed
                # result flags based on any item there.
                changed = False
                failed = False
                for item in item_results:
                    if 'changed' in item and item['changed']:
                        changed = True
                    if 'failed' in item and item['failed']:
                        failed = True
                # create the overall result item, and set the changed/failed
                # flags there to reflect the overall result of the loop
                res = dict(results=item_results)
                if changed:
                    res['changed'] = True
                if failed:
                    res['failed'] = True
                    res['msg'] = 'One or more items failed'
                else:
                    res['msg'] = 'All items completed'
            else:
                res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
        else:
            display.debug("calling self._execute()")
            res = self._execute()
            display.debug("_execute() done")
        # make sure changed is set in the result, if it's not present
        if 'changed' not in res:
            res['changed'] = False
        # Recursively unwrap UnsafeProxy objects and decode byte strings
        # in place so the result can be serialized/sent back cleanly.
        def _clean_res(res):
            if isinstance(res, dict):
                for k in res.keys():
                    res[k] = _clean_res(res[k])
            elif isinstance(res, list):
                for idx,item in enumerate(res):
                    res[idx] = _clean_res(item)
            elif isinstance(res, UnsafeProxy):
                return res._obj
            elif isinstance(res, binary_type):
                return to_unicode(res, errors='strict')
            return res
        display.debug("dumping result to json")
        res = _clean_res(res)
        display.debug("done dumping result, returning")
        return res
    except AnsibleError as e:
        return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
    except Exception as e:
        return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_unicode(traceback.format_exc()), stdout='')
    finally:
        try:
            # self._connection may be None or lack close(); AttributeError
            # is the expected signal for that and is deliberately ignored.
            self._connection.close()
        except AttributeError:
            pass
        except Exception as e:
            display.debug(u"error closing connection: %s" % to_unicode(e))
def _get_loop_items(self):
    '''
    Loads a lookup plugin to handle the with_* portion of a task (if specified),
    and returns the items result.  Returns None when the task has no loop.
    '''
    # save the play context variables to a temporary dictionary,
    # so that we can modify the job vars without doing a full copy
    # and later restore them to avoid modifying things too early
    play_context_vars = dict()
    self._play_context.update_vars(play_context_vars)
    old_vars = dict()
    for k in play_context_vars.keys():
        if k in self._job_vars:
            old_vars[k] = self._job_vars[k]
        self._job_vars[k] = play_context_vars[k]
    templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
    items = None
    if self._task.loop:
        if self._task.loop in self._shared_loader_obj.lookup_loader:
            #TODO: remove convert_bare true and deprecate this in with_
            if self._task.loop == 'first_found':
                # first_found loops are special. If the item is undefined
                # then we want to fall through to the next value rather
                # than failing.
                loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=False, convert_bare=True)
                # Drop any term that still contains an unresolved variable.
                loop_terms = [t for t in loop_terms if not templar._contains_vars(t)]
            else:
                try:
                    loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
                except AnsibleUndefinedVariable as e:
                    display.deprecated("Skipping task due to undefined Error, in the future this will be a fatal error.: %s" % to_bytes(e))
                    return None
            items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=self._job_vars, wantlist=True)
        else:
            raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
    # now we restore any old job variables that may have been modified,
    # and delete them if they were in the play context vars but not in
    # the old variables dictionary
    for k in play_context_vars.keys():
        if k in old_vars:
            self._job_vars[k] = old_vars[k]
        else:
            del self._job_vars[k]
    if items:
        # Wrap every item in UnsafeProxy so loop data is never treated as
        # trusted template input later on.
        from ansible.vars.unsafe_proxy import UnsafeProxy
        for idx, item in enumerate(items):
            if item is not None and not isinstance(item, UnsafeProxy):
                items[idx] = UnsafeProxy(item)
    return items
def _run_loop(self, items):
    '''
    Runs the task with the loop items specified and collates the result
    into an array named 'results' which is inserted into the final result
    along with the item for which the loop ran.
    '''
    results = []
    # make copies of the job vars and task so we can add the item to
    # the variables and re-validate the task with the item variable
    #task_vars = self._job_vars.copy()
    task_vars = self._job_vars
    loop_var = 'item'
    if self._task.loop_control:
        # the value may be 'None', so we still need to default it back to 'item'
        loop_var = self._task.loop_control.loop_var or 'item'
    if loop_var in task_vars:
        display.warning("The loop variable '%s' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior." % loop_var)
    items = self._squash_items(items, loop_var, task_vars)
    for item in items:
        task_vars[loop_var] = item
        try:
            # Fresh copies per iteration so post-validation of one item
            # cannot leak templated values into the next.
            tmp_task = self._task.copy()
            tmp_play_context = self._play_context.copy()
        except AnsibleParserError as e:
            results.append(dict(failed=True, msg=to_unicode(e)))
            continue
        # now we swap the internal task and play context with their copies,
        # execute, and swap them back so we can do the next iteration cleanly
        (self._task, tmp_task) = (tmp_task, self._task)
        (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
        res = self._execute(variables=task_vars)
        (self._task, tmp_task) = (tmp_task, self._task)
        (self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
        # now update the result with the item info, and append the result
        # to the list of results
        res[loop_var] = item
        res['_ansible_item_result'] = True
        # Stream each per-item result out immediately (non-blocking put).
        self._rslt_q.put(TaskResult(self._host, self._task, res), block=False)
        results.append(res)
    # Remove the loop variable we injected into the shared task vars.
    del task_vars[loop_var]
    return results
def _squash_items(self, items, loop_var, variables):
    '''
    Squash items down to a comma-separated list for certain modules which support it
    (typically package management modules).  Returns the (possibly squashed)
    item list; on any error the original list is returned unchanged.
    '''
    name = None
    try:
        # _task.action could contain templatable strings (via action: and
        # local_action:) Template it before comparing. If we don't end up
        # optimizing it here, the templatable string might use template vars
        # that aren't available until later (it could even use vars from the
        # with_items loop) so don't make the templated string permanent yet.
        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
        task_action = self._task.action
        if templar._contains_vars(task_action):
            task_action = templar.template(task_action, fail_on_undefined=False)
        if len(items) > 0 and task_action in self.SQUASH_ACTIONS:
            if all(isinstance(o, string_types) for o in items):
                final_items = []
                # The package-name argument may be passed under any of
                # these aliases; pop the first one found.
                for allowed in ['name', 'pkg', 'package']:
                    name = self._task.args.pop(allowed, None)
                    if name is not None:
                        break
                # This gets the information to check whether the name field
                # contains a template that we can squash for
                template_no_item = template_with_item = None
                if name:
                    if templar._contains_vars(name):
                        # Template with two sentinel item values; differing
                        # renders prove the name actually uses the loop var.
                        variables[loop_var] = '\0$'
                        template_no_item = templar.template(name, variables, cache=False)
                        variables[loop_var] = '\0@'
                        template_with_item = templar.template(name, variables, cache=False)
                        del variables[loop_var]
                    # Check if the user is doing some operation that doesn't take
                    # name/pkg or the name/pkg field doesn't have any variables
                    # and thus the items can't be squashed
                    if template_no_item != template_with_item:
                        for item in items:
                            variables[loop_var] = item
                            # Only include items whose conditional passes.
                            if self._task.evaluate_conditional(templar, variables):
                                new_item = templar.template(name, cache=False)
                                final_items.append(new_item)
                        self._task.args['name'] = final_items
                        # Wrap this in a list so that the calling function loop
                        # executes exactly once
                        return [final_items]
                    else:
                        # Restore the name parameter
                        self._task.args['name'] = name
        #elif:
            # Right now we only optimize single entries. In the future we
            # could optimize more types:
            # * lists can be squashed together
            # * dicts could squash entries that match in all cases except the
            #   name or pkg field.
    except:
        # Squashing is an optimization. If it fails for any reason,
        # simply use the unoptimized list of items.
        # Restore the name parameter
        if name is not None:
            self._task.args['name'] = name
        pass
    return items
def _execute(self, variables=None):
    '''
    The primary workhorse of the executor system, this runs the task
    on the specified host (which may be the delegated_to host) and handles
    the retry/until and block rescue/always execution
    '''
    if variables is None:
        variables = self._job_vars
    templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
    context_validation_error = None
    try:
        # apply the given task's information to the connection info,
        # which may override some fields already set by the play or
        # the options specified on the command line
        self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)
        # fields set from the play/task may be based on variables, so we have to
        # do the same kind of post validation step on it here before we use it.
        self._play_context.post_validate(templar=templar)
        # now that the play context is finalized, if the remote_addr is not set
        # default to using the host's address field as the remote address
        if not self._play_context.remote_addr:
            self._play_context.remote_addr = self._host.address
        # We also add "magic" variables back into the variables dict to make sure
        # a certain subset of variables exist.
        self._play_context.update_vars(variables)
    except AnsibleError as e:
        # save the error, which we'll raise later if we don't end up
        # skipping this task during the conditional evaluation step
        context_validation_error = e
    # Evaluate the conditional (if any) for this task, which we do before running
    # the final task post-validation. We do this before the post validation due to
    # the fact that the conditional may specify that the task be skipped due to a
    # variable not being present which would otherwise cause validation to fail
    try:
        if not self._task.evaluate_conditional(templar, variables):
            display.debug("when evaluation failed, skipping this task")
            return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)
    except AnsibleError:
        # skip conditional exception in the case of includes as the vars needed might not be avaiable except in the included tasks or due to tags
        if self._task.action != 'include':
            raise
    # if we ran into an error while setting up the PlayContext, raise it now
    if context_validation_error is not None:
        raise context_validation_error
    # if this task is a TaskInclude, we just return now with a success code so the
    # main thread can expand the task list for the given host
    if self._task.action == 'include':
        include_variables = self._task.args.copy()
        include_file = include_variables.pop('_raw_params', None)
        if not include_file:
            return dict(failed=True, msg="No include file was specified to the include")
        include_file = templar.template(include_file)
        return dict(include=include_file, include_variables=include_variables)
    # Now we do final validation on the task, which sets all fields to their final values.
    self._task.post_validate(templar=templar)
    if '_variable_params' in self._task.args:
        variable_params = self._task.args.pop('_variable_params')
        if isinstance(variable_params, dict):
            display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
            variable_params.update(self._task.args)
            self._task.args = variable_params
    # get the connection and the handler for this execution
    # A cached connection is only reused when it is still connected AND
    # points at the same remote address; otherwise build a fresh one.
    if not self._connection or not getattr(self._connection, 'connected', False) or self._play_context.remote_addr != self._connection._play_context.remote_addr:
        self._connection = self._get_connection(variables=variables, templar=templar)
        self._connection.set_host_overrides(host=self._host, hostvars=variables.get('hostvars', {}).get(self._host.name, {}))
    else:
        # if connection is reused, its _play_context is no longer valid and needs
        # to be replaced with the one templated above, in case other data changed
        self._connection._play_context = self._play_context
    self._handler = self._get_action_handler(connection=self._connection, templar=templar)
    # And filter out any fields which were set to default(omit), and got the omit token value
    omit_token = variables.get('omit')
    if omit_token is not None:
        self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)
    # Read some values from the task, so that we can modify them if need be
    if self._task.until:
        retries = self._task.retries
        if retries is None:
            retries = 3
    else:
        # No until clause: a single attempt only.
        retries = 1
    delay = self._task.delay
    if delay < 0:
        delay = 1
    # make a copy of the job vars here, in case we need to update them
    # with the registered variable value later on when testing conditions
    vars_copy = variables.copy()
    display.debug("starting attempt loop")
    result = None
    for attempt in range(1, retries + 1):
        display.debug("running the handler")
        try:
            result = self._handler.run(task_vars=variables)
        except AnsibleConnectionFailure as e:
            return dict(unreachable=True, msg=to_unicode(e))
        display.debug("handler run complete")
        # preserve no log
        result["_ansible_no_log"] = self._play_context.no_log
        # update the local copy of vars with the registered value, if specified,
        # or any facts which may have been generated by the module execution
        if self._task.register:
            vars_copy[self._task.register] = wrap_var(result.copy())
        if self._task.async > 0:
            # the async_wrapper module returns dumped JSON via its stdout
            # response, so we parse it here and replace the result
            try:
                if 'skipped' in result and result['skipped'] or 'failed' in result and result['failed']:
                    return result
                result = json.loads(result.get('stdout'))
            except (TypeError, ValueError) as e:
                return dict(failed=True, msg=u"The async task did not return valid JSON: %s" % to_unicode(e))
            if self._task.poll > 0:
                result = self._poll_async_result(result=result, templar=templar)
            # ensure no log is preserved
            result["_ansible_no_log"] = self._play_context.no_log
        # helper methods for use below in evaluating changed/failed_when
        def _evaluate_changed_when_result(result):
            if self._task.changed_when is not None and self._task.changed_when:
                cond = Conditional(loader=self._loader)
                cond.when = self._task.changed_when
                result['changed'] = cond.evaluate_conditional(templar, vars_copy)
        def _evaluate_failed_when_result(result):
            if self._task.failed_when:
                cond = Conditional(loader=self._loader)
                cond.when = self._task.failed_when
                failed_when_result = cond.evaluate_conditional(templar, vars_copy)
                result['failed_when_result'] = result['failed'] = failed_when_result
            else:
                failed_when_result = False
            return failed_when_result
        if 'ansible_facts' in result:
            vars_copy.update(result['ansible_facts'])
        # set the failed property if the result has a non-zero rc. This will be
        # overridden below if the failed_when property is set
        if result.get('rc', 0) != 0:
            result['failed'] = True
        # if we didn't skip this task, use the helpers to evaluate the changed/
        # failed_when properties
        if 'skipped' not in result:
            _evaluate_changed_when_result(result)
            _evaluate_failed_when_result(result)
        if retries > 1:
            cond = Conditional(loader=self._loader)
            cond.when = self._task.until
            if cond.evaluate_conditional(templar, vars_copy):
                break
            else:
                # no conditional check, or it failed, so sleep for the specified time
                if attempt < retries:
                    result['attempts'] = attempt
                    result['_ansible_retry'] = True
                    result['retries'] = retries
                    display.debug('Retrying task, attempt %d of %d' % (attempt, retries))
                    self._rslt_q.put(TaskResult(self._host, self._task, result), block=False)
                    time.sleep(delay)
    else:
        if retries > 1:
            # we ran out of attempts, so mark the result as failed
            result['failed'] = True
    # do the final update of the local variables here, for both registered
    # values and any facts which may have been created
    if self._task.register:
        variables[self._task.register] = wrap_var(result)
    if 'ansible_facts' in result:
        variables.update(result['ansible_facts'])
    # save the notification target in the result, if it was specified, as
    # this task may be running in a loop in which case the notification
    # may be item-specific, ie. "notify: service {{item}}"
    if self._task.notify is not None:
        result['_ansible_notify'] = self._task.notify
    # add the delegated vars to the result, so we can reference them
    # on the results side without having to do any further templating
    # FIXME: we only want a limited set of variables here, so this is currently
    #        hardcoded but should be possibly fixed if we want more or if
    #        there is another source of truth we can use
    delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict()).copy()
    if len(delegated_vars) > 0:
        result["_ansible_delegated_vars"] = dict()
        for k in ('ansible_host', ):
            result["_ansible_delegated_vars"][k] = delegated_vars.get(k)
    # and return
    display.debug("attempt loop complete, returning result")
    return result
def _poll_async_result(self, result, templar):
    '''
    Polls for the specified JID to be complete, sleeping self._task.poll
    seconds between checks until self._task.async seconds have elapsed.
    '''
    async_jid = result.get('ansible_job_id')
    if async_jid is None:
        return dict(failed=True, msg="No job id was returned by the async task")
    # Create a new psuedo-task to run the async_status module, and run
    # that (with a sleep for "poll" seconds between each retry) until the
    # async time limit is exceeded.
    async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
    # Because this is an async task, the action handler is async. However,
    # we need the 'normal' action handler for the status check, so get it
    # now via the action_loader
    normal_handler = self._shared_loader_obj.action_loader.get(
        'normal',
        task=async_task,
        connection=self._connection,
        play_context=self._play_context,
        loader=self._loader,
        templar=templar,
        shared_loader_obj=self._shared_loader_obj,
    )
    # NOTE(review): `async` is a reserved keyword in Python 3.7+, so this
    # attribute access is Python-2 only — kept byte-identical here.
    time_left = self._task.async
    while time_left > 0:
        time.sleep(self._task.poll)
        async_result = normal_handler.run()
        # Stop polling as soon as the job finishes, fails, or is skipped.
        if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
            break
        time_left -= self._task.poll
    if int(async_result.get('finished', 0)) != 1:
        return dict(failed=True, msg="async task did not complete within the requested time")
    else:
        return async_result
def _get_connection(self, variables, templar):
    '''
    Reads the connection property for the host, and returns the
    correct connection object from the list of connection plugins
    '''
    if self._task.delegate_to is not None:
        # since we're delegating, we don't want to use interpreter values
        # which would have been set for the original target host
        # NOTE(review): deleting from the dict while iterating keys() is only
        # safe on Python 2, where keys() returns a list snapshot.
        for i in variables.keys():
            if isinstance(i, string_types) and i.startswith('ansible_') and i.endswith('_interpreter'):
                del variables[i]
        # now replace the interpreter values with those that may have come
        # from the delegated-to host
        delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict())
        if isinstance(delegated_vars, dict):
            for i in delegated_vars:
                if isinstance(i, string_types) and i.startswith("ansible_") and i.endswith("_interpreter"):
                    variables[i] = delegated_vars[i]
    conn_type = self._play_context.connection
    if conn_type == 'smart':
        # 'smart' resolves to ssh when usable, otherwise falls back to paramiko
        conn_type = 'ssh'
        if sys.platform.startswith('darwin') and self._play_context.password:
            # due to a current bug in sshpass on OSX, which can trigger
            # a kernel panic even for non-privileged users, we revert to
            # paramiko on that OS when a SSH password is specified
            conn_type = "paramiko"
        else:
            # see if SSH can support ControlPersist if not use paramiko
            try:
                cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                (out, err) = cmd.communicate()
                err = to_unicode(err)
                # an old ssh that rejects the option prints one of these
                if u"Bad configuration option" in err or u"Usage:" in err:
                    conn_type = "paramiko"
            except OSError:
                # ssh binary not found at all
                conn_type = "paramiko"
    connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
    if not connection:
        raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
    if self._play_context.accelerate:
        # accelerate is deprecated as of 2.1...
        display.deprecated('Accelerated mode is deprecated. Consider using SSH with ControlPersist and pipelining enabled instead')
        # launch the accelerated daemon here
        ssh_connection = connection
        # a 'normal' handler over the plain ssh connection, used only to push
        # and start the accelerate daemon if it is not already running
        handler = self._shared_loader_obj.action_loader.get(
            'normal',
            task=self._task,
            connection=ssh_connection,
            play_context=self._play_context,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )
        # shared key used to authenticate with the accelerate daemon
        key = key_for_hostname(self._play_context.remote_addr)
        accelerate_args = dict(
            password=base64.b64encode(key.__str__()),
            port=self._play_context.accelerate_port,
            minutes=C.ACCELERATE_DAEMON_TIMEOUT,
            ipv6=self._play_context.accelerate_ipv6,
            debug=self._play_context.verbosity,
        )
        connection = self._shared_loader_obj.connection_loader.get('accelerate', self._play_context, self._new_stdin)
        if not connection:
            raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
        try:
            connection._connect()
        except AnsibleConnectionFailure:
            # daemon not running yet: start it over ssh, then connect again
            display.debug('connection failed, fallback to accelerate')
            res = handler._execute_module(module_name='accelerate', module_args=accelerate_args, task_vars=variables, delete_remote_tmp=False)
            display.debug(res)
            connection._connect()
    return connection
def _get_action_handler(self, connection, templar):
    '''
    Returns the correct action plugin to handle the requestion task action
    '''
    if self._task.action in self._shared_loader_obj.action_loader:
        # a dedicated action plugin exists for this module; such plugins do
        # not support running in the background ('async' is a plain py2-era
        # attribute name here)
        if self._task.async != 0:
            raise AnsibleError("async mode is not supported with the %s module" % self._task.action)
        handler_name = self._task.action
    elif self._task.async == 0:
        # no dedicated plugin, synchronous run: generic module execution
        handler_name = 'normal'
    else:
        # no dedicated plugin, backgrounded run: async wrapper execution
        handler_name = 'async'
    handler = self._shared_loader_obj.action_loader.get(
        handler_name,
        task=self._task,
        connection=connection,
        play_context=self._play_context,
        loader=self._loader,
        templar=templar,
        shared_loader_obj=self._shared_loader_obj,
    )
    if not handler:
        raise AnsibleError("the handler '%s' was not found" % handler_name)
    return handler
|
lpirl/ansible
|
lib/ansible/executor/task_executor.py
|
Python
|
gpl-3.0
| 32,789
|
from yanntricks import *
def ContourGreen():
    """Draw the closed polar curve r = 1 + cos(x)sin(x) with direction arrows."""
    pspict, fig = SinglePicture("ContourGreen")
    x = var('x')
    # full revolution of the polar curve
    contour = PolarCurve(1 + cos(x) * sin(x)).graph(0, 2 * pi)
    # one arrow every pi/4 along the parameter, offset by 0.5
    arrow_positions = [k * pi / 4 + 0.5 for k in range(0, 8)]
    contour.put_arrow(arrow_positions)
    pspict.DrawGraphs(contour)
    fig.conclude()
    fig.write_the_file()
|
LaurentClaessens/mazhe
|
src_yanntricks/yanntricksContourGreen.py
|
Python
|
gpl-3.0
| 295
|
import time
import webbrowser
import json
import wx
import requests
from service.port import Port
from service.fit import Fit
from eos.types import Cargo
from eos.db import getItem
from gui.display import Display
import gui.globalEvents as GE
if 'wxMac' not in wx.PlatformInfo or ('wxMac' in wx.PlatformInfo and wx.VERSION >= (3, 0)):
from service.crest import Crest, CrestModes
class CrestFittings(wx.Frame):
    """Frame for browsing, importing and deleting fittings stored in EVE via CREST."""

    def __init__(self, parent):
        wx.Frame.__init__(self, parent, id=wx.ID_ANY, title="Browse EVE Fittings", pos=wx.DefaultPosition,
                          size=wx.Size(550, 450), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
        self.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
        self.mainFrame = parent
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        sCrest = Crest.getInstance()
        characterSelectSizer = wx.BoxSizer(wx.HORIZONTAL)
        if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
            # Implicit mode: exactly one logged-in character, no choice needed.
            self.stLogged = wx.StaticText(self, wx.ID_ANY, "Currently logged in as %s" % sCrest.implicitCharacter.name,
                                          wx.DefaultPosition, wx.DefaultSize)
            self.stLogged.Wrap(-1)
            characterSelectSizer.Add(self.stLogged, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
        else:
            self.charChoice = wx.Choice(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, [])
            characterSelectSizer.Add(self.charChoice, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
            self.updateCharList()
        self.fetchBtn = wx.Button(self, wx.ID_ANY, u"Fetch Fits", wx.DefaultPosition, wx.DefaultSize, 5)
        characterSelectSizer.Add(self.fetchBtn, 0, wx.ALL, 5)
        mainSizer.Add(characterSelectSizer, 0, wx.EXPAND, 5)
        self.sl = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
        mainSizer.Add(self.sl, 0, wx.EXPAND | wx.ALL, 5)
        contentSizer = wx.BoxSizer(wx.HORIZONTAL)
        browserSizer = wx.BoxSizer(wx.VERTICAL)
        self.fitTree = FittingsTreeView(self)
        browserSizer.Add(self.fitTree, 1, wx.ALL | wx.EXPAND, 5)
        contentSizer.Add(browserSizer, 1, wx.EXPAND, 0)
        fitSizer = wx.BoxSizer(wx.VERTICAL)
        self.fitView = FitView(self)
        fitSizer.Add(self.fitView, 1, wx.ALL | wx.EXPAND, 5)
        btnSizer = wx.BoxSizer(wx.HORIZONTAL)
        self.importBtn = wx.Button(self, wx.ID_ANY, u"Import to pyfa", wx.DefaultPosition, wx.DefaultSize, 5)
        self.deleteBtn = wx.Button(self, wx.ID_ANY, u"Delete from EVE", wx.DefaultPosition, wx.DefaultSize, 5)
        btnSizer.Add(self.importBtn, 1, wx.ALL, 5)
        btnSizer.Add(self.deleteBtn, 1, wx.ALL, 5)
        fitSizer.Add(btnSizer, 0, wx.EXPAND)
        contentSizer.Add(fitSizer, 1, wx.EXPAND, 0)
        mainSizer.Add(contentSizer, 1, wx.EXPAND, 5)
        self.fetchBtn.Bind(wx.EVT_BUTTON, self.fetchFittings)
        self.importBtn.Bind(wx.EVT_BUTTON, self.importFitting)
        self.deleteBtn.Bind(wx.EVT_BUTTON, self.deleteFitting)
        self.mainFrame.Bind(GE.EVT_SSO_LOGOUT, self.ssoLogout)
        self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.statusbar = wx.StatusBar(self)
        self.statusbar.SetFieldsCount()
        self.SetStatusBar(self.statusbar)
        # Timer used to count down the server-side cache lifetime of fetched fits.
        self.cacheTimer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.updateCacheStatus, self.cacheTimer)
        self.SetSizer(mainSizer)
        self.Layout()
        self.Centre(wx.BOTH)

    def ssoLogin(self, event):
        """Refresh the character list after an SSO login."""
        self.updateCharList()
        event.Skip()

    def updateCharList(self):
        """Repopulate the character dropdown; close the window if no characters remain."""
        sCrest = Crest.getInstance()
        chars = sCrest.getCrestCharacters()
        if len(chars) == 0:
            # No characters left -- close and stop touching widgets of a
            # window that is being torn down (previously kept running).
            self.Close()
            return
        self.charChoice.Clear()
        for char in chars:
            self.charChoice.Append(char.name, char.ID)
        self.charChoice.SetSelection(0)

    def updateCacheStatus(self, event):
        """Show the remaining cache lifetime in the status bar; stop the timer on expiry.

        BUGFIX: the original computed ``t = time.gmtime(delta)`` and then
        tested ``t < 0`` -- comparing a struct_time to an int, which never
        held, so the countdown timer was never stopped. Compare the numeric
        delta first, then format it.
        """
        remaining = self.cacheTime - time.time()
        if remaining < 0:
            self.cacheTimer.Stop()
        else:
            sTime = time.strftime("%H:%M:%S", time.gmtime(remaining))
            self.statusbar.SetStatusText("Cached for %s" % sTime, 0)

    def ssoLogout(self, event):
        """Close on implicit-mode logout; otherwise just refresh the character list."""
        if event.type == CrestModes.IMPLICIT:
            self.Close()
        else:
            self.updateCharList()
        event.Skip()  # continue event

    def OnClose(self, event):
        # Unhook our handlers so the main frame does not keep (and call)
        # references to this closed window on later SSO events.
        self.mainFrame.Unbind(GE.EVT_SSO_LOGOUT, handler=self.ssoLogout)
        self.mainFrame.Unbind(GE.EVT_SSO_LOGIN, handler=self.ssoLogin)
        event.Skip()

    def getActiveCharacter(self):
        """Return the ID of the character to act as, or None if nothing is selected."""
        sCrest = Crest.getInstance()
        if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
            return sCrest.implicitCharacter.ID
        selection = self.charChoice.GetCurrentSelection()
        return self.charChoice.GetClientData(selection) if selection is not None else None

    def fetchFittings(self, event):
        """Fetch the active character's fittings from CREST and populate the tree."""
        sCrest = Crest.getInstance()
        # Create the busy indicator *before* the try block so the finally
        # clause can never hit an unbound name if construction fails early.
        waitDialog = wx.BusyInfo("Fetching fits, please wait...", parent=self)
        try:
            fittings = sCrest.getFittings(self.getActiveCharacter())
            # epoch timestamp after which the server-side cache expires
            self.cacheTime = fittings.get('cached_until')
            self.updateCacheStatus(None)
            self.cacheTimer.Start(1000)
            self.fitTree.populateSkillTree(fittings)
        except requests.exceptions.ConnectionError:
            self.statusbar.SetStatusText("Connection error, please check your internet connection")
        finally:
            del waitDialog

    def importFitting(self, event):
        """Import the currently selected fit into pyfa."""
        selection = self.fitView.fitSelection
        if not selection:
            return
        data = self.fitTree.fittingsTreeCtrl.GetPyData(selection)
        sPort = Port.getInstance()
        fits = sPort.importFitFromBuffer(data)
        self.mainFrame._openAfterImport(fits)

    def deleteFitting(self, event):
        """Delete the currently selected fit from EVE after user confirmation."""
        sCrest = Crest.getInstance()
        selection = self.fitView.fitSelection
        if not selection:
            return
        data = json.loads(self.fitTree.fittingsTreeCtrl.GetPyData(selection))
        dlg = wx.MessageDialog(self,
                               "Do you really want to delete %s (%s) from EVE?" % (data['name'], data['ship']['name']),
                               "Confirm Delete", wx.YES | wx.NO | wx.ICON_QUESTION)
        if dlg.ShowModal() == wx.ID_YES:
            try:
                sCrest.delFitting(self.getActiveCharacter(), data['fittingID'])
            except requests.exceptions.ConnectionError:
                self.statusbar.SetStatusText("Connection error, please check your internet connection")
class ExportToEve(wx.Frame):
    """Small frame that exports the currently active fit to EVE via CREST."""

    def __init__(self, parent):
        wx.Frame.__init__(self, parent, id=wx.ID_ANY, title="Export fit to EVE", pos=wx.DefaultPosition,
                          size=(wx.Size(350, 100)), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
        self.mainFrame = parent
        self.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE))
        sCrest = Crest.getInstance()
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        hSizer = wx.BoxSizer(wx.HORIZONTAL)
        if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
            self.stLogged = wx.StaticText(self, wx.ID_ANY, "Currently logged in as %s" % sCrest.implicitCharacter.name,
                                          wx.DefaultPosition, wx.DefaultSize)
            self.stLogged.Wrap(-1)
            hSizer.Add(self.stLogged, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
        else:
            self.charChoice = wx.Choice(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, [])
            hSizer.Add(self.charChoice, 1, wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
            self.updateCharList()
            self.charChoice.SetSelection(0)
        self.exportBtn = wx.Button(self, wx.ID_ANY, u"Export Fit", wx.DefaultPosition, wx.DefaultSize, 5)
        hSizer.Add(self.exportBtn, 0, wx.ALL, 5)
        mainSizer.Add(hSizer, 0, wx.EXPAND, 5)
        self.exportBtn.Bind(wx.EVT_BUTTON, self.exportFitting)
        self.statusbar = wx.StatusBar(self)
        self.statusbar.SetFieldsCount(2)
        self.statusbar.SetStatusWidths([100, -1])
        self.mainFrame.Bind(GE.EVT_SSO_LOGOUT, self.ssoLogout)
        self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        # BUGFIX: attach the top-level sizer. hSizer is already owned by
        # mainSizer, so setting hSizer directly orphaned the layout tree.
        self.SetSizer(mainSizer)
        self.SetStatusBar(self.statusbar)
        self.Layout()
        self.Centre(wx.BOTH)

    def updateCharList(self):
        """Repopulate the character dropdown; close the window if no characters remain."""
        sCrest = Crest.getInstance()
        chars = sCrest.getCrestCharacters()
        if len(chars) == 0:
            # No characters left -- close and stop touching widgets of a
            # window that is being torn down.
            self.Close()
            return
        self.charChoice.Clear()
        for char in chars:
            self.charChoice.Append(char.name, char.ID)
        self.charChoice.SetSelection(0)

    def ssoLogin(self, event):
        """Refresh the character list after an SSO login."""
        self.updateCharList()
        event.Skip()

    def ssoLogout(self, event):
        """Close on implicit-mode logout; otherwise just refresh the character list."""
        if event.type == CrestModes.IMPLICIT:
            self.Close()
        else:
            self.updateCharList()
        event.Skip()  # continue event

    def OnClose(self, event):
        # Unhook *both* handlers -- the EVT_SSO_LOGIN binding was previously
        # leaked (cf. CrestFittings.OnClose), keeping the closed frame alive.
        self.mainFrame.Unbind(GE.EVT_SSO_LOGOUT, handler=self.ssoLogout)
        self.mainFrame.Unbind(GE.EVT_SSO_LOGIN, handler=self.ssoLogin)
        event.Skip()

    def getActiveCharacter(self):
        """Return the ID of the character to act as, or None if nothing is selected."""
        sCrest = Crest.getInstance()
        if sCrest.settings.get('mode') == CrestModes.IMPLICIT:
            return sCrest.implicitCharacter.ID
        selection = self.charChoice.GetCurrentSelection()
        return self.charChoice.GetClientData(selection) if selection is not None else None

    def exportFitting(self, event):
        """Serialize the active fit and POST it to CREST, reporting status."""
        sPort = Port.getInstance()
        fitID = self.mainFrame.getActiveFit()
        self.statusbar.SetStatusText("", 0)
        if fitID is None:
            self.statusbar.SetStatusText("Please select an active fitting in the main window", 1)
            return
        self.statusbar.SetStatusText("Sending request and awaiting response", 1)
        sCrest = Crest.getInstance()
        try:
            sFit = Fit.getInstance()
            data = sPort.exportCrest(sFit.getFit(fitID))
            res = sCrest.postFitting(self.getActiveCharacter(), data)
            self.statusbar.SetStatusText("%d: %s" % (res.status_code, res.reason), 0)
            try:
                # CREST errors come back as a JSON body with a 'message' field
                text = json.loads(res.text)
                self.statusbar.SetStatusText(text['message'], 1)
            except ValueError:
                self.statusbar.SetStatusText("", 1)
        except requests.exceptions.ConnectionError:
            self.statusbar.SetStatusText("Connection error, please check your internet connection", 1)
class CrestMgmt(wx.Dialog):
    """Dialog for adding and revoking CREST-authorized characters."""

    def __init__(self, parent):
        wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title="CREST Character Management", pos=wx.DefaultPosition,
                           size=wx.Size(550, 250), style=wx.DEFAULT_DIALOG_STYLE)
        self.mainFrame = parent
        mainSizer = wx.BoxSizer(wx.HORIZONTAL)
        self.lcCharacters = wx.ListCtrl(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LC_REPORT)
        self.lcCharacters.InsertColumn(0, heading='Character')
        self.lcCharacters.InsertColumn(1, heading='Refresh Token')
        self.popCharList()
        mainSizer.Add(self.lcCharacters, 1, wx.ALL | wx.EXPAND, 5)
        btnSizer = wx.BoxSizer(wx.VERTICAL)
        self.addBtn = wx.Button(self, wx.ID_ANY, u"Add Character", wx.DefaultPosition, wx.DefaultSize, 0)
        btnSizer.Add(self.addBtn, 0, wx.ALL | wx.EXPAND, 5)
        self.deleteBtn = wx.Button(self, wx.ID_ANY, u"Revoke Character", wx.DefaultPosition, wx.DefaultSize, 0)
        btnSizer.Add(self.deleteBtn, 0, wx.ALL | wx.EXPAND, 5)
        mainSizer.Add(btnSizer, 0, wx.EXPAND, 5)
        self.addBtn.Bind(wx.EVT_BUTTON, self.addChar)
        self.deleteBtn.Bind(wx.EVT_BUTTON, self.delChar)
        # NOTE(review): this handler is never unbound (no close hook), so the
        # main frame keeps a reference to the dialog -- confirm lifecycle.
        self.mainFrame.Bind(GE.EVT_SSO_LOGIN, self.ssoLogin)
        self.SetSizer(mainSizer)
        self.Layout()
        self.Centre(wx.BOTH)

    def ssoLogin(self, event):
        """Refresh the character list when a new SSO login completes."""
        self.popCharList()
        event.Skip()

    def popCharList(self):
        """Fill the list control with all known CREST characters."""
        sCrest = Crest.getInstance()
        chars = sCrest.getCrestCharacters()
        self.lcCharacters.DeleteAllItems()
        for index, char in enumerate(chars):
            self.lcCharacters.InsertStringItem(index, char.name)
            self.lcCharacters.SetStringItem(index, 1, char.refresh_token)
            # stash the character ID on the row so delChar can find the record
            self.lcCharacters.SetItemData(index, char.ID)
        self.lcCharacters.SetColumnWidth(0, wx.LIST_AUTOSIZE)
        self.lcCharacters.SetColumnWidth(1, wx.LIST_AUTOSIZE)

    def addChar(self, event):
        """Start the local SSO callback server and open the login page in a browser."""
        sCrest = Crest.getInstance()
        uri = sCrest.startServer()
        webbrowser.open(uri)

    def delChar(self, event):
        """Revoke the selected character (if any) and refresh the list."""
        item = self.lcCharacters.GetFirstSelected()
        if item > -1:
            charID = self.lcCharacters.GetItemData(item)
            sCrest = Crest.getInstance()
            sCrest.delCrestCharacter(charID)
            self.popCharList()
class FittingsTreeView(wx.Panel):
    """Tree of CREST fittings grouped by ship type."""

    def __init__(self, parent):
        wx.Panel.__init__(self, parent, id=wx.ID_ANY)
        self.parent = parent
        pmainSizer = wx.BoxSizer(wx.VERTICAL)
        tree = self.fittingsTreeCtrl = wx.TreeCtrl(self, wx.ID_ANY, style=wx.TR_DEFAULT_STYLE | wx.TR_HIDE_ROOT)
        pmainSizer.Add(tree, 1, wx.EXPAND | wx.ALL, 0)
        self.root = tree.AddRoot("Fits")
        self.populateSkillTree(None)
        self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.displayFit)
        self.SetSizer(pmainSizer)
        self.Layout()

    def populateSkillTree(self, data):
        """Rebuild the tree from a CREST fittings payload; no-op when data is None."""
        if data is None:
            return
        root = self.root
        tree = self.fittingsTreeCtrl
        tree.DeleteChildren(root)
        # Group fits by ship name. (Renamed from `dict`/`list`, which
        # shadowed the builtins.)
        fitsByShip = {}
        for fit in data['items']:
            fitsByShip.setdefault(fit['ship']['name'], []).append(fit)
        for name, shipFits in fitsByShip.iteritems():
            shipID = tree.AppendItem(root, name)
            for fit in shipFits:
                fitId = tree.AppendItem(shipID, fit['name'])
                # store the raw fit JSON on the node for displayFit/import
                tree.SetPyData(fitId, json.dumps(fit))
        tree.SortChildren(root)

    def displayFit(self, event):
        """Show the cargo contents of the activated fit in the parent's fit view."""
        selection = self.fittingsTreeCtrl.GetSelection()
        fit = json.loads(self.fittingsTreeCtrl.GetPyData(selection))
        cargoItems = []
        for item in fit['items']:
            try:
                cargo = Cargo(getItem(item['type']['id']))
                cargo.amount = item['quantity']
                cargoItems.append(cargo)
            except Exception:
                # Unknown/retired items are skipped rather than aborting the
                # whole display. (Narrowed from a bare except, which also
                # swallowed KeyboardInterrupt/SystemExit.)
                pass
        self.parent.fitView.fitSelection = selection
        self.parent.fitView.update(cargoItems)
class FitView(Display):
    """Read-only item list showing the contents of the currently selected CREST fit."""

    # columns displayed for each cargo item
    DEFAULT_COLS = ["Base Icon",
                    "Base Name"]

    def __init__(self, parent):
        Display.__init__(self, parent, style=wx.LC_SINGLE_SEL)
        # tree item id of the fit currently shown; set by FittingsTreeView.displayFit
        self.fitSelection = None
|
Ebag333/Pyfa
|
gui/crestFittings.py
|
Python
|
gpl-3.0
| 15,015
|
#!/usr/bin/python
import subprocess

# this script gets called regularly by cron and runs as the unprivileged exhibit user
if __name__ == '__main__':
    # pull latest changes from git
    # will throw and exit on error (e. g. if network connection is down)
    # argument list + no shell: avoids an unnecessary intermediate shell
    subprocess.check_call(["git", "pull"])
|
wmde/MSWissenschaft
|
update/update-appusr.py
|
Python
|
gpl-3.0
| 317
|
#!/usr/bin/python
# coding: utf-8
import json
import logging
import os.path
import sys
import time
import urllib
import urllib2
import settings
import backends
import utils.dictconfig
# Shared-secret POST body sent with every request to Stjornbord.
POST_SYNC_SECRET = SYNC_SECRET = urllib.urlencode((("secret", settings.SYNC_SECRET),))
def init_logging():
    """
    Set up logging, see config in the settings module.

    Returns the daemon's logger; bound to the module-global ``log`` in the
    __main__ guard so the helpers in this module can use it.
    """
    utils.dictconfig.dictConfig(settings.LOGGING)
    return logging.getLogger("user_daemon")
def init_backends():
    """
    Import backends, logic borrowed from Django.

    Instantiates one object per dotted path in ``settings.BACKENDS`` and
    returns them as a list. Raises RuntimeError if a module cannot be
    imported or does not define the named class.
    """
    my_backends = []
    # Logic borrowed from django.middleware.base
    for backend_path in settings.BACKENDS:
        module, classname = backend_path.rsplit('.', 1)
        try:
            # Extract, since Python 2.6 doesn't include importlib
            __import__(module)
            mod = sys.modules[module]
        except ImportError, e:
            raise RuntimeError('Error importing backend %s: "%s"' % (module, e))
        try:
            klass = getattr(mod, classname)
        except AttributeError:
            raise RuntimeError('Backend module "%s" does not define a "%s" class' % (module, classname))
        my_backends.append(klass())
    return my_backends
def poll(my_backends):
    """
    Fetch a list of dirty users and pipe them through processing backends.

    Returns the number of users successfully processed (from process()).
    """
    # Tick all backends. This gives them a chance to do background work.
    for backend in my_backends:
        backend.tick()
    # Fetch a list of dirty users
    log.debug("Polling dirty users from %s", settings.DIRTY_USERS)
    fp = urllib2.urlopen(settings.DIRTY_USERS, POST_SYNC_SECRET)
    dirty = json.load(fp)
    fp.close()
    return process(my_backends, dirty)
def process(my_backends, dirty):
    """Run every backend over each dirty user, clearing the dirty bit on success.

    A failure in any backend skips that user and leaves the dirty bit set,
    so the user is retried on a later poll. Returns the count processed.
    """
    # Iterate through users and process
    processed = 0
    for user in dirty:
        log.info("Processing user %s", user["username"])
        try:
            for backend in my_backends:
                backend.process_user(user)
        except Exception, e:
            log.exception("Could not process user %s", user["username"])
            continue
        clear_dirtybit(user)
        processed += 1
    return processed
def clear_dirtybit(user):
    """
    Connect to Stjornbord and clear the user's dirty bit. The clearing condition
    is that the dirty timestamp is the same.
    """
    url = settings.CLEAN_DIRTY % (user["username"], user["dirty"])
    response = urllib2.urlopen(url, POST_SYNC_SECRET)
    status = response.getcode()
    if status == 200:
        log.info("Successfully cleared dirtybit for %s (%s)", user["username"], user["dirty"])
    else:
        log.warning("Failed to clear dirtybit for %s (%s). Return code: %s", user["username"], user["dirty"], status)
def main():
    """Run the poll/process loop forever, sleeping whenever there is no work."""
    log.info("Starting up!")
    poll_failures = 0
    my_backends = init_backends()
    while True:
        processed = 0
        try:
            processed = poll(my_backends)
            poll_failures = 0
        except urllib2.URLError, e:
            # transient fetch failures are tolerated; escalate via
            # log.exception every POLL_ALERT_THRESHOLD consecutive failures
            poll_failures += 1
            log.warn("Fetch failure, reason: %s. Will throw an exception after %d failures.",
                     e, settings.POLL_ALERT_THRESHOLD)
            if poll_failures % settings.POLL_ALERT_THRESHOLD == 0:
                log.exception("Error fetching data from %s. Failures: %d", settings.DIRTY_USERS, poll_failures)
        except backends.NonRetryableException, e:
            # back off hard: retrying immediately would just fail again
            t = settings.NON_RETRYABLE_ERROR_SLEEP_SEC
            log.exception("Non retryable exception raised, going to sleep for %d seconds", t)
            time.sleep(t)
        except Exception, e:
            log.exception("Uncaught exception while processing dirty users")
        if processed == 0:
            # idle: wait before polling again
            log.debug("Didn't process any records, sleeping for %d seconds", settings.POLL_INTERVAL_SEC)
            time.sleep(settings.POLL_INTERVAL_SEC)
if __name__ == "__main__":
log = init_logging()
main()
|
opinnmr/stjornbord-user-daemon
|
main.py
|
Python
|
gpl-3.0
| 3,955
|
import cgi
import wsgiref.handlers
import os
#import admin
import mewantee
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
class MainPage(webapp.RequestHandler):
    """Front page handler: renders index.html with the user's login state."""

    def get(self):
        user = users.get_current_user()
        account, url_linktext, url = mewantee.FlimUtility().loginoutUrls(self, user)
        # values consumed by index.html
        context = {
            'user': user,
            'account': account,
            'url': url,
            'url_linktext': url_linktext,
        }
        template_path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(template_path, context))
def main():
    """Dispatch the request; active accounts get the full set of handlers.

    The route list was previously duplicated between the two branches; the
    shared prefix is now built once and extended for active accounts.
    """
    user = users.get_current_user()
    account = mewantee.Account.gql("WHERE owner=:1 ORDER BY date DESC", user).get()
    # handlers available to everyone
    routes = [
        ('/', MainPage),
        ('/account', mewantee.ManageAccount),
    ]
    if account and account.active:
        # active accounts may also browse/create requests, bounties,
        # payments and comments
        routes += [
            ('/request/(.*)', mewantee.FullRequest),
            ('/addrequest', mewantee.AddRequest),
            ('/addbounty/(.*)', mewantee.AddBounty),
            ('/bounties', mewantee.AddBounty),
            ('/payment/(.*)/(.*)', mewantee.AddPayment),
            ('/payments', mewantee.AddPayment),
            ('/comments', mewantee.AddComment),
            ('/comment/(.*)', mewantee.AddComment),
        ]
    application = webapp.WSGIApplication(routes, debug=True)
    wsgiref.handlers.CGIHandler().run(application)

if __name__ == "__main__":
    main()
|
lucasw/mewantee
|
mewanteee/home.py
|
Python
|
gpl-3.0
| 1,557
|
import mwclient
import mwparserfromhell
import sys
import json
import unidecode
from bs4 import BeautifulSoup
from pymongo import MongoClient
useragent = "MW Database update bot 0.1, Run by Paul.Andrel@kantarmedia.com"
site = mwclient.Site('172.18.64.38', '/', clients_useragent=useragent)
site.login('pandrel', '4357pja')
#/ Set up an empty array to capture page list
#/ Process command line arguments into pages
pages = []
for arg in sys.argv:
if arg == "process-page.py":
continue
if arg == "/home/pandrel/sitesheets-database/process-page.py":
continue
pages.append("Sitesheets:" + arg)
if pages == []:
#We were called with no args pull from postgres
import psycopg2
conn = psycopg2.connect(database="oplogs", host="mwops.tnsmi-cmr.com", user="remote", password="littleblue")
cursor = conn.cursor()
cursor.execute("SELECT site FROM sitelist WHERE nonsite=false AND webremops=true ORDER BY site;")
pgres = cursor.fetchall()
for item in pgres:
pages.append("Sitesheets:" + item[0])
def processPageText(text, page):
    """Parse the first template on a sitesheet wiki page and upsert it into MongoDB.

    :param text: raw wikitext of the page
    :param page: page title -- currently unused; kept for the call signature
    """
    data = {}
    wikicode = mwparserfromhell.parse(text)
    templates = wikicode.filter_templates()
    # assumes the sitesheet data lives in the first template on the page
    template = templates[0]
    for item in template.params:
        # normalize unicode to plain ASCII before parsing
        item = unidecode.unidecode(item)
        soup = BeautifulSoup(item, "lxml")
        ckey = item.split("=", 1)
        if ckey[0].startswith("<!--"):
            # commented-out parameter: ignore
            print "Skipping - " + item
            continue
        if ckey[0] != 'comments':
            # strip HTML markup from the value ('comments' keeps its markup)
            item = soup.text
            key, value = item.split("=",1)
            value = value.rstrip('\n')
        else:
            key, value = item.split("=",1)
            value = value.rstrip('\n')
        data[key.strip()] = value.strip()
    # NOTE(review): credentials hard-coded, and a fresh Mongo connection is
    # opened for every page processed -- consider hoisting and configuring.
    client = MongoClient("mongodb://sitedata:Gibson9371@172.18.64.35/sitesheets")
    db = client.get_default_database()
    sitedata = db['sitedata']
    # replace any existing document for this site
    sitedata.delete_one({"site_id" : data[u"site_id"]})
    sitedata.insert_one(data)
for page in pages:
    mpage = site.Pages[page]
    text = mpage.text()
    # development and special-purpose pages are never synced
    if page == "Sitesheets:ONTO2":
        continue
    if page == "Sitesheets:ZDEV1":
        continue
    if page == "Sitesheets:ZDEV2":
        continue
    if page == "Sitesheets:ZDEV3":
        continue
    if page == "Sitesheets:ZFOP1":
        continue
    if page == "Sitesheets:ZFOP2":
        continue
    if page == "Sitesheets:DTV1":
        continue
    print "Processing - " + page + "."
    processPageText(text, page)
    print "Completed - " + page + "."
|
stavlor/sitesheets-database
|
process-page.py
|
Python
|
gpl-3.0
| 2,539
|
#!/usr/bin/env python
import os
import i3
import sys
import pickle
PATH = "/home/stabjo/.config/i3/workspace_mapping"
def showHelp():
    """Print usage: the script expects a single 'save' or 'restore' argument."""
    usage = "%s [save|restore]" % sys.argv[0]
    print(usage)
if __name__ == '__main__':
    if len(sys.argv) < 2:
        showHelp()
        sys.exit(1)
    if sys.argv[1] == 'save':
        # snapshot the current workspace layout to disk
        pickle.dump(i3.get_workspaces(), open(PATH, "wb"))
    elif sys.argv[1] == 'restore':
        try:
            workspace_mapping = pickle.load(open(PATH, "rb"))
        except Exception:
            print("Can't find existing mappings...")
            sys.exit(1)
        # move every saved workspace back to its recorded output
        for workspace in workspace_mapping:
            i3.msg('command', 'workspace %s' % workspace['name'])
            i3.msg('command', 'move workspace to output %s' % workspace['output'])
        # re-focus the workspaces that were visible when the snapshot was taken
        for workspace in filter(lambda w: w['visible'], workspace_mapping):
            i3.msg('command', 'workspace %s' % workspace['name'])
    else:
        showHelp()
        sys.exit(1)
|
StaffanOB/dotfiles
|
scripts/i3plug.py
|
Python
|
gpl-3.0
| 964
|
"""Access FTDI hardware.
Contents
--------
:class:`Error`
Base error class.
:class:`DeviceNotFoundError`
Raised when device is not connected.
:class:`DeviceError`
Raised for generic pylibftdi exceptions.
:class:`ReadError`
Raised on read errors.
:class:`WriteError`
Raised on write errors.
:class:`FtdiChip`
Access FTDI hardware.
"""
import logging
import time
import pylibftdi
logger = logging.getLogger(__name__)
# FTDI documentation: must be multiple of block size, which is 64 bytes
# with 2 bytes overhead. So, must be multiple of 62 bytes.
READ_SIZE = 62
# Default buffer size is 4K (64 * 64 bytes), but mind the overhead
# But this was not enough to clear all buffers. To be safe, for now, increase
# it ten-fold.
BUFFER_SIZE = 10 * 64 * 62
# Sleep between read/write error retries in seconds
RW_ERROR_WAIT = .5
# parity for rs232 line settings in libftdi::ftdi_set_line_property
PARITY_NONE = 0
PARITY_ODD = 1
PARITY_EVEN = 2
PARITY_MARK = 3
PARITY_SPACE = 4
# bitsize for rs232 line settings in libftdi::ftdi_set_line_property
BITS_8 = 8
BITS_7 = 7
# stopbits for rs232 line settings in libftdi::ftdi_set_line_property
STOP_BIT_1 = 0
STOP_BIT_15 = 1
STOP_BIT_2 = 2
class Error(Exception):
    """Base error class.

    Keeps the low-level FTDI message so subclasses can render it in __str__.
    """

    def __init__(self, msg):
        self.ftdi_msg = msg


class DeviceNotFoundError(Error):
    """Raised when device is not connected."""

    def __str__(self):
        return "Device not found."


class DeviceError(Error):
    """Raised for generic pylibftdi exceptions."""

    def __str__(self):
        return "Device error: %s" % self.ftdi_msg


class ClosedDeviceError(Error):
    """Raised when trying a read/write operation if device is closed."""

    def __str__(self):
        return "Device is closed, %s" % self.ftdi_msg


class ReadError(Error):
    """Raised on read errors."""

    def __str__(self):
        return "Device read error: %s" % self.ftdi_msg


class WriteError(Error):
    """Raised on write errors."""

    def __str__(self):
        return "Device write error: %s" % self.ftdi_msg
class FtdiChip(object):
    """Access FTDI hardware.

    Instantiate this class to get access to connected FTDI hardware.
    The hardware device is opened during instantiation.

    You can use the :meth:`find_all` static method to list all connected
    devices before openening them::

        >>> FtdiChip.find_all()

    """

    _device = None
    closed = True

    def __init__(self, device_description=None, interface_select=0):
        self._device_description = device_description
        self._interface_select = interface_select
        self.open()

    def open(self):
        """Open device.

        Raises :class:`DeviceNotFoundError` if the device cannot be found.
        Raises :class:`DeviceError` if the device cannot be opened.

        """
        if self._device is not None:
            # already open; nothing to do
            return
        try:
            self._device = pylibftdi.Device(self._device_description,
                                            interface_select=self._interface_select)
        except pylibftdi.FtdiError as exc:
            # libftdi reports "device not found" with error code -3
            if "(-3)" in str(exc):
                raise DeviceNotFoundError(str(exc))
            else:
                raise DeviceError(str(exc))
        else:
            # force default latency timer of 16 ms
            # on some systems, this reverts to 0 ms if not set explicitly
            self._device.ftdi_fn.ftdi_set_latency_timer(16)
            self.closed = False
            self.flush()

    def __del__(self):
        self.close()

    def set_line_settings(self, bits, parity, stop_bit):
        """Set line settings (bits, parity, stop bit).

        :param bits: one of BITS_8 or BITS_7
        :param parity: one of PARITY_NONE, PARITY_ODD, PARITY_EVEN,
            PARITY_MARK, PARITY_SPACE
        :param stop_bit: one of STOP_BIT_1, STOP_BIT_15, STOP_BIT_2

        """
        self._device.ftdi_fn.ftdi_set_line_property(bits, stop_bit, parity)

    def close(self):
        """Close device."""
        if not self.closed:
            self._device.close()
            self._device = None
            self.closed = True

    @staticmethod
    def find_all():
        """Find all connected FTDI devices.

        :returns: list of (manufacturer, description, serial#) tuples.

        """
        return pylibftdi.Driver().list_devices()

    def flush(self):
        """Flush device buffers.

        To completely clear out outdated measurements, e.g. when changing
        parameters, call this method. All data received after this method
        is called is really newly measured.

        """
        self._device.flush()
        self.read(BUFFER_SIZE)

    def read(self, read_size=None):
        """Read from device and retry if necessary.

        A read is tried three times. When unsuccesful, raises
        :class:`ReadError`. Raises :class:`ClosedDeviceError` when
        attempting to read from a closed device.

        :param read_size: number of bytes to read (defaults to READ_SIZE).
            As per the FTDI documentation, this should be a multiple of 62
            for best performance.

        :returns: string containing the data.

        """
        if self.closed:
            logger.warning("Attempting to read from closed device.")
            raise ClosedDeviceError("attempting to read.")
        if not read_size:
            read_size = READ_SIZE
        last_exc = None
        for i in range(3):
            try:
                data = self._device.read(read_size)
            except pylibftdi.FtdiError as exc:
                # BUGFIX: keep a reference -- Python 3 unbinds the `as exc`
                # name when the except block ends, so the original
                # `raise ReadError(str(exc))` after the loop raised NameError.
                last_exc = exc
                logger.warning("Read failed, retrying...")
                time.sleep(RW_ERROR_WAIT)
                continue
            else:
                return data
        logger.error("Read failed.")
        self.close()
        raise ReadError(str(last_exc))

    def write(self, data):
        """Write to device and retry if necessary.

        A write is tried three times. When unsuccesful, raises
        :class:`WriteError`. Raises :class:`ClosedDeviceError` when
        attempting to write from a closed device.

        :param data: string containing the data to write.

        """
        if self.closed:
            logger.warning("Attempting to read from closed device.")
            raise ClosedDeviceError("attempting to write.")
        last_exc = None
        for i in range(3):
            try:
                self._device.write(data)
            except pylibftdi.FtdiError as exc:
                # see read(): hold the exception past the except block
                last_exc = exc
                logger.warning("Write failed, retrying...")
                time.sleep(RW_ERROR_WAIT)
                continue
            else:
                return
        logger.error("Write failed.")
        self.close()
        raise WriteError(str(last_exc))
|
HiSPARC/pysparc
|
pysparc/ftdi_chip.py
|
Python
|
gpl-3.0
| 6,785
|
# -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2014-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit test for frequencyseries module
"""
from io import BytesIO
import pytest
import numpy
from numpy import shares_memory
from scipy import signal
from matplotlib import rc_context
from astropy import units
from ...testing import utils
from ...timeseries import TimeSeries
from ...types.tests.test_series import TestSeries as _TestSeries
from .. import FrequencySeries
__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
# LIGO_LW XML document used as a fixture by the ``FrequencySeries.read``
# tests below: three REAL8FrequencySeries <Array> elements with differing
# names, epochs, <Param> metadata, and column dimensionality.
LIGO_LW_ARRAY = r"""<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE LIGO_LW SYSTEM "http://ldas-sw.ligo.caltech.edu/doc/ligolwAPI/html/ligolw_dtd.txt">
<LIGO_LW>
<LIGO_LW Name="REAL8FrequencySeries">
<Time Type="GPS" Name="epoch">1000000000</Time>
<Param Type="lstring" Name="channel:param">X1:TEST-CHANNEL_1</Param>
<Array Type="real_8" Name="PSD1:array" Unit="Hz^-1">
<Dim Start="10" Scale="1" Name="Frequency" Unit="Hz">5</Dim>
<Dim Name="Frequency,Real">2</Dim>
<Stream Delimiter=" " Type="Local">
0 1
1 2
2 3
3 4
4 5
</Stream>
</Array>
</LIGO_LW>
<LIGO_LW Name="REAL8FrequencySeries">
<Param Type="lstring" Name="channel:param">X1:TEST-CHANNEL_2</Param>
<Param Type="real_8" Name="f0:param" Unit="s^-1">0</Param>
<Array Type="real_8" Name="PSD2:array" Unit="s m^2">
<Dim Start="0" Scale="1" Name="Frequency" Unit="s^-1">5</Dim>
<Dim Name="Real">1</Dim>
<Stream Delimiter=" " Type="Local">
10
20
30
40
50
</Stream>
</Array>
</LIGO_LW>
<LIGO_LW Name="REAL8FrequencySeries">
<Time Type="GPS" Name="epoch">1000000001</Time>
<Array Type="real_8" Name="PSD2:array" Unit="s m^2">
<Dim Start="0" Scale="1" Name="Frequency" Unit="s^-1">5</Dim>
<Dim Name="Frequency,Real">3</Dim>
<Stream Delimiter=" " Type="Local">
0 10 1
1 20 2
2 30 3
3 40 4
4 50 5
</Stream>
</Array>
</LIGO_LW>
</LIGO_LW>
"""  # noqa: E501
class TestFrequencySeries(_TestSeries):
    """Unit tests for `FrequencySeries`, extending the generic series tests."""

    TEST_CLASS = FrequencySeries

    # -- test properties ------------------------

    def test_f0(self, array):
        # f0 is an alias of the generic x0 property
        assert array.f0 is array.x0
        array.f0 = 4
        assert array.f0 == 4 * units.Hz

    def test_df(self, array):
        # df is an alias of the generic dx property
        assert array.df is array.dx
        array.df = 4
        assert array.df == 4 * units.Hz

    def test_frequencies(self, array):
        assert array.frequencies is array.xindex
        utils.assert_quantity_equal(
            array.frequencies, numpy.arange(array.size) * array.df + array.f0)

    # -- test methods ---------------------------

    def test_plot(self, array):
        with rc_context(rc={'text.usetex': False}):
            plot = array.plot()
            line = plot.gca().lines[0]
            utils.assert_array_equal(line.get_xdata(), array.xindex.value)
            utils.assert_array_equal(line.get_ydata(), array.value)
            plot.save(BytesIO(), format='png')
            plot.close()

    def test_ifft(self):
        # construct a TimeSeries, then check that it is unchanged by
        # the operation TimeSeries.fft().ifft()
        ts = TimeSeries([1.0, 0.0, -1.0, 0.0], sample_rate=1.0)
        utils.assert_quantity_sub_equal(ts.fft().ifft(), ts)
        utils.assert_allclose(ts.fft().ifft().value, ts.value)

    def test_filter(self, array):
        a2 = array.filter([100], [1], 1e-2)
        assert isinstance(a2, type(array))
        utils.assert_quantity_equal(a2.frequencies, array.frequencies)
        # manually rebuild the filter to test it works
        b, a, = signal.zpk2tf([100], [1], 1e-2)
        fresp = abs(signal.freqs(b, a, array.frequencies.value)[1])
        utils.assert_array_equal(a2.value, fresp * array.value)

    def test_zpk(self, array):
        a2 = array.zpk([100], [1], 1e-2)
        assert isinstance(a2, type(array))
        utils.assert_quantity_equal(a2.frequencies, array.frequencies)

    def test_inject(self):
        # create a timeseries out of an array of zeros
        df, nyquist = 1, 2048
        nsamp = int(nyquist/df) + 1
        data = FrequencySeries(numpy.zeros(nsamp), f0=0, df=df, unit='')
        # create a second timeseries to inject into the first
        w_nyquist = 1024
        w_nsamp = int(w_nyquist/df) + 1
        sig = FrequencySeries(numpy.ones(w_nsamp), f0=0, df=df, unit='')
        # test that we recover this waveform when we add it to data,
        # and that the operation does not change the original data
        new_data = data.inject(sig)
        assert new_data.unit == data.unit
        assert new_data.size == data.size
        ind, = new_data.value.nonzero()
        assert len(ind) == sig.size
        utils.assert_allclose(new_data.value[ind], sig.value)
        utils.assert_allclose(data.value, numpy.zeros(nsamp))

    def test_interpolate(self):
        # create a simple FrequencySeries
        df, nyquist = 1, 256
        nsamp = int(nyquist/df) + 1
        fseries = FrequencySeries(numpy.ones(nsamp), f0=1, df=df, unit='')
        # create an interpolated FrequencySeries
        newf = fseries.interpolate(df/2.)
        # check that the interpolated series is what was expected
        assert newf.unit == fseries.unit
        assert newf.size == 2*(fseries.size - 1) + 1
        assert newf.df == fseries.df / 2.
        assert newf.f0 == fseries.f0
        utils.assert_allclose(newf.value, numpy.ones(2*int(nyquist/df) + 1))

    @utils.skip_missing_dependency('lal')
    def test_to_from_lal(self, array):
        import lal
        array.epoch = 0
        # check that to + from returns the same array
        lalts = array.to_lal()
        a2 = type(array).from_lal(lalts)
        utils.assert_quantity_sub_equal(array, a2, exclude=['name', 'channel'])
        assert a2.name == array.name
        # test copy=False
        a2 = type(array).from_lal(lalts, copy=False)
        assert shares_memory(a2.value, lalts.data.data)
        # test units
        array.override_unit('undef')
        with pytest.warns(UserWarning):
            lalts = array.to_lal()
        assert lalts.sampleUnits == lal.DimensionlessUnit
        a2 = self.TEST_CLASS.from_lal(lalts)
        assert a2.unit is units.dimensionless_unscaled

    @utils.skip_missing_dependency('lal')
    @utils.skip_missing_dependency('pycbc')
    def test_to_from_pycbc(self, array):
        from pycbc.types import FrequencySeries as PyCBCFrequencySeries
        array.epoch = 0
        # test default conversion
        pycbcfs = array.to_pycbc()
        assert isinstance(pycbcfs, PyCBCFrequencySeries)
        utils.assert_array_equal(array.value, pycbcfs.data)
        # NOTE(review): this compares a plain float (``.value``) against a
        # Quantity (``0 * units.Hz``) — presumably ``array.f0 == 0 * units.Hz``
        # or ``array.f0.value == 0`` was intended; confirm.
        assert array.f0.value == 0 * units.Hz
        assert array.df.value == pycbcfs.delta_f
        assert array.epoch.gps == pycbcfs.epoch
        # go back and check we get back what we put in in the first place
        a2 = type(array).from_pycbc(pycbcfs)
        utils.assert_quantity_sub_equal(
            array, a2, exclude=['name', 'unit', 'channel'])
        # test copy=False
        a2 = type(array).from_pycbc(array.to_pycbc(copy=False), copy=False)
        assert shares_memory(array.value, a2.value)

    @pytest.mark.parametrize('format', [
        'txt',
        'csv',
    ])
    def test_read_write(self, array, format):
        utils.test_read_write(
            array, format,
            assert_equal=utils.assert_quantity_sub_equal,
            assert_kw={'exclude': ['name', 'channel', 'unit', 'epoch']})

    @staticmethod
    @pytest.fixture
    def ligolw(tmp_path):
        # write the module-level LIGO_LW fixture document to a temp file
        tmp = tmp_path / "test.xml"
        tmp.write_text(LIGO_LW_ARRAY)
        return tmp

    @utils.skip_missing_dependency('lal')
    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw(self, ligolw):
        array = FrequencySeries.read(ligolw, 'PSD1')
        utils.assert_quantity_equal(
            array,
            [1, 2, 3, 4, 5] / units.Hz,
        )
        utils.assert_quantity_equal(
            array.frequencies,
            [10, 11, 12, 13, 14] * units.Hz,
        )
        assert numpy.isclose(array.epoch.gps, 1000000000)  # precision gah!
        assert array.unit == units.Hz ** -1

    @utils.skip_missing_dependency('lal')
    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_params(self, ligolw):
        # select the target <Array> by its channel:param metadata
        array = FrequencySeries.read(
            ligolw,
            channel="X1:TEST-CHANNEL_2",
        )
        assert list(array.value) == [10, 20, 30, 40, 50]
        assert array.epoch is None

    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_error_multiple_array(self, ligolw):
        # assert errors
        with pytest.raises(ValueError) as exc:  # multiple <Array> hits
            FrequencySeries.read(ligolw)
        assert "'name'" in str(exc.value)
        with pytest.raises(ValueError) as exc:  # multiple <Array> hits
            FrequencySeries.read(ligolw, "PSD2")
        assert "'epoch" in str(exc.value) and "'name'" not in str(exc.value)

    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_error_no_array(self, ligolw):
        with pytest.raises(ValueError) as exc:  # no hits
            FrequencySeries.read(ligolw, "blah")
        assert str(exc.value).startswith("no <Array> elements found")

    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_error_no_match(self, ligolw):
        with pytest.raises(ValueError):  # wrong epoch
            FrequencySeries.read(ligolw, epoch=0)
        with pytest.raises(ValueError):  # <Param>s don't match
            FrequencySeries.read(
                ligolw,
                "PSD1",
                f0=0,
            )

    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_error_no_param(self, ligolw):
        with pytest.raises(ValueError):  # no <Param>
            FrequencySeries.read(
                ligolw,
                "PSD2",
                blah="blah",
            )

    @utils.skip_missing_dependency('ligo.lw')
    def test_read_ligolw_error_dim(self, ligolw):
        with pytest.raises(ValueError):  # wrong dimensionality
            FrequencySeries.read(ligolw, epoch=1000000001)
|
gwpy/gwpy
|
gwpy/frequencyseries/tests/test_frequencyseries.py
|
Python
|
gpl-3.0
| 10,888
|
import networkx as nx
from riaa import riaa_labels
from copy import copy
class Graph():
    """Directed multigraph of record labels and their relationships.

    Wraps a :class:`networkx.MultiDiGraph` whose nodes are labels and whose
    edges point from a label towards its (eventual) RIAA-affiliated parent.

    NOTE(review): this class uses the networkx 1.x API throughout
    (``db.node``, ``db.edge``, positional attribute dicts, ``attr_dict=``);
    all of these were removed in networkx 2.0 — confirm the pinned
    networkx version before upgrading.
    """

    def __init__(self):
        self.db = nx.MultiDiGraph()
        # Maps a MusicBrainz relation type to (source index, target index,
        # edge label): the indices select which of (id1, id2) the edge
        # starts from and points to.
        self.rel_rules = {
            "label ownership": (1, 0, "owned_by"),
            "label reissue": (1, 0, "catalog_reissued_by"),
            "label rename": (0, 1, "renamed_to"),
            "label distribution": (1, 0, "catalog_distributed_by"),
            "business association": (0, 1, "business_association_with")
        }

    def add_labels(self, labels):
        """Add one graph node per label, tagging RIAA-affiliated labels."""
        for label in labels:
            attrs = {"name": label.name, "mbid": label.mbid}
            if label.country:
                attrs["country"] = label.country
            if label.mbid in riaa_labels:
                attrs["riaa"] = riaa_labels[label.mbid]
            self.db.add_node(label.id, attrs)

    def add_relations(self, relations):
        """Add an edge for every relation in *relations*."""
        for relation in relations:
            self.add_relation(relation)

    def add_relation(self, relation):
        """Add a directed edge for *relation* according to ``rel_rules``."""
        labels = (relation.id1, relation.id2)
        rel_type = relation.type
        rel_rule = self.rel_rules[rel_type]
        self.db.add_edge(labels[rel_rule[0]], labels[rel_rule[1]],
                         attr_dict={"rel": rel_rule[2]})

    def generate_riaa_tree(self):
        """Return a {mbid: node-attributes} dict of RIAA-connected labels.

        Each value gains ``rel``/``parent`` keys describing the first hop
        of its shortest path towards an RIAA-affiliated label.
        """
        tree = {}
        for label_id in self.db:
            path = self.get_riaa_path(label_id)
            # If the label is RIAA affiliated, a label will exist in `path`
            if len(path) > 0:
                # TODO: Don't include 'mbid' as a property of each value
                # in the object
                label = self.db.node[label_id]
                label_mbid = self.db.node[label_id]["mbid"]
                if len(path) > 1:
                    parent_label_id = path[1]
                    parent_label = self.db.node[parent_label_id]
                    rel = self.db.edge[label_id][parent_label_id][0]["rel"]
                    label["rel"] = rel
                    label["parent"] = parent_label["mbid"]
                tree[label_mbid] = label
        return tree

    def get_riaa_path(self, label_id):
        """Return the shortest path from *label_id* to an RIAA label.

        Returns an empty list when no RIAA-affiliated label is reachable.
        """
        shortest = []
        paths = nx.single_source_shortest_path(self.db, label_id)
        for path in paths.values():
            if self.is_riaa(path[-1]):
                if len(shortest) == 0:
                    shortest = path
                elif len(path) < len(shortest):
                    shortest = path
        return shortest

    def is_riaa(self, label_id):
        """Determine if the label is RIAA affiliated."""
        return "riaa" in self.db.node[label_id]

    def get_riaa_path_old(self, label_id, visited=None):
        """Deprecated DFS implementation of :meth:`get_riaa_path`."""
        if not visited:
            visited = set()
        if "riaa" in self.db.node[label_id]:
            return [label_id]
        paths = []
        for parent_label_id in self.db.successors(label_id):
            if parent_label_id not in visited:
                visited.add(parent_label_id)
                # BUG FIX: recurse into this method.  The original called
                # get_riaa_path(), which takes no `visited` argument and
                # would raise TypeError on any multi-hop traversal.
                path = self.get_riaa_path_old(parent_label_id, copy(visited))
                if len(path) > 0:
                    path.insert(0, label_id)
                    paths.append(path)
        return min(paths) if len(paths) > 0 else []
|
frelsi/riaapath
|
lib/graph.py
|
Python
|
gpl-3.0
| 3,205
|
import math
import unittest
from datetime import datetime, time, timedelta, tzinfo
from asynctest.mock import patch
from tests.unittest.base_custom import TestCustomField
# Needs to be imported last
from lib.data import timezones
from lib.data.timedelta import format
from ..custom import countdown
class TimeZone(tzinfo):
    """Fixed-offset timezone.

    :param offset: offset from UTC, in seconds (may be negative).
    """

    def __init__(self, offset):
        # Keep the raw seconds value; utcoffset() converts on demand.
        self.offset = offset

    def dst(self, dt):
        """A fixed-offset zone has no daylight-saving adjustment."""
        return timedelta(0)

    def utcoffset(self, dt):
        """Return the fixed offset from UTC as a timedelta."""
        return timedelta(seconds=self.offset)
class TestCustomCommandCustomCountdownParse(unittest.TestCase):
    """Tests for ``countdown.parse_date_string``."""

    def setUp(self):
        # Replace the timezones module used inside `countdown` with a mock
        # exposing a fixed set of UTC-offset abbreviations.
        patcher = patch(countdown.__name__ + '.timezones',
                        autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_timezones = patcher.start()
        self.mock_timezones.abbreviations = {
            'utc': TimeZone(0),
            'utc-00:00': TimeZone(0),
            'utc+00:00': TimeZone(0),
            'utc-12:00': TimeZone(0),
            'utc+12:00': TimeZone(0),
            'utc-08:00': TimeZone(-8 * 3600),
            'utc+08:00': TimeZone(8 * 3600)
        }
        self.mock_timezones.utc = TimeZone(0)

    def test(self):
        # empty input parses to nothing
        self.assertIsNone(countdown.parse_date_string(''))

    def test_time_of_day(self):
        # 24-hour HH:MM, with and without a leading zero
        self.assertEqual(
            countdown.parse_date_string('0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('00:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('12:34'),
            countdown.DateTimeInstance(time(12, 34, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('23:59'),
            countdown.DateTimeInstance(time(23, 59, 0, 0, timezones.utc),
                                       None, None, True))
        # out-of-range or over-long fields are rejected
        self.assertIsNone(countdown.parse_date_string('24:00'))
        self.assertIsNone(countdown.parse_date_string('0:60'))
        self.assertIsNone(countdown.parse_date_string('000:00'))
        self.assertIsNone(countdown.parse_date_string('0:000'))

    def test_time_of_day_seconds(self):
        self.assertEqual(
            countdown.parse_date_string('0:00:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('0:00:59'),
            countdown.DateTimeInstance(time(0, 0, 59, 0, timezones.utc),
                                       None, None, True))
        self.assertIsNone(countdown.parse_date_string('0:00:60'))

    def test_time_of_day_seconds_microseconds(self):
        # fractional seconds: exactly six digits of microseconds
        self.assertEqual(
            countdown.parse_date_string('0:00:00.000000'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('0:00:00.000001'),
            countdown.DateTimeInstance(time(0, 0, 0, 1, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('0:00:00.999999'),
            countdown.DateTimeInstance(time(0, 0, 0, 999999, timezones.utc),
                                       None, None, True))
        self.assertIsNone(countdown.parse_date_string('0:00:00.0000000'))
        self.assertIsNone(countdown.parse_date_string('0:00:00.9999999'))

    def test_time_of_day_meridiem(self):
        # 12-hour clock with AM/PM suffix (case-insensitive); the final
        # False flag marks a 12-hour (non-24-hour) parse
        self.assertEqual(
            countdown.parse_date_string('12:00AM'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('1:23PM'),
            countdown.DateTimeInstance(time(13, 23, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('01:23am'),
            countdown.DateTimeInstance(time(1, 23, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('11:59pm'),
            countdown.DateTimeInstance(time(23, 59, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertIsNone(countdown.parse_date_string('12:00BM'))
        self.assertIsNone(countdown.parse_date_string('0:00AM'))
        self.assertIsNone(countdown.parse_date_string('0:60AM'))
        self.assertIsNone(countdown.parse_date_string('13:00AM'))
        self.assertIsNone(countdown.parse_date_string('000:00AM'))
        self.assertIsNone(countdown.parse_date_string('0:000AM'))

    def test_time_of_day_seconds_meridiem(self):
        self.assertEqual(
            countdown.parse_date_string('12:00:00AM'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('12:00:59PM'),
            countdown.DateTimeInstance(time(12, 0, 59, 0, timezones.utc),
                                       None, None, False))
        self.assertIsNone(countdown.parse_date_string('0:00:60AM'))

    def test_time_of_day_seconds_microseconds_meridiem(self):
        self.assertEqual(
            countdown.parse_date_string('12:00:00.000000AM'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('12:00:00.000001PM'),
            countdown.DateTimeInstance(time(12, 0, 0, 1, timezones.utc),
                                       None, None, False))
        self.assertEqual(
            countdown.parse_date_string('12:00:00.999999AM'),
            countdown.DateTimeInstance(time(0, 0, 0, 999999, timezones.utc),
                                       None, None, False))
        self.assertIsNone(countdown.parse_date_string('0:00:00.0000000AM'))
        self.assertIsNone(countdown.parse_date_string('0:00:00.9999999PM'))

    def test_time_of_day_timezone(self):
        # timezone abbreviations resolve via the mocked abbreviations map
        self.assertEqual(
            countdown.parse_date_string('0:00 UTC'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('0:00 utc'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('0:00 UTC-00:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       None, None, True))
        self.assertEqual(
            countdown.parse_date_string('12:00AM UTC+12:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc+12:00']),
                None, None, False))
        self.assertIsNone(countdown.parse_date_string('0:00 ABC'))

    def test_day_of_week_time_of_day(self):
        # full and three-letter weekday names, any letter case
        self.assertEqual(
            countdown.parse_date_string('Sunday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Monday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.MONDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Tuesday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.TUESDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Wednesday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.WEDNESDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Thursday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.THURSDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Friday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.FRIDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Saturday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SATURDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('SUNDAY 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('sunday 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Sun 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Mon 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.MONDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Tue 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.TUESDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Wed 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.WEDNESDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Thu 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.THURSDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Fri 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.FRIDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('Sat 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SATURDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('SUN 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertEqual(
            countdown.parse_date_string('sun 0:00'),
            countdown.DateTimeInstance(time(0, 0, 0, 0, timezones.utc),
                                       countdown.SUNDAY, None, True))
        self.assertIsNone(countdown.parse_date_string('abc 0:00'))

    def test_negative_timezone_time_of_day(self):
        self.assertEqual(
            countdown.parse_date_string('8:00PM UTC-08:00'),
            countdown.DateTimeInstance(
                time(20, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc-08:00']),
                None, None, False))

    def test_month_day_time_of_day(self):
        # M/D (or M-D) with no year; Feb 29 is allowed when year unknown
        self.assertEqual(
            countdown.parse_date_string('1/1 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(None, 1, 1), True))
        self.assertEqual(
            countdown.parse_date_string('12-31 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(None, 12, 31), True))
        self.assertEqual(
            countdown.parse_date_string('2/29 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(None, 2, 29), True))
        self.assertIsNone(countdown.parse_date_string('1/32 0:00'))
        self.assertIsNone(countdown.parse_date_string('13/1 0:00'))
        self.assertIsNone(countdown.parse_date_string('2/30 0:00'))

    def test_month_day_year_time_of_day(self):
        # M/D/YYYY; Feb 29 only valid in leap years
        self.assertEqual(
            countdown.parse_date_string('1/1/2000 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(2000, 1, 1), True))
        self.assertEqual(
            countdown.parse_date_string('12-31-2016 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(2016, 12, 31), True))
        self.assertEqual(
            countdown.parse_date_string('2/29/2000 0:00'),
            countdown.DateTimeInstance(
                time(0, 0, 0, 0, timezones.utc),
                None, countdown.Date(2000, 2, 29), True))
        self.assertIsNone(countdown.parse_date_string('1/32/2015 0:00'))
        self.assertIsNone(countdown.parse_date_string('13/1/2014 0:00'))
        self.assertIsNone(countdown.parse_date_string('2/30/2000 0:00'))
        self.assertIsNone(countdown.parse_date_string('2/29/2001 0:00'))

    def test_many(self):
        # combined fields must appear in date/time/timezone order
        self.assertEqual(
            countdown.parse_date_string('6/15/2000 10:48:23.987654PM UTC'),
            countdown.DateTimeInstance(
                time(22, 48, 23, 987654, timezones.utc),
                None, countdown.Date(2000, 6, 15), False))
        self.assertEqual(
            countdown.parse_date_string('Wed 16:49:31.456187 UTC'),
            countdown.DateTimeInstance(time(16, 49, 31, 456187, timezones.utc),
                                       countdown.WEDNESDAY, None, True))
        self.assertIsNone(
            countdown.parse_date_string('UTC 16:49:31.456187 Wed'))
        self.assertIsNone(
            countdown.parse_date_string('UTC 10:48:23.987654PM 6/15/2000'))
class TestCustomCommandCustomCountdownNextDatetime(unittest.TestCase):
    """Tests for ``countdown.next_datetime``.

    All tests use a fixed "now" of 2000-01-01 00:00 UTC (a Saturday).
    """

    def setUp(self):
        self.now = datetime(2000, 1, 1, tzinfo=timezones.utc)
        # mock the timezones module with fixed UTC offsets
        patcher = patch('lib.data.timezones', autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_timezones = patcher.start()
        self.mock_timezones.abbreviations = {'utc-08:00': TimeZone(-8 * 3600),
                                             'utc+08:00': TimeZone(8 * 3600)}
        self.mock_timezones.utc = TimeZone(0)

    def test_time_of_day(self):
        # 0:00 has already passed at "now", so the next match is tomorrow
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, None, True),
            countdown.DateTime(datetime(2000, 1, 2, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 1, 0, 0, timezones.utc),
                                    None, None, False),
            countdown.DateTime(datetime(2000, 1, 1, 0, 1, 0, 0, timezones.utc),
                               False))
        self.assertEqual(
            countdown.next_datetime(self.now,
                                    time(23, 59, 59, 999999, timezones.utc),
                                    None, None, False),
            countdown.DateTime(datetime(2000, 1, 1, 23, 59, 59, 999999,
                                        timezones.utc),
                               False))

    def test_time_of_day_day_of_week(self):
        # next occurrence of each weekday after Saturday 2000-01-01
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.SUNDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 2, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.MONDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 3, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.TUESDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 4, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.WEDNESDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 5, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.THURSDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 6, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.FRIDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 7, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.SATURDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 8, 0, 0, 0, 0, timezones.utc),
                               True))

    def test_time_of_day_day_of_week_timezone(self):
        # offsets convert into UTC before the "next" computation
        self.assertEqual(
            countdown.next_datetime(
                self.now,
                time(20, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc-08:00']),
                countdown.FRIDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 1, 4, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(
                self.now,
                time(4, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc+08:00']),
                countdown.SUNDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 1, 20, 0, 0, 0,
                                        timezones.utc),
                               True))

    def test_time_of_day_month_day(self):
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(None, 1, 1), True),
            countdown.DateTime(datetime(2001, 1, 1, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(None, 12, 31), True),
            countdown.DateTime(datetime(2000, 12, 31, 0, 0, 0, 0,
                                        timezones.utc),
                               True))

    def test_time_of_day_year_month_day(self):
        # a fully-specified date in the past has no "next" occurrence
        self.assertIsNone(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(2000, 1, 1), True))
        self.assertEqual(
            countdown.next_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(2000, 12, 31), True),
            countdown.DateTime(datetime(2000, 12, 31, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
# BUG FIX: class was named "TesCustomCommandtCustomCountdownPastDatetime"
# (transposed 't'), which does not match pytest's default `Test*` collection
# prefix, so these tests were silently skipped under pytest.
class TestCustomCommandCustomCountdownPastDatetime(unittest.TestCase):
    """Tests for ``countdown.past_datetime``.

    All tests use a fixed "now" of 2000-01-01 00:00 UTC (a Saturday).
    """

    def setUp(self):
        self.now = datetime(2000, 1, 1, tzinfo=timezones.utc)
        # mock the timezones module with fixed UTC offsets
        patcher = patch('lib.data.timezones', autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_timezones = patcher.start()
        self.mock_timezones.abbreviations = {'utc-08:00': TimeZone(-8 * 3600),
                                             'utc+08:00': TimeZone(8 * 3600)}
        self.mock_timezones.utc = TimeZone(0)

    def test_time_of_day(self):
        # 0:00 matches "now" exactly; later times fall back to yesterday
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, None, True),
            countdown.DateTime(datetime(2000, 1, 1, 0, 0, 0, 0, timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 1, 0, 0, timezones.utc),
                                    None, None, False),
            countdown.DateTime(datetime(1999, 12, 31, 0, 1, 0, 0,
                                        timezones.utc),
                               False))
        self.assertEqual(
            countdown.past_datetime(self.now,
                                    time(23, 59, 59, 999999, timezones.utc),
                                    None, None, False),
            countdown.DateTime(datetime(1999, 12, 31, 23, 59, 59, 999999,
                                        timezones.utc),
                               False))

    def test_time_of_day_day_of_week(self):
        # most recent occurrence of each weekday on/before Sat 2000-01-01
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.SUNDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 26, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.MONDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 27, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.TUESDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 28, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.WEDNESDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 29, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.THURSDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 30, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.FRIDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 31, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    countdown.SATURDAY, None, True),
            countdown.DateTime(datetime(2000, 1, 1, 0, 0, 0, 0,
                                        timezones.utc),
                               True))

    def test_time_of_day_day_of_week_timezone(self):
        # offsets convert into UTC before the "past" computation
        self.assertEqual(
            countdown.past_datetime(
                self.now,
                time(20, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc-08:00']),
                countdown.SATURDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 26, 4, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(
                self.now,
                time(4, 0, 0, 0,
                     self.mock_timezones.abbreviations['utc+08:00']),
                countdown.SATURDAY, None, True),
            countdown.DateTime(datetime(1999, 12, 31, 20, 0, 0, 0,
                                        timezones.utc),
                               True))

    def test_time_of_day_month_day(self):
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(None, 1, 1), True),
            countdown.DateTime(datetime(2000, 1, 1, 0, 0, 0, 0,
                                        timezones.utc),
                               True))
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(None, 12, 31), True),
            countdown.DateTime(datetime(1999, 12, 31, 0, 0, 0, 0,
                                        timezones.utc),
                               True))

    def test_time_of_day_year_month_day(self):
        self.assertEqual(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(2000, 1, 1), True),
            countdown.DateTime(datetime(2000, 1, 1, 0, 0, 0, 0, timezones.utc),
                               True))
        # a fully-specified date in the future has no "past" occurrence
        self.assertIsNone(
            countdown.past_datetime(self.now, time(0, 0, 0, 0, timezones.utc),
                                    None, countdown.Date(2000, 12, 31), True))
class TestCustomCommandCustomCountdownParseCooldown(unittest.TestCase):
    """Tests for ``countdown.parse_cooldown``.

    Cooldowns are either a percentage (returned as a float fraction) or a
    duration string of descending units ``w``/``d``/``h``/``m``/``s``
    (returned as a :class:`datetime.timedelta`).
    """

    def test_percent(self):
        # percentages map onto [0.0, 1.0]; out-of-range and signs rejected
        self.assertEqual(countdown.parse_cooldown('0%'), 0.0)
        self.assertEqual(countdown.parse_cooldown('100%'), 1.0)
        self.assertEqual(countdown.parse_cooldown('42%'), 0.42)
        self.assertIsNone(countdown.parse_cooldown('101%'))
        self.assertIsNone(countdown.parse_cooldown('1000%'))
        self.assertIsNone(countdown.parse_cooldown('-0%'))

    def test_weeks(self):
        # weeks are unbounded
        self.assertEqual(countdown.parse_cooldown('0w'), timedelta(weeks=0))
        self.assertEqual(countdown.parse_cooldown('1w'), timedelta(weeks=1))
        self.assertEqual(countdown.parse_cooldown('2w'), timedelta(weeks=2))
        self.assertEqual(countdown.parse_cooldown('15961w'),
                         timedelta(weeks=15961))

    def test_days(self):
        # days are unbounded
        self.assertEqual(countdown.parse_cooldown('0d'), timedelta(days=0))
        self.assertEqual(countdown.parse_cooldown('1d'), timedelta(days=1))
        self.assertEqual(countdown.parse_cooldown('2d'), timedelta(days=2))
        self.assertEqual(countdown.parse_cooldown('89156d'),
                         timedelta(days=89156))

    def test_hours(self):
        # hours must be < 24
        self.assertEqual(countdown.parse_cooldown('0h'), timedelta(hours=0))
        self.assertEqual(countdown.parse_cooldown('1h'), timedelta(hours=1))
        self.assertEqual(countdown.parse_cooldown('23h'), timedelta(hours=23))
        self.assertIsNone(countdown.parse_cooldown('24h'))

    def test_minutes(self):
        # minutes must be < 60
        self.assertEqual(countdown.parse_cooldown('0m'), timedelta(minutes=0))
        self.assertEqual(countdown.parse_cooldown('1m'), timedelta(minutes=1))
        self.assertEqual(countdown.parse_cooldown('59m'),
                         timedelta(minutes=59))
        self.assertIsNone(countdown.parse_cooldown('60m'))

    def test_seconds(self):
        # seconds must be < 60
        self.assertEqual(countdown.parse_cooldown('0s'), timedelta(seconds=0))
        self.assertEqual(countdown.parse_cooldown('1s'), timedelta(seconds=1))
        self.assertEqual(countdown.parse_cooldown('59s'),
                         timedelta(seconds=59))
        self.assertIsNone(countdown.parse_cooldown('60s'))

    def test_multiple(self):
        # units may be combined but only in descending order w>d>h>m>s
        self.assertEqual(countdown.parse_cooldown('1w1d'),
                         timedelta(weeks=1, days=1))
        self.assertEqual(countdown.parse_cooldown('1d1h'),
                         timedelta(days=1, hours=1))
        self.assertEqual(countdown.parse_cooldown('1h1m'),
                         timedelta(hours=1, minutes=1))
        self.assertEqual(countdown.parse_cooldown('1m1s'),
                         timedelta(minutes=1, seconds=1))
        self.assertIsNone(countdown.parse_cooldown('1d1w'))
        self.assertIsNone(countdown.parse_cooldown('1h1d'))
        self.assertIsNone(countdown.parse_cooldown('1m1h'))
        self.assertIsNone(countdown.parse_cooldown('1s1m'))
        self.assertIsNone(countdown.parse_cooldown('1s1m1h1d1w'))
        self.assertEqual(countdown.parse_cooldown('0w0d0h0m0s'), timedelta())
        self.assertEqual(
            countdown.parse_cooldown('1w1d1h1m1s'),
            timedelta(weeks=1, days=1, hours=1, minutes=1, seconds=1))
        # 2 weeks + 13 days normalises to 3 weeks 6 days
        self.assertEqual(
            countdown.parse_cooldown('2w13d23h59m59s'),
            timedelta(weeks=3, days=6, hours=23, minutes=59, seconds=59))
class TestCustomCommandCustomCountdownTestCooldown(unittest.TestCase):
    """Tests for countdown.test_cooldown.

    The duration is given either as a timedelta or as a float number of
    days; both forms are exercised against the same fixed window between
    PAST (2000-01-01) and FUTURE (2000-01-02).
    """

    PAST = datetime(2000, 1, 1, 0, 0, 0, 0)
    FUTURE = datetime(2000, 1, 2, 0, 0, 0, 0)

    def _run_cases(self, duration, cases):
        """Check test_cooldown(duration, PAST, FUTURE, moment) per case.

        Each case is a (moment, expected) pair.  NaN expectations are
        verified with math.isnan, since NaN never compares equal.
        """
        for moment, expected in cases:
            actual = countdown.test_cooldown(
                duration, self.PAST, self.FUTURE, moment)
            if isinstance(expected, float) and math.isnan(expected):
                self.assertTrue(math.isnan(actual), actual)
            else:
                self.assertEqual(actual, expected)

    def test(self):
        # With no cooldown duration the result is 0
        self._run_cases(None, [(datetime(1999, 12, 31, 0, 0, 0, 0), 0)])

    def test_timedelta(self):
        self._run_cases(timedelta(hours=1), [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), -1),
            (datetime(2000, 1, 1, 0, 59, 59, 999999), -1),
            (datetime(2000, 1, 1, 1, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 12, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 23, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 23, 0, 0, 1), 1),
            (datetime(2000, 1, 2, 0, 0, 0, 0), 1),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])

    def test_timedelta_over_half(self):
        # 20h cooldowns on a 24h window overlap in the middle -> NaN
        self._run_cases(timedelta(hours=20), [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), -1),
            (datetime(2000, 1, 1, 3, 59, 59, 999999), -1),
            (datetime(2000, 1, 1, 4, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 12, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 20, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 20, 0, 0, 1), 1),
            (datetime(2000, 1, 2, 0, 0, 0, 0), 1),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])

    def test_timedelta_over_full(self):
        # A cooldown longer than the whole window -> NaN inside it
        self._run_cases(timedelta(days=2), [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 12, 0, 0, 0), math.nan),
            (datetime(2000, 1, 2, 0, 0, 0, 0), math.nan),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])

    def test_float(self):
        # 1/24 of a day == the 1-hour timedelta case above
        self._run_cases(1 / 24, [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), -1),
            (datetime(2000, 1, 1, 0, 59, 59, 999999), -1),
            (datetime(2000, 1, 1, 1, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 12, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 23, 0, 0, 0), 0),
            (datetime(2000, 1, 1, 23, 0, 0, 1), 1),
            (datetime(2000, 1, 2, 0, 0, 0, 0), 1),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])

    def test_float_over_half(self):
        # 20/24 of a day == the 20-hour timedelta case above
        self._run_cases(20 / 24, [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), -1),
            (datetime(2000, 1, 1, 3, 59, 59, 999999), -1),
            (datetime(2000, 1, 1, 4, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 12, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 20, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 20, 0, 0, 1), 1),
            (datetime(2000, 1, 2, 0, 0, 0, 0), 1),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])

    def test_float_over_full(self):
        # 2.0 days == the 2-day timedelta case above
        self._run_cases(2.0, [
            (datetime(1999, 12, 31, 0, 0, 0, 0), -math.inf),
            (datetime(2000, 1, 1, 0, 0, 0, 0), math.nan),
            (datetime(2000, 1, 1, 12, 0, 0, 0), math.nan),
            (datetime(2000, 1, 2, 0, 0, 0, 0), math.nan),
            (datetime(2000, 1, 3, 0, 0, 0, 0), math.inf),
        ])
class TestCustomCommandCustomCountdownParseNextPastCooldown(unittest.TestCase):
    """Tests for countdown.parse_next_past_cooldown.

    'now' is pinned to 2000-01-01 00:00 UTC; each test feeds a times
    specification string and checks the resulting NextPastCooldown.
    """

    def setUp(self):
        self.now = datetime(2000, 1, 1, tzinfo=timezones.utc)

    @staticmethod
    def _dt(year, month, day, hour, minute, flag=True):
        # Build a countdown.DateTime in UTC; `flag` mirrors the boolean
        # second field the original tests pass (True/False)
        return countdown.DateTime(
            datetime(year, month, day, hour, minute, tzinfo=timezones.utc),
            flag)

    def _check(self, times, next_=None, past_=None, cooldown=None):
        """Assert parsing `times` at self.now yields the given triple."""
        self.assertEqual(
            countdown.parse_next_past_cooldown(times, self.now),
            countdown.NextPastCooldown(next_, past_, cooldown))

    def test_blank(self):
        self._check('')

    def test(self):
        self._check('abcd')

    def test_cooldown(self):
        # A cooldown alone, with no times, parses to nothing
        self._check('1h')

    def test_single_exact_next(self):
        self._check('1/2/2000 0:00', next_=self._dt(2000, 1, 2, 0, 0))

    def test_single_exact_past(self):
        self._check('12/31/1999 12:00AM',
                    past_=self._dt(1999, 12, 31, 0, 0, False))

    def test_multiple_exact(self):
        self._check('12/31/1999 12:00AM,1/2/2000 0:00',
                    next_=self._dt(2000, 1, 2, 0, 0),
                    past_=self._dt(1999, 12, 31, 0, 0, False),
                    cooldown=0)

    def test_hour_minute(self):
        self._check('0:00',
                    next_=self._dt(2000, 1, 2, 0, 0),
                    past_=self._dt(2000, 1, 1, 0, 0),
                    cooldown=0)

    def test_day_of_week(self):
        self._check('Sun 0:00',
                    next_=self._dt(2000, 1, 2, 0, 0),
                    past_=self._dt(1999, 12, 26, 0, 0),
                    cooldown=0)

    def test_month_day(self):
        self._check('1/1 0:00',
                    next_=self._dt(2001, 1, 1, 0, 0),
                    past_=self._dt(2000, 1, 1, 0, 0),
                    cooldown=0)

    def test_multiple(self):
        self._check('12/25/1999 6:00PM,1/31 0:00,7:00AM,19:00,Wed 20:00',
                    next_=self._dt(2000, 1, 1, 7, 0, False),
                    past_=self._dt(1999, 12, 31, 19, 0),
                    cooldown=0)

    def test_multiple_2(self):
        self._check('6:00AM,18:00,12/31 23:00,1/1 1:00AM',
                    next_=self._dt(2000, 1, 1, 1, 0, False),
                    past_=self._dt(1999, 12, 31, 23, 0),
                    cooldown=0)

    def test_hour_minute_cooldown(self):
        self._check('0:00,1h',
                    next_=self._dt(2000, 1, 2, 0, 0),
                    past_=self._dt(2000, 1, 1, 0, 0),
                    cooldown=0)

    def test_cooldown_hour_minute(self):
        self._check('4h,12:00',
                    next_=self._dt(2000, 1, 1, 12, 0),
                    past_=self._dt(1999, 12, 31, 12, 0),
                    cooldown=0)

    def test_cooldown_hour_minute_early_cooldown(self):
        self._check('4h,23:00',
                    next_=self._dt(2000, 1, 1, 23, 0),
                    past_=self._dt(1999, 12, 31, 23, 0),
                    cooldown=-1)

    def test_cooldown_hour_minute_late_cooldown(self):
        self._check('4h,1:00',
                    next_=self._dt(2000, 1, 1, 1, 0),
                    past_=self._dt(1999, 12, 31, 1, 0),
                    cooldown=1)

    def test_cooldown_hour_minute_overlap_cooldown(self):
        # math.nan is passed through unchanged so the NamedTuple equality
        # check succeeds via element identity (NaN != NaN by value)
        self._check('18h,12:00',
                    next_=self._dt(2000, 1, 1, 12, 0),
                    past_=self._dt(1999, 12, 31, 12, 0),
                    cooldown=math.nan)

    def test_cooldown_hour_minute_multiple(self):
        self._check('50%,9:00,21:00',
                    next_=self._dt(2000, 1, 1, 9, 0),
                    past_=self._dt(1999, 12, 31, 21, 0),
                    cooldown=-1)
class TestCustomCommandCustomCountdownFieldCountdown(TestCustomField):
    """Tests for the countdown.fieldCountdown custom-command field.

    parse_next_past_cooldown is stubbed out, so each test controls the
    next/past/cooldown triple that fieldCountdown sees.
    """

    def setUp(self):
        super().setUp()
        self.args = self.args._replace(field='countdown', param='')
        patcher = patch(countdown.__name__ + '.parse_next_past_cooldown',
                        autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_parse = patcher.start()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None, None, None)

    def _set_parse(self, next_=None, past_=None, cooldown=None):
        """Make the stubbed parser return the given triple."""
        self.mock_parse.return_value = countdown.NextPastCooldown(
            next_, past_, cooldown)

    @staticmethod
    def _next():
        # 2000-01-02 00:00 UTC upcoming occurrence
        return countdown.DateTime(
            datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True)

    @staticmethod
    def _past():
        # 1999-12-31 00:00 UTC past occurrence
        return countdown.DateTime(
            datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True)

    async def test(self):
        # A blank field name is not handled
        self.args = self.args._replace(field='')
        self.assertIsNone(await countdown.fieldCountdown(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_none_time(self):
        # A None parameter is rejected before parsing
        self.args = self.args._replace(param=None)
        self.assertIsNone(await countdown.fieldCountdown(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_invalid_time(self):
        # Parsing is attempted but yields no usable times
        self.assertIsNone(await countdown.fieldCountdown(self.args))
        self.assertTrue(self.mock_parse.called)

    async def test_default(self):
        # Only a past time: the countdown target has already passed
        self._set_parse(past_=self._past())
        self.assertEqual(
            await countdown.fieldCountdown(self.args), 'has passed')
        self.assertTrue(self.mock_parse.called)

    async def test_default_prefix_suffix(self):
        # Prefix/suffix are not applied to the default message
        self.args = self.args._replace(prefix='[', suffix=']')
        self._set_parse(past_=self._past())
        self.assertEqual(
            await countdown.fieldCountdown(self.args), 'has passed')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default(self):
        # An explicit default overrides 'has passed'
        self.args = self.args._replace(default='Kappa')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldCountdown(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldCountdown(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_time(self):
        # An upcoming time formats as the remaining timedelta
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldCountdown(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix(self):
        self.args = self.args._replace(prefix='[')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldCountdown(self.args),
                         '[' + format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix_blank(self):
        self.args = self.args._replace(prefix='')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldCountdown(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix(self):
        self.args = self.args._replace(suffix=']')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldCountdown(self.args),
                         format(timedelta(days=1)) + ']')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix_blank(self):
        self.args = self.args._replace(suffix='')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldCountdown(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_not_cooldown(self):
        # Cooldown values of 0, 1, and +inf do not suppress the countdown
        for cooldown in [0, 1, math.inf]:
            self.mock_parse.reset_mock()
            self._set_parse(self._next(), self._past(), cooldown)
            self.assertEqual(await countdown.fieldCountdown(self.args),
                             format(timedelta(days=1)))
            self.assertTrue(self.mock_parse.called)

    async def test_cooldown(self):
        # Cooldown values of -inf, -1, and NaN fall back to 'has passed'
        for cooldown in [-math.inf, -1, math.nan]:
            self.mock_parse.reset_mock()
            self._set_parse(self._next(), self._past(), cooldown)
            self.assertEqual(await countdown.fieldCountdown(self.args),
                             'has passed')
            self.assertTrue(self.mock_parse.called)

    async def test_cooldown_prefix_suffix(self):
        self.args = self.args._replace(prefix='[', suffix=']')
        self._set_parse(self._next(), self._past(), -1)
        self.assertEqual(
            await countdown.fieldCountdown(self.args), 'has passed')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown_default(self):
        self.args = self.args._replace(default='Kappa')
        self._set_parse(self._next(), self._past(), -1)
        self.assertEqual(await countdown.fieldCountdown(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self._set_parse(self._next(), self._past(), -1)
        self.assertEqual(await countdown.fieldCountdown(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)
class TestCustomCommandCustomCountdownFieldSince(TestCustomField):
    """Tests for the countdown.fieldSince custom-command field.

    parse_next_past_cooldown is stubbed out, so each test controls the
    next/past/cooldown triple that fieldSince sees.
    """

    def setUp(self):
        super().setUp()
        self.args = self.args._replace(field='since', param='')
        patcher = patch(countdown.__name__ + '.parse_next_past_cooldown',
                        autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_parse = patcher.start()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None, None, None)

    def _set_parse(self, next_=None, past_=None, cooldown=None):
        """Make the stubbed parser return the given triple."""
        self.mock_parse.return_value = countdown.NextPastCooldown(
            next_, past_, cooldown)

    @staticmethod
    def _next():
        # 2000-01-02 00:00 UTC upcoming occurrence
        return countdown.DateTime(
            datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True)

    @staticmethod
    def _past():
        # 1999-12-31 00:00 UTC past occurrence
        return countdown.DateTime(
            datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True)

    async def test(self):
        # A blank field name is not handled
        self.args = self.args._replace(field='')
        self.assertIsNone(await countdown.fieldSince(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_none_time(self):
        # A None parameter is rejected before parsing
        self.args = self.args._replace(param=None)
        self.assertIsNone(await countdown.fieldSince(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_invalid_time(self):
        # Parsing is attempted but yields no usable times
        self.assertIsNone(await countdown.fieldSince(self.args))
        self.assertTrue(self.mock_parse.called)

    async def test_default(self):
        # Only a next time: the event has not happened yet
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldSince(self.args), 'is coming')
        self.assertTrue(self.mock_parse.called)

    async def test_default_prefix_suffix(self):
        # Prefix/suffix are not applied to the default message
        self.args = self.args._replace(prefix='[', suffix=']')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldSince(self.args), 'is coming')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default(self):
        # An explicit default overrides 'is coming'
        self.args = self.args._replace(default='Kappa')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldSince(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldSince(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_time(self):
        # A past time formats as the elapsed timedelta
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldSince(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix(self):
        self.args = self.args._replace(prefix='[')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldSince(self.args),
                         '[' + format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix_blank(self):
        self.args = self.args._replace(prefix='')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldSince(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix(self):
        self.args = self.args._replace(suffix=']')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldSince(self.args),
                         format(timedelta(days=1)) + ']')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix_blank(self):
        self.args = self.args._replace(suffix='')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldSince(self.args),
                         format(timedelta(days=1)))
        self.assertTrue(self.mock_parse.called)

    async def test_not_cooldown(self):
        # Cooldown values of 0, -1, and -inf still show the elapsed time.
        # BUGFIX: the original test asserted once more between resetting
        # the mock and updating its return value (a copy-paste slip that
        # re-checked the stale -1 case instead of -inf); that stray
        # assertion is removed so each cycle is reset -> set -> assert.
        for cooldown in [0, -1, -math.inf]:
            self.mock_parse.reset_mock()
            self._set_parse(self._next(), self._past(), cooldown)
            self.assertEqual(await countdown.fieldSince(self.args),
                             format(timedelta(days=1)))
            self.assertTrue(self.mock_parse.called)

    async def test_cooldown(self):
        # Cooldown values of +inf, 1, and NaN fall back to 'is coming'
        for cooldown in [math.inf, 1, math.nan]:
            self.mock_parse.reset_mock()
            self._set_parse(self._next(), self._past(), cooldown)
            self.assertEqual(await countdown.fieldSince(self.args),
                             'is coming')
            self.assertTrue(self.mock_parse.called)

    async def test_cooldown_prefix_suffix(self):
        self.args = self.args._replace(prefix='[', suffix=']')
        self._set_parse(self._next(), self._past(), 1)
        self.assertEqual(await countdown.fieldSince(self.args), 'is coming')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown_default(self):
        self.args = self.args._replace(default='Kappa')
        self._set_parse(self._next(), self._past(), 1)
        self.assertEqual(await countdown.fieldSince(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self._set_parse(self._next(), self._past(), 1)
        self.assertEqual(await countdown.fieldSince(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)
class TestCustomCommandCustomCountdownFieldNext(TestCustomField):
    """Tests for the countdown.fieldNext custom-command field.

    parse_next_past_cooldown is stubbed out, so each test controls the
    next/past/cooldown triple that fieldNext sees.
    """

    def setUp(self):
        super().setUp()
        self.args = self.args._replace(field='next', param='')
        patcher = patch(countdown.__name__ + '.parse_next_past_cooldown',
                        autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_parse = patcher.start()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None, None, None)

    def _set_parse(self, next_=None, past_=None, cooldown=None):
        """Make the stubbed parser return the given triple."""
        self.mock_parse.return_value = countdown.NextPastCooldown(
            next_, past_, cooldown)

    @staticmethod
    def _next(twenty_four_hour=True):
        # 2000-01-02 00:00 UTC upcoming occurrence
        return countdown.DateTime(
            datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc),
            twenty_four_hour)

    @staticmethod
    def _past():
        # 1999-12-31 00:00 UTC past occurrence
        return countdown.DateTime(
            datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True)

    async def test(self):
        # A blank field name is not handled
        self.args = self.args._replace(field='')
        self.assertIsNone(await countdown.fieldNext(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_none_time(self):
        # A None parameter is rejected before parsing
        self.args = self.args._replace(param=None)
        self.assertIsNone(await countdown.fieldNext(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_invalid_time(self):
        # Parsing is attempted but yields no usable times
        self.assertIsNone(await countdown.fieldNext(self.args))
        self.assertTrue(self.mock_parse.called)

    async def test_default(self):
        # No upcoming time renders as the string 'None'
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldNext(self.args), 'None')
        self.assertTrue(self.mock_parse.called)

    async def test_default_prefix_suffix(self):
        # Prefix/suffix are not applied to the 'None' default
        self.args = self.args._replace(prefix='[', suffix=']')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldNext(self.args), 'None')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default(self):
        # An explicit default replaces 'None'
        self.args = self.args._replace(default='Kappa')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldNext(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self._set_parse(past_=self._past())
        self.assertEqual(await countdown.fieldNext(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_time(self):
        # 24-hour flag set: time renders in 24-hour format
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_12_hour(self):
        # 24-hour flag cleared: time renders in 12-hour format
        self._set_parse(next_=self._next(False))
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 12:00AM UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_future(self):
        # The 'future' field name behaves like 'next'
        self.args = self.args._replace(field='future')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix(self):
        self.args = self.args._replace(prefix='[')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '[01/02/2000 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix_blank(self):
        self.args = self.args._replace(prefix='')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix(self):
        self.args = self.args._replace(suffix=']')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 00:00 UTC]')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix_blank(self):
        self.args = self.args._replace(suffix='')
        self._set_parse(next_=self._next())
        self.assertEqual(await countdown.fieldNext(self.args),
                         '01/02/2000 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown(self):
        # Every cooldown value still shows the upcoming time
        for cooldown in [0, math.inf, 1, math.nan, -1, -math.inf]:
            self.mock_parse.reset_mock()
            self._set_parse(self._next(), self._past(), cooldown)
            self.assertEqual(await countdown.fieldNext(self.args),
                             '01/02/2000 00:00 UTC')
            self.assertTrue(self.mock_parse.called)
class TestCustomCommandCustomCountdownFieldPrevious(TestCustomField):
    """Tests for the ``previous`` (aka ``past``/``prev``) countdown field.

    ``parse_next_past_cooldown`` is patched out, so each test dictates the
    (next, past, cooldown) triple it returns and asserts on the formatted
    string produced by ``countdown.fieldPrevious``.
    """

    def setUp(self):
        super().setUp()
        self.args = self.args._replace(field='previous', param='')
        # Replace the real parser so each test controls its return value.
        patcher = patch(countdown.__name__ + '.parse_next_past_cooldown',
                        autospec=True)
        self.addCleanup(patcher.stop)
        self.mock_parse = patcher.start()
        self.mock_parse.return_value = countdown.NextPastCooldown(None, None,
                                                                  None)

    async def test(self):
        # A non-matching field name returns None before parsing happens.
        self.args = self.args._replace(field='')
        self.assertIsNone(await countdown.fieldPrevious(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_none_time(self):
        # A None param is rejected without invoking the parser.
        self.args = self.args._replace(param=None)
        self.assertIsNone(await countdown.fieldPrevious(self.args))
        self.assertFalse(self.mock_parse.called)

    async def test_invalid_time(self):
        # Parser returns (None, None, None): field resolves to None.
        self.assertIsNone(await countdown.fieldPrevious(self.args))
        self.assertTrue(self.mock_parse.called)

    async def test_default(self):
        # No past timestamp -> field falls back to the default text.
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            None,
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args), 'None')
        self.assertTrue(self.mock_parse.called)

    async def test_default_prefix_suffix(self):
        # Prefix/suffix are not applied to the default text.
        self.args = self.args._replace(prefix='[', suffix=']')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            None,
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args), 'None')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default(self):
        # An explicit default overrides the 'None' fallback.
        self.args = self.args._replace(default='Kappa')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            None,
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_default_default_prefix_suffix(self):
        self.args = self.args._replace(default='Kappa',
                                       prefix='[', suffix=']')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            None,
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args), 'Kappa')
        self.assertTrue(self.mock_parse.called)

    async def test_time(self):
        # 24-hour flag True -> '00:00' style formatting.
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_12_hour(self):
        # 24-hour flag False -> '12:00AM' style formatting.
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), False),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 12:00AM UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_past(self):
        # 'past' is an accepted alias for the field name.
        self.args = self.args._replace(field='past')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_prev(self):
        # 'prev' is an accepted alias for the field name.
        self.args = self.args._replace(field='prev')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix(self):
        self.args = self.args._replace(prefix='[')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '[12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_prefix_blank(self):
        self.args = self.args._replace(prefix='')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix(self):
        self.args = self.args._replace(suffix=']')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC]')
        self.assertTrue(self.mock_parse.called)

    async def test_time_suffix_blank(self):
        self.args = self.args._replace(suffix='')
        self.mock_parse.return_value = countdown.NextPastCooldown(
            None,
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            None)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)

    async def test_cooldown(self):
        # The cooldown value (zero, +/-inf, nan, +/-1) must never change
        # which timestamp the 'previous' field displays.
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            0)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            math.inf)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            1)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            math.nan)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            -1)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
        self.mock_parse.reset_mock()
        self.mock_parse.return_value = countdown.NextPastCooldown(
            countdown.DateTime(
                datetime(2000, 1, 2, 0, 0, tzinfo=timezones.utc), True),
            countdown.DateTime(
                datetime(1999, 12, 31, 0, 0, tzinfo=timezones.utc), True),
            -math.inf)
        self.assertEqual(await countdown.fieldPrevious(self.args),
                         '12/31/1999 00:00 UTC')
        self.assertTrue(self.mock_parse.called)
|
MeGotsThis/BotGotsThis
|
pkg/custom_command/tests/test_custom_countdown.py
|
Python
|
gpl-3.0
| 84,145
|
"""Users API tests."""
from rest_framework import status
from tests.utils.api import HyperlinkedAPITestCase
from users.factory import UserFactory
from users.serializers import UserSerializer
class UserEndpointsTest(HyperlinkedAPITestCase):
    """Check that the users endpoints require and honour authentication."""

    factory = UserFactory
    serializer_class = UserSerializer

    def perform_list(self):
        """GET the user list endpoint and return the raw response."""
        return self.client.get('/api/users/')

    def test_list(self):
        self.assertRequiresAuth(self.perform_list,
                                expected_status_code=status.HTTP_200_OK)

    def perform_retrieve(self):
        """Create a user, then GET its detail endpoint."""
        user = self.factory.create()
        return self.client.get('/api/users/{}/'.format(user.pk))

    def test_retrieve(self):
        self.assertRequiresAuth(self.perform_retrieve,
                                expected_status_code=status.HTTP_200_OK)
|
oser-cs/oser-website
|
tests/test_users/test_user_api.py
|
Python
|
gpl-3.0
| 920
|
"""
whois.py
Provides a command to allow users to look up information on domain names.
"""
from contextlib import suppress
import pythonwhois
from cloudbot import hook
@hook.command
def whois(text):
    """<domain> -- Does a whois query on <domain>."""
    domain = text.strip().lower()
    # Reject an empty query up front instead of performing a network lookup.
    if not domain:
        return "Invalid input."
    try:
        data = pythonwhois.get_whois(domain, normalized=True)
    except pythonwhois.shared.WhoisException:
        return "Invalid input."

    info = []
    # We suppress errors here because different domains provide different
    # data fields.  IndexError is included too: a field may be present but
    # hold an empty list, which previously crashed the command.
    with suppress(KeyError, IndexError):
        info.append("\x02Registrar\x02: {}".format(data["registrar"][0]))
    with suppress(KeyError, IndexError):
        info.append("\x02Registered\x02: {}".format(
            data["creation_date"][0].strftime("%d-%m-%Y")))
    with suppress(KeyError, IndexError):
        info.append("\x02Expires\x02: {}".format(
            data["expiration_date"][0].strftime("%d-%m-%Y")))
    if not info:
        # Avoid returning a dangling "domain - " when no fields were found.
        return "{} - No information returned.".format(domain)
    return "{} - {}".format(domain, ", ".join(info))
|
paris-ci/CloudBot
|
plugins/whois.py
|
Python
|
gpl-3.0
| 985
|
"""
tmdbsimple.py is a wrapper for The Movie Database API.
Refer to the official API documentation for more information.
http://docs.themoviedb.apiary.io/
Created by Celia Oakley on 2013-10-31.
"""
import json
import lib.requests as requests
class TMDB:
    """Thin wrapper around The Movie Database (TMDb) HTTP API.

    The api key and base url are stored on the class itself so the endpoint
    classes defined alongside it can reach them through ``TMDB._request``.
    """

    def __init__(self, api_key, version=3):
        TMDB.api_key = str(api_key)
        TMDB.url = 'https://api.themoviedb.org' + '/' + str(version)

    @staticmethod
    def _request(method, path, params=None, json_body=None):
        """Perform an HTTP *method* request against *path*.

        *params* are query-string parameters and *json_body* is the JSON
        payload for POST/DELETE.  Returns the decoded JSON response.
        Raises Exception for unsupported methods.
        """
        # None defaults replace the original shared mutable-default dicts.
        params = {} if params is None else params
        json_body = {} if json_body is None else json_body
        url = TMDB.url + '/' + path + '?api_key=' + TMDB.api_key
        if method == 'GET':
            headers = {'Accept': 'application/json'}
            # NOTE(review): verify=False disables TLS certificate checking;
            # kept for compatibility, but should be revisited.
            content = requests.get(url, params=params, headers=headers,
                                   verify=False).content
        elif method == 'POST':
            # Query parameters are appended by hand for POST/DELETE.
            # NOTE(review): values are not URL-encoded here.
            for key in params.keys():
                url += '&' + key + '=' + params[key]
            headers = {'Content-Type': 'application/json',
                       'Accept': 'application/json'}
            content = requests.post(url, data=json.dumps(json_body),
                                    headers=headers, verify=False).content
        elif method == 'DELETE':
            for key in params.keys():
                url += '&' + key + '=' + params[key]
            headers = {'Content-Type': 'application/json',
                       'Accept': 'application/json'}
            content = requests.delete(url, data=json.dumps(json_body),
                                      headers=headers, verify=False).content
        else:
            raise Exception('method: ' + method + ' not supported.')
        response = json.loads(content.decode('utf-8'))
        return response

    #
    # Set attributes to dictionary values.
    # - e.g.
    #  >>> tmdb = TMDB()
    #  >>> movie = tmdb.Movie(103332)
    #  >>> response = movie.info()
    #  >>> movie.title # instead of response['title']
    #
    @staticmethod
    def _set_attrs_to_values(obj, response=None):
        """Copy every key of *response* onto *obj* as an attribute."""
        for key in (response or {}).keys():
            setattr(obj, key, response[key])
#
# Configuration
# http://docs.themoviedb.apiary.io/#configuration
#
class Configuration:
    """Configuration endpoint
    (http://docs.themoviedb.apiary.io/#configuration)."""

    def __init__(self):
        pass

    def info(self):
        """Fetch the system-wide API configuration, mirror it onto self."""
        data = TMDB._request('GET', 'configuration')
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Account
# http://docs.themoviedb.apiary.io/#account
#
class Account:
    """Account endpoints (http://docs.themoviedb.apiary.io/#account).

    All calls require an authenticated ``session_id``.
    NOTE(review): several paths below embed the session id where the TMDb
    docs describe an account id; behaviour is kept unchanged -- confirm.
    """

    def __init__(self, session_id):
        self.session_id = session_id

    def _with_session(self, params):
        """Return a *copy* of *params* with this session id added.

        Copying fixes the original bug where the shared mutable default
        dict (``params={}``) was modified in place on every call, and it
        also avoids mutating a dict supplied by the caller.
        """
        merged = dict(params) if params else {}
        merged['session_id'] = self.session_id
        return merged

    # need to call this first to set account id
    def info(self):
        """Fetch the account details for this session."""
        path = 'account'
        response = TMDB._request('GET', path, self._with_session(None))
        TMDB._set_attrs_to_values(self, response)
        return response

    def lists(self, params=None):
        """Lists created by the account.  Optional: page, language."""
        path = 'account' + '/' + str(self.session_id) + '/lists'
        response = TMDB._request('GET', path, self._with_session(params))
        TMDB._set_attrs_to_values(self, response)
        return response

    def favorite_movies(self, params=None):
        """Favorite movies.  Optional: page, sort_by, sort_order, language."""
        path = 'account' + '/' + str(self.session_id) + '/favorite_movies'
        response = TMDB._request('GET', path, self._with_session(params))
        TMDB._set_attrs_to_values(self, response)
        return response

    def favorite(self, json_body):
        """Mark/unmark a favorite.  Required JSON body: movie_id, favorite."""
        path = 'account' + '/' + str(json_body['movie_id']) + '/favorite'
        response = TMDB._request('POST', path, self._with_session(None),
                                 json_body)
        TMDB._set_attrs_to_values(self, response)
        return response

    def rated_movies(self, params=None):
        """Rated movies.  Optional: page, sort_by, sort_order, language."""
        path = 'account' + '/' + str(self.session_id) + '/rated_movies'
        response = TMDB._request('GET', path, self._with_session(params))
        TMDB._set_attrs_to_values(self, response)
        return response

    def movie_watchlist(self, params=None):
        """Watchlist movies.  Optional: page, sort_by, sort_order, language."""
        path = 'account' + '/' + str(self.session_id) + '/movie_watchlist'
        response = TMDB._request('GET', path, self._with_session(params))
        TMDB._set_attrs_to_values(self, response)
        return response

    def movie_watchlist_post(self, json_body):
        """Add/remove a watchlist entry.

        Required JSON body: movie_id, movie_watchlist.
        """
        path = 'account' + '/' + str(json_body['movie_id']) + \
               '/movie_watchlist'
        response = TMDB._request('POST', path, self._with_session(None),
                                 json_body)
        TMDB._set_attrs_to_values(self, response)
        return response
#
# Authentication
# http://docs.themoviedb.apiary.io/#authentication
#
# Note: to use authentication to access a user account, see:
# https://www.themoviedb.org/documentation/api/sessions
#
class Authentication:
    """Authentication endpoints
    (http://docs.themoviedb.apiary.io/#authentication).

    To use authentication to access a user account, see
    https://www.themoviedb.org/documentation/api/sessions
    """

    def __init__(self):
        pass

    def token_new(self):
        """Request a new request token."""
        data = TMDB._request('GET', 'authentication/token/new')
        TMDB._set_attrs_to_values(self, data)
        return data

    def session_new(self, params):
        """Open a session.  Required parameter: request_token."""
        data = TMDB._request('GET', 'authentication/session/new', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def guest_session_new(self):
        """Open a new guest session."""
        data = TMDB._request('GET', 'authentication/guest_session/new')
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Changes
# http://docs.themoviedb.apiary.io/#changes
#
class Changes:
    """Changes endpoints (http://docs.themoviedb.apiary.io/#changes)."""

    def __init__(self):
        pass

    def movie(self, params={}):
        """Query movie/changes.  Optional: page, start_date, end_date."""
        data = TMDB._request('GET', 'movie/changes', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def person(self, params={}):
        """Query person/changes.  Optional: page, start_date, end_date."""
        data = TMDB._request('GET', 'person/changes', params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Collections
# http://docs.themoviedb.apiary.io/#collections
#
class Collections:
    """Collections endpoints
    (http://docs.themoviedb.apiary.io/#collections)."""

    def __init__(self, id):
        self.id = id

    def info(self, params={}):
        """Collection details.  Optional parameter: language."""
        data = TMDB._request('GET', 'collection/{0}'.format(self.id), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def images(self, params={}):
        """Collection images.  Optional: language, include_image_language."""
        data = TMDB._request('GET', 'collection/{0}/images'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Companies
# http://docs.themoviedb.apiary.io/#companies
#
class Companies:
    """Companies endpoints (http://docs.themoviedb.apiary.io/#companies)."""

    def __init__(self, id=0):
        self.id = id

    def info(self):
        """Company details."""
        data = TMDB._request('GET', 'company/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def movies(self, params={}):
        """Movies for the company.  Optional: page, language."""
        data = TMDB._request('GET', 'company/{0}/movies'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Credits
# http://docs.themoviedb.apiary.io/#credits
#
class Credits:
    """Credits endpoint (http://docs.themoviedb.apiary.io/#credits)."""

    def __init__(self, credit_id):
        self.credit_id = credit_id

    def info(self, params={}):
        """Credit details.  Optional parameter: language."""
        data = TMDB._request('GET', 'credit/{0}'.format(self.credit_id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Discover
# http://docs.themoviedb.apiary.io/#discover
#
class Discover:
    """Discover endpoints (http://docs.themoviedb.apiary.io/#discover)."""

    def __init__(self):
        pass

    def movie(self, params):
        """Discover movies.

        Optional parameters: page, language, sort_by, include_adult, year,
        primary_release_year, vote_count.gte, vote_average.gte, with_genres,
        release_date.gte, release_date.lte, certification_country,
        certification.lte, with_companies.
        """
        data = TMDB._request('GET', 'discover/movie', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def tv(self, params):
        """Discover TV shows.

        Optional parameters: page, language, sort_by, first_air_date_year,
        vote_count.gte, vote_average.gte, with_genres, with_networks,
        first_air_date.gte, first_air_date.lte.
        """
        data = TMDB._request('GET', 'discover/tv', params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Find
# http://docs.themoviedb.apiary.io/#find
#
class Find:
    """Find endpoint (http://docs.themoviedb.apiary.io/#find)."""

    def __init__(self, id=0):
        self.id = id

    def info(self, params={}):
        """Look up by external id.  Required parameter: external_source."""
        data = TMDB._request('GET', 'find/{0}'.format(self.id), params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Genres
# http://docs.themoviedb.apiary.io/#genres
#
class Genres:
    """Genres endpoints (http://docs.themoviedb.apiary.io/#genres)."""

    def __init__(self, id=0):
        self.id = id

    def list(self, params={}):
        """Genre list.  Optional parameter: language."""
        data = TMDB._request('GET', 'genre/list', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def movies(self, params={}):
        """Movies for the genre.

        Optional: page, language, include_all_movies, include_adult.
        """
        data = TMDB._request('GET', 'genre/{0}/movies'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Jobs
# http://docs.themoviedb.apiary.io/#jobs
#
class Jobs:
    """Jobs endpoint (http://docs.themoviedb.apiary.io/#jobs)."""

    def __init__(self):
        pass

    def list(self):
        """Fetch job/list."""
        data = TMDB._request('GET', 'job/list')
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Keywords
# http://docs.themoviedb.apiary.io/#keywords
#
class Keywords:
    """Keywords endpoints (http://docs.themoviedb.apiary.io/#keywords)."""

    def __init__(self, id):
        self.id = id

    def info(self):
        """Keyword details."""
        data = TMDB._request('GET', 'keyword/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def movies(self, params={}):
        """Movies for the keyword.  Optional: page, language."""
        data = TMDB._request('GET', 'keyword/{0}/movies'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Lists
# http://docs.themoviedb.apiary.io/#lists
#
class Lists:
    """Lists endpoints (http://docs.themoviedb.apiary.io/#lists)."""

    def __init__(self, id=0, session_id=0):
        self.id = id
        self.session_id = session_id

    def _session(self):
        """Query parameters shared by the session-authenticated calls."""
        return {'session_id': self.session_id}

    def info(self):
        """List details."""
        data = TMDB._request('GET', 'list/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def item_status(self, params):
        """Check membership of an item.  Required parameter: movie_id."""
        data = TMDB._request('GET', 'list/{0}/item_status'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def create_list(self, json_body):
        """Create a list.

        Required JSON body: name, description.  Optional JSON body: language.
        """
        data = TMDB._request('POST', 'list', self._session(), json_body)
        TMDB._set_attrs_to_values(self, data)
        return data

    def add_item(self, json_body):
        """Add an item.  Required JSON body: media_id."""
        data = TMDB._request('POST', 'list/{0}/add_item'.format(self.id),
                             self._session(), json_body)
        TMDB._set_attrs_to_values(self, data)
        return data

    def remove_item(self, json_body):
        """Remove an item.  Required JSON body: media_id."""
        data = TMDB._request('POST', 'list/{0}/remove_item'.format(self.id),
                             self._session(), json_body)
        TMDB._set_attrs_to_values(self, data)
        return data

    def delete_list(self):
        """Delete this list."""
        data = TMDB._request('DELETE', 'list/{0}'.format(self.id),
                             self._session())
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Movies
# http://docs.themoviedb.apiary.io/#movies
#
class Movies:
    """Movie endpoints (http://docs.themoviedb.apiary.io/#movies).

    Every method performs a single request, copies the decoded JSON onto
    this instance via ``TMDB._set_attrs_to_values`` and returns it.
    """

    def __init__(self, id=0):
        self.id = id

    def _get(self, path, params=None):
        """Issue a GET for *path*, mirror the response onto self, return it."""
        response = TMDB._request('GET', path,
                                 params if params is not None else {})
        TMDB._set_attrs_to_values(self, response)
        return response

    def info(self, params=None):
        """Basic movie details.  Optional parameter: language."""
        return self._get('movie' + '/' + str(self.id), params)

    def alternative_titles(self, params=None):
        """Alternative titles.  Optional parameter: country."""
        return self._get('movie' + '/' + str(self.id) + '/alternative_titles',
                         params)

    def credits(self):
        """Cast and crew credits."""
        return self._get('movie' + '/' + str(self.id) + '/credits')

    def images(self, params=None):
        """Images.  Optional: language, include_image_language."""
        return self._get('movie' + '/' + str(self.id) + '/images', params)

    def keywords(self):
        """Plot keywords."""
        return self._get('movie' + '/' + str(self.id) + '/keywords')

    def releases(self):
        """Release information."""
        return self._get('movie' + '/' + str(self.id) + '/releases')

    def trailers(self):
        """Trailers."""
        return self._get('movie' + '/' + str(self.id) + '/trailers')

    def translations(self):
        """Translations."""
        return self._get('movie' + '/' + str(self.id) + '/translations')

    def similar_movies(self, params=None):
        """Similar movies.  Optional: page, language."""
        return self._get('movie' + '/' + str(self.id) + '/similar_movies',
                         params)

    def reviews(self, params=None):
        """Reviews.  Optional: page, language."""
        return self._get('movie' + '/' + str(self.id) + '/reviews', params)

    def lists(self, params=None):
        """Lists containing the movie.  Optional: page, language."""
        return self._get('movie' + '/' + str(self.id) + '/lists', params)

    def changes(self, params=None):
        """Changes.  Optional: start_date, end_date."""
        return self._get('movie' + '/' + str(self.id) + '/changes', params)

    def latest(self):
        """The newest movie on TMDb."""
        return self._get('movie/latest')

    def upcoming(self, params=None):
        """Upcoming movies.  Optional: page, language."""
        return self._get('movie/upcoming', params)

    def now_playing(self, params=None):
        """Movies now playing.  Optional: page, language."""
        return self._get('movie/now_playing', params)

    def popular(self, params=None):
        """Popular movies.  Optional: page, language."""
        return self._get('movie/popular', params)

    def top_rated(self, params=None):
        """Top-rated movies.  Optional: page, language.

        Fixed: the original computed ``path`` but then passed a second,
        hard-coded copy of the endpoint string to the request call.
        """
        return self._get('movie/top_rated', params)

    def account_states(self, params):
        """Account states for the movie.  Required parameter: session_id."""
        return self._get('movie' + '/' + str(self.id) + '/account_states',
                         params)

    def rating(self, params, json_body):
        """Rate the movie.

        Required parameters: session_id or guest_session_id.
        Required JSON body: value.
        """
        path = 'movie' + '/' + str(self.id) + '/rating'
        response = TMDB._request('POST', path, params, json_body)
        TMDB._set_attrs_to_values(self, response)
        return response
#
# Networks
# http://docs.themoviedb.apiary.io/#networks
#
class Networks:
    """Networks endpoint (http://docs.themoviedb.apiary.io/#networks)."""

    def __init__(self, id):
        self.id = id

    def info(self):
        """Network details."""
        data = TMDB._request('GET', 'network/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data
#
# People
# http://docs.themoviedb.apiary.io/#people
#
class People:
    """People endpoints (http://docs.themoviedb.apiary.io/#people)."""

    def __init__(self, id=0):
        self.id = id

    def info(self):
        """Person details."""
        data = TMDB._request('GET', 'person/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def movie_credits(self, params={}):
        """Movie credits.  Optional parameter: language."""
        data = TMDB._request('GET',
                             'person/{0}/movie_credits'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def tv_credits(self, params={}):
        """TV credits.  Optional parameter: language."""
        data = TMDB._request('GET', 'person/{0}/tv_credits'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def combined_credits(self, params={}):
        """Combined movie and TV credits.  Optional parameter: language."""
        data = TMDB._request('GET',
                             'person/{0}/combined_credits'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def images(self):
        """Profile images."""
        data = TMDB._request('GET', 'person/{0}/images'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def changes(self, params={}):
        """Changes.  Optional parameters: start_date, end_date."""
        data = TMDB._request('GET', 'person/{0}/changes'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def popular(self, params={}):
        """Popular people.  Optional parameter: page."""
        data = TMDB._request('GET', 'person/popular', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def latest(self):
        """The newest person on TMDb."""
        data = TMDB._request('GET', 'person/latest')
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Reviews
# http://docs.themoviedb.apiary.io/#reviews
#
class Reviews:
    """Reviews endpoint (http://docs.themoviedb.apiary.io/#reviews)."""

    def __init__(self, id):
        self.id = id

    def info(self):
        """Review details."""
        data = TMDB._request('GET', 'review/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data
#
# Search
# http://docs.themoviedb.apiary.io/#search
#
class Search:
    """Search endpoints (http://docs.themoviedb.apiary.io/#search).

    Every method requires a ``query`` entry in *params*.
    """

    def __init__(self):
        pass

    def _get(self, kind, params):
        """GET search/<kind>, mirror the response onto self, return it."""
        data = TMDB._request('GET', 'search/' + kind, params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def movie(self, params):
        """Search movies.  Optional: page, language, include_adult, year,
        primary_release_year, search_type."""
        return self._get('movie', params)

    def collection(self, params):
        """Search collections.  Optional: page, language."""
        return self._get('collection', params)

    def tv(self, params):
        """Search TV shows.  Optional: page, language, first_air_date_year,
        search_type."""
        return self._get('tv', params)

    def person(self, params):
        """Search people.  Optional: page, include_adult, search_type."""
        return self._get('person', params)

    def list(self, params):
        """Search lists.  Optional: page, include_adult."""
        return self._get('list', params)

    def company(self, params):
        """Search companies.  Optional: page."""
        return self._get('company', params)

    def keyword(self, params):
        """Search keywords.  Optional: page."""
        return self._get('keyword', params)
#
# TV
# http://docs.themoviedb.apiary.io/#tv
#
class TV:
    """TV endpoints (http://docs.themoviedb.apiary.io/#tv)."""

    def __init__(self, id=0):
        self.id = id

    def info(self, params={}):
        """Show details.  Optional parameter: language."""
        data = TMDB._request('GET', 'tv/{0}'.format(self.id), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def credits(self, params={}):
        """Credits.  Optional parameter: language."""
        data = TMDB._request('GET', 'tv/{0}/credits'.format(self.id), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def external_ids(self, params={}):
        """External ids.  Optional parameter: language."""
        data = TMDB._request('GET', 'tv/{0}/external_ids'.format(self.id),
                             params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def images(self, params={}):
        """Images.  Optional: language, include_image_language."""
        data = TMDB._request('GET', 'tv/{0}/images'.format(self.id), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def translations(self):
        """Translations."""
        data = TMDB._request('GET', 'tv/{0}/translations'.format(self.id))
        TMDB._set_attrs_to_values(self, data)
        return data

    def top_rated(self, params={}):
        """Top-rated shows.  Optional: page, language."""
        data = TMDB._request('GET', 'tv/top_rated', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def popular(self, params={}):
        """Popular shows.  Optional: page, language."""
        data = TMDB._request('GET', 'tv/popular', params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# TV Seasons
# http://docs.themoviedb.apiary.io/#tvseasons
#
class TV_Seasons:
    """TV season endpoints (http://docs.themoviedb.apiary.io/#tvseasons)."""

    def __init__(self, id, season_number):
        self.id = id
        self.season_number = season_number

    def _base(self):
        """Path prefix shared by every season endpoint."""
        return 'tv/{0}/season/{1}'.format(self.id, self.season_number)

    def info(self, params={}):
        """Season details.  Optional parameter: language."""
        data = TMDB._request('GET', self._base(), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def credits(self):
        """Season credits."""
        data = TMDB._request('GET', self._base() + '/credits')
        TMDB._set_attrs_to_values(self, data)
        return data

    def external_ids(self, params={}):
        """External ids.  Optional parameter: language."""
        data = TMDB._request('GET', self._base() + '/external_ids', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def images(self, params={}):
        """Season images.  Optional parameter: language."""
        data = TMDB._request('GET', self._base() + '/images', params)
        TMDB._set_attrs_to_values(self, data)
        return data
#
# TV Episodes
# http://docs.themoviedb.apiary.io/#tvepisodes
#
class TV_Episodes:
    """TV episode endpoints (http://docs.themoviedb.apiary.io/#tvepisodes)."""

    def __init__(self, id, season_number, episode_number):
        self.id = id
        self.season_number = season_number
        self.episode_number = episode_number

    def _base(self):
        """Path prefix shared by every episode endpoint."""
        return 'tv/{0}/season/{1}/episode/{2}'.format(
            self.id, self.season_number, self.episode_number)

    def info(self, params={}):
        """Episode details.  Optional parameter: language."""
        data = TMDB._request('GET', self._base(), params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def credits(self, params={}):
        """Episode credits."""
        data = TMDB._request('GET', self._base() + '/credits', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def external_ids(self, params={}):
        """External ids.  Optional parameter: language."""
        data = TMDB._request('GET', self._base() + '/external_ids', params)
        TMDB._set_attrs_to_values(self, data)
        return data

    def images(self, params={}):
        """Episode images.  Optional parameter: language."""
        data = TMDB._request('GET', self._base() + '/images', params)
        TMDB._set_attrs_to_values(self, data)
        return data
|
jetskijoe/SickGear
|
lib/tmdb_api/tmdb_api.py
|
Python
|
gpl-3.0
| 29,528
|
from MESH import SimulationPlanar
# Planar MESH simulation of two GaAs slabs backed by PEC mirrors and
# separated by a vacuum gap (presumably a near-field radiative transfer
# calculation — confirm against the MESH documentation).
s = SimulationPlanar()
# Register materials; each name is backed by a data file.
s.AddMaterial("GaAs", "GaAs.txt")
s.AddMaterial("Vacuum", "Vacuum.txt")
s.AddMaterial("PEC", "PEC.txt")
# Layer stack, bottom to top: PEC / 1 um GaAs / vacuum gap / GaAs / PEC.
s.AddLayer("PECBottom", 0, "PEC")
s.AddLayer("GaAsBottom", 1e-6, "GaAs")
s.AddLayer("VacGap", 1e-8, "Vacuum")
s.AddLayerCopy("GaAsTop", "GaAsBottom")
s.AddLayerCopy("PECTop", "PECBottom")
# Source is the bottom GaAs slab; the flux is probed inside the gap.
s.SetSourceLayer("GaAsBottom")
s.SetProbeLayer("VacGap")
# Adaptive quadrature over k-parallel, 4 worker threads, 500 integration points.
s.OptUseQuadgk()
s.SetThread(4)
s.SetKParallelIntegral(500)
# Sweep the vacuum-gap thickness from 10 nm to 100 nm in 10 nm steps.
for i in range(10, 110, 10):
    s.SetLayerThickness("VacGap", i * 1e-9)
    s.InitSimulation()
    s.IntegrateKParallel()
|
kfrancischen/MESH
|
examples/iterate_over_gap/iterate.py
|
Python
|
gpl-3.0
| 590
|
import numpy
# From: https://stackoverflow.com/questions/14177744/how-does-perspective-transformation-work-in-pil
from PIL import Image
def find_coeffs(pa, pb):
    """
    Compute the 8 perspective-transform coefficients mapping ``pa`` to ``pb``.

    pa and pb contain 4 corresponding points on image in order UL,UR,LR,LL
    UL=(0,0), UR=(w,0), LR=(w,h), LL=(0,h)
    Units for pa and pb are pixels.
    pa is target plane (e.g. [(0, 0), (256, 0), (256, 256), (0, 256)])
    pb is source plane (e.g. [(0, 0), (256, 0), (new_width, height), (xshift, height)])

    Returns a length-8 numpy array (a, b, c, d, e, f, g, h) such that
    x_src = (a*x + b*y + c) / (g*x + h*y + 1) and
    y_src = (d*x + e*y + f) / (g*x + h*y + 1) for (x, y) in the target plane
    (the coefficient layout PIL's Image.PERSPECTIVE transform expects).
    """
    matrix = []
    for p1, p2 in zip(pa, pb):
        matrix.append([p1[0], p1[1], 1, 0, 0, 0, -p2[0]*p1[0], -p2[0]*p1[1]])
        matrix.append([0, 0, 0, p1[0], p1[1], 1, -p2[1]*p1[0], -p2[1]*p1[1]])
    # numpy.float was removed in NumPy 1.24 -- use the builtin float dtype.
    A = numpy.array(matrix, dtype=float)
    B = numpy.array(pb, dtype=float).reshape(8)
    # Solve the normal equations A^T A x = A^T B directly; equivalent to the
    # old matrix-inverse formulation but without np.matrix and explicit inv().
    AtA = numpy.dot(A.T, A)
    AtB = numpy.dot(A.T, B)
    res = numpy.linalg.solve(AtA, AtB)
    return res.reshape(8)
def fix_plot_img( UL, UR, LR, LL, img):
    """
    Correct plot so that xmin and xmax have same y value.
    And, ymin/ymax have same x value

    UL, UR, LR, LL: measured corner points (pixels) of the plot in *img*.
    img: PIL image to correct.
    Returns a new RGBA image with the perspective distortion removed and
    exposed corners filled with the image's most common color.
    """
    # source points
    pb = [UL, UR, LR, LL]
    xlo = (LL[0]+UL[0])/2
    xhi = (UR[0]+LR[0])/2
    # BUG FIX: the top edge y must average UL and UR (was UL twice).
    yhi = (UL[1]+UR[1])/2
    ylo = (LL[1]+LR[1])/2
    # make target points (an axis-aligned rectangle)
    UL = (xlo, yhi)
    UR = (xhi, yhi)
    LR = (xhi, ylo)
    LL = (xlo, ylo)
    pa = [UL,UR,LR,LL]
    # get most common color in original (used to fill exposed corners)
    w,h = img.size
    img_rgba = img.convert('RGBA')
    pixels = img_rgba.getcolors(w*h)
    # BUG FIX: getcolors() returns an unsorted list of (count, color);
    # pick the entry with the maximum count instead of the first one.
    most_frequent_pixel = max(pixels, key=lambda cp: cp[0])[1]
    print( 'most_frequent_pixel =',most_frequent_pixel ) # (count, (color)) e.g. (505888, (255, 255, 255, 255))
    # fix original image (may have black corners from rotation/transform)
    coeffs = find_coeffs(pa, pb)
    img_fixed = img_rgba.transform(img.size, Image.PERSPECTIVE, coeffs, Image.BICUBIC)
    # Composite over a solid background so transparent corners get filled.
    fff = Image.new('RGBA', img_rgba.size, most_frequent_pixel)
    img_out = Image.composite(img_fixed, fff, img_fixed)
    return img_out
if __name__=="__main__":
    # Example: undistort a digitized plot image using hand-measured corners.
    img_name = "rot_poly_p3.jpg"
    # Measured corner positions (pixels) of the plot area: UL, UR, LR, LL.
    UL = (105., 93.)
    UR = (848., 51.)
    LR = (877., 627.)
    LL = (135., 666.)
    img = Image.open(img_name)
    print('img.size =',img.size)
    img_und = fix_plot_img( UL, UR, LR, LL, img)
    # Save the corrected image next to the original with an 'undistort_' prefix.
    img_und.save('undistort_'+img_name)
|
sonofeft/DigiPlot
|
digiplot/examples/fix_distortion.py
|
Python
|
gpl-3.0
| 2,359
|
# -*- coding: utf-8 -*-
# Copyright (C) 2005 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dialog for lengthening or shortening durations."""
import gaupol
from aeidon.i18n import _, n_
from gi.repository import Gtk
__all__ = ("DurationAdjustDialog",)
class DurationAdjustDialog(gaupol.BuilderDialog):
    """Dialog for lengthening or shortening durations."""

    # Widget names bound from the .ui file by BuilderDialog as
    # ``self._<name>`` attributes; keep in sync with the UI file.
    _widgets = [
        "all_radio",
        "current_radio",
        "gap_check",
        "gap_spin",
        "lengthen_check",
        "max_check",
        "max_spin",
        "min_check",
        "min_spin",
        "selected_radio",
        "shorten_check",
        "speed_spin",
    ]

    def __init__(self, parent, application):
        """Initialize a :class:`DurationAdjustDialog` instance."""
        gaupol.BuilderDialog.__init__(self, "duration-adjust-dialog.ui")
        self.application = application
        self._init_dialog(parent)
        self._init_values()
        self._init_sensitivities()

    def _adjust_durations(self):
        """Adjust durations of subtitles."""
        conf = gaupol.conf.duration_adjust
        target = self._get_target()
        for page in self.application.get_target_pages(target):
            self.application.set_current_page(page)
            rows = page.project.adjust_durations(
                indices=self.application.get_target_rows(target),
                speed=conf.speed,
                lengthen=conf.lengthen,
                shorten=conf.shorten,
                maximum=(conf.maximum if conf.use_maximum else None),
                minimum=(conf.minimum if conf.use_minimum else None),
                gap=conf.gap if conf.use_gap else None)
            self.application.flash_message(n_(
                "Adjusted duration of {:d} subtitle",
                "Adjusted durations of {:d} subtitles",
                len(rows)).format(len(rows)))

    def _get_target(self):
        """Return the selected target."""
        if self._selected_radio.get_active():
            return gaupol.targets.SELECTED
        if self._current_radio.get_active():
            return gaupol.targets.CURRENT
        if self._all_radio.get_active():
            return gaupol.targets.ALL
        raise ValueError("Invalid target radio state")

    def _init_dialog(self, parent):
        """Initialize the dialog."""
        self.add_button(_("_Cancel"), Gtk.ResponseType.CANCEL)
        self.add_button(_("_Adjust"), Gtk.ResponseType.OK)
        self.set_default_response(Gtk.ResponseType.OK)
        self.set_transient_for(parent)
        self.set_modal(True)

    def _init_sensitivities(self):
        """Initialize sensitivities of widgets."""
        # Fire each widget's handler once so the initial sensitivity
        # state matches the initial values set in _init_values.
        self._all_radio.emit("toggled")
        self._current_radio.emit("toggled")
        self._gap_check.emit("toggled")
        self._gap_spin.emit("value-changed")
        self._lengthen_check.emit("toggled")
        self._max_check.emit("toggled")
        self._max_spin.emit("value-changed")
        self._min_check.emit("toggled")
        self._min_spin.emit("value-changed")
        self._selected_radio.emit("toggled")
        self._shorten_check.emit("toggled")
        self._speed_spin.emit("value-changed")

    def _init_values(self):
        """Initialize default values for widgets."""
        conf = gaupol.conf.duration_adjust
        self._gap_check.set_active(conf.use_gap)
        self._gap_spin.set_value(conf.gap)
        self._lengthen_check.set_active(conf.lengthen)
        self._max_check.set_active(conf.use_maximum)
        self._max_spin.set_value(conf.maximum)
        self._min_check.set_active(conf.use_minimum)
        self._min_spin.set_value(conf.minimum)
        self._shorten_check.set_active(conf.shorten)
        self._speed_spin.set_value(conf.speed)
        self._selected_radio.set_active(conf.target == gaupol.targets.SELECTED)
        self._current_radio.set_active(conf.target == gaupol.targets.CURRENT)
        self._all_radio.set_active(conf.target == gaupol.targets.ALL)
        # Fall back to the current page if nothing is selected.
        page = self.application.get_current_page()
        rows = page.view.get_selected_rows()
        if not rows and conf.target == gaupol.targets.SELECTED:
            self._current_radio.set_active(True)
        self._selected_radio.set_sensitive(bool(rows))

    def _update_speed_spin_sensitivity(self):
        """Enable the speed spin button iff lengthening or shortening is on."""
        lengthen = self._lengthen_check.get_active()
        shorten = self._shorten_check.get_active()
        self._speed_spin.set_sensitive(lengthen or shorten)

    def _on_gap_check_toggled(self, check_button):
        """Set sensitivity of the gap spin button."""
        self._gap_spin.set_sensitive(check_button.get_active())

    def _on_lengthen_check_toggled(self, *args):
        """Set sensitivity of the speed spin button."""
        self._update_speed_spin_sensitivity()

    def _on_max_check_toggled(self, check_button):
        """Set sensitivity of the maximum spin button."""
        self._max_spin.set_sensitive(check_button.get_active())

    def _on_min_check_toggled(self, check_button):
        """Set sensitivity of the minimum spin button."""
        self._min_spin.set_sensitive(check_button.get_active())

    def _on_response(self, dialog, response):
        """Save settings and adjust durations."""
        # Settings are persisted regardless of the response, matching the
        # original behavior; durations are adjusted only on OK.
        conf = gaupol.conf.duration_adjust
        conf.gap = self._gap_spin.get_value()
        conf.lengthen = self._lengthen_check.get_active()
        conf.maximum = self._max_spin.get_value()
        conf.minimum = self._min_spin.get_value()
        conf.speed = self._speed_spin.get_value()
        conf.shorten = self._shorten_check.get_active()
        conf.target = self._get_target()
        conf.use_gap = self._gap_check.get_active()
        conf.use_maximum = self._max_check.get_active()
        conf.use_minimum = self._min_check.get_active()
        if response == Gtk.ResponseType.OK:
            self._adjust_durations()

    def _on_shorten_check_toggled(self, *args):
        """Set sensitivity of the speed spin button."""
        self._update_speed_spin_sensitivity()
|
otsaloma/gaupol
|
gaupol/dialogs/duration_adjust.py
|
Python
|
gpl-3.0
| 6,650
|
# How to generate a secret key with Python
# via http://flask.pocoo.org/docs/quickstart/
import os
# 24 bytes of OS-level randomness, suitable as a Flask SECRET_KEY.
# NOTE(review): the value is computed but not printed or stored; this
# snippet is meant to be run interactively (REPL) to see the result.
os.urandom(24)
|
migueib17/IV-PLUCO-MFF
|
secretkey.py
|
Python
|
gpl-3.0
| 115
|
from django.contrib import admin
from django.utils.translation import ugettext as _
from notifications.admin import NotificationAdmin
from notifications.models import Notification
# Replace the stock NotificationAdmin registration with the restricted
# per-user version defined below.
admin.site.unregister(Notification)
def mark_as_read_action(modeladmin, request, queryset):
    """Admin action: mark every selected notification as read."""
    queryset.mark_all_as_read()
# Label shown in the admin action drop-down (translatable).
mark_as_read_action.short_description = _("Mark selected as read")
@admin.register(Notification)
class _NotificationAdmin(NotificationAdmin):
    """
    Use this list only as some list of Notifications

    Read-only, per-user view: each admin user sees only their own
    notifications and cannot add or delete entries.
    """

    list_display = (
        "timestamp",
        "verb",
        "description",
        "level",
        "unread",
    )
    # Everything shown is read-only; the only mutation path is the
    # "mark as read" action.
    fields = list_display
    readonly_fields = list_display
    list_filter = ("unread", "level")
    actions = (mark_as_read_action,)

    def get_queryset(self, request):
        # Restrict the change list to the requesting user's notifications.
        return super().get_queryset(request).filter(recipient=request.user)

    def has_delete_permission(self, request, obj=None):
        # Notifications are never deletable from the admin.
        return False

    def has_add_permission(self, request, obj=None):
        # Notifications are created programmatically, not via the admin.
        return False
|
auto-mat/klub
|
apps/notifications_edit/admin.py
|
Python
|
gpl-3.0
| 1,049
|
# -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher (clemens.prescher@gmail.com)
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from .BackgroundExtraction import extract_background
from .Pattern import Pattern
from .jcpds import jcpds
|
erangre/Dioptas
|
dioptas/model/util/__init__.py
|
Python
|
gpl-3.0
| 1,109
|
#
# Tuxemon
# Copyright (c) 2014-2017 William Edwards <shadowapex@gmail.com>,
# Benjamin Bean <superman2k5@gmail.com>
#
# This file is part of Tuxemon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import annotations
from tuxemon.event import get_npc
from tuxemon.event.eventaction import EventAction
from typing import NamedTuple, final
class NpcRunActionParameters(NamedTuple):
    """Parameters for the ``npc_run`` event action."""
    # Either "player" or an NPC slug name (e.g. "npc_maple").
    npc_slug: str
@final
class NpcRun(EventAction[NpcRunActionParameters]):
    """
    Set the NPC movement speed to the global run speed.

    Script usage:
        .. code-block::

            npc_run <npc_slug>

    Script parameters:
        npc_slug: Either "player" or npc slug name (e.g. "npc_maple").

    """

    name = "npc_run"
    param_class = NpcRunActionParameters

    def start(self) -> None:
        # Resolve the target NPC in the current session; get_npc may
        # return None for an unknown slug, which we treat as a bug here.
        npc = get_npc(self.session, self.parameters.npc_slug)
        assert npc
        # The global run rate comes from the client configuration.
        npc.moverate = self.session.client.config.player_runrate
|
Tuxemon/Tuxemon
|
tuxemon/event/actions/npc_run.py
|
Python
|
gpl-3.0
| 1,572
|
import numpy as np
import trep
# NOTE: this script uses Python 2 syntax (print statements).
# Four-bar-style linkage: two pendulum links plus a distance-constrained
# third link, driven by a torque on theta_1.
# set mass, length, and gravity:
m = 1.0; l = 1.0; g = 9.8;
# create system
system = trep.System()
# define frames
frames = [
    trep.rz("theta_1", name="Link1"), [
        trep.ty(-l, name="Mass1", mass=m), [
            trep.rz("theta_2", name="Link2"), [
                trep.ty(-l, name="Mass2", mass=m)]]],
    trep.tx(2*l, name="Link3Anchor")]
# add frames to system
system.import_frames(frames)
# add link 3 as a distance constraint
trep.constraints.Distance(system, "Mass2", "Link3Anchor", l)
# set gravity
trep.potentials.Gravity(system, (0, -g, 0))
# add and set torque input on theta_1
trep.forces.ConfigForce(system, "theta_1", "torque1")
system.get_input('torque1').u = 2.0
# solve for equilibrium configuration
system.q = system.minimize_potential_energy()
# compute null space and set velocities
# (dq = [1, -h1/h2] satisfies h1*dq1 + h2*dq2 = 0, i.e. the initial
# velocity lies in the null space of the constraint gradient)
h1 = system.constraints[0].h_dq(system.get_config('theta_1'))
h2 = system.constraints[0].h_dq(system.get_config('theta_2'))
system.dq = [1.000, -h1/h2]
# print configuration and linearizations
print "===================="
print "TREP RESULTS:"
print "===================="
print "q = ", system.q
print "v = ", system.dq, "\r\n"
print "State Linearization:"
print np.vstack([np.hstack([np.zeros([2,2]),np.eye(2)]),
                 np.hstack([system.f_dq(),system.f_ddq()])]), "\r\n"
print "Input Linearization:"
print system.f_du()
#################################################################
# numerical tests for validation:
def test_ddq_dq(system, q=None, eps=0.001):
    """Central-difference approximation of d(ddq)/dq at configuration q.

    q defaults to the system's current configuration; eps is the
    finite-difference step. Returns an (nQ, nQ) numpy array.
    """
    # BUG FIX: use 'is None' -- 'q == None' is elementwise (and deprecated)
    # when q is a numpy array.
    if q is None:
        q = system.q
    ddq_dq = np.zeros((system.nQ,system.nQ))
    system.q = q
    f = system.f()
    for j in range(system.nQ):
        system.configs[j].q += eps
        fp = system.f()
        system.configs[j].q -= 2*eps
        fm = system.f()
        # BUG FIX: restore the perturbed coordinate so later columns (and
        # later calls) are evaluated at the unperturbed configuration.
        system.configs[j].q += eps
        df_approx = ((fp-f)/eps + (f-fm)/eps)/2.0
        ddq_dq[:,j] = df_approx
    return ddq_dq
def test_ddq_ddq(system, dq=None, eps=0.001):
    """Central-difference approximation of d(ddq)/d(dq) at velocity dq.

    dq defaults to the system's current velocity; eps is the
    finite-difference step. Returns an (nQ, nQ) numpy array.
    """
    # BUG FIX: 'is None' instead of '== None' (numpy-array safe).
    if dq is None:
        dq = system.dq
    ddq_ddq = np.zeros((system.nQ,system.nQ))
    system.dq = dq
    f = system.f()
    for j in range(system.nQ):
        system.configs[j].dq += eps
        fp = system.f()
        system.configs[j].dq -= 2*eps
        fm = system.f()
        # BUG FIX: restore the perturbed velocity component.
        system.configs[j].dq += eps
        df_approx = ((fp-f)/eps + (f-fm)/eps)/2.0
        ddq_ddq[:,j] = df_approx
    return ddq_ddq
def test_ddq_du(system, u=None, eps=0.001):
    """Central-difference approximation of d(ddq)/du at input u.

    u defaults to the system's current input vector; eps is the
    finite-difference step. Returns an (nQ, nu) numpy array.
    """
    # BUG FIX: 'is None' instead of '== None' (numpy-array safe).
    if u is None:
        u = system.u
    ddq_du = np.zeros((system.nQ,system.nu))
    system.u = u
    f = system.f()
    for j in range(system.nu):
        system.inputs[j].u += eps
        fp = system.f()
        system.inputs[j].u -= 2*eps
        fm = system.f()
        # BUG FIX: restore the perturbed input.
        system.inputs[j].u += eps
        df_approx = ((fp-f)/eps + (f-fm)/eps)/2.0
        ddq_du[:,j] = df_approx
    return ddq_du
# Cross-check trep's analytic linearization against the central-difference
# approximations above (Python 2 print statements).
print ""
print "===================="
print "NUMERICAL TESTS:"
print "===================="
print "q = ", system.q
print "v = ", system.dq, "\r\n"
print "State Linearization:"
print np.vstack([np.hstack([np.zeros([2,2]),np.eye(2)]),
                 np.hstack([test_ddq_dq(system, eps=1e-6),
                            test_ddq_ddq(system, eps=1e-6)])]), "\r\n"
print "Input Linearization:"
print test_ddq_du(system, eps=1e-6)
|
MurpheyLab/trep
|
examples/papers/acc2010/acc2010.py
|
Python
|
gpl-3.0
| 3,162
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
sys.path += ['../']
from mingus.containers.Note import Note
import unittest
from mingus.containers.mt_exceptions import NoteFormatError
class test_Note(unittest.TestCase):
    """Unit tests for mingus' Note container: ordering, int/hertz
    conversion, transposition and shorthand notation."""

    def setUp(self):
        # Fixture notes spanning several octaves.
        self.c = Note('C', 5)
        self.c1 = Note('C')
        self.c2 = Note('C', 3)
        self.b4 = Note('B', 4)
        self.b5 = Note('B', 5)

    def test_cmp(self):
        # Ordering across octaves and accidentals.
        self.assertTrue(self.c1 <= self.b5)
        self.assertTrue(self.c < self.b5)
        self.assertTrue(self.c1 < self.b5)
        self.assertTrue(self.c2 < self.b5)
        self.assertTrue(self.c > self.b4, '%s %s' % (self.c, self.b4))
        self.assertTrue(self.c1 < self.b4)
        self.assertTrue(self.c2 < self.b4)
        self.assertTrue(self.b4 < self.b5)
        self.assertTrue(Note('C') > Note('Cb'))

    def test_to_int(self):
        # Notes compare equal to their integer (MIDI-style) values.
        self.assertEqual(48, Note('C', 4))
        self.assertEqual(47, Note('Cb', 4))
        self.assertEqual(36, int(self.c2))
        self.assertEqual(71, int(self.b5))
        self.assertEqual(59, int(self.b4))

    def test_set_note(self):
        n = Note()
        self.assertTrue(n.set_note('C', 5, {}))
        n.empty()
        self.assertTrue(n.set_note('C-5'))
        self.assertTrue(n.set_note('C', 5))
        self.assertTrue(n.set_note('C#-12', 5))
        # Invalid note names/formats must raise NoteFormatError.
        self.assertRaises(NoteFormatError, n.set_note, 'H')
        self.assertRaises(NoteFormatError, n.set_note, 'C 23')
        self.assertRaises(NoteFormatError, n.set_note, 'C# 123')

    def test_to_hertz(self):
        # A4 = 440 Hz; each octave doubles the frequency.
        self.assertEqual(Note('A', 0).to_hertz(), 27.5)
        self.assertEqual(Note('A', 1).to_hertz(), 55)
        self.assertEqual(Note('A', 2).to_hertz(), 110)
        self.assertEqual(Note('A', 3).to_hertz(), 220)
        self.assertEqual(Note('A', 4).to_hertz(), 440)
        self.assertEqual(Note('A', 5).to_hertz(), 880)
        self.assertEqual(Note('A', 6).to_hertz(), 1760)

    def test_from_hertz(self):
        # from_hertz rounds to the nearest note (55.5 Hz -> A1).
        a = Note()
        self.assertEqual(a.from_hertz(55.5), Note('A', 1))
        self.assertEqual(a.from_hertz(110), Note('A', 2))
        a.from_hertz(220)
        self.assertEqual(a, Note('A', 3))
        a.from_hertz(440)
        self.assertEqual(a, Note('A', 4))
        a.from_hertz(880)
        self.assertEqual(a, Note('A', 5))
        a.from_hertz(1760)
        self.assertEqual(a, Note('A', 6))

    def test_transpose(self):
        # transpose(interval, up=True) shifts in place; second argument
        # False transposes downwards.
        a = Note('C')
        a.transpose('3')
        self.assertEqual(Note('E'), a)
        a.transpose('b2')
        self.assertEqual(Note('F'), a)
        a.transpose('5')
        self.assertEqual(Note('C', 5), a)
        a.transpose('5', False)
        self.assertEqual(Note('F'), a)
        a = Note('G-5')
        a.transpose('5')
        self.assertEqual(Note('D-6'), a)
        a.transpose('5', False)
        self.assertEqual(Note('G-5'), a)
        a.transpose('5', False)
        self.assertEqual(Note('C-5'), a)

    def test_from_int(self):
        # 12 semitones per octave: 0 -> C0, 12 -> C1.
        self.assertEqual(Note('C', 0), Note().from_int(0))
        self.assertEqual(Note('C', 1), Note().from_int(12))

    def test_measure(self):
        # measure() is a signed semitone distance.
        self.assertTrue(Note('C').measure(Note('D')) == 2)
        self.assertTrue(Note('D').measure(Note('C')) == -2)

    def test_to_shorthand(self):
        # Helmholtz-style shorthand: commas below C2, apostrophes above C3.
        self.assertTrue(Note('C-0').to_shorthand() == 'C,,')
        self.assertTrue(Note('C-2').to_shorthand() == 'C')
        self.assertTrue(Note('C-3').to_shorthand() == 'c')
        self.assertTrue(Note('C-4').to_shorthand() == "c'")
        self.assertTrue(Note('C-9').to_shorthand() == "c''''''")

    def test_from_shorthand(self):
        self.assertTrue(Note().from_shorthand('C,,') == Note('C-0'))
        self.assertTrue(Note().from_shorthand('C') == Note('C-2'))
        self.assertTrue(Note().from_shorthand('c') == Note('C-3'))
        self.assertTrue(Note().from_shorthand("c'") == Note('C-4'))
        self.assertTrue(Note().from_shorthand("c''''''") == Note('C-9'))
def suite():
    # Legacy test-runner hook: collect all tests from test_Note.
    return unittest.TestLoader().loadTestsFromTestCase(test_Note)
|
anthonyt/mingus-counterpoint
|
unittest/test_Note.py
|
Python
|
gpl-3.0
| 4,038
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-18 04:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``status`` boolean field (default False) to Questionnaire."""

    dependencies = [
        ('edu_warning', '0004_auto_20171017_2227'),
    ]

    operations = [
        migrations.AddField(
            model_name='questionnaire',
            name='status',
            field=models.BooleanField(default=False),
        ),
    ]
|
ran777/edu_intell
|
edu_warning/migrations/0005_questionnaire_status.py
|
Python
|
gpl-3.0
| 463
|
# -*- coding:utf8 -*-
#!/usr/bin/python
# Python: 3.5.1
# Platform: Windows
# Author: Heyn (heyunhuan@gmail.com)
# Program: weChat
# History: 2016-09-24
import functools
import json
import mimetypes
import multiprocessing
import os
import platform
import random
import re
import sys
import time
import urllib
import urllib.parse
import urllib.request
import xml.dom.minidom

# pip install requests 2016-09-29
import requests
def catchKeyboardInterrupt(fn):
    """Decorator: call fn, swallowing KeyboardInterrupt (Ctrl-C).

    On interrupt a notice is printed and None is returned. The wrapper
    now forwards keyword arguments too (the original dropped them) and
    preserves fn's metadata via functools.wraps.
    """
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except KeyboardInterrupt:
            print ('\n[*] Force the exit procedure')
    return wrapper
class WebWeixin(object):
def __str__(self):
description = \
"=========================\n" + \
"[#] Web Weixin\n" + \
"[#] Debug Mode: " + str(self.DEBUG) + "\n" + \
"[#] Uuid: " + self.uuid + "\n" + \
"[#] Uin: " + str(self.uin) + "\n" + \
"[#] Sid: " + self.sid + "\n" + \
"[#] Skey: " + self.skey + "\n" + \
"[#] DeviceId: " + self.deviceId + "\n" + \
"[#] PassTicket: " + self.pass_ticket + "\n" + \
"========================="
return description
    def __init__(self):
        """Initialize an empty web-WeChat session with default settings."""
        self.DEBUG = False
        # Login/session tokens, filled in by getUUID/waitForLogin/login.
        self.uuid = ''
        self.base_uri = ''
        self.redirect_uri = ''
        self.uin = ''
        self.sid = ''
        self.skey = ''
        self.pass_ticket = ''
        # Random 15-digit device id with the protocol's 'e' prefix.
        self.deviceId = 'e' + repr(random.random())[2:17]
        self.BaseRequest = {}
        self.synckey = ''
        self.SyncKey = []
        self.User = []
        self.MemberList = []
        self.ContactList = []  # friends
        self.GroupList = []  # group chats
        self.GroupMemeberList = []  # group members
        self.PublicUsersList = []  # public / service accounts
        self.SpecialUsersList = []  # special built-in accounts
        self.autoReplyMode = True
        self.syncHost = ''
        self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.109 Safari/537.36'
        self.interactive = True
        self.autoOpen = False
        # Downloads are saved under ./saved/<per-API subfolder>.
        self.saveFolder = os.path.join(os.getcwd(), 'saved')
        self.saveSubFolders = {'webwxgeticon': 'icons', 'webwxgetheadimg': 'headimgs', 'webwxgetmsgimg': 'msgimgs',
                               'webwxgetvideo': 'videos', 'webwxgetvoice': 'voices', '_showQRCodeImg': 'qrcodes'}
        # Fixed web-WeChat application id.
        self.appid = 'wx782c26e4c19acffb'
        self.lang = 'zh_CN'
        self.lastCheckTs = time.time()
        self.memberCount = 0
        # Built-in account usernames that must be treated specially.
        self.SpecialUsers = ['newsapp', 'fmessage', 'filehelper', 'weibo', 'qqmail', 'fmessage', 'tmessage', 'qmessage', 'qqsync', 'floatbottle', 'lbsapp', 'shakeapp', 'medianote', 'qqfriend', 'readerapp', 'blogapp', 'facebookapp', 'masssendapp', 'meishiapp', 'feedsapp',
                             'voip', 'blogappweixin', 'weixin', 'brandsessionholder', 'weixinreminder', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c', 'officialaccounts', 'notification_messages', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c', 'wxitil', 'userexperience_alarm', 'notification_messages']
        self.TimeOut = 20  # minimum sync interval (seconds)
        self.media_count = -1
        self.qrcodePath = ''
        # NOTE(review): the cookie jar setup below is commented out, yet
        # webwxuploadmedia reads self.cookie -- confirm it is set elsewhere.
        # self.cookie = cookielib.CookieJar()
        # opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie))
        # opener.addheaders = [('User-agent', self.user_agent)]
        # urllib2.install_opener(opener)
def loadConfig(self, config):
if config['DEBUG']:
self.DEBUG = config['DEBUG']
if config['autoReplyMode']:
self.autoReplyMode = config['autoReplyMode']
if config['user_agent']:
self.user_agent = config['user_agent']
if config['interactive']:
self.interactive = config['interactive']
if config['autoOpen']:
self.autoOpen = config['autoOpen']
    def getUUID(self):
        """Request a login UUID from the jslogin endpoint.

        On success (QRLogin code 200) stores the UUID in self.uuid and
        returns True; otherwise returns False.
        """
        url = 'https://login.weixin.qq.com/jslogin'
        params = {
            'appid': self.appid,
            'fun': 'new',
            'lang': self.lang,
            '_': int(time.time()),
        }
        # Supplying 'data' makes urllib issue a POST request.
        request = urllib.request.Request(url=url, data=urllib.parse.urlencode(params).encode(encoding='UTF-8'))
        response = urllib.request.urlopen(request)
        data = response.read().decode('UTF-8')
        # Response is JavaScript: window.QRLogin.code = NNN; window.QRLogin.uuid = "...".
        regx = r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)"'
        pm = re.search(regx, data)
        if pm:
            code = pm.group(1)
            self.uuid = pm.group(2)
            return code == '200'
        return False
    def genQRCode(self):
        """Display the login QR code.

        On Windows the QR image is downloaded and opened; elsewhere the
        login URL is rendered via _str2qr (defined outside this chunk).
        """
        if sys.platform.startswith('win'):
            self._showQRCodeImg()
        else:
            self._str2qr('https://login.weixin.qq.com/l/' + self.uuid)
    def _showQRCodeImg(self):
        """Download the login QR code image, save it and open it (Windows)."""
        url = 'https://login.weixin.qq.com/qrcode/' + self.uuid
        params = {
            't': 'webwx',
            '_': int(time.time())
        }
        request = urllib.request.Request(url=url, data=urllib.parse.urlencode(params).encode(encoding='UTF-8'))
        response = urllib.request.urlopen(request)
        data = response.read()
        self.qrcodePath = self._saveFile('qrcode.jpg', data, '_showQRCodeImg')
        # os.startfile is Windows-only; opens the image with the default viewer.
        os.startfile(self.qrcodePath)
def waitForLogin(self, tip=1):
time.sleep(tip)
url = 'https://login.weixin.qq.com/cgi-bin/mmwebwx-bin/login?tip=%s&uuid=%s&_=%s' % (
tip, self.uuid, int(time.time()))
data = self._get(url)
pm = re.search('window.code=(\d+)', data)
code = pm.group(1)
if code == '201':
return True
elif code == '200':
pm = re.search('window.redirect_uri="(\S+?)";', data)
r_uri = pm.group(1) + '&fun=new'
self.redirect_uri = r_uri
self.base_uri = r_uri[:r_uri.rfind('/')]
return True
elif code == '408':
self._echo('[Login timeout] \n')
else:
self._echo('[Landing abnormal] \n')
return False
    def login(self):
        """Fetch session credentials from the redirect URI.

        Parses the XML response for skey/wxsid/wxuin/pass_ticket, builds
        self.BaseRequest, and returns True only if all four tokens were
        obtained.
        """
        data = self._get(self.redirect_uri)
        doc = xml.dom.minidom.parseString(data)
        root = doc.documentElement
        for node in root.childNodes:
            if node.nodeName == 'skey':
                self.skey = node.childNodes[0].data
            elif node.nodeName == 'wxsid':
                self.sid = node.childNodes[0].data
            elif node.nodeName == 'wxuin':
                self.uin = node.childNodes[0].data
            elif node.nodeName == 'pass_ticket':
                self.pass_ticket = node.childNodes[0].data
        # Any still-empty token means the login response was incomplete.
        if '' in (self.skey, self.sid, self.uin, self.pass_ticket):
            return False
        self.BaseRequest = {
            'Uin': int(self.uin),
            'Sid': self.sid,
            'Skey': self.skey,
            'DeviceID': self.deviceId,
        }
        return True
    def webwxinit(self):
        """Initialize the web session: fetch SyncKey and own user profile.

        Also builds the pipe-separated synckey string used by synccheck.
        Returns True when the server reports Ret == 0.
        """
        url = self.base_uri + '/webwxinit?pass_ticket=%s&skey=%s&r=%s' % (
            self.pass_ticket, self.skey, int(time.time()))
        params = {
            'BaseRequest': self.BaseRequest
        }
        dic = self._post(url, params)
        self.SyncKey = dic['SyncKey']
        self.User = dic['User']
        # synckey for synccheck
        self.synckey = '|'.join([str(keyVal['Key']) + '_' + str(keyVal['Val']) for keyVal in self.SyncKey['List']])
        return dic['BaseResponse']['Ret'] == 0
    def webwxstatusnotify(self):
        """Send the post-login status notification (Code 3, self-to-self).

        Returns True when the server reports Ret == 0.
        """
        url = self.base_uri + \
            '/webwxstatusnotify?lang=zh_CN&pass_ticket=%s' % (self.pass_ticket)
        params = {
            'BaseRequest': self.BaseRequest,
            "Code": 3,
            "FromUserName": self.User['UserName'],
            "ToUserName": self.User['UserName'],
            "ClientMsgId": int(time.time())
        }
        dic = self._post(url, params)
        return dic['BaseResponse']['Ret'] == 0
def webwxgetcontact(self):
SpecialUsers = self.SpecialUsers
print (self.base_uri)
url = self.base_uri + '/webwxgetcontact?pass_ticket=%s&skey=%s&r=%s' % (
self.pass_ticket, self.skey, int(time.time()))
params = {
'BaseRequest': self.BaseRequest
}
dic = self._post(url, params)
self.MemberCount = dic['MemberCount']
self.MemberList = dic['MemberList']
ContactList = self.MemberList[:]
GroupList = self.GroupList[:]
PublicUsersList = self.PublicUsersList[:]
SpecialUsersList = self.SpecialUsersList[:]
for i in range(len(ContactList) - 1, -1, -1):
Contact = ContactList[i]
if Contact['VerifyFlag'] & 8 != 0: # 公众号/服务号
ContactList.remove(Contact)
self.PublicUsersList.append(Contact)
elif Contact['UserName'] in SpecialUsers: # 特殊账号
ContactList.remove(Contact)
self.SpecialUsersList.append(Contact)
elif Contact['UserName'].find('@@') != -1: # 群聊
ContactList.remove(Contact)
self.GroupList.append(Contact)
elif Contact['UserName'] == self.User['UserName']: # 自己
ContactList.remove(Contact)
self.ContactList = ContactList
return True
def webwxbatchgetcontact(self):
url = self.base_uri + \
'/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (
int(time.time()), self.pass_ticket)
params = {
'BaseRequest': self.BaseRequest,
"Count": len(self.GroupList),
"List": [{"UserName": g['UserName'], "EncryChatRoomId":""} for g in self.GroupList]
}
dic = self._post(url, params)
ContactList = dic['ContactList']
ContactCount = dic['Count']
self.GroupList = ContactList
for i in range(len(ContactList) - 1, -1, -1):
Contact = ContactList[i]
MemberList = Contact['MemberList']
for member in MemberList:
self.GroupMemeberList.append(member)
return True
    def getNameById(self, id):
        """Look up a single contact's detail record by username id.

        Returns the server's ContactList (a list of contact dicts).
        """
        url = self.base_uri + \
            '/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (
                int(time.time()), self.pass_ticket)
        params = {
            'BaseRequest': self.BaseRequest,
            "Count": 1,
            "List": [{"UserName": id, "EncryChatRoomId": ""}]
        }
        dic = self._post(url, params)
        # blabla ...
        return dic['ContactList']
    def testsynccheck(self):
        """Probe the candidate push hosts and keep the first working one.

        Sets self.syncHost to the first host whose synccheck returns
        retcode '0'; returns True on success, False if none respond.
        """
        SyncHost = [
            'webpush.weixin.qq.com',
            'webpush2.weixin.qq.com',
            'webpush.wechat.com',
            'webpush1.wechat.com',
            'webpush2.wechat.com',
            'webpush1.wechatapp.com',
            # 'webpush.wechatapp.com'
        ]
        for host in SyncHost:
            self.syncHost = host
            [retcode, selector] = self.synccheck()
            if retcode == '0':
                return True
        return False
    def synccheck(self):
        """Long-poll the push host for new-message notifications.

        Returns [retcode, selector] as strings; retcode '0' means OK and
        selector indicates whether new content is waiting.
        """
        params = {
            'r': int(time.time()),
            'sid': self.sid,
            'uin': self.uin,
            'skey': self.skey,
            'deviceid': self.deviceId,
            'synckey': self.synckey,
            '_': int(time.time()),
        }
        url = 'https://' + self.syncHost + \
            '/cgi-bin/mmwebwx-bin/synccheck?' + urllib.parse.urlencode(params)
        data = self._get(url)
        pm = re.search(r'window.synccheck={retcode:"(\d+)",selector:"(\d+)"}', data)
        retcode = pm.group(1)
        selector = pm.group(2)
        return [retcode, selector]
    def webwxsync(self):
        """Pull new messages and refresh the SyncKey.

        On Ret == 0 updates self.SyncKey and rebuilds the synckey string
        for subsequent synccheck calls. Returns the raw response dict.
        """
        url = self.base_uri + \
            '/webwxsync?sid=%s&skey=%s&pass_ticket=%s' % (
                self.sid, self.skey, self.pass_ticket)
        params = {
            'BaseRequest': self.BaseRequest,
            'SyncKey': self.SyncKey,
            # Protocol quirk: 'rr' is the bitwise NOT of the timestamp.
            'rr': ~int(time.time())
        }
        dic = self._post(url, params)
        if self.DEBUG:
            print (json.dumps(dic, indent=4))
        if dic['BaseResponse']['Ret'] == 0:
            self.SyncKey = dic['SyncKey']
            self.synckey = '|'.join(
                [str(keyVal['Key']) + '_' + str(keyVal['Val']) for keyVal in self.SyncKey['List']])
        return dic
    def webwxsendmsg(self, word, to='filehelper'):
        """Send a plain-text message (Type 1) to the given username.

        word: message text; to: recipient username (defaults to the
        built-in file helper). Returns True when the server reports
        Ret == 0.
        """
        url = self.base_uri + \
            '/webwxsendmsg?pass_ticket=%s' % (self.pass_ticket)
        # Millisecond timestamp + random digits, used for both LocalID
        # and ClientMsgId (deduplication key).
        clientMsgId = str(int(time.time() * 1000)) + \
            str(random.random())[:5].replace('.', '')
        params = {
            'BaseRequest': self.BaseRequest,
            'Msg': {
                "Type": 1,
                "Content": word,
                "FromUserName": self.User['UserName'],
                "ToUserName": to,
                "LocalID": clientMsgId,
                "ClientMsgId": clientMsgId
            }
        }
        headers = {'content-type': 'application/json; charset=UTF-8'}
        # ensure_ascii=False keeps CJK text intact before UTF-8 encoding.
        data = json.dumps(params, ensure_ascii=False).encode('utf8')
        r = requests.post(url, data=data, headers=headers)
        dic = r.json()
        return dic['BaseResponse']['Ret'] == 0
    def webwxuploadmedia(self, image_name):
        """Upload a media file and return the server's JSON response
        (or None on failure).

        NOTE(review): as shown, this method depends on names not defined
        in this file -- ``mimetypes`` (stdlib, not imported),
        ``MultipartEncoder`` (requests_toolbelt, not imported) and
        ``self.cookie`` (its setup in __init__ is commented out). Confirm
        these are provided elsewhere, otherwise this raises
        NameError/AttributeError at call time.
        """
        url = 'https://file2.wx.qq.com/cgi-bin/mmwebwx-bin/webwxuploadmedia?f=json'
        # Running upload counter (used for the WU_FILE_<n> field id).
        self.media_count = self.media_count + 1
        # File name.
        file_name = image_name
        # MIME type of the file.
        # mime_type = application/pdf, image/jpeg, image/png, etc.
        mime_type = mimetypes.guess_type(image_name, strict=False)[0]
        # WeChat distinguishes two media classes: 'pic' (displayed inline)
        # and 'doc' (displayed as a file attachment).
        media_type = 'pic' if mime_type.split('/')[0] == 'image' else 'doc'
        # Last-modified date (hard-coded placeholder string).
        lastModifieDate = 'Thu Mar 17 2016 00:55:10 GMT+0800 (CST)'
        # File size in bytes.
        file_size = os.path.getsize(file_name)
        # PassTicket
        pass_ticket = self.pass_ticket
        # clientMediaId: millisecond timestamp + random digits.
        client_media_id = str(int(time.time() * 1000)) + \
            str(random.random())[:5].replace('.', '')
        # webwx_data_ticket must be taken from the session cookie jar.
        webwx_data_ticket = ''
        for item in self.cookie:
            if item.name == 'webwx_data_ticket':
                webwx_data_ticket = item.value
                break
        if (webwx_data_ticket == ''):
            return "None Fuck Cookie"
        uploadmediarequest = json.dumps({
            "BaseRequest": self.BaseRequest,
            "ClientMediaId": client_media_id,
            "TotalLen": file_size,
            "StartPos": 0,
            "DataLen": file_size,
            "MediaType": 4
        }, ensure_ascii=False).encode('utf8')
        # Multipart form mirroring the browser upload request.
        multipart_encoder = MultipartEncoder(
            fields={
                'id': 'WU_FILE_' + str(self.media_count),
                'name': file_name,
                'type': mime_type,
                'lastModifieDate': lastModifieDate,
                'size': str(file_size),
                'mediatype': media_type,
                'uploadmediarequest': uploadmediarequest,
                'webwx_data_ticket': webwx_data_ticket,
                'pass_ticket': pass_ticket,
                'filename': (file_name, open(file_name, 'rb'), mime_type.split('/')[1])
            },
            boundary='-----------------------------1575017231431605357584454111'
        )
        headers = {
            'Host': 'file2.wx.qq.com',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:42.0) Gecko/20100101 Firefox/42.0',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://wx2.qq.com/',
            'Content-Type': multipart_encoder.content_type,
            'Origin': 'https://wx2.qq.com',
            'Connection': 'keep-alive',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache'
        }
        r = requests.post(url, data=multipart_encoder, headers=headers)
        response_json = r.json()
        if response_json['BaseResponse']['Ret'] == 0:
            return response_json
        return None
    def webwxsendmsgimg(self, user_id, media_id):
        """Send a previously uploaded image (Type 3) to a user.

        media_id: MediaId returned by webwxuploadmedia. Returns True when
        the server reports Ret == 0.
        """
        url = 'https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxsendmsgimg?fun=async&f=json&pass_ticket=%s' % self.pass_ticket
        clientMsgId = str(int(time.time() * 1000)) + \
            str(random.random())[:5].replace('.', '')
        data_json = {
            "BaseRequest": self.BaseRequest,
            "Msg": {
                "Type": 3,
                "MediaId": media_id,
                "FromUserName": self.User['UserName'],
                "ToUserName": user_id,
                "LocalID": clientMsgId,
                "ClientMsgId": clientMsgId
            }
        }
        headers = {'content-type': 'application/json; charset=UTF-8'}
        data = json.dumps(data_json, ensure_ascii=False).encode('utf8')
        r = requests.post(url, data=data, headers=headers)
        dic = r.json()
        return dic['BaseResponse']['Ret'] == 0
    def webwxsendmsgemotion(self, user_id, media_id):
        """Send a previously uploaded animated emoticon (MediaId) to *user_id*.

        Returns True when the server acknowledges with Ret == 0.
        """
        url = 'https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxsendemoticon?fun=sys&f=json&pass_ticket=%s' % self.pass_ticket
        # Client-side message id: epoch milliseconds plus a few random digits.
        clientMsgId = str(int(time.time() * 1000)) + \
            str(random.random())[:5].replace('.', '')
        data_json = {
            "BaseRequest": self.BaseRequest,
            "Msg": {
                "Type": 47,  # 47 = emoticon message type
                "EmojiFlag": 2,
                "MediaId": media_id,
                "FromUserName": self.User['UserName'],
                "ToUserName": user_id,
                "LocalID": clientMsgId,
                "ClientMsgId": clientMsgId
            }
        }
        headers = {'content-type': 'application/json; charset=UTF-8'}
        # ensure_ascii=False keeps non-ASCII text intact; encode for the wire.
        data = json.dumps(data_json, ensure_ascii=False).encode('utf8')
        r = requests.post(url, data=data, headers=headers)
        dic = r.json()
        if self.DEBUG:
            print (json.dumps(dic, indent=4))
        return dic['BaseResponse']['Ret'] == 0
def _saveFile(self, filename, data, api=None):
fn = filename
if self.saveSubFolders[api]:
dirName = os.path.join(self.saveFolder, self.saveSubFolders[api])
if not os.path.exists(dirName):
os.makedirs(dirName)
fn = os.path.join(dirName, filename)
with open(fn, 'wb') as f:
f.write(data)
f.close()
return fn
    def webwxgeticon(self, id):
        """Download the small avatar for contact *id* and save it locally.

        NOTE(review): ``_get`` returns decoded text and ``.encode()`` then
        re-encodes it before writing; for binary JPEG data this round-trip is
        lossy — confirm how ``_get`` handles image endpoints.
        """
        url = self.base_uri + \
            '/webwxgeticon?username=%s&skey=%s' % (id, self.skey)
        data = self._get(url)
        fn = 'img_' + id + '.jpg'
        return self._saveFile(fn, data.encode(), 'webwxgeticon')
    def webwxgetheadimg(self, id):
        """Download the full-size avatar for contact *id* and save it locally.

        NOTE(review): same text-decode/encode round-trip concern as
        ``webwxgeticon`` — confirm binary payload handling in ``_get``.
        """
        url = self.base_uri + \
            '/webwxgetheadimg?username=%s&skey=%s' % (id, self.skey)
        data = self._get(url)
        fn = 'img_' + id + '.jpg'
        return self._saveFile(fn, data.encode(), 'webwxgetheadimg')
    def webwxgetmsgimg(self, msgid):
        """Download the picture attached to message *msgid*; return the path.

        NOTE(review): same text-decode/encode round-trip concern as
        ``webwxgeticon`` — confirm binary payload handling in ``_get``.
        """
        url = self.base_uri + \
            '/webwxgetmsgimg?MsgID=%s&skey=%s' % (msgid, self.skey)
        data = self._get(url)
        fn = 'img_' + msgid + '.jpg'
        return self._saveFile(fn, data.encode(), 'webwxgetmsgimg')
    # Not work now for weixin haven't support this API
    def webwxgetvideo(self, msgid):
        """Download the micro-video of message *msgid*; return the saved path.

        Kept even though the upstream API is reported unsupported (see the
        comment above).  Passes api='webwxgetvideo' so ``_get`` adds the
        Range header.
        """
        url = self.base_uri + \
            '/webwxgetvideo?msgid=%s&skey=%s' % (msgid, self.skey)
        data = self._get(url, api='webwxgetvideo')
        fn = 'video_' + msgid + '.mp4'
        return self._saveFile(fn, data.encode(), 'webwxgetvideo')
    def webwxgetvoice(self, msgid):
        """Download the voice clip of message *msgid*; return the saved path.

        NOTE(review): api is not forwarded to ``_get`` here, so the Range
        header the voice endpoint branch in ``_get`` would add is skipped —
        confirm whether that is intentional.
        """
        url = self.base_uri + \
            '/webwxgetvoice?msgid=%s&skey=%s' % (msgid, self.skey)
        data = self._get(url)
        fn = 'voice_' + msgid + '.mp3'
        return self._saveFile(fn, data.encode(), 'webwxgetvoice')
def getGroupName(self, id):
name = 'Unknown group'
for member in self.GroupList:
if member['UserName'] == id:
name = member['NickName']
if name == 'Unknown group':
# 现有群里面查不到
GroupList = self.getNameById(id)
for group in GroupList:
self.GroupList.append(group)
if group['UserName'] == id:
name = group['NickName']
MemberList = group['MemberList']
for member in MemberList:
self.GroupMemeberList.append(member)
return name
def getUserRemarkName(self, id):
name = 'Unknown group' if id[:2] == '@@' else 'Unknown person'
if id == self.User['UserName']:
return self.User['NickName'] # 自己
if id[:2] == '@@':
# 群
name = self.getGroupName(id)
else:
# 特殊账号
for member in self.SpecialUsersList:
if member['UserName'] == id:
name = member['RemarkName'] if member[
'RemarkName'] else member['NickName']
# 公众号或服务号
for member in self.PublicUsersList:
if member['UserName'] == id:
name = member['RemarkName'] if member[
'RemarkName'] else member['NickName']
# 直接联系人
for member in self.ContactList:
if member['UserName'] == id:
name = member['RemarkName'] if member[
'RemarkName'] else member['NickName']
# 群友
for member in self.GroupMemeberList:
if member['UserName'] == id:
name = member['DisplayName'] if member[
'DisplayName'] else member['NickName']
if name == 'Unknown group' or name == 'Unknown person':
print (id)
return name
def getUSerID(self, name):
for member in self.MemberList:
if name == member['RemarkName'] or name == member['NickName']:
return member['UserName']
return None
def _showMsg(self, message):
srcName = None
dstName = None
groupName = None
content = None
msg = message
if msg['raw_msg']:
srcName = self.getUserRemarkName(msg['raw_msg']['FromUserName'])
dstName = self.getUserRemarkName(msg['raw_msg']['ToUserName'])
content = msg['raw_msg']['Content'].replace(
'<', '<').replace('>', '>')
message_id = msg['raw_msg']['MsgId']
if content.find('http://weixin.qq.com/cgi-bin/redirectforward?args=') != -1:
# 地理位置消息
data = self._get(content).decode('gbk').encode('utf-8')
pos = self._searchContent('title', data, 'xml')
tree = html.fromstring(self._get(content))
url = tree.xpath('//html/body/div/img')[0].attrib['src']
for item in urlparse(url).query.split('&'):
if item.split('=')[0] == 'center':
loc = item.split('=')[-1:]
content = '%s Sends a location message - I am in [%s](%s) @ %s]' % (
srcName, pos, url, loc)
if msg['raw_msg']['ToUserName'] == 'filehelper':
# 文件传输助手
dstName = 'File Transfer Assistant'
if msg['raw_msg']['FromUserName'][:2] == '@@':
# 接收到来自群的消息
if re.search(":<br/>", content, re.IGNORECASE):
[people, content] = content.split(':<br/>')
groupName = srcName
srcName = self.getUserRemarkName(people)
dstName = 'GROUP'
else:
groupName = srcName
srcName = 'SYSTEM'
elif msg['raw_msg']['ToUserName'][:2] == '@@':
# 自己发给群的消息
groupName = dstName
dstName = 'GROUP'
# 收到了红包
if content == 'Received a red envelope, please view on the phone':
msg['message'] = content
# 指定了消息内容
if 'message' in msg.keys():
content = msg['message']
if groupName != None:
print ('%s |%s| %s -> %s: %s' % (message_id, groupName.strip(), srcName.strip(), dstName.strip(), content.replace('<br/>', '\n')))
else:
print ('%s %s -> %s: %s' % (message_id, srcName.strip(), dstName.strip(), content.replace('<br/>', '\n')))
    def handleMsg(self, r):
        """Dispatch every message in a webwxsync response by MsgType.

        Handles text (1), images (3), voice (34), business cards (42),
        emoticons (47), app shares (49), contact sync (51), micro-video (62)
        and recalls (10002); anything else is dumped as JSON.
        """
        for msg in r['AddMsgList']:
            print ('[*] You have new news, please pay attention to check.')
            if self.DEBUG:
                # Dump the raw message to a random msgNNN.json for debugging.
                fn = 'msg' + str(int(random.random() * 1000)) + '.json'
                with open(fn, 'w') as f:
                    f.write(json.dumps(msg))
                print ('[*] The message has been saved to a file: ' + fn)
            msgType = msg['MsgType']
            name = self.getUserRemarkName(msg['FromUserName'])
            # NOTE(review): these replaces are no-ops; presumably meant to
            # unescape '&lt;'/'&gt;' HTML entities — confirm against upstream.
            content = msg['Content'].replace('<', '<').replace('>', '>')
            msgid = msg['MsgId']
            if msgType == 1:
                # Plain text message.
                raw_msg = {'raw_msg': msg}
                self._showMsg(raw_msg)
                if self.autoReplyMode:
                    # TODO 2016-09-29
                    # ans = self._xiaodoubi(content)
                    ans = '[疑问]'
                    if self.webwxsendmsg(ans, msg['FromUserName']):
                        print ('Automatic response: ' + ans)
                    else:
                        print ('Automatic reply failed')
            elif msgType == 3:
                # Picture: download, show, and optionally open it.
                image = self.webwxgetmsgimg(msgid)
                raw_msg = {'raw_msg': msg,
                           'message': '%s sent a picture: %s' % (name, image)}
                self._showMsg(raw_msg)
                self._safe_open(image)
            elif msgType == 34:
                # Voice clip.
                voice = self.webwxgetvoice(msgid)
                raw_msg = {'raw_msg': msg,
                           'message': '%s sent a voice: %s' % (name, voice)}
                self._showMsg(raw_msg)
                self._safe_open(voice)
            elif msgType == 42:
                # Business card: print the embedded contact info.
                info = msg['RecommendInfo']
                print ('%s sending a business card:' % name)
                print ('=========================')
                print ('= NickName: %s' % info['NickName'])
                print ('= Alias: %s' % info['Alias'])
                print ('= Area: %s %s' % (info['Province'], info['City']))
                print ('= Gender: %s' % ['Unknown', 'male', 'Female'][info['Sex']])
                print ('=========================')
                raw_msg = {'raw_msg': msg, 'message': '%s sending a business card: %s' % (
                    name.strip(), json.dumps(info))}
                self._showMsg(raw_msg)
            elif msgType == 47:
                # Animated emoticon: extract its CDN url from the XML payload.
                url = self._searchContent('cdnurl', content)
                raw_msg = {'raw_msg': msg,
                           'message': '%s Made an animated expression, click on the link below to view: %s' % (name, url)}
                self._showMsg(raw_msg)
                self._safe_open(url)
            elif msgType == 49:
                # App message (shared link / music / blog post).
                appMsgType = defaultdict(lambda: "")
                appMsgType.update({5: 'Link', 3: 'Music', 7: 'Blog'})
                print ('%s shared a %s:' % (name, appMsgType[msg['AppMsgType']]))
                print ('=========================')
                print ('= Title: %s' % msg['FileName'])
                print ('= Desc: %s' % self._searchContent('des', content, 'xml'))
                print ('= Link: %s' % msg['Url'])
                print ('= From: %s' % self._searchContent('appname', content, 'xml'))
                print ('=========================')
                card = {
                    'title': msg['FileName'],
                    'description': self._searchContent('des', content, 'xml'),
                    'url': msg['Url'],
                    'appname': self._searchContent('appname', content, 'xml')
                }
                raw_msg = {'raw_msg': msg, 'message': '%s shared a %s: %s' % (
                    name, appMsgType[msg['AppMsgType']], json.dumps(card))}
                self._showMsg(raw_msg)
            elif msgType == 51:
                # Contact-information sync notification.
                raw_msg = {'raw_msg': msg, 'message': '[*] Successful access to contact information'}
                self._showMsg(raw_msg)
            elif msgType == 62:
                # Micro-video.
                video = self.webwxgetvideo(msgid)
                raw_msg = {'raw_msg': msg,
                           'message': '%s send a micro-video: %s' % (name, video)}
                self._showMsg(raw_msg)
                self._safe_open(video)
            elif msgType == 10002:
                # The sender recalled a message.
                raw_msg = {'raw_msg': msg, 'message': '%s withdrew a message' % name}
                self._showMsg(raw_msg)
            else:
                # Unknown type: show the whole payload for inspection.
                print ('[*] The message type is: %d,May be emoticons, pictures, links or red envelopes: %s' %
                       (msg['MsgType'], json.dumps(msg)))
                raw_msg = {
                    'raw_msg': msg, 'message': '[*] The message type is: %d,May be emoticons, pictures, links or red envelopes' % msg['MsgType']}
                self._showMsg(raw_msg)
    def listenMsgMode(self):
        """Long-poll loop: synccheck, then pull and dispatch new messages.

        Runs forever until the account is logged in elsewhere (retcode 1100 /
        1101).  Intended to run inside the child process started by start().
        """
        print ('[*] The message listener mode is entered ... Success')
        self._run('[*] Perform a synchronization line test ... ', self.testsynccheck)
        playWeChat = 0
        redEnvelope = 0
        while True:
            self.lastCheckTs = time.time()
            [retcode, selector] = self.synccheck()
            if self.DEBUG:
                print ('retcode: %s, selector: %s' % (retcode, selector))
            if retcode == '1100':
                # Logged in on the phone client.
                print ('[*] You login a WeChat on the phone, Bye')
                break
            if retcode == '1101':
                # Logged in from another web session.
                print ('[*] In other places you log on the WEB version of the WeChat, Bye')
                break
            elif retcode == '0':
                if selector == '2':
                    # New messages waiting: sync and dispatch them.
                    r = self.webwxsync()
                    if r is not None:
                        self.handleMsg(r)
                elif selector == '6':
                    # TODO
                    redEnvelope += 1
                    print ('[*] Received a suspected red envelope message %d ' % redEnvelope)
                elif selector == '7':
                    # Activity on the phone client; sync to stay consistent.
                    playWeChat += 1
                    print ('[*] You play on the phone WeChat I found %d ' % playWeChat)
                    r = self.webwxsync()
                elif selector == '0':
                    # Nothing new: idle briefly.
                    time.sleep(1)
            # NOTE(review): this sleeps for the *elapsed* time when the loop
            # iteration finished within 20s; the usual throttle would sleep
            # the *remaining* time — confirm the intended pacing.
            if (time.time() - self.lastCheckTs) <= 20:
                time.sleep(time.time() - self.lastCheckTs)
def sendMsg(self, name, word, isfile=False):
id = self.getUSerID(name)
if id:
if isfile:
with open(word, 'r') as f:
for line in f.readlines():
line = line.replace('\n', '')
self._echo('-> ' + name + ': ' + line)
if self.webwxsendmsg(line, id):
print (' [Success]')
else:
print (' [Failure]')
time.sleep(1)
else:
if self.webwxsendmsg(word, id):
print ('[*] The message was sent successfully')
else:
print ('[*] Message delivery failed')
else:
print ('[*] This user does not exist')
    def sendMsgToAll(self, word):
        """Dry-run broadcast of *word* to every direct contact.

        NOTE(review): the real webwxsendmsg call is commented out below, so
        this only prints what would be sent — presumably disabled on purpose;
        confirm before re-enabling.
        """
        for contact in self.ContactList:
            name = contact['RemarkName'] if contact[
                'RemarkName'] else contact['NickName']
            id = contact['UserName']
            self._echo('-> ' + name + ': ' + word)
            print (' [Success]')
            # if self.webwxsendmsg(word, id):
            #     print (' [Success]')
            # else:
            #     print (' [Failure]')
            time.sleep(1)
def sendImg(self, name, file_name):
response = self.webwxuploadmedia(file_name)
media_id = ""
if response is not None:
media_id = response['MediaId']
user_id = self.getUSerID(name)
response = self.webwxsendmsgimg(user_id, media_id)
def sendEmotion(self, name, file_name):
response = self.webwxuploadmedia(file_name)
media_id = ""
if response is not None:
media_id = response['MediaId']
user_id = self.getUSerID(name)
response = self.webwxsendmsgemotion(user_id, media_id)
    @catchKeyboardInterrupt
    def start(self):
        """Interactive entry point: QR login, init, then a command REPL.

        Commands: 'quit', '->name:text' (or '->all:text'), 'm->name:file'
        (send each file line), 'i->name:file' (image), 'e->name:file'
        (emoticon).  Message listening runs in a child process.
        """
        self._echo('[*] Web WeChat ... Starting')
        while True:
            # Repeat the QR dance until both scan and confirm succeed.
            self._run('[*] Getting UUID ... ', self.getUUID)
            self._echo('[*] Getting QR Code ... Success')
            self.genQRCode()
            print ('[*] Please use WeChat to scan QR code to login ... ')
            if not self.waitForLogin():
                continue
            print ('[*] Please click on the phone to confirm ... ')
            if not self.waitForLogin(0):
                continue
            break
        os.remove(self.qrcodePath)
        self._run('[*] Logging in ... ', self.login)
        self._run('[*] Wechat initialization ... ', self.webwxinit)
        self._run('[*] Start status notify ... ', self.webwxstatusnotify)
        self._run('[*] Get contact ... ', self.webwxgetcontact)
        self._echo('[*] Due %s Contact, Read to Contacts %d' % (self.MemberCount, len(self.MemberList)))
        self._echo('[*] Total %d Group | %d Contact | %d SpecialUsers | %d PublicUsers' % (len(self.GroupList),len(self.ContactList), len(self.SpecialUsersList), len(self.PublicUsersList)))
        self._run('[*] Get a group ... ', self.webwxbatchgetcontact)
        if self.DEBUG:
            print (self)
        if self.interactive and input('[*] Whether to enable auto reply mode(y/n): ') == 'y':
            self.autoReplyMode = True
            print ('[*] Automatic reply mode ... Open')
        else:
            print ('[*] Automatic reply mode ... Close')
        # 2016-09-29: on Windows, listenProcess.start() must be launched from
        # a command line; starting it from an IDE does not work.
        listenProcess = multiprocessing.Process(target = self.listenMsgMode)
        listenProcess.start()
        while True:
            text = input('')
            try :
                if text == 'quit':
                    listenProcess.terminate()
                    print('[*] Exit WeChat')
                    sys.exit(0)
                elif text[:2] == '->':
                    [name, word] = text[2:].split(':')
                    if name == 'all':
                        self.sendMsgToAll(word)
                    else:
                        self.sendMsg(name, word)
                elif text[:3] == 'm->':
                    [name, file] = text[3:].split(':')
                    self.sendMsg(name, file, True)
                elif text[:3] == 'f->':
                    print ('Send file')
                elif text[:3] == 'i->':
                    print ('Send image')
                    [name, file_name] = text[3:].split(':')
                    self.sendImg(name, file_name)
                elif text[:3] == 'e->':
                    print ('Send emoji')
                    [name, file_name] = text[3:].split(':')
                    self.sendEmotion(name, file_name)
            except Exception as e:
                # NOTE(review): swallows every error (including sys.exit's
                # SystemExit escaping is fine since it isn't an Exception);
                # consider logging `e` for diagnosis.
                print ('Input error')
def _safe_open(self, path):
if self.autoOpen:
if platform.system() == "Linux":
os.system("xdg-open %s &" % path)
else:
os.system('open %s &' % path)
def _run(self, str, func, *args):
print (str, end='')
if func(*args):
print('Success')
else:
print('Failure\n[*] Exit the program')
exit()
def _echo(self, str):
# sys.stdout.write(str)
# sys.stdout.flush()
print (str)
def _printQR(self, mat):
for i in mat:
BLACK = '\033[40m \033[0m'
WHITE = '\033[47m \033[0m'
print (''.join([BLACK if j else WHITE for j in i]))
def _str2qr(self, str):
qr = qrcode.QRCode()
qr.border = 1
qr.add_data(str)
mat = qr.get_matrix()
self._printQR(mat) # qr.print_tty() or qr.print_ascii()
def _transcoding(self, data):
if not data:
return data
result = None
if type(data) == unicode:
result = data
elif type(data) == str:
result = data.decode('utf-8')
return result
    def _get(self, url, api=None):
        """GET *url* with the web-WeChat Referer and return the decoded body.

        NOTE(review): the body is decoded as text (default UTF-8); callers
        that fetch images/voice/video re-encode the result, which corrupts
        binary payloads — confirm whether those endpoints return text.
        """
        request = urllib.request.Request(url=url)
        request.add_header('Referer', 'https://wx.qq.com/')
        # Range header for streaming endpoints (presumably required by the
        # server to start delivery).
        if api == 'webwxgetvoice':
            request.add_header('Range', 'bytes=0-')
        if api == 'webwxgetvideo':
            request.add_header('Range', 'bytes=0-')
        response = urllib.request.urlopen(request)
        data = response.read().decode()
        return data
def _post(self, url, params, jsonfmt=True):
if jsonfmt:
request = urllib.request.Request(url=url, data=json.dumps(params).encode(encoding='UTF-8'))
request.add_header(
'ContentType', 'application/json; charset=UTF-8')
else:
request = urllib.request.Request(url=url, data=urllib.parse.urlencode(params).encode(encoding='UTF-8'))
response = urllib.request.urlopen(request)
data = response.read().decode()
if jsonfmt:
return json.loads(data) #json.loads(data, object_hook=_decode_dict)
return data
def _xiaodoubi(self, word):
url = 'http://emoji.qpic.cn/wx_emoji/QsDXJwBwXEXGFwLHJCm6haBlFls30vzPQcZTib9dyhLiazKxoPX6kp0A/'
try:
r = requests.post(url, data={'chat': word}.encode(encoding='UTF-8'))
return r.content
except:
return ("Let me be alone T_T...")
def _simsimi(self, word):
key = ''
url = 'http://sandbox.api.simsimi.com/request.p?key=%s&lc=ch&ft=0.0&text=%s' % (
key, word)
r = requests.get(url)
ans = r.json()
if ans['result'] == '100':
return ans['response']
else:
return ('What you are saying')
def _searchContent(self, key, content, fmat='attr'):
if fmat == 'attr':
pm = re.search(key + '\s?=\s?"([^"<]+)"', content)
if pm:
return pm.group(1)
elif fmat == 'xml':
pm = re.search('<{0}>([^<]+)</{0}>'.format(key), content)
if not pm:
pm = re.search(
'<{0}><\!\[CDATA\[(.*?)\]\]></{0}>'.format(key), content)
if pm:
return pm.group(1)
return ('Unknown')
# Script entry point: create the client and run the interactive login loop.
if __name__ == '__main__':
    webwx = WebWeixin()
    webwx.start()
|
Heyn2016/Python
|
Python/weChat.py
|
Python
|
gpl-3.0
| 39,533
|
from . import application
|
TheFadeliskOrganization/fadelisk
|
lib/python/fadelisk/__init__.py
|
Python
|
gpl-3.0
| 27
|
from __future__ import print_function
from runtests.mpi import MPITest
from numpy.testing import assert_allclose
from numpy.testing import dec
import fastpm
try:
import nbodykit
nbodykit.setup_logging('debug')
except ImportError:
nbodykit = None
@MPITest([1, 4])
@dec.skipif(nbodykit is None, "nbodykit test doesn't work on travis; is not installed")
def test_nbkit(comm):
    """Integration test: run a small FastPM simulation through nbodykit,
    FOF-group it, save catalogs, and sanity-check the halo count (~500)."""
    from fastpm.nbkit import FastPMCatalogSource
    from nbodykit.lab import cosmology, FOF, LinearMesh

    cosmo = cosmology.Planck15
    power = cosmology.LinearPower(cosmo, 0)
    # 64^3 Gaussian initial conditions in a 256 Mpc/h box, fixed seed.
    linear = LinearMesh(power, 256., 64, seed=400, comm=comm)
    sim = FastPMCatalogSource(linear, boost=2, Nsteps=5, cosmo=cosmo)
    fof = FOF(sim, 0.2, 8)
    sim['Labels'] = fof.labels
    sim.save('nbkit-%d' % comm.size, ['Position', 'InitialPosition', 'Displacement', 'Labels'])
    features = fof.find_features()
    features.save('nbkit-fof-%d' % comm.size, ['CMPosition', 'Length'])
    #print(features._size, features._csize)
    # Loose 10% tolerance: halo counts fluctuate with the realization.
    assert_allclose(features.csize, 500, rtol=0.1)
|
rainwoodman/fastpm-python
|
fastpm/tests/test_nbkit.py
|
Python
|
gpl-3.0
| 1,058
|
from distutils.command.clean import clean
import logging
import os
from crispy_forms.bootstrap import Alert, InlineRadios
from crispy_forms.layout import (HTML, Button, Field, Fieldset,
Layout, Row, Div)
from django import forms
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.files.base import File
from django.db import models, transaction
from django.db.models import Max, Q, F
from django.forms import ModelChoiceField, ModelForm, widgets
from django.forms.forms import Form
from django.forms.models import ModelMultipleChoiceField
from django.forms.widgets import CheckboxSelectMultiple, HiddenInput, Select
from django.urls.base import reverse
from django.utils import timezone
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
import django_filters
from cmj.mixins import GoogleRecapthaMixin
from cmj.utils import CHOICE_SIGNEDS
import sapl
from sapl.base.models import AppConfig, Autor, TipoAutor
from sapl.comissoes.models import Comissao, Participacao, Composicao
from sapl.compilacao.models import (STATUS_TA_IMMUTABLE_PUBLIC,
STATUS_TA_PRIVATE)
from sapl.crispy_layout_mixin import (SaplFormLayout, form_actions, to_column,
to_row)
from sapl.crispy_layout_mixin import SaplFormHelper
from sapl.materia.models import (AssuntoMateria, Autoria, MateriaAssunto,
MateriaLegislativa, Orgao, RegimeTramitacao,
TipoDocumento, TipoProposicao, StatusTramitacao,
UnidadeTramitacao)
from sapl.norma.models import (LegislacaoCitada, NormaJuridica,
TipoNormaJuridica)
from sapl.parlamentares.models import Legislatura, Partido, Parlamentar
from sapl.protocoloadm.models import Protocolo, DocumentoAdministrativo, Anexado
from sapl.settings import MAX_DOC_UPLOAD_SIZE
from sapl.utils import (YES_NO_CHOICES, SEPARADOR_HASH_PROPOSICAO,
ChoiceWithoutValidationField,
MateriaPesquisaOrderingFilter, RangeWidgetOverride,
autor_label, autor_modal, gerar_hash_arquivo,
models_with_gr_for_model, qs_override_django_filter,
choice_anos_com_materias, FilterOverridesMetaMixin, FileFieldCheckMixin,
lista_anexados)
from .models import (AcompanhamentoMateria, Anexada, Autoria, DespachoInicial,
DocumentoAcessorio, Numeracao, Proposicao, Relatoria,
TipoMateriaLegislativa, Tramitacao, UnidadeTramitacao)
def CHOICE_TRAMITACAO():
    """Filter choices for the 'em tramitação?' selector (both / yes / no)."""
    return [
        ('', 'Ambos'),
        (1, 'Sim'),
        (0, 'Não'),
    ]
def CHOICE_TIPO_LISTAGEM():
    """Listing-style choices: detailed (1) or simplified (2)."""
    return [(1, _('Detalhada')), (2, _('Simplificada'))]
class AdicionarVariasAutoriasFilterSet(django_filters.FilterSet):
    """Name filter used when attaching several authors (Autor) at once."""

    class Meta:
        model = Autor
        fields = ['nome']

    def __init__(self, *args, **kwargs):
        super(AdicionarVariasAutoriasFilterSet, self).__init__(*args, **kwargs)
        # Single-row crispy-forms layout: the name field plus a GET submit.
        row1 = to_row([('nome', 12)])
        self.form.helper = SaplFormHelper()
        self.form.helper.form_method = 'GET'
        self.form.helper.layout = Layout(
            Fieldset(_('Filtrar Autores'),
                     row1, form_actions(label='Filtrar'))
        )
class OrgaoForm(ModelForm):
    """Create/edit form for Orgao that also mirrors the organ as an Autor."""

    class Meta:
        model = Orgao
        fields = ['nome', 'sigla', 'unidade_deliberativa',
                  'endereco', 'telefone']

    @transaction.atomic
    def save(self, commit=True):
        # Persist the Orgao, then register it in the Autor table so it can
        # author legislative materials; both writes share one transaction.
        orgao = super(OrgaoForm, self).save(commit)
        content_type = ContentType.objects.get_for_model(Orgao)
        object_id = orgao.pk
        tipo = TipoAutor.objects.get(content_type=content_type)
        nome = orgao.nome + ' - ' + orgao.sigla
        # NOTE(review): create() runs unconditionally, so saving an existing
        # Orgao again would add a duplicate Autor row — confirm this form is
        # only used for creation.
        Autor.objects.create(
            content_type=content_type,
            object_id=object_id,
            tipo=tipo,
            nome=nome
        )
        return orgao
class ReceberProposicaoForm(Form):
    """Lookup form: find a Proposicao to incorporate by its document hash."""

    cod_hash = forms.CharField(label='Código do Documento', required=True)

    def __init__(self, *args, **kwargs):
        # Single-row layout with a search button.
        row1 = to_row([('cod_hash', 12)])
        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                _('Incorporar Proposição'), row1,
                form_actions(label='Buscar Proposição')
            )
        )
        super(ReceberProposicaoForm, self).__init__(*args, **kwargs)
class MateriaSimplificadaForm(FileFieldCheckMixin, ModelForm):
    """Reduced MateriaLegislativa form with only the essential fields."""

    logger = logging.getLogger(__name__)

    class Meta:
        model = MateriaLegislativa
        fields = ['tipo', 'numero', 'ano', 'data_apresentacao',
                  'numero_protocolo', 'regime_tramitacao',
                  'em_tramitacao', 'ementa', 'tipo_apresentacao',
                  'texto_original']
        widgets = {
            # The protocol number is filled elsewhere and must not be edited.
            'numero_protocolo': forms.TextInput(attrs={'readonly': True}),
        }

    def __init__(self, *args, **kwargs):
        # Five-row crispy-forms layout.
        row1 = to_row([('tipo', 6), ('numero', 3), ('ano', 3)])
        row2 = to_row([('data_apresentacao', 6), ('numero_protocolo', 6)])
        row3 = to_row([('regime_tramitacao', 6),
                       ('em_tramitacao', 3), ('tipo_apresentacao', 3)])
        row4 = to_row([('ementa', 12)])
        row5 = to_row([('texto_original', 12)])
        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                _('Formulário Simplificado'),
                row1, row2, row3, row4, row5,
                form_actions(label='Salvar')
            )
        )
        super(MateriaSimplificadaForm, self).__init__(*args, **kwargs)
class MateriaLegislativaForm(FileFieldCheckMixin, ModelForm):
    """Full create/edit form for MateriaLegislativa.

    Validates protocol linkage (existence, exclusivity, type match),
    permission to unlink a protocol, external-origin year consistency and
    upload size; save() keeps the Protocolo rows in sync and records the
    first authorship.
    """

    logger = logging.getLogger(__name__)

    tipo_autor = ModelChoiceField(label=_('Tipo Autor'),
                                  required=False,
                                  queryset=TipoAutor.objects.all(),
                                  empty_label=_('------'),)

    autor = forms.ModelChoiceField(required=False,
                                   empty_label='------',
                                   queryset=Autor.objects.all()
                                   )

    class Meta:
        model = MateriaLegislativa
        exclude = ['texto_articulado',
                   'autores',
                   'proposicao',
                   'anexadas',
                   'data_ultima_atualizacao',
                   '_paginas',
                   'checkcheck',
                   'arquivado',
                   'metadata']
        widgets = {
            'user': forms.HiddenInput(),
            'ip': forms.HiddenInput(),
        }

    def __init__(self, *args, **kwargs):
        super(MateriaLegislativaForm, self).__init__(*args, **kwargs)
        self.fields['ementa'].widget.attrs['maxlength'] = 1000
        if self.instance and self.instance.pk:
            # On edit, authorship is managed elsewhere: hide those fields.
            self.fields['tipo_autor'] = forms.CharField(required=False,
                                                        widget=forms.HiddenInput())
            self.fields['autor'] = forms.CharField(required=False,
                                                   widget=forms.HiddenInput())
            p = Protocolo.objects.filter(
                numero=kwargs['instance'].numero_protocolo, ano=kwargs['instance'].ano)
            if kwargs['instance'].numero_protocolo and p.exists():
                # Non-superusers cannot change an already linked protocol.
                # NOTE(review): assumes kwargs['initial']['user'] is present —
                # confirm every caller supplies it.
                if not kwargs['initial']['user'].is_superuser:
                    self.fields['numero_protocolo'].widget.attrs['readonly'] = True
                self.fields['numero_protocolo'].help_text = p[0].epigrafe

    def clean(self):
        super(MateriaLegislativaForm, self).clean()
        if not self.is_valid():
            return self.cleaned_data
        cleaned_data = self.cleaned_data
        data_apresentacao = cleaned_data['data_apresentacao']
        ano = cleaned_data['ano']
        protocolo = cleaned_data['numero_protocolo']
        protocolo_antigo = self.instance.numero_protocolo
        if protocolo:
            pn = Protocolo.objects.filter(numero=protocolo, ano=ano)
            # The referenced protocol must exist.
            if not pn.exists():
                self.logger.error("Protocolo %s/%s não"
                                  " existe." % (protocolo, ano))
                raise ValidationError(_('Protocolo %s/%s não'
                                        ' existe' % (protocolo, ano)))
            if not cleaned_data['user'].is_superuser:
                # Non-superusers cannot steal a protocol already bound to
                # another document, nor bind one of a mismatched type.
                if pn.first().conteudo_protocolado and \
                        pn.first().conteudo_protocolado != self.instance:
                    self.logger.error("Protocolo %s/%s ja possui"
                                      " documento vinculado."
                                      % (protocolo, ano))
                    raise ValidationError(_('Protocolo %s/%s ja possui'
                                            ' documento vinculado - %s.'
                                            % (protocolo, ano, pn.first().conteudo_protocolado)))
                if pn.first().tipo_conteudo_protocolado != cleaned_data['tipo']:
                    self.logger.error("Tipo do Protocolo ({}) deve ser o mesmo do Tipo Matéria ({})."
                                      .format(cleaned_data['tipo'], pn.first().tipo_conteudo_protocolado))
                    raise ValidationError(
                        _('Tipo do Protocolo deve ser o mesmo do Tipo de Matéria'))
        elif protocolo_antigo and not protocolo:
            # Clearing an existing protocol link is superuser-only.
            if not cleaned_data['user'].is_superuser:
                self.logger.error(
                    "Usuário não possui permissão para desvincular protocolo via edição de matéria")
                raise ValidationError(
                    _('Usuário não possui permissão para desvincular protocolo via edição de matéria'))
        ano_origem_externa = cleaned_data['ano_origem_externa']
        data_origem_externa = cleaned_data['data_origem_externa']
        # External-origin year must agree with the external-origin date.
        if ano_origem_externa and data_origem_externa and \
                ano_origem_externa != data_origem_externa.year:
            self.logger.error("O ano de origem externa da matéria ({}) é "
                              " diferente do ano na data de origem externa ({})."
                              .format(ano_origem_externa, data_origem_externa))
            raise ValidationError(_("O ano de origem externa da matéria não "
                                    "pode ser diferente do ano na data de "
                                    "origem externa"))
        texto_original = self.cleaned_data.get('texto_original', False)
        if texto_original and texto_original.size > MAX_DOC_UPLOAD_SIZE:
            raise ValidationError("O arquivo Texto Original deve ser menor que {0:.1f} mb, o tamanho atual desse arquivo é {1:.1f} mb"
                                  .format((MAX_DOC_UPLOAD_SIZE / 1024) / 1024, (texto_original.size / 1024) / 1024))
        return cleaned_data

    def save(self, commit=False):
        iold = None
        # A missing pk means creation: the chosen author becomes the first.
        if not self.instance.pk:
            primeiro_autor = True
        else:
            primeiro_autor = False
            iold = MateriaLegislativa.objects.get(pk=self.instance.pk)
        ano = self.cleaned_data['ano']
        protocolo = self.cleaned_data['numero_protocolo']
        ano_antigo = iold.ano if iold else 0
        protocolo_antigo = iold.numero_protocolo if iold else 0
        materia = super(MateriaLegislativaForm, self).save(commit)
        materia.save()
        if protocolo:
            # Bind the (validated) protocol to this matter.
            pn = Protocolo.objects.filter(numero=protocolo, ano=ano).first()
            pn.conteudo_protocolado = materia
            pn.tipo_conteudo_protocolado = materia.tipo
            pn.save()
        if protocolo_antigo and protocolo != protocolo_antigo:
            # Unbind the previously linked protocol, if any.
            po = Protocolo.objects.filter(
                numero=protocolo_antigo, ano=ano_antigo).first()
            if po:
                po.conteudo_protocolado = None
                po.tipo_conteudo_protocolado = None
                po.save()
        if self.cleaned_data['autor']:
            autoria = Autoria()
            autoria.primeiro_autor = primeiro_autor
            autoria.materia = materia
            autoria.autor = self.cleaned_data['autor']
            autoria.save()
        return materia
class UnidadeTramitacaoForm(ModelForm):
    """UnidadeTramitacao form enforcing exactly one of comissao / orgao /
    parlamentar."""

    logger = logging.getLogger(__name__)

    class Meta:
        model = UnidadeTramitacao
        fields = ['comissao', 'orgao', 'parlamentar']

    def clean(self):
        super(UnidadeTramitacaoForm, self).clean()
        if not self.is_valid():
            return self.cleaned_data
        cleaned_data = self.cleaned_data
        # Drop empty selections so only filled fields are counted.
        for key in list(cleaned_data.keys()):
            if cleaned_data[key] is None:
                del cleaned_data[key]
        if len(cleaned_data) != 1:
            msg = _('Somente um campo deve ser preenchido!')
            self.logger.error("Somente um campo deve ser preenchido!")
            raise ValidationError(msg)
        return cleaned_data

    def save(self, commit=False):
        unidade = super(UnidadeTramitacaoForm, self).save(commit)
        cd = self.cleaned_data
        # Null out the two unselected relations (clean() removed their keys).
        if not cd.get('orgao'):
            unidade.orgao = None
        if not cd.get('parlamentar'):
            unidade.parlamentar = None
        if not cd.get('comissao'):
            unidade.comissao = None
        unidade.save()
        return unidade
class AcompanhamentoMateriaForm(GoogleRecapthaMixin, ModelForm):
    """E-mail subscription form for following a matter, behind reCAPTCHA."""

    class Meta:
        model = AcompanhamentoMateria
        fields = ['email']

    def __init__(self, *args, **kwargs):
        # Labels consumed by GoogleRecapthaMixin to build the layout.
        kwargs['title_label'] = _('Acompanhamento de Matéria por e-mail')
        kwargs['action_label'] = _('Cadastrar')
        super().__init__(*args, **kwargs)
class DocumentoAcessorioForm(FileFieldCheckMixin, ModelForm):
    """Accessory-document form with optional protocol linkage.

    Protocol number/year become read-only for non-superusers on edit; save()
    binds the matching Protocolo row to the document.
    """

    data = forms.DateField(required=True)

    numero_protocolo = forms.IntegerField(
        required=False,
        label=_('Número do Protocolo'))

    ano_protocolo = forms.IntegerField(
        required=False,
        label=_('Ano do Protocolo'))

    class Meta:
        model = DocumentoAcessorio
        fields = ['tipo', 'nome', 'data', 'autor',
                  'ementa', 'indexacao', 'arquivo', 'numero_protocolo',
                  'ano_protocolo']

    def __init__(self, *args, **kwargs):
        # The requesting user is smuggled in through 'initial'.
        self.user = kwargs['initial'].pop('user', None)
        super().__init__(*args, **kwargs)
        if self.instance.pk:
            # Pre-fill protocol fields from the linked Protocolo, if any.
            if self.instance.protocolo_gr.exists():
                self.fields['numero_protocolo'].initial = self.instance.protocolo_gr.first(
                ).numero
                self.fields['ano_protocolo'].initial = self.instance.protocolo_gr.first(
                ).ano
            if not self.user or not self.user.is_superuser:
                self.fields['numero_protocolo'].widget.attrs['readonly'] = True
                self.fields['ano_protocolo'].widget.attrs['readonly'] = True

    def clean(self):
        super(DocumentoAcessorioForm, self).clean()
        if not self.is_valid():
            return self.cleaned_data
        cd = self.cleaned_data
        arquivo = cd.get('arquivo', False)
        if arquivo and arquivo.size > MAX_DOC_UPLOAD_SIZE:
            raise ValidationError("O arquivo Texto Integral deve ser menor que {0:.1f} mb, o tamanho atual desse arquivo é {1:.1f} mb"
                                  .format((MAX_DOC_UPLOAD_SIZE / 1024) / 1024, (arquivo.size / 1024) / 1024))
        # NOTE(review): when editing with self.user is None this dereferences
        # None.is_superuser — confirm 'user' is always supplied on edit.
        if not self.instance.pk or self.user.is_superuser:
            if cd['numero_protocolo'] and cd['ano_protocolo']:
                p_list = Protocolo.objects.filter(
                    numero=cd['numero_protocolo'],
                    ano=cd['ano_protocolo'])
                if not p_list.exists():
                    raise ValidationError(_('Protocolo não encontrado!'))
        return cd

    def save(self, commit=True):
        cd = self.cleaned_data
        documento = super().save(commit)
        # Link the matching protocol (if any) to the saved document.
        p = Protocolo.objects.filter(
            numero=cd['numero_protocolo'],
            ano=cd['ano_protocolo']).first()
        if p:
            p.tipo_conteudo_protocolado = documento.tipo
            p.conteudo_protocolado = documento
            p.save()
        return documento
class DocumentoAcessorioProtocoloForm(FileFieldCheckMixin, ModelForm):
logger = logging.getLogger(__name__)
data = forms.DateField(required=True)
materia = forms.ModelChoiceField(
label=MateriaLegislativa._meta.verbose_name,
required=False,
queryset=MateriaLegislativa.objects.all(),
empty_label='Selecione',
widget=forms.HiddenInput())
tipo_materia = forms.ModelChoiceField(
label=TipoMateriaLegislativa._meta.verbose_name,
required=False,
queryset=TipoMateriaLegislativa.objects.all(),
empty_label='Selecione')
numero_materia = forms.CharField(
label='Número', required=False)
ano_materia = forms.CharField(
label='Ano', required=False)
class Meta:
model = DocumentoAcessorio
fields = ['tipo',
'nome',
'data',
'autor',
'ementa',
'indexacao',
'arquivo',
'tipo_materia',
'numero_materia',
'ano_materia',
'materia'
]
def clean(self):
super(DocumentoAcessorioProtocoloForm, self).clean()
if not self.is_valid():
return self.cleaned_data
cleaned_data = self.cleaned_data
arquivo = self.cleaned_data.get('arquivo', False)
if arquivo and arquivo.size > MAX_DOC_UPLOAD_SIZE:
raise ValidationError("O arquivo Texto Integral deve ser menor que {0:.1f} mb, o tamanho atual desse arquivo é {1:.1f} mb"
.format((MAX_DOC_UPLOAD_SIZE / 1024) / 1024, (arquivo.size / 1024) / 1024))
tm, am, nm = (cleaned_data.get('tipo_materia', ''),
cleaned_data.get('ano_materia', ''),
cleaned_data.get('numero_materia', ''))
if tm and am and nm:
try:
self.logger.debug("Tentando obter objeto MateriaLegislativa (tipo_id={}, ano={}, numero={})."
.format(tm, am, nm))
materia_de_vinculo = MateriaLegislativa.objects.get(
tipo_id=tm,
ano=am,
numero=nm
)
except ObjectDoesNotExist:
self.logger.error("Objeto MateriaLegislativa vinculada (tipo_id={}, ano={}, numero={}) não existe!"
.format(tm, am, nm))
raise ValidationError(_('Matéria Vinculada não existe!'))
else:
self.logger.info("MateriaLegislativa vinculada (tipo_id={}, ano={}, numero={}) com sucesso."
.format(tm, am, nm))
cleaned_data['materia'] = materia_de_vinculo
return self.cleaned_data
def save(self, commit=True):
documento = super(DocumentoAcessorioProtocoloForm, self).save(commit)
protocolo = self.initial['protocolo']
protocolo.tipo_conteudo_protocolado = documento.tipo
protocolo.conteudo_protocolado = documento
protocolo.save()
return documento
def __init__(self, *args, **kwargs):
    """Assemble the crispy layout: a fieldset with the document data and a
    second fieldset for optionally linking a matéria legislativa."""
    doc_rows = [
        to_row([('tipo', 4), ('nome', 5), ('data', 3)]),
        to_row([('autor', 5), ('arquivo', 7), ]),
        to_row([('ementa', 8), ('indexacao', 4), ]),
    ]

    vinculo_row = to_row(
        [('tipo_materia', 6), ('numero_materia', 3), ('ano_materia', 3)])
    # Placeholder alert: filled client-side with the linked matéria's ementa.
    alert_col = to_column(
        (Alert('<strong></strong><br><span></span>',
               css_class="ementa_materia hidden alert-info",
               dismiss=False), 12))

    fieldsets = [
        Fieldset(_('Dados do Documento Acessório'), *doc_rows),
        Fieldset(_('Vincular a Matéria Legislativa'), vinculo_row, alert_col),
    ]

    self.helper = SaplFormHelper()
    self.helper.layout = SaplFormLayout(*fieldsets)

    super().__init__(*args, **kwargs)
class RelatoriaForm(ModelForm):
    """Create/update form for a Relatoria (rapporteurship) of a matéria
    within a commission; composition and member choices are narrowed to
    the current commission."""

    logger = logging.getLogger(__name__)

    # Overrides the model widget so only compositions of the current
    # commission are offered (choices narrowed in __init__).
    composicao = forms.ModelChoiceField(
        required=True,
        empty_label='---------',
        queryset=Composicao.objects.all(),
        label=_('Composição')
    )

    class Meta:
        model = Relatoria
        fields = [
            'comissao',
            'data_designacao_relator',
            'data_destituicao_relator',
            'tipo_fim_relatoria',
            'composicao',
            'parlamentar'
        ]
        # The commission comes from the current context and must not be edited.
        widgets = {'comissao': forms.Select(attrs={'disabled': 'disabled'})}

    def __init__(self, *args, **kwargs):
        row1 = to_row([('comissao', 12)])
        row2 = to_row([('data_designacao_relator', 4),
                       ('data_destituicao_relator', 4),
                       ('tipo_fim_relatoria', 4)])
        row3 = to_row([('composicao', 4),
                       ('parlamentar', 8)])

        self.helper = SaplFormHelper()
        self.helper.layout = SaplFormLayout(
            Fieldset(_('Relatoria'), row1, row2, row3))

        super().__init__(*args, **kwargs)

        # NOTE(review): assumes the view always supplies initial['comissao'];
        # a KeyError is raised otherwise — confirm against callers.
        comissao_pk = kwargs['initial']['comissao']
        composicoes = Composicao.objects.filter(comissao_id=comissao_pk)
        self.fields['composicao'].choices = [('', '---------')] + \
            [(c.pk, c) for c in composicoes]

        # UPDATE: restrict parlamentar choices to members of the stored composition.
        if self.initial.get('composicao') and self.initial.get('parlamentar'):
            parlamentares = [(p.parlamentar.id, p.parlamentar) for p in
                             Participacao.objects.filter(composicao__comissao_id=comissao_pk,
                                                         composicao_id=self.initial['composicao'])]
            self.fields['parlamentar'].choices = [
                ('', '---------')] + parlamentares
        # INSERT: parlamentar options are populated client-side once a
        # composition is chosen.
        else:
            self.fields['parlamentar'].choices = [('', '---------')]

    def clean(self):
        """Resolve the current commission and check that the designation
        date falls inside the chosen composition's period."""
        super().clean()

        cleaned_data = self.cleaned_data

        if not self.is_valid():
            return cleaned_data

        try:
            self.logger.debug("Tentando obter objeto Comissao.")
            comissao = Comissao.objects.get(id=self.initial['comissao'])
        except ObjectDoesNotExist as e:
            self.logger.error(
                "Objeto Comissao não encontrado com id={}. A localização atual deve ser uma comissão. ".format(
                    self.initial['comissao']) + str(e))
            msg = _('A localização atual deve ser uma comissão.')
            raise ValidationError(msg)
        else:
            cleaned_data['comissao'] = comissao

        if cleaned_data['data_designacao_relator'] < cleaned_data['composicao'].periodo.data_inicio \
                or cleaned_data['data_designacao_relator'] > cleaned_data['composicao'].periodo.data_fim:
            raise ValidationError(
                _('Data de designação deve estar dentro do período da composição.'))

        return cleaned_data
class TramitacaoForm(ModelForm):
    """Create form for a Tramitacao (procedural step) of a matéria.

    Validates chronological consistency with the matéria's last step and,
    on save, updates the matéria's em_tramitacao flag and optionally
    replicates the step to attached (anexadas) matérias.
    """

    urgente = forms.ChoiceField(required=True,
                                choices=YES_NO_CHOICES,
                                initial=False,
                                label=_("Urgente?"))

    logger = logging.getLogger(__name__)

    class Meta:
        model = Tramitacao
        fields = ['data_tramitacao',
                  'unidade_tramitacao_local',
                  'status',
                  'turno',
                  'urgente',
                  'unidade_tramitacao_destino',
                  'data_encaminhamento',
                  'data_fim_prazo',
                  'texto',
                  'user',
                  'ip']
        widgets = {'user': forms.HiddenInput(),
                   'ip': forms.HiddenInput()}

    def __init__(self, *args, **kwargs):
        super(TramitacaoForm, self).__init__(*args, **kwargs)
        self.fields['data_tramitacao'].initial = timezone.now().date()
        ust = UnidadeTramitacao.objects.select_related().all()
        # Destination choices ordered by kind: active commissions first,
        # then órgãos, then parlamentares.
        unidade_tramitacao_destino = [('', '---------')] + [(ut.pk, ut)
                                                            for ut in ust if ut.comissao and ut.comissao.ativa]
        unidade_tramitacao_destino.extend(
            [(ut.pk, ut) for ut in ust if ut.orgao])
        unidade_tramitacao_destino.extend(
            [(ut.pk, ut) for ut in ust if ut.parlamentar])
        self.fields['unidade_tramitacao_destino'].choices = unidade_tramitacao_destino

    def clean(self):
        """Check date ordering and continuity with the previous tramitação.

        Raises ValidationError when the new step's origin differs from the
        last step's destination or when any date is inconsistent.
        """
        super(TramitacaoForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data

        # .get() instead of `if key in cleaned_data: var = ...`: the old
        # pattern left the locals undefined (NameError) if a key was ever
        # absent; behavior is identical when the keys are present.
        data_enc_form = cleaned_data.get('data_encaminhamento')
        data_prazo_form = cleaned_data.get('data_fim_prazo')
        data_tram_form = cleaned_data.get('data_tramitacao')

        ultima_tramitacao = Tramitacao.objects.filter(
            materia_id=self.instance.materia_id).exclude(
            id=self.instance.id).order_by(
            '-data_tramitacao',
            '-id').first()

        # Continuity/chronology checks apply only on creation.
        if not self.instance.data_tramitacao:
            if ultima_tramitacao:
                destino = ultima_tramitacao.unidade_tramitacao_destino
                if (destino != self.cleaned_data['unidade_tramitacao_local']):
                    self.logger.error("A origem da nova tramitação ({}) não é igual ao "
                                      "destino da última adicionada ({})!"
                                      .format(self.cleaned_data['unidade_tramitacao_local'], destino))
                    msg = _('A origem da nova tramitação deve ser igual ao '
                            'destino da última adicionada!')
                    raise ValidationError(msg)

            if cleaned_data['data_tramitacao'] > timezone.now().date():
                # BUGFIX: was "'... ({}) ...' + '...'.format(x)" — .format
                # bound only to the second literal, so the placeholder was
                # never filled in the log message.
                self.logger.error('A data de tramitação informada ({}) não é '
                                  'menor ou igual a data de hoje!'
                                  .format(cleaned_data['data_tramitacao']))
                msg = _(
                    'A data de tramitação deve ser ' +
                    'menor ou igual a data de hoje!')
                raise ValidationError(msg)

            if (ultima_tramitacao and
                    data_tram_form < ultima_tramitacao.data_tramitacao):
                msg = _('A data da nova tramitação deve ser ' +
                        'maior que a data da última tramitação!')
                self.logger.error("A data da nova tramitação ({}) deve ser "
                                  "maior que a data da última tramitação ({})!"
                                  .format(data_tram_form, ultima_tramitacao.data_tramitacao))
                raise ValidationError(msg)

        if data_enc_form:
            if data_enc_form < data_tram_form:
                msg = _('A data de encaminhamento deve ser ' +
                        'maior que a data de tramitação!')
                self.logger.error("A data de encaminhamento ({}) deve ser "
                                  "maior que a data de tramitação! ({})"
                                  .format(data_enc_form, data_tram_form))
                raise ValidationError(msg)

        if data_prazo_form:
            if data_prazo_form < data_tram_form:
                msg = _('A data fim de prazo deve ser ' +
                        'maior que a data de tramitação!')
                # BUGFIX: same broken "+ ... .format(...)" pattern as above.
                self.logger.error("A data fim de prazo ({}) deve ser "
                                  "maior que a data de tramitação ({})!"
                                  .format(data_prazo_form, data_tram_form))
                raise ValidationError(msg)

        return cleaned_data

    @transaction.atomic
    def save(self, commit=True):
        """Save the step, refresh the matéria's em_tramitacao flag and, when
        the app config enables it, bulk-replicate the step to anexadas."""
        tramitacao = super(TramitacaoForm, self).save(commit)
        materia = tramitacao.materia
        # Status indicator "F" (fim) means the matéria stops tramitando.
        materia.em_tramitacao = False if tramitacao.status.indicador == "F" else True
        materia.save()

        tramitar_anexadas = sapl.base.models.AppConfig.attr(
            'tramitacao_materia')
        if tramitar_anexadas:
            lista_tramitacao = []
            anexadas_list = lista_anexados(materia)
            for ma in anexadas_list:
                # Replicate only when the attached matéria has no steps yet
                # or its current location matches this step's origin.
                if not ma.tramitacao_set.all() \
                        or ma.tramitacao_set.first().unidade_tramitacao_destino == tramitacao.unidade_tramitacao_local:
                    ma.em_tramitacao = False if tramitacao.status.indicador == "F" else True
                    ma.save()
                    lista_tramitacao.append(Tramitacao(
                        status=tramitacao.status,
                        materia=ma,
                        data_tramitacao=tramitacao.data_tramitacao,
                        unidade_tramitacao_local=tramitacao.unidade_tramitacao_local,
                        data_encaminhamento=tramitacao.data_encaminhamento,
                        unidade_tramitacao_destino=tramitacao.unidade_tramitacao_destino,
                        urgente=tramitacao.urgente,
                        turno=tramitacao.turno,
                        texto=tramitacao.texto,
                        data_fim_prazo=tramitacao.data_fim_prazo,
                        user=tramitacao.user,
                        ip=tramitacao.ip
                    ))
            Tramitacao.objects.bulk_create(lista_tramitacao)
        return tramitacao
def compara_tramitacoes_mat(tramitacao1, tramitacao2):
    """Return True when two tramitações match on every public field,
    ignoring ``id``, ``materia_id`` and ``timestamp``.

    Returns False when either argument is falsy (e.g. None).
    """
    if not (tramitacao1 and tramitacao2):
        return False

    ignored = ('id', 'materia_id', 'timestamp')

    def relevant(obj):
        # Keep only public attributes that are not in the ignore list.
        return [(k, v) for k, v in obj.__dict__.items()
                if not k.startswith('_') and k not in ignored]

    return relevant(tramitacao1) == relevant(tramitacao2)
class TramitacaoUpdateForm(TramitacaoForm):
    """Update form for Tramitacao.

    Origin unit and tramitação date are frozen (hidden inputs); only the
    most recent tramitação of a matéria may change its destination. On
    save, matching steps of attached matérias are kept in sync.
    """

    unidade_tramitacao_local = forms.ModelChoiceField(
        queryset=UnidadeTramitacao.objects.all(),
        widget=forms.HiddenInput())

    data_tramitacao = forms.DateField(widget=forms.HiddenInput())

    logger = logging.getLogger(__name__)

    class Meta:
        model = Tramitacao
        fields = ['data_tramitacao',
                  'unidade_tramitacao_local',
                  'status',
                  'turno',
                  'urgente',
                  'unidade_tramitacao_destino',
                  'data_encaminhamento',
                  'data_fim_prazo',
                  'texto',
                  'user',
                  'ip'
                  ]

        widgets = {
            'data_encaminhamento': forms.DateInput(format='%d/%m/%Y'),
            'data_fim_prazo': forms.DateInput(format='%d/%m/%Y'),
            'user': forms.HiddenInput(),
            'ip': forms.HiddenInput()
        }

    def clean(self):
        """Forbid changing the destination of any step that is not the
        matéria's most recent one, and pin the immutable origin fields."""
        super(TramitacaoUpdateForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cd = self.cleaned_data
        obj = self.instance

        ultima_tramitacao = Tramitacao.objects.filter(
            materia_id=obj.materia_id).order_by(
            '-data_tramitacao',
            '-id').first()

        # Se a Tramitação que está sendo editada não for a mais recente,
        # ela não pode ter seu destino alterado.
        if ultima_tramitacao != obj:
            if cd['unidade_tramitacao_destino'] != \
                    obj.unidade_tramitacao_destino:
                self.logger.error("Você não pode mudar a Unidade de Destino desta "
                                  "tramitação para {}, pois irá conflitar com a Unidade "
                                  "Local da tramitação seguinte ({})."
                                  .format(cd['unidade_tramitacao_destino'],
                                          obj.unidade_tramitacao_destino))
                # CONSISTENCY FIX: message now wrapped in _() for i18n, like
                # every other user-facing validation message in this module.
                raise ValidationError(_(
                    'Você não pode mudar a Unidade de Destino desta '
                    'tramitação, pois irá conflitar com a Unidade '
                    'Local da tramitação seguinte'))

        # Origin fields are immutable on update.
        cd['data_tramitacao'] = obj.data_tramitacao
        cd['unidade_tramitacao_local'] = obj.unidade_tramitacao_local

        return cd

    @transaction.atomic
    def save(self, commit=True):
        """Save the edited step and propagate the edit to attached matérias
        whose matching step was a replica of the previous version."""
        ant_tram_principal = Tramitacao.objects.get(id=self.instance.id)
        nova_tram_principal = super(TramitacaoUpdateForm, self).save(commit)
        materia = nova_tram_principal.materia
        if materia.em_tramitacao != (False if nova_tram_principal.status.indicador == "F" else True):
            materia.em_tramitacao = False if nova_tram_principal.status.indicador == "F" else True
            materia.save()

        tramitar_anexadas = sapl.base.models.AppConfig.attr(
            'tramitacao_materia')
        if tramitar_anexadas:
            anexadas_list = lista_anexados(materia)
            for ma in anexadas_list:
                # Only update the attached step if it mirrored the previous
                # version of the principal step (field-by-field comparison).
                tram_anexada = ma.tramitacao_set.first()
                if compara_tramitacoes_mat(ant_tram_principal, tram_anexada):
                    tram_anexada.status = nova_tram_principal.status
                    tram_anexada.data_tramitacao = nova_tram_principal.data_tramitacao
                    tram_anexada.unidade_tramitacao_local = nova_tram_principal.unidade_tramitacao_local
                    tram_anexada.data_encaminhamento = nova_tram_principal.data_encaminhamento
                    tram_anexada.unidade_tramitacao_destino = nova_tram_principal.unidade_tramitacao_destino
                    tram_anexada.urgente = nova_tram_principal.urgente
                    tram_anexada.turno = nova_tram_principal.turno
                    tram_anexada.texto = nova_tram_principal.texto
                    tram_anexada.data_fim_prazo = nova_tram_principal.data_fim_prazo
                    tram_anexada.user = nova_tram_principal.user
                    tram_anexada.ip = nova_tram_principal.ip
                    tram_anexada.save()

                    ma.em_tramitacao = False if nova_tram_principal.status.indicador == "F" else True
                    ma.save()
        return nova_tram_principal
class LegislacaoCitadaForm(ModelForm):
    """Form linking a matéria to a cited norm (LegislacaoCitada), down to a
    specific subdivision (article, paragraph, item, ...)."""

    tipo = forms.ModelChoiceField(
        label=_('Tipo Norma'),
        required=True,
        queryset=TipoNormaJuridica.objects.all(),
        empty_label='Selecione',
    )

    numero = forms.CharField(label='Número', required=True)

    ano = forms.CharField(label='Ano', required=True)

    logger = logging.getLogger(__name__)

    class Meta:
        model = LegislacaoCitada
        fields = ['tipo',
                  'numero',
                  'ano',
                  'disposicoes',
                  'parte',
                  'livro',
                  'titulo',
                  'capitulo',
                  'secao',
                  'subsecao',
                  'artigo',
                  'paragrafo',
                  'inciso',
                  'alinea',
                  'item']

    def clean(self):
        """Resolve tipo/numero/ano into a NormaJuridica (stored in
        cleaned_data['norma']) and reject exact duplicates of the citation."""
        super(LegislacaoCitadaForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data

        try:
            self.logger.debug("Tentando obter objeto NormalJuridica (numero={}, ano={}, tipo={})."
                              .format(cleaned_data['numero'], cleaned_data['ano'], cleaned_data['tipo']))
            norma = NormaJuridica.objects.get(
                numero=cleaned_data['numero'],
                ano=cleaned_data['ano'],
                tipo=cleaned_data['tipo'])
        except ObjectDoesNotExist:
            self.logger.error("A norma a ser inclusa (numero={}, ano={}, tipo={}) "
                              "não existe no cadastro de Normas."
                              .format(cleaned_data['numero'], cleaned_data['ano'], cleaned_data['tipo']))
            msg = _('A norma a ser inclusa não existe no cadastro'
                    ' de Normas.')
            raise ValidationError(msg)
        else:
            cleaned_data['norma'] = norma

        # A duplicate is a citation of the same norm with every subdivision
        # field identical, for the same matéria.
        filtro_base = LegislacaoCitada.objects.filter(
            materia=self.instance.materia,
            norma=self.cleaned_data['norma'],
            disposicoes=self.cleaned_data['disposicoes'],
            parte=self.cleaned_data['parte'],
            livro=self.cleaned_data['livro'],
            titulo=self.cleaned_data['titulo'],
            capitulo=self.cleaned_data['capitulo'],
            secao=self.cleaned_data['secao'],
            subsecao=self.cleaned_data['subsecao'],
            artigo=self.cleaned_data['artigo'],
            paragrafo=self.cleaned_data['paragrafo'],
            inciso=self.cleaned_data['inciso'],
            alinea=self.cleaned_data['alinea'],
            item=self.cleaned_data['item'])

        # On update, exclude the record itself from the duplicate check.
        if not self.instance.id:
            if filtro_base.exists():
                msg = _('Essa Legislação já foi cadastrada.')
                self.logger.error("Essa Legislação já foi cadastrada.")
                raise ValidationError(msg)
        else:
            if filtro_base.exclude(id=self.instance.id).exists():
                msg = _('Essa Legislação já foi cadastrada.')
                self.logger.error("Essa Legislação já foi cadastrada.")
                raise ValidationError(msg)
        return cleaned_data

    def save(self, commit=False):
        # Note: commit defaults to False here, but the instance is always
        # persisted below after the resolved norma is attached.
        legislacao = super(LegislacaoCitadaForm, self).save(commit)
        legislacao.norma = self.cleaned_data['norma']
        legislacao.save()
        return legislacao
class NumeracaoForm(ModelForm):
    """Form linking a matéria to a process numbering (Numeracao)."""

    logger = logging.getLogger(__name__)

    class Meta:
        model = Numeracao
        fields = ['tipo_materia',
                  'numero_materia',
                  'ano_materia',
                  'data_materia']

    def clean(self):
        """Ensure the referenced matéria exists and that this numbering is
        not already registered for the instance's matéria."""
        super(NumeracaoForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        try:
            self.logger.info("Tentando obter objeto MateriaLegislativa (numero={}, ano={}. tipo={})."
                             .format(self.cleaned_data['numero_materia'],
                                     self.cleaned_data['ano_materia'], self.cleaned_data['tipo_materia']))
            # Existence check only; the matched object is not used.
            MateriaLegislativa.objects.get(
                numero=self.cleaned_data['numero_materia'],
                ano=self.cleaned_data['ano_materia'],
                tipo=self.cleaned_data['tipo_materia'])
        except ObjectDoesNotExist:
            msg = _('A matéria a ser inclusa não existe no cadastro'
                    ' de matérias legislativas.')
            self.logger.error("A MateriaLegislativa a ser inclusa (numero={}, ano={}. tipo={}) não existe no cadastro de matérias legislativas."
                              .format(self.cleaned_data['numero_materia'],
                                      self.cleaned_data['ano_materia'], self.cleaned_data['tipo_materia']))
            raise ValidationError(msg)

        # Reject a duplicate numbering for the same matéria.
        if Numeracao.objects.filter(
            materia=self.instance.materia,
            tipo_materia=self.cleaned_data['tipo_materia'],
            ano_materia=self.cleaned_data['ano_materia'],
            numero_materia=self.cleaned_data['numero_materia']
        ).exists():
            msg = _('Essa numeração já foi cadastrada.')
            self.logger.error("Essa numeração (materia={}, tipo_materia={}, ano_materia={}, numero_materia={}) "
                              "já foi cadastrada.".format(self.instance.materia, self.cleaned_data['tipo_materia'],
                                                          self.cleaned_data['ano_materia'], self.cleaned_data['numero_materia']))
            raise ValidationError(msg)

        return self.cleaned_data
class AnexadaForm(ModelForm):
    """Form attaching a matéria (materia_anexada) to a principal matéria.

    Validates date ordering, existence of the target matéria, and rejects
    self-attachment, duplicates and attachment cycles.
    """

    logger = logging.getLogger(__name__)

    tipo = forms.ModelChoiceField(
        label='Tipo',
        required=True,
        queryset=TipoMateriaLegislativa.objects.all(),
        empty_label='Selecione',
    )

    numero = forms.IntegerField(label='Número', required=True)
    ano = forms.CharField(label='Ano', required=True)

    def __init__(self, *args, **kwargs):
        # IDIOM FIX: the previous version did `return super().__init__(...)`;
        # __init__ must not return a value (it only worked because the super
        # call returns None).
        super(AnexadaForm, self).__init__(*args, **kwargs)

    def clean(self):
        """Resolve tipo/numero/ano into cleaned_data['materia_anexada'] and
        enforce the attachment invariants."""
        super(AnexadaForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data

        data_anexacao = cleaned_data['data_anexacao']
        # A missing desanexação date means "still attached": treat it as
        # equal to the attachment date so the ordering check passes.
        data_desanexacao = cleaned_data['data_desanexacao'] if cleaned_data['data_desanexacao'] else data_anexacao

        if data_anexacao > data_desanexacao:
            self.logger.error(
                "Data de anexação posterior à data de desanexação.")
            raise ValidationError(
                _("Data de anexação posterior à data de desanexação."))

        try:
            self.logger.info("Tentando obter objeto MateriaLegislativa (numero={}, ano={}, tipo={})."
                             .format(cleaned_data['numero'], cleaned_data['ano'], cleaned_data['tipo']))
            materia_anexada = MateriaLegislativa.objects.get(
                numero=cleaned_data['numero'],
                ano=cleaned_data['ano'],
                tipo=cleaned_data['tipo'])
        except ObjectDoesNotExist:
            msg = _('A {} {}/{} não existe no cadastro de matérias legislativas.'
                    .format(cleaned_data['tipo'], cleaned_data['numero'], cleaned_data['ano']))
            self.logger.error("A matéria a ser anexada não existe no cadastro"
                              " de matérias legislativas.")
            raise ValidationError(msg)

        materia_principal = self.instance.materia_principal
        if materia_principal == materia_anexada:
            self.logger.error("Matéria não pode ser anexada a si mesma.")
            raise ValidationError(_('Matéria não pode ser anexada a si mesma'))

        is_anexada = Anexada.objects.filter(
            materia_principal=materia_principal,
            materia_anexada=materia_anexada
        ).exclude(pk=self.instance.pk).exists()

        if is_anexada:
            self.logger.error("Matéria já se encontra anexada.")
            raise ValidationError(_('Matéria já se encontra anexada'))

        # Breadth-first walk over the attachment graph rooted at the matéria
        # being attached: if the principal shows up anywhere downstream,
        # attaching would create a cycle.
        ciclico = False
        anexadas_anexada = Anexada.objects.filter(
            materia_principal=materia_anexada)
        while anexadas_anexada and not ciclico:
            anexadas = []

            for anexa in anexadas_anexada:

                if materia_principal == anexa.materia_anexada:
                    ciclico = True
                else:
                    for a in Anexada.objects.filter(materia_principal=anexa.materia_anexada):
                        anexadas.append(a)

            anexadas_anexada = anexadas

        if ciclico:
            self.logger.error(
                "A matéria não pode ser anexada por uma de suas anexadas.")
            raise ValidationError(
                _("A matéria não pode ser anexada por uma de suas anexadas."))

        cleaned_data['materia_anexada'] = materia_anexada

        return cleaned_data

    def save(self, commit=False):
        # commit defaults to False, but the instance is always persisted
        # below once materia_anexada has been attached.
        anexada = super(AnexadaForm, self).save(commit)
        anexada.materia_anexada = self.cleaned_data['materia_anexada']
        anexada.save()
        return anexada

    class Meta:
        model = Anexada
        fields = ['tipo', 'numero', 'ano', 'data_anexacao', 'data_desanexacao']
class MateriaLegislativaFilterSet(django_filters.FilterSet):
    """Filterset behind the parametrized matéria search screen: declares the
    search filters, the crispy layout, and a qs override that restricts
    party-based searches to the affiliation period."""

    ano = django_filters.ChoiceFilter(required=False,
                                      label='Ano da Matéria',
                                      choices=choice_anos_com_materias)

    numero = django_filters.CharFilter(
        label=_('Número'),
        method='filter_numero'
    )

    # Hidden field set by the author-selection modal (see layout below).
    autoria__autor = django_filters.CharFilter(widget=forms.HiddenInput())

    autoria__primeiro_autor = django_filters.BooleanFilter(
        required=False,
        label=_('Primeiro Autor'))

    autoria__autor__parlamentar_set__filiacao__partido = django_filters.ModelChoiceFilter(
        queryset=Partido.objects.all(),
        label=_('Matérias por Partido'))

    ementa = django_filters.CharFilter(
        label=_(
            'Pesquisar expressões na ementa'),
        help_text=_(
            '"Para busca no conteúdo das matérias, use a Busca Textual acima"'),
        method='filter_ementa'
    )

    indexacao = django_filters.CharFilter(lookup_expr='icontains',
                                          label=_('Indexação'))

    em_tramitacao = django_filters.ChoiceFilter(required=False,
                                                label='Em tramitação',
                                                choices=CHOICE_TRAMITACAO)

    materiaassunto__assunto = django_filters.ModelChoiceFilter(
        queryset=AssuntoMateria.objects.all(),
        label=_('Assunto'))

    numeracao__numero_materia = django_filters.NumberFilter(
        required=False,
        label=_('Número do processo'))

    signeds = django_filters.ChoiceFilter(
        required=False,
        choices=CHOICE_SIGNEDS,
        label=_('Com Assinatura Digital?'),
        method='filter_signeds')

    o = MateriaPesquisaOrderingFilter(help_text='')

    # Plain form field (not a filter); injected into self.form in __init__.
    tipo_listagem = forms.ChoiceField(
        required=False,
        choices=CHOICE_TIPO_LISTAGEM,
        label=_('Tipo da Pesquisa'))

    class Meta(FilterOverridesMetaMixin):
        model = MateriaLegislativa
        fields = ['numero',
                  'numero_protocolo',
                  'numeracao__numero_materia',
                  'ano',
                  'tipo',
                  'data_apresentacao',
                  'data_publicacao',
                  'autoria__autor__tipo',
                  'autoria__primeiro_autor',
                  'autoria__autor__parlamentar_set__filiacao__partido',
                  'relatoria__parlamentar_id',
                  'tramitacao__unidade_tramitacao_destino',
                  'tramitacao__status',
                  'materiaassunto__assunto',
                  'em_tramitacao',
                  'tipo_origem_externa',
                  'numero_origem_externa',
                  'ano_origem_externa',
                  'data_origem_externa',
                  'local_origem_externa',
                  ]

    def filter_signeds(self, queryset, name, value):
        """Filter by presence ('1') or absence of digital signatures stored
        in the matéria's metadata JSON."""
        q = Q()

        if not value:
            return queryset

        if value == '1':
            q &= Q(metadata__signs__texto_original__signs__0__isnull=False)
        else:
            q &= (Q(metadata__signs__texto_original__signs__isnull=True) |
                  Q(metadata__signs__texto_original__signs__len=0))

        return queryset.filter(q)

    def filter_numero(self, qs, name, value):
        # Strip thousands separators, then use a substring match for inputs
        # longer than two digits and an exact match otherwise.
        value = value.replace('.', '')
        value = value.replace(',', '')
        if len(value) > 2:
            qs = qs.filter(numero__icontains=value)
        else:
            qs = qs.filter(numero=value)
        return qs

    def filter_ementa(self, queryset, name, value):
        # Every whitespace-separated term must appear in the ementa (AND).
        texto = value.split()
        q = Q()
        for t in texto:
            q &= Q(ementa__icontains=t)

        return queryset.filter(q)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # self.filters['tipo'].label = 'Tipo de Matéria'
        self.filters[
            'autoria__autor__parlamentar_set__filiacao__partido'
        ].label = 'Partido do Autor'
        self.filters['autoria__autor__tipo'].label = _('Tipo de Autor')
        self.filters['relatoria__parlamentar_id'].label = _('Relatoria')
        self.filters['tramitacao__unidade_tramitacao_destino'].label = _(
            'Unidade de tramitação atual')
        self.filters['tramitacao__status'].label = _(
            'Status da tramitação atual')
        # NOTE(review): the assignment below duplicates the one above
        # (same filter, same label); kept byte-identical here, harmless.
        self.filters['tramitacao__status'].label = _(
            'Status da tramitação atual')
        self.filters['o'].label = _('Ordenação')

        # tipo_listagem is a plain form field, added to the generated form.
        self.form.fields['tipo_listagem'] = self.tipo_listagem

        row1 = to_row(
            [
                ('em_tramitacao', 2),
                ('tipo', 4),
                ('numero', 3),
                ('ano', 3),
            ]
        )

        row2 = to_row(
            [
                ('ementa', 12)
            ]
        )

        # row2 = to_row(
        #    [
        #        ('numeracao__numero_materia', 3),
        #        ('numero_protocolo', 3),
        #    ]
        #)

        row3 = to_row(
            [('data_apresentacao', 6),
             ('data_publicacao', 6)])

        row4 = to_row([
            ('autoria__autor', 0),
            (Button('pesquisar',
                    'Selecionar Autor',
                    css_class='btn btn-secondary btn-sm'), 2),
            (Button('limpar',
                    'limpar Autor',
                    css_class='btn btn-secondary btn-sm'), 2),
            #('autoria__primeiro_autor', 2),
            ('autoria__autor__tipo', 4),
            ('autoria__autor__parlamentar_set__filiacao__partido', 4)
        ])
        row6 = to_row(
            [('relatoria__parlamentar_id', 6),
             ])
        row7 = to_row(
            [('tramitacao__unidade_tramitacao_destino', 6),
             ('tramitacao__status', 6),
             ])
        row9 = to_row(
            [('materiaassunto__assunto', 6), ('indexacao', 6)])

        row8 = to_row(
            [
                ('o', 5),
                ('signeds', 4),
                ('tipo_listagem', 3)
            ])

        # row10 = to_row([
        #    ('tipo_origem_externa', 4),
        #    ('numero_origem_externa', 4),
        #    ('ano_origem_externa', 4),
        # ])

        # row11 = to_row([
        #    ('data_origem_externa', 8),
        #    ('local_origem_externa', 4)
        # ])

        self.form.helper = SaplFormHelper()
        self.form.helper.form_method = 'GET'
        self.form.helper.layout = Layout(
            Fieldset(
                _(
                    '''
                    Pesquisa Parametrizada<br>
                    <small>
                    <strong class="text-red">TODOS OS CAMPOS SÃO OPCIONAIS!</strong>
                    </small>
                    '''
                ),
                row1,
                to_row([
                    (row2, 'col'),
                    (form_actions(label=_('Processar Pesquisa')),
                     'col-md-auto mt-3 pt-3')
                ])
            ),
            Fieldset(
                _('Como listar os resultados da pesquisa'),
                row8,
            ),
            # Fieldset(_('Origem externa'),
            #         row10, row11
            #         ),
            Fieldset(
                _('Pesquisa Avançada'),
                HTML(autor_label),
                HTML(autor_modal),
                row4,
                row3,
                row7,
                # row6,
                # row9,
            )
        )

    @property
    def qs(self):
        """When filtering by party, keep only matérias presented while the
        author's affiliation to that party was in effect."""
        qs = qs_override_django_filter(self)

        if hasattr(self.form, 'cleaned_data') and self.form.cleaned_data[
                'autoria__autor__parlamentar_set__filiacao__partido']:
            # Either presented between affiliation start and end...
            q_data_inicio_e_fim = Q(data_apresentacao__gte=F(
                'autoria__autor__parlamentar_set__filiacao__data'),
                data_apresentacao__lte=F(
                'autoria__autor__parlamentar_set__filiacao__data_desfiliacao'))

            # ...or after the start of an affiliation that never ended.
            q_data_inicio = Q(
                data_apresentacao__gte=F(
                    'autoria__autor__parlamentar_set__filiacao__data'),
                autoria__autor__parlamentar_set__filiacao__data_desfiliacao__isnull=True
            )

            qs = qs.filter(
                q_data_inicio_e_fim | q_data_inicio
            )

        return qs
def pega_ultima_tramitacao():
    """Return, for each matéria, the id of its latest tramitação.

    NOTE(review): "latest" is taken as Max('id') per matéria; the
    data_encaminhamento annotation is computed but not used for
    ordering — confirm Max('id') matches the intended chronology.
    """
    return Tramitacao.objects.values(
        'materia_id').annotate(data_encaminhamento=Max(
            'data_encaminhamento'),
        id=Max('id')).values_list('id', flat=True)
def filtra_tramitacao_status(status):
    """Ids of matérias whose latest tramitação has the given status."""
    ultimas = pega_ultima_tramitacao()
    qs = Tramitacao.objects.filter(id__in=ultimas, status=status)
    return qs.distinct().values_list('materia_id', flat=True)
def filtra_tramitacao_destino(destino):
    """Ids of matérias whose latest tramitação points to the given
    destination unit."""
    ultimas = pega_ultima_tramitacao()
    qs = Tramitacao.objects.filter(
        id__in=ultimas, unidade_tramitacao_destino=destino)
    return qs.distinct().values_list('materia_id', flat=True)
def filtra_tramitacao_destino_and_status(status, destino):
    """Ids of matérias whose latest tramitação matches both the given
    status and destination unit."""
    ultimas = pega_ultima_tramitacao()
    qs = Tramitacao.objects.filter(
        id__in=ultimas,
        status=status,
        unidade_tramitacao_destino=destino)
    return qs.distinct().values_list('materia_id', flat=True)
class DespachoInicialCreateForm(forms.Form):
    """Bulk-create form: dispatch a matéria to several commissions at once.
    The target matéria comes from initial['materia']."""

    comissao = forms.ModelMultipleChoiceField(
        queryset=Comissao.objects.filter(ativa=True),
        widget=forms.CheckboxSelectMultiple(),
        label=Comissao._meta.verbose_name_plural)

    def __init__(self, *args, **kwargs):
        row1 = to_row(
            [('comissao', 12), ])

        self.helper = SaplFormHelper()
        self.helper.form_method = 'POST'
        self.helper.layout = SaplFormLayout(row1)

        super().__init__(*args, **kwargs)

    def clean(self):
        """Require at least one commission; collect an error for every
        commission that already has a dispatch for this matéria."""
        super().clean()

        comissoes = self.cleaned_data.get('comissao')
        # NOTE(review): this check runs before the is_valid() guard below,
        # so a field error on 'comissao' also surfaces as this message —
        # confirm the ordering is intentional.
        if not comissoes:
            msg = _('Você deve escolher pelo menos uma comissão.')
            raise ValidationError(msg)

        if not self.is_valid():
            return self.cleaned_data

        errors = []
        for comissao in comissoes:
            if DespachoInicial.objects.filter(
                materia=self.initial['materia'],
                comissao=comissao,
            ).exists():
                msg = _('Já existe um Despacho cadastrado para %s' %
                        comissao)
                errors.append(msg)

        if errors:
            raise ValidationError(errors)

        return self.cleaned_data
class DespachoInicialForm(ModelForm):
    """Form for a single initial dispatch of a matéria to an active
    commission; rejects duplicates for the same matéria."""

    comissao = forms.ModelChoiceField(
        queryset=Comissao.objects.filter(ativa=True), label=_('Comissão'))

    class Meta:
        model = DespachoInicial
        fields = ['comissao']

    def clean(self):
        super(DespachoInicialForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        # A matéria may be dispatched at most once to each commission.
        duplicados = DespachoInicial.objects.filter(
            materia=self.instance.materia,
            comissao=self.cleaned_data['comissao'],
        ).exclude(pk=self.instance.pk)
        if duplicados.exists():
            raise ValidationError(
                _('Já existe um Despacho cadastrado para %s' %
                  self.cleaned_data['comissao']))

        return self.cleaned_data
class AutoriaForm(ModelForm):
    """Create/update form for an Autoria (authorship) of a matéria."""

    tipo_autor = ModelChoiceField(label=_('Tipo Autor'),
                                  required=True,
                                  queryset=TipoAutor.objects.all(),
                                  empty_label=_('Selecione'),)

    data_relativa = forms.DateField(
        widget=forms.HiddenInput(), required=False)

    logger = logging.getLogger(__name__)

    def __init__(self, *args, **kwargs):
        super(AutoriaForm, self).__init__(*args, **kwargs)

        self.fields['primeiro_autor'].required = True

        if 'initial' in kwargs and 'materia' in kwargs['initial']:
            materia = kwargs['initial']['materia']
            # Default "primeiro autor" to True only when the matéria has
            # no authorship yet.
            self.fields['primeiro_autor'].initial = Autoria.objects.filter(
                materia=materia).count() == 0

        row1 = to_row([('tipo_autor', 4),
                       ('autor', 4),
                       ('primeiro_autor', 4)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(_('Autoria'),
                     row1, 'data_relativa', form_actions(label='Salvar')))

        # BUGFIX: was kwargs['instance'], which raised KeyError whenever the
        # form was instantiated without an explicit instance kwarg; .get()
        # keeps the original behavior when the kwarg is passed.
        if not kwargs.get('instance'):
            self.fields['autor'].choices = []

    class Meta:
        model = Autoria
        fields = ['tipo_autor', 'autor', 'primeiro_autor', 'data_relativa']

    def clean(self):
        """Reject a duplicate authorship (same autor for the same matéria)."""
        cd = super(AutoriaForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        autorias = Autoria.objects.filter(
            materia=self.instance.materia, autor=cd['autor'])
        pk = self.instance.pk

        # On update, exclude the record itself from the duplicate check.
        if ((not pk and autorias.exists()) or
                (pk and autorias.exclude(pk=pk).exists())):
            self.logger.error(
                "Esse Autor (pk={}) já foi cadastrado.".format(pk))
            raise ValidationError(_('Esse Autor já foi cadastrado.'))

        return cd
class AutoriaMultiCreateForm(Form):
    """Bulk form to add several authors to a matéria in one submission."""

    logger = logging.getLogger(__name__)

    tipo_autor = ModelChoiceField(label=_('Tipo Autor'),
                                  required=True,
                                  queryset=TipoAutor.objects.all(),
                                  empty_label=_('Selecione'),)

    data_relativa = forms.DateField(
        widget=forms.HiddenInput(), required=False)

    # Candidate authors to add; choices are filled client-side (see __init__).
    autor = ModelMultipleChoiceField(
        queryset=Autor.objects.all(),
        label=_('Possiveis Autores'),
        required=True,
        widget=CheckboxSelectMultiple)

    # Authors already attached to the matéria; carried as hidden state.
    autores = ModelMultipleChoiceField(
        queryset=Autor.objects.all(),
        required=False,
        widget=HiddenInput)

    primeiro_autor = forms.ChoiceField(
        required=True,
        choices=YES_NO_CHOICES,
        label="Primeiro Autor?"
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        if 'initial' in kwargs and 'autores' in kwargs['initial']:
            # Default "primeiro autor" to True only when there is no
            # existing authorship.
            self.fields['primeiro_autor'].initial = \
                kwargs['initial']['autores'].count() == 0

        row1 = to_row([('tipo_autor', 10), ('primeiro_autor', 2)])
        row2 = to_row([('autor', 12), ])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                _('Autorias'), row1, row2, 'data_relativa', 'autores',
                form_actions(label='Incluir Autores Selecionados')))

        # Emptied here; real options are loaded dynamically by tipo_autor.
        self.fields['autor'].choices = []

    def clean(self):
        """Require at least one selected author; errors on the hidden
        'autores' carrier field are discarded."""
        cd = super().clean()

        if 'autores' in self.errors:
            del self.errors['autores']

        if 'autor' not in cd or not cd['autor'].exists():
            self.logger.error(
                "Ao menos um autor deve ser selecionado para inclusão")
            raise ValidationError(
                _('Ao menos um autor deve ser selecionado para inclusão'))

        return cd
class AcessorioEmLoteFilterSet(django_filters.FilterSet):
    """Filter used to pick matérias for bulk creation of accessory
    documents (type and presentation-date range are mandatory)."""

    class Meta(FilterOverridesMetaMixin):
        model = MateriaLegislativa
        fields = ['tipo', 'data_apresentacao']

    def __init__(self, *args, **kwargs):
        super(AcessorioEmLoteFilterSet, self).__init__(*args, **kwargs)

        self.filters['tipo'].label = 'Tipo de Matéria'
        self.filters['data_apresentacao'].label = 'Data (Inicial - Final)'

        for field_name in ('tipo', 'data_apresentacao'):
            self.form.fields[field_name].required = True

        helper = SaplFormHelper()
        helper.form_method = 'GET'
        helper.layout = Layout(
            Fieldset(_('Documentos Acessórios em Lote'),
                     to_row([('tipo', 12)]),
                     to_row([('data_apresentacao', 12)]),
                     form_actions(label='Pesquisar')))
        self.form.helper = helper
class AnexadaEmLoteFilterSet(django_filters.FilterSet):
    """Filter used to pick matérias for bulk attachment (anexação)."""

    class Meta(FilterOverridesMetaMixin):
        model = MateriaLegislativa
        fields = ['tipo', 'data_apresentacao']

    def __init__(self, *args, **kwargs):
        super(AnexadaEmLoteFilterSet, self).__init__(*args, **kwargs)

        self.filters['tipo'].label = 'Tipo de Matéria'
        self.filters['data_apresentacao'].label = 'Data (Inicial - Final)'

        helper = SaplFormHelper()
        helper.form_method = 'GET'
        helper.layout = Layout(
            Fieldset(_('Pesquisa de Matérias'),
                     to_row([('tipo', 12)]),
                     to_row([('data_apresentacao', 12)]),
                     form_actions(label='Pesquisar')))
        self.form.helper = helper
class PrimeiraTramitacaoEmLoteFilterSet(django_filters.FilterSet):
    """Filter used to pick matérias for a bulk first tramitação
    (type required, date range optional)."""

    class Meta(FilterOverridesMetaMixin):
        model = MateriaLegislativa
        fields = ['tipo', 'data_apresentacao']

    def __init__(self, *args, **kwargs):
        super(PrimeiraTramitacaoEmLoteFilterSet, self).__init__(
            *args, **kwargs)

        self.filters['tipo'].label = 'Tipo de Matéria'
        self.filters['data_apresentacao'].label = 'Data (Inicial - Final)'

        self.form.fields['tipo'].required = True
        self.form.fields['data_apresentacao'].required = False

        helper = SaplFormHelper()
        helper.form_method = 'GET'
        helper.layout = Layout(
            Fieldset(_('Primeira Tramitação'),
                     to_row([('tipo', 12)]),
                     to_row([('data_apresentacao', 12)]),
                     form_actions(label='Pesquisar')))
        self.form.helper = helper
class TramitacaoEmLoteFilterSet(django_filters.FilterSet):
    """Filter used to pick matérias for bulk tramitação: type, current
    status and current destination unit are mandatory."""

    class Meta(FilterOverridesMetaMixin):
        model = MateriaLegislativa
        fields = ['tipo', 'data_apresentacao', 'tramitacao__status',
                  'tramitacao__unidade_tramitacao_destino']

    def __init__(self, *args, **kwargs):
        super(TramitacaoEmLoteFilterSet, self).__init__(
            *args, **kwargs)

        labels = {
            'tipo': _('Tipo de Matéria'),
            'data_apresentacao': _('Data (Inicial - Final)'),
            'tramitacao__unidade_tramitacao_destino':
                _('Unidade Destino (Último Destino)'),
            'tramitacao__status': _('Status'),
        }
        for name, label in labels.items():
            self.filters[name].label = label

        for name in ('tipo', 'tramitacao__status',
                     'tramitacao__unidade_tramitacao_destino'):
            self.form.fields[name].required = True
        self.form.fields['data_apresentacao'].required = False

        helper = SaplFormHelper()
        helper.form_method = 'GET'
        helper.layout = Layout(
            Fieldset(_('Tramitação em Lote'),
                     to_row([
                         ('tipo', 4),
                         ('tramitacao__unidade_tramitacao_destino', 4),
                         ('tramitacao__status', 4)]),
                     to_row([('data_apresentacao', 12)]),
                     form_actions(label=_('Pesquisar'))))
        self.form.helper = helper
class TipoProposicaoForm(ModelForm):
    """Creates/edits a TipoProposicao, binding it to a generic content
    type (e.g. TipoMateriaLegislativa or TipoDocumento) through the
    content_type / tipo_conteudo_related pair.
    """

    logger = logging.getLogger(__name__)

    content_type = forms.ModelChoiceField(
        queryset=ContentType.objects.all(),
        label=TipoProposicao._meta.get_field('content_type').verbose_name,
        required=True,
        help_text=TipoProposicao._meta.get_field('content_type').help_text)

    # Radio list is populated client-side; validation is skipped on purpose.
    tipo_conteudo_related_radio = ChoiceWithoutValidationField(
        label="Seleção de Tipo",
        required=False,
        widget=forms.RadioSelect())

    # Hidden pk of the concrete related object; validated in clean().
    tipo_conteudo_related = forms.IntegerField(
        widget=forms.HiddenInput(),
        required=True)

    class Meta:
        model = TipoProposicao
        fields = ['descricao',
                  'content_type',
                  'tipo_conteudo_related_radio',
                  'tipo_conteudo_related',
                  'perfis',
                  'tipo_autores',
                  'exige_assinatura_digital'
                  ]

        widgets = {'tipo_conteudo_related': forms.HiddenInput(),
                   'perfis': widgets.CheckboxSelectMultiple(),
                   'tipo_autores': widgets.CheckboxSelectMultiple()}

    def __init__(self, *args, **kwargs):
        """Builds the crispy layout, delegates to ModelForm and then
        restricts content_type choices to models that have a generic
        relation to TipoProposicao.
        """
        tipo_select = Fieldset(
            TipoProposicao._meta.verbose_name,
            Row(
                to_column(
                    (
                        Row(
                            to_column(('descricao', 12)),
                            to_column(('exige_assinatura_digital', 12)),
                            to_column(('tipo_autores', 12)),
                            to_column(('perfis', 12)),
                        ),
                        5
                    )
                ),
                to_column(
                    (
                        Row(
                            to_column(('content_type', 12)),
                            to_column(('tipo_conteudo_related_radio', 12)),
                            to_column(('tipo_conteudo_related', 12)),
                        ),
                        7
                    )
                ),
            )
        )

        self.helper = SaplFormHelper()
        self.helper.layout = SaplFormLayout(tipo_select)

        super(TipoProposicaoForm, self).__init__(*args, **kwargs)

        content_types = ContentType.objects.get_for_models(
            *models_with_gr_for_model(TipoProposicao))

        self.fields['content_type'].choices = [
            (ct.pk, ct) for k, ct in content_types.items()]
        # Sort choices by ContentType id.
        self.fields['content_type'].choices.sort(key=lambda x: x[0])

        if self.instance.pk:
            self.fields[
                'tipo_conteudo_related'].initial = self.instance.object_id

    def clean(self):
        """Validates the generic relation: at least one author type, a
        selected related object that exists, and no other TipoProposicao
        already pointing at the same related object.
        """
        super(TipoProposicaoForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cd = self.cleaned_data

        if not cd['tipo_autores'].exists():
            raise ValidationError(
                _('O Tipo de Proposição deve ser associado '
                  'a ao menos um Tipo de Autor.'))

        content_type = cd['content_type']

        if 'tipo_conteudo_related' not in cd or not cd[
                'tipo_conteudo_related']:
            self.logger.error("Seleção de Tipo não definida.")
            raise ValidationError(
                _('Seleção de Tipo não definida.'))

        if not content_type.model_class().objects.filter(
                pk=cd['tipo_conteudo_related']).exists():
            self.logger.error("O Registro definido (%s) não está na base de %s."
                              % (cd['tipo_conteudo_related'], content_type))
            raise ValidationError(
                _('O Registro definido (%s) não está na base de %s.'
                  ) % (cd['tipo_conteudo_related'], content_type))

        # Enforce uniqueness of the (content_type, object_id) pair: only
        # one TipoProposicao may point at a given related object.
        # NOTE(review): a legacy comment here claimed this uniqueness was
        # disabled, but the check below is active — confirm which is the
        # intended behavior.
        unique_value = self._meta.model.objects.filter(
            content_type=content_type, object_id=cd['tipo_conteudo_related'])

        if self.instance.pk:
            unique_value = unique_value.exclude(pk=self.instance.pk)

        unique_value = unique_value.first()

        if unique_value:
            # 'definido' fixed (was the typo 'defindo').
            raise ValidationError(
                _('Já existe um Tipo de Proposição (%s) '
                  'que foi definido como (%s) para (%s)'
                  ) % (unique_value,
                       content_type,
                       unique_value.tipo_conteudo_related))

        return self.cleaned_data

    @transaction.atomic
    def save(self, commit=False):
        """Resolves tipo_conteudo_related from the posted pk and always
        persists the instance (the commit flag is intentionally ignored;
        super().save(True) is called).
        """
        tipo_proposicao = self.instance

        assert tipo_proposicao.content_type

        tipo_proposicao.tipo_conteudo_related = \
            tipo_proposicao.content_type.model_class(
            ).objects.get(pk=self.cleaned_data['tipo_conteudo_related'])

        return super().save(True)
class TramitacaoEmLoteForm(ModelForm):
    """Applies one identical Tramitacao (procedural step) to every
    MateriaLegislativa the user ticked in the checkbox table rendered by
    the inline template below; optionally cascades the step to attached
    (anexadas) matérias.
    """

    logger = logging.getLogger(__name__)

    class Meta:
        model = Tramitacao
        fields = ['data_tramitacao',
                  'unidade_tramitacao_local',
                  'status',
                  'urgente',
                  'turno',
                  'unidade_tramitacao_destino',
                  'data_encaminhamento',
                  'data_fim_prazo',
                  'texto',
                  'user',
                  'ip']

        widgets = {'user': forms.HiddenInput(),
                   'ip': forms.HiddenInput()}

    def __init__(self, *args, **kwargs):
        super(TramitacaoEmLoteForm, self).__init__(*args, **kwargs)
        self.fields['data_tramitacao'].initial = timezone.now().date()

        # Destination choices: an empty option, then active commissions,
        # then órgãos, then parlamentares — in that display order.
        ust = UnidadeTramitacao.objects.select_related().all()
        unidade_tramitacao_destino = [('', '---------')] + [(ut.pk, ut)
                                                            for ut in ust if ut.comissao and ut.comissao.ativa]
        unidade_tramitacao_destino.extend(
            [(ut.pk, ut) for ut in ust if ut.orgao])
        unidade_tramitacao_destino.extend(
            [(ut.pk, ut) for ut in ust if ut.parlamentar])
        self.fields['unidade_tramitacao_destino'].choices = unidade_tramitacao_destino
        self.fields['urgente'].label = "Urgente? *"

        row1 = to_row([
            ('data_tramitacao', 4),
            ('data_encaminhamento', 4),
            ('data_fim_prazo', 4)
        ])
        row2 = to_row([
            ('unidade_tramitacao_local', 6),
            ('unidade_tramitacao_destino', 6),
        ])
        row3 = to_row([
            ('status', 4),
            ('urgente', 4),
            ('turno', 4)
        ])
        row4 = to_row([
            ('texto', 12)
        ])

        # Raw Django-template fragment listing the matérias with
        # checkboxes; rendered through crispy's HTML() with the view's
        # context (object_list, check).
        documentos_checkbox_HTML = '''
            <br\><br\><br\>
            <fieldset>
                <legend style="font-size: 24px;">Selecione as matérias para tramitação:</legend>
                <table class="table table-striped table-hover">
                    <div class="controls">
                        <div class="checkbox">
                            <label for="id_check_all">
                                <input type="checkbox" id="id_check_all" onchange="checkAll(this)" /> Marcar/Desmarcar Todos
                            </label>
                        </div>
                    </div>
                    <thead>
                        <tr><th>Matéria</th></tr>
                    </thead>
                    <tbody>
                        {% for materia in object_list %}
                        <tr>
                            <td>
                                <input type="checkbox" name="materias" value="{{materia.id}}" {% if check %} checked {% endif %}/>
                                <a href="{% url 'sapl.materia:materialegislativa_detail' materia.id %}">
                                    {{materia.tipo.sigla}} {{materia.tipo.descricao}} {{materia.numero}}/{{materia.ano}}
                                </a>
                            </td>
                        </tr>
                        {% endfor %}
                    </tbody>
                </table>
            </fieldset>
        '''

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                'Detalhes da tramitação:',
                row1, row2, row3, row4,
                HTML(documentos_checkbox_HTML),
                form_actions(label='Salvar')
            )
        )

    def clean(self):
        """Cross-validates the three dates: the step date cannot be in
        the future, and forwarding/deadline dates cannot precede it.
        """
        cleaned_data = super(TramitacaoEmLoteForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        if 'data_encaminhamento' in cleaned_data:
            data_enc_form = cleaned_data['data_encaminhamento']
        if 'data_fim_prazo' in cleaned_data:
            data_prazo_form = cleaned_data['data_fim_prazo']
        if 'data_tramitacao' in cleaned_data:
            data_tram_form = cleaned_data['data_tramitacao']

        # Only for new steps: the date must not be in the future.
        if not self.instance.data_tramitacao:
            if cleaned_data['data_tramitacao'] > timezone.now().date():
                self.logger.error('A data de tramitação ({}) deve ser '
                                  'menor ou igual a data de hoje ({})!'
                                  .format(cleaned_data['data_tramitacao'], timezone.now().date()))
                msg = _(
                    'A data de tramitação deve ser ' +
                    'menor ou igual a data de hoje!')
                raise ValidationError(msg)

        if data_enc_form:
            if data_enc_form < data_tram_form:
                self.logger.error('A data de encaminhamento ({}) deve ser '
                                  'maior que a data de tramitação ({})!'
                                  .format(data_enc_form, data_tram_form))
                msg = _('A data de encaminhamento deve ser ' +
                        'maior que a data de tramitação!')
                raise ValidationError(msg)

        if data_prazo_form:
            if data_prazo_form < data_tram_form:
                self.logger.error('A data fim de prazo ({}) deve ser '
                                  'maior que a data de tramitação ({})!'
                                  .format(data_prazo_form, data_tram_form))
                msg = _('A data fim de prazo deve ser ' +
                        'maior que a data de tramitação!')
                raise ValidationError(msg)

        return cleaned_data

    @transaction.atomic
    def save(self, commit=True):
        """Creates one Tramitacao per selected matéria id (read from
        self.initial['materias']) and returns the last one created.
        When the 'tramitacao_materia' AppConfig flag is on, the step is
        cascaded to attached matérias via bulk_create.
        """
        cd = self.cleaned_data
        materias = self.initial['materias']
        user = self.initial['user'] if 'user' in self.initial else None
        ip = self.initial['ip'] if 'ip' in self.initial else ''
        tramitar_anexadas = AppConfig.attr('tramitacao_materia')
        for mat_id in materias:
            mat = MateriaLegislativa.objects.get(id=mat_id)
            tramitacao = Tramitacao.objects.create(
                status=cd['status'],
                materia=mat,
                data_tramitacao=cd['data_tramitacao'],
                unidade_tramitacao_local=cd['unidade_tramitacao_local'],
                unidade_tramitacao_destino=cd['unidade_tramitacao_destino'],
                data_encaminhamento=cd['data_encaminhamento'],
                urgente=cd['urgente'],
                turno=cd['turno'],
                texto=cd['texto'],
                data_fim_prazo=cd['data_fim_prazo'],
                user=user,
                ip=ip
            )
            # Status indicator "F" (final) takes the matéria out of
            # tramitação.
            mat.em_tramitacao = False if tramitacao.status.indicador == "F" else True
            mat.save()

            if tramitar_anexadas:
                lista_tramitacao = []
                anexadas = lista_anexados(mat)
                for ml in anexadas:
                    # Cascade only when the attached matéria has no steps
                    # yet, or its latest destination matches this step's
                    # local unit.
                    if not ml.tramitacao_set.all() \
                            or ml.tramitacao_set.first() \
                            .unidade_tramitacao_destino == tramitacao.unidade_tramitacao_local:
                        ml.em_tramitacao = False if tramitacao.status.indicador == "F" else True
                        ml.save()
                        lista_tramitacao.append(Tramitacao(
                            status=tramitacao.status,
                            materia=ml,
                            data_tramitacao=tramitacao.data_tramitacao,
                            unidade_tramitacao_local=tramitacao.unidade_tramitacao_local,
                            data_encaminhamento=tramitacao.data_encaminhamento,
                            unidade_tramitacao_destino=tramitacao.unidade_tramitacao_destino,
                            urgente=tramitacao.urgente,
                            turno=tramitacao.turno,
                            texto=tramitacao.texto,
                            data_fim_prazo=tramitacao.data_fim_prazo,
                            user=tramitacao.user,
                            ip=tramitacao.ip
                        ))
                Tramitacao.objects.bulk_create(lista_tramitacao)

        return tramitacao
class ProposicaoForm(FileFieldCheckMixin, forms.ModelForm):
    """Authoring form for Proposicao.

    Besides the model fields it exposes helper fields used to look up a
    MateriaLegislativa to link to (tipo/numero/ano) or another, not yet
    received, Proposicao of the same author (vinculo_numero/vinculo_ano).
    """

    logger = logging.getLogger(__name__)

    # 'D' = uploaded file, 'T' = structured (articulated) text.
    TIPO_TEXTO_CHOICE = [
        ('D', _('Arquivo Digital')),
        ('T', _('Texto Articulado'))
    ]

    tipo_materia = forms.ModelChoiceField(
        label=TipoMateriaLegislativa._meta.verbose_name,
        required=False,
        queryset=TipoMateriaLegislativa.objects.all(),
        empty_label='Selecione')

    numero_materia = forms.CharField(
        label='Número', required=False)

    ano_materia = forms.CharField(
        label='Ano', required=False)

    vinculo_numero = forms.CharField(
        label='Número', required=False,)

    vinculo_ano = forms.CharField(
        label='Ano', required=False)

    tipo_texto = forms.ChoiceField(
        label=_('Tipo do Texto da Proposição'),
        required=False,
        choices=TIPO_TEXTO_CHOICE,
        widget=widgets.RadioSelect())

    # Resolved in clean() from tipo_materia/numero_materia/ano_materia.
    materia_de_vinculo = forms.ModelChoiceField(
        queryset=MateriaLegislativa.objects.all(),
        widget=widgets.HiddenInput(),
        required=False)

    # Resolved in clean() from vinculo_numero/vinculo_ano.
    proposicao_vinculada = forms.ModelChoiceField(
        queryset=Proposicao.objects.all(),
        widget=widgets.HiddenInput(),
        required=False)

    receber_recibo = forms.TypedChoiceField(
        choices=YES_NO_CHOICES,
        widget=widgets.HiddenInput(),
        required=False)

    numero_materia_futuro = forms.IntegerField(
        label='Número (Opcional)', required=False)

    especie = forms.ModelChoiceField(
        queryset=ContentType.objects.all(),
        label=_('Espécie da Proposição'),
        required=True)

    tipo = forms.ModelChoiceField(
        queryset=TipoProposicao.objects.all(),
        label=_('Tipo da Proposição'),
        required=True,
        help_text=_('<i class="text-red">NAD - Necessário Assinatura Digital</i>'))

    class Meta:
        model = Proposicao
        fields = ['tipo',
                  'receber_recibo',
                  'descricao',
                  'observacao',
                  'texto_original',
                  'materia_de_vinculo',
                  'proposicao_vinculada',
                  'tipo_materia',
                  'numero_materia',
                  'ano_materia',
                  'vinculo_numero',
                  'vinculo_ano',
                  'tipo_texto',
                  'hash_code',
                  'numero_materia_futuro',
                  'user',
                  'ip',
                  'ultima_edicao',
                  'especie']

        widgets = {
            'descricao': widgets.Textarea(attrs={'rows': 4}),
            'hash_code': forms.HiddenInput(),
            'user': forms.HiddenInput(),
            'ip': forms.HiddenInput(),
            'ultima_edicao': forms.HiddenInput()
        }

    def __init__(self, *args, **kwargs):
        """Assembles the crispy layout (which varies with AppConfig
        options) before calling the ModelForm constructor, then fills
        initial values when editing an existing instance.
        """
        self.texto_articulado_proposicao = AppConfig.attr(
            'texto_articulado_proposicao')

        self.receber_recibo = AppConfig.attr(
            'receber_recibo_proposicao')

        # The file/articulated-text radio only exists when the feature
        # is enabled; note this mutates the shared Meta.fields list.
        if not self.texto_articulado_proposicao:
            if 'tipo_texto' in self._meta.fields:
                self._meta.fields.remove('tipo_texto')
        else:
            if 'tipo_texto' not in self._meta.fields:
                self._meta.fields.append('tipo_texto')

        fields = [
            to_row([
                ('especie', 5),
                ('tipo', 7)]
            ),
            to_row([
                ('descricao', 12),
                ('observacao', 12)
            ])
        ]

        if AppConfig.objects.last().escolher_numero_materia_proposicao:
            fields.append(to_column(('numero_materia_futuro', 12)),)
        else:
            if 'numero_materia_futuro' in self._meta.fields:
                self._meta.fields.remove('numero_materia_futuro')

        if self.texto_articulado_proposicao:
            fields.append(
                to_column((InlineRadios('tipo_texto'), 5)),)

        fields.append(to_column((
            'texto_original', 7 if self.texto_articulado_proposicao else 12)))

        # Panel for linking to another, not-yet-received proposição.
        fields.append(
            Div(
                to_row([
                    (
                        Fieldset(_('Víncular a Proposição ainda não recebida')), 12),
                    (
                        HTML(
                            '<small class="form-text text-muted">Esta proposição é parte de outra de sua própria autoria? '
                            'Exemplo: Você está está registrando um '
                            'documento acessório de uma proposição que '
                            'ainda não foi recebida pelo protocolo, '
                            'informe aqui que proposição é essa! (Caso a proposição já tenha sido recebida pelo protocolo, o sistema fará vínculo automatícamente com a matéria, e não com a proposição.</small>'
                        ), 12),
                    (
                        Div(
                            to_row(
                                [
                                    ('vinculo_numero', 6),
                                    ('vinculo_ano', 6),
                                    (
                                        Alert(
                                            '',
                                            css_class="ementa_proposicao hidden alert-info",
                                            dismiss=False
                                        ),
                                        12
                                    )
                                ]
                            ),
                        ),
                        8
                    ),
                ]),
                css_id='vinculo_proposicao'
            )
        )

        # Panel for linking to an existing matéria legislativa.
        fields.append(
            Div(
                to_row([
                    (Fieldset(_('Víncular a uma Matéria Legislativa')), 12),
                    (
                        HTML(
                            '<small class="form-text text-muted">Colabore com o protocolo informando que esta '
                            'proposição se trata de uma matéria anexada a outra. '
                            'Exemplo: Você está criando uma proposição que é uma emenda, '
                            'então informe aqui de que projeto é essa emenda.</small>'), 12),
                    (
                        Div(
                            to_row(
                                [
                                    ('tipo_materia', 6),
                                    ('numero_materia', 3),
                                    ('ano_materia', 3),
                                    (
                                        Alert(
                                            '',
                                            css_class="ementa_materia hidden alert-info",
                                            dismiss=False
                                        ),
                                        12
                                    )
                                ]
                            ),
                        ),
                        12
                    ),
                ]),
                css_id="vinculo_materia"
            )
        )

        self.helper = SaplFormHelper()
        self.helper.layout = SaplFormLayout(*fields)

        super(ProposicaoForm, self).__init__(*args, **kwargs)

        # Restrict 'especie' to content types that have a generic
        # relation to TipoProposicao.
        content_types = ContentType.objects.get_for_models(
            *models_with_gr_for_model(TipoProposicao))
        self.fields['especie'].choices = [
            (ct.pk, ct) for k, ct in content_types.items()]
        # Sort by id
        self.fields['especie'].choices.sort(key=lambda x: x[0])

        if self.instance.pk:
            self.fields['especie'].initial = self.instance.tipo.content_type_id

            self.fields['tipo_texto'].initial = ''
            if self.instance.texto_original:
                self.fields['tipo_texto'].initial = 'D'
            if self.texto_articulado_proposicao:
                if self.instance.texto_articulado.exists():
                    self.fields['tipo_texto'].initial = 'T'

            if self.instance.materia_de_vinculo:
                self.fields[
                    'tipo_materia'
                ].initial = self.instance.materia_de_vinculo.tipo
                self.fields[
                    'numero_materia'
                ].initial = self.instance.materia_de_vinculo.numero
                self.fields[
                    'ano_materia'
                ].initial = self.instance.materia_de_vinculo.ano
            if self.instance.proposicao_vinculada:
                self.fields[
                    'vinculo_numero'
                ].initial = self.instance.proposicao_vinculada.numero_proposicao
                self.fields[
                    'vinculo_ano'
                ].initial = self.instance.proposicao_vinculada.data_envio.year \
                    if self.instance.proposicao_vinculada.data_envio else \
                    self.instance.proposicao_vinculada.ultima_edicao.year

    def clean_texto_original(self):
        """Rejects uploaded files larger than MAX_DOC_UPLOAD_SIZE."""
        texto_original = self.cleaned_data.get('texto_original', False)
        if texto_original and texto_original.size > MAX_DOC_UPLOAD_SIZE:
            raise ValidationError("O arquivo Texto Original deve ser menor que {0:.1f} mb, o tamanho atual desse arquivo é {1:.1f} mb"
                                  .format((MAX_DOC_UPLOAD_SIZE / 1024) / 1024, (texto_original.size / 1024) / 1024))
        return texto_original

    def gerar_hash(self, inst, receber_recibo):
        """Computes and stores the verification hash for the proposição.

        Saves the instance first (a pk is needed for the hash); when
        receber_recibo is True the hash stays empty — presumably the
        recibo flow fills it later (TODO confirm).
        """
        inst.save()
        if receber_recibo == True:
            inst.hash_code = ''
        else:
            if inst.texto_original:
                inst.hash_code = gerar_hash_arquivo(
                    inst.texto_original.path, str(inst.pk))
            elif inst.texto_articulado.exists():
                ta = inst.texto_articulado.first()
                inst.hash_code = 'P' + ta.hash() + SEPARADOR_HASH_PROPOSICAO + str(inst.pk)

    def clean(self):
        """Resolves materia_de_vinculo / proposicao_vinculada from the
        helper lookup fields and validates the optional future number.
        """
        super(ProposicaoForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cd = self.cleaned_data

        tm, am, nm = (cd.get('tipo_materia', ''),
                      cd.get('ano_materia', ''),
                      cd.get('numero_materia', ''))

        # The chosen future number must not collide with an existing
        # matéria of the same type in the current year.
        if cd['numero_materia_futuro'] and \
                'tipo' in cd and \
                MateriaLegislativa.objects.filter(tipo=cd['tipo'].tipo_conteudo_related,
                                                  ano=timezone.now().year,
                                                  numero=cd['numero_materia_futuro']):
            raise ValidationError(_("A matéria {} {}/{} já existe.".format(cd['tipo'].tipo_conteudo_related.descricao,
                                                                           cd['numero_materia_futuro'],
                                                                           timezone.now().year)))

        if tm and am and nm:
            try:
                self.logger.debug("Tentando obter objeto MateriaLegislativa (tipo_id={}, ano={}, numero={})."
                                  .format(tm, am, nm))
                materia_de_vinculo = MateriaLegislativa.objects.get(
                    tipo_id=tm,
                    ano=am,
                    numero=nm
                )
            except ObjectDoesNotExist:
                self.logger.error("Objeto MateriaLegislativa vinculada (tipo_id={}, ano={}, numero={}) não existe!"
                                  .format(tm, am, nm))
                raise ValidationError(_('Matéria Vinculada não existe!'))
            else:
                self.logger.info("MateriaLegislativa vinculada (tipo_id={}, ano={}, numero={}) com sucesso."
                                 .format(tm, am, nm))
                cd['materia_de_vinculo'] = materia_de_vinculo

        vn, va = (cd.get('vinculo_numero', ''),
                  cd.get('vinculo_ano', ''))

        if vn and va:
            # Linking to a proposição and a matéria at once is invalid.
            if cd['materia_de_vinculo']:
                raise ValidationError(
                    _('Não é possível vincular a uma proposição e a uma matéria ao mesmo tempo!'))

            self.logger.debug("Tentando obter objeto Proposição (numero={}, ano={})."
                              .format(vn, va))
            # Match the current user's own proposições by number and
            # either the sending year or (if never sent) the year of the
            # last edit.
            q = Q(
                autor=self.initial['user'].autor_set.first(),
                numero_proposicao=vn,
                data_envio__year=va
            ) | Q(
                autor=self.initial['user'].autor_set.first(),
                numero_proposicao=vn,
                data_envio__isnull=True,
                ultima_edicao__year=va
            )
            proposicao_vinculada = Proposicao.objects.filter(q).first()

            if not proposicao_vinculada:
                raise ValidationError(_('Proposição Vinculada não existe!'))
            else:
                self.logger.info("Proposição vinculada (ano={}, numero={}) com sucesso."
                                 .format(va, vn))
                # If the linked proposição was already received (has
                # generated content), link to the matéria instead.
                if not proposicao_vinculada.conteudo_gerado_related:
                    cd['proposicao_vinculada'] = proposicao_vinculada
                else:
                    cd['materia_de_vinculo'] = proposicao_vinculada.conteudo_gerado_related

        return cd

    def save(self, commit=True):
        """Persists the proposição; on first save also assigns ano and
        the next numero_proposicao according to the configured numbering
        sequence ('A' = per author, 'B' = global per year).
        """
        cd = self.cleaned_data
        inst = self.instance

        receber_recibo = AppConfig.objects.last().receber_recibo_proposicao

        if inst.pk:
            # Editing: reconcile text storage with the chosen text kind.
            if 'tipo_texto' in cd:
                if cd['tipo_texto'] == 'T' and inst.texto_original:
                    inst.texto_original.delete()
                elif cd['tipo_texto'] != 'T':
                    inst.texto_articulado.all().delete()
            if 'texto_original' in cd and\
                    not cd['texto_original'] and \
                    inst.texto_original:
                inst.texto_original.delete()
            self.gerar_hash(inst, receber_recibo)
            return super().save(commit)

        inst.ano = timezone.now().year
        sequencia_numeracao = AppConfig.attr('sequencia_numeracao_proposicao')
        if sequencia_numeracao == 'A':
            numero__max = Proposicao.objects.filter(
                autor=inst.autor,
                ano=timezone.now().year).aggregate(Max('numero_proposicao'))
        elif sequencia_numeracao == 'B':
            numero__max = Proposicao.objects.filter(
                ano=timezone.now().year).aggregate(Max('numero_proposicao'))
        # NOTE(review): if sequencia_numeracao is neither 'A' nor 'B',
        # numero__max is never bound and the next line raises NameError —
        # confirm the config can only hold these two values.
        numero__max = numero__max['numero_proposicao__max']
        inst.numero_proposicao = (
            numero__max + 1) if numero__max else 1

        self.gerar_hash(inst, receber_recibo)

        inst.save()

        return inst
class DevolverProposicaoForm(forms.ModelForm):
    """Records the devolution (return to author) of a Proposicao.

    Requires a textual justification; on save it clears the sending and
    receiving timestamps and unlocks any attached texto articulado.
    """

    logger = logging.getLogger(__name__)

    justificativa_devolucao = forms.CharField(
        required=False, widget=widgets.Textarea(attrs={'rows': 5}))

    class Meta:
        model = Proposicao
        fields = [
            'justificativa_devolucao',
            'observacao',
        ]

    def __init__(self, *args, **kwargs):
        # Plain ModelForm initialization; the crispy layout is built
        # afterwards.
        super().__init__(*args, **kwargs)

        devolucao_fieldset = Fieldset(
            _('Registro de Devolução'),
            to_column(('justificativa_devolucao', 12)),
            to_column(('observacao', 12)),
            to_column(
                (form_actions(label=_('Devolver'),
                              name='devolver',
                              css_class='btn-danger float-right'), 12)
            )
        )

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(devolucao_fieldset)

    def clean(self):
        super().clean()

        if not self.is_valid():
            return self.cleaned_data

        cd = self.cleaned_data

        if not cd.get('justificativa_devolucao'):
            # TODO notify the author by e-mail
            self.logger.error("Adicione uma Justificativa para devolução.")
            raise ValidationError(
                _('Adicione uma Justificativa para devolução.'))

        return cd

    @transaction.atomic
    def save(self, commit=False):
        cd = self.cleaned_data  # kept for parity with the validated flow

        inst = self.instance
        inst.data_devolucao = timezone.now()
        inst.data_recebimento = None
        inst.data_envio = None
        inst.save()

        # Reopen any articulated text for the author to edit again.
        if inst.texto_articulado.exists():
            ta = inst.texto_articulado.first()
            ta.privacidade = STATUS_TA_PRIVATE
            ta.editing_locked = False
            ta.save()

        inst.results = {
            'messages': {
                'success': [_('Devolução efetuada com sucesso.'), ]
            },
            'url': reverse('sapl.materia:receber-proposicao')
        }
        return inst
class ConfirmarProposicaoForm(ProposicaoForm):
    """Form used by the protocol staff to receive (incorporate) a
    Proposicao, generating a MateriaLegislativa or DocumentoAcessorio
    from it and, depending on configuration, a Protocolo entry.
    """

    tipo_readonly = forms.CharField(
        label=TipoProposicao._meta.verbose_name,
        required=False, widget=widgets.TextInput(
            attrs={'readonly': 'readonly'}))

    autor_readonly = forms.CharField(
        label=Autor._meta.verbose_name,
        required=False, widget=widgets.TextInput(
            attrs={'readonly': 'readonly'}))

    regime_tramitacao = forms.ModelChoiceField(label="Regime de tramitação",
                                               required=False, queryset=RegimeTramitacao.objects.all())

    gerar_protocolo = forms.ChoiceField(
        required=False,
        label=_(
            'Gerar Protocolo na incorporação?'),
        choices=YES_NO_CHOICES,
        widget=widgets.RadioSelect())

    numero_de_paginas = forms.IntegerField(required=False, min_value=0,
                                           label=_('Número de Páginas'),)

    class Meta:
        model = Proposicao
        fields = [
            'data_envio',
            'descricao',
            'observacao',
            'gerar_protocolo',
            'numero_de_paginas',
            'numero_materia_futuro'
        ]
        widgets = {
            'descricao': widgets.Textarea(
                attrs={'readonly': 'readonly', 'rows': 4}),
            'data_envio': widgets.DateTimeInput(
                attrs={'readonly': 'readonly'}),
        }

    def __init__(self, *args, **kwargs):
        """Shapes Meta.fields according to the incorporation policy
        ('proposicao_incorporacao_obrigatoria': 'O'/'C'/'N') and the
        kind of content the proposição generates, then builds the
        layout. Deliberately skips ProposicaoForm.__init__.
        """
        self.proposicao_incorporacao_obrigatoria = \
            AppConfig.attr('proposicao_incorporacao_obrigatoria')

        # The protocolo question only appears in the conditional mode.
        if self.proposicao_incorporacao_obrigatoria != 'C':
            if 'gerar_protocolo' in self._meta.fields:
                self._meta.fields.remove('gerar_protocolo')
        else:
            if 'gerar_protocolo' not in self._meta.fields:
                self._meta.fields.append('gerar_protocolo')

        if self.proposicao_incorporacao_obrigatoria == 'N':
            if 'numero_de_paginas' in self._meta.fields:
                self._meta.fields.remove('numero_de_paginas')
        else:
            if 'numero_de_paginas' not in self._meta.fields:
                self._meta.fields.append('numero_de_paginas')

        self.instance = kwargs.get('instance', None)

        if not self.instance:
            self.logger.error("Erro na Busca por proposição a incorporar")
            raise ValueError(_('Erro na Busca por proposição a incorporar'))

        # Document-type proposições take neither pagination, protocolo
        # nor tramitação regime.
        if self.instance.tipo.content_type.model_class() == TipoDocumento:
            if 'numero_de_paginas' in self._meta.fields:
                self._meta.fields.remove('numero_de_paginas')
            if 'gerar_protocolo' in self._meta.fields:
                self._meta.fields.remove('gerar_protocolo')
            if 'regime_tramitacao' in self._meta.fields:
                self._meta.fields.remove('regime_tramitacao')

        # Skip ProposicaoForm.__init__ on purpose: call the grandparent
        # (forms.ModelForm) directly.
        super(ProposicaoForm, self).__init__(*args, **kwargs)

        if self.instance.tipo.content_type.model_class() == \
                TipoMateriaLegislativa:
            self.fields['regime_tramitacao'].required = True

        self.fields['especie'].required = False
        self.fields['tipo'].required = False

        fields = [
            Fieldset(
                _('Dados Básicos'),
                to_row(
                    [
                        ('tipo_readonly', 5),
                        ('data_envio', 3),
                        ('autor_readonly', 4),
                        ('numero_materia_futuro', 3),
                        ('descricao', 12),
                        ('observacao', 12)
                    ]
                )
            )
        ]

        # Drop the future-number column when the feature is off or the
        # proposição has no number chosen.
        if not AppConfig.objects.last().escolher_numero_materia_proposicao or \
                not self.instance.numero_materia_futuro:
            if 'numero_materia_futuro' in self._meta.fields:
                del fields[0][0][3]

        fields.append(
            Div(
                to_row([
                    (Fieldset(_('Víncular a uma Matéria Legislativa')), 12),
                    (
                        HTML(
                            '''<small class="form-text text-muted">
                            O Autor da proposição vinculou esta proposição a uma
                            Matéria Legislativa. Verifique se está correto
                            para prosseguir com a Incorporação.</small>
                            '''
                            if self.instance.materia_de_vinculo else
                            '''
                            <small class="form-text text-muted">
                            Você pode fazer a anexação diretamente aqui na
                            incorporaçao, basta informar a qual
                            matéria legislativa deseja incorporar.</small>
                            '''
                        ), 12),
                    (
                        Div(
                            to_row(
                                [
                                    ('tipo_materia', 6),
                                    ('numero_materia', 3),
                                    ('ano_materia', 3),
                                    (
                                        Alert(
                                            '',
                                            css_class="ementa_materia hidden alert-info",
                                            dismiss=False
                                        ),
                                        12
                                    )
                                ]
                            ),
                        ),
                        12
                    ),
                ]),
                css_id="vinculo_materia"
            )
        )

        itens_incorporacao = []
        if self.instance.tipo.content_type.model_class() == \
                TipoMateriaLegislativa:
            itens_incorporacao = [to_column(('regime_tramitacao', 4))]

            if self.proposicao_incorporacao_obrigatoria == 'C':
                itens_incorporacao.append(to_column((InlineRadios(
                    'gerar_protocolo'), 4)))

            if self.proposicao_incorporacao_obrigatoria != 'N':
                itens_incorporacao.append(to_column(('numero_de_paginas', 4)))

        itens_incorporacao.append(
            to_column(
                (form_actions(label=_('Incorporar'),
                              name='incorporar'), 12)
            )
        )

        fields.append(
            Fieldset(_('Registro de Incorporação'), Row(*itens_incorporacao)))

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(*fields)

        self.fields['tipo_readonly'].initial = self.instance.tipo.descricao
        self.fields['autor_readonly'].initial = str(self.instance.autor)

        if self.instance.numero_materia_futuro:
            self.fields['numero_materia_futuro'].initial = self.instance.numero_materia_futuro

        if self.instance.materia_de_vinculo:
            self.fields[
                'tipo_materia'
            ].initial = self.instance.materia_de_vinculo.tipo
            self.fields[
                'numero_materia'
            ].initial = self.instance.materia_de_vinculo.numero
            self.fields[
                'ano_materia'
            ].initial = self.instance.materia_de_vinculo.ano

        if self.instance.proposicao_vinculada:
            self.fields[
                'vinculo_numero'
            ].initial = self.instance.proposicao_vinculada.numero_proposicao
            self.fields[
                'vinculo_ano'
            ].initial = self.instance.proposicao_vinculada.ano

        if self.proposicao_incorporacao_obrigatoria == 'C':
            self.fields['gerar_protocolo'].initial = True

    def clean(self):
        """Requires a configured numbering sequence, reuses
        ProposicaoForm.clean for the matéria/proposição lookups, and
        enforces per-content-type rules (regime for matérias, a target
        matéria for documents).
        """
        super(ConfirmarProposicaoForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        numeracao = AppConfig.attr('sequencia_numeracao_proposicao')
        if not numeracao:
            self.logger.error("A sequência de numeração (por ano ou geral)"
                              " não foi configurada para a aplicação em "
                              "tabelas auxiliares")
            raise ValidationError("A sequência de numeração (por ano ou geral)"
                                  " não foi configurada para a aplicação em "
                                  "tabelas auxiliares")

        cd = ProposicaoForm.clean(self)

        if self.instance.tipo.content_type.model_class() == \
                TipoMateriaLegislativa:
            if 'regime_tramitacao' not in cd or\
                    not cd['regime_tramitacao']:
                self.logger.error("Regime de Tramitação deve ser informado.")
                raise ValidationError(
                    _('Regime de Tramitação deve ser informado.'))
        elif self.instance.tipo.content_type.model_class(
        ) == TipoDocumento and not cd['materia_de_vinculo']:
            self.logger.error("Documentos não podem ser incorporados sem definir "
                              "para qual Matéria Legislativa ele se destina.")
            raise ValidationError(
                _('Documentos não podem ser incorporados sem definir '
                  'para qual Matéria Legislativa ele se destina.'))

        return cd

    @transaction.atomic
    def save(self, commit=False):
        """Incorporates the proposição: generates the concrete content
        (MateriaLegislativa or DocumentoAcessorio), records results
        messages/redirect url on the instance, and optionally creates a
        Protocolo entry according to the configured policy.
        """
        # TODO Implementar workflow entre protocolo e autores
        cd = self.cleaned_data

        self.instance.justificativa_devolucao = ''
        self.instance.data_devolucao = None
        self.instance.data_recebimento = timezone.now()
        self.instance.materia_de_vinculo = cd['materia_de_vinculo']

        # Freeze any articulated text as immutable/public.
        if self.instance.texto_articulado.exists():
            ta = self.instance.texto_articulado.first()
            ta.privacidade = STATUS_TA_IMMUTABLE_PUBLIC
            ta.editing_locked = True
            ta.save()

        self.instance.save()

        """
        TipoProposicao possui conteúdo genérico para a modelegam de tipos
        relacionados e, a esta modelagem, qual o objeto que está associado.
        Porem, cada registro a ser gerado pode possuir uma estrutura diferente,
        é os casos básicos já implementados,
        TipoDocumento e TipoMateriaLegislativa, que são modelos utilizados
        em DocumentoAcessorio e MateriaLegislativa geradas,
        por sua vez a partir de uma Proposição.
        Portanto para estas duas e para outras implementações que possam surgir
        possuindo com matéria prima uma Proposição, dada sua estrutura,
        deverá contar também com uma implementação particular aqui no código
        abaixo.
        """
        self.instance.results = {
            'messages': {
                'success': [_('Proposição incorporada com sucesso'), ]
            },
            'url': reverse('sapl.materia:receber-proposicao')
        }
        proposicao = self.instance
        conteudo_gerado = None

        if self.instance.tipo.content_type.model_class(
        ) == TipoMateriaLegislativa:
            numeracao = None
            try:
                self.logger.debug(
                    "Tentando obter modelo de sequência de numeração.")
                numeracao = AppConfig.objects.last(
                ).sequencia_numeracao_protocolo
            except AttributeError as e:
                self.logger.error("Erro ao obter modelo. " + str(e))
                pass

            # A per-type sequence overrides the global configuration.
            tipo = self.instance.tipo.tipo_conteudo_related
            if tipo.sequencia_numeracao:
                numeracao = tipo.sequencia_numeracao
            ano = timezone.now().year

            # 'A' = per year, 'L' = per legislature, 'U' = unique/global.
            if numeracao == 'A':
                numero = MateriaLegislativa.objects.filter(
                    ano=ano, tipo=tipo).aggregate(Max('numero'))
            elif numeracao == 'L':
                legislatura = Legislatura.objects.filter(
                    data_inicio__year__lte=ano,
                    data_fim__year__gte=ano).first()
                data_inicio = legislatura.data_inicio
                data_fim = legislatura.data_fim
                numero = MateriaLegislativa.objects.filter(
                    data_apresentacao__gte=data_inicio,
                    data_apresentacao__lte=data_fim,
                    tipo=tipo).aggregate(
                    Max('numero'))
            elif numeracao == 'U':
                numero = MateriaLegislativa.objects.filter(
                    tipo=tipo).aggregate(Max('numero'))

            # NOTE(review): if numeracao is still None here, 'numero' was
            # never bound and the next line raises NameError — confirm
            # numeracao can only be 'A'/'L'/'U' at this point.
            if numeracao is None:
                numero['numero__max'] = 0

            if cd['numero_materia_futuro'] and not MateriaLegislativa.objects.filter(tipo=tipo,
                                                                                     ano=ano,
                                                                                     numero=cd['numero_materia_futuro']):
                max_numero = cd['numero_materia_futuro']
            else:
                max_numero = numero['numero__max'] + \
                    1 if numero['numero__max'] else 1

            # basic data
            materia = MateriaLegislativa()
            materia.numero = max_numero
            materia.tipo = tipo
            materia.ementa = proposicao.descricao
            materia.ano = ano
            materia.data_apresentacao = timezone.now()
            materia.em_tramitacao = True
            materia.regime_tramitacao = cd['regime_tramitacao']

            if proposicao.texto_original:
                materia.texto_original = File(
                    proposicao.texto_original,
                    os.path.basename(proposicao.texto_original.path))

            # NOTE(review): saved twice — presumably so the FileField is
            # re-persisted once a pk exists; confirm before simplifying.
            materia.save()
            materia.save()
            conteudo_gerado = materia

            if proposicao.texto_articulado.exists():
                ta = proposicao.texto_articulado.first()
                ta_materia = ta.clone_for(materia)
                ta_materia.editing_locked = True
                ta_materia.privacidade = STATUS_TA_IMMUTABLE_PUBLIC
                ta_materia.save()

            self.instance.results['messages']['success'].append(_(
                'Matéria Legislativa registrada com sucesso (%s)'
            ) % str(materia))

            # authorship
            autoria = Autoria()
            autoria.autor = proposicao.autor
            autoria.materia = materia
            autoria.primeiro_autor = True
            autoria.save()

            # Extra authorships from the digital signatures found in the
            # uploaded file's metadata (best-effort: failures are only
            # logged).
            try:
                if isinstance(autoria.autor.autor_related, Parlamentar):
                    signs = list(
                        map(lambda s: s[0],
                            self.instance.metadata['signs']['texto_original']['signs']
                            )
                    )
                    parlamentares = Parlamentar.objects.filter(
                        nome_completo__in=signs
                    ).exclude(
                        pk=autoria.autor.autor_related.id
                    )
                    for p in parlamentares:
                        autoria = Autoria()
                        autoria.autor = p.autor.first()
                        autoria.materia = materia
                        autoria.primeiro_autor = True
                        autoria.save()
            except Exception as e:
                self.logger.debug(
                    f"Erro no Registro de multiplas autorias. Proposicao id={proposicao.id}")

            autores = materia.autores.all()

            self.instance.results['messages']['success'].append(_(
                'Autoria registrada para (%s)'
            ) % ', '.join(map(lambda a: a.nome, autores)))

            # linked matéria (anexação)
            if proposicao.materia_de_vinculo:
                anexada = Anexada()
                anexada.materia_principal = proposicao.materia_de_vinculo
                anexada.materia_anexada = materia
                anexada.data_anexacao = timezone.now()
                anexada.save()

                self.instance.results['messages']['success'].append(_(
                    'Matéria anexada a (%s)'
                ) % str(anexada.materia_principal))

            self.instance.results['url'] = reverse(
                'sapl.materia:materialegislativa_detail',
                kwargs={'pk': materia.pk})

            # Re-point proposições that were linked to this one at the
            # newly generated matéria.
            if proposicao.proposicao_vinculada_set.exists():
                vinculos = proposicao.proposicao_vinculada_set.all()
                for v in vinculos:
                    v.materia_de_vinculo = materia
                    v.proposicao_vinculada = None
                    v.save()

            pass
        elif self.instance.tipo.content_type.model_class() == TipoDocumento:

            # basic data
            doc = DocumentoAcessorio()
            doc.materia = proposicao.materia_de_vinculo
            doc.autor = str(proposicao.autor)
            doc.tipo = proposicao.tipo.tipo_conteudo_related

            doc.ementa = proposicao.descricao
            """ FIXME verificar questão de nome e data de documento,
            doc acessório. Possivelmente pode possuir data anterior a
            data de envio e/ou recebimento dada a incorporação.
            """
            doc.nome = str(proposicao.tipo.tipo_conteudo_related)[:30]
            doc.data = proposicao.data_envio

            doc.arquivo = proposicao.texto_original = File(
                proposicao.texto_original,
                os.path.basename(proposicao.texto_original.path))
            # NOTE(review): saved twice, same pattern as materia above.
            doc.save()
            doc.save()
            conteudo_gerado = doc

            self.instance.results['messages']['success'].append(_(
                'Documento Acessório registrado com sucesso e anexado (%s)'
            ) % str(doc.materia))

            self.instance.results['url'] = reverse(
                'sapl.materia:documentoacessorio_detail',
                kwargs={'pk': doc.pk})

            if proposicao.proposicao_vinculada_set.exists():
                vinculos = proposicao.proposicao_vinculada_set.all()
                for v in vinculos:
                    v.materia_de_vinculo = doc.materia
                    v.proposicao_vinculada = None
                    v.save()

        proposicao.conteudo_gerado_related = conteudo_gerado
        proposicao.save()
        proposicao.save()

        # if self.instance.tipo.content_type.model_class() == TipoDocumento:
        #    return self.instance

        # Never generate a protocolo.
        if self.proposicao_incorporacao_obrigatoria == 'N':
            return self.instance

        # Happens when proposicao_incorporacao_obrigatoria == 'C'
        # (conditional) and gerar_protocolo == False.
        if 'gerar_protocolo' not in cd or cd['gerar_protocolo'] == 'False':
            return self.instance

        # Remaining cases: proposicao_incorporacao_obrigatoria == 'C'
        # with gerar_protocolo == True, or
        # proposicao_incorporacao_obrigatoria == 'O' — both identical.

        """
        apesar de TipoProposicao estar com conteudo e tipo conteudo genérico,
        aqui na incorporação de proposições, para gerar protocolo, cada caso
        possível de conteudo em tipo de proposição deverá ser tratado
        isoladamente justamente por Protocolo não estar generalizado com
        GenericForeignKey
        """

        numeracao = AppConfig.attr('sequencia_numeracao_protocolo')
        if numeracao == 'A':
            nm = Protocolo.objects.filter(
                ano=timezone.now().year).aggregate(Max('numero'))
        elif numeracao == 'L':
            # NOTE(review): this branch reads 'tipo', which is only bound
            # in the TipoMateriaLegislativa branch above — for a
            # TipoDocumento proposição it would raise NameError; confirm
            # whether this path is reachable.
            legislatura = Legislatura.objects.filter(
                data_inicio__year__lte=timezone.now().year,
                data_fim__year__gte=timezone.now().year).first()
            data_inicio = legislatura.data_inicio
            data_fim = legislatura.data_fim
            nm = MateriaLegislativa.objects.filter(
                data_apresentacao__gte=data_inicio,
                data_apresentacao__lte=data_fim,
                tipo=tipo).aggregate(Max('numero'))
        else:
            # numeracao == 'U' or not set
            nm = Protocolo.objects.all().aggregate(Max('numero'))

        protocolo = Protocolo()
        protocolo.numero = (nm['numero__max'] + 1) if nm['numero__max'] else 1
        protocolo.ano = timezone.now().year

        protocolo.tipo_protocolo = '1'
        protocolo.interessado = str(proposicao.autor)[
            :200]  # max length 200
        protocolo.autor = proposicao.autor
        protocolo.assunto_ementa = proposicao.descricao
        protocolo.numero_paginas = cd['numero_de_paginas']
        protocolo.anulado = False

        protocolo.tipo_conteudo_protocolado = proposicao.tipo.tipo_conteudo_related
        protocolo.conteudo_protocolado = conteudo_gerado

        protocolo.tipo_processo = '0'
        if self.instance.tipo.content_type.model_class(
        ) in (TipoMateriaLegislativa, TipoDocumento):
            # protocolo.tipo_materia = proposicao.tipo.tipo_conteudo_related
            protocolo.tipo_processo = '1'

        protocolo.save()

        self.instance.results['messages']['success'].append(_(
            'Protocolo realizado com sucesso'))
        self.instance.results['url'] = reverse(
            'sapl.protocoloadm:protocolo_mostrar',
            kwargs={'pk': protocolo.pk})

        conteudo_gerado.numero_protocolo = protocolo.numero
        conteudo_gerado.save()

        return self.instance
class MateriaAssuntoForm(ModelForm):
    """ModelForm linking a MateriaLegislativa to an Assunto.

    The ``materia`` field is rendered as a hidden input because the
    matéria is fixed by the view that instantiates this form; the user
    only chooses the ``assunto``.
    """

    class Meta:
        model = MateriaAssunto
        fields = ['materia', 'assunto']
        widgets = {'materia': forms.HiddenInput()}
class EtiquetaPesquisaForm(forms.Form):
    """Search form used to generate etiquetas (labels).

    Matérias can be filtered by type, by a date interval and by a
    process-number interval.  Every field is optional, but each
    initial/final pair must be filled in together and must be ordered.
    """

    logger = logging.getLogger(__name__)

    tipo_materia = forms.ModelChoiceField(
        label=TipoMateriaLegislativa._meta.verbose_name,
        queryset=TipoMateriaLegislativa.objects.all(),
        required=False,
        empty_label='Selecione')

    data_inicial = forms.DateField(
        label='Data Inicial',
        required=False,
        widget=forms.DateInput(format='%d/%m/%Y')
    )

    data_final = forms.DateField(
        label='Data Final',
        required=False,
        widget=forms.DateInput(format='%d/%m/%Y')
    )

    processo_inicial = forms.IntegerField(
        label='Processo Inicial',
        required=False)

    processo_final = forms.IntegerField(
        label='Processo Final',
        required=False)

    def __init__(self, *args, **kwargs):
        super(EtiquetaPesquisaForm, self).__init__(*args, **kwargs)

        row1 = to_row(
            [('tipo_materia', 6),
             ('data_inicial', 3),
             ('data_final', 3)])
        row2 = to_row(
            [('processo_inicial', 6),
             ('processo_final', 6)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                ('Formulário de Etiqueta'),
                row1, row2,
                form_actions(label='Pesquisar')
            )
        )

    def clean(self):
        """Validate the (data_inicial, data_final) and
        (processo_inicial, processo_final) pairs: both members of a pair
        must be supplied together and the final value may not be smaller
        than the initial one.
        """
        super(EtiquetaPesquisaForm, self).clean()

        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data

        # Was either date field filled in?
        if cleaned_data['data_inicial'] or cleaned_data['data_final']:
            # Both members of the pair are required together.
            # Typo fixed in the user-facing message: "Incial" -> "Inicial".
            if (not cleaned_data['data_inicial'] or
                    not cleaned_data['data_final']):
                self.logger.error("Caso pesquise por data, os campos de Data Inicial e "
                                  "Data Final devem ser preenchidos obrigatoriamente")
                raise ValidationError(_(
                    'Caso pesquise por data, os campos de Data Inicial e ' +
                    'Data Final devem ser preenchidos obrigatoriamente'))
            # Both present: the final date may not precede the initial one.
            elif cleaned_data['data_final'] < cleaned_data['data_inicial']:
                self.logger.error("A Data Final ({}) não pode ser menor que a Data Inicial({})."
                                  .format(cleaned_data['data_final'], cleaned_data['data_inicial']))
                raise ValidationError(_(
                    'A Data Final não pode ser menor que a Data Inicial'))

        # Same pairing rules for the process-number interval.
        if (cleaned_data['processo_inicial'] or
                cleaned_data['processo_final']):
            if (not cleaned_data['processo_inicial'] or
                    not cleaned_data['processo_final']):
                self.logger.error("Caso pesquise por número de processo, os campos de "
                                  "Processo Inicial e Processo Final "
                                  "devem ser preenchidos obrigatoriamente")
                raise ValidationError(_(
                    'Caso pesquise por número de processo, os campos de ' +
                    'Processo Inicial e Processo Final ' +
                    'devem ser preenchidos obrigatoriamente'))
            elif (cleaned_data['processo_final'] <
                    cleaned_data['processo_inicial']):
                self.logger.error("O processo final ({}) não pode ser menor que o inicial ({})."
                                  .format(cleaned_data['processo_final'], cleaned_data['processo_inicial']))
                raise ValidationError(_(
                    'O processo final não pode ser menor que o inicial'))

        return cleaned_data
class FichaPesquisaForm(forms.Form):
    """Search form used to generate fichas (record cards).

    Unlike EtiquetaPesquisaForm, every field here is required
    (``required=True`` is the Django default).
    """

    logger = logging.getLogger(__name__)

    tipo_materia = forms.ModelChoiceField(
        label=TipoMateriaLegislativa._meta.verbose_name,
        queryset=TipoMateriaLegislativa.objects.all(),
        empty_label='Selecione')

    data_inicial = forms.DateField(
        label='Data Inicial',
        widget=forms.DateInput(format='%d/%m/%Y')
    )

    data_final = forms.DateField(
        label='Data Final',
        widget=forms.DateInput(format='%d/%m/%Y')
    )

    def __init__(self, *args, **kwargs):
        super(FichaPesquisaForm, self).__init__(*args, **kwargs)

        row1 = to_row(
            [('tipo_materia', 6),
             ('data_inicial', 3),
             ('data_final', 3)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                ('Formulário de Ficha'),
                row1,
                form_actions(label='Pesquisar')
            )
        )

    def clean(self):
        """Ensure the final date is not earlier than the initial date."""
        super(FichaPesquisaForm, self).clean()

        # The original checked is_valid() twice in a row; once is enough.
        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data

        if cleaned_data['data_final'] < cleaned_data['data_inicial']:
            self.logger.error("A Data Final ({}) não pode ser menor que a Data Inicial ({})."
                              .format(cleaned_data['data_final'], cleaned_data['data_inicial']))
            raise ValidationError(_(
                'A Data Final não pode ser menor que a Data Inicial'))
        return cleaned_data
class FichaSelecionaForm(forms.Form):
    """Form that lets the user pick, via radio buttons, which matéria's
    ficha should be printed."""

    materia = forms.ModelChoiceField(
        widget=forms.RadioSelect,
        queryset=MateriaLegislativa.objects.all(),
        label='')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        materia_row = to_row([('materia', 12)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                ('Selecione a ficha que deseja imprimir'),
                materia_row,
                form_actions(label='Gerar Impresso')
            )
        )
class ExcluirTramitacaoEmLote(forms.Form):
    """Form for deleting, in bulk, every Tramitacao matching the given
    date, local unit, destination unit and status.

    All four fields are required; ``clean`` additionally rejects the
    form when no matching Tramitacao exists.
    """

    logger = logging.getLogger(__name__)

    data_tramitacao = forms.DateField(required=True,
                                      label=_('Data da Tramitação'))

    unidade_tramitacao_local = forms.ModelChoiceField(label=_('Unidade Local'),
                                                      required=True,
                                                      queryset=UnidadeTramitacao.objects.all(),
                                                      empty_label='------')

    unidade_tramitacao_destino = forms.ModelChoiceField(label=_('Unidade Destino'),
                                                        required=True,
                                                        queryset=UnidadeTramitacao.objects.all(),
                                                        empty_label='------')

    status = forms.ModelChoiceField(label=_('Status'),
                                    required=True,
                                    queryset=StatusTramitacao.objects.all(),
                                    empty_label='------')

    # __init__ placed before clean() for consistency with the other forms
    # in this module.
    def __init__(self, *args, **kwargs):
        super(ExcluirTramitacaoEmLote, self).__init__(*args, **kwargs)

        row1 = to_row(
            [('data_tramitacao', 6),
             ('status', 6), ])
        row2 = to_row(
            [('unidade_tramitacao_local', 6),
             ('unidade_tramitacao_destino', 6)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(_('Dados das Tramitações'),
                     row1,
                     row2,
                     HTML(" "),
                     form_actions(label='Excluir')
                     )
        )

    def clean(self):
        """Reject the form when no Tramitacao matches all four filters."""
        super(ExcluirTramitacaoEmLote, self).clean()

        cleaned_data = self.cleaned_data
        if not self.is_valid():
            return cleaned_data

        data_tramitacao = cleaned_data['data_tramitacao']
        unidade_tramitacao_local = cleaned_data['unidade_tramitacao_local']
        unidade_tramitacao_destino = cleaned_data['unidade_tramitacao_destino']
        status = cleaned_data['status']

        tramitacao_set = Tramitacao.objects.filter(data_tramitacao=data_tramitacao,
                                                   unidade_tramitacao_local=unidade_tramitacao_local,
                                                   unidade_tramitacao_destino=unidade_tramitacao_destino,
                                                   status=status)
        if not tramitacao_set.exists():
            # Log message fixed: the previous concatenation produced
            # unbalanced parentheses and ran the parts together.
            self.logger.error("Não existem tramitações com os dados informados "
                              "(data_tramitacao={}, unidade_tramitacao_local={}, "
                              "unidade_tramitacao_destino={}, status={})."
                              .format(data_tramitacao, unidade_tramitacao_local,
                                      unidade_tramitacao_destino, status))
            raise forms.ValidationError(
                _("Não existem tramitações com os dados informados."))
        return cleaned_data
class MateriaPesquisaSimplesForm(forms.Form):
    """Simple matéria search form used to build a printable index:
    optional type, optional date interval and an optional report title."""

    logger = logging.getLogger(__name__)

    tipo_materia = forms.ModelChoiceField(
        label=TipoMateriaLegislativa._meta.verbose_name,
        queryset=TipoMateriaLegislativa.objects.all(),
        required=False,
        empty_label='Selecione')

    data_inicial = forms.DateField(
        label='Data Inicial',
        required=False,
        widget=forms.DateInput(format='%d/%m/%Y'))

    data_final = forms.DateField(
        label='Data Final',
        required=False,
        widget=forms.DateInput(format='%d/%m/%Y'))

    titulo = forms.CharField(
        label='Título do Relatório',
        required=False,
        max_length=150)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        filter_row = to_row(
            [('tipo_materia', 6),
             ('data_inicial', 3),
             ('data_final', 3)])
        title_row = to_row([('titulo', 12)])

        self.helper = SaplFormHelper()
        self.helper.layout = Layout(
            Fieldset(
                'Índice de Materias',
                filter_row, title_row,
                form_actions(label='Pesquisar')
            )
        )

    def clean(self):
        """Both dates must be supplied together and must be ordered."""
        super().clean()

        if not self.is_valid():
            return self.cleaned_data

        cleaned_data = self.cleaned_data
        start = cleaned_data['data_inicial']
        end = cleaned_data['data_final']

        if start or end:
            if not (start and end):
                self.logger.error("Caso pesquise por data, os campos de Data Inicial e "
                                  "Data Final devem ser preenchidos obrigatoriamente")
                raise ValidationError(_('Caso pesquise por data, os campos de Data Inicial e '
                                        'Data Final devem ser preenchidos obrigatoriamente'))
            if start > end:
                self.logger.error("Data Final ({}) menor que a Data Inicial ({}).".format(
                    end, start))
                raise ValidationError(
                    _('A Data Final não pode ser menor que a Data Inicial'))
        return cleaned_data
|
cmjatai/cmj
|
sapl/materia/forms.py
|
Python
|
gpl-3.0
| 127,700
|
import pytest
from kulka.request import Sleep
def test_example_input():
    # Known-good frame for Sleep(1234, 56, 7890): header, payload bytes
    # (big-endian 16-bit fields) and the trailing checksum 0xD9.
    expected = bytearray(
        [0xFF, 0xFF, 0x00, 0x22, 0x00, 0x06,
         0x04, 0xD2, 0x38, 0x1E, 0xD2, 0xD9])
    assert Sleep(1234, 56, 7890).tobytes() == expected
@pytest.mark.randomize(arg2=int, min_num=0, max_num=255)
@pytest.mark.randomize(arg1=int, arg3=int, min_num=0, max_num=65536)
def test_valid_input(arg1, arg2, arg3):
    # Rebuild the expected frame by hand: fixed header, the three
    # arguments split into bytes, then the checksum (complement of the
    # low byte of the sum over everything after the 0xFF 0xFF header).
    body = [0xFF, 0xFF, 0x00, 0x22, 0x00, 0x06,
            arg1 >> 8, arg1 & 0xFF, arg2,
            arg3 >> 8, arg3 & 0xFF]
    checksum = (sum(body[2:]) & 0xFF) ^ 0xFF
    expected = bytearray(body + [checksum])
    assert Sleep(arg1, arg2, arg3).tobytes() == expected
@pytest.mark.randomize(arg1=int, arg3=int, min_num=65536)
@pytest.mark.randomize(arg2=int, min_num=256)
def test_input_above_range(arg1, arg2, arg3):
    # Values wider than the wire-format fields must be rejected.
    oversized = Sleep(arg1, arg2, arg3)
    with pytest.raises(ValueError):
        oversized.tobytes()
@pytest.mark.randomize(arg1=int, arg2=int, arg3=int, min_num=-1)
def test_input_below_range(arg1, arg2, arg3):
    # Negative values cannot be encoded and must be rejected.
    negative = Sleep(arg1, arg2, arg3)
    with pytest.raises(ValueError):
        negative.tobytes()
|
TNT-Samuel/Coding-Projects
|
Kulka - Sphero/kulka-master/test/requests/sleep_test.py
|
Python
|
gpl-3.0
| 1,223
|
#!/usr/bin/python
import optparse
import datetime
import sys
import SeqIterator
"""
This program takes a SAM file and a FASTQ file as input. It prints out fastq file records
where the fastq id matches some QNAME in the SAM file.
@author: Jacob Porter
"""
def samMatchFASTQ(sam_file, fasta_file):
    """Write to stdout every FASTQ record whose id matches a SAM QNAME.

    Parameters
    ----------
    sam_file : str
        Path to the SAM file whose QNAME column provides the ids.
    fasta_file : str
        Path to the FASTQ file to filter.  (Parameter name kept for
        backward compatibility; the file is parsed as FASTQ.)

    Returns
    -------
    int
        The number of FASTQ records written.
    """
    # A set gives the same O(1) membership test the previous
    # dict-of-True provided, without the dummy values.
    sam_qnames = {record["QNAME"]
                  for record in SeqIterator.SeqIterator(sam_file,
                                                        file_type='SAM')}
    fastq_iterator = SeqIterator.SeqIterator(fasta_file, file_type='fastq')
    fastq_writer = SeqIterator.SeqWriter(sys.stdout, file_type='fastq')
    counter = 0
    for fastq_record in fastq_iterator:
        # The record id is the first element of the record tuple.
        if fastq_record[0] in sam_qnames:
            counter += 1
            fastq_writer.write(fastq_record)
    return counter
def parseArgs(p, now):
    """Parse the command line, run the matcher, and report timing.

    Parameters
    ----------
    p : optparse.OptionParser
        The option parser for this program.
    now : datetime.datetime
        The start time, used to report the total elapsed time.
    """
    _, args = p.parse_args()
    if len(args) != 2:
        p.print_help()
        # The check requires exactly two positional arguments, so the
        # message says so (it previously claimed "at least two").
        p.error("There must be exactly two arguments.")
    # Per the usage string, args[0] is the SAM file and args[1] the FASTQ
    # file; the previous message had them swapped.
    sys.stderr.write("Finding FASTQ records from %s that match the QNAME from SAM records from %s\n" % (args[1], args[0]))
    sys.stderr.flush()
    counter = samMatchFASTQ(args[0], args[1])
    sys.stderr.write("Found %d FASTQ records that matched a record in the SAM file.\n" % counter)
    later = datetime.datetime.now()
    sys.stderr.write("The process took time:\t%s\n" % str(later - now))
    sys.stderr.flush()
def main():
    """Entry point: build the option parser and delegate to parseArgs,
    timing the whole run."""
    started = datetime.datetime.now()
    usage = "usage: %prog [options] <sam_file> <fastq_file> "
    description = "This program takes a SAM file and a FASTQ file as input. It prints out fastq file records where the fastq id matches some QNAME in the SAM file."
    epilog = ""
    parser = optparse.OptionParser(usage=usage,
                                   description=description,
                                   epilog=epilog)
    parseArgs(parser, started)
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
JacobPorter/BisPin
|
Utilities/samFASTQmatcher.py
|
Python
|
gpl-3.0
| 2,119
|
# -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import itertools
import logging
import numpy as np
import warnings
from collections.abc import Iterable
from matplotlib import pyplot as plt
from hyperspy import utils
from hyperspy.signal import BaseSignal
from hyperspy._signals.signal1d import Signal1D, LazySignal1D
from hyperspy.misc.elements import elements as elements_db
from hyperspy.misc.eds import utils as utils_eds
from hyperspy.misc.utils import isiterable
from hyperspy.utils.plot import markers
from hyperspy.docstrings.plot import (BASE_PLOT_DOCSTRING_PARAMETERS,
PLOT1D_DOCSTRING)
_logger = logging.getLogger(__name__)
class EDS_mixin:
_signal_type = "EDS"
def __init__(self, *args, **kwards):
super().__init__(*args, **kwards)
if self.metadata.Signal.signal_type == 'EDS':
warnings.warn('The microscope type is not set. Use '
'set_signal_type(\'EDS_TEM\') '
'or set_signal_type(\'EDS_SEM\')')
self.metadata.Signal.binned = True
self._xray_markers = {}
    def _get_line_energy(self, Xray_line, FWHM_MnKa=None):
        """
        Get the line energy and the energy resolution of a Xray line.

        The return values are in the same units as the signal axis.

        Parameters
        ----------
        Xray_line : str
            Valid element X-ray line, e.g. 'Fe_Kb'.
        FWHM_MnKa : {None, float, 'auto'}
            The energy resolution of the detector in eV.
            If 'auto', use the value stored in
            'self.metadata.Acquisition_instrument.SEM.Detector.EDS.energy_resolution_MnKa'
            (or its TEM equivalent).

        Returns
        -------
        float: the line energy, if FWHM_MnKa is None
        (float,float): the line energy and the energy resolution, if FWHM_MnKa
        is not None

        Raises
        ------
        NotImplementedError
            If 'auto' is requested but the signal type is neither
            'EDS_SEM' nor 'EDS_TEM'.
        ValueError
            If the signal axis units are neither 'eV' nor 'keV'.
        """
        units_name = self.axes_manager.signal_axes[0].units

        # Resolve 'auto' to the detector resolution stored in the metadata.
        if FWHM_MnKa == 'auto':
            if self.metadata.Signal.signal_type == "EDS_SEM":
                FWHM_MnKa = self.metadata.Acquisition_instrument.SEM.\
                    Detector.EDS.energy_resolution_MnKa
            elif self.metadata.Signal.signal_type == "EDS_TEM":
                FWHM_MnKa = self.metadata.Acquisition_instrument.TEM.\
                    Detector.EDS.energy_resolution_MnKa
            else:
                raise NotImplementedError(
                    "This method only works for EDS_TEM or EDS_SEM signals. "
                    "You can use `set_signal_type(\"EDS_TEM\")` or"
                    "`set_signal_type(\"EDS_SEM\")` to convert to one of these"
                    "signal types.")
        line_energy = utils_eds._get_energy_xray_line(Xray_line)
        # The eV branch multiplies by 1000, so the looked-up line energy is
        # evidently in keV; the FWHM helper is always fed keV values.
        if units_name == 'eV':
            line_energy *= 1000
            if FWHM_MnKa is not None:
                line_FWHM = utils_eds.get_FWHM_at_Energy(
                    FWHM_MnKa, line_energy / 1000) * 1000
        elif units_name == 'keV':
            if FWHM_MnKa is not None:
                line_FWHM = utils_eds.get_FWHM_at_Energy(FWHM_MnKa,
                                                         line_energy)
        else:
            raise ValueError(
                "%s is not a valid units for the energy axis. "
                "Only `eV` and `keV` are supported. "
                "If `s` is the variable containing this EDS spectrum:\n "
                ">>> s.axes_manager.signal_axes[0].units = \'keV\' \n"
                % units_name)
        if FWHM_MnKa is None:
            return line_energy
        else:
            return line_energy, line_FWHM
def _get_beam_energy(self):
"""
Get the beam energy.
The return value is in the same units than the signal axis
"""
if "Acquisition_instrument.SEM.beam_energy" in self.metadata:
beam_energy = self.metadata.Acquisition_instrument.SEM.beam_energy
elif "Acquisition_instrument.TEM.beam_energy" in self.metadata:
beam_energy = self.metadata.Acquisition_instrument.TEM.beam_energy
else:
raise AttributeError(
"The beam energy is not defined in `metadata`. "
"Use `set_microscope_parameters` to set it.")
units_name = self.axes_manager.signal_axes[0].units
if units_name == 'eV':
beam_energy *= 1000
return beam_energy
def _get_xray_lines_in_spectral_range(self, xray_lines):
"""
Return the lines in the energy range
Parameters
----------
xray_lines: List of string
The xray_lines
Return
------
The list of xray_lines in the energy range
"""
ax = self.axes_manager.signal_axes[0]
low_value = ax.low_value
high_value = ax.high_value
try:
if self._get_beam_energy() < high_value:
high_value = self._get_beam_energy()
except AttributeError:
# in case the beam energy is not defined in the metadata
pass
xray_lines_in_range = []
xray_lines_not_in_range = []
for xray_line in xray_lines:
line_energy = self._get_line_energy(xray_line)
if low_value < line_energy < high_value:
xray_lines_in_range.append(xray_line)
else:
xray_lines_not_in_range.append(xray_line)
return xray_lines_in_range, xray_lines_not_in_range
    def sum(self, axis=None, out=None):
        # Default to summing over every navigation axis.
        if axis is None:
            axis = self.axes_manager.navigation_axes
        # modify time spend per spectrum
        s = super().sum(axis=axis, out=out)
        s = out or s
        # Scale the EDS live_time by the number of spectra that were
        # summed together (ratio of the data sizes before/after).
        mp = None
        if s.metadata.get_item("Acquisition_instrument.SEM"):
            mp = s.metadata.Acquisition_instrument.SEM
            mp_old = self.metadata.Acquisition_instrument.SEM
        elif s.metadata.get_item("Acquisition_instrument.TEM"):
            mp = s.metadata.Acquisition_instrument.TEM
            mp_old = self.metadata.Acquisition_instrument.TEM
        if mp is not None and mp.has_item('Detector.EDS.live_time'):
            mp.Detector.EDS.live_time = mp_old.Detector.EDS.live_time * \
                self.data.size / s.data.size
        # Same convention as rebin() below: only return when `out` was not
        # supplied.
        if out is None:
            return s
    sum.__doc__ = Signal1D.sum.__doc__
    def rebin(self, new_shape=None, scale=None, crop=True, out=None):
        # Validate the requested rebinning and get the per-axis factors.
        factors = self._validate_rebin_args_and_get_factors(
            new_shape=new_shape,
            scale=scale,)
        m = super().rebin(new_shape=new_shape, scale=scale, crop=crop, out=out)
        m = out or m
        # Each rebinned pixel aggregates `time_factor` original pixels
        # (product of the navigation-axis factors), so the acquisition
        # times must be scaled by the same amount.
        time_factor = np.prod([factors[axis.index_in_array]
                               for axis in m.axes_manager.navigation_axes])
        aimd = m.metadata.Acquisition_instrument
        if "Acquisition_instrument.SEM.Detector.EDS.real_time" in m.metadata:
            aimd.SEM.Detector.EDS.real_time *= time_factor
        elif "Acquisition_instrument.TEM.Detector.EDS.real_time" in m.metadata:
            aimd.TEM.Detector.EDS.real_time *= time_factor
        else:
            _logger.info(
                "real_time could not be found in the metadata and has not been updated.")
        if "Acquisition_instrument.SEM.Detector.EDS.live_time" in m.metadata:
            aimd.SEM.Detector.EDS.live_time *= time_factor
        elif "Acquisition_instrument.TEM.Detector.EDS.live_time" in m.metadata:
            aimd.TEM.Detector.EDS.live_time *= time_factor
        else:
            _logger.info(
                "Live_time could not be found in the metadata and has not been updated.")
        if out is None:
            return m
        else:
            # In-place operation: notify listeners that `out` changed.
            out.events.data_changed.trigger(obj=out)
            return m
    rebin.__doc__ = BaseSignal.rebin.__doc__
def set_elements(self, elements):
"""Erase all elements and set them.
Parameters
----------
elements : list of strings
A list of chemical element symbols.
See also
--------
add_elements, set_lines, add_lines
Examples
--------
>>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
>>> print(s.metadata.Sample.elements)
>>> s.set_elements(['Al'])
>>> print(s.metadata.Sample.elements)
['Al' 'C' 'Cu' 'Mn' 'Zr']
['Al']
"""
# Erase previous elements and X-ray lines
if "Sample.elements" in self.metadata:
del self.metadata.Sample.elements
self.add_elements(elements)
def add_elements(self, elements):
"""Add elements and the corresponding X-ray lines.
The list of elements is stored in `metadata.Sample.elements`
Parameters
----------
elements : list of strings
The symbol of the elements.
Examples
--------
>>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
>>> print(s.metadata.Sample.elements)
>>> s.add_elements(['Ar'])
>>> print(s.metadata.Sample.elements)
['Al' 'C' 'Cu' 'Mn' 'Zr']
['Al', 'Ar', 'C', 'Cu', 'Mn', 'Zr']
See also
--------
set_elements, add_lines, set_lines
"""
if not isiterable(elements) or isinstance(elements, str):
raise ValueError(
"Input must be in the form of a list. For example, "
"if `s` is the variable containing this EDS spectrum:\n "
">>> s.add_elements(('C',))\n"
"See the docstring for more information.")
if "Sample.elements" in self.metadata:
elements_ = set(self.metadata.Sample.elements)
else:
elements_ = set()
for element in elements:
if element in elements_db:
elements_.add(element)
else:
raise ValueError(
"%s is not a valid chemical element symbol." % element)
self.metadata.set_item('Sample.elements', sorted(list(elements_)))
def _get_xray_lines(self, xray_lines=None, only_one=None,
only_lines=('a',)):
if xray_lines is None:
if 'Sample.xray_lines' in self.metadata:
xray_lines = self.metadata.Sample.xray_lines
elif 'Sample.elements' in self.metadata:
xray_lines = self._get_lines_from_elements(
self.metadata.Sample.elements,
only_one=only_one,
only_lines=only_lines)
else:
raise ValueError(
"Not X-ray line, set them with `add_elements`.")
return xray_lines
def set_lines(self,
lines,
only_one=True,
only_lines=('a',)):
"""Erase all Xrays lines and set them.
See add_lines for details.
Parameters
----------
lines : list of strings
A list of valid element X-ray lines to add e.g. Fe_Kb.
Additionally, if `metadata.Sample.elements` is
defined, add the lines of those elements that where not
given in this list.
only_one: bool
If False, add all the lines of each element in
`metadata.Sample.elements` that has not line
defined in lines. If True (default),
only add the line at the highest energy
above an overvoltage of 2 (< beam energy / 2).
only_lines : {None, list of strings}
If not None, only the given lines will be added.
Examples
--------
>>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
>>> s.add_lines()
>>> print(s.metadata.Sample.xray_lines)
>>> s.set_lines(['Cu_Ka'])
>>> print(s.metadata.Sample.xray_lines)
['Al_Ka', 'C_Ka', 'Cu_La', 'Mn_La', 'Zr_La']
['Al_Ka', 'C_Ka', 'Cu_Ka', 'Mn_La', 'Zr_La']
See also
--------
add_lines, add_elements, set_elements
"""
only_lines = utils_eds._parse_only_lines(only_lines)
if "Sample.xray_lines" in self.metadata:
del self.metadata.Sample.xray_lines
self.add_lines(lines=lines,
only_one=only_one,
only_lines=only_lines)
    def add_lines(self,
                  lines=(),
                  only_one=True,
                  only_lines=("a",)):
        """Add X-rays lines to the internal list.

        Although most functions do not require an internal list of
        X-ray lines because they can be calculated from the internal
        list of elements, ocassionally it might be useful to customize the
        X-ray lines to be use by all functions by default using this method.
        The list of X-ray lines is stored in
        `metadata.Sample.xray_lines`

        Parameters
        ----------
        lines : list of strings
            A list of valid element X-ray lines to add e.g. Fe_Kb.
            Additionally, if `metadata.Sample.elements` is
            defined, add the lines of those elements that where not
            given in this list. If the list is empty (default), and
            `metadata.Sample.elements` is
            defined, add the lines of all those elements.
        only_one: bool
            If False, add all the lines of each element in
            `metadata.Sample.elements` that has not line
            defined in lines. If True (default),
            only add the line at the highest energy
            above an overvoltage of 2 (< beam energy / 2).
        only_lines : {None, list of strings}
            If not None, only the given lines will be added.

        Examples
        --------
        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.add_lines()
        >>> print(s.metadata.Sample.xray_lines)
        ['Al_Ka', 'C_Ka', 'Cu_La', 'Mn_La', 'Zr_La']

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.set_microscope_parameters(beam_energy=30)
        >>> s.add_lines()
        >>> print(s.metadata.Sample.xray_lines)
        ['Al_Ka', 'C_Ka', 'Cu_Ka', 'Mn_Ka', 'Zr_La']

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.add_lines()
        >>> print(s.metadata.Sample.xray_lines)
        >>> s.add_lines(['Cu_Ka'])
        >>> print(s.metadata.Sample.xray_lines)
        ['Al_Ka', 'C_Ka', 'Cu_La', 'Mn_La', 'Zr_La']
        ['Al_Ka', 'C_Ka', 'Cu_Ka', 'Cu_La', 'Mn_La', 'Zr_La']

        See also
        --------
        set_lines, add_elements, set_elements
        """
        only_lines = utils_eds._parse_only_lines(only_lines)
        # Start from the lines already stored in the metadata, if any.
        if "Sample.xray_lines" in self.metadata:
            xray_lines = set(self.metadata.Sample.xray_lines)
        else:
            xray_lines = set()
        # Elements whose X-ray lines have been explicitly customised,
        # so that we don't attempt to add new lines for them automatically.
        elements = set()
        for line in xray_lines:
            elements.add(line.split("_")[0])
        for line in lines:
            # A valid line symbol is "<element>_<subshell>", e.g. "Fe_Ka".
            try:
                element, subshell = line.split("_")
            except ValueError:
                raise ValueError(
                    "Invalid line symbol. "
                    "Please provide a valid line symbol e.g. Fe_Ka")
            if element in elements_db:
                elements.add(element)
                if subshell in elements_db[element]['Atomic_properties'
                                                    ]['Xray_lines']:
                    # Compare set sizes to detect whether the line was new.
                    lines_len = len(xray_lines)
                    xray_lines.add(line)
                    if lines_len != len(xray_lines):
                        _logger.info("%s line added," % line)
                    else:
                        _logger.info("%s line already in." % line)
                else:
                    raise ValueError(
                        "%s is not a valid line of %s." % (line, element))
            else:
                raise ValueError(
                    "%s is not a valid symbol of an element." % element)
        # Warn about lines outside the spectral range (they are kept).
        xray_not_here = self._get_xray_lines_in_spectral_range(xray_lines)[1]
        for xray in xray_not_here:
            warnings.warn("%s is not in the data energy range." % xray)
        if "Sample.elements" in self.metadata:
            # Elements already declared but with no customised line get
            # their default lines added via a recursive call.
            extra_elements = (set(self.metadata.Sample.elements) -
                              elements)
            if extra_elements:
                new_lines = self._get_lines_from_elements(
                    extra_elements,
                    only_one=only_one,
                    only_lines=only_lines)
                if new_lines:
                    self.add_lines(list(new_lines) + list(lines))
        self.add_elements(elements)
        if not hasattr(self.metadata, 'Sample'):
            self.metadata.add_node('Sample')
        # Merge with any lines stored meanwhile (e.g. by the recursion).
        if "Sample.xray_lines" in self.metadata:
            xray_lines = xray_lines.union(
                self.metadata.Sample.xray_lines)
        self.metadata.Sample.xray_lines = sorted(list(xray_lines))
    def _get_lines_from_elements(self,
                                 elements,
                                 only_one=False,
                                 only_lines=("a",)):
        """Returns the X-ray lines of the given elements in spectral range
        of the data.

        Parameters
        ----------
        elements : list of strings
            A list containing the symbol of the chemical elements.
        only_one : bool
            If False, add all the lines of each element in the data spectral
            range. If True only add the line at the highest energy
            above an overvoltage of 2 (< beam energy / 2).
        only_lines : {None, list of strings}
            If not None, only the given lines will be returned.

        Returns
        -------
        list of X-ray lines alphabetically sorted
        """
        only_lines = utils_eds._parse_only_lines(only_lines)
        try:
            beam_energy = self._get_beam_energy()
        except BaseException:
            # Fall back to the high_value of the energy axis
            beam_energy = self.axes_manager.signal_axes[0].high_value
        lines = []
        # Elements may arrive as bytes (presumably from file readers —
        # unconfirmed); normalise everything to str.
        elements = [el if isinstance(el, str) else el.decode()
                    for el in elements]
        for element in elements:
            # Possible line (existing and excited by electron)
            element_lines = []
            for subshell in list(elements_db[element]['Atomic_properties'
                                                      ]['Xray_lines'].keys()):
                if only_lines and subshell not in only_lines:
                    continue
                element_lines.append(element + "_" + subshell)
            element_lines = self._get_xray_lines_in_spectral_range(
                element_lines)[0]
            if only_one and element_lines:
                # Choose the best line
                select_this = -1
                element_lines.sort()
                # Pick the first (alphabetically sorted) line whose energy
                # is below beam_energy / 2; default to the last line when
                # none satisfies the criterion.
                for i, line in enumerate(element_lines):
                    if (self._get_line_energy(line) < beam_energy / 2):
                        select_this = i
                        break
                element_lines = [element_lines[select_this], ]
            if not element_lines:
                _logger.info(
                    ("There is no X-ray line for element %s " % element) +
                    "in the data spectral range")
            else:
                lines.extend(element_lines)
        lines.sort()
        return lines
def _parse_xray_lines(self, xray_lines, only_one, only_lines):
only_lines = utils_eds._parse_only_lines(only_lines)
xray_lines = self._get_xray_lines(xray_lines, only_one=only_one,
only_lines=only_lines)
xray_lines, xray_not_here = self._get_xray_lines_in_spectral_range(
xray_lines)
for xray in xray_not_here:
warnings.warn("%s is not in the data energy range." % xray +
"You can remove it with" +
"s.metadata.Sample.xray_lines.remove('%s')"
% xray)
return xray_lines
    def get_lines_intensity(self,
                            xray_lines=None,
                            integration_windows=2.,
                            background_windows=None,
                            plot_result=False,
                            only_one=True,
                            only_lines=("a",),
                            **kwargs):
        """Return the intensity map of selected Xray lines.

        The intensities, the number of X-ray counts, are computed by
        suming the spectrum over the
        different X-ray lines. The sum window width
        is calculated from the energy resolution of the detector
        as defined in 'energy_resolution_MnKa' of the metadata.
        Backgrounds average in provided windows can be subtracted from the
        intensities.

        Parameters
        ----------
        xray_lines: {None, Iterable* of strings}
            If None,
            if `metadata.Sample.elements.xray_lines` contains a
            list of lines use those.
            If `metadata.Sample.elements.xray_lines` is undefined
            or empty but `metadata.Sample.elements` is defined,
            use the same syntax as `add_line` to select a subset of lines
            for the operation.
            Alternatively, provide an iterable containing
            a list of valid X-ray lines symbols.
            * Note that while dictionaries and strings are iterable,
            their use is ambiguous and specifically not allowed.
        integration_windows: Float or array
            If float, the width of the integration windows is the
            'integration_windows_width' times the calculated FWHM of the line.
            Else provide an array for which each row corresponds to a X-ray
            line. Each row contains the left and right value of the window.
        background_windows: None or 2D array of float
            If None, no background subtraction. Else, the backgrounds average
            in the windows are subtracted from the return intensities.
            'background_windows' provides the position of the windows in
            energy. Each line corresponds to a X-ray line. In a line, the two
            first values correspond to the limits of the left window and the
            two last values correspond to the limits of the right window.
        plot_result : bool
            If True, plot the calculated line intensities. If the current
            object is a single spectrum it prints the result instead.
        only_one : bool
            If False, use all the lines of each element in the data spectral
            range. If True use only the line at the highest energy
            above an overvoltage of 2 (< beam energy / 2).
        only_lines : {None, list of strings}
            If not None, use only the given lines.
        kwargs
            The extra keyword arguments for plotting. See
            `utils.plot.plot_signals`

        Returns
        -------
        intensities : list
            A list containing the intensities as BaseSignal subclasses.

        Examples
        --------
        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.get_lines_intensity(['Mn_Ka'], plot_result=True)
        Mn_La at 0.63316 keV : Intensity = 96700.00

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.plot(['Mn_Ka'], integration_windows=2.1)
        >>> s.get_lines_intensity(['Mn_Ka'],
        >>>                       integration_windows=2.1, plot_result=True)
        Mn_Ka at 5.8987 keV : Intensity = 53597.00

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.set_elements(['Mn'])
        >>> s.set_lines(['Mn_Ka'])
        >>> bw = s.estimate_background_windows()
        >>> s.plot(background_windows=bw)
        >>> s.get_lines_intensity(background_windows=bw, plot_result=True)
        Mn_Ka at 5.8987 keV : Intensity = 46716.00

        See also
        --------
        set_elements, add_elements, estimate_background_windows,
        plot
        """
        # Strings and dicts are iterable but ambiguous here, so reject
        # them explicitly.
        if xray_lines is not None and \
                (not isinstance(xray_lines, Iterable) or \
                 isinstance(xray_lines, (str, dict))):
            raise TypeError(
                "xray_lines must be a compatible iterable, but was "
                "mistakenly provided as a %s." % type(xray_lines))

        xray_lines = self._parse_xray_lines(xray_lines, only_one, only_lines)
        # A scalar integration_windows is a width multiplier: derive the
        # actual per-line [left, right] windows from it.
        if hasattr(integration_windows, '__iter__') is False:
            integration_windows = self.estimate_integration_windows(
                windows_width=integration_windows, xray_lines=xray_lines)
        intensities = []
        ax = self.axes_manager.signal_axes[0]
        # test Signal1D (0D problem)
        # signal_to_index = self.axes_manager.navigation_dimension - 2
        for i, (Xray_line, window) in enumerate(
                zip(xray_lines, integration_windows)):
            element, line = utils_eds._get_element_and_line(Xray_line)
            line_energy = self._get_line_energy(Xray_line)
            # Integrate the counts over this line's window.
            img = self.isig[window[0]:window[1]].integrate1D(-1)
            if np.issubdtype(img.data.dtype, np.integer):
                # The operations below require a float dtype with the default
                # numpy casting rule ('same_kind')
                img.change_dtype("float")
            if background_windows is not None:
                bw = background_windows[i]
                # TODO: test to prevent slicing bug. To be removed when fixed
                indexes = [float(ax.value2index(de))
                           for de in list(bw) + window]
                # Degenerate (zero-width) windows are sampled at a single
                # energy instead of integrated.
                if indexes[0] == indexes[1]:
                    bck1 = self.isig[bw[0]]
                else:
                    bck1 = self.isig[bw[0]:bw[1]].integrate1D(-1)
                if indexes[2] == indexes[3]:
                    bck2 = self.isig[bw[2]]
                else:
                    bck2 = self.isig[bw[2]:bw[3]].integrate1D(-1)
                # Scale the summed backgrounds by the ratio of the
                # integration-window width to the background-window widths.
                corr_factor = (indexes[5] - indexes[4]) / (
                    (indexes[1] - indexes[0]) + (indexes[3] - indexes[2]))
                img = img - (bck1 + bck2) * corr_factor
            img.metadata.General.title = (
                'X-ray line intensity of %s: %s at %.2f %s' %
                (self.metadata.General.title,
                 Xray_line,
                 line_energy,
                 self.axes_manager.signal_axes[0].units,
                 ))
            img.axes_manager.set_signal_dimension(0)
            if plot_result and img.axes_manager.navigation_size == 1:
                # Single spectrum: print the value instead of plotting.
                print("%s at %s %s : Intensity = %.2f"
                      % (Xray_line,
                         line_energy,
                         ax.units,
                         img.data))
            img.metadata.set_item("Sample.elements", ([element]))
            img.metadata.set_item("Sample.xray_lines", ([Xray_line]))
            intensities.append(img)
        if plot_result and img.axes_manager.navigation_size != 1:
            utils.plot.plot_signals(intensities, **kwargs)
        return intensities
def get_take_off_angle(self):
"""Calculate the take-off-angle (TOA).
TOA is the angle with which the X-rays leave the surface towards
the detector. Parameters are read in 'SEM.Stage.tilt_alpha',
'Acquisition_instrument.SEM.Detector.EDS.azimuth_angle' and
'SEM.Detector.EDS.elevation_angle' and 'SEM.Stage.tilt_beta in
'metadata'.
Returns
-------
take_off_angle: float
in Degree
Examples
--------
>>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
>>> s.get_take_off_angle()
37.0
>>> s.set_microscope_parameters(tilt_stage=20.)
>>> s.get_take_off_angle()
57.0
See also
--------
hs.eds.take_off_angle
"""
if self.metadata.Signal.signal_type == "EDS_SEM":
mp = self.metadata.Acquisition_instrument.SEM
elif self.metadata.Signal.signal_type == "EDS_TEM":
mp = self.metadata.Acquisition_instrument.TEM
tilt_stage = mp.Stage.tilt_alpha
azimuth_angle = mp.Detector.EDS.azimuth_angle
elevation_angle = mp.Detector.EDS.elevation_angle
if 'beta_tilt' not in mp:
beta_tilt = 0.0
else:
beta_tilt = mp.Stage.tilt_beta
TOA = utils.eds.take_off_angle(tilt_stage, azimuth_angle,
elevation_angle,beta_tilt)
return TOA
def estimate_integration_windows(self,
windows_width=2.,
xray_lines=None):
"""
Estimate a window of integration for each X-ray line.
Parameters
----------
windows_width: float
The width of the integration windows is the 'windows_width' times
the calculated FWHM of the line.
xray_lines: None or list of string
If None, use 'metadata.Sample.elements.xray_lines'. Else,
provide an iterable containing a list of valid X-ray lines
symbols.
Return
------
integration_windows: 2D array of float
The positions of the windows in energy. Each row corresponds to a
X-ray line. Each row contains the left and right value of the
window.
Examples
--------
>>> s = hs.datasets.example_signals.EDS_TEM_Spectrum()
>>> s.add_lines()
>>> iw = s.estimate_integration_windows()
>>> s.plot(integration_windows=iw)
>>> s.get_lines_intensity(integration_windows=iw, plot_result=True)
Fe_Ka at 6.4039 keV : Intensity = 3710.00
Pt_La at 9.4421 keV : Intensity = 15872.00
See also
--------
plot, get_lines_intensity
"""
xray_lines = self._get_xray_lines(xray_lines)
integration_windows = []
for Xray_line in xray_lines:
line_energy, line_FWHM = self._get_line_energy(Xray_line,
FWHM_MnKa='auto')
element, line = utils_eds._get_element_and_line(Xray_line)
det = windows_width * line_FWHM / 2.
integration_windows.append([line_energy - det, line_energy + det])
return integration_windows
def estimate_background_windows(self,
line_width=[2, 2],
windows_width=1,
xray_lines=None):
"""
Estimate two windows around each X-ray line containing only the
background.
Parameters
----------
line_width: list of two floats
The position of the two windows around the X-ray line is given by
the `line_width` (left and right) times the calculated FWHM of the
line.
windows_width: float
The width of the windows is is the `windows_width` times the
calculated FWHM of the line.
xray_lines: None or list of string
If None, use `metadata.Sample.elements.xray_lines`. Else,
provide an iterable containing a list of valid X-ray lines
symbols.
Return
------
windows_position: 2D array of float
The position of the windows in energy. Each line corresponds to a
X-ray line. In a line, the two first values correspond to the
limits of the left window and the two last values correspond to
the limits of the right window.
Examples
--------
>>> s = hs.datasets.example_signals.EDS_TEM_Spectrum()
>>> s.add_lines()
>>> bw = s.estimate_background_windows(line_width=[5.0, 2.0])
>>> s.plot(background_windows=bw)
>>> s.get_lines_intensity(background_windows=bw, plot_result=True)
Fe_Ka at 6.4039 keV : Intensity = 2754.00
Pt_La at 9.4421 keV : Intensity = 15090.00
See also
--------
plot, get_lines_intensity
"""
xray_lines = self._get_xray_lines(xray_lines)
windows_position = []
for xray_line in xray_lines:
line_energy, line_FWHM = self._get_line_energy(xray_line,
FWHM_MnKa='auto')
tmp = [
line_energy - line_FWHM * line_width[0] -
line_FWHM * windows_width,
line_energy - line_FWHM * line_width[0],
line_energy + line_FWHM * line_width[1],
line_energy + line_FWHM * line_width[1] +
line_FWHM * windows_width
]
windows_position.append(tmp)
windows_position = np.array(windows_position)
# merge ovelapping windows
index = windows_position.argsort(axis=0)[:, 0]
for i in range(len(index) - 1):
ia, ib = index[i], index[i + 1]
if windows_position[ia, 2] > windows_position[ib, 0]:
interv = np.append(windows_position[ia, :2],
windows_position[ib, 2:])
windows_position[ia] = interv
windows_position[ib] = interv
return windows_position
    def plot(self,
             xray_lines=False,
             only_lines=("a", "b"),
             only_one=False,
             background_windows=None,
             integration_windows=None,
             **kwargs):
        """Plot the EDS spectrum. The following markers can be added

        - The position of the X-ray lines and their names.
        - The background windows associated with each X-ray lines. A black
          line links the left and right window with the average value in
          each window.

        Parameters
        ----------
        xray_lines: {False, True, 'from_elements', list of string}
            If not False, indicate the position and the name of the X-ray
            lines.
            If True, if `metadata.Sample.elements.xray_lines` contains a
            list of lines use those. If `metadata.Sample.elements.xray_lines`
            is undefined or empty or if xray_lines equals 'from_elements' and
            `metadata.Sample.elements` is defined, use the same syntax as
            `add_line` to select a subset of lines for the operation.
            Alternatively, provide an iterable containing a list of valid
            X-ray lines symbols.
        only_lines : None or list of strings
            If not None, use only the given lines (eg. ('a','Kb')).
            If None, use all lines.
        only_one : bool
            If False, use all the lines of each element in the data spectral
            range. If True use only the line at the highest energy
            above an overvoltage of 2 (< beam energy / 2).
        background_windows: None or 2D array of float
            If not None, add markers at the position of the windows in
            energy. Each line corresponds to a X-ray lines. In a line, the
            two first value corresponds to the limit of the left window and
            the two last values corresponds to the limit of the right window.
        integration_windows: None or 'auto' or float or 2D array of float
            If not None, add markers at the position of the integration
            windows.
            If 'auto' (or float), the width of the integration windows is 2.0
            (or float) times the calculated FWHM of the line. see
            'estimate_integration_windows'.
            Else provide an array for which each row corresponds to a X-ray
            line. Each row contains the left and right value of the window.
        %s
        %s

        Examples
        --------
        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.plot()

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.plot(True)

        >>> s = hs.datasets.example_signals.EDS_TEM_Spectrum()
        >>> s.add_lines()
        >>> bw = s.estimate_background_windows()
        >>> s.plot(background_windows=bw)

        >>> s = hs.datasets.example_signals.EDS_SEM_Spectrum()
        >>> s.plot(['Mn_Ka'], integration_windows='auto')

        >>> s = hs.datasets.example_signals.EDS_TEM_Spectrum()
        >>> s.add_lines()
        >>> bw = s.estimate_background_windows()
        >>> s.plot(background_windows=bw, integration_windows=2.1)

        See also
        --------
        set_elements, add_elements, estimate_integration_windows,
        get_lines_intensity, estimate_background_windows
        """
        # Draw the spectrum itself first, then overlay the requested
        # EDS annotations (line labels, background/integration windows).
        super().plot(**kwargs)
        self._plot_xray_lines(xray_lines, only_lines, only_one,
                              background_windows, integration_windows)
    # Fill the two %s placeholders in the docstring above with the shared
    # plotting-parameter documentation (runs once at class creation).
    plot.__doc__ %= (BASE_PLOT_DOCSTRING_PARAMETERS,
                     PLOT1D_DOCSTRING)
def _plot_xray_lines(self, xray_lines=False, only_lines=("a", "b"),
only_one=False, background_windows=None,
integration_windows=None):
if xray_lines is not False or\
background_windows is not None or\
integration_windows is not None:
if xray_lines is False:
xray_lines = True
only_lines = utils_eds._parse_only_lines(only_lines)
if xray_lines is True or xray_lines == 'from_elements':
if 'Sample.xray_lines' in self.metadata \
and xray_lines != 'from_elements':
xray_lines = self.metadata.Sample.xray_lines
elif 'Sample.elements' in self.metadata:
xray_lines = self._get_lines_from_elements(
self.metadata.Sample.elements,
only_one=only_one,
only_lines=only_lines)
else:
_logger.warning(
"No elements defined, set them with `add_elements`")
# No X-rays lines, nothing to do then
return
xray_lines, xray_not_here = self._get_xray_lines_in_spectral_range(
xray_lines)
for xray in xray_not_here:
_logger.warning("%s is not in the data energy range." % xray)
xray_lines = np.unique(xray_lines)
self.add_xray_lines_markers(xray_lines)
if background_windows is not None:
self._add_background_windows_markers(background_windows)
if integration_windows is not None:
if integration_windows == 'auto':
integration_windows = 2.0
if hasattr(integration_windows, '__iter__') is False:
integration_windows = self.estimate_integration_windows(
windows_width=integration_windows,
xray_lines=xray_lines)
self._add_vertical_lines_groups(integration_windows,
linestyle='--')
def _add_vertical_lines_groups(self, position, **kwargs):
"""
Add vertical markers for each group that shares the color.
Parameters
----------
position: 2D array of float
The position on the signal axis. Each row corresponds to a
group.
kwargs
keywords argument for markers.vertical_line
"""
per_xray = len(position[0])
colors = itertools.cycle(np.sort(
plt.rcParams['axes.prop_cycle'].by_key()["color"] * per_xray))
for x, color in zip(np.ravel(position), colors):
line = markers.vertical_line(x=x, color=color, **kwargs)
self.add_marker(line, render_figure=False)
self._render_figure(plot=['signal_plot'])
def add_xray_lines_markers(self, xray_lines):
"""
Add marker on a spec.plot() with the name of the selected X-ray
lines
Parameters
----------
xray_lines: list of string
A valid list of X-ray lines
"""
line_energy = []
intensity = []
for xray_line in xray_lines:
element, line = utils_eds._get_element_and_line(xray_line)
line_energy.append(self._get_line_energy(xray_line))
relative_factor = elements_db[element][
'Atomic_properties']['Xray_lines'][line]['weight']
a_eng = self._get_line_energy(element + '_' + line[0] + 'a')
intensity.append(self.isig[a_eng].data * relative_factor)
for i in range(len(line_energy)):
line = markers.vertical_line_segment(
x=line_energy[i], y1=None, y2=intensity[i] * 0.8)
self.add_marker(line, render_figure=False)
string = (r'$\mathrm{%s}_{\mathrm{%s}}$' %
utils_eds._get_element_and_line(xray_lines[i]))
text = markers.text(
x=line_energy[i], y=intensity[i] * 1.1, text=string,
rotation=90)
self.add_marker(text, render_figure=False)
self._xray_markers[xray_lines[i]] = [line, text]
line.events.closed.connect(self._xray_marker_closed)
text.events.closed.connect(self._xray_marker_closed)
self._render_figure(plot=['signal_plot'])
def _xray_marker_closed(self, obj):
marker = obj
for xray_line, line_markers in reversed(list(
self._xray_markers.items())):
if marker in line_markers:
line_markers.remove(marker)
if not line_markers:
self._xray_markers.pop(xray_line)
def remove_xray_lines_markers(self, xray_lines):
"""
Remove marker previosuly added on a spec.plot() with the name of the
selected X-ray lines
Parameters
----------
xray_lines: list of string
A valid list of X-ray lines to remove
"""
for xray_line in xray_lines:
if xray_line in self._xray_markers:
line_markers = self._xray_markers[xray_line]
while line_markers:
m = line_markers.pop()
m.close(render_figure=False)
self._render_figure(plot=['signal_plot'])
def _add_background_windows_markers(self,
windows_position):
"""
Plot the background windows associated with each X-ray lines.
For X-ray lines, a black line links the left and right window with the
average value in each window.
Parameters
----------
windows_position: 2D array of float
The position of the windows in energy. Each line corresponds to a
X-ray lines. In a line, the two first value corresponds to the
limit of the left window and the two last values corresponds to the
limit of the right window.
See also
--------
estimate_background_windows, get_lines_intensity
"""
self._add_vertical_lines_groups(windows_position)
ax = self.axes_manager.signal_axes[0]
for bw in windows_position:
# TODO: test to prevent slicing bug. To be reomved when fixed
if ax.value2index(bw[0]) == ax.value2index(bw[1]):
y1 = self.isig[bw[0]].data
else:
y1 = self.isig[bw[0]:bw[1]].mean(-1).data
if ax.value2index(bw[2]) == ax.value2index(bw[3]):
y2 = self.isig[bw[2]].data
else:
y2 = self.isig[bw[2]:bw[3]].mean(-1).data
line = markers.line_segment(
x1=(bw[0] + bw[1]) / 2., x2=(bw[2] + bw[3]) / 2.,
y1=y1, y2=y2, color='black')
self.add_marker(line, render_figure=False)
self._render_figure(plot=['signal_plot'])
class EDSSpectrum(EDS_mixin, Signal1D):
    """In-memory 1D EDS spectrum: combines the EDS behaviour mixin with the
    ``Signal1D`` base class; adds no members of its own."""
    pass
class LazyEDSSpectrum(EDSSpectrum, LazySignal1D):
    """Lazy (out-of-core) variant of :class:`EDSSpectrum`; behaviour comes
    entirely from its bases."""
    pass
|
dnjohnstone/hyperspy
|
hyperspy/_signals/eds.py
|
Python
|
gpl-3.0
| 45,061
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import string
import os
import sys
import inspect
import api
from core._time import now
def default_paths():
    """
    Home, temporary and results directories for the framework.

    Returns:
        a dict with the "home_path", "tmp_path" and "results_path" entries,
        all rooted under a ".data" directory next to the entry script
    """
    base = sys.path[0]
    return {
        "home_path": os.path.join(base, '.data'),
        "tmp_path": os.path.join(base, '.data/tmp'),
        "results_path": os.path.join(base, '.data/results')
    }
def default_profiles():
    """
    Shortcut profiles mapping a profile name to module-name patterns, used
    to run customized scans.

    Returns:
        a dict of profile name -> list of module name patterns
    """
    profiles = {}
    profiles["information_gathering"] = ["port_scan"]
    profiles["vulnerability"] = ["*_vuln"]
    profiles["scan"] = ["*_scan"]
    profiles["brute"] = ["*_brute"]
    return profiles
def _api_default_config():
"""
API Config
Returns:
a JSON with API configuration
"""
return {
"api_host": "127.0.0.1",
"api_port": 5000,
"api_debug_mode": False,
"api_access_key": "".join(random.choice("0123456789abcdef") for x in range(32)),
"api_client_white_list": {
"enabled": False,
"ips": ["127.0.0.1", "10.0.0.0/24", "192.168.1.1-192.168.1.255"]
},
"api_access_log": {
"enabled": False,
"filename": "nettacker_api_access.log"
},
}
def _database_default_config():
    """
    Default database configuration (a SQLite file inside the home path).

    Returns:
        a dict with DB engine, database path and (empty) credentials
    """
    home = default_paths()["home_path"]
    config = {
        "DB": "sqlite",
        "DATABASE": home + "/nettacker.db",
        "USERNAME": "",
        "PASSWORD": "",
        "HOST": "",
        "PORT": ""
    }
    return config
def _core_default_config():
    """
    Core framework default configuration.

    Returns:
        a dict with all user default configurations (scan options, API
        settings, database settings and framework paths)
    """
    # Build each sub-configuration exactly once. The original called
    # default_paths() four times, _api_default_config() eight times and
    # _database_default_config() six times; besides the waste,
    # _api_default_config() draws a fresh random access key on every call.
    paths = default_paths()
    api = _api_default_config()
    database = _database_default_config()
    return {
        "language": "en",
        "verbose_level": 0,
        "show_version": False,
        "check_update": False,
        # Per-run HTML report: timestamp plus a random 10-letter suffix.
        "log_in_file": "{0}/results_{1}_{2}.html".format(
            paths["results_path"],
            now(model="%Y_%m_%d_%H_%M_%S"),
            "".join(random.choice(string.ascii_lowercase)
                    for x in range(10))),
        "graph_flag": "d3_tree_v2_graph",
        "help_menu_flag": False,
        "targets": None,
        "targets_list": None,
        "scan_method": "all",
        "exclude_method": None,
        "users": None,
        "users_list": None,
        "passwds": None,
        "passwds_list": None,
        "ports": None,
        "timeout_sec": 2.0,
        "time_sleep": 0.0,
        "check_ranges": False,
        "check_subdomains": False,
        "thread_number": 100,
        "thread_number_host": 5,
        "socks_proxy": None,
        "retries": 3,
        "ping_flag": False,
        "methods_args": None,
        "method_args_list": False,
        "startup_check_for_update": True,
        "wizard_mode": False,
        "profile": None,
        "start_api": False,
        "api_host": api["api_host"],
        "api_port": api["api_port"],
        "api_debug_mode": api["api_debug_mode"],
        "api_access_key": api["api_access_key"],
        "api_client_white_list": api["api_client_white_list"]["enabled"],
        "api_client_white_list_ips": api["api_client_white_list"]["ips"],
        "api_access_log": api["api_access_log"]["enabled"],
        "api_access_log_filename": api["api_access_log"]["filename"],
        "database_type": database["DB"],
        "database_name": database["DATABASE"],
        "database_username": database["USERNAME"],
        "database_password": database["PASSWORD"],
        "database_host": database["HOST"],
        "database_port": database["PORT"],
        "home_path": paths["home_path"],
        "tmp_path": paths["tmp_path"],
        "results_path": paths["results_path"]
    }
def _builder(defaults, keys):
"""
Args:
defaults:
keys:
Returns:
"""
for key in keys:
try:
defaults[key]
except:
defaults[key] = keys[key]
return defaults
|
Nettacker/Nettacker
|
core/config_builder.py
|
Python
|
gpl-3.0
| 4,563
|
import traceback
import asyncio
import time
from inspect import signature, getfullargspec
import discord
from discord.ext import commands
from discord.ext.commands.view import StringView
from meowth import errors
from meowth.core.context import Context
from .cog_base import Cog
async def delete_error(message, error):
    """Best-effort cleanup: delete the invoking message, then the error
    reply. Deletion failures (missing permission, already gone) are
    ignored for each message independently."""
    for msg in (message, error):
        try:
            await msg.delete()
        except (discord.errors.Forbidden, discord.errors.HTTPException):
            pass
def missing_args(ctx):
    """Return the callback parameters that the invocation did not supply.

    Compares the callback's signature against the number of positional
    arguments already parsed into ``ctx.args``, skipping ``ctx`` and, for
    cog methods, ``self`` (both are supplied by the framework, never by
    the user).

    Args:
        ctx: command invocation context.

    Returns:
        list of still-missing parameter names, in declaration order.
    """
    # Dead code removed: the original also computed a prefix string and a
    # required-args list via getfullargspec, but never used either.
    sig = list(signature(ctx.command.callback).parameters.keys())
    arg_num = len(ctx.args) - 1
    sig.remove('ctx')
    if 'self' in sig:
        sig.remove('self')
        arg_num = len(ctx.args) - 2
    return sig[arg_num:]
class ErrorHandler(Cog):
    """Global command-error dispatcher.

    Known error types produce user-facing feedback; most check failures get
    a temporary message that is deleted (together with the invoking command
    message) after ten seconds. Unexpected exceptions are logged and stored
    in the ``unhandled_errors`` table.

    Bug fix relative to the original: every region-channel branch referenced
    a bare ``bot`` name, which is undefined in this module (NameError at
    runtime); they now use ``ctx.bot`` like the rest of the handler.
    """

    @staticmethod
    def _channel_mentions(guild, channel_ids):
        """Return newline-prefixed mentions for the given channel ids, with
        a placeholder for channels that no longer exist."""
        text = ''
        for channel_id in channel_ids:
            channel = discord.utils.get(guild.channels, id=channel_id)
            if channel:
                text += '\n' + channel.mention
            else:
                text += '\n#deleted-channel'
        return text

    def _report_channels(self, ctx, section):
        """Configured report-channel ids for one configure_dict section."""
        return ctx.bot.guild_dict[ctx.guild.id][
            'configure_dict'][section]['report_channels']

    def _raid_channel_status(self, ctx):
        """Return (egg_check, meetup) for the invoking raid channel, or
        ("", False) when the channel is not tracked."""
        try:
            raid_info = ctx.bot.guild_dict[ctx.guild.id][
                'raidchannel_dict'][ctx.channel.id]
            return raid_info.get('type', None), raid_info.get('meetup', {})
        except (KeyError, AttributeError):
            # Narrowed from a bare ``except``: only a missing guild/channel
            # entry (or a None guild in DMs) is expected here.
            return "", False

    @staticmethod
    async def _send_then_delete(ctx, msg):
        """Send ``msg``, wait ten seconds, then delete both the reply and
        the message that triggered the command."""
        error = await ctx.channel.send(msg)
        await asyncio.sleep(10)
        await delete_error(ctx.message, error)

    async def _send_region_channel_list(self, ctx, msg, section,
                                        many_msg, few_msg):
        """Append the section's report-channel listing to ``msg`` and send
        it as a temporary error. ``many_msg`` is used when more than ten
        channels are configured, ``few_msg`` otherwise; the channel
        mentions are appended in both cases (mirrors the original)."""
        city_channels = self._report_channels(ctx, section)
        if len(city_channels) > 10:
            msg += many_msg
        else:
            msg += few_msg
        msg += self._channel_mentions(ctx.guild, city_channels)
        await self._send_then_delete(ctx, msg)

    @Cog.listener()
    async def on_command_error(self, ctx, error):
        channel = ctx.channel
        prefix = ctx.prefix.replace(ctx.bot.user.mention, '@' + ctx.bot.user.name)
        if isinstance(error, commands.MissingRequiredArgument):
            # Ask the user to reply with the missing arguments, then
            # re-parse and re-invoke the command with them appended.
            fields = {
                'Missing Arguments': "\n".join(missing_args(ctx))
            }
            await ctx.warning(
                title="Warning: Missing Required Arguments",
                details="Reply to this message with the missing arguments listed below!",
                fields=fields)

            def check(m):
                return m.author == ctx.author and m.channel == ctx.channel
            reply = await ctx.bot.wait_for('message', check=check)
            ctx.message.content += f' {reply.content}'
            ctx.view = StringView(ctx.message.content)
            ctx.view.get_word()
            try:
                await ctx.command.invoke(ctx)
            except errors.CommandError as exc:
                await ctx.command.dispatch_error(ctx, exc)
            else:
                ctx.bot.dispatch('command_completion', ctx)
        elif isinstance(error, commands.BadArgument):
            await ctx.bot.send_cmd_help(
                ctx, title=f'Bad Argument - {error}', msg_type='error')
        elif isinstance(error, errors.MissingSubcommand):
            await ctx.bot.send_cmd_help(
                ctx, title=f'Missing Subcommand - {error}', msg_type='error')
        elif isinstance(error, commands.DisabledCommand):
            await ctx.send("That command is disabled.")
        elif isinstance(error, commands.CommandInvokeError):
            # Unexpected exception inside a command body: log it, keep the
            # traceback, persist it for triage, and tell the user.
            error_table = ctx.bot.dbi.table('unhandled_errors')
            ctx.bot.logger.exception(
                "Exception in command '{}'".format(ctx.command.qualified_name),
                exc_info=error.original)
            message = ("Error in command '{}'. This error has been logged "
                       "and will be tracked. Contact support for more information."
                       "".format(ctx.command.qualified_name))
            exception_log = ("Exception in command '{}'\n"
                             "".format(ctx.command.qualified_name))
            exception_log += "".join(traceback.format_exception(
                type(error), error, error.__traceback__))
            ctx.bot._last_exception = exception_log
            d = {
                'command_name': ctx.command.name,
                'guild_id': ctx.guild.id,
                'channel_id': ctx.channel.id,
                'author_id': ctx.author.id,
                'created': time.time(),
                'full_traceback': exception_log
            }
            insert = error_table.insert
            insert.row(**d)
            await insert.commit()
            await ctx.send(message)
        elif isinstance(error, commands.MissingPermissions):
            await ctx.error("User Missing Required Permissions",
                            fields={"Missing": "\n".join(error.missing_perms)})
        elif isinstance(error, commands.BotMissingPermissions):
            await ctx.error("Bot Missing Required Permissions",
                            fields={"Missing": "\n".join(error.missing_perms)})
        elif isinstance(error, commands.CommandNotFound):
            pass
        elif isinstance(error, commands.CheckFailure):
            pass
        elif isinstance(error, commands.NoPrivateMessage):
            await ctx.send("That command is not available in DMs.")
        elif isinstance(error, commands.CommandOnCooldown):
            await ctx.send("This command is on cooldown. "
                           "Try again in {:.2f}s"
                           "".format(error.retry_after))
        elif isinstance(error, errors.LocationNotSet):
            msg = ('Location has not been set for this channel. Use **{prefix}setlocation** to fix.').format(prefix=prefix)
            error = await ctx.error('Location not set', details=msg)
            await asyncio.sleep(10)
            await delete_error(ctx.message, error)
        elif isinstance(error, errors.TeamSetCheckFail):
            msg = _('Meowth! Team Management is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.WantSetCheckFail):
            msg = _('Meowth! Pokemon Notifications are not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.WildSetCheckFail):
            msg = _('Meowth! Wild Reporting is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.ReportCheckFail):
            msg = _('Meowth! Reporting is not enabled for this channel. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.RaidSetCheckFail):
            msg = _('Meowth! Raid Management is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.EXRaidSetCheckFail):
            msg = _('Meowth! EX Raid Management is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.ResearchSetCheckFail):
            msg = _('Meowth! Research Reporting is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.MeetupSetCheckFail):
            msg = _('Meowth! Meetup Reporting is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.ArchiveSetCheckFail):
            msg = _('Meowth! Channel Archiving is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.InviteSetCheckFail):
            msg = _('Meowth! EX Raid Invite is not enabled on this server. **{prefix}{cmd_name}** is unable to be used.').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.CityChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'raid',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.WantChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in the following channel').format(cmd_name=ctx.invoked_with, prefix=prefix)
            want_channels = self._report_channels(ctx, 'want')
            msg += _('s:\n') if len(want_channels) > 1 else _(': ')
            for idx, channel_id in enumerate(want_channels):
                channel = discord.utils.get(ctx.guild.channels, id=channel_id)
                if idx > 0:
                    msg += '\n'
                if channel:
                    msg += channel.mention
                else:
                    msg += '\n#deleted-channel'
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.RaidChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in a Raid channel. Use **{prefix}list** in any ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'raid',
                _('Region report channel to see active raids.'),
                _('of the following Region channels to see active raids:'))
        elif isinstance(error, errors.EggChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in an Egg channel. Use **{prefix}list** in any ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'raid',
                _('Region report channel to see active raids.'),
                _('of the following Region channels to see active raids:'))
        elif isinstance(error, errors.NonRaidChannelCheckFail):
            msg = _("Meowth! **{prefix}{cmd_name}** can't be used in a Raid channel.").format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.ActiveRaidChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in an Active Raid channel. Use **{prefix}list** in any ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            city_channels = self._report_channels(ctx, 'raid')
            egg_check, meetup = self._raid_channel_status(ctx)
            if len(city_channels) > 10:
                msg += _('Region report channel to see active channels.')
            else:
                msg += _('of the following Region channels to see active channels:')
            msg += self._channel_mentions(ctx.guild, city_channels)
            if egg_check == "egg" and not meetup:
                msg += _('\nThis is an egg channel. The channel needs to be activated with **{prefix}raid <pokemon>** before I accept commands!').format(prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.ActiveChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in an Active channel. Use **{prefix}list** in any ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            city_channels = self._report_channels(ctx, 'raid')
            egg_check, meetup = self._raid_channel_status(ctx)
            if len(city_channels) > 10:
                msg += _('Region report channel to see active raids.')
            else:
                msg += _('of the following Region channels to see active raids:')
            msg += self._channel_mentions(ctx.guild, city_channels)
            if egg_check == "egg" and not meetup:
                msg += _('\nThis is an egg channel. The channel needs to be activated with **{prefix}raid <pokemon>** before I accept commands!').format(prefix=prefix)
            await self._send_then_delete(ctx, msg)
        elif isinstance(error, errors.CityRaidChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in either a Raid channel or ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'raid',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.RegionEggChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in either a Raid Egg channel or ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'raid',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.RegionExRaidChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in either a EX Raid channel or one of the following region channels:').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'exraid',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.ExRaidChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in a EX Raid channel. Use **{prefix}list** in any of the following region channels to see active raids:').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'exraid',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.ResearchReportChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'research',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.MeetupReportChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'meetup',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        elif isinstance(error, errors.WildReportChannelCheckFail):
            msg = _('Meowth! Please use **{prefix}{cmd_name}** in ').format(cmd_name=ctx.invoked_with, prefix=prefix)
            await self._send_region_channel_list(
                ctx, msg, 'wild',
                _('a Region report channel.'),
                _('one of the following region channels:'))
        else:
            # Anything not explicitly handled above: just log it.
            ctx.bot.logger.exception(type(error).__name__, exc_info=error)
def setup(bot):
    """Extension entry point: attach the error-handling cog to *bot*."""
    cog = ErrorHandler(bot)
    bot.add_cog(cog)
|
FoglyOgly/Meowth
|
meowth/core/error_handling.py
|
Python
|
gpl-3.0
| 22,798
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Sylvain Boily
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
from flask import Blueprint
# Blueprint exposing the bootstrap assets; static files are served under a
# URL path derived from this module's import name.
q_bootstrap = Blueprint('q_bootstrap', __name__, template_folder='templates',
                        static_folder='static', static_url_path='/%s' % __name__)
class Plugin(object):
    """Plugin shim that hands the bootstrap blueprint to the webi core."""

    def load(self, core):
        """Hook called by the core at plugin load time."""
        blueprint = q_bootstrap
        core.register_blueprint(blueprint)
|
sboily/xivo-webi-plugins
|
xivo_webi_plugins/bootstrap/plugin.py
|
Python
|
gpl-3.0
| 981
|
"""Define custom widget as countdown control panel."""
import logging
from PyQt5.QtCore import QDateTime, Qt, QTime
from PyQt5.QtGui import QCursor
from PyQt5.QtWidgets import (QAction, QCheckBox, QDateTimeEdit, QGridLayout,
QLabel, QLineEdit, QMenu, QPushButton,
QRadioButton, QTimeEdit, QWidget)
import scctool.settings.config
import scctool.settings.translation
_ = scctool.settings.translation.gettext
module_logger = logging.getLogger(__name__)
class CountdownWidget(QWidget):
    """Define custom widget as countdown control panel."""

    def __init__(self, ctrl, parent=None):
        """Initialize the countdown control panel.

        :param ctrl: main controller (provides ``websocketThread``).
        :param parent: optional parent widget.
        """
        super().__init__(parent)
        self.controller = ctrl
        self.createLayout()
        self.loadSettings()
        self.connect()

    def createLayout(self):
        """Create widget to control the countdown browser source."""
        layout = QGridLayout()
        # Two mutually exclusive modes: count down to a fixed date/time
        # (static) or run a countdown of a fixed duration (dynamic).
        self.rb_static = QRadioButton(
            _("Static Countdown to date:"), self)
        layout.addWidget(self.rb_static, 0, 0)
        self.rb_dynamic = QRadioButton(
            _("Dynamic Countdown duration:"), self)
        self.rb_dynamic.setChecked(True)
        self.rb_dynamic.toggled.connect(self.toggleRadio)
        layout.addWidget(self.rb_dynamic, 1, 0)
        # Target date/time for the static mode, with a custom context menu.
        self.te_datetime = QDateTimeEdit()
        self.te_datetime.setCalendarPopup(True)
        self.te_datetime.setContextMenuPolicy(Qt.CustomContextMenu)
        self.te_datetime.customContextMenuRequested.connect(
            self.openDateTimeMenu)
        layout.addWidget(self.te_datetime, 0, 1)
        # Duration for the dynamic mode, with a custom context menu.
        self.te_duration = QTimeEdit()
        self.te_duration.setDisplayFormat("HH 'h' mm 'm' ss 's'")
        self.te_duration.setContextMenuPolicy(Qt.CustomContextMenu)
        self.te_duration.customContextMenuRequested.connect(
            self.openDurationMenu)
        layout.addWidget(self.te_duration, 1, 1)
        self.event_label = QLabel(' ' + _('Event description:'))
        layout.addWidget(self.event_label, 0, 2)
        self.le_desc = QLineEdit()
        self.le_desc.setAlignment(Qt.AlignCenter)
        layout.addWidget(self.le_desc, 0, 3, 1, 2)
        self.cb_restart = QCheckBox(
            _('Restart countdown when source becomes active'))
        layout.addWidget(self.cb_restart, 1, 2, 1, 2)
        self.pb_start = QPushButton(" " + _('Start Countdown') + " ")
        layout.addWidget(self.pb_start, 1, 4)
        layout.setColumnStretch(2, 1)
        layout.setColumnStretch(3, 2)
        self.setLayout(layout)

    def openDateTimeMenu(self, position):
        """Open context menu offering to set the date to today."""
        menu = QMenu()
        act1 = QAction(_("Set Today"))
        act1.triggered.connect(self.setToday)
        menu.addAction(act1)
        menu.exec_(QCursor.pos())

    def openDurationMenu(self, position):
        """Open context menu with preset durations."""
        menu = QMenu()
        for duration in [15, 10, 5, 3, 1]:
            act = QAction(_("Set {} min").format(duration), menu)
            # Bind the current value via a default argument so every action
            # keeps its own duration (avoids the late-binding closure trap).
            act.triggered.connect(
                lambda x, duration=duration: self.setDuration(duration))
            menu.addAction(act)
        menu.exec_(QCursor.pos())

    def setToday(self):
        """Set date to today, keeping the currently selected time."""
        today = QDateTime.currentDateTime()
        today.setTime(self.te_datetime.time())
        self.te_datetime.setDateTime(today)

    def setFromTimestamp(self, timestamp):
        """Set time and date based on a Unix timestamp."""
        self.te_datetime.setDateTime(QDateTime.fromTime_t(int(timestamp)))

    def setDuration(self, duration):
        """Set the countdown duration to *duration* minutes."""
        self.te_duration.setTime(QTime(0, duration, 0))

    def toggleRadio(self):
        """Enable only the inputs relevant to the selected mode."""
        static = self.rb_static.isChecked()
        self.te_datetime.setEnabled(static)
        self.te_duration.setEnabled(not static)
        self.cb_restart.setEnabled(not static)
        self.pb_start.setEnabled(not static)

    def loadSettings(self):
        """Load widget state from the persistent settings parser."""
        static = scctool.settings.config.parser.getboolean(
            "Countdown", "static")
        if static:
            self.rb_static.setChecked(True)
        else:
            self.rb_dynamic.setChecked(True)
        description = scctool.settings.config.parser.get(
            'Countdown', 'description')
        self.le_desc.setText(description.strip())
        restart = scctool.settings.config.parser.getboolean(
            "Countdown", "restart")
        self.cb_restart.setChecked(restart)
        duration = QTime()
        string = scctool.settings.config.parser.get(
            'Countdown', 'duration').strip()
        duration = QTime.fromString(string, 'HH:mm:ss')
        self.te_duration.setTime(duration)
        string = scctool.settings.config.parser.get(
            'Countdown', 'datetime').strip()
        datetime = QDateTime.fromString(string, 'yyyy-MM-dd HH:mm')
        self.te_datetime.setDateTime(datetime)

    def connect(self):
        """Connect all form elements to their change handlers."""
        self.le_desc.textChanged.connect(self.changed_description)
        self.cb_restart.toggled.connect(self.changed_restart)
        self.te_datetime.dateTimeChanged.connect(self.changed_datetime)
        self.te_duration.timeChanged.connect(self.changed_duration)
        self.rb_static.toggled.connect(self.changed_static)
        self.pb_start.pressed.connect(self.start_pressed)

    def changed_description(self):
        """Persist the description and push it to the browser source."""
        desc = self.le_desc.text().strip()
        scctool.settings.config.parser.set('Countdown', 'description', desc)
        self.controller.websocketThread.sendData2Path(
            'countdown', "DESC", desc)

    def changed_restart(self):
        """Persist the restart option and push it to the browser source."""
        restart = self.cb_restart.isChecked()
        scctool.settings.config.parser.set(
            'Countdown', 'restart', str(restart))
        self.controller.websocketThread.sendData2Path(
            'countdown', "RESTART", restart)

    def changed_datetime(self, time):
        """Persist the static target date/time and resend the data."""
        datetime = time.toString('yyyy-MM-dd HH:mm')
        scctool.settings.config.parser.set('Countdown', 'datetime', datetime)
        self.sendData()

    def changed_duration(self, time):
        """Persist the dynamic duration and resend the data."""
        duration = time.toString('HH:mm:ss')
        scctool.settings.config.parser.set('Countdown', 'duration', duration)
        self.sendData()

    def changed_static(self):
        """Persist the static/dynamic mode choice and resend the data."""
        static = self.rb_static.isChecked()
        scctool.settings.config.parser.set('Countdown', 'static', str(static))
        self.sendData()

    def start_pressed(self):
        """Handle press of the start button."""
        self.controller.websocketThread.sendData2Path('countdown', 'START')

    def sendData(self):
        """Send the full countdown data to the websocket."""
        self.controller.websocketThread.sendData2Path(
            'countdown',
            "DATA",
            self.controller.websocketThread.getCountdownData())
|
teampheenix/StarCraft-Casting-Tool
|
scctool/view/countdown.py
|
Python
|
gpl-3.0
| 7,158
|
import pkg_resources
# The version is read from the installed distribution metadata so it only
# has to be maintained in one place (the package setup).
__version__ = pkg_resources.get_distribution('jiffybox').version
__author__ = 'Amadeus IT Group (opensource@amadeus.com)'
|
AmadeusITGroup/python-jiffybox
|
jiffybox/__init__.py
|
Python
|
gpl-3.0
| 144
|
# -*- coding: utf-8 -*-
'''
diacamma.accounting.system package
@author: Laurent GAY
@organization: sd-libre.fr
@contact: info@sd-libre.fr
@copyright: 2015 sd-libre.fr
@license: This file is part of Lucterios.
Lucterios is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lucterios is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lucterios. If not, see <http://www.gnu.org/licenses/>.
'''
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.utils.module_loading import import_module
def accounting_system_list():
    """Return the mapping of accounting-system class paths to display names."""
    return {
        'diacamma.accounting.system.french.FrenchSystemAcounting': _('French system acounting'),
        'diacamma.accounting.system.belgium.BelgiumSystemAcounting': _('Belgium system acounting'),
    }
def accounting_system_name(complete_name):
    """Return the display name for *complete_name*, or "---" if unknown.

    Uses dict.get instead of the redundant ``in sys_list.keys()`` check
    followed by a second lookup.
    """
    return accounting_system_list().get(complete_name, "---")
def accounting_system_ident(complete_name):
    """Return the short country identifier from a known system path.

    For a registered 5-part dotted path (e.g.
    ``diacamma.accounting.system.french.FrenchSystemAcounting``) this is the
    4th component (``french``); otherwise "---".
    """
    modules_long = complete_name.split('.')
    # Membership test on the dict directly — no need for .keys().
    if complete_name in accounting_system_list() and len(modules_long) == 5:
        return modules_long[3]
    return "---"
def get_accounting_system(complete_name):
    """Instantiate the accounting system class named by *complete_name*.

    Falls back to ``DefaultSystemAccounting`` when the name is unknown or
    the class cannot be imported.
    """
    if complete_name in accounting_system_list():
        # Split "pkg.module.ClassName" into its module path and class name.
        module_name, _sep, class_name = complete_name.rpartition('.')
        try:
            module_sys = import_module(module_name)
            return getattr(module_sys, class_name)()
        except (ImportError, AttributeError):
            pass
    from diacamma.accounting.system.default import DefaultSystemAccounting
    return DefaultSystemAccounting()
|
Diacamma2/financial
|
diacamma/accounting/system/__init__.py
|
Python
|
gpl-3.0
| 2,277
|
#!/usr/bin/env python
"""
===================================================================
heatController.py - Potential Field Region-to-Region Motion Control
===================================================================
Uses the heat-controller to take a current position, current region, and destination region and return a global velocity vector that will help us get there
"""
import __heatControllerHelper as heatControllerHelper
from numpy import *
from __is_inside import is_inside
import time
class motionControlHandler:
def __init__(self, proj, shared_data):
"""
Heat motion planning controller
"""
self.drive_handler = proj.h_instance['drive']
self.pose_handler = proj.h_instance['pose']
self.fwd_coordmap = proj.coordmap_map2lab
self.rfi = proj.rfi
self.last_warning = 0
def gotoRegion(self, current_reg, next_reg, last=False):
"""
If ``last`` is true, we will move to the center of the region.
Returns ``True`` if we are outside the supposed ``current_reg``
"""
if current_reg == next_reg and not last:
# No need to move!
self.drive_handler.setVelocity(0, 0) # So let's stop
return True
controller = self.get_controller(current_reg, next_reg, last)
pose = self.pose_handler.getPose()
[X, DqX, F, inside, J] = controller(mat(pose[0:2]).T)
self.drive_handler.setVelocity(X[0,0], X[1,0], pose[2])
# Transform the region vertices into real coordinates
pointArray = [self.fwd_coordmap(x) for x in self.rfi.regions[next_reg].getPoints()]
vertices = mat(pointArray).T
# Figure out whether we've reached the destination region
if is_inside([pose[0], pose[1]], vertices):
arrived = True
else:
arrived = False
if (arrived != (not inside)) and (time.time()-self.last_warning) > 0.5:
print "WARNING: Left current region but not in expected destination region"
# Figure out what region we think we stumbled into
for r in self.rfi.regions:
pointArray = [self.fwd_coordmap(x) for x in r.getPoints()]
vertices = mat(pointArray).T
if is_inside([pose[0], pose[1]], vertices):
print "I think I'm in " + r.name
print pose
break
self.last_warning = time.time()
return arrived
def get_controller(self, current, next, last, cache={}):
"""
Wrapper for the controller factory, with caching.
"""
# Check to see if we already have an appropriate controller stored in the cache.
# TODO: Account for last in cache
if current in cache and next in cache[current]:
return cache[current][next]
# If not, create a space in the cache to put our new controller.
cache[current] = {}
# Let's go get a controller!
if last:
transFaceIdx = None
else:
# Find a face to go through
# TODO: Account for non-determinacy?
# For now, let's just choose the largest face available, because we are probably using a big clunky robot
# TODO: Why don't we just store this as the index?
transFaceIdx = None
max_magsq = 0
for i, face in enumerate(self.rfi.regions[current_reg].getFaces()):
if face not in self.rfi.transitions[current_reg][next_reg]:
continue
tf_pta, tf_ptb = face
tf_vector = tf_ptb - tf_pta
magsq = (tf_vector.x)**2 + (tf_vector.y)**2
if magsq > max_magsq:
transFaceIdx = i
max_magsq = magsq
if transFaceIdx is None:
print "ERROR: Unable to find transition face between regions %s and %s. Please check the decomposition (try viewing projectname_decomposed.regions in RegionEditor or a text editor)." % (self.rfi.regions[current_reg].name, self.rfi.regions[next_reg].name)
# Transform the region vertices into real coordinates
pointArray = [x for x in self.rfi.regions[current].getPoints()]
pointArray = map(self.fwd_coordmap, pointArray)
vertices = mat(pointArray).T
# Get a controller function
controller = heatControllerHelper.getController(vertices, transFaceIdx, last)
# Cache it in
cache[current][next] = controller
return controller
|
jadecastro/LTLMoP
|
src/lib/handlers/motionControl/heatController.py
|
Python
|
gpl-3.0
| 4,678
|
# Servant configuration: expose the MetaTweet core over a WCF NetTcp
# endpoint at the address below.
Create({
    "Endpoint": "localhost:7784/core",
})
|
takeshik/metatweet-old
|
resource/configuration/WcfNetTcpServant-wcf.conf.py
|
Python
|
gpl-3.0
| 51
|
from contrib.rfc3736.builder import *
from contrib.rfc3736.constants import *
from contrib.rfc3736.dhcpv6 import DHCPv6Helper
from scapy.all import *
from veripy.assertions import *
class DUIDFormatTestCase(DHCPv6Helper):
    """
    DUID Format

    Verify that a client device sends correctly formatted Client ID options.

    @private
    Source: IPv6 Ready DHCPv6 Interoperability Test Suite (Section 7.1.9)
    """

    def run(self):
        request = self.restart_and_wait_for_information_request(self.node(1), self.target(1))
        assertHasLayer(DHCP6OptClientId, request, "expected the DHCPv6 Information Request to have a Client Identifier")
        duid = request[DHCP6OptClientId].duid
        if duid.__class__ == DUID_EN:
            # Enterprise-number DUIDs must have non-zero id and enterprise number.
            assertNotEqual(0, duid.id, "did not expect the DUID ID to be zero")
            assertNotEqual(0, duid.enterprisenum, "did not expect the DUID Enterprise Number to be zero")
        elif duid.__class__ in (DUID_LLT, DUID_LL):
            # Link-layer (with or without time) DUIDs share identical checks,
            # so the two branches are folded into one.
            assertEqual(str(self.target(1).ll_addr()), duid.lladdr, "expected the DUID Link Layer Address to be %s" % (self.target(1).ll_addr()))
            assertTrue(duid.hwtype in range(1,37) or duid.hwtype == 256, "expected the DUID Hardware Type to be 1-37 or 256")
|
mwrlabs/veripy
|
contrib/rfc3736/client/client_dhcp_unique_identifier.py
|
Python
|
gpl-3.0
| 1,578
|
# vi: ts=4 expandtab
#
# Copyright (C) 2011 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Scott Moser <scott.moser@canonical.com>
# Author: Juerg Haefliger <juerg.haefliger@hp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Scripts Per Boot
----------------
**Summary:** run per boot scripts
Any scripts in the ``scripts/per-boot`` directory on the datasource will be run
every time the system boots. Scripts will be run in alphabetical order. This
module does not accept any config keys.
**Internal name:** ``cc_scripts_per_boot``
**Module frequency:** per always
**Supported distros:** all
"""
import os
from cloudinit import util
from cloudinit.settings import PER_ALWAYS
# Per the module docstring: run on every boot, not only the first.
frequency = PER_ALWAYS
# Subdirectory of the datasource's scripts/ directory holding the scripts.
SCRIPT_SUBDIR = 'per-boot'
def handle(name, _cfg, cloud, log, _args):
    """Run every script in the datasource's per-boot scripts directory.

    Behaviour originates from:
    https://forums.aws.amazon.com/thread.jspa?threadID=96918
    """
    scripts_dir = os.path.join(cloud.get_cpath(), 'scripts', SCRIPT_SUBDIR)
    try:
        util.runparts(scripts_dir)
    except Exception:
        # Log which module and directory failed, then propagate the error.
        log.warn("Failed to run module %s (%s in %s)",
                 name, SCRIPT_SUBDIR, scripts_dir)
        raise
|
clovertrail/cloudinit-bis
|
cloudinit/config/cc_scripts_per_boot.py
|
Python
|
gpl-3.0
| 1,760
|
"""
WSGI config for pasteque_admin project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default to the project's settings module unless the environment already
# provides one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pasteque_admin.settings")
application = get_wsgi_application()
|
ScilCoop/pasteque-admin
|
pasteque_admin/pasteque_admin/wsgi.py
|
Python
|
gpl-3.0
| 405
|
import requests
import sys
import hashlib
from notify import notify_message
import os
from subprocess import Popen, PIPE
import time
class Web(object):
    """Session wrapper around a Moodle login URL.

    Provides connectivity checking, authentication and page fingerprinting
    for the monitor daemon.
    """

    def __init__(self, login_url, username, password):
        self.login_url = login_url
        self.username = username
        self.password = password

    def has_internet(self):
        """Return True when the login host answers a single ping."""
        process = Popen(["ping", "-c1", self.login_url.split('/')[2]])
        exit_code = process.wait()
        # ping exit status: 0 = reply received, 1 = no reply, >1 = error.
        # NOTE(review): exit code 1 is treated as "has internet", preserving
        # the original behaviour — confirm this is intended.
        return exit_code <= 1

    def login(self):
        """Log in to Moodle, blocking until an internet connection exists.

        Notifies the user and exits the process when the credentials are
        rejected by the site.
        """
        payload = {
            'username': self.username,
            'password': self.password
        }
        # Idiom fix: "while not ..." instead of comparing against False.
        while not self.has_internet():
            print('waiting for an internet connection...')
            time.sleep(30)
        print('Logging in as {}...'.format(self.username))
        self.session = requests.session()
        response = self.session.post(self.login_url, data=payload)
        # Idiom fix: membership test instead of find() < 0.
        if 'You are logged in' not in response.text:
            notify_message("Moodle Monitor", "Unable to login. Please check your credentials.")
            os.system("python /usr/share/moodle-monitor/settings/main.py")
            sys.exit('ERROR: Unable to login')

    def get_hash(self, url):
        """Return an MD5 fingerprint of the course-body portion of *url*."""
        print('Fetching {}'.format(url))
        # getting the complete page source.
        response = self.session.get(url)
        html = response.text
        # Keep only the markup between the content and sidebar regions so
        # cosmetic changes elsewhere on the page do not change the hash.
        position1 = html.find('region-content')
        position2 = html.find('region-pre')
        html = html[position1:position2]
        return hashlib.md5(html.encode('utf-8')).hexdigest()
|
kumarasinghe/moodle-monitor
|
MoodleMonitor/usr/share/moodle-monitor/moodled/web.py
|
Python
|
gpl-3.0
| 1,599
|
from users.models import DyeusUser
from faker import Faker
fake = Faker()
def create_regular_dyeus_user():
    """Create, persist and return a DyeusUser with faked credentials.

    NOTE(review): the faked password is passed straight to the model;
    whether DyeusUser hashes it on save cannot be determined from here —
    confirm against the model definition.
    """
    new_user = DyeusUser(
        username=fake.user_name(),
        email=fake.email(),
        password=fake.password(),
    )
    new_user.save()
    return new_user
|
piotrb5e3/Dyeus
|
users/tests/factory.py
|
Python
|
gpl-3.0
| 278
|
import re
from collections import defaultdict
from pupa.scrape import Scraper, Bill, VoteEvent
from .util import get_client, get_url, backoff, SESSION_SITE_IDS
# Methods (7):
# GetLegislationDetail(xs:int LegislationId, )
#
# GetLegislationDetailByDescription(ns2:DocumentType DocumentType,
# xs:int Number, xs:int SessionId)
#
# GetLegislationForSession(xs:int SessionId, )
#
# GetLegislationRange(ns2:LegislationIndexRangeSet Range, )
#
# GetLegislationRanges(xs:int SessionId,
# ns2:DocumentType DocumentType, xs:int RangeSize, )
#
# GetLegislationSearchResultsPaged(ns2:LegislationSearchConstraints
# Constraints, xs:int PageSize,
# xs:int StartIndex, )
# GetTitles()
# Cache of member records keyed by member id, shared across scraped bills.
member_cache = {}
SOURCE_URL = "http://www.legis.ga.gov/Legislation/en-US/display/{session}/{bid}"
# Splits names like "SMITH, 42ND" into (name, district ordinal).
vote_name_pattern = re.compile(r"(.*), (\d+(?:ST|ND|RD|TH))", re.IGNORECASE)
class GABillScraper(Scraper):
    """Scrape Georgia General Assembly bills and votes via its SOAP services."""

    # SOAP service proxies and the source URLs recorded on scraped objects.
    lservice = get_client("Legislation").service
    vservice = get_client("Votes").service
    mservice = get_client("Members").service
    lsource = get_url("Legislation")
    msource = get_url("Members")
    vsource = get_url("Votes")

    def get_member(self, member_id):
        """Fetch a member record, memoized in the module-level member_cache."""
        if member_id in member_cache:
            return member_cache[member_id]
        mem = backoff(self.mservice.GetMember, member_id)
        member_cache[member_id] = mem
        return mem

    def scrape(self, session=None, chamber=None):
        """Yield Bill and VoteEvent objects for *session*.

        Falls back to the latest session when none is given.  ``chamber``
        is accepted for the standard scraper signature but is not used to
        filter results here.
        """
        # One/two-letter suffix of a bill prefix -> bill type.
        bill_type_map = {
            "B": "bill",
            "R": "resolution",
            "JR": "joint resolution",
            "CR": "concurrent resolution",
        }
        # First letter of a bill prefix / action code -> chamber.
        chamber_map = {
            "H": "lower",
            "S": "upper",
            "J": "joint",
            "E": "legislature",  # Effective date
        }
        # Site-specific action codes -> openstates action classifications.
        # ``None`` means the action carries no classification.
        action_code_map = {
            "HI": None,
            "SI": None,
            "HH": None,
            "SH": None,
            "HPF": ["introduction"],
            "HDSAS": None,
            "SPF": ["introduction"],
            "HSR": ["reading-2"],
            "SSR": ["reading-2"],
            "HFR": ["reading-1"],
            "SFR": ["reading-1"],
            "HRECM": ["withdrawal", "referral-committee"],
            "SRECM": ["withdrawal", "referral-committee"],
            "SW&C": ["withdrawal", "referral-committee"],
            "HW&C": ["withdrawal", "referral-committee"],
            "HRA": ["passage"],
            "SRA": ["passage"],
            "HPA": ["passage"],
            "HRECO": None,
            "SPA": ["passage"],
            "HTABL": None,  # 'House Tabled' - what is this?
            "SDHAS": None,
            "HCFR": ["committee-passage-favorable"],
            "SCFR": ["committee-passage-favorable"],
            "HRAR": ["referral-committee"],
            "SRAR": ["referral-committee"],
            "STR": ["reading-3"],
            "SAHAS": None,
            "SE": ["passage"],
            "SR": ["referral-committee"],
            "HTRL": ["reading-3", "failure"],
            "HTR": ["reading-3"],
            "S3RLT": ["reading-3", "failure"],
            "HASAS": None,
            "S3RPP": None,
            "STAB": None,
            "SRECO": None,
            "SAPPT": None,
            "HCA": None,
            "HNOM": None,
            "HTT": None,
            "STT": None,
            "SRECP": None,
            "SCRA": None,
            "SNOM": None,
            "S2R": ["reading-2"],
            "H2R": ["reading-2"],
            "SENG": ["passage"],
            "HENG": ["passage"],
            "HPOST": None,
            "HCAP": None,
            "SDSG": ["executive-signature"],
            "SSG": ["executive-receipt"],
            "Signed Gov": ["executive-signature"],
            "HDSG": ["executive-signature"],
            "HSG": ["executive-receipt"],
            "EFF": None,
            "HRP": None,
            "STH": None,
            "HTS": None,
        }
        if not session:
            session = self.latest_session()
            self.info("no session specified, using %s", session)
        sid = SESSION_SITE_IDS[session]
        legislation = backoff(self.lservice.GetLegislationForSession, sid)[
            "LegislationIndex"
        ]
        for leg in legislation:
            lid = leg["Id"]
            instrument = backoff(self.lservice.GetLegislationDetail, lid)
            history = [x for x in instrument["StatusHistory"][0]]
            # Normalize the status history into action dicts; the list is
            # reversed, presumably to put actions in chronological order —
            # TODO confirm against the service's ordering.
            actions = reversed(
                [
                    {
                        "code": x["Code"],
                        "action": x["Description"],
                        "_guid": x["Id"],
                        "date": x["Date"],
                    }
                    for x in history
                ]
            )
            guid = instrument["Id"]
            # A little bit hacky.
            bill_prefix = instrument["DocumentType"]
            bill_chamber = chamber_map[bill_prefix[0]]
            bill_type = bill_type_map[bill_prefix[1:]]
            bill_id = "%s %s" % (bill_prefix, instrument["Number"])
            if instrument["Suffix"]:
                bill_id += instrument["Suffix"]
            title = instrument["Caption"]
            description = instrument["Summary"]
            # Skip instruments without a caption entirely.
            if title is None:
                continue
            bill = Bill(
                bill_id,
                legislative_session=session,
                chamber=bill_chamber,
                title=title,
                classification=bill_type,
            )
            bill.add_abstract(description, note="description")
            bill.extras = {"guid": guid}
            if instrument["Votes"]:
                for vote_ in instrument["Votes"]:
                    # Each entry is a (key, value) pair; only the value is used.
                    _, vote_ = vote_
                    vote_ = backoff(self.vservice.GetVote, vote_[0]["VoteId"])
                    vote = VoteEvent(
                        start_date=vote_["Date"].strftime("%Y-%m-%d"),
                        motion_text=vote_["Caption"] or "Vote on Bill",
                        chamber={"House": "lower", "Senate": "upper"}[vote_["Branch"]],
                        result="pass" if vote_["Yeas"] > vote_["Nays"] else "fail",
                        classification="passage",
                        bill=bill,
                    )
                    vote.set_count("yes", vote_["Yeas"])
                    vote.set_count("no", vote_["Nays"])
                    vote.set_count("other", vote_["Excused"] + vote_["NotVoting"])
                    vote.add_source(self.vsource)
                    methods = {"Yea": "yes", "Nay": "no"}
                    if vote_["Votes"] is not None:
                        for vdetail in vote_["Votes"][0]:
                            whom = vdetail["Member"]
                            how = vdetail["MemberVoted"]
                            # Vacant seats carry no usable member record.
                            if whom["Name"] == "VACANT":
                                continue
                            name, district = vote_name_pattern.search(whom["Name"]).groups()
                            vote.vote(methods.get(how, "other"), name, note=district)
                    yield vote
            # Committees grouped by chamber, used to annotate actions below.
            ccommittees = defaultdict(list)
            committees = instrument["Committees"]
            if committees:
                for committee in committees[0]:
                    ccommittees[
                        {"House": "lower", "Senate": "upper"}[committee["Type"]]
                    ].append(committee["Name"])
            for action in actions:
                action_chamber = chamber_map[action["code"][0]]
                try:
                    action_types = action_code_map[action["code"]]
                except KeyError:
                    # Unknown codes are logged but still recorded unclassified.
                    error_msg = "Code {code} for action {action} not recognized.".format(
                        code=action["code"], action=action["action"]
                    )
                    self.logger.warning(error_msg)
                    action_types = None
                committees = []
                if action_types and any(("committee" in x for x in action_types)):
                    committees = [str(x) for x in ccommittees.get(action_chamber, [])]
                act = bill.add_action(
                    action["action"],
                    action["date"].strftime("%Y-%m-%d"),
                    classification=action_types,
                    chamber=action_chamber,
                )
                for committee in committees:
                    act.add_related_entity(committee, "organization")
                act.extras = {"code": action["code"], "_guid": action["_guid"]} if False else {"code": action["code"], "guid": action["_guid"]}
            sponsors = []
            if instrument["Authors"]:
                sponsors = instrument["Authors"]["Sponsorship"]
            if "Sponsors" in instrument and instrument["Sponsors"]:
                sponsors += instrument["Sponsors"]["Sponsorship"]
            sponsors = [(x["Type"], self.get_member(x["MemberId"])) for x in sponsors]
            for typ, sponsor in sponsors:
                name = "{First} {Last}".format(**dict(sponsor["Name"]))
                bill.add_sponsorship(
                    name,
                    entity_type="person",
                    # "Author" types are primary sponsors.
                    classification="primary" if "Author" in typ else "secondary",
                    primary="Author" in typ,
                )
            for version in instrument["Versions"]["DocumentDescription"]:
                name, url, doc_id, version_id = [
                    version[x] for x in ["Description", "Url", "Id", "Version"]
                ]
                link = bill.add_version_link(name, url, media_type="application/pdf")
                link["extras"] = {
                    "_internal_document_id": doc_id,
                    "_version_id": version_id,
                }
            bill.add_source(self.msource)
            bill.add_source(self.lsource)
            bill.add_source(SOURCE_URL.format(**{"session": session, "bid": guid}))
            yield bill
|
openstates/openstates
|
openstates/ga/bills.py
|
Python
|
gpl-3.0
| 10,170
|
import os
from gpaw import GPAW, restart
from ase import Atoms
from gpaw.test import equal
from gpaw.mpi import world, rank
from math import sqrt
import numpy as np
# Test the reading of wave functions as file references
modes = ['gpw']
# hdf5 output is only exercised when the _hdf5 extension is importable.
try:
    import _hdf5
    modes.append('hdf5')
except ImportError:
    pass
d = 3.0
# Equilateral Na3 cluster in a periodic box.
atoms = Atoms('Na3', positions=[( 0, 0, 0),
                                ( 0, 0, d),
                                ( 0, d*sqrt(3./4.), d/2.)],
              magmoms=[1.0, 1.0, 1.0],
              cell=(3.5, 3.5, 4.+2/3.),
              pbc=True)
# Only a short, non-converged calcuation
conv = {'eigenstates': 1.24, 'energy':2e-1, 'density':1e-1}
calc = GPAW(h=0.30, kpts=(1,1,3),
            nbands=3, convergence=conv)
atoms.set_calculator(calc)
e0 = atoms.get_potential_energy()
# Reference wave function, broadcast to every rank for later comparison.
wf0 = calc.get_pseudo_wave_function(2, 1, 1, broadcast=True)
# Write the restart file(s)
for mode in modes:
    calc.write('tmp.%s' % mode, 'all')
del calc
# Now read with single process
comm = world.new_communicator(np.array((0,)))
if rank == 0:
    for mode in modes:
        calc = GPAW('tmp.%s' % mode, communicator=comm)
        wf1 = calc.get_pseudo_wave_function(2, 1, 1)
        # The serially read wave function must match the parallel reference.
        diff = np.abs(wf0 - wf1)
        assert(np.all(diff < 1e-12))
        os.remove('tmp.%s' % mode)
|
ajylee/gpaw-rtxs
|
gpaw/test/fileio/file_reference.py
|
Python
|
gpl-3.0
| 1,309
|
# Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
    """Frappe report entry point: return (columns, data) for *filters*."""
    columns = get_columns()
    data = get_data(filters, columns)
    return columns, data
def get_columns():
    """Return the report columns in Frappe's "Label:Type:Width" format."""
    columns = [
        _("Item Name") + ":Link/Item:150",
        _("Warehouse") + ":Link/Warehouse:130",
        _("Stock Available") + ":Float:120",
        _("Buying Price List") + ":Data:130",
        _("Buying Rate") + ":Currency:110",
        _("Selling Price List") + ":Data:130",
        _("Selling Rate") + ":Currency:110",
    ]
    return columns
def get_data(filters, columns):
    """Build the report rows for *filters*.

    ``columns`` is unused but kept for the standard report call signature.
    The redundant empty-list initialisation of the original was removed.
    """
    return get_item_price_qty_data(filters)
def get_item_price_qty_data(filters):
    """Return one row per Item Price, joined with stock and buy/sell rates.

    :param filters: report filters; only ``item_code`` is honoured.
    :return: list of row dicts keyed by the report column labels.
    """
    conditions = ""
    if filters.get("item_code"):
        conditions += "where a.item_code=%(item_code)s"
    item_results = frappe.db.sql("""select a.item_code as name,a.name as price_list_name,
        b.warehouse as warehouse,b.actual_qty as actual_qty
        from `tabItem Price` a left join `tabBin` b
        ON a.item_code = b.item_code
        {conditions}"""
        .format(conditions=conditions), filters, as_dict=1)
    # Robustness fix: without this guard an empty result set would produce
    # an invalid "in ()" clause in the price-map queries below.
    if not item_results:
        return []
    price_list_names = ",".join(['"' + item['price_list_name'] + '"' for item in item_results])
    buying_price_map = get_buying_price_map(price_list_names)
    selling_price_map = get_selling_price_map(price_list_names)
    item_dicts = [{"Item Name": d['name'], "Item Price List": d['price_list_name'], "Warehouse": d['warehouse'],
                   "Stock Available": d['actual_qty']} for d in item_results]
    for item_dict in item_dicts:
        price_list = item_dict["Item Price List"]
        item_dict["Warehouse"] = item_dict["Warehouse"] or ""
        item_dict["Stock Available"] = item_dict["Stock Available"] or 0
        if buying_price_map.get(price_list):
            item_dict["Buying Price List"] = buying_price_map.get(price_list)["Buying Price List"] or ""
            item_dict["Buying Rate"] = buying_price_map.get(price_list)["Buying Rate"] or 0
        if selling_price_map.get(price_list):
            item_dict["Selling Price List"] = selling_price_map.get(price_list)["Selling Price List"] or ""
            item_dict["Selling Rate"] = selling_price_map.get(price_list)["Selling Rate"] or 0
    return item_dicts
def get_buying_price_map(price_list_names):
    """Map Item Price names to their buying price list and rate."""
    buying_price = frappe.db.sql("""
        select
            name,price_list,price_list_rate
        from
            `tabItem Price`
        where
            name in ({price_list_names}) and buying=1
        """.format(price_list_names=price_list_names), as_dict=1)
    return {
        row["name"]: {
            "Buying Price List": row["price_list"],
            "Buying Rate": row["price_list_rate"],
        }
        for row in buying_price
    }
def get_selling_price_map(price_list_names):
    """Map Item Price names to their selling price list and rate."""
    selling_price = frappe.db.sql("""
        select
            name,price_list,price_list_rate
        from
            `tabItem Price`
        where
            name in ({price_list_names}) and selling=1
        """.format(price_list_names=price_list_names), as_dict=1)
    return {
        row["name"]: {
            "Selling Price List": row["price_list"],
            "Selling Rate": row["price_list_rate"],
        }
        for row in selling_price
    }
|
rohitwaghchaure/erpnext_develop
|
erpnext/stock/report/item_price_stock/item_price_stock.py
|
Python
|
gpl-3.0
| 3,200
|
import os
import re
import shutil
from tempfile import TemporaryDirectory
import click
from .ingest import ingest_file, ensure_album_dir
@object.__new__
class unpack:
    """The album unpacker dispatcher.

    Decorated with ``object.__new__`` so the module-level name ``unpack``
    is a singleton instance of this class (a callable registry), not the
    class itself.
    """
    # Maps source name -> (unpacker function, infer_album, infer_artist).
    _map = {}

    def register(self, source, *, infer_album, infer_artist):
        # Decorator factory: registers the decorated function as the
        # unpacker for *source* and records whether that unpacker can infer
        # the album / artist names from the paths themselves.
        def _(f):
            self._map[source] = f, infer_album, infer_artist
            return f
        return _

    def __call__(self,
                 source,
                 music_home,
                 conn,
                 album,
                 artist,
                 paths,
                 verbose):
        """Unpack an album.

        Parameters
        ----------
        source : str
            Where did this album come from?
        music_home : str
            The absolute path to the music home directory.
        conn : sa.Connection
            The connection to the metadata db.
        album : str or None
            The album name or None if it should be inferred.
        artist : str or None
            The artist name or None if it should be inferred.
        paths : list[str]
            The paths that make up this album.
        verbose : bool
            Print information about the status of the job.

        Raises
        ------
        ValueError
            Raised when the source has not been registered.
            Raised when album or artist is None and the source cannot infer
            this information.
        """
        try:
            f, infer_album, infer_artist = self._map[source]
        except KeyError:
            raise ValueError('unknown source: %r' % source)
        # Fail fast if inference is required but unsupported by this source.
        if album is None and not infer_album:
            raise ValueError(
                'cannot infer album name for %r sourced paths' % source,
            )
        if artist is None and not infer_artist:
            raise ValueError(
                'cannot infer artist name for %r sourced paths' % source,
            )
        return f(music_home, conn, album, artist, paths, verbose)
@unpack.register('bandcamp', infer_album=True, infer_artist=True)
def _unpack_bandcamp(music_home, conn, album, artist, paths, verbose):
    """Unpacker for bandcamp zipfiles.

    This can only infer the artist or album name if the file is in the form:
    ``'{artist} - {album}.zip'`` which is how it comes from bandcamp.
    """
    from zipfile import ZipFile

    if not paths:
        if verbose:
            click.echo('no albums to unpack')
        return

    try:
        path, = paths
    except ValueError:
        raise ValueError('bandcamp source expects exactly one file')

    if album is None or artist is None:
        filename = os.path.basename(os.path.splitext(path)[0])
        match = re.match(r'(.*) - (.*)', filename)
        if match is None:
            raise ValueError(
                'failed to infer artist or album name from file path %r' %
                path,
            )
        # bandcamp names files '{artist} - {album}.zip'.
        album = album if album is not None else match.group(2)
        artist = artist if artist is not None else match.group(1)

    with ZipFile(path) as zf, TemporaryDirectory() as tmpdir:
        for archivename in zf.namelist():
            if re.match(r'.*\.(jpg|png|pdf)$', archivename):
                # just copy the album/ep cover information
                # Fix: write to a distinct name instead of rebinding ``path``
                # (the zip archive's own path), which the original clobbered.
                cover_path = os.path.join(
                    ensure_album_dir(music_home, album, artist),
                    archivename,
                )
                with open(cover_path, 'wb') as f:
                    shutil.copyfileobj(zf.open(archivename), f)
                continue

            ingest_file(
                music_home=music_home,
                conn=conn,
                path=zf.extract(archivename, path=tmpdir),
                verbose=verbose,
                ignore_failures=False,
            )
@unpack.register('amazon', infer_album=True, infer_artist=True)
def _unpack_amazon(music_home, conn, album, artist, paths, verbose):
from zipfile import ZipFile
if not paths:
if verbose:
click.echo('no albums to unpack')
return
try:
path, = paths
except ValueError:
raise ValueError('bandcamp source expects exactly one file')
if album is None or artist is None:
filename = os.path.basename(os.path.splitext(path)[0])
match = re.match(r'(.*) - (.*)', filename)
if match is None:
raise ValueError(
'failed to infer artist or album name from file path %r' %
path,
)
album = album if album is not None else match.group(1)
artist = artist if artist is not None else match.group(2)
with ZipFile(path) as zf, TemporaryDirectory() as tmpdir:
for archivename in zf.namelist():
ingest_file(
music_home=music_home,
conn=conn,
path=zf.extract(archivename, path=tmpdir),
verbose=verbose,
ignore_failures=False,
)
|
llllllllll/witchcraft
|
witchcraft/unpack.py
|
Python
|
gpl-3.0
| 4,993
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: drop the 'technique' column from feed_setup."""
    # Deleting field 'Setup.technique'
    db.delete_column(u'feed_setup', 'technique')
def backwards(self, orm):
    """Reverse the migration.

    Irreversible: the dropped 'technique' values cannot be restored, so
    this always raises RuntimeError.
    """
    # User chose to not deal with backwards NULL issues for 'Setup.technique'
    raise RuntimeError("Cannot reverse this migration. 'Setup.technique' and its values cannot be restored.")
    # The following code is provided here to aid in writing a correct
    # migration, should a reversal ever be implemented by hand.
    # NOTE: unreachable after the raise above — South leaves this template
    # in place on purpose.
    # Adding field 'Setup.technique'
    db.add_column(u'feed_setup', 'technique',
                  self.gf('django.db.models.fields.IntegerField')(),
                  keep_default=False)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'feed.ageunit': {
'Meta': {'ordering': "['label']", 'object_name': 'AgeUnit'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ageunit_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.anatomicallocation': {
'Meta': {'object_name': 'AnatomicalLocation'},
'category': ('django.db.models.fields.IntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'anatomicallocation_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'ontology_term': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['feed.MuscleOwl']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.animalapprovaltype': {
'Meta': {'object_name': 'AnimalApprovalType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'animalapprovaltype_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.anteriorposterioraxis': {
'Meta': {'object_name': 'AnteriorPosteriorAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'anteriorposterioraxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.behavior': {
'Meta': {'ordering': "['label']", 'object_name': 'Behavior'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'behavior_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.behaviorowl': {
'Meta': {'object_name': 'BehaviorOwl'},
'bfo_part_of_some': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'has_parts'", 'symmetrical': 'False', 'to': u"orm['feed.BehaviorOwl']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'obo_definition': ('django.db.models.fields.TextField', [], {}),
'rdfs_comment': ('django.db.models.fields.TextField', [], {}),
'rdfs_is_class': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'rdfs_subClassOf_ancestors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'has_subClass_descendants'", 'symmetrical': 'False', 'to': u"orm['feed.BehaviorOwl']"}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '1500'})
},
u'feed.channel': {
'Meta': {'object_name': 'Channel'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'channel_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rate': ('django.db.models.fields.IntegerField', [], {}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.channellineup': {
'Meta': {'ordering': "['position']", 'object_name': 'ChannelLineup'},
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Channel']", 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'channellineup_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Session']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.depthaxis': {
'Meta': {'object_name': 'DepthAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'depthaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.developmentstage': {
'Meta': {'ordering': "['label']", 'object_name': 'DevelopmentStage'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'developmentstage_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.dorsalventralaxis': {
'Meta': {'object_name': 'DorsalVentralAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dorsalventralaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.electrodetype': {
'Meta': {'ordering': "['label']", 'object_name': 'ElectrodeType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'electrodetype_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.emgchannel': {
'Meta': {'object_name': 'EmgChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'emg_amplification': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'emg_filtering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Emgfiltering']"}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.EmgSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']"})
},
u'feed.emgfiltering': {
'Meta': {'object_name': 'Emgfiltering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'emgfiltering_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.emgsensor': {
'Meta': {'ordering': "['id']", 'object_name': 'EmgSensor', '_ormbases': [u'feed.Sensor']},
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'electrode_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ElectrodeType']", 'null': 'True', 'blank': 'True'}),
'location_controlled': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnatomicalLocation']"}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MuscleOwl']", 'null': 'True'}),
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.emgsetup': {
'Meta': {'object_name': 'EmgSetup', '_ormbases': [u'feed.Setup']},
'preamplifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.eventchannel': {
'Meta': {'object_name': 'EventChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'feed.eventsetup': {
'Meta': {'object_name': 'EventSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.experiment': {
'Meta': {'object_name': 'Experiment'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiment_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impl_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'subj_age': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subj_ageunit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AgeUnit']", 'null': 'True', 'blank': 'True'}),
'subj_devstage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DevelopmentStage']"}),
'subj_tooth': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_weight': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Subject']"}),
'subject_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.feeduserprofile': {
'Meta': {'object_name': 'FeedUserProfile'},
'institutional_affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.forcechannel': {
'Meta': {'object_name': 'ForceChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ForceSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.forcesensor': {
'Meta': {'object_name': 'ForceSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.forcesetup': {
'Meta': {'object_name': 'ForceSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.illustration': {
'Meta': {'object_name': 'Illustration'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'illustration_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']", 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Subject']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.kinematicschannel': {
'Meta': {'object_name': 'KinematicsChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.KinematicsSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.kinematicssensor': {
'Meta': {'object_name': 'KinematicsSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.kinematicssetup': {
'Meta': {'object_name': 'KinematicsSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.mediallateralaxis': {
'Meta': {'object_name': 'MedialLateralAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'mediallateralaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.muscleowl': {
'Meta': {'object_name': 'MuscleOwl'},
'bfo_part_of_some': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'has_parts'", 'symmetrical': 'False', 'to': u"orm['feed.MuscleOwl']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'obo_definition': ('django.db.models.fields.TextField', [], {}),
'rdfs_comment': ('django.db.models.fields.TextField', [], {}),
'rdfs_is_class': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'rdfs_subClassOf_ancestors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'has_subClass_descendants'", 'symmetrical': 'False', 'to': u"orm['feed.MuscleOwl']"}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '1500'})
},
u'feed.pressurechannel': {
'Meta': {'object_name': 'PressureChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.PressureSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.pressuresensor': {
'Meta': {'object_name': 'PressureSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.pressuresetup': {
'Meta': {'object_name': 'PressureSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.proximaldistalaxis': {
'Meta': {'object_name': 'ProximalDistalAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'proximaldistalaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.restraint': {
'Meta': {'ordering': "['label']", 'object_name': 'Restraint'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'restraint_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.sensor': {
'Meta': {'object_name': 'Sensor'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sensor_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loc_ap': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnteriorPosteriorAxis']", 'null': 'True', 'blank': 'True'}),
'loc_dv': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DorsalVentralAxis']", 'null': 'True', 'blank': 'True'}),
'loc_ml': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MedialLateralAxis']", 'null': 'True', 'blank': 'True'}),
'loc_pd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ProximalDistalAxis']", 'null': 'True', 'blank': 'True'}),
'loc_side': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Side']"}),
'location_freetext': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.session': {
'Meta': {'ordering': "['position']", 'object_name': 'Session'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['feed.Channel']", 'through': u"orm['feed.ChannelLineup']", 'symmetrical': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'session_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'subj_anesthesia_sedation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_restraint': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Restraint']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.setup': {
'Meta': {'object_name': 'Setup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'setup_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sampling_rate': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.side': {
'Meta': {'ordering': "['label']", 'object_name': 'Side'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'side_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.sonochannel': {
'Meta': {'object_name': 'SonoChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'crystal1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals1_related'", 'to': u"orm['feed.SonoSensor']"}),
'crystal2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals2_related'", 'to': u"orm['feed.SonoSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']"})
},
u'feed.sonosensor': {
'Meta': {'object_name': 'SonoSensor', '_ormbases': [u'feed.Sensor']},
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'location_controlled': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnatomicalLocation']"}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MuscleOwl']", 'null': 'True'}),
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.sonosetup': {
'Meta': {'object_name': 'SonoSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'}),
'sonomicrometer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'feed.strainchannel': {
'Meta': {'object_name': 'StrainChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.StrainSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.strainsensor': {
'Meta': {'object_name': 'StrainSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.strainsetup': {
'Meta': {'object_name': 'StrainSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.study': {
'Meta': {'ordering': "['title']", 'object_name': 'Study'},
'approval': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'approval_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnimalApprovalType']", 'null': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'study_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'funding': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'funding_agency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lab': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pi': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'resources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.subject': {
'Meta': {'object_name': 'Subject'},
'breed': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subject_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'taxon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Taxon']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.taxon': {
'Meta': {'ordering': "['genus']", 'object_name': 'Taxon'},
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taxon_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'genus': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'species': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.trial': {
'Meta': {'object_name': 'Trial'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'behavior_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'behavior_primary': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Behavior']", 'null': 'True'}),
'behavior_secondary': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'behaviorowl_primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_in_trials'", 'null': 'True', 'to': u"orm['feed.BehaviorOwl']"}),
'behaviorowl_secondary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'secondary_in_trials'", 'null': 'True', 'to': u"orm['feed.BehaviorOwl']"}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'trial_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'data_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']"}),
'food_property': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_size': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Session']"}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_treatment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {}),
'waveform_picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'feed.unit': {
'Meta': {'ordering': "['technique', 'label']", 'object_name': 'Unit'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'unit_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'technique': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
}
}
complete_apps = ['feed']
|
Squishymedia/feedingdb
|
src/feeddb/feed/migrations/0083_auto__del_field_setup_technique.py
|
Python
|
gpl-3.0
| 41,413
|
#!/usr/bin/env python
# Author: Prathamesh Shivade
# A quick (read 15 min) BFS web crawler written for a college assignment
from collections import deque
from bs4 import BeautifulSoup
from urlparse import urljoin
import sys
import urllib2
# Read URL from command line
# NOTE(review): assumes exactly one argument; sys.argv[1] raises IndexError
# when the script is run without a URL.
url = sys.argv[1]
print "==================="
print "Page to be crawled:", url
print "==================="
print
# Create queue of URLs still to be crawled (the BFS frontier)
queue = deque([])
# Maintains list of visited pages
visited_list = []
# Crawl the page and populate the queue with newly found URLs
def crawl(url):
    """Breadth-first crawl starting at *url*.

    Fetches the page, resolves every anchor's href to an absolute URL and
    appends unseen URLs to the module-level ``queue`` until it holds 100
    entries; every page actually fetched is recorded in ``visited_list``.
    Recurses on the next queued URL (BFS order).
    """
    visited_list.append(url)
    # Stop once the queue has been populated with 100 URLs.
    if len(queue) > 99:
        return
    urlf = urllib2.urlopen(url)
    soup = BeautifulSoup(urlf.read())
    # Consider every anchor tag that carries an href attribute.
    for anchor in soup.findAll("a", href=True):
        # Complete relative URLs and strip the trailing slash so that
        # duplicates compare equal.
        complete_url = urljoin(url, anchor["href"]).rstrip('/')
        # O(1)-ish membership tests replace the original manual flag loop
        # and visited_list.count() scan.
        if complete_url in queue:
            continue
        if len(queue) > 99:
            return
        if complete_url not in visited_list:
            queue.append(complete_url)
    # Bug fix: a page with no new outgoing links left the queue empty and
    # popleft() raised IndexError, crashing the whole crawl.
    if not queue:
        return
    # Pop one URL from the left so that it can be crawled next (BFS).
    current = queue.popleft()
    crawl(current)
# Kick off the breadth-first crawl from the seed URL.
crawl(url)
# Print queue (URLs discovered but not yet crawled)
for i in queue:
    print i
print
print "=============="
print "Pages crawled:"
print "=============="
print
# Print list of visited pages (URLs that were actually fetched)
for i in visited_list:
    print i
|
matharp/bfs-crawler
|
web-crawler.py
|
Python
|
gpl-3.0
| 1,659
|
# -*- coding: utf-8 -*-
import os, pickle, sys, datetime, random
from core.message_factory import MsgState, Message, MessageFactory
import uuid
from PySide import QtGui
from PySide import QtCore
from PySide.QtCore import *
from PySide.QtGui import *
from gui.notes import SolidNote
from gui.utils import pea_app
from gui.windows import SettingsWindow
from gui.utils import STYLES
from core.filters import FilterQueue
trUtf8 = QObject.trUtf8
class TrayIcon(QSystemTrayIcon):
def __init__(self, mainGui):
QSystemTrayIcon.__init__(self)
self.mainGui = mainGui
icon = pea_app().tray_icon
self.setIcon(icon)
self.activated.connect(self.handleActivation)
self.show()
# --- MENU ---
self.menu = QMenu(QApplication.desktop())
# TODO: ikonki dla pozycji w menu
self.actQuit = QAction(u"&Quit", self.menu)
self.menu.addAction(self.actQuit)
self.actQuit.triggered.connect(mainGui.closeApplication)
self.actSettings = QAction(u"&Settings", self.menu)
self.menu.addAction(self.actSettings)
self.actSettings.triggered.connect(mainGui.showSettings)
self.actHideNotes = QAction(u"&Hide notes", self.menu)
self.menu.addAction(self.actHideNotes)
self.actHideNotes.triggered.connect(mainGui.hideNotes)
self.actShowNotes = QAction(u"&Show notes", self.menu)
self.menu.addAction(self.actShowNotes)
self.actShowNotes.triggered.connect(mainGui.showNotes)
self.actNewNote = QAction(u"&New note", self.menu)
self.menu.addAction(self.actNewNote)
self.actNewNote.triggered.connect(mainGui.newNote)
self.setContextMenu(self.menu)
@Slot(QSystemTrayIcon.ActivationReason)
def handleActivation(self, reason):
global NOTES
global NOTE_ID
if reason == QSystemTrayIcon.DoubleClick:
print "Double"
elif reason == QSystemTrayIcon.Trigger: # lewy przycisk
self.mainGui.showNotes()
# elif reason == QSystemTrayIcon.Context: # prawy przycisk
# print 'bye!'
# sys.exit(0)
# # TODO: menu
elif reason == QSystemTrayIcon.MiddleClick: # środkowy przycisk
self.mainGui.newNote()
class LocalSettings(object):
    """Locally persisted GUI state; intended to be pickled to disk."""
    def __init__(self):
        self.notes = {} # msgId -> note
class MainGui(QObject):
# SETTINGS_PATH = ".config"
def __init__(self, client):
self.client = client
self.settingsWindow = SettingsWindow(self)
self.settingsWindow.hide()
QTextCodec.setCodecForTr(QTextCodec.codecForName("UTF-8"))
# połączenia ->
# self.client.loggedIn.connect(self.handleLoginState)
# TODO: dodano notatkę
# TODO: zmieniono zawartość skrzynki
# <-
# self.localSettings = self.loadSettings()
# self.loginWindow = LoginWindow()
# self.loginWindow.show()
#
# self.loginWindow.formSubmitted.connect(self.handleLoginForm)
self.trayIcon = TrayIcon(self)
self.localMessagesIds = []
self.allNotes = {}
self.handleUpdateMessageBox()
self.client.boxUpdated.connect(self.handleUpdateMessageBox)
# TODO:
self.__knownUsersSet__ = set(['kuba', 'marek', 'piotrek'])
self.updateNotes()
# @Slot(str, str)
# def handleLoginForm(self, user, password):
# self.client.login(user, password)
# @Slot(LoginState)
# def handleLoginState(self, loginState):
# if loginState == LoginState.OK:
# print "logged in successfully!"
# # TODO: raczej powinno reagować na powiadomienie od klienta
# self.handleUpdateMessageBox()
# else:
# print "login failed!"
def userName(self):
return self.client.user_name
# TODO:
def knownUsers(self):
return list(self.__knownUsersSet__)
def addKnownUser(self, username):
self.__knownUsersSet__.add(username)
@Slot()
def handleUpdateMessageBox(self):
# TODO: zmienić na obliczanie różnicy zbiorów
# NOTICE, WARNING!
# for note in self.allNotes.values():
# note.close()
for mid, msg in self.client.getMsgAll().items():
if mid not in self.allNotes.keys():
# utworzenie nowej notatki
if not msg.state == MsgState.DELETED:
self.allNotes[mid] = SolidNote(msg, self)
else:
print 'debug: already exists: %s' % str(mid)
# tylko ew. zmiana stanu
if msg.state == MsgState.DELETED:
print 'state DELETED'
self.allNotes[mid].close()
del self.allNotes[mid]
else:
self.allNotes[mid].setMessageState(msg.state)
self.updateNotes() # FIXME: ...
for note in self.allNotes.values(): note.show()
@Slot()
def closeApplication(self):
# self.saveSettings(self.localSettings) # TODO: settings
self.settingsWindow.persistData(self.settingsWindow.filters)
self.client.stopClient()
QApplication.quit()
# TODO: ikona w trayu nie znika
@Slot()
def showSettings(self):
self.settingsWindow.show()
@Slot()
def hideNotes(self):
for note in self.allNotes.values():
note.hide()
@Slot()
def showNotes(self):
for note in self.allNotes.values():
note.show()
note.raise_()
note.activateWindow()
self.updateNotes()
@Slot()
def newNote(self):
# TODO: domyślna data ważności, możliwość zmiany daty ważności
# do domyslnej daty waznosci mozna wykorzystac MessageFactory, pozniej mozemy podpiac do fabryki wstrzykiwanie domyslnych ustawien
messageFactory = MessageFactory()
messageFactory.set_sender(self.userName())
messageFactory.set_recipients([])
messageFactory.set_expiredate_policy(MessageFactory.POLICY_EXPIREDATE_DAYS)
messageFactory.set_days_to_expire(31)
messageFactory.set_state(MsgState.GUI)
messageFactory.set_content('')
m = messageFactory.build()
nnote = SolidNote(m, self)
self.allNotes[m.msg_uuid] = nnote
# HACK TODO
self.handleUpdateMessageBox()
# TODO: niepotrzebne, lepiej jakiś refresh
#self.client.addMsg(m) # TODO: addMsg emituje zmianę zawartości
# self.handleUpdateMessageBox()
def updateNotes(self):
fq = FilterQueue()
for f in self.settingsWindow.filters.values():
fq.add_filter(f[0], f[1]) # FIXME: !
for note in self.allNotes.values():
try:
style = STYLES[fq.get_first_matching(note.__message__)]
except KeyError:
style = STYLES['yellow']
note.setStyleSheet(style)
# def loadSettings(self):
# 'TODO: dodać domyślne'
# with open(self.SETTINGS_PATH, "rb") as f:
# try:
# s = pickle.load(f)
# except Exception as e:
# print 'settings file not found or corrupted - creating new configuration in %s' % self.SETTINGS_PATH
# s = LocalSettings()
# self.saveSettings(LocalSettings())
#
# return s
# def saveSettings(self, settings):
# with open(self.SETTINGS_PATH, "wb") as f:
# pickle.dump(settings, f)
|
kliput/peanotes
|
gui/main_gui.py
|
Python
|
gpl-3.0
| 7,981
|
#!/usr/bin/env python
import sys
import os
sys.path.append(os.path.abspath("/Projects/OpenStackAdapter"))
import novaclient.v2.client as nvclient
from Logger import VMLogger
import Auth
class OpenStackAdapterVMStatus(object):
_objLogs = VMLogger()
def GetServerStatus(self, UserName, CustomerName, ServerId):
try:
objAuth = Auth.OpenStackAdapterAuth()
novacreds = objAuth.get_nova_neutron_credentials("nova", UserName, CustomerName)
objnova = nvclient.Client(**novacreds)
print UserName
print CustomerName
servers_status = objnova.servers.get(ServerId)
message = {'Status': True,'ServerStatus':servers_status.status}
return message
except Exception, e:
message = {'Status': 'Error', 'Type': type(e).__name__, 'Message': e.args}
return message
#objTest = OpenStackAdapterVMStatus()
#print objTest.GetServerStatus('turki89','Turki_45','f1ea08f5-9bef-4337-a8a0-d4c217619095')
|
omermahgoub/MigTool
|
common/VMStatus.py
|
Python
|
gpl-3.0
| 1,061
|
#!/usr/bin/env python
#encoding:utf-8
''' test_pyhello.py -- unit testing of pyhello program
Copyright 2015 Ing María Andrea Vignau <mavignau@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. '''
from pyhello import *
import unittest
import StringIO
import gettext
# Needed to compare translated strings correctly: bind _ to the same
# 'pyhello' catalog the program under test uses (fallback=True returns the
# untranslated string when no catalog is installed).
t = gettext.translation('pyhello', 'locale', fallback=True)
_ = t.ugettext
class PyHelloTest(unittest.TestCase):
    """Exercises pyhello's main() across its command-line options."""

    def _run(self, argv):
        # Build StringIO substitutes for stdout/stderr, invoke main() and
        # hand the captured streams back to the caller.
        streams = [StringIO.StringIO(), StringIO.StringIO()]
        main(argv, streams)
        return streams

    def testSimple(self):
        """Default greeting when no arguments are given."""
        streams = self._run([])
        self.assertEqual(streams[0].getvalue(), _("Hello, world!") + "\n")

    def testFail(self):
        """An unknown option makes the program exit."""
        streams = [StringIO.StringIO(), StringIO.StringIO()]
        with self.assertRaises(SystemExit):
            main(['-i'], streams)

    def testTraditional(self):
        """The -t flag selects the traditional greeting."""
        streams = self._run(['-t'])
        self.assertEqual(streams[0].getvalue(), _("hello, world") + "\n")

    def testCustom(self):
        """The -g flag substitutes a custom greeting message."""
        streams = self._run(['-gbuenas tardes, profesor'])
        self.assertEqual(streams[0].getvalue(), "buenas tardes, profesor\n")
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
UniversidadDelEste/hello
|
MAVignau/src/test_pyHello.py
|
Python
|
gpl-3.0
| 1,960
|