| repo_name (string) | path (string) | copies (string) | size (string) | content (string) | license (string) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) | ratio (float64) | config_test (bool) | has_no_keywords (bool) | few_assignments (bool) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
lmazuel/azure-sdk-for-python
|
azure-batch/azure/batch/models/application_get_options.py
|
1
|
1743
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGetOptions(Model):
"""Additional parameters for get operation.
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
"""
def __init__(self, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None):
super(ApplicationGetOptions, self).__init__()
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
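# Illustrative usage (not part of the generated file): construct the options
# and pass them to the corresponding get operation, e.g.
# options = ApplicationGetOptions(timeout=60, return_client_request_id=True)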
|
mit
| -6,436,945,429,809,833,000
| 43.692308
| 106
| 0.659208
| false
| 4.2
| false
| false
| false
|
Vgr255/Wolfbot
|
settings/wolfgame.py
|
1
|
23664
|
import botconfig
#####################################################################################
PING_WAIT = 300 # seconds #
PING_MIN_WAIT = 30 # minimum time allowed between the first !join and a !ping #
MINIMUM_WAIT = 60 # amount of time the players must wait between the first !join and !start #
EXTRA_WAIT = 20 # amount of time !wait adds before starting the game #
EXTRA_WAIT_JOIN = 0 # Add this many seconds to the waiting time for each !join (unused yet) #
WAIT_AFTER_JOIN = 10 # Wait at least this many seconds after the last join (not yet implemented) #
MAXIMUM_WAITED = 3 # limit for amount of !wait's #
STATS_RATE_LIMIT = 15 # time between two !stats #
VOTES_RATE_LIMIT = 15 # time between two !votes #
ADMINS_RATE_LIMIT = 300 # time between two !admins #
GSTATS_RATE_LIMIT = 0 # time to wait for each !gamestats #
PSTATS_RATE_LIMIT = 0 # time to wait for each !player #
TIME_RATE_LIMIT = 30 # time to wait for each !time #
SHOTS_MULTIPLIER = .16 # ceil(shots_multiplier * len_players) = bullets given #
MOLOTOV_AMOUNT = .11 # ceil(molotov_amount * len_players) = molotovs given #
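# e.g. with 10 players: ceil(.16 * 10) = 2 bullets, ceil(.11 * 10) = 2 molotovs #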
MIN_PLAYERS = 4 # minimum amount of players needed to start a game #
MAX_PLAYERS = 30 # maximum amount of players allowed #
DRUNK_SHOTS_MULTIPLIER = 3 # drunk gets more bullets #
DRUNK_FIRE_MULTIPLIER = 5 # drunk gets way more molotovs. but he can die as easily #
NIGHT_TIME_WARN = 90 # should be less than NIGHT_TIME_LIMIT #
NIGHT_TIME_LIMIT = 120 # night ends after x seconds (default is 120) #
DAY_TIME_LIMIT_WARN = 480 # warns before the day changes #
DAY_TIME_LIMIT_CHANGE = 120 # seconds after DAY_TIME_LIMIT_WARN has passed #
JOIN_TIME_LIMIT = 1800 # amount of time (in seconds) before game is cancelled after first join #
SHORT_DAY_PLAYERS = 6 # Number of players left to have a short day #
SHORT_DAY_LIMIT_WARN = 180 # same as above, except for small days. only set if above is also set #
SHORT_DAY_LIMIT_CHANGE = 120 # same as above, except for small days #
START_WITH_DAY = False # does the game start with day? #
WOLF_STEALS_GUN = True # if True, gun will be handed to a random wolf/traitor/werecrow when gunner dies #
WOLF_STEALS_FIRE = True # same, but for the arsonist instead #
KILL_IDLE_TIME = 300 # amount of seconds before the player is removed from the game #
WARN_IDLE_TIME = 180 # warns after x seconds, before the player is removed from the game #
PART_GRACE_TIME = 30 # amount of seconds the bot waits before removing when user /parts #
QUIT_GRACE_TIME = 30 # amount of seconds the bot waits before removing when user /quits #
MIN_LOG_PLAYERS = 12 # number of players needed to disable logging (reducing lag) #
MAX_PRIVMSG_TARGETS = 1 # better not touch that... #
LEAVE_STASIS_PENALTY = 0 # number of games user is not allowed to join if they !leave #
IDLE_STASIS_PENALTY = 0 # same, if they idle out #
PART_STASIS_PENALTY = 0 # same but for /part instead #
SELF_LYNCH_ALLOWED = True # can you lynch yourself? #
GOAT_HERDER = True # new role? not sure #
HIDDEN_TRAITOR = True # if True, a dead traitor is revealed as a villager (see get_reveal_role) #
CANT_KILL_TRAITOR = True # Can the wolves kill the traitor? #
CARE_BOLD = False # determines if the bot cares about bolds in channel #
CARE_COLOR = False # same, except for color #
KILL_COLOR = False # does the bot kick you for using color #
KILL_BOLD = False # same, but for bold #
CARE_ADVERTISING = False # warns on any message containing a '#' (advertising, hashtags, etc.) #
KILL_ADVERTISING = False # kicks on advertising #
EXEMPT_ADMINS = True # doesn't kick admins/owners #
BAN_AFTER_KICKS = True # decide whether user will be banned/quieted after being kicked #
TIME_BEFORE_UNSET = 30 # amount of time (in seconds) before user is un-banned/quieted #
BAN_TYPE = "q" # should be either q or b (WITHOUT the +) to decide between ban or quiet #
AUTO_OP_FLAG = True # used to decide whether the bot will send /msg ChanServ op on connect #
AUTO_OP_FAIL = False # if set to True it will send an error to the channel upon connecting #
RAW_JOIN = True # allow to join other chans than the default one #
LOG_CHAN = False # logs activity in botconfig.ADMIN_CHAN #
LOG_AUTO_TOGGLE = True # automatically disables logging if there are too many players #
AUTO_LOG_TOGGLE = False # automatically toggle logging when an admin gets in the admin_chan #
MINIMALIST_LOG = True # only displays sensible commands. only useful if LOG_CHAN = False #
EXT_PING = "" # external pinging in the special channel. leave blank to disable it #
MAX_ERRORS = 4 # max amount of errors that can happen before the bot quits #
USE_IDENT = False # defines if should use ident along with host for !ping and similar #
ALLOW_GIT = True # joins the development channel and automatically fetches commits #
AUTO_OP_DEOP = True # determines if bot ops and deops chanops on start and endgame #
#####################################################################################
LOG_FILENAME = ""
BARE_LOG_FILENAME = ""
# HIT MISS SUICIDE
GUN_CHANCES = ( 5/7 , 1/7 , 1/7 )
DRUNK_GUN_CHANCES = ( 3/7 , 3/7 , 1/7 )
MANSLAUGHTER_CHANCE = 1/5 # ACCIDENTAL HEADSHOT (FATAL)
# SUCCESS MISS SUICIDE
FIRE_CHANCES = ( 3/7 , 3/7 , 1/7 )
DRUNK_FIRE_CHANCES = ( 2/7 , 2/7 , 3/7 )
GUNNER_KILLS_WOLF_AT_NIGHT_CHANCE = 7/10
PYRO_KILLS_WOLF_AT_NIGHT_CHANCE = 4/5
GUARDIAN_ANGEL_DIES_CHANCE = 1/2
DETECTIVE_REVEALED_CHANCE = 2/5
#########################################################################################################################
# ROLE INDEX: PLAYERS SEER WOLF CURSED DRUNK HARLOT TRAITOR GUNNER CROW ANGEL DETECTIVE PYRO ##
#########################################################################################################################
ROLES_GUIDE = { 4 : ( 0 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
5 : ( 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
6 : ( 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
7 : ( 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
8 : ( 1 , 2 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
9 : ( 1 , 2 , 1 , 1 , 1 , 0 , 0 , 0 , 0 , 0 , 0 ), ##
10 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 ), ##
11 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 0 , 0 , 0 , 0 ), ##
12 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 ), ##
13 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 , 0 ), ##
14 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
15 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
16 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
17 : ( 1 , 2 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
18 : ( 1 , 3 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
19 : ( 1 , 3 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
20 : ( 1 , 3 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
21 : ( 2 , 4 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
22 : ( 2 , 4 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
23 : ( 2 , 4 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
24 : ( 2 , 4 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
25 : ( 2 , 4 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 1 , 0 ), ##
26 : ( 2 , 5 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 0 ), ##
27 : ( 2 , 5 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 0 ), ##
28 : ( 2 , 5 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 0 ), ##
29 : ( 2 , 5 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 0 ), ##
30 : ( 2 , 5 , 1 , 1 , 1 , 1 , 2 , 1 , 1 , 1 , 0 ), ##
None : ( 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 )} ##
#########################################################################################################################
# Notes: It is not needed to have a line for every combination, but it helps when you want to tweak a bit ##
# Notes: If one line is not specified (aka left out, doesn't appear) it will consider the next lower one ##
#########################################################################################################################
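# A minimal sketch (not part of the original bot) of the fallback described in
# the notes above: walk down from the requested player count to the nearest
# lower count that has an entry.
def _roles_guide_lookup(count):
    while count not in ROLES_GUIDE and count > MIN_PLAYERS:
        count -= 1
    return ROLES_GUIDE.get(count, ROLES_GUIDE[None])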
GAME_MODES = {}
AWAY = [] # cloaks of people who are away.
PING_IN = [] # cloaks of people who used !in to get in the ping list. works only with botconfig.REVERSE_PING set to True
SIMPLE_NOTIFY = [] # cloaks of people who !simple, who want everything /notice'd
ROLE_INDICES = {0 : "seer",
1 : "wolf",
2 : "cursed villager",
3 : "village drunk",
4 : "harlot",
5 : "traitor",
6 : "gunner",
7 : "werecrow",
8 : "guardian angel",
9 : "detective",
10: "arsonist"}
INDEX_OF_ROLE = dict((v,k) for k,v in ROLE_INDICES.items())
NO_VICTIMS_MESSAGES = ("The body of a young penguin pet is found.",
"A pool of blood and wolf paw prints are found.",
"Traces of wolf fur are found.")
LYNCH_MESSAGES = ("The villagers, after much debate, finally decide on lynching \u0002{0}\u0002, who turned out to be... a \u0002{1}\u0002.",
"Under a lot of noise, the pitchfork-bearing villagers lynch \u0002{0}\u0002, who turned out to be... a \u0002{1}\u0002.",
"The villagers drag the poor \u0002{0}\u0002 to the tree on the edge of the village. The poor guy was... a \u0002{1}\u0002.",
"The mob drags a protesting \u0002{0}\u0002 to the hanging tree. S/He succumbs to the will of the horde, and is hanged. It is discovered (s)he was a \u0002{1}\u0002.",
"Resigned to his/her fate, \u0002{0}\u0002 is led to the gallows. After death, it is discovered (s)he was a \u0002{1}\u0002.")
RULES = (botconfig.CHANNEL + " channel rules:\n"+
"1) Do not share information after death. "+
"2) Do not play with bots or clones. "+
"3) Do not quit unless you need to leave. "+
"4) Do not paste messages from the bot during the game. "+
"5) Do not ping people unless they have played recently.\n"+
"6) Do not advertise another channel or network. "+
"7) Do not take advantage of a player timing out. "+
"8) Using anti-idle messages or /whois idle times \u0002IS\u0002 cheating. "+
"9) If you are unsure whether you can do something or not, ask an operator. "+
"10) Channel and bot operators have the final word.")
is_role = lambda plyr, rol: rol in ROLES and plyr in ROLES[rol]
def plural(role):
if role == "wolf": return "wolves"
elif role == "person": return "people"
else: return role + "s"
def list_players():
    pl = []
    for x in ROLES.values():
        for p in x:
            # burned players' roles still appear, but they mustn't be marked as alive
            if p in BURNED:
                continue
            pl.append(p)
    return pl
def list_players_and_roles():
plr = {}
for x in ROLES.keys():
for p in ROLES[x]:
plr[p] = x
return plr
def get_reveal_role(nick):
if HIDDEN_TRAITOR and get_role(nick) == "traitor":
return "villager"
else:
return get_role(nick)
get_role = lambda plyr: list_players_and_roles()[plyr]
def del_player(pname):
prole = get_role(pname)
ROLES[prole].remove(pname)
class InvalidModeException(Exception): pass
def game_mode(name):
def decor(c):
GAME_MODES[name] = c
return c
return decor
CHANGEABLE_ROLES = { "seers" : INDEX_OF_ROLE["seer"],
"wolves" : INDEX_OF_ROLE["wolf"],
"cursed" : INDEX_OF_ROLE["cursed villager"],
"drunks" : INDEX_OF_ROLE["village drunk"],
"harlots" : INDEX_OF_ROLE["harlot"],
"traitors" : INDEX_OF_ROLE["traitor"],
"gunners" : INDEX_OF_ROLE["gunner"],
"werecrows" : INDEX_OF_ROLE["werecrow"],
"angels" : INDEX_OF_ROLE["guardian angel"],
"detectives" : INDEX_OF_ROLE["detective"],
"arsonists" : INDEX_OF_ROLE["arsonist"]}
# TODO: implement game modes
@game_mode("roles")
class ChangedRolesMode(object):
"""Example: !fgame roles=wolves:1,seers:0,angels:1"""
def __init__(self, arg):
self.ROLES_GUIDE = ROLES_GUIDE.copy()
lx = list(ROLES_GUIDE[None])
pairs = arg.split(",")
pl = list_players()
if not pairs:
raise InvalidModeException("Invalid syntax for mode roles.")
for pair in pairs:
change = pair.split(":")
if len(change) != 2:
raise InvalidModeException("Invalid syntax for mode roles.")
role, num = change
try:
num = int(num)
try:
lx[CHANGEABLE_ROLES[role.lower()]] = num
except KeyError:
raise InvalidModeException(("The role \u0002{0}\u0002 "+
"is not valid.").format(role))
except ValueError:
raise InvalidModeException("A bad value was used in mode roles.")
for k in ROLES_GUIDE.keys():
self.ROLES_GUIDE[k] = tuple(lx)
# Persistence
# Load saved settings
import sqlite3
import os
conn = sqlite3.connect("data.sqlite3", check_same_thread = False)
with conn:
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS away (nick TEXT)') # whoops, i mean cloak, not nick
c.execute('CREATE TABLE IF NOT EXISTS simple_role_notify (cloak TEXT)') # people who understand each role
c.execute('SELECT * FROM away')
for row in c:
AWAY.append(row[0])
c.execute('SELECT * FROM simple_role_notify')
for row in c:
SIMPLE_NOTIFY.append(row[0])
# populate the roles table
c.execute('DROP TABLE IF EXISTS roles')
c.execute('CREATE TABLE roles (id INTEGER PRIMARY KEY AUTOINCREMENT, role TEXT)')
for x in ["villager"]+list(ROLE_INDICES.values()):
c.execute("INSERT OR REPLACE INTO roles (role) VALUES (?)", (x,))
c.execute(('CREATE TABLE IF NOT EXISTS rolestats (player TEXT, role TEXT, '+
'teamwins SMALLINT, individualwins SMALLINT, totalgames SMALLINT, '+
'UNIQUE(player, role))'))
c.execute(('CREATE TABLE IF NOT EXISTS gamestats (size SMALLINT, villagewins SMALLINT, ' +
'wolfwins SMALLINT, totalgames SMALLINT, UNIQUE(size))'))
#def remove_away(clk):
# with conn:
# c.execute('DELETE from away where nick=?', (clk,))
#def add_away(clk):
# with conn:
# c.execute('INSERT into away VALUES (?)', (clk,))
#def add_ping(clk):
# with conn:
# c.execute('INSERT into ping VALUES (?)', (clk,))
#def remove_ping(clk):
# with conn:
# c.execute('DELETE from ping where nick=?', (clk,))
#def remove_simple_rolemsg(clk):
# with conn:
# c.execute('DELETE from simple_role_notify where cloak=?', (clk,))
#def add_simple_rolemsg(clk):
# with conn:
# c.execute('INSERT into simple_role_notify VALUES (?)', (clk,))
def update_role_stats(acc, role, won, iwon):
with conn:
wins, iwins, total = 0, 0, 0
c.execute(("SELECT teamwins, individualwins, totalgames FROM rolestats "+
"WHERE player=? AND role=?"), (acc, role))
row = c.fetchone()
if row:
wins, iwins, total = row
if won:
wins += 1
if iwon:
iwins += 1
total += 1
c.execute("INSERT OR REPLACE INTO rolestats VALUES (?,?,?,?,?)",
(acc, role, wins, iwins, total))
def update_game_stats(size, winner):
with conn:
vwins, wwins, total = 0, 0, 0
c.execute("SELECT villagewins, wolfwins, totalgames FROM gamestats "+
"WHERE size=?", (size,))
row = c.fetchone()
if row:
vwins, wwins, total = row
if winner == "wolves":
wwins += 1
elif winner == "villagers":
vwins += 1
total += 1
c.execute("INSERT OR REPLACE INTO gamestats VALUES (?,?,?,?)",
(size, vwins, wwins, total))
def get_player_stats(acc, role):
if role.lower() not in ["villager"] + [v.lower() for k, v in ROLE_INDICES.items()]:
return "No such role: {0}".format(role)
with conn:
c.execute("SELECT player FROM rolestats WHERE player LIKE ? COLLATE NOCASE", (acc,))
player = c.fetchone()
if player:
for row in c.execute("SELECT * FROM rolestats WHERE player=? COLLATE NOCASE AND role=? COLLATE NOCASE", (acc, role)):
msg = "\u0002{0}\u0002 as \u0002{1}\u0002 | Team wins: {2} (%d%%), Individual wins: {3} (%d%%), Total games: {4}".format(*row)
return msg % (round(row[2]/row[4] * 100), round(row[3]/row[4] * 100))
else:
return "No stats for {0} as {1}.".format(player[0], role)
return "{0} has not played any games.".format(acc)
def get_player_totals(acc):
role_totals = []
with conn:
c.execute("SELECT player FROM rolestats WHERE player LIKE ? COLLATE NOCASE", (acc,))
player = c.fetchone()
if player:
c.execute("SELECT role, totalgames FROM rolestats WHERE player=? COLLATE NOCASE", (acc,))
rows = c.fetchall()
total = 0
for row in rows:
total += row[1]
for row in rows:
role_totals.append("\u0002{row[0]}\u0002: {row[1]} ({prct:.2%})".format(row=row, prct=row[1]/total))
return "\u0002{0}\u0002's totals | \u0002{1} total games\u0002 | {2}".format(player[0], total, ", ".join(role_totals))
else:
return "{0} has not played any games.".format(acc)
def get_game_stats(size):
with conn:
for row in c.execute("SELECT * FROM gamestats WHERE size=?", (size,)):
msg = "\u0002{0}\u0002 player games | Village wins: {1} (%d%%), Wolf wins: {2} (%d%%), Total games: {3}".format(*row)
return msg % (round(row[1]/row[3] * 100), round(row[2]/row[3] * 100))
else:
return "No stats for \u0002{0}\u0002 player games.".format(size)
def get_game_totals():
size_totals = []
total = 0
with conn:
for size in range(MIN_PLAYERS, MAX_PLAYERS + 1):
c.execute("SELECT size, totalgames FROM gamestats WHERE size=?", (size,))
row = c.fetchone()
if row:
size_totals.append("\u0002{0}p\u0002: {1}".format(*row))
total += row[1]
if len(size_totals) == 0:
return "No games have been played."
else:
return "Total games ({0}) | {1}".format(total, ", ".join(size_totals))
|
bsd-2-clause
| 7,256,666,831,428,034,000
| 55.748201
| 185
| 0.440585
| false
| 3.617795
| false
| false
| false
|
thelectronicnub/redditswapbot
|
util/flair_sql_import.py
|
1
|
1111
|
#!/usr/bin/env python2
import sys, os
import json
import argparse
import sqlite3 as lite
con = None
def extant_file(x):
if not os.path.exists(x):
        # argparse expects type= callables to raise ArgumentTypeError
        raise argparse.ArgumentTypeError("{0} does not exist".format(x))
return x
def main():
parser = argparse.ArgumentParser(description="Import flairs")
parser.add_argument("-f", "--file", dest="filename", help="json input file", metavar="FILE", type=extant_file, required=True)
args = parser.parse_args()
try:
con = lite.connect('flair.db')
except lite.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
curs = con.cursor()
curs.execute('''CREATE TABLE IF NOT EXISTS flair (
username TEXT PRIMARY KEY NOT NULL ,
flair_text TEXT,
flair_css_class TEXT,
lastpost timestamp,
lastpostid TEXT,
lastid TEXT DEFAULT ''
)''')
flair_json = json.load(open(args.filename))
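    # Assumed input shape, inferred from the named placeholders below: a JSON
    # array of objects, e.g.
    # [{"user": "alice", "flair_text": "trades: 3", "flair_css_class": "green"}]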
curs.executemany('INSERT INTO flair (username, flair_text, flair_css_class) VALUES (:user, :flair_text, :flair_css_class)', flair_json)
con.commit()
if con:
con.close()
if __name__ == "__main__":
main()
|
gpl-3.0
| -6,648,211,901,216,044,000
| 22.638298
| 139
| 0.648065
| false
| 3.296736
| false
| false
| false
|
lesina/labs2016
|
Laba19/G.py
|
1
|
1494
|
def makeAdjacencyMatrix():
for i in range(size):
adjacency_matrix.append([1000000] * size)
for i in range(n):
vertex1, vertex2, weight = list(map(int, input().split()))
adjacency_matrix[vertex1][vertex2] = weight
adjacency_matrix[vertex2][vertex1] = weight
def Dexter(size, adjacency_matrix, start = 0): # Dijkstra's shortest-path algorithm
valid = [True] * size
weight = [1000000] * size
weight[start] = 0
for i in range(size):
min_weight = 1000001
ID_min_weight = -1
for i in range(size):
if valid[i] and weight[i] < min_weight:
min_weight = weight[i]
ID_min_weight = i
for i in range(size):
if weight[ID_min_weight] + adjacency_matrix[ID_min_weight][i] < weight[i]:
weight[i] = weight[ID_min_weight] + adjacency_matrix[ID_min_weight][i]
valid[ID_min_weight] = False
return weight
def returnPath(retStart):
Path.append(retStart)
newWeight = weight[retStart]
for j in range(size):
for i in range(size):
if (newWeight - adjacency_matrix[i][retStart] == weight[i]):
newWeight -= adjacency_matrix[i][retStart]
retStart = i
Path.append(i)
Path = []
adjacency_matrix = []
size, n, start, end = list(map(int, input().split()))
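# Example input (format inferred from the parsing above): first line
# "size n start end", e.g. "4 4 0 3", then n edge lines "v1 v2 weight",
# e.g. "0 1 5", "1 3 2", "0 2 9", "2 3 1"; the program prints the shortest
# distance from start to end (7 here).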
makeAdjacencyMatrix()
weight = Dexter(size, adjacency_matrix, start)
print(weight[end])
# retStart = int(input())
# returnPath(retStart)
# print(*Path)
|
gpl-3.0
| 6,825,533,243,467,289,000
| 30.808511
| 86
| 0.591031
| false
| 3.410959
| false
| false
| false
|
RAPD/RAPD
|
src/old_agents/rapd_agent_integrate.py
|
1
|
129537
|
"""
RAPD agent for fast integration with XDS
"""
__license__ = """
This file is part of RAPD
Copyright (C) 2011-2018, Cornell University
All rights reserved.
RAPD is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, version 3.
RAPD is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__created__ = "2011-06-29"
__maintainer__ = "David Neau"
__email__ = "dneau@anl.gov"
__status__ = "Production"
# This is an active rapd agent
RAPD_AGENT = True
# This handler's request type
AGENT_TYPE = "INTEGRATE"
AGENT_SUBTYPE = "CORE"
# A unique UUID for this handler (uuid.uuid1().hex)
ID = "bd11f4401eaa11e697c3ac87a3333966"
VERSION = "2.0.0"
# Standard imports
from distutils.spawn import find_executable
import logging
import logging.handlers
import math
from multiprocessing import Process
import os
# import os.path
from pprint import pprint
# import shutil
import stat
import subprocess
import sys
import threading
import time
import numpy
# RAPD imports
from subcontractors.xdsme.xds2mos import Xds2Mosflm
from utils.communicate import rapd_send
from subcontractors.stats import AutoStats
import utils.text as text
from utils.text import json
from bson.objectid import ObjectId
import utils.xutils as Utils
# Import smartie.py from the installed CCP4 package
# smartie.py is a python script for parsing log files from CCP4
sys.path.append(os.path.join(os.environ["CCP4"], "share", "smartie"))
import smartie
# Software dependencies
VERSIONS = {
"aimless": (
"version 0.5.23",
"version 0.5.29",
"version 0.5.31",
),
"freerflag": (
"version 2.2",
),
"gnuplot": (
" 5.0 ",
),
"mtz2various": (
"version 1.1",
),
"pointless": (
"version 1.10.19",
"version 1.10.28",
),
"truncate": (
"version 7.0.004",
"version 7.0.024",
"version 7.0.028",
),
"xds": (
"VERSION Nov 1, 2016",
),
"xds_par": (
"VERSION Nov 1, 2016",
),
}
def try_float(number, default="NO DEFAULT"):
"""Attempt to cast to a float, but return string if not"""
try:
return float(number)
except ValueError:
if default != "NO DEFAULT":
return default
else:
return number
def try_int(number, default="NO DEFAULT"):
"""Attempt to cast to an int, but return string if not"""
try:
        return int(number)
except ValueError:
if default != "NO DEFAULT":
return default
else:
return number
class RapdAgent(Process):
"""
    RAPD agent for fast integration with XDS
command format
{
"command":"INDEX+STRATEGY",
"directories":
{
"data_root_dir":"" # Root directory for the data session
"work":"" # Where to perform the work
},
"image_data":{}, # Image information
["image2":{},] # 2nd image information
"preferences":{} # Settings for calculations
"return_address":("127.0.0.1", 50000) # Location of control process
}
"""
spacegroup = False
low_res = False
hi_res = False
results = {}
def __init__(self, site, command, tprint=False, logger=False):
"""
Initialize the agent
Keyword arguments
site -- full site settings
command -- dict of all information for this agent to run
"""
# Store tprint for use throughout
if tprint:
self.tprint = tprint
# Dead end if no tprint passed
else:
            def func(arg=False, level=False, verbosity=False, color=False, newline=False):
pass
self.tprint = func
# Get the logger Instance
if logger:
self.logger = logger
else:
self.logger = logging.getLogger("RAPDLogger")
self.logger.debug("__init__")
# Store passed-in variables
self.site = site
self.command = command
self.settings = self.command.get("preferences")
self.controller_address = self.command.get("return_address", False)
self.dirs = self.command["directories"]
self.image_data = self.command.get("data").get("image_data")
self.run_data = self.command.get("data").get("run_data")
self.process_id = self.command["process_id"]
self.logger.debug("self.image_data = %s", self.image_data)
if self.settings.get("start_frame", False):
self.image_data["start"] = self.settings.get("start_frame")
else:
self.image_data["start"] = self.run_data.get("start")
# print "self.image_data[\"start\"]", self.image_data["start"]
if self.settings.get("end_frame", False):
self.image_data["total"] = self.settings.get("end_frame") - self.image_data["start"] + 1
else:
self.image_data["total"] = self.run_data.get("total")
# print "self.image_data[\"total\"]", self.image_data["total"]
self.image_data['image_template'] = self.run_data['image_template']
# Check for 2theta tilt:
if 'twotheta' in self.run_data:
self.image_data['twotheta'] = self.run_data['twotheta']
# self.image_data['start'] = self.settings['request']['frame_start']
# self.image_data['total'] = str( int(self.settings['request']['frame_start'])
# + int(self.settings['request']['frame_finish']) - 1)
if self.settings.get('spacegroup', False):
self.spacegroup = self.settings['spacegroup']
if self.settings.get("hi_res", False):
self.hi_res = self.settings.get("hi_res")
if self.settings.get("low_res", False):
self.low_res = self.settings.get("low_res")
if 'multiprocessing' in self.settings:
self.cluster_use = self.settings['multiprocessing']
if self.cluster_use == 'True':
self.cluster_use = True
elif self.cluster_use == 'False':
self.cluster_use = False
else:
self.cluster_use = False
if 'ram_integrate' in self.settings:
self.ram_use = self.settings['ram_integrate']
if self.ram_use == 'True':
self.ram_use = True
elif self.ram_use == 'False':
self.ram_use = False
if self.ram_use == True:
self.ram_nodes = self.settings['ram_nodes']
# ram_nodes is a list containing three lists.
# ram_nodes[0] is a list containing the name of the nodes where
# data was distributed to.
# ram_nodes[1] is a list of the first frame number for the wedge
# of images copied to the corresponding node.
# ram_nodes[2] is a list of the last frame number for the wedge
# of images copied to the corresponding node.
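                # For illustration (values assumed), with 360 frames split
                # across two nodes:
                # ram_nodes = [['node1', 'node2'], [1, 181], [180, 360]]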
else:
self.ram_nodes = None
else:
self.ram_use = False
self.ram_nodes = None
if 'standalone' in self.settings:
self.standalone = self.settings['standalone']
if self.standalone == 'True':
self.standalone = True
elif self.standalone == 'False':
self.standalone = False
else:
self.standalone = False
if 'work_dir_override' in self.settings:
if (self.settings['work_dir_override'] == True
or self.settings['work_dir_override'] == 'True'):
self.dirs['work'] = self.settings['work_directory']
if 'beam_center_override' in self.settings:
if (self.settings['beam_center_override'] == True
or self.settings['beam_center_override'] == 'True'):
self.image_data['x_beam'] = self.settings['x_beam']
self.image_data['y_beam'] = self.settings['y_beam']
        # Some detectors need the beam center flipped for XDS
if self.settings.get('flip_beam', False):
x = self.image_data['y_beam']
self.image_data['y_beam'] = self.image_data['x_beam']
self.image_data['x_beam'] = x
self.xds_default = []
# Parameters likely to be changed based on beamline setup.
# Directory containing XDS.INP default files for detectors.
#if os.path.isdir('/home/necat/DETECTOR_DEFAULTS'):
# self.detector_directory = '/home/necat/DETECTOR_DEFAULTS/'
#Also check set_detector_data for other detector dependent values!
# XDS parameters for number of JOBS and PROCESSORS.
# Values are beamline specific, depending on computing resources.
# self.jobs is number of nodes XDS can use for colspot and/or integration.
# self.procs is number of procesors XDS can use per job.
if self.cluster_use == True:
if self.ram_use == True:
self.jobs = len(self.ram_nodes[0])
self.procs = 8
else:
# Set self.jobs and self.procs based on available cluster resources
self.jobs = 20
self.procs = 8
else:
# Setting self.jobs > 1 provides some speed up on
# multiprocessor machines.
# Should be set based on computer used for processing
self.jobs = 1
self.procs = 4
Process.__init__(self, name="FastIntegration")
self.start()
def run(self):
self.logger.debug('Fastintegration::run')
self.preprocess()
self.process()
#self.postprocess()
def preprocess(self):
"""
Things to do before main proces runs.
1. Change to the correct working directory.
2. Read in detector specific parameters.
"""
self.logger.debug('FastIntegration::preprocess')
if os.path.isdir(self.dirs['work']) == False:
os.makedirs(self.dirs['work'])
os.chdir(self.dirs['work'])
self.xds_default = self.createXDSinp(self.settings['xdsinp'])
def process(self):
"""
Things to do in main process:
1. Run integration and scaling.
2. Report integration results.
3. Run analysis of data set.
"""
self.logger.debug('FastIntegration::process')
if not self.command["command"] in ("INTEGRATE", "XDS"):
self.logger.debug('Program did not request an integration')
self.logger.debug('Now Exiting!')
return
xds_input = self.xds_default
if self.command["command"] == 'XDS':
integration_results = self.xds_total(xds_input)
else:
if os.path.isfile(self.last_image) == True:
if self.ram_use == True:
integration_results = self.ram_total(xds_input)
else:
integration_results = self.xds_total(xds_input)
else:
if self.ram_use == True:
integration_results = self.ram_integrate(xds_input)
elif (self.image_data['detector'] == 'ADSC' or
self.cluster_use == False):
integration_results = self.xds_split(xds_input)
else:
integration_results = self.xds_processing(xds_input)
os.chdir(self.dirs['work'])
if integration_results == 'False':
# Do a quick clean up?
pass
else:
final_results = self.finish_data(integration_results)
# Set up the results for return
self.results['process'] = {'agent_process_id': self.process_id,
'status': 100}
self.results['results'] = final_results
self.logger.debug(self.results)
#self.sendBack2(results)
self.write_json(self.results)
self.print_info()
return
# Skip this for now
analysis = self.run_analysis(final_results['files']['mtzfile'], self.dirs['work'])
analysis = 'Success'
if analysis == 'Failed':
self.logger.debug(analysis)
# Add method for dealing with a failure by run_analysis.
pass
elif analysis == 'Success':
self.logger.debug(analysis)
self.results["status"] = "SUCCESS"
self.logger.debug(self.results)
# self.sendBack2(results)
if self.controller_address:
rapd_send(self.controller_address, self.results)
return
def ram_total(self, xdsinput):
"""
This function controls processing by XDS when the complete data
is present and distributed to ramdisks on the cluster
"""
self.logger.debug('Fastintegration::ram_total')
first = int(self.image_data['start'])
last = int(self.image_data['start']) + int(self.image_data['total']) -1
data_range = '%s %s' %(first, last)
dir = 'wedge_%s_%s' %(first, last)
xdsdir = os.path.join(self.dirs['work'], dir)
if os.path.isdir(xdsdir) == False:
os.mkdir(xdsdir)
os.chdir(xdsdir)
# Figure out how many images are on the first node.
# If greater than self.procs, simply set up spot ranges with a number
# of images equal to self.procs from the first and last ram nodes.
# If less than self.procs, reduce self.procs and set up spot ranges
# with all of the images on the first and last ram nodes.
Num_images = self.ram_nodes[2][0] - self.ram_nodes[1][0] + 1
if Num_images < self.procs:
self.procs = Num_images
spot_range = self.ram_nodes[1][0] + self.procs - 1
xdsinp = xdsinput[:]
xdsinp.append('JOB=XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT\n\n')
# Add the spot ranges.
xdsinp.append('SPOT_RANGE=%s %s\n' %(self.ram_nodes[1][0], spot_range))
# Make sure the last ram node has an adequate number of images available.
spot_range = self.ram_nodes[1][-1] + self.procs - 1
if self.ram_nodes[2][-1] < spot_range:
spot_range = self.ram_nodes[2][-1]
xdsinp.append('SPOT_RANGE=%s %s\n' %(self.ram_nodes[1][-1], spot_range))
xdsinp.append('DATA_RANGE=%s\n' % data_range)
self.write_file('XDS.INP', xdsinp)
self.write_forkscripts(self.ram_nodes, self.image_data['osc_range'])
self.xds_ram(self.ram_nodes[0][0])
newinp = self.check_for_xds_errors(xdsdir, xdsinp)
if newinp == False:
self.logger.debug(' Unknown xds error occurred. Please check for cause!')
self.tprint(arg="Unknown xds error occurred. Please check for cause!",
level=10,
color="red")
raise Exception("Unknown XDS error")
return False
else:
# Find a suitable cutoff for resolution
# Returns False if no new cutoff, otherwise returns the value of
# the high resolution cutoff as a float value.
new_rescut = self.find_correct_res(xdsdir, 1.0)
if new_rescut != False:
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.nocutoff' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.nocutoff' %xdsdir)
newinp[-2] = 'JOB=CORRECT\n\n'
newinp[-3] = 'INCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % new_rescut
self.write_file('XDS.INP', newinp)
self.xds_ram(self.ram_nodes[0][0])
# Prepare the display of results.
final_results = self.run_results(xdsdir)
# Polish up xds processing by moving GXPARM.XDS to XPARM.XDS
# and rerunning xds.
#
# Don't polish if low resolution, as this tend to blow up.
if new_rescut <= 4.5:
os.rename('%s/GXPARM.XDS' %xdsdir, '%s/XPARM.XDS' %xdsdir)
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.old' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.old' %xdsdir)
newinp[-2] = 'JOB=INTEGRATE CORRECT\n\n'
newinp[-3] = '\n'
self.write_file('XDS.INP', newinp)
self.xds_ram(self.ram_nodes[0][0])
#Check to see if a new resolution cutoff should be applied
#new_rescut = self.find_correct_res(xdsdir, 1.0)
#if new_rescut != False:
# os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.nocutoff' %xdsdir)
# os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.nocutoff' %xdsdir)
# newinp[-2] = 'JOB=CORRECT !XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT\n\n'
# newinp[-5] = 'INCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % new_rescut
# self.write_file('XDS.INP', newinp)
# self.xds_ram(self.ram_nodes[0][0])
# new_rescut = self.find_correct_res(xdsdir, 1.0)
# if new_rescut != False:
# os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.oldcutoff' %xdsdir)
# os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.oldcutoff' %xdsdir)
# newinp[-5] = 'INCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % new_rescut
# self.write_file('XDS.INP', newinp)
# self.xds_ram(self.ram_nodes[0][0])
final_results = self.run_results(xdsdir)
final_results['status'] = 'SUCCESS'
return final_results
def change_xds_inp(self, xds_input, new_line):
"""Modify the XDS.INP lines with the input line"""
param = new_line.split("=")[0].strip()
xds_output = []
found = False
        for line in xds_input:
            if param+"=" in line:
                xds_output.append(new_line)
                found = True
            else:
                xds_output.append(line)
# Append the line if it is new
if not found:
xds_output.append(new_line)
return xds_output
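    # For example (illustrative): change_xds_inp(inp, "JOB=CORRECT\n\n")
    # replaces an existing "JOB=..." line in place, or appends the line when
    # the keyword is not present yet.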
def xds_total(self, xdsinput):
"""
This function controls processing by XDS when the complete data
set is already present on the computer system.
"""
self.logger.debug('Fastintegration::xds_total')
self.tprint(arg="\nXDS processing", level=99, color="blue")
first = int(self.image_data['start'])
last = int(self.image_data['start']) + int(self.image_data['total']) -1
data_range = '%s %s' %(first, last)
self.logger.debug('start = %s, total = %s',
self.image_data['start'],
self.image_data['total'])
self.logger.debug('first - %s, last = %s', first, last)
self.logger.debug('data_range = %s', data_range)
dir = 'wedge_%s_%s' % (first, last)
xdsdir = os.path.join(self.dirs['work'], dir)
if os.path.isdir(xdsdir) == False:
os.mkdir(xdsdir)
xdsinp = xdsinput[:]
if self.low_res or self.hi_res:
if not self.low_res:
low_res = 200.0
else:
low_res = self.low_res
if not self.hi_res:
hi_res = 0.9
else:
hi_res = self.hi_res
xdsinp = self.change_xds_inp(xdsinp,
"INCLUDE_RESOLUTION_RANGE=%.2f %.2f\n" % (low_res, hi_res))
xdsinp = self.change_xds_inp(xdsinp, "MAXIMUM_NUMBER_OF_PROCESSORS=%s\n" % self.procs)
xdsinp = self.change_xds_inp(xdsinp, "MAXIMUM_NUMBER_OF_JOBS=%s\n" % self.jobs)
xdsinp = self.change_xds_inp(xdsinp, "JOB=XYCORR INIT COLSPOT \n\n")
xdsinp = self.change_xds_inp(xdsinp, "DATA_RANGE=%s\n" % data_range)
xdsfile = os.path.join(xdsdir, 'XDS.INP')
self.write_file(xdsfile, xdsinp)
self.tprint(arg=" Searching for peaks",
level=99,
color="white",
newline=False)
self.xds_run(xdsdir)
# Index
xdsinp[-2] = ("JOB=IDXREF \n\n")
self.write_file(xdsfile, xdsinp)
self.tprint(arg=" Indexing",
level=99,
color="white",
newline=False)
self.xds_run(xdsdir)
# Integrate
# Override spacegroup?
if self.spacegroup != False:
# Check consistency of spacegroup, and modify if necessary.
xdsinp = self.find_xds_symm(xdsdir, xdsinp)
else:
xdsinp = self.change_xds_inp(xdsinp, "JOB=DEFPIX INTEGRATE CORRECT \n\n")
self.write_file(xdsfile, xdsinp)
self.tprint(arg=" Integrating",
level=99,
color="white",
newline=False)
self.xds_run(xdsdir)
# If known xds_errors occur, catch them and take corrective action
newinp = self.check_for_xds_errors(xdsdir, xdsinp)
if newinp == False:
self.logger.exception('Unknown xds error occurred. Please check for cause!')
self.tprint(arg="\nXDS error unknown to RAPD has occurred. Please check for cause!",
level=30,
color="red")
# TODO put out failing JSON
raise Exception("XDS error unknown to RAPD has occurred.")
# Prepare the display of results.
prelim_results = self.run_results(xdsdir)
self.tprint("\nPreliminary results summary", 99, "blue")
self.print_results(prelim_results)
# Already have hi res cutoff
if self.hi_res:
new_rescut = self.hi_res
# Find a suitable cutoff for resolution
else:
if self.low_res:
low_res = self.low_res
else:
low_res = 200.0
# Returns False if no new cutoff, otherwise returns the value of
# the high resolution cutoff as a float value.
new_rescut = self.find_correct_res(xdsdir, 1.0)
newinp = self.change_xds_inp(newinp, "JOB= INTEGRATE CORRECT \n\n")
# newinp[-2] = 'JOB= INTEGRATE CORRECT \n\n'
if new_rescut != False:
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.nocutoff' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.nocutoff' %xdsdir)
newinp = self.change_xds_inp(
newinp,
"%sINCLUDE_RESOLUTION_RANGE=%.2f %.2f\n" % (newinp[-2], low_res, new_rescut))
# newinp[-2] = '%sINCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % (newinp[-2], new_rescut)
self.write_file(xdsfile, newinp)
self.tprint(arg=" Reintegrating with new resolution cutoff",
level=99,
color="white",
newline=False)
self.xds_run(xdsdir)
# Prepare the display of results.
prelim_results_2 = self.run_results(xdsdir)
self.tprint("\nIntermediate results summary", 99, "blue")
self.print_results(prelim_results_2)
# Polish up xds processing by moving GXPARM.XDS to XPARM.XDS
# and rerunning xds.
#
# If low resolution, don't try to polish the data, as this tends to blow up.
if new_rescut <= 4.5:
os.rename('%s/GXPARM.XDS' %xdsdir, '%s/XPARM.XDS' %xdsdir)
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.old' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.old' %xdsdir)
#newinp[-2] = 'JOB=INTEGRATE CORRECT !XYCORR INIT COLSPOT IDXREF DEFPIX INTEGRATE CORRECT\n\n'
self.write_file(xdsfile, newinp)
self.tprint(arg=" Polishing",
level=99,
color="white",
newline=False)
self.xds_run(xdsdir)
final_results = self.run_results(xdsdir)
else:
# Check to see if a new resolution cutoff should be applied
new_rescut = self.find_correct_res(xdsdir, 1.0)
if new_rescut != False:
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.oldcutoff' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.oldcutoff' %xdsdir)
newinp[-2] = '%sINCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % (newinp[-2], new_rescut)
self.write_file(xdsfile, newinp)
self.tprint(arg=" New resolution cutoff", level=99, color="white", newline=False)
self.xds_run(xdsdir)
final_results = self.run_results(xdsdir)
# Put data into the commanline
self.tprint("\nFinal results summary", 99, "blue")
self.print_results(final_results)
self.print_plots(final_results)
final_results['status'] = 'ANALYSIS'
return final_results
def xds_split(self, xdsinput):
"""
Controls xds processing for unibinned ADSC data
Launches XDS when half the data set has been collected and again once
the complete data set has been collected.
"""
self.logger.debug("FastIntegration::xds_split")
first_frame = int(self.image_data['start'])
half_set = (int(self.image_data['total']) / 2) + first_frame - 1
last_frame = int(self.image_data['start']) + int(self.image_data['total']) - 1
frame_count = first_frame + 1
file_template = os.path.join(self.image_data['directory'], self.image_template)
# Figure out how many digits needed to pad image number.
# First split off the <image number>.<extension> portion of the file_template.
numimg = self.image_template.split('_')[-1]
# Then split off the image number portion.
num = numimg.split('.')[0]
# Then find the length of the number portion
pad = len(num)
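        # e.g. (illustrative): image_template 'set1_????.img' gives
        # numimg = '????.img', num = '????', and pad = 4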
replace_string = ''
for i in range(0, pad, 1):
replace_string += '?'
look_for_file = file_template.replace(replace_string,
'%0*d' %(pad, frame_count))
# Maximum wait time for next image is exposure time + 30 seconds.
wait_time = int(math.ceil(float(self.image_data['time']))) + 30
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
while frame_count < last_frame:
if os.path.isfile(look_for_file) == True:
if timer.is_alive():
timer.terminate()
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
if frame_count == half_set:
proc_dir = 'wedge_%s_%s' % (first_frame, frame_count)
xds_job = Process(target=self.xds_wedge,
args=(proc_dir, frame_count, xdsinput))
xds_job.start()
frame_count += 1
look_for_file = file_template.replace(replace_string,
'%0*d' %(pad, frame_count))
elif timer.is_alive() == False:
self.logger.debug(' Image %s not found after waiting %s seconds.',
look_for_file,
wait_time)
self.logger.debug(' RAPD assumes the data collection has been aborted.')
self.logger.debug(' Launching a final xds job with last image detected.')
self.image_data['last'] = frame_count - 1
results = self.xds_total(xdsinput)
return results
# If you reach here, frame_count equals the last frame, so look for the
# last frame and then launch xds_total.
while timer.is_alive():
if os.path.isfile(self.last_image):
if xds_job.is_alive():
xds_job.terminate()
results = self.xds_total(xdsinput)
timer.terminate()
break
# If timer expires (ending the above loop) and last frame has not been
# detected, launch xds_total with last detected image.
if os.path.isfile(self.last_image) == False:
if xds_job.is_alive():
xds_job.terminate()
self.image_data['last'] = frame_count - 1
results = self.xds_total(xdsinput)
return results
def xds_processing(self, xdsinput):
"""
Controls processing of data on disks (i.e. not stored in RAM)
by xds. Attempts to process every 10 images up to 100 and then
every 20 images after that. This function should be used for NE-CAT
data collected on ADSC in binned mode
"""
"""
Need to set up a control where every ten frames an XDS processing is launched.
Need to keep track of what's been launched. To avoid launching too many XDS
jobs, if an XDS job is running when next ten frames are collected, don't launch
new wedge but rather wait for next multiple of 10. XDS jobs should be checked for
common errors and rerun if needed. A resolution cutoff should be generated at the
CORRECT stage (pass this cutoff on to next wedge?). Once the data set is complete,
last XDS should be "polished" by moving GXPARM.XDS to XPARM.XDS
As XDS jobs finish, launch whatever generates the GUI display
"""
self.logger.debug('FastIntegration::xds_processing')
first_frame = int(self.image_data['start'])
        last_frame = int(self.image_data['start']) + int(self.image_data['total']) - 1
frame_count = first_frame
# Maximum wait time for next image is exposure time + 15 seconds.
#wait_time = int(math.ceil(float(self.image_data['time']))) + 15
# Maximum wait time for next image is exposure time + 60 seconds.
if self.image_data['detector'] == 'PILATUS' or self.image_data['detector'] == 'HF4M':
wait_time = int(math.ceil(float(self.image_data['time']))) + 15
else:
wait_time = int(math.ceil(float(self.image_data['time']))) + 60
try:
wedge_size = int(10 // float(self.image_data['osc_range']))
except:
self.logger.debug('xds_processing:: dynamic wedge size allocation failed!')
self.logger.debug(' Setting wedge size to 10.')
wedge_size = 10
file_template = os.path.join(self.image_data['directory'], self.image_template)
# Figure out how many digits needed to pad image number.
# First split off the <image number>.<extension> portion of the file_template.
numimg = self.image_template.split('_')[-1]
# Then split off the image number portion.
num = numimg.split('.')[0]
# Then find the length of the number portion
pad = len(num)
replace_string = ''
for _ in range(0, pad, 1):
replace_string += '?'
look_for_file = file_template.replace(replace_string,
'%0*d' % (pad, frame_count))
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
# Create the process xds_job (runs a timer with no delay).
# This is so xds_job exists when it is checked for later on.
# Eventually xds_job is replaced by the actual integration jobs.
xds_job = Process(target=time.sleep, args=(0,))
xds_job.start()
while frame_count < last_frame:
# Look for next look_for_file to see if it exists.
# If it does, check to see if it is a tenth image.
# If it is a tenth image, launch an xds job.
# If it isn't a tenth image, index the look_for_file
# If it doesn't exist, keep checking until time_process expires.
if os.path.isfile(look_for_file) == True:
# Reset the timer process
if timer.is_alive():
timer.terminate()
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
# If frame_count is a tenth image, launch and xds job
# remainder = ((frame_count + 1) - first_frame) % wedge_size
# self.logger.debug(' remainder = %s' % remainder)
            if xds_job.is_alive() == True:
self.logger.debug(' xds_job.is_alive = True')
if (((frame_count + 1) - first_frame) % wedge_size == 0 and
xds_job.is_alive() == False):
proc_dir = 'wedge_%s_%s' %(first_frame, frame_count)
xds_job = Process(target=self.xds_wedge,
args=(proc_dir, frame_count, xdsinput))
xds_job.start()
# Increment the frame count to look for next image
frame_count += 1
look_for_file = file_template.replace(replace_string,
'%0*d' % (pad, frame_count))
# If next frame does not exist, check to see if timer has expired.
# If timer has expired, assume an abort has occurred.
elif timer.is_alive() == False:
self.logger.debug(' Image %s not found after waiting %s seconds.',
look_for_file,
wait_time)
# There have been a few cases, particularly with Pilatus's
# Furka file transfer has failed to copy an image to disk.
# So check for the next two files before assuming there has
# been an abort.
self.logger.debug(' RAPD assumes the data collection has been aborted.')
self.logger.debug(' RAPD checking for next two subsequent images to be sure.')
frame_count += 1
look_for_file = file_template.replace(replace_string, '%0*d' % (pad, frame_count))
if os.path.isfile(look_for_file) == True:
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
# Increment the frame count to look for next image
frame_count += 1
look_for_file = file_template.replace(replace_string,
'%0*d' %(pad, frame_count))
else:
                self.logger.debug(' RAPD did not find the next image, checking for one more.')
frame_count += 1
look_for_file = file_template.replace(replace_string, '%0*d' %(pad, frame_count))
if os.path.isfile(look_for_file) == True:
timer = Process(target=time.sleep, args=(wait_time,))
timer.start()
frame_count += 1
look_for_file = file_template.replace(
replace_string,
'%0*d' % (pad, frame_count))
else:
self.logger.debug(' RAPD did not find the next image either.')
self.logger.debug(
' Launching a final xds job with last image detected.')
self.image_data['total'] = frame_count - 2 - first_frame
results = self.xds_total(xdsinput)
return results
# If you reach here, frame_count equals the last frame, so look for the
# last frame and then launch xds_total.
while timer.is_alive():
if os.path.isfile(self.last_image):
if xds_job.is_alive():
xds_job.terminate()
results = self.xds_total(xdsinput)
timer.terminate()
break
# If timer expires (ending the above loop) and last frame has not been
# detected, launch xds_total with last detected image.
if os.path.isfile(self.last_image) == False:
if xds_job.is_alive():
xds_job.terminate()
self.image_data['total'] = frame_count - first_frame
results = self.xds_total(xdsinput)
return results
def xds_wedge(self, dir, last, xdsinput):
"""
This function controls processing by XDS for an intermediate wedge
"""
self.logger.debug('Fastintegration::xds_wedge')
self.tprint(arg="\nXDS processing", level=99, color="blue")
first = int(self.image_data['start'])
data_range = '%s %s' % (first, last)
xdsdir = os.path.join(self.dirs['work'], dir)
if os.path.isdir(xdsdir) == False:
os.mkdir(xdsdir)
xdsinp = xdsinput[:]
#xdsinp = self.find_spot_range(first, last, self.image_data['osc_range'],xdsinput[:])
xdsinp.append('MAXIMUM_NUMBER_OF_PROCESSORS=%s\n' % self.procs)
xdsinp.append('MAXIMUM_NUMBER_OF_JOBS=%s\n' % self.jobs)
#xdsinp.append('MAXIMUM_NUMBER_OF_JOBS=1\n')
xdsinp.append('JOB=XYCORR INIT COLSPOT !IDXREF DEFPIX INTEGRATE CORRECT\n\n')
xdsinp.append('DATA_RANGE=%s\n' % data_range)
xdsfile = os.path.join(xdsdir, 'XDS.INP')
self.write_file(xdsfile, xdsinp)
self.tprint(arg=" Searching for peaks wedge", level=99, color="white", newline=False)
self.xds_run(xdsdir)
#xdsinp[-3]=('MAXIMUM_NUMBER_OF_JOBS=%s\n' % self.jobs)
xdsinp[-2] = ('JOB=IDXREF DEFPIX INTEGRATE CORRECT\n\n')
self.write_file(xdsfile, xdsinp)
self.tprint(arg=" Integrating", level=99, color="white", newline=False)
self.xds_run(xdsdir)
# If known xds_errors occur, catch them and take corrective action
newinp = 'check_again'
while newinp == 'check_again':
newinp = self.check_for_xds_errors(xdsdir, xdsinp)
if newinp == False:
self.logger.debug(' Unknown xds error occurred for %s.', dir)
self.logger.debug(' Please check for cause!')
return
else:
# Find a suitable cutoff for resolution
# Returns False if no new cutoff, otherwise returns the value of
# the high resolution cutoff as a float value.
new_rescut = self.find_correct_res(xdsdir, 1.0)
if new_rescut != False:
os.rename('%s/CORRECT.LP' %xdsdir, '%s/CORRECT.LP.nocutoff' %xdsdir)
os.rename('%s/XDS.LOG' %xdsdir, '%s/XDS.LOG.nocutoff' %xdsdir)
newinp[-2] = 'JOB=INTEGRATE CORRECT\n'
newinp[-2] = '%sINCLUDE_RESOLUTION_RANGE=200.0 %.2f\n' % (newinp[-2], new_rescut)
self.write_file(xdsfile, newinp)
self.tprint(arg=" Reintegrating", level=99, color="white", newline=False)
self.xds_run(xdsdir)
results = self.run_results(xdsdir)
return results
def createXDSinp(self, xds_dict):
"""
This function takes the dict holding XDS keywords and values
and converts them into a list of strings that serves as the
basis for writing out an XDS.INP file.
"""
self.logger.debug("FastIntegration::createXDSinp")
# print self.image_data["start"]
# print self.image_data["total"]
last_frame = self.image_data['start'] + self.image_data["total"] - 1
self.logger.debug('last_frame = %s', last_frame)
# print last_frame
# self.logger.debug('detector_type = %s' % detector_type)
background_range = '%s %s' %(int(self.image_data['start']), int(self.image_data['start']) + 4)
x_beam = float(self.image_data['x_beam']) / float(self.image_data['pixel_size'])
y_beam = float(self.image_data['y_beam']) / float(self.image_data['pixel_size'])
#if x_beam < 0 or x_beam > int(xds_dict['NX']):
# raise RuntimeError, 'x beam coordinate outside detector'
#if y_beam < 0 or y_beam > int(xds_dict['NY']):
# raise RuntimeError, 'y beam coordinate outside detector'
if 'image_template' in self.image_data:
self.image_template = self.image_data['image_template']
else:
raise RuntimeError, '"image_template" not defined in input data.'
file_template = os.path.join(self.image_data['directory'], self.image_template)
        # Count the number of '?' placeholders in an image filename.
        pad = file_template.count('?')
        # Replace the first instance of '?' with the zero-padded image number
        # of the last frame (mirroring the handling of first_image below).
        self.last_image = file_template.replace('?', str(last_frame).zfill(pad), 1)
# Remove the remaining '?'
self.last_image = self.last_image.replace('?', '')
# Repeat the last two steps for the first image's filename.
self.first_image = file_template.replace('?', str(self.image_data['start']).zfill(pad), 1)
self.first_image = self.first_image.replace('?', '')
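        # e.g. (illustrative): with template 'set1_????.img', start=1 and
        # last_frame=360, first_image becomes 'set1_0001.img' and
        # last_image becomes 'set1_0360.img'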
# Begin constructing the list that will represent the XDS.INP file.
xds_input = ['!===== DATA SET DEPENDENT PARAMETERS =====\n',
'ORGX=%.2f ORGY=%.2f ! Beam Center (pixels)\n' % (x_beam, y_beam),
'DETECTOR_DISTANCE=%.2f ! (mm)\n' %
(float(self.image_data['distance'])),
'OSCILLATION_RANGE=%.2f ! (degrees)\n' %
(float(self.image_data['osc_range'])),
'X-RAY_WAVELENGTH=%.5f ! (Angstroems)\n' %
(float(self.image_data['wavelength'])),
'NAME_TEMPLATE_OF_DATA_FRAMES=%s\n\n' % file_template,
'BACKGROUND_RANGE=%s\n\n' % background_range,
'!===== DETECTOR_PARAMETERS =====\n']
for key, value in xds_dict.iteritems():
# Regions that are excluded are defined with
# various keyword containing the word UNTRUSTED.
# Since multiple regions may be specified using
# the same keyword on XDS but a dict cannot
# have multiple values assigned to a key,
# the following if statements work though any
# of these regions and add them to xdsinput.
if 'UNTRUSTED' in key:
if 'RECTANGLE' in key:
line = 'UNTRUSTED_RECTANGLE=%s\n' %value
elif 'ELLIPSE' in key:
line = 'UNTRUSTED_ELLIPSE=%s\n' %value
                elif 'QUADRILATERAL' in key:
line = 'UNTRUSTED_QUADRILATERAL=%s\n' %value
else:
line = "%s=%s\n" % (key, value)
xds_input.append(line)
# If the detector is tilted in 2theta, adjust the value of
# DIRECTION_OF_DETECTOR_Y-AXIS.
# **** IMPORTANT ****
# This adjustment assumes that the 2theta tilt affects only
# the DIRECTION_OF_DETECTOR_Y-AXIS, and not the
# DIRECTION_OF_DETECTOR_X-AXIS.
#
# If 2theta is not inclined, self.image_data should not have the key
# 'twotheta', or have that key set to a value of None.
#
# If 2theta is inclined, it should be give in self.image_data
# with the key 'twotheta' and a value in degrees.
#
if 'twotheta' in self.image_data and self.image_data['twotheta'] != None:
twotheta = math.radians(float(self.image_data['twotheta']))
tilty = math.cos(twotheta)
tiltz = math.sin(twotheta)
xds_input.append('!***** Detector is tilted in 2theta *****\n')
xds_input.append('! 2THETA = %s degrees\n' % self.image_data['twotheta'])
xds_input.append('!*** Resetting DIRECTION_OF_DETECTOR_Y-AXIS ***\n')
xds_input.append('DIRECTION_OF_DETECTOR_Y-AXIS= 0.0 %.4f %.4f\n' %(tilty, tiltz))
xds_input.append('! 0.0 cos(2theta) sin(2theta)\n\n')
# pprint(xds_input)
return xds_input
def write_file(self, filename, file_input):
"""
Writes out file_input as filename.
file_input should be a list containing the desired contents
of the file to be written.
"""
self.logger.debug('FastIntegration::write_file')
self.logger.debug(' Filename = %s' % filename )
# pprint(file_input)
with open (filename, 'w') as file:
file.writelines(file_input)
return
def find_spot_range(self, first, last, osc, input):
"""
Finds up to two spot ranges for peak picking.
Ideally the two ranges each cover 5 degrees of data and
are 90 degrees apart. If the data set is 10 degrees or
less, return a single spot range equal to the entire data
set. If the data set is less than 90 degrees, return two
spot ranges representing the first 5 degrees and the middle
5 degrees of data.
"""
self.logger.debug('FastIntegration::find_spot_range')
self.logger.debug(' first_frame = %s', first)
self.logger.debug(' last_frame = %s', last)
self.logger.debug(' frame_width = %s', osc)
# Determine full oscillation range of the data set.
fullrange = (float(last) - float(first) + 1) * float(osc)
# If the full oscillation range is 10 degrees or less
# return a single spot_range equal to the full data set
if fullrange <= 10:
input.append('SPOT_RANGE=%s %s\n\n' %(first, last))
else:
endspot1 = int(first) + int(5 / float(osc)) - 1
input.append('SPOT_RANGE=%s %s\n\n' %(first, endspot1))
if fullrange < 95:
spot2_start = int((int(last) - int(first) + 1) / 2)
else:
spot2_start = int(90 / float(osc))
spot2_end = spot2_start + int(5 / float(osc)) - 1
input.append('SPOT_RANGE=%s %s\n\n' %(spot2_start, spot2_end))
return input
def xds_run(self, directory):
"""
Launches the running of xds.
"""
self.logger.debug('FastIntegration::xds_run')
self.logger.debug(' directory = %s', directory)
self.logger.debug(' detector = %s', self.image_data['detector'])
xds_command = 'xds_par'
os.chdir(directory)
# TODO skip processing for now
if self.cluster_use == True:
job = Process(target=BLspec.processCluster,
args=(self, (xds_command, 'XDS.LOG', '8', 'phase2.q')))
else:
job = Process(target=Utils.processLocal,
args=((xds_command, "XDS.LOG"),
self.logger))
job.start()
while job.is_alive():
time.sleep(1)
self.tprint(arg=".", level=99, color="white", newline=False)
self.tprint(arg=" done", level=99, color="white")
os.chdir(self.dirs['work'])
return
def xds_ram(self, first_node):
"""
Launches xds_par via ssh on the first_node.
This ensures that xds runs properly when trying to use
data distributed to the cluster's ramdisks
"""
self.logger.debug('FastIntegration::xds_ram')
        command = ('ssh -x %s "cd $PWD && xds_par > XDS.LOG"' % first_node)
        self.logger.debug('     %s', command)
        p = subprocess.Popen(command, shell=True)
p.wait()
return
def find_correct_res(self, directory, isigi):
"""
Looks at CORRECT.LP to find a resolution cutoff, where I/sigma is
approximately 1.5
"""
self.logger.debug(' directory = %s', directory)
self.logger.debug(' isigi = %s', isigi)
self.tprint(arg=" Determining resolution cutoff ",
level=99,
color="white",
newline=False)
new_hi_res = False
correctlp = os.path.join(directory, 'CORRECT.LP')
try:
correct_log = open(correctlp, 'r').readlines()
except IOError as e:
self.logger.debug('Could not open CORRECT.LP')
self.logger.debug(e)
return new_hi_res
flag = 0
IsigI = 0
hires = 0
        # Read from the bottom of CORRECT.LP up, looking for the first
        # occurrence of "total", which signals that you've found the
        # last statistics table giving I/sigma values in the file.
for i in range(len(correct_log)-1, 0, -1):
if correct_log[i].strip().startswith('total'):
flag = 1
elif flag == 1:
if len(correct_log[i]) == 1:
new_hi_res = hires
break
line = correct_log[i].split()
if line[0][0].isdigit():
#if line[8] == '-99.00':
# self.logger.debug(' IsigI = -99.00')
# return False
prev_hires = hires
prev_IsigI = IsigI
hires = float(line[0])
try:
IsigI = float(line[8])
except ValueError:
pass
#self.logger.debug(' hires = %s, IsigI = %s' %(hires, IsigI))
if IsigI >= isigi:
# If the first IsigI value greater than 2, break and
# return False as new_hires.
if prev_IsigI == 0:
break
else:
new_hi_res = '%0.2f' % numpy.interp([isigi],
[prev_IsigI, IsigI],
[prev_hires, hires])
# print [isigi]
# print [prev_IsigI, IsigI]
# print [prev_hires, hires]
# print interp([isigi], [prev_IsigI, IsigI], [prev_hires, hires])
break
                else: # If the first character in the line is not a digit, you've
                      # read through the entire table, so break.
new_hi_res = hires
break
self.logger.debug(' prev_hires = %s prev_IsigI = %s' % (prev_hires, prev_IsigI))
self.logger.debug(' hires = %s IsigI = %s' %(hires, IsigI))
self.logger.debug(' New cutoff = %s' %new_hi_res)
hi_res = float(new_hi_res)
self.tprint(arg="new cutoff = %4.2f %s" % (hi_res, text.aring),
level=99,
color="white")
return hi_res
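    # Minimal sketch of the interpolation used above (numbers are
    # hypothetical): reading up the table, a previous shell with
    # prev_hires=1.90, prev_IsigI=1.2 and a current shell with hires=2.00,
    # IsigI=2.3 give the resolution where I/sigma crosses 1.5 as:
    #   >>> import numpy
    #   >>> numpy.interp([1.5], [1.2, 2.3], [1.90, 2.00])
    #   array([ 1.92727273])     # i.e. a new cutoff of ~1.93 Angstroms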
def check_for_xds_errors(self, dir, input):
"""
Examines results of an XDS run and searches for known problems.
"""
self.logger.debug('FastIntegration::check_for_xds_errors')
self.tprint(arg=" Checking XDS output for errors",
level=99,
color="white")
os.chdir(dir)
# Enter a loop that looks for an error, then tries to correct it
# and the reruns xds.
# Loop should continue until all errors are corrected, or only
# an unknown error is detected.
xdslog = open('XDS.LOG', 'r').readlines()
for line in xdslog:
if '! ERROR !' in line:
# An error was found in XDS.LOG, now figure out what it was.
if 'CANNOT CONTINUE WITH A TWO DIMENSION' in line:
self.logger.debug(' Found an indexing error')
self.tprint(arg="\n Found an indexing error",
level=10,
color="red")
# Try to fix by extending the data range
                    tmp = input[-1].split('=')[1]
                    first, last = tmp.split()
                    if int(last) == (int(self.image_data['start']) + int(self.image_data['total']) - 1):
self.logger.debug(' FAILURE: Already using the full data range available.')
return False
else:
input[-1] = 'SPOT_RANGE=%s %s' % (first, (int(last) + 1))
self.write_file('XDS.INP', input)
os.system('mv XDS.LOG initialXDS.LOG')
self.tprint(arg="\n Extending spot range",
level=10,
color="white",
newline=False)
self.xds_run(dir)
return input
elif 'SOLUTION IS INACCURATE' in line or 'INSUFFICIENT PERCENTAGE' in line:
self.logger.debug(' Found inaccurate indexing solution error')
self.logger.debug(' Will try to continue anyway')
self.tprint(arg=" Found inaccurate indexing solution error - trying to continue anyway",
level=30,
color="red")
# Inaccurate indexing solution, can try to continue with DEFPIX,
# INTEGRATE, and CORRECT anyway
self.logger.debug(' The length of input is %s' % len(input))
if 'JOB=DEFPIX' in input[-2]:
self.logger.debug('Error = %s' %line)
self.logger.debug('XDS failed to run with inaccurate indexing solution error.')
self.tprint(arg="\n XDS failed to run with inaccurate indexing solution error.",
level=30,
color="red")
return False
else:
input[-2] = ('JOB=DEFPIX INTEGRATE CORRECT !XYCORR INIT COLSPOT'
+ ' IDXREF DEFPIX INTEGRATE CORRECT\n')
self.write_file('XDS.INP', input)
os.system('mv XDS.LOG initialXDS.LOG')
self.tprint(arg="\n Integrating with suboptimal indexing solution",
level=99,
color="white",
newline=False)
self.xds_run(dir)
return input
elif 'SPOT SIZE PARAMETERS HAS FAILED' in line:
self.logger.debug(' Found failure in determining spot size parameters.')
self.logger.debug(' Will use default values for REFLECTING_RANGE and BEAM_DIVERGENCE.')
self.tprint(arg="\n Found failure in determining spot size parameters.",
level=99,
color="red")
input.append('\nREFLECTING_RANGE=1.0 REFLECTING_RANGE_E.S.D.=0.10\n')
input.append('BEAM_DIVERGENCE=0.9 BEAM_DIVERGENCE_E.S.D.=0.09\n')
self.write_file('XDS.INP', input)
os.system('mv XDS.LOG initialXDS.LOG')
self.tprint(arg=" Integrating after failure in determining spot size parameters",
level=99,
color="white",
newline=False)
self.xds_run(dir)
return input
else:
# Unanticipated Error, fail the error check by returning False.
self.logger.debug('Error = %s' %line)
return False
return input
def write_forkscripts(self, node_list, osc):
"""
Creates two small script files that are run in place of
XDS's forkcolspot and forkintegrate scripts to allow
utilization of data distributed on the cluster's ramdisks.
In order for the forkscripts to work, the forkcolspot and
forkintegrate scripts in the xds directory should be modified
appropriately.
"""
self.logger.debug('FastIntegration::write_forkscripts')
niba0 = 5 // float(osc) # minimum number of images per batch
ntask = len(node_list[0]) # Total number of jobs
nodes = node_list[0] # list of nodes where data is distributed
fframes = node_list[1] # list of first image on each node
lframes = node_list[2] # list of last image on each node
forkc = ['#!/bin/bash\n']
forkc.append('echo "1" | ssh -x %s "cd $PWD && mcolspot_par" &\n'
% nodes[0])
forkc.append('echo "2" | ssh -x %s "cd $PWD && mcolspot_par" &\n'
% nodes[-1])
forkc.append('wait\n')
forkc.append('rm -f mcolspot.tmp')
forki = ['#!/bin/bash\n']
for x in range(0, ntask, 1):
itask = x + 1
nitask = lframes[x] - fframes[x] + 1
if nitask < niba0:
nbatask = 1
else:
nbatask = nitask // niba0
forki.append('echo "%s %s %s %s" | ssh -x %s "cd $PWD && mintegrate_par" &\n'
% (fframes[x], nitask, itask, nbatask, nodes[x]))
forki.append('wait\n')
forki.append('rm -f mintegrate.tmp')
self.write_file('forkc', forkc)
self.write_file('forki', forki)
os.chmod('forkc', stat.S_IRWXU)
os.chmod('forki', stat.S_IRWXU)
return
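    # For reference, a generated forki script might look roughly like this
    # (node names, frame numbers, and batch counts are hypothetical):
    #   #!/bin/bash
    #   echo "1 60 1 6" | ssh -x compute-0-1 "cd $PWD && mintegrate_par" &
    #   echo "61 60 2 6" | ssh -x compute-0-2 "cd $PWD && mintegrate_par" &
    #   wait
    #   rm -f mintegrate.tmp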
def run_results(self, directory):
"""
Takes the results from xds integration/scaling and prepares
tables and plots for the user interface.
"""
self.logger.debug('FastIntegration::run_results')
os.chdir(directory)
orig_rescut = False
# Run xdsstat on XDS_ASCII.HKL.
xdsstat_log = self.xdsstat()
# Run pointless to convert XDS_ASCII.HKL to mtz format.
mtzfile = self.pointless()
# Run dummy run of aimless to generate various stats and plots.
# i.e. We don't use aimless for actual scaling, it's already done by XDS.
if mtzfile != 'Failed':
aimless_log = self.aimless(mtzfile)
else:
self.logger.debug(' Pointless did not run properly!')
self.logger.debug(' Please check logs and files in %s' %self.dirs['work'])
return('Failed')
# Parse the aimless logfile to look for resolution cutoff.
aimlog = open(aimless_log, "r").readlines()
for line in aimlog:
if 'High resolution limit' in line:
current_resolution = line.split()[-1]
elif 'from half-dataset correlation' in line:
resline = line
elif 'from Mn(I/sd) > 1.50' in line:
resline2 = line
break
res_cut = resline.split('=')[1].split('A')[0].strip()
res_cut2 = resline2.split('=')[1].split('A')[0].strip()
if float(res_cut2) < float(res_cut):
res_cut = res_cut2
# Run aimless with a higher resolution cutoff if the suggested resolution
# is greater than the initial resolution + 0.05.
if (float(res_cut) > float(current_resolution) + 0.05):
# Save information on original resolution suggestions
orig_rescut = resline
# rerun aimless
aimless_log = self.aimless(mtzfile, res_cut)
#graphs, tables, summary = self.parse_aimless(aimless_log)
graphs, summary = self.parse_aimless2(aimless_log)
wedge = directory.split('_')[-2:]
summary['wedge'] = '-'.join(wedge)
# Parse INTEGRATE.LP and add information about mosaicity to summary.
summary['mosaicity'] = float(self.parse_integrateLP())
# Parse CORRECT.LP and add information from that to summary.
summary['ISa'] = float(self.parse_correctLP())
# Parse CORRECT.LP and pull out per wedge statistics
#self.parse_correct()
#scalamtz = mtzfile.replace('pointless','scala')
#scalalog = scalamtz.replace('mtz','log')
scalamtz = mtzfile.replace('pointless', 'aimless')
scalalog = scalamtz.replace('mtz', 'log')
results = {'status': 'WORKING',
'plots': graphs,
'summary': summary,
'mtzfile': scalamtz,
'dir': directory
}
self.logger.debug("Returning results!")
self.logger.debug(results)
# Set up the results for return
self.results['process'] = {
'agent_process_id':self.process_id,
'status':50
}
self.results['results'] = results
self.logger.debug(self.results)
# self.sendBack2(tmp)
if self.controller_address:
rapd_send(self.controller_address, self.results)
return results
def make_plots(self, graphs, tables):
"""
Generates the plots html file.
Keyword arguments
graphs --
tables --
"""
self.logger.debug('FastIntegration::make_plots')
# plotThese contains a list of graph titles that you want plotted
# addition plots may be requested by adding the title (stripped of
# leading and trailing whitespace) to plotThese.
# The plot titles also serve as keys for the tab titles.
plotThese = {
#'Mn(k) & 0k (theta=0) v. batch' : 'Scale vs frame',
#'Relative Bfactor v. batch' : 'Bfactor vs frame',
'Rmerge v Batch for all runs' : 'R vs frame',
#'Imean & RMS Scatter' : 'I vs frame',
'Imean/RMS scatter' : 'I/sd vs frame',
'I/sigma, Mean Mn(I)/sd(Mn(I))' : 'I/sigma',
'Rmerge v Resolution' : 'R vs Res',
'Rmerge, Rfull, Rmeas, Rpim v Resolution' : 'R vs Res',
'Average I,sd and Sigma' : 'I vs Res',
'Average I, RMSdeviation and Sd' : 'I vs Res',
'Completeness v Resolution' : 'Completeness',
'Multiplicity v Resolution' : 'Redundancy',
'Rmeas, Rsym & PCV v Resolution' : 'Rmeas',
'Rpim (precision R) v Resolution' : 'Rpim',
#'Rd vs frame_difference' : 'Rd',
'Anom & Imean CCs v resolution -' : 'Anom Corr',
'Anom & Imean CCs v resolution' : 'CCanom and CC1/2',
'RMS correlation ratio' : 'RCR',
'Rcp v. batch' : 'Rcp v batch'
}
plotfile = ['<html>\n',
'<head>\n',
' <style type="text/css">\n',
' body { background-image: none; }\n',
' .x-label { position:relative; text-align:center; top: 10px; }\n',
' .title { font-size:30px; text-align:center; }\n',
' </style>\n',
' <script type="text/javascript">\n',
'$(function() {\n',
' // Tabs\n',
" $('.tabs').tabs();\n",
' });\n',
' </script>\n',
'</head>\n',
'<body>\n',
' <table>\n',
' <tr>\n',
' <td width="100%">\n',
' <div class="tabs">\n',
' <!-- This is where the tabl labels are defined\n',
' 221 = tab2 (on page) tab2 (full output tab) tab1 -->\n',
' <ul>\n'
]
# Define tab labels for each graph.
for i, graph in enumerate(graphs):
if graph[0] in plotThese:
title = plotThese[graph[0]]
plotfile.append(' <li><a href="#tabs-22%s">%s</a></li>\n'
% (i, title))
plotfile.append(' </ul>\n')
# Define title and x-axis labels for each graph.
for i,graph in enumerate(graphs):
if graph[0] in plotThese:
plotfile.extend([' <div id="tabs-22%s">\n' % i,
' <div class="title"><b>%s</b></div>\n'
% graph[0],
' <div id="chart%s_div" style=' % i,
'"width:800px; height:600px"></div>\n',
' <div class="x-label">%s</div>\n'
% graph[1],
' </div>\n'
])
plotfile.extend([' </div> <!-- End of Tabs -->\n',
' </td>\n',
' </tr>\n',
' </table>\n\n',
'<script id="source" language="javascript" type="text/javascript">\n',
'$(function () {\n'
])
        # varNum is a counter, such that the variables used for plotting
        # will simply be y+varNum (i.e. y0, y1, y2, etc)
# actual labels are stored transiently in varLabel, and added
# as comments next to the variable when it is initialized
varNum = 0
for i,graph in enumerate(graphs):
title, xlabel, ylabels, xcol, ycols, tableNum = graph
if title in plotThese:
varLabel = []
data = []
plotline = ' var '
# graph[2] is the label for the y-values.
#ylabels = graph[2]
for ylabel in ylabels:
varLabel.append(ylabel)
var = 'y%s' %varNum
varNum += 1
data.append(var)
if ylabel == ylabels[-1]:
plotline += ('%s= [];\n' % var)
else:
plotline += ('%s= [], ' % var)
plotfile.append(plotline)
#xcol = int(graph[3])
#ycols = graph[4]
#tableNum = graph[5]
self.logger.debug('table # %s' %tableNum)
for line in tables[tableNum]:
#self.logger.debug('tableNum = %s line=%s line[0]=%s' %(tableNum,line, line[0]))
if line[0] == '$$':
#self.logger.debug("line == '$$' is TRUE")
break
for y, ycol in enumerate(ycols):
#self.logger.debug("ycols == %s" %ycols)
if line[ycol] !='-':
plotfile.append(' %s.push([%s,%s]);\n'
%(data[y], line[xcol], line[ycol]))
plotfile.extend([' var plot%s' % i,
' = $.plot($("#chart%s_div"), [\n' % i
])
for x in range(0, len(data), 1):
plotfile.append(' {data:%s, label:"%s" },\n'
% (data[x], varLabel[x]))
plotfile.extend([' ],\n',
' { lines: {show: true},\n',
' points: {show: false},\n',
" selection: {mode: 'xy' },\n",
' grid: {hoverable: true, clickable: true },\n'
] )
if xlabel == 'Dmin (A)':
plotfile.append(' xaxis: {ticks: [\n')
for line in tables[tableNum]:
if line[0] == '$$':
break
plotfile.append(' [%s,"%s"],\n'
%(line[xcol], line[xcol+1]))
plotfile.append(' ]},\n')
plotfile.append(' });\n\n')
plotfile.extend(['function showTooltip(x, y, contents) {\n',
" $('<div id=tooltip>' + contents + '</div>').css( {\n",
" position: 'absolute',\n",
" display: 'none',\n",
" top: y + 5,\n",
' left: x + 5, \n',
" border: '1px solid #fdd',\n",
" padding: '2px',\n",
" 'background-color': '#fee',\n",
" opacity: 0.80\n"
' }).appendTo("body").fadeIn(200);\n',
' }\n\n',
' var previousPoint = null;\n'
])
for i, graph in enumerate(graphs):
title = graph[0]
xlabel = graph[1]
if title in plotThese:
plotfile.append(' $("#chart%s_div").bind' %str(i) )
plotfile.extend(['("plothover", function (event, pos, item) {\n',
' $("#x").text(pos.x.toFixed(2));\n',
' $("#y").text(pos.y.toFixed(2));\n\n',
'if (true) {\n',
' if (item) {\n',
' if (previousPoint != item.datapoint) {\n',
' previousPoint = item.datapoint;\n\n',
' $("#tooltip").remove();\n',
])
if xlabel == 'Dmin (A)':
plotfile.append(' var x = (Math.sqrt(1/item.datapoint[0])).toFixed(2),\n')
else:
plotfile.append(' var x = item.datapoint[0].toFixed(2),\n')
plotfile.extend([' y = item.datapoint[1].toFixed(2);\n',
' showTooltip(item.pageX, item.pageY,\n',
' item.series.label + " at " + x + " = " + y);\n',
' }\n',
' }\n',
' else {\n',
' $("#tooltip").remove();\n',
' previousPoint = null;\n',
' }\n',
' }\n });\n\n'
])
plotfile.append('});\n</script>\n</body>\n</html>\n')
self.write_file('plot.html', plotfile)
return('plot.html')
def parse_aimless(self, logfile):
"""
Parses the aimless logfile in order to pull out data for graphing
and the results table.
        Relevant values from the summary table are stored in a results
        dictionary.
Returns a list of lists called graphs that contains information on
data labels and where to pull data from the nested list called tables.
Returns a nested list called tables, which is a copy of the data
tables in the aimless logfile.
Returns a dict called int_results that contains the information
found in the results summary table of the aimless log file.
"""
log = smartie.parselog(logfile)
        # The program expects there to be 10 tables in the aimless log file.
        ntables = log.ntables()
        if ntables != 10:
            #raise RuntimeError, '%s tables found in aimless output, program expected 10.' %ntables
            self.logger.debug('%s tables found in aimless output, program expected 10.' %ntables)
tables = []
for i in range(0,ntables):
data = []
# Ignore the Anisotropy analysis table (it's not always present
# and if you don't ignore it, it causes problems when it is not
# there.)
if 'Anisotropy analysis' in log.tables()[i].title():
pass
else:
for line in log.tables()[i].data().split('\n'):
if line != '':
data.append(line.split())
tables.append(data)
# Pull out information for the summary table.
flag = True
summary = log.keytext(0).message().split('\n')
# For some reason, 'Anomalous flag switched ON' is not always being found.
# so this line creates a blank entry of anomalous_report so that it cannot
# be referenced before assignment.
anomalous_report = ''
for line in summary:
if 'Space group' in line:
space_group = line.strip().split(': ')[-1]
elif 'Average unit cell' in line:
unit_cell = map(float, line.split()[3:])
elif 'Anomalous flag switched ON' in line:
anomalous_report = line
#elif flag == True and 'from half-dataset correlation' in line:
# flag = False
# res_cut = line
int_results={
'bins_low': map(float, summary[3].split()[-3:]),
'bins_high': map(float, summary[4].split()[-3:]),
'rmerge_anom': map(float, summary[6].split()[-3:]),
'rmerge_norm': map(float, summary[7].split()[-3:]),
'rmeas_anom': map(float, summary[8].split()[-3:]),
'rmeas_norm': map(float, summary[9].split()[-3:]),
'rpim_anom': map(float, summary[10].split()[-3:]),
'rpim_norm': map(float, summary[11].split()[-3:]),
'rmerge_top': float(summary[12].split()[-3]),
'total_obs': map(int, summary[13].split()[-3:]),
'unique_obs': map(int, summary[14].split()[-3:]),
'isigi': map(float, summary[15].split()[-3:]),
'cc-half': map(float, summary[16].split()[-3:]),
'completeness': map(float, summary[17].split()[-3:]),
'multiplicity': map(float, summary[18].split()[-3:]),
'anom_completeness': map(float, summary[20].split()[-3:]),
'anom_multiplicity': map(float, summary[21].split()[-3:]),
'anom_correlation': map(float, summary[22].split()[-3:]),
'anom_slope': [float(summary[23].split()[-3])],
'scaling_spacegroup': space_group,
'scaling_unit_cell': unit_cell,
#'text': res_cut,
'text2': anomalous_report
}
# Now create a list for each graph to be plotted.
# This list should have [title, xlabel, ylabels, xcol, ycols, tableNum]
# title is the graph title in the aimless logfile,
# xlabel is the label to be used for the x-axis, ylabels are the labels
# to be used for the data sets in the graph, xcol is the position within
# the table where the x-values are , ycols are the position of the y-vaules,
# and tableNum is the position of the table within the list tables.
graphs = [
['Mn(k) & 0k (theta=0) v. batch', 'image_number', ['Mn(k)', '0k'], 0, [5,6], 0],
['Relative Bfactor v. batch', 'image_number', ['Bfactor'], 0, [4], 0],
['Rmerge v Batch for all runs', 'image_number', ['Rmerge', 'SmRmerge'], 0, [5,12], 1],
['Maximum resolution limit, I/sigma > 1.0', 'image_number', ['MaxRes','SmMaxRes'], 0, [10,13], 1],
['Cumulative multiplicity', 'image_number', ['CMlplc'], 0, [11], 1],
['Imean & RMS Scatter', 'image_number', ['Mn(I)','RMSdev'], 0, [2,3], 1],
['Imean/RMS scatter', 'image_number', ['I/rms'], 0, [4], 1],
['Number of rejects', 'image_number', ['Nrej'], 0, [7], 1],
['Anom & Imean CCs v resolution', 'Dmin (A)', ['CCanom', 'CC1/2'], 1, [3,6], 2],
['RMS correlation ratio', 'Dmin (A)', ['RCRanom'], 1, [5], 2],
#['Imean CCs v resolution', 'Dmin (A)', ['CC_d12', 'CC_d3'], 1, [3,4], 3],
#['Mn(I/sd) v resolution', 'Dmin (A)', ['(I/sd)d12', '(I/sd)d3'], 1, [5,6], 3],
#['Projected Imean CCs v resolution', 'Dmin (A)', ['CCp1', 'CCp3'], 1, [7,8], 3],
['I/sigma, Mean Mn(I)/sd(Mn(I))', 'Dmin (A)', ['I/RMS','Mn(I/sd)'], 1, [12,13], 3],
['Rmerge, Rfull, Rmeas, Rpim v Resolution', 'Dmin (A)', ['Rmerge', 'Rfull', 'Rmeas', 'Rpim'], 1, [3,4,6,7], 3],
['Average I, RMSdeviation and Sd', 'Dmin (A)', ['AvI', 'RMSdev', 'sd'], 1, [9,10,11], 3],
['Fractional bias', 'Dmin (A)', ['FrcBias'], 1, [14], 3],
['Rmerge, Rmeas, Rpim v Resolution', 'Dmin (A)',
['Rmerge', 'RmergeOv', 'Rmeas', 'RmeasOv', 'Rpim', 'RpimOv'], 1, [3,4,7,8,9,10], 4],
['Rmerge v Intensity', 'Imax', ['Rmerge', 'Rmeas', 'Rpim'], 0, [1,3,4], 5],
['Completeness v Resolution', 'Dmin (A)', ['%poss', 'C%poss', 'AnoCmp', 'AnoFrc'], 1, [6,7,9,10], 6],
['Multiplicity v Resolution', 'Dmin (A)', ['Mlpclct', 'AnoMlt'], 1, [8,11], 6],
['Sigma(scatter/SD), within 5 sd', '<I>', ['SdFc'], 1, [7], 7],
['Sigma(scatter/SD, within 5 SD, all and within', '<I>', ['SdF', 'SdFc'], 1, [4,7], 7],
['Rcp v. batch', 'relative frame difference', ['Rcp'], 1, [-1], 8]
]
return(graphs, tables, int_results)
def parse_aimless2(self, logfile):
"""
Parses the aimless logfile in order to pull out data for
graphing and the results summary table.
Relevant values for the summary table are stored in a dict.
Relevant information for creating plots are stored in a dict,
with the following format for each entry (i.e. each plot):
{"<*plot label*>":{
"data":{
"parameters":{<*line parameters*>},
"series":[
{xs : [],
ys : []
}
]
}
"parameters" : {<*plot parameters*>}
}
...
...
}
"""
log = smartie.parselog(logfile)
# Pull out information for the results summary table.
flag = True
summary = log.keytext(0).message().split("\n")
# For some reason "Anomalous flag switched ON" is not always
# found, so the line below creates a blank entry for the
        # variable that should be created when that phrase is
# found, eliminating the problem where the program reports that
# the variable anomalous_report is referenced before assignment.
anomalous_report = ""
for line in summary:
if "Space group" in line:
space_group = line.strip().split(": ")[-1]
elif "Average unit cell" in line:
unit_cell = map(float, line.split()[3:])
elif "Anomalous flag switched ON" in line:
anomalous_report = line
int_results = {
"bins_low": map(float, summary[3].split()[-3:]),
"bins_high": map(float, summary[4].split()[-3:]),
"rmerge_anom": map(float, summary[6].split()[-3:]),
"rmerge_norm": map(float, summary[7].split()[-3:]),
"rmeas_anom": map(float, summary[8].split()[-3:]),
"rmeas_norm": map(float, summary[9].split()[-3:]),
"rpim_anom": map(float, summary[10].split()[-3:]),
"rpim_norm": map(float, summary[11].split()[-3:]),
"rmerge_top": float(summary[12].split()[-3]),
"total_obs": map(int, summary[13].split()[-3:]),
"unique_obs": map(int, summary[14].split()[-3:]),
"isigi": map(float, summary[15].split()[-3:]),
"cc-half": map(float, summary[16].split()[-3:]),
"completeness": map(float, summary[17].split()[-3:]),
"multiplicity": map(float, summary[18].split()[-3:]),
"anom_completeness": map(float, summary[20].split()[-3:]),
"anom_multiplicity": map(float, summary[21].split()[-3:]),
"anom_correlation": map(float, summary[22].split()[-3:]),
"anom_slope": [float(summary[23].split()[-3])],
"scaling_spacegroup": space_group,
"scaling_unit_cell": unit_cell,
"text2": anomalous_report
}
# Smartie can pull table information based on a regular
# expression pattern that matches the table title from
# the aimless log file.
# NOTE : the regular expression must match the beginning
# of the table's title, but does not need to be the entire
# title.
#
# We will use this to pull out the data from tables we are
# interested in.
#
# The beginning of the titles for all common tables in the
# aimless log file are given below, but not all of them
# are currently used to generate a plot.
scales = "=== Scales v rotation"
rfactor = "Analysis against all Batches"
cchalf = "Correlations CC(1/2)"
anisotropy = "Anisotropy analysis"
vresolution = "Analysis against resolution, XDSdataset"
anomalous = "Analysis against resolution, with & without"
intensity = "Analysis against intensity"
completeness = "Completeness & multiplicity"
deviation = "Run 1, standard deviation"
rcp = "Radiation damage"
plots = {
"Rmerge vs Frame": {
"data" :[
{
"parameters" :
{
"linecolor" : "3",
"linelabel" : "Rmerge",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [int(x) for x in log.tables(rfactor)[0].col("N")],
"ys" : [try_float(x, 0.0) for x in log.tables(rfactor)[0].col("Rmerge")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "SmRmerge",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_int(x) for x in log.tables(rfactor)[0].col("N")],
"ys" : [try_float(x, 0.0) for x in log.tables(rfactor)[0].col("SmRmerge")]
} ]
} ],
"parameters" :
{
"toplabel" : "Rmerge vs Batch for all Runs",
"xlabel" : "Image Number"
}
},
"Imean/RMS scatter" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "I/rms",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [int(x) for x in log.tables(rfactor)[0].col("N")],
"ys" : [try_float(x, 0.0) for x in log.tables(rfactor)[0].col("I/rms")]
} ]
} ],
"parameters" :
{
"toplabel" : "Imean / RMS scatter",
"xlabel" : "Image Number"
}
},
"Anomalous & Imean CCs vs Resolution" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "CCanom",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("CCanom")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "CC1/2",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("CC1/2")]
} ]
} ],
"parameters" :
{
"toplabel" : "Anomalous & Imean CCs vs. Resolution",
"xlabel" : "Dmid (Angstroms)"
}
},
"RMS correlation ration" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "RCRanom",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(cchalf)[0].col("RCRanom")]
} ]
} ],
"parameters" :
{
"toplabel" : "RMS correlation ratio",
"xlabel" : "Dmid (Angstroms)"
}
},
"I/sigma, Mean Mn(I)/sd(Mn(I))" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "I/RMS",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("I/RMS")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "Mn(I/sd)",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("Mn(I/sd)")]
} ]
} ],
"parameters" :
{
"toplabel" : "I/sigma, Mean Mn(I)/sd(Mn(I))",
"xlabel" : "Dmid (Angstroms)"
}
},
"Rmerge, Rfull, Rmeas, Rpim vs. Resolution" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "Remerge",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("Rmrg")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "Rfull",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("Rfull")]
} ]
},
{
"parameters" :
{
"linecolor" : "5",
"linelabel" : "Rmeas",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("Rmeas")]
} ]
},
{
"parameters" :
{
"linecolor" : "6",
"linelabel" : "Rpim",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("Rpim")]
} ]
} ],
"parameters" :
{
"toplabel" : "Rmerge, Rfull, Rmeas, Rpim vs. Resolution",
"xlabel" : "Dmid (Angstroms)"
}
},
"Average I, RMS deviation, and Sd" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "Average I",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_int(x, 0) for x in log.tables(vresolution)[0].col("AvI")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "RMS deviation",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("RMSdev")]
} ]
},
{
"parameters" :
{
"linecolor" : "5",
"linelabel" : "std. dev.",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(vresolution)[0].col("sd")]
} ]
} ],
"parameters" :
{
"toplabel" : "Average I, RMS dev., and std. dev.",
"xlabel" : "Dmid (Ansgstroms)"
}
},
"Completeness" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "%poss",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("%poss")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "C%poss",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("C%poss")]
} ]
},
{
"parameters" :
{
"linecolor" : "5",
"linelabel" : "AnoCmp",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("AnoCmp")]
} ]
},
{
"parameters" :
{
"linecolor" : "6",
"linelabel" : "AnoFrc",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("AnoFrc")]
} ]
} ],
"parameters" :
{
"toplabel" : "Completeness vs. Resolution",
"xlabel" : "Dmid (Angstroms)"
}
},
"Redundancy" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "multiplicity",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("Mlplct")]
} ]
},
{
"parameters" :
{
"linecolor" : "4",
"linelabel" : "anomalous multiplicity",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("1/d^2")],
"ys" : [try_float(x, 0.0) for x in log.tables(completeness)[0].col("AnoMlt")]
} ]
} ],
"parameters" :
{
"toplabel" : "Redundancy",
"xlabel" : "Dmid (Angstroms)"
}
},
"Radiation Damage" :
{
"data" :
[ {
"parameters" :
{
"linecolor" : "3",
"linelabel" : "Rcp",
"linetype" : "11",
"linewidth" : "3"
},
"series" :
[ {
"xs" : [ int(x) for x in log.tables(rcp)[0].col("Batch")],
"ys" : [try_float(x, 0.0) for x in log.tables(rcp)[0].col("Rcp")]
} ]
} ],
"parameters" :
{
"toplabel" : "Rcp vs. Batch",
"xlabel" : "Relative frame difference"
}
}
}
# Return to the main program.
return (plots, int_results)
def aimless(self, mtzin, resolution=False):
"""
Runs aimless on the data, including the scaling step.
"""
self.logger.debug('FastIntegration::aimless')
self.tprint(arg=" Running Aimless",
level=99,
color="white")
mtzout = mtzin.replace('pointless', 'aimless')
logfile = mtzout.replace('mtz', 'log')
comfile = mtzout.replace('mtz', 'com')
aimless_file = ['#!/bin/tcsh\n',
#'/share/apps/necat/programs/ccp4-6.3.0/ccp4-6.3.0/bin/aimless hklin %s hklout %s << eof > %s\n' % (mtzin, mtzout, logfile),
'aimless hklin %s hklout %s << eof > %s\n' % (mtzin, mtzout, logfile),
'anomalous on\n',
'scales constant\n',
'sdcorrection norefine full 1 0 0 partial 1 0 0\n',
'cycles 0\n']#, Change made on Feb. 20, 2015 to exclude bins resolution
#'bins resolution 10\n']
if resolution != False:
aimless_file.append('resolution %s\n' % resolution)
aimless_file.append('eof')
self.write_file(comfile, aimless_file)
os.chmod(comfile, stat.S_IRWXU)
cmd = './%s' % comfile
# os.system(cmd)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
return logfile
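    # The generated com file reads roughly as follows (file names are
    # placeholders; the resolution line appears only when a cutoff is given):
    #   #!/bin/tcsh
    #   aimless hklin x_pointless.mtz hklout x_aimless.mtz << eof > x_aimless.log
    #   anomalous on
    #   scales constant
    #   sdcorrection norefine full 1 0 0 partial 1 0 0
    #   cycles 0
    #   resolution 1.93
    #   eof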
def pointless(self):
"""
Runs pointless on the default reflection file, XDS_ASCII.HKl
to produce an mtz file suitable for input to aimless.
"""
self.logger.debug("FastIntegration::pointless")
self.tprint(arg=" Running Pointless", level=10, color="white")
hklfile = 'XDS_ASCII.HKL'
mtzfile = '_'.join([self.image_data['image_prefix'], 'pointless.mtz'])
logfile = mtzfile.replace('mtz', 'log')
if self.spacegroup:
cmd = ('pointless xdsin %s hklout %s << eof > %s\nSETTING C2\nSPACEGROUP HKLIN\n eof'
% (hklfile, mtzfile, logfile))
else:
cmd = ('pointless xdsin %s hklout %s << eof > %s\n SETTING C2 \n eof'
% (hklfile, mtzfile, logfile))
self.logger.debug("cmd = %s", cmd)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
# sts = os.waitpid(p.pid, 0)[1]
tmp = open(logfile, "r").readlines()
return_value="Failed"
        for i in range(-10, 0):
if tmp[i].startswith('P.R.Evans'):
return_value=mtzfile
break
return(return_value)
def parse_xdsstat(self, log, tables_length):
"""
Parses the output of xdsstat (XDSSTAT.LP) to pull out the Rd
information
"""
        self.logger.debug('FastIntegration::parse_xdsstat')
rd_table = []
xdsstat = open(log,'r').readlines()
for line in xdsstat:
if 'DIFFERENCE' in line:
split_line = line.split()
# extract Framediff, R_d, Rd_notfriedel, Rd_friedel.
table_line = [split_line[0], split_line[2], split_line[4], split_line[6] ]
rd_table.append(table_line)
title = 'Rd vs frame_difference'
xlabel = 'Frame Difference'
ylabels = ['Rd', 'Rd_notfriedel', 'Rd_friedel']
xcol = 0
ycols = [1,2,3]
tableNum = tables_length
rd_graph = (title, xlabel, ylabels, xcol, ycols, tableNum)
return(rd_graph, rd_table)
def xdsstat(self):
"""
Runs xdsstat, a program that extracts some extra statistics
from the results of XDS CORRECT.
In order for this to run, xdsstat should be installed in the user's path.
And a script called xdsstat.sh should also be created and available in the path.
        Information about the availability of xdsstat can be obtained at the xdswiki:
http://strucbio.biologie.uni-konstanz.de/xdswiki/index.php/Xdsstat#Availability
xdsstat.sh is a simple three line shell script:
#!/bin/tcsh
xdsstat << eof > XDSSTAT.LP
XDS_ASCII.HKL
eof
It runs xdsstat on the default reflection file XDS_ASCII.HKL and sends the
output to the file XDSSTAT.LP
"""
self.logger.debug('FastIntegration::xdsstat')
self.tprint(arg=" Running XDSSTAT", level=10, color="white")
# Check to see if xdsstat exists in the path
test = find_executable("xdsstat.sh")
if test == None:
self.logger.debug(' xdsstat.sh is not in the defined PATH')
# Write xdsstat.sh
            xdsstatsh = ["#!/bin/bash\n",
                         "xdsstat << eof > XDSSTAT.LP\n",
                         "XDS_ASCII.HKL\n",
                         "eof\n"]
            self.write_file("xdsstat.sh", xdsstatsh)
            os.chmod("./xdsstat.sh", stat.S_IRWXU)
try:
job = Process(target=Utils.processLocal, args=(('xdsstat.sh'), self.logger))
job.start()
while job.is_alive():
time.sleep(1)
except IOError as e:
self.logger.debug(' xdsstat.sh failed to run properly')
self.logger.debug(e)
return('Failed')
if os.path.isfile('XDSSTAT.LP'):
return('XDSSTAT.LP')
else:
self.logger.debug(' XDSSTAT.LP does not exist')
return('Failed')
def finish_data(self, results):
"""
Final creation of various files (e.g. an mtz file with R-flag added,
.sca files with native or anomalous data treatment)
"""
in_file = os.path.join(results['dir'], results['mtzfile'])
self.logger.debug('FastIntegration::finish_data - in_file = %s', in_file)
# Truncate the data.
comfile = ['#!/bin/csh\n',
'truncate hklin %s hklout truncated.mtz << eof > truncate.log\n'
% in_file,
'ranges 60\n',
'eof\n']
self.write_file('truncate.sh', comfile)
os.chmod('truncate.sh', stat.S_IRWXU)
p = subprocess.Popen('./truncate.sh',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
# Set the free R flag.
comfile = ['#!/bin/csh\n',
'freerflag hklin truncated.mtz hklout freer.mtz <<eof > freer.log\n',
'END\n',
'eof']
self.write_file('freer.sh', comfile)
os.chmod('freer.sh', stat.S_IRWXU)
p = subprocess.Popen('./freer.sh',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
# Create the merged scalepack format file.
comfile = ['#!/bin/csh\n',
'mtz2various hklin truncated.mtz hklout NATIVE.sca ',
'<< eof > mtz2scaNAT.log\n',
'OUTPUT SCALEPACK\n',
'labin I=IMEAN SIGI=SIGIMEAN\n',
'END\n',
'eof']
self.write_file('mtz2scaNAT.sh', comfile)
os.chmod('mtz2scaNAT.sh', stat.S_IRWXU)
p = subprocess.Popen('./mtz2scaNAT.sh',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
self.fixMtz2Sca('NATIVE.sca')
Utils.fixSCA(self, 'NATIVE.sca')
# Create the unmerged scalepack format file.
comfile = ['#!/bin/csh\n',
'mtz2various hklin truncated.mtz hklout ANOM.sca ',
'<< eof > mtz2scaANOM.log\n',
'OUTPUT SCALEPACK\n',
'labin I(+)=I(+) SIGI(+)=SIGI(+) I(-)=I(-) SIGI(-)=SIGI(-)\n',
'END\n',
'eof']
self.write_file('mtz2scaANOM.sh', comfile)
os.chmod('mtz2scaANOM.sh', stat.S_IRWXU)
p = subprocess.Popen('./mtz2scaANOM.sh',
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
self.fixMtz2Sca('ANOM.sca')
Utils.fixSCA(self, 'ANOM.sca')
# Create a mosflm matrix file
correct_file = os.path.join(results['dir'], 'CORRECT.LP')
Xds2Mosflm(xds_file=correct_file, mat_file="reference.mat")
# Clean up the filesystem.
# Move some files around
if os.path.isdir('%s/xds_lp_files' % self.dirs['work']) == False:
os.mkdir('%s/xds_lp_files' % self.dirs['work'])
os.system('cp %s/*.LP %s/xds_lp_files/' % (results['dir'], self.dirs['work']))
tar_name = '_'.join([self.image_data['image_prefix'], str(self.image_data['run_number'])])
results_dir = os.path.join(self.dirs['work'], tar_name)
if os.path.isdir(results_dir) == False:
os.mkdir(results_dir)
prefix = '%s/%s_%s' %(results_dir, self.image_data['image_prefix'],
self.image_data['run_number'])
os.system('cp freer.mtz %s_free.mtz' % prefix)
os.system('cp NATIVE.sca %s_NATIVE.sca' % prefix)
os.system('cp ANOM.sca %s_ANOM.sca' % prefix)
os.system('cp %s/*aimless.log %s_aimless.log' %(results['dir'], prefix))
os.system('cp %s/*aimless.com %s_aimless.com' %(results['dir'], prefix))
os.system('cp %s/*pointless.mtz %s_mergable.mtz' %(results['dir'], prefix))
os.system('cp %s/*pointless.log %s_pointless.log' %(results['dir'], prefix))
os.system('cp %s/XDS.LOG %s_XDS.LOG' %(results['dir'], prefix))
os.system('cp %s/XDS.INP %s_XDS.INP' %(results['dir'], prefix))
os.system('cp %s/CORRECT.LP %s_CORRECT.LP' %(results['dir'], prefix))
os.system('cp %s/INTEGRATE.LP %s_INTEGRATE.LP' %(results['dir'], prefix))
os.system('cp %s/XDSSTAT.LP %s_XDSSTAT.LP' %(results['dir'], prefix))
os.system('cp %s/XDS_ASCII.HKL %s_XDS.HKL' %(results['dir'], prefix))
# Remove any integration directories.
os.system('rm -rf wedge_*')
# Remove extra files in working directory.
os.system('rm -f *.mtz *.sca *.sh *.log junk_*')
# Create a downloadable tar file.
tar_dir = tar_name
tar_name += '.tar.bz2'
tarname = os.path.join(self.dirs['work'], tar_name)
# print 'tar -cjf %s %s' %(tar_name, tar_dir)
# print os.getcwd()
os.chdir(self.dirs['work'])
# print os.getcwd()
os.system('tar -cjf %s %s' %(tar_name, tar_dir))
# Tarball the XDS log files
lp_name = 'xds_lp_files.tar.bz2'
# print "tar -cjf %s xds_lp_files/" % lp_name
os.system("tar -cjf %s xds_lp_files/" % lp_name)
# Remove xds_lp_files directory
os.system('rm -rf xds_lp_files')
# If ramdisks were used, erase files from ram_disks.
if self.ram_use == True and self.settings['ram_cleanup'] == True:
remove_command = 'rm -rf /dev/shm/%s' % self.image_data['image_prefix']
for node in self.ram_nodes[0]:
command2 = 'ssh -x %s "%s"' % (node, remove_command)
p = subprocess.Popen(command2,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
p.wait()
tmp = results
#if shelxc_results != None:
# tmp['shelxc_results'] = shelxc_results
files = {'mergable' : '%s_mergable.mtz' % prefix,
'mtzfile' : '%s_free.mtz' % prefix,
'ANOM_sca' : '%s_ANOM.sca' % prefix,
'NATIVE_sca' : '%s_NATIVE.sca' % prefix,
                 # Keys keep the legacy 'scala' names for downstream consumers,
                 # but point at the aimless files actually copied above.
                 'scala_log' : '%s_aimless.log' % prefix,
                 'scala_com' : '%s_aimless.com' % prefix,
'xds_data' : '%s_XDS.HKL' % prefix,
'xds_log' : '%s_XDS.LOG' % prefix,
'xds_com' : '%s_XDS.INP' % prefix,
'downloadable' : tarname
}
tmp['files'] = files
return(tmp)
def fixMtz2Sca(self, scafile):
"""
Corrects the scalepack file generated by mtz2various by removing
whitespace in the spacegroup name.
"""
self.logger.debug('FastIntegration::fixMtz2Sca scafile = %s' % scafile)
inlines = open(scafile, 'r').readlines()
symline = inlines[2]
newline = (symline[:symline.index(symline.split()[6])]
+ ''.join(symline.split()[6:]) + '\n')
inlines[2] = newline
self.write_file(scafile, inlines)
return
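    # Sketch of the fix above on a hypothetical line 3 of a scalepack file:
    #   before: '   79.1   79.1   38.0  90.0  90.0  90.0 p 43 21 2'
    #   after:  '   79.1   79.1   38.0  90.0  90.0  90.0 p43212'
    # i.e. everything from the seventh whitespace-separated token onward is
    # joined so the spacegroup name no longer contains spaces.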
def run_analysis(self, data_to_analyze, dir):
"""
Runs "pdbquery" and xtriage on the integrated data.
data_to_analyze = the integrated mtzfile
dir = the working integration directory
"""
self.logger.debug('FastIntegration::run_analysis')
self.logger.debug(' data = %s' % data_to_analyze)
self.logger.debug(' dir = %s' % dir)
analysis_dir = os.path.join(dir, 'analysis')
if os.path.isdir(analysis_dir) == False:
os.mkdir(analysis_dir)
run_dict = {'fullname' : self.image_data['fullname'],
# 'fullname' : self.first_image
'total' : self.image_data['total'],
'osc_range' : self.image_data['osc_range'],
'x_beam' : self.image_data['x_beam'],
'y_beam' : self.image_data['y_beam'],
'two_theta' : self.image_data.get("twotheta", 0),
'distance' : self.image_data['distance']
}
pdb_input = []
pdb_dict = {}
pdb_dict['run'] = run_dict
pdb_dict['dir'] = analysis_dir
pdb_dict['data'] = data_to_analyze
pdb_dict["agent_directories"] = self.dirs.get("agent_directories", False)
pdb_dict['control'] = self.controller_address
pdb_dict['process_id'] = self.process_id
pdb_input.append(pdb_dict)
self.logger.debug(' Sending pdb_input to Autostats')
# try:
T = AutoStats(pdb_input, self.logger)
self.logger.debug('I KNOW WHO YOU ARE')
# except:
# self.logger.debug(' Execution of AutoStats failed')
# return('Failed')
return "Success"
# def process_shelxC(self, unitcell, spacegroup, scafile):
# """
# Runs shelxC. Determines an appropriate cutoff for anomalous signal.
# Inserts table of shelxC results into the results summary page.
# """
# self.logger.debug('FastIntegration::process_shelxC')
# command = ('shelxc junk << EOF\nCELL %s\nSPAG %s\nSAD %s\nEOF'
# % (unitcell, spacegroup, scafile) )
# shelx_log = []
# output0 = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
# stderr=subprocess.STDOUT)
# output0.wait()
# for line in output0.stdout:
# shelx_log.append(line.strip())
# self.logger.debug(line)
# results = self.parse_shelxC(shelx_log)
# res = False
# for i,v in enumerate(results['shelx_dsig']):
# dsig = float(v)
# if dsig > 1.0:
# res =results['shelx_res'][i]
# results['shelx_rescut'] = res
# #self.insert_shelx_results(results)
# return results
# def parse_shelxC(self, logfile):
# """
# Parses the shelxc output.
# """
# self.logger.debug('FastIntegration::parse_shelxC')
# shelxc_results={}
# for line in logfile:
# if line.startswith('Resl'):
# if line.split()[2] == '-':
# shelxc_results['shelx_res'] = line.split()[3::2]
# else:
# shelxc_results['shelx_res'] = line.split()[2:]
# #shelxc_results['shelx_res'] = line.split()[3::2]
# shelxc_results['shelx_res'] = line.split()[2:]
# elif line.startswith('N(data)'):
# shelxc_results['shelx_data'] = line.split()[1:]
# elif line.startswith('<I/sig>'):
# shelxc_results['shelx_isig'] = line.split()[1:]
# elif line.startswith('%Complete'):
# shelxc_results['shelx_comp'] = line.split()[1:]
# elif line.startswith('<d"/sig>'):
# shelxc_results['shelx_dsig'] = line.split()[1:]
# return(shelxc_results)
# def insert_shelx_results(self, results):
# """
# Inserts shelxC results into the results summary webpage.
# """
# self.logger.debug('FastIntegration::insert_shelx_results')
#
# htmlfile = open('results.php', 'r').readlines()
# if results['shelx_rescut'] == False:
# text = ('\nAnalysis of ShelxC results finds no resolution shell '
# + 'where d"/sig is greater than 1.0.\n')
# htmlfile.insert(-10, text)
# else:
# text = ('\nAnalsysis of ShelxC results finds d"/sig greater than '
# + '1.0 for at least one resolution shell.\n')
# htmlfile.insert(-10, text)
# shelxc = ('<div align ="center">\n' +
# '<h3 class="green">ShelxC analysis of data</h3>\n' +
# '<table class="integrate">\n' +
# '<tr><th>Resl.</th>')
# for item in results['shelx_res']:
# shelxc += ('<td>%s</td>' % item)
# shelxc += ('</tr>\n<tr class="alt"><th>N(data)</th>')
# for item in results['shelx_data']:
# shelxc += ('<td>%s</td>' % item)
# shelxc +=('</tr>\n<tr><th>IsigI</th>')
# for item in results['shelx_isig']:
# shelxc += ('<td>%s</td>' % item)
# shelxc += ('</tr>\n<tr class="alt"><th>%Complete</th>')
# for item in results['shelx_comp']:
# shelxc += ('<td>%s</td>' % item)
# shelxc += ('</tr>\n<tr><th>d"/sig</th>')
# for item in results['shelx_dsig']:
# shelxc += ('<td>%s</td>' % item)
# shelxc += ('</tr>\n<caption>For zero signal d"/sig should be '
# + 'about 0.80</caption>\n</table></div><br>\n')
# htmlfile.insert(-9, shelxc)
# self.write_file('results.php', htmlfile)
# return
def parse_integrateLP(self):
"""
Parse the INTEGRATE.LP file and extract information
about the mosaicity.
"""
self.logger.debug('FastIntegration::parse_integrateLP')
lp = open('INTEGRATE.LP', 'r').readlines()
for linenum, line in enumerate(lp):
if 'SUGGESTED VALUES FOR INPUT PARAMETERS' in line:
avg_mosaicity_line = lp[linenum + 2]
avg_mosaicity = avg_mosaicity_line.strip().split(' ')[-1]
return(avg_mosaicity)
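    # In INTEGRATE.LP the block located above typically looks like this
    # (numbers are illustrative):
    #   SUGGESTED VALUES FOR INPUT PARAMETERS
    #    BEAM_DIVERGENCE=  0.574  BEAM_DIVERGENCE_E.S.D.=  0.057
    #    REFLECTING_RANGE=  0.612  REFLECTING_RANGE_E.S.D.=  0.087
    # so lp[linenum + 2] is the REFLECTING_RANGE line and its final token
    # (the e.s.d., a stand-in for mosaicity) is returned.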
def parse_correctLP(self):
"""
Parses the CORRECT.LP file to extract information
"""
self.logger.debug('FastIntegration::parse_correctLP')
lp = open('CORRECT.LP', 'r').readlines()
for i, line in enumerate(lp):
if 'ISa\n' in line:
isa_line = lp[i + 1]
break
ISa = isa_line.strip().split()[-1]
return(ISa)
def find_xds_symm(self, xdsdir, xdsinp):
"""
Checks xds results for consistency with user input spacegroup.
If inconsistent, tries to force user input spacegroup on data.
        Returns new input file for integration
"""
sg_num = int(Utils.std2intl[self.spacegroup])
# Change to directory
os.chdir(xdsdir)
new_inp = self.modify_xdsinput_for_symm(xdsinp, sg_num, "IDXREF.LP")
# Make sure we end in the right place
os.chdir(self.dirs['work'])
return new_inp
def modify_xdsinput_for_symm(self, xdsinp, sg_num, logfile):
"""
        Modifies the XDS input to rerun integration in the user input spacegroup
"""
if sg_num == 1:
bravais = 'aP'
        elif 3 <= sg_num <= 4:
            bravais = 'mP'
        elif sg_num == 5:
            bravais = 'mC'
        elif 16 <= sg_num <= 19:
            bravais = 'oP'
        elif 20 <= sg_num <= 21:
            bravais = 'oC'
        elif sg_num == 22:
            bravais = 'oF'
        elif 23 <= sg_num <= 24:
            bravais = 'oI'
        elif 75 <= sg_num <= 78 or 89 <= sg_num <= 96:
            bravais = 'tP'
        elif 79 <= sg_num <= 80 or 97 <= sg_num <= 98:
            bravais = 'tI'
        elif 143 <= sg_num <= 145 or 149 <= sg_num <= 154 or 168 <= sg_num <= 182:
            bravais = 'hP'
        elif sg_num == 146 or sg_num == 155:
            bravais = 'hR'
        elif sg_num == 195 or sg_num == 198 or 207 <= sg_num <= 208 or 212 <= sg_num <= 213:
            bravais = 'cP'
        elif sg_num == 196 or 209 <= sg_num <= 210:
            bravais = 'cF'
        elif sg_num == 197 or sg_num == 199 or sg_num == 211 or sg_num == 214:
            bravais = 'cI'
# Now search IDXREF.LP for matching cell information.
idxref = open(logfile, 'r').readlines()
for line in idxref:
# print line
if bravais in line and '*' in line:
splitline = line.split()
# print splitline
# print splitline[4:]
break
cell = ('%s %s %s %s %s %s' % tuple(splitline[4:]))
xdsinp[-2] = 'JOB=DEFPIX INTEGRATE CORRECT\n\n'
xdsinp.append('SPACE_GROUP_NUMBER=%d\n' % sg_num)
xdsinp.append('UNIT_CELL_CONSTANTS=%s\n' % cell)
# self.write_file('XDS.INP', xdsinp)
return xdsinp
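    # Worked example (hypothetical numbers): a user-supplied spacegroup of
    # P41212 is number 92, which falls in the 89-96 range above, so
    # bravais = 'tP'; the matching starred 'tP' line in IDXREF.LP then
    # supplies the cell, and the input gains lines such as:
    #   SPACE_GROUP_NUMBER=92
    #   UNIT_CELL_CONSTANTS=57.8 57.8 150.2 90.0 90.0 90.0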
def print_results(self, results):
"""Print out results to the terminal"""
if isinstance(results, dict):
# Print summary
summary = results["summary"]
# pprint(summary)
self.tprint(" Spacegroup: %s" % summary["scaling_spacegroup"], 99, "white")
self.tprint(" Unit cell: %5.1f %5.1f %5.1f %5.2f %5.2f %5.2f" %
tuple(summary["scaling_unit_cell"]), 99, "white")
self.tprint(" Mosaicity: %5.3f" % summary["mosaicity"], 99, "white")
self.tprint(" overall inner shell outer shell", 99, "white")
self.tprint(" High res limit %5.2f %5.2f %5.2f" %
tuple(summary["bins_high"]), 99, "white")
self.tprint(" Low res limit %5.2f %5.2f %5.2f" %
tuple(summary["bins_low"]), 99, "white")
self.tprint(" Completeness %5.1f %5.1f %5.1f" %
tuple(summary["completeness"]), 99, "white")
self.tprint(" Multiplicity %4.1f %4.1f %4.1f" %
tuple(summary["multiplicity"]), 99, "white")
self.tprint(" I/sigma(I) %4.1f %4.1f %4.1f" %
tuple(summary["isigi"]), 99, "white")
self.tprint(" CC(1/2) %5.3f %5.3f %5.3f" %
tuple(summary["cc-half"]), 99, "white")
self.tprint(" Rmerge %5.3f %5.3f %5.3f" %
tuple(summary["rmerge_norm"]), 99, "white")
self.tprint(" Anom Rmerge %5.3f %5.3f %5.3f" %
tuple(summary["rmerge_anom"]), 99, "white")
self.tprint(" Rmeas %5.3f %5.3f %5.3f" %
tuple(summary["rmeas_norm"]), 99, "white")
self.tprint(" Anom Rmeas %5.3f %5.3f %5.3f" %
tuple(summary["rmeas_anom"]), 99, "white")
self.tprint(" Rpim %5.3f %5.3f %5.3f" %
tuple(summary["rpim_norm"]), 99, "white")
self.tprint(" Anom Rpim %5.3f %5.3f %5.3f" %
tuple(summary["rpim_anom"]), 99, "white")
self.tprint(" Anom Completeness %5.1f %5.1f %5.1f" %
tuple(summary["anom_completeness"]), 99, "white")
self.tprint(" Anom Multiplicity %4.1f %4.1f %4.1f" %
tuple(summary["anom_multiplicity"]), 99, "white")
self.tprint(" Anom Correlation %5.3f %5.3f %5.3f" %
tuple(summary["anom_correlation"]), 99, "white")
self.tprint(" Anom Slope %5.3f" % summary["anom_slope"][0], 99, "white")
self.tprint(" Observations %7d %7d %7d" %
tuple(summary["total_obs"]), 99, "white")
self.tprint(" Unique Observations %7d %7d %7d\n" %
tuple(summary["unique_obs"]), 99, "white")
def print_plots(self, results):
"""
Display plots on the commandline
Possible titles
plot_titles = [
'I/sigma, Mean Mn(I)/sd(Mn(I))',
'Average I, RMS deviation, and Sd',
'Completeness',
            'RMS correlation ratio',
'Imean/RMS scatter',
'Rmerge, Rfull, Rmeas, Rpim vs. Resolution',
'Radiation Damage',
'Rmerge vs Frame',
'Redundancy',
'Anomalous & Imean CCs vs Resolution'
]
"""
# Plot as long as JSON output is not selected
if self.settings.get("show_plots", True) and (not self.settings.get("json", False)):
plots = results["plots"]
# Determine the open terminal size
term_size = os.popen('stty size', 'r').read().split()
plot_type = "Rmerge vs Frame"
if plot_type in plots:
plot_data = plots[plot_type]["data"]
# plot_params = plots[plot_type]["parameters"]
# Get each subplot
raw = False
# smoothed = False
for subplot in plot_data:
if subplot["parameters"]["linelabel"] == "Rmerge":
raw = subplot
# Determine plot extent
y_array = numpy.array(raw["series"][0]["ys"])
y_max = y_array.max() * 1.1
y_min = 0 # max(0, (y_array.min() - 10))
x_array = numpy.array(raw["series"][0]["xs"])
x_max = x_array.max()
x_min = x_array.min()
gnuplot = subprocess.Popen(["gnuplot"],
stdin=subprocess.PIPE,
stderr=subprocess.PIPE)
gnuplot.stdin.write("""set term dumb %d,%d
set title 'Rmerge vs. Batch'
set xlabel 'Image #'
set ylabel 'Rmerge' rotate by 90 \n""" %
(int(term_size[1])-20, 30))
# Create the plot string
plot_string = "plot [%d:%d] [%f:%f] " % (x_min, x_max, y_min, y_max)
plot_string += "'-' using 1:2 title 'Rmerge' with lines\n"
# plot_string += "'-' using 1:2 title 'Smooth' with points\n"
gnuplot.stdin.write(plot_string)
# Run through the data and add to gnuplot
for plot in (raw, ): #smoothed):
# plot = plot_data["data"][i]
xs = plot["series"][0]["xs"]
ys = plot["series"][0]["ys"]
# print xs
# print ys
for i, j in zip(xs, ys):
gnuplot.stdin.write("%f %f\n" % (i, j))
gnuplot.stdin.write("e\n")
# Now plot!
gnuplot.stdin.flush()
time.sleep(2)
gnuplot.terminate()
def print_info(self):
"""
Print information regarding programs utilized by RAPD
"""
self.logger.debug('AutoindexingStrategy::print_info')
# try:
self.tprint(arg="\nRAPD integration uses:", level=99, color="blue")
"""
'\n\nRAPD used the following programs for integrating and scaling the dataset:\n',
' XDS - \n',
' "XDS", W. Kabsch (2010) Acta Cryst. D66, 125-132.\n',
' "Integration, scaling, space-group assignment and post-refinement",',
' W. Kabsch (2010) Acta Cryst. D66, 133-144.\n',
' pointless and aimless - \n',
' "Scaling and assessment of data quality", P.R.',
' Evans (2006) Acta Cryst. D62, 72-82.\n',
' "An introduction to data reduction: space-group',
' determination and intensity statistics,',
' P.R. Evans (2011) Acta Cryst. D67, 282-292\n',
' "How good are my data and what is the resolution?"',
' P.R. Evans and G.N. Murshudov (2013) Acta Cryst. D66,',
' 1204-1214.\n',
' truncate, freerflag, and mtz2various - \n',
' "The CCP4 Suite: Programs for Protein ',
'Crystallography". Acta Cryst. D50, 760-763 \n',
' xdsstat - \n http://strucbio.biologie.',
'uni-konstanz.de/xdswiki/index.php/Xdsstat\n',
'\n</pre></div></div></body>'
]
"""
info_string = """ XDS
"XDS", W. Kabsch (2010) Acta Cryst. D66, 125-132.
"Integration, scaling, space-group assignment and post-refinement",
W. Kabsch (2010) Acta Cryst. D66, 133-144.
Pointless & Aimless
"Scaling and assessment of data quality", P.R. Evans (2006) Acta Cryst.
D62, 72-82.
"An introduction to data reduction: space-group determination and
intensity statistics", P.R. Evans (2011) Acta Cryst. D67, 282-292.
"How good are my data and what is the resolution?", P.R. Evans and
G.N. Murshudov (2013) Acta Cryst. D66, 1204-1214.
"""
self.tprint(arg=info_string, level=99, color="white")
self.logger.debug(info_string)
def write_json(self, results):
"""Write a file with the JSON version of the results"""
json_string = json.dumps(results)
# Output to terminal?
if self.settings["json"]:
print json_string
# Write a file
with open("result.json", 'w') as outfile:
outfile.writelines(json_string)
class DataHandler(threading.Thread):
"""
Handles the data that is received from the incoming clientsocket
Creates a new process by instantiating a subclassed multiprocessing.Process
instance which will act on the information which is passed to it upon
instantiation. That class will then send back results on the pipe
which it is passed and Handler will send that up the clientsocket.
"""
def __init__(self, input, tprint=False, logger=False, verbose=True):
threading.Thread.__init__(self)
self.input = input
self.verbose = verbose
# If the logging instance is passed in...
if logger:
self.logger = logger
else:
# Otherwise get the logger Instance
self.logger = logging.getLogger("RAPDLogger")
self.logger.debug("DataHandler.__init__")
# Store tprint for use throughout
if tprint:
self.tprint = tprint
# Dead end if no tprint passed
else:
def func(arg=False, level=False, verbosity=False, color=False):
pass
self.tprint = func
self.start()
def run(self):
# Create a pipe to allow interprocess communication.
#parent_pipe,child_pipe = Pipe()
# Instantiate the integration case
tmp = RapdAgent(None, self.input, self.tprint, self.logger)
# Print out what would be sent back to the RAPD caller via the pipe
# self.logger.debug parent_pipe.recv()
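# A minimal, self-contained sketch of the Pipe-based handoff the class
# docstring above describes; the commented-out parent_pipe/child_pipe lines
# in run() would follow the same pattern. The helper below is illustrative
# only and not part of RAPD.
def _pipe_handoff_demo():
    from multiprocessing import Pipe, Process

    def _echo(conn):
        # a stand-in for the agent: send one result object and hang up
        conn.send({'status': 'done'})
        conn.close()

    parent_pipe, child_pipe = Pipe()
    worker = Process(target=_echo, args=(child_pipe,))
    worker.start()
    result = parent_pipe.recv()  # blocks until the child sends its result
    worker.join()
    return result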
if __name__ == '__main__':
# Set up logging
LOG_FILENAME = '/gpfs5/users/necat/David/process/temp3/fast_integration.logger'
logger = logging.getLogger('RAPDLogger')
logger.setLevel(logging.DEBUG)
handler = logging.handlers.RotatingFileHandler(
LOG_FILENAME, maxBytes=1000000, backupCount=5)
formatter = logging.Formatter('%(asctime)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
# Construct test input
command = 'INTEGRATE'
dirs = { 'images' : \
'/gpfs6/users/necat/test_data/lyso/',
'data_root_dir' : 'gpfs6/users/necat/',
'work' : '/gpfs5/users/necat/David/process/temp3/',
'html' : '/gpfs5/users/necat/David/process/temp3/',
'user' : '/home/dneau/RAPD_testing/test/'}
image_data = {'osc_start' : '0.00',
'osc_range' : '0.10',
'size1' : '2463',
'size2' : '2527',
'image_prefix' : 'lysozym-1',
'beamline' : '24_ID_C',
'ID' : 'lysozym-1_1',
'detector' : 'PILATUS',
'distance' : '380.00',
'x_beam' : '215.1',
'y_beam' : '211.2',
'pixel_size' : '0.172',
'wavelength' : '0.9999',
'run_number' : '1',
'twotheta' : 0.0,
'ccd_image_saturation' : '65535',
'directory' : '/gpfs6/users/necat/test_data/lyso/',
'process_id' : '0',
'fullname' : \
'/gpfs6/users/yale/Pyle_Aug11/image/marco/GIIi/mm2-2/mm2-2_1_005.img' }
run_data = {'distance' : '380.0',
'image_prefix' : 'lysozym-1',
'run_number' : '1',
'start' : 1,
'time' : 1.0,
'directory' : '/gpfs6/users/necat/test_data/lyso/',
'total' : 500}
data = {'image_data' : image_data,
'run_data' : run_data}
settings = {'spacegroup' : 'P41212',
'work_directory' : '/home/dneau/RAPD_testing/test/mosflm_test',
'work_dir_override' : 'False',
'anomalous' : 'False',
'multiprocessing' : 'True',
'ram_integrate' : False,
'ram_nodes' : [['compute-0-15', 'compute-0-1', 'compute-0-2', 'compute-0-3', 'compute-0-4',
'compute-0-5','compute-0-6', 'compute-0-7', 'compute-0-8', 'compute-0-9',
'compute-0-10', 'compute-0-11', 'compute-0-12', 'compute-0-13',
'compute-0-14'],
[1, 61, 121, 181, 241, 301, 361, 421, 481, 541, 601, 661, 721, 781, 841],
[60, 120, 180, 240, 300, 360, 420, 480, 540, 600, 660, 720, 780, 840, 900]
],
'ram_cleanup' : False
}
controller_address = ['127.0.0.1' , 50001]
input = [command, dirs, data, settings, controller_address]
# Call the handler.
T = DataHandler(input, logger=logger)
|
agpl-3.0
| 9,169,712,204,212,176,000
| 41.180723
| 148
| 0.501756
| false
| 3.645746
| false
| false
| false
|
vencejo/LSystem-en-Minecraft-
|
math3D.py
|
1
|
3574
|
''' Program based on the work of Daniel Bates http://www.cl.cam.ac.uk/~db434/
whose source code can be seen at: http://www.cl.cam.ac.uk/~db434/files/setblockdemo.py '''
from math import sin, cos, radians,degrees, sqrt, pow , acos
class coordinate3d:
"""Class used to represent a point in 3D space."""
def __init__(self,x,y,z):
self.x = x
self.y = y
self.z = z
def __add__(self, other):
return coordinate3d(self.x+other.x, self.y+other.y, self.z+other.z)
def __mul__(self, other):
# Multiplication by a scalar
return coordinate3d(self.x*other, self.y*other, self.z*other)
def __str__(self):
return str([self.x, self.y, self.z])
def modulo(self):
return sqrt(pow(self.x,2)+pow(self.y,2)+pow(self.z,2))
class transformation:
"""Representation of homogeneous matrices used to apply transformations to
coordinates - using a 4x4 matrix allows shifts as well as scales/rotations.
Transformations can be combined by multiplying them together."""
def __init__(self, matrix):
self.matrix = matrix
def __mul__(self, other):
if isinstance(other, transformation):
return self.compose(other)
elif isinstance(other, coordinate3d):
return self.apply(other)
else:
print "Can't multiply transformation by {0}".format(type(other))
def compose(self, other):
"""Compose this transformation with another, returning a new transformation."""
newmatrix = [[0,0,0,0],[0,0,0,0],[0,0,0,0],[0,0,0,0]]
for i in range(4):
for j in range(4):
for k in range(4):
newmatrix[i][k] += self.matrix[i][j]*other.matrix[j][k]
return transformation(newmatrix)
def apply(self, point):
"""Apply this transformation to a coordinate, returning a new coordinate."""
return coordinate3d(
self.matrix[0][0]*point.x + self.matrix[0][1]*point.y + self.matrix[0][2]*point.z + self.matrix[0][3],
self.matrix[1][0]*point.x + self.matrix[1][1]*point.y + self.matrix[1][2]*point.z + self.matrix[1][3],
self.matrix[2][0]*point.x + self.matrix[2][1]*point.y + self.matrix[2][2]*point.z + self.matrix[2][3])
## Transformation functions
def identity():
return transformation([[1,0,0,0],
[0,1,0,0],
[0,0,1,0],
[0,0,0,1]])
def shift(x,y,z):
"""Move by a given offset."""
return transformation([[1,0,0,x],
[0,1,0,y],
[0,0,1,z],
[0,0,0,1]])
def rotationx(angle):
"""Rotate about the x axis by the given number of degrees."""
angle = radians(angle)
return transformation([[1, 0, 0, 0],
[0, cos(angle), sin(angle), 0],
[0, -sin(angle), cos(angle), 0],
[0, 0, 0, 1]])
def rotationy(angle):
"""Rotate about the y axis by the given number of degrees."""
angle = radians(angle)
return transformation([[ cos(angle), 0, sin(angle), 0],
[ 0, 1, 0, 0],
[-sin(angle), 0, cos(angle), 0],
[ 0, 0, 0, 1]])
def rotationz(angle):
"""Rotate about the z axis by the given number of degrees."""
angle = radians(angle)
return transformation([[ cos(angle), sin(angle), 0, 0],
[-sin(angle), cos(angle), 0, 0],
[ 0, 0, 1, 0],
[ 0, 0, 0, 1]])
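# A short usage sketch, illustrative only: compose a rotation with a shift and
# apply the combined transformation to a point.
if __name__ == "__main__":
    p = coordinate3d(1, 0, 0)
    t = shift(0, 5, 0) * rotationz(90)  # rotate about z first, then shift in y
    print t * p                         # -> [0.0, 4.0, 0.0] up to rounding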
|
gpl-2.0
| 2,883,782,418,861,403,600
| 35.10101
| 110
| 0.547566
| false
| 3.397338
| false
| false
| false
|
EDUlib/edx-platform
|
common/djangoapps/student/migrations/0031_auto_20200317_1122.py
|
1
|
1461
|
# Generated by Django 1.11.29 on 2020-03-17 11:22
import django.core.validators
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('student', '0030_userprofile_phone_number'),
]
operations = [
migrations.CreateModel(
name='AccountRecoveryConfiguration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('csv_file', models.FileField(help_text='It expect that the data will be provided in a csv file format with first row being the header and columns will be as follows: username, email, new_email', upload_to='', validators=[django.core.validators.FileExtensionValidator(allowed_extensions=['csv'])])),
('changed_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Changed by')),
],
options={
'ordering': ('-change_date',),
'abstract': False,
},
)
]
|
agpl-3.0
| -8,898,915,915,143,417,000
| 47.7
| 355
| 0.622861
| false
| 4.440729
| false
| false
| false
|
sunnynarayan/Complaint-Redressal
|
crs/wardenOffice/views.py
|
1
|
12845
|
from django.shortcuts import render
from django.shortcuts import render, redirect
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.template import RequestContext, loader
from django.contrib import auth
from django.core.context_processors import csrf
from django.contrib.auth.models import User
from django.contrib.auth import authenticate,login,logout
from django.contrib.sessions.models import Session
import hashlib
import datetime
from login.models import *
import re
from student.views import *
def isWardenOffice(request):
user_type = request.session.get("user_type",'')
if user_type != "wardenOffice":
return False
else:
return True
def wardenOfficeComplainView(request):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
# PublicComplainObjects = Complainlink.objects.all().filter(wardenid = uid).filter(studid = 0);
# query1 = 'SELECT * FROM complainLink WHERE woID = ' + str(uid) + ' AND studID = 0'
# query2 = 'SELECT * FROM complainLink WHERE woID = ' + str(uid) + ' AND studID != 0'
query1 = 'SELECT * FROM `complain`, complainLink WHERE (complain.status = 2 OR complain.status = 22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID = 0) AND complain.cid = complainLink.CID'
query2 = 'SELECT * FROM `complain`, complainLink WHERE (complain.status = 2 OR complain.status=22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID'
PublicComplainObjects = Complainlink.objects.raw(query1)
PrivateComplainObjects = Complainlink.objects.raw(query2)
# PrivateComplainObjects=Complainlink.objects.all().filter(wardenid = uid).exclude(studid = 0);
Privatelist=[];
Publiclist=[];
for num in PrivateComplainObjects:
numCid=num.cid
Privatelist.append(Complain.objects.get(cid=numCid)); #username in fac table
for num in PublicComplainObjects:
numCid=num.cid
Publiclist.append(Complain.objects.get(cid=numCid))
return render_to_response('wardenOffice/wardenHome.html',{'list1' : Publiclist, 'list2':Privatelist, 'msg': request.session.get('name')});
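# A minimal sketch, not part of the original flow: the same lookup with the
# user id passed as a query parameter instead of string concatenation, letting
# the database driver handle quoting. The helper name is illustrative only.
def _raw_query_sketch(uid):
    query = ('SELECT * FROM complain, complainLink '
             'WHERE complainLink.woID = %s AND complainLink.studID = 0 '
             'AND complain.cid = complainLink.CID')
    return Complainlink.objects.raw(query, [uid])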
def wardenOfficeViewComplain(complainObject):
# indexF = request.GET.get('CID')
# index = int(indexF)
# qry = "SELECT * FROM complain a, complainLink b WHERE b.CID = " + str(index) + " AND (b.secID = " + str(request.session.get('uid')) + " OR b.studID = 0 ) AND b.CID = a.cid"
# complainObject = Complain.objects.raw(qry)
# return render_to_response("secretary/complainDetail.html", {'item': complainObject[0]})
comment = []
documents = []
try:
documents.extend(Document.objects.filter(cid=complainObject[0].cid))
except:
pass
try:
comment.extend(Comment.objects.filter(cid = complainObject[0].cid))
except:
pass
return render_to_response("wardenOffice/complainDetail.html", {'item': complainObject[0],'documents':documents,'comment':comment})
def wardenOfficeHome(request):
if not (isWardenOffice(request)):
return redirect('/crs/')
return render_to_response('wardenOffice/wardenHome.html', {'msg' : request.session.get('name') });
def forwardToWarden(request):
if not (isWardenOffice(request)):
return redirect('/crs/')
complainArray=request.POST.getlist('complain')
length = len(complainArray)
for x in range(0,length):
comid = complainArray[x]
ClO =Complainlink.objects.get(cid=comid)
hostel=(Complain.objects.get(cid=comid)).hostel
wardenId = (Warden.objects.get(hostel=hostel)).fid
ClO.wardenid = wardenId
obj=Complain.objects.get(cid=ClO.cid)
ClO.save()
if obj.status==2:
obj.status=3
obj.save()
elif obj.status==12:
obj.status=13
obj.save()
else:
obj.status=23
obj.save()
# complainObj.wardenID = wardenID
# complainObj.save()
return redirect('../wardenComplain');
def getHostelType(hostelstr):
if hostelstr == "Ashoka":
return 1
elif hostelstr == "Aryabhatta":
return 2
elif hostelstr == "Chanakya1":
return 3
elif hostelstr == "Chanakya2":
return 4
elif hostelstr == "GBH":
return 5
else:
return 0
def isAddressed(address):
if address == "Addressed":
return 0
elif address == "New":
return 1
else:
return 2
def complainType(typec):
if typec=="Mess":
return 1
elif typec=="Environment":
return 2
elif typec=="Technical":
return 3
elif typec=="Maintenance":
return 4
elif typec=="Mess":
return 5
else:
return 6
def showHostelWiseComplain(request,hostel,isadd):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
hostelType = getHostelType(hostel)
isadd=isAddressed(isadd)
if hostelType == 0:
return HttpResponse('error')
if isadd==1:
query1 = 'SELECT * FROM `complain`, complainLink WHERE (complain.status = 2 OR complain.status = 22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID = 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType)
query2 = 'SELECT * FROM `complain`, complainLink WHERE (complain.status = 2 OR complain.status=22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType)
elif isadd==0:
query1 = 'SELECT * FROM `complain`, complainLink WHERE complain.status=0 AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID = 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType)
query2 = 'SELECT * FROM `complain`, complainLink WHERE complain.status=0 AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType)
else:
return HttpResponse('error')
PublicComplainObjects = Complainlink.objects.raw(query1)
PrivateComplainObjects = Complainlink.objects.raw(query2)
# PrivateComplainObjects=Complainlink.objects.all().filter(wardenid = uid).exclude(studid = 0);
Privatelist=[];
Publiclist=[];
for num in PrivateComplainObjects:
numCid=num.cid
Privatelist.append(Complain.objects.get(cid=numCid)); #username in fac table
for num in PublicComplainObjects:
numCid=num.cid
Publiclist.append(Complain.objects.get(cid=numCid));
return render_to_response('wardenOffice/wardenHome.html',{'list1' : Publiclist, 'list2':Privatelist, 'msg': request.session.get('name')});
def showHostelTypeWiseComplain(request,hostel,typeComplain):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
hostelType = getHostelType(hostel)
typec = complainType(typeComplain)
if hostelType == 0 or typec==6:
return HttpResponse('error')
query1 = "SELECT * FROM complain, complainLink WHERE (complain.status = 2 OR complain.status = 22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = " + str(uid) + " AND complainLink.studID = 0) AND complain.cid = complainLink.CID AND complain.hostel = " + str(hostelType) + " AND complain.type = " + str(typec)
query2 = 'SELECT * FROM complain, complainLink WHERE (complain.status = 2 OR complain.status=22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType) + ' AND complain.type = ' + str(typec)
PublicComplainObjects = Complainlink.objects.raw(query1)
PrivateComplainObjects = Complainlink.objects.raw(query2)
# PrivateComplainObjects=Complainlink.objects.all().filter(wardenid = uid).exclude(studid = 0);
Privatelist=[];
Publiclist=[];
for num in PrivateComplainObjects:
numCid=num.cid
Privatelist.append(Complain.objects.get(cid=numCid)); #username in fac table
for num in PublicComplainObjects:
numCid=num.cid
Publiclist.append(Complain.objects.get(cid=numCid));
return render_to_response('wardenOffice/wardenHome.html',{'list1' : Publiclist, 'list2':Privatelist, 'msg': request.session.get('name')});
def showHostelAdUnadWiseComplain(request,hostel,typec,isadd):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
hostelType = getHostelType(hostel)
typec=complainType(typec)
addressed=isAddressed(isadd)
if hostelType==0 or typec == 6 or addressed == 2:
return HttpResponse('error1')
if addressed==1:
query1 = 'SELECT * FROM complain, complainLink WHERE (complain.status = 2 OR complain.status = 22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID = 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType) + ' AND complain.type = ' + str(typec)
query2 = 'SELECT * FROM `complain`, complainLink WHERE (complain.status = 2 OR complain.status=22 OR complain.status=12 OR complain.status=3 OR complain.status=23 OR complain.status=13) AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType) + ' AND complain.type = ' + str(typec)
elif addressed==0:
query1 = 'SELECT * FROM `complain`, complainLink WHERE complain.status=0 AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID = 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType) + ' AND complain.type = ' + str(typec)
query2 = 'SELECT * FROM `complain`, complainLink WHERE complain.status=0 AND (complainLink.woID = ' + str(uid) + ' AND complainLink.studID != 0) AND complain.cid = complainLink.CID AND complain.hostel = ' + str(hostelType) + ' AND complain.type = ' + str(typec)
else:
return HttpResponse('error2')
PublicComplainObjects = Complainlink.objects.raw(query1)
PrivateComplainObjects = Complainlink.objects.raw(query2)
# PrivateComplainObjects=Complainlink.objects.all().filter(wardenid = uid).exclude(studid = 0);
Privatelist=[];
Publiclist=[];
for num in PrivateComplainObjects:
numCid=num.cid
Privatelist.append(Complain.objects.get(cid=numCid)); #username in fac table
for num in PublicComplainObjects:
numCid=num.cid
Publiclist.append(Complain.objects.get(cid=numCid));
return render_to_response('wardenOffice/wardenHome.html',{'list1' : Publiclist, 'list2':Privatelist, 'msg': request.session.get('name')});
def showHostelSecWiseInfo(request,hostel):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
hostelType = getHostelType(hostel)
if hostelType == 0:
return HttpResponse('error')
obj1=Secretary.objects.filter(hostel=hostelType)
stud=[]
for sec in obj1:
stud.append(Student.objects.get(uid=sec.uid))
# obj=Student.objects.filter()
# return HttpResponse(obj)
return render_to_response('wardenOffice/viewSecretary.html',{'list1':obj1,'list2':stud})
def showHostelStudWiseInfo(request,hostel):
if not (isWardenOffice(request)):
return redirect('/crs/')
uid=request.session.get('uid')
hostelType = getHostelType(hostel)
if hostelType == 0:
return HttpResponse('error')
obj=Student.objects.filter(hostel=hostelType)
# return HttpResponse(obj)
return render_to_response('wardenOffice/viewStudent.html',{'list':obj})
def viewSecretary(request):
if not (isWardenOffice(request)):
return redirect('/crs/')
# try:
uid=request.session.get('uid')
ashokaseclist=[];
aryabhattaseclist=[];
chanakya1seclist=[];
chanakya2seclist=[];
test=[1,2,3,4];
for num in test:
ashokaseclist.append(Secretary.objects.filter(hostel = 0).filter(type = num));
aryabhattaseclist.append(Secretary.objects.filter(hostel = 1).filter(type = num));
chanakya1seclist.append(Secretary.objects.filter(hostel = 2).filter(type = num));
chanakya2seclist.append(Secretary.objects.filter(hostel = 3).filter(type = num));
return render_to_response('wardenOffice/wardenOfficeViewComplain.html',{'list1':ashokaseclist, 'list2' :aryabhattaseclist,'list3':chanakya1seclist,'list4':chanakya2seclist});
# except:
# return render_to_response('login/loginPage.html');
# def ForwardComplain(request):
# try:
# uid=request.session.get('uid');
#
# except:
# return render_to_response('login/loginPage.html');
# Create your views here.
|
mit
| -603,925,967,535,629,800
| 45.224265
| 376
| 0.720125
| false
| 3.046004
| false
| false
| false
|
tingelst/pymanopt
|
pymanopt/manifolds/complexcircle.py
|
1
|
2567
|
from __future__ import division
import warnings
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
from pymanopt.manifolds.manifold import Manifold
class ComplexCircle(Manifold):
"""
The manifold of complex numbers with unit-modulus.
Description of vectors z in C^n (complex) such that each component z(i)
has unit modulus. The manifold structure is the Riemannian submanifold
structure from the embedding space R^2 x ... x R^2, i.e., the complex
circle is identified with the unit circle in the real plane. This
implementation is based on complexcirclefactory.m from the Manopt MATLAB
package.
"""
def __init__(self, n=1):
if n == 1:
self._name = "Complex circle S^1"
else:
self._name = "Complex circle (S^1)^{:d}".format(n)
self._n = n
def __str__(self):
return self._name
@property
def dim(self):
return self._n
def inner(self, z, v, w):
return v.conj().dot(w).real
def norm(self, x, v):
return la.norm(v)
def dist(self, x, y):
return la.norm(np.arccos((x.conj() * y).real))
@property
def typicaldist(self):
return np.pi * np.sqrt(self._n)
def proj(self, z, u):
return u - (u.conj() * z).real * z
tangent = proj
def ehess2rhess(self, z, egrad, ehess, zdot):
return self.proj(z, (z * egrad.conj()).real * zdot)
def exp(self, z, v):
y = np.zeros(self._n, dtype=complex)
abs_v = np.abs(v)
mask = abs_v > 0
not_mask = np.logical_not(mask)
y[mask] = (z[mask] * np.cos(abs_v[mask]) +
v[mask] * (np.sin(abs_v[mask]) / abs_v[mask]))
y[not_mask] = z[not_mask]
return y
def retr(self, z, v):
return self._normalize(z + v)
def log(self, x1, x2):
v = self.proj(x1, x2 - x1)
abs_v = np.abs(v)
di = np.arccos((x1.conj() * x2).real)
factors = di / abs_v
factors[di <= 1e-6] = 1
return v * factors
def rand(self):
n = self._n
return self._normalize(rnd.randn(n) + 1j * rnd.randn(n))
def randvec(self, z):
v = rnd.randn(self._n) * (1j * z)
return v / self.norm(z, v)
def transp(self, x1, x2, d):
return self.proj(x2, d)
def pairmean(self, z1, z2):
return self._normalize(z1 + z2)
@staticmethod
def _normalize(x):
"""
Normalize the entries of x element-wise by their absolute values.
"""
return x / np.abs(x)
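# A short usage sketch, illustrative only: exercise the exponential map and the
# geodesic distance on a random point/tangent pair.
if __name__ == "__main__":
    manifold = ComplexCircle(5)
    z = manifold.rand()           # random unit-modulus point
    v = manifold.randvec(z)       # unit-norm tangent vector at z
    y = manifold.exp(z, 0.1 * v)  # take a short geodesic step
    print(manifold.dist(z, y))    # 0.1 by construction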
|
bsd-3-clause
| -8,596,242,755,595,871,000
| 24.929293
| 76
| 0.558239
| false
| 3.192786
| false
| false
| false
|
DorianDepriester/mtex2abaqus
|
MTEX2abaqus/AbaqusImport.py
|
1
|
2602
|
import string
import csv
import os
from abaqusConstants import *
from part import *
from material import *
from section import *
from assembly import *
from load import *
from mesh import *
from visualization import *
def importEBSD(inpFileName):
while True:
fileName, file_extension = os.path.splitext(inpFileName)
# Load grain properties
try:
file = open(fileName+'.csv', "r")
reader = csv.DictReader(file,delimiter='\t',lineterminator='\n',quoting = csv.QUOTE_NONNUMERIC)
phase=[];Xx=[];Xy=[];Xz=[];Yx=[];Yy=[];Yz=[]
for row in reader:
phase.append(row['Phase'],)
Xx.append(row['Xx'],)
Xy.append(row['Xy'],)
Xz.append(row['Xz'],)
Yx.append(row['Yx'],)
Yy.append(row['Yy'],)
Yz.append(row['Yz'],)
file.close()
except IOError:
print 'Error:',fileName+'.csv','not found.'
break
mdbName=os.path.basename(fileName)
# Import INP file
try:
mdb.ModelFromInputFile(name=mdbName,inputFileName=inpFileName)
pk=mdb.models[mdbName].parts.keys()
partName=pk[0]
except IndexError:
print 'Error:',fileName+'.inp','not found.'
break
# Set the new part visible
p1 = mdb.models[mdbName].parts[partName]
session.viewports['Viewport: 1'].setValues(displayedObject=p1)
# Copy sets from assembly to part
a=mdb.models[mdbName].rootAssembly
sets=a.sets
sets_list=sets.keys()
p = mdb.models[mdbName].parts[partName]
for grainID in sets_list:
set_i=sets[grainID]
if grainID.startswith('GRAIN'):
IDs=[j.label for j in set_i.elements]
p.SetFromElementLabels(elementLabels=IDs,name=grainID)
# Assign properties to elements
phaseList=set(phase)
for i in list(phaseList):
mdb.models[mdbName].Material(name=i)
for i in range(0,len(phase)):
sectionID='GRAIN_{:d}'.format(i+1)
mdb.models[mdbName].HomogeneousSolidSection(name=sectionID, material=phase[i],thickness=None)
region = p.sets[sectionID]
p.SectionAssignment(region=region, sectionName=sectionID,offset=0.0,offsetType=MIDDLE_SURFACE, offsetField='',thicknessAssignment=FROM_SECTION)
datumName='ORIENT_{:d}'.format(i+1)
p.DatumCsysByThreePoints(name=datumName, coordSysType=CARTESIAN, origin=(.0,.0,.0), point1=(Xx[i], Xy[i], Xz[i]), point2=(Yx[i], Yy[i], Yz[i]))
id=p.features[datumName].id
orientation = p.datums[id]
p.MaterialOrientation(region=region,orientationType=SYSTEM,axis=AXIS_3,localCsys=orientation,fieldName='',additionalRotationType=ROTATION_NONE, angle=0.0,additionalRotationField='', stackDirection=STACK_3)
break
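# Illustrative usage from the Abaqus scripting environment; the path below is
# hypothetical. The script expects a matching <name>.csv of grain orientations
# next to the .inp mesh file.
if __name__ == '__main__':
    importEBSD('C:/work/ebsd_model.inp')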
|
mit
| -6,537,968,607,731,280,000
| 31.818182
| 208
| 0.684858
| false
| 2.904018
| false
| false
| false
|
xor-xor/electre_diviz
|
ElectreTriClassAssignments/ElectreTriClassAssignments.py
|
1
|
4490
|
#!/usr/bin/env python
"""
ElectreTriClassAssignments - computes assignments according to the Electre TRI
method. It generates separate outputs for the conjuctive ('pessimistic') and
disjunctive ('optimistic') assignments.
Usage:
ElectreTriClassAssignments.py -i DIR -o DIR
Options:
-i DIR Specify input directory. It should contain the following files:
alternatives.xml
classes.xml
classes_profiles.xml
outranking.xml
-o DIR Specify output directory. Files generated as output:
assignments_conjuctive.xml
assignments_disjunctive.xml
--version Show version.
-h --help Show this screen.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import sys
import traceback
from docopt import docopt
from common import assignments_to_xmcda, create_messages_file, get_dirs, \
get_error_message, get_input_data, get_relation_type, write_xmcda, Vividict
__version__ = '0.2.0'
def assign_class(alternatives, categories_rank, categories_profiles,
outranking):
# sort categories by their rank, but we want the worst one on the 'left'
# - hence 'reverse=True'
categories = [i[0] for i in sorted(categories_rank.items(),
key=lambda x: x[1], reverse=True)]
exploitation = Vividict()
for alternative in alternatives:
# conjunctive ('pessimistic' - from 'best' to 'worst')
conjuctive_idx = 0
for profile_idx, profile in list(enumerate(categories_profiles))[::-1]:
relation = get_relation_type(alternative, profile, outranking)
if relation in ('indifference', 'preference'):
conjuctive_idx = profile_idx + 1
break
else:
continue
# disjunctive ('optimistic' - from 'worst' to 'best')
disjunctive_idx = len(categories_profiles)
for profile_idx, profile in enumerate(categories_profiles):
relation = get_relation_type(profile, alternative, outranking)
if relation == 'preference':
disjunctive_idx = profile_idx
break
else:
continue
exploitation[alternative] = (categories[conjuctive_idx],
categories[disjunctive_idx])
return exploitation
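# A self-contained sketch of the two scans above, illustrative only: it
# re-states the conjunctive/disjunctive walks against a plain dict of
# relations, since the real XMCDA-backed `outranking` structure is not
# reproduced here.
def _assignment_logic_demo():
    categories = ['worst', 'middle', 'best']  # worst on the 'left'
    profiles = ['b1', 'b2']                   # boundaries between categories
    relation = {('a', 'b1'): 'preference',    # alternative outranks b1
                ('a', 'b2'): 'incomparability',
                ('b1', 'a'): 'none',
                ('b2', 'a'): 'preference'}    # b2 outranks the alternative
    conjunctive_idx = 0
    for idx in range(len(profiles) - 1, -1, -1):  # scan from best to worst
        if relation[('a', profiles[idx])] in ('indifference', 'preference'):
            conjunctive_idx = idx + 1
            break
    disjunctive_idx = len(profiles)
    for idx, profile in enumerate(profiles):      # scan from worst to best
        if relation[(profile, 'a')] == 'preference':
            disjunctive_idx = idx
            break
    return categories[conjunctive_idx], categories[disjunctive_idx]  # ('middle', 'middle')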
def main():
try:
args = docopt(__doc__, version=__version__)
output_dir = None
input_dir, output_dir = get_dirs(args)
filenames = [
# every tuple below == (filename, is_optional)
('alternatives.xml', False),
('classes.xml', False),
('classes_profiles.xml', False),
('outranking.xml', False),
]
params = [
'alternatives',
'categories_profiles',
'categories_rank',
'outranking',
]
d = get_input_data(input_dir, filenames, params,
comparison_with='boundary_profiles')
assignments = assign_class(d.alternatives, d.categories_rank,
d.categories_profiles, d.outranking)
# uncomment this if you want output combined as a single file (and
# remember to import assignments_as_intervals_to_xmcda):
# xmcda_intervals = assignments_as_intervals_to_xmcda(assignments)
# write_xmcda(xmcda_intervals,
# os.path.join(output_dir, 'assignments_intervals.xml'))
assignments_con = {i[0]: i[1][0] for i in assignments.iteritems()}
xmcda_con = assignments_to_xmcda(assignments_con)
write_xmcda(xmcda_con, os.path.join(output_dir,
'assignments_conjuctive.xml'))
assignments_dis = {i[0]: i[1][1] for i in assignments.iteritems()}
xmcda_dis = assignments_to_xmcda(assignments_dis)
write_xmcda(xmcda_dis, os.path.join(output_dir,
'assignments_disjunctive.xml'))
create_messages_file(None, ('Everything OK.',), output_dir)
return 0
except Exception, err:
err_msg = get_error_message(err)
log_msg = traceback.format_exc()
print(log_msg.strip())
create_messages_file((err_msg, ), (log_msg, ), output_dir)
return 1
if __name__ == '__main__':
sys.exit(main())
|
mit
| 3,253,285,074,870,855,700
| 37.376068
| 82
| 0.58686
| false
| 4.07441
| false
| false
| false
|
zlohner/TournamentOrganizer
|
model/player.py
|
1
|
2904
|
#!/usr/bin/env python3
WIN_MATCH_POINTS = 3
LOSE_MATCH_POINTS = 0
DRAW_MATCH_POINTS = 1
import sys
import random
import numpy
class Player(object):
def __init__(self, name, user):
self.name = name
self.user = user
self.match_wins = 0
self.match_losses = 0
self.match_draws = 0
self.game_wins = 0
self.game_losses = 0
self.game_draws = 0
self.opponents = set()
self.byes = 0
self.sort_constant = random.randint(1, sys.maxsize)
def add_record(self, record):
game_wins, game_losses, game_draws = record
self.game_wins += game_wins
self.game_losses += game_losses
self.game_draws += game_draws
def record_win(self, record):
self.add_record(record)
self.match_wins += 1
self.user.match_wins += 1
def record_loss(self, record):
self.add_record(record)
self.match_losses += 1
self.user.match_losses += 1
def record_draw(self, record):
self.add_record(record)
self.match_draws += 1
self.user.match_draws += 1
def match_points(self):
return \
WIN_MATCH_POINTS * self.match_wins + \
DRAW_MATCH_POINTS * self.match_draws + \
LOSE_MATCH_POINTS * self.match_losses
def match_win_percent(self):
matches = self.match_wins + self.match_losses + self.match_draws
if matches == 0:
return 0
else:
return float(self.match_wins) / float(matches)
def game_win_percent(self):
games = self.game_wins + self.game_losses + self.game_draws
if games == 0:
return 0
else:
return float(self.game_wins) / float(games)
def played(self, player):
return player in self.opponents
def __eq__(self, other):
return other != None and self.name == other.name
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
val = 0
for c in self.name:
val += ord(c)
val *= 31
return val
def __lt__(self, other):
self_OMWP = 1
other_OMWP = 1
if len(self.opponents) > 0:
self_OMWP = numpy.mean([opp.match_win_percent() for opp in self.opponents])
if len(other.opponents) > 0:
other_OMWP = numpy.mean([opp.match_win_percent() for opp in other.opponents])
self_GWP = self.game_win_percent()
other_GWP = other.game_win_percent()
if self.match_points() > other.match_points():
return True
elif self.match_points() == other.match_points() \
and self_OMWP > other_OMWP:
return True
elif self.match_points() == other.match_points() \
and self_OMWP == other_OMWP \
and self_GWP > other_GWP:
return True
elif self.match_points() == other.match_points() \
and self_OMWP == other_OMWP \
and self_GWP == other_GWP \
and self.sort_constant < other.sort_constant:
return True
else:
return False
def record_str(self):
return str(self.match_wins) + '-' + str(self.match_losses) + '-' + str(self.match_draws)
def formatted(self):
return self.name + '\t\t ' + self.record_str()
def __str__(self):
return '(' + self.name + ' - ' + self.record_str() + ')'
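# A minimal sketch of the intended flow, assuming a user-like object exposing
# match_wins/match_losses/match_draws counters; the names below are
# illustrative only.
class _StubUser(object):
    def __init__(self):
        self.match_wins = self.match_losses = self.match_draws = 0

def _standings_demo():
    alice = Player('Alice', _StubUser())
    bob = Player('Bob', _StubUser())
    alice.opponents.add(bob)
    bob.opponents.add(alice)
    alice.record_win((2, 1, 0))   # Alice takes the match two games to one
    bob.record_loss((1, 2, 0))
    standings = sorted([bob, alice])  # __lt__ orders by points, then OMW%, then GW%
    return [p.formatted() for p in standings]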
|
gpl-3.0
| 1,534,049,206,389,733,400
| 23.610169
| 90
| 0.65668
| false
| 2.693878
| false
| false
| false
|
AltSchool/django-allauth
|
allauth/account/adapter.py
|
1
|
19322
|
from __future__ import unicode_literals
import hashlib
import json
import time
import warnings
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import (
authenticate,
get_backends,
login as django_login,
logout as django_logout,
)
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.password_validation import validate_password
from django.contrib.sites.shortcuts import get_current_site
from django.core.cache import cache
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import resolve_url
from django.template import TemplateDoesNotExist
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from allauth.compat import force_str, ugettext_lazy as _
from ..utils import (
build_absolute_uri,
email_address_exists,
generate_unique_username,
get_user_model,
import_attribute,
)
from . import app_settings
class DefaultAccountAdapter(object):
error_messages = {
'username_blacklisted':
_('Username can not be used. Please use other username.'),
'username_taken':
AbstractUser._meta.get_field('username').error_messages['unique'],
'too_many_login_attempts':
_('Too many failed login attempts. Try again later.'),
'email_taken':
_("A user is already registered with this e-mail address."),
}
def __init__(self, request=None):
self.request = request
def stash_verified_email(self, request, email):
request.session['account_verified_email'] = email
def unstash_verified_email(self, request):
ret = request.session.get('account_verified_email')
request.session['account_verified_email'] = None
return ret
def stash_user(self, request, user):
request.session['account_user'] = user
def unstash_user(self, request):
return request.session.pop('account_user', None)
def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower()
return ret
def format_email_subject(self, subject):
prefix = app_settings.EMAIL_SUBJECT_PREFIX
if prefix is None:
site = get_current_site(self.request)
prefix = "[{name}] ".format(name=site.name)
return prefix + force_str(subject)
def get_from_email(self):
"""
This is a hook that can be overridden to programmatically
set the 'from' email address for sending emails
"""
return settings.DEFAULT_FROM_EMAIL
def render_mail(self, template_prefix, email, context):
"""
Renders an e-mail to `email`. `template_prefix` identifies the
e-mail that is to be sent, e.g. "account/email/email_confirmation"
"""
subject = render_to_string('{0}_subject.txt'.format(template_prefix),
context)
# remove superfluous line breaks
subject = " ".join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
from_email = self.get_from_email()
bodies = {}
for ext in ['html', 'txt']:
try:
template_name = '{0}_message.{1}'.format(template_prefix, ext)
bodies[ext] = render_to_string(template_name,
context).strip()
except TemplateDoesNotExist:
if ext == 'txt' and not bodies:
# We need at least one body
raise
if 'txt' in bodies:
msg = EmailMultiAlternatives(subject,
bodies['txt'],
from_email,
[email])
if 'html' in bodies:
msg.attach_alternative(bodies['html'], 'text/html')
else:
msg = EmailMessage(subject,
bodies['html'],
from_email,
[email])
msg.content_subtype = 'html' # Main content is now text/html
return msg
def send_mail(self, template_prefix, email, context):
msg = self.render_mail(template_prefix, email, context)
msg.send()
def get_login_redirect_url(self, request):
"""
Returns the default URL to redirect to after logging in. Note
that URLs passed explicitly (e.g. by passing along a `next`
GET parameter) take precedence over the value returned here.
"""
assert request.user.is_authenticated
url = getattr(settings, "LOGIN_REDIRECT_URLNAME", None)
if url:
warnings.warn("LOGIN_REDIRECT_URLNAME is deprecated, simply"
" use LOGIN_REDIRECT_URL with a URL name",
DeprecationWarning)
else:
url = settings.LOGIN_REDIRECT_URL
return resolve_url(url)
def get_logout_redirect_url(self, request):
"""
Returns the URL to redirect to after the user logs out. Note that
this method is also invoked if you attempt to log out while no user
is logged in. Therefore, request.user is not guaranteed to be an
authenticated user.
"""
return resolve_url(app_settings.LOGOUT_REDIRECT_URL)
def get_email_confirmation_redirect_url(self, request):
"""
The URL to return to after successful e-mail confirmation.
"""
if request.user.is_authenticated:
if app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL:
return \
app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL
else:
return self.get_login_redirect_url(request)
else:
return app_settings.EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL
def is_open_for_signup(self, request):
"""
Checks whether or not the site is open for signups.
Next to simply returning True/False you can also intervene the
regular flow by raising an ImmediateHttpResponse
"""
return True
def new_user(self, request):
"""
Instantiates a new User instance.
"""
user = get_user_model()()
return user
def populate_username(self, request, user):
"""
Fills in a valid username, if required and missing. If the
username is already present it is assumed to be valid
(unique).
"""
from .utils import user_username, user_email, user_field
first_name = user_field(user, 'first_name')
last_name = user_field(user, 'last_name')
email = user_email(user)
username = user_username(user)
if app_settings.USER_MODEL_USERNAME_FIELD:
user_username(
user,
username or self.generate_unique_username([
first_name,
last_name,
email,
username,
'user']))
def generate_unique_username(self, txts, regex=None):
return generate_unique_username(txts, regex)
def save_user(self, request, user, form, commit=True):
"""
Saves a new `User` instance using information provided in the
signup form.
"""
from .utils import user_username, user_email, user_field
data = form.cleaned_data
first_name = data.get('first_name')
last_name = data.get('last_name')
email = data.get('email')
username = data.get('username')
user_email(user, email)
user_username(user, username)
if first_name:
user_field(user, 'first_name', first_name)
if last_name:
user_field(user, 'last_name', last_name)
if 'password1' in data:
user.set_password(data["password1"])
else:
user.set_unusable_password()
self.populate_username(request, user)
if commit:
# Ability not to commit makes it easier to derive from
# this adapter by adding
user.save()
return user
def clean_username(self, username, shallow=False):
"""
Validates the username. You can hook into this if you want to
(dynamically) restrict what usernames can be chosen.
"""
for validator in app_settings.USERNAME_VALIDATORS:
validator(username)
# TODO: Add regexp support to USERNAME_BLACKLIST
username_blacklist_lower = [ub.lower()
for ub in app_settings.USERNAME_BLACKLIST]
if username.lower() in username_blacklist_lower:
raise forms.ValidationError(
self.error_messages['username_blacklisted'])
# Skipping database lookups when shallow is True, needed for unique
# username generation.
if not shallow:
from .utils import filter_users_by_username
if filter_users_by_username(username).exists():
user_model = get_user_model()
username_field = app_settings.USER_MODEL_USERNAME_FIELD
error_message = user_model._meta.get_field(
username_field).error_messages.get('unique')
if not error_message:
error_message = self.error_messages['username_taken']
raise forms.ValidationError(
error_message,
params={
'model_name': user_model.__name__,
'field_label': username_field,
}
)
return username
def clean_email(self, email):
"""
Validates an email value. You can hook into this if you want to
(dynamically) restrict what email addresses can be chosen.
"""
return email
def clean_password(self, password, user=None):
"""
Validates a password. You can hook into this if you want to
restrict the allowed password choices.
"""
min_length = app_settings.PASSWORD_MIN_LENGTH
if min_length and len(password) < min_length:
raise forms.ValidationError(_("Password must be a minimum of {0} "
"characters.").format(min_length))
validate_password(password, user)
return password
def validate_unique_email(self, email):
if email_address_exists(email):
raise forms.ValidationError(self.error_messages['email_taken'])
return email
def add_message(self, request, level, message_template,
message_context=None, extra_tags=''):
"""
Wrapper of `django.contrib.messages.add_message`, that reads
the message text from a template.
"""
if 'django.contrib.messages' in settings.INSTALLED_APPS:
try:
if message_context is None:
message_context = {}
message = render_to_string(message_template,
message_context).strip()
if message:
messages.add_message(request, level, message,
extra_tags=extra_tags)
except TemplateDoesNotExist:
pass
def ajax_response(self, request, response, redirect_to=None, form=None,
data=None):
resp = {}
status = response.status_code
if redirect_to:
status = 200
resp['location'] = redirect_to
if form:
if request.method == 'POST':
if form.is_valid():
status = 200
else:
status = 400
else:
status = 200
resp['form'] = self.ajax_response_form(form)
if hasattr(response, 'render'):
response.render()
resp['html'] = response.content.decode('utf8')
if data is not None:
resp['data'] = data
return HttpResponse(json.dumps(resp),
status=status,
content_type='application/json')
def ajax_response_form(self, form):
form_spec = {
'fields': {},
'field_order': [],
'errors': form.non_field_errors()
}
for field in form:
field_spec = {
'label': force_str(field.label),
'value': field.value(),
'help_text': force_str(field.help_text),
'errors': [
force_str(e) for e in field.errors
],
'widget': {
'attrs': {
k: force_str(v)
for k, v in field.field.widget.attrs.items()
}
}
}
form_spec['fields'][field.html_name] = field_spec
form_spec['field_order'].append(field.html_name)
return form_spec
def login(self, request, user):
# HACK: This is not nice. The proper Django way is to use an
# authentication backend
if not hasattr(user, 'backend'):
from .auth_backends import AuthenticationBackend
backends = get_backends()
backend = None
for b in backends:
if isinstance(b, AuthenticationBackend):
# prefer our own backend
backend = b
break
elif not backend and hasattr(b, 'get_user'):
# Pick the first valid one
backend = b
backend_path = '.'.join([backend.__module__,
backend.__class__.__name__])
user.backend = backend_path
django_login(request, user)
def logout(self, request):
django_logout(request)
def confirm_email(self, request, email_address):
"""
Marks the email address as confirmed on the db
"""
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
def set_password(self, user, password):
user.set_password(password)
user.save()
def get_user_search_fields(self):
user = get_user_model()()
return filter(lambda a: a and hasattr(user, a),
[app_settings.USER_MODEL_USERNAME_FIELD,
'first_name', 'last_name', 'email'])
def is_safe_url(self, url):
from django.utils.http import is_safe_url
return is_safe_url(url, allowed_hosts=None)
def get_email_confirmation_url(self, request, emailconfirmation):
"""Constructs the email confirmation (activation) url.
Note that if you have architected your system such that email
confirmations are sent outside of the request context `request`
can be `None` here.
"""
url = reverse(
"account_confirm_email",
args=[emailconfirmation.key])
ret = build_absolute_uri(
request,
url)
return ret
def send_confirmation_mail(self, request, emailconfirmation, signup):
current_site = get_current_site(request)
activate_url = self.get_email_confirmation_url(
request,
emailconfirmation)
ctx = {
"user": emailconfirmation.email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"key": emailconfirmation.key,
}
if signup:
email_template = 'account/email/email_confirmation_signup'
else:
email_template = 'account/email/email_confirmation'
self.send_mail(email_template,
emailconfirmation.email_address.email,
ctx)
def respond_user_inactive(self, request, user):
return HttpResponseRedirect(
reverse('account_inactive'))
def respond_email_verification_sent(self, request, user):
return HttpResponseRedirect(
reverse('account_email_verification_sent'))
def _get_login_attempts_cache_key(self, request, **credentials):
site = get_current_site(request)
login = credentials.get('email', credentials.get('username', ''))
login_key = hashlib.sha256(login.encode('utf8')).hexdigest()
return 'allauth/login_attempts@{site_id}:{login}'.format(
site_id=site.pk,
login=login_key)
def pre_authenticate(self, request, **credentials):
if app_settings.LOGIN_ATTEMPTS_LIMIT:
cache_key = self._get_login_attempts_cache_key(
request, **credentials)
login_data = cache.get(cache_key, None)
if login_data:
dt = timezone.now()
current_attempt_time = time.mktime(dt.timetuple())
if (len(login_data) >= app_settings.LOGIN_ATTEMPTS_LIMIT and
current_attempt_time < (
login_data[-1] +
app_settings.LOGIN_ATTEMPTS_TIMEOUT)):
raise forms.ValidationError(
self.error_messages['too_many_login_attempts'])
def authenticate(self, request, **credentials):
"""Only authenticates, does not actually login. See `login`"""
from allauth.account.auth_backends import AuthenticationBackend
self.pre_authenticate(request, **credentials)
AuthenticationBackend.unstash_authenticated_user()
user = authenticate(request, **credentials)
alt_user = AuthenticationBackend.unstash_authenticated_user()
user = user or alt_user
if user and app_settings.LOGIN_ATTEMPTS_LIMIT:
cache_key = self._get_login_attempts_cache_key(
request, **credentials)
cache.delete(cache_key)
else:
self.authentication_failed(request, **credentials)
return user
def authentication_failed(self, request, **credentials):
if app_settings.LOGIN_ATTEMPTS_LIMIT:
cache_key = self._get_login_attempts_cache_key(
request, **credentials
)
data = cache.get(cache_key, [])
dt = timezone.now()
data.append(time.mktime(dt.timetuple()))
cache.set(cache_key, data, app_settings.LOGIN_ATTEMPTS_TIMEOUT)
def is_ajax(self, request):
return request.is_ajax()
def get_adapter(request=None):
return import_attribute(app_settings.ADAPTER)(request)
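# A minimal sketch of the customization hook: subclass the adapter in your own
# module (the module path below is hypothetical) and point the ACCOUNT_ADAPTER
# setting at it, e.g. ACCOUNT_ADAPTER = 'myproject.adapter.ClosedSignupAdapter'.
class ClosedSignupAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        # close public signups; existing users keep logging in normally
        return False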
|
mit
| -2,770,081,460,482,284,500
| 36.518447
| 78
| 0.568782
| false
| 4.558151
| false
| false
| false
|
albertoconnor/apisdk
|
relateiq/lists.py
|
1
|
3641
|
from .riq_obj import RIQObject
from .riq_base import RIQBase
from .listitems import ListItems
from .listitem import ListItem
# TODO: Add version, externalId, category
# TODO: Payload exception if missing required fields
class List(RIQObject,RIQBase) :
# Object Attributes
_id = None
_modifiedDate = None
_title = None
_listType = None
_fields = None
_size = None
def __init__(self, _id=None, title=None, modifiedDate=None, fields=None, data=None) :
if data != None :
self.parse(data)
elif self.id(_id) != None :
self.get()
self.title(title)
self.modifiedDate(modifiedDate)
self.fields(fields)
self.ListItems = ListItems(self)
@classmethod
def node(cls) :
return 'lists'
def parse(self,data) :
self.id(data.get('id',None))
self.modifiedDate(data.get('modifiedDate',None))
self.title(data.get('title',None))
self.listType(data.get('listType',None))
self.fields(data.get('fields',None))
self.size(data.get('size', None))
return self
# Data Payload
def payload(self) :
payload = {
'title' : self.title(),
'fields' : self.fields()
}
if self.id() != None :
payload['id'] = self.id()
return payload
# Hybrid
def id(self,value=None) :
if value != None :
self._id = value
return self._id
def modifiedDate(self,value=None) :
if value != None :
self._modifiedDate = value
return self._modifiedDate
def title(self,value=None) :
if value != None :
self._title = value
return self._title
def listType(self,value=None) :
if value != None :
self._listType = value
return self._listType
def fields(self,value=None) :
if value != None :
self._fields = value
return self._fields or []
def size(self, value=None):
if value != None:
self._size = value
return self._size
# Sub Endpoints
def ListItem(self,*args,**kwargs) :
kwargs['parent'] = self
return ListItem(*args,**kwargs)
# Lookup Functions
# Convert a field name to a field key (eg "Status" --> "0")
def fieldKey(self,name) :
#if the "name" is already a key, just return it
for field in self.fields() :
if field.get('id',None) == name :
return name
#otherwise, find the field whose "name" is name, and return that field's id
for field in self.fields() :
if field.get('name',None) == name :
return field.get('id',name)
#print "[WARN] Field is a Linked Field and has no Schema in List: " + name
return name
def fieldValue(self,key,value=None) :
for field in self.fields() :
if field.get('id',None) == key :
return key
for field in self.fields() :
if field.get('display',None) == key :
return field.get('id',key)
return key
def fieldOption(self,key,value=None) :
for field in self.fields() :
if field.get('id',None) == key :
return key
for field in self.fields() :
if field.get('display',None) == key :
return field.get('id',key)
return key
def fieldMap(self):
return {field["id"]: field for field in self._fields}
def fetchListSize(self):
self.get({"includeSize" : True})
return self.size()
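# A short usage sketch, illustrative only: `data` mimics the schema shape the
# lookup helpers above expect.
def _field_key_demo():
    accounts = List(data={'id': 'abc123',
                          'title': 'Accounts',
                          'fields': [{'id': '0', 'name': 'Status'}]})
    return accounts.fieldKey('Status')  # -> '0'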
|
apache-2.0
| -4,409,821,769,299,002,400
| 27.677165
| 89
| 0.552046
| false
| 3.931965
| false
| false
| false
|
hgn/hippod
|
hippod/report_generator.py
|
1
|
27079
|
import os
import shutil
import markdown
import tempfile
import datetime
import json
import re
import tempfile
import logging
import glob
import hippod.api_shared
import hippod.error_object
log = logging.getLogger()
class ReportGenerator(object):
LAST_ACHIEVEMENTS = 1
FILTER_BY_ANCHOR = 2
FILTER_BY_CHOICE = 3
PDF = 4
@staticmethod
def generate(app, outputs, report_filter, report_meta):
reports_path = app['REPORT_PATH']
tmp_path = os.path.join(app['DB_ROOT_PATH'], 'tmp')
if not os.path.isdir(tmp_path):
os.mkdir(tmp_path)
list_of_lists = ReportGenerator.ReportGeneratorCollector.search(app, report_filter, report_meta)
date = str(datetime.datetime.now().replace(second=0, microsecond=0).isoformat(sep='-'))
doc_name = '{}-report.pdf'.format(date)
pdf_out_path = os.path.join(reports_path, doc_name)
rpd = ReportGenerator.ReportGeneratorDocument(list_of_lists, tmp_path)
stored_data = rpd.store_data_in_tmp(app)
rpd.generate_pdf(app, pdf_out_path, stored_data)
class ReportGeneratorDocument(object):
def __init__(self, list_of_lists, tmp_path):
self.list_of_lists = list_of_lists
# self.tmp_path = tempfile.TemporaryFile()
self.tmp_path = tmp_path
# def __del__(self):
# shutil.rmtree(self.tmp_path)
@staticmethod
def get_format_snippet(app, data_id):
db_path = app['DB_DATA_PATH']
attr_path = os.path.join(db_path, data_id, 'attr.db')
with open(attr_path, 'r') as f:
content = json.load(f)
snippet_format = content['mime-type'].split('-')[-1]
return snippet_format
def get_dst_path(self, data_type, sub_dir, name):
if data_type == '.png':
dst_path = os.path.join(sub_dir, '{}.png'.format(name))
elif data_type == '.jpg':
dst_path = os.path.join(sub_dir, '{}.jpg'.format(name))
elif data_type == '.jpeg':
dst_path = os.path.join(sub_dir, '{}.jpeg'.format(name))
elif data_type == '.gif':
dst_path = os.path.join(sub_dir, '{}.gif'.format(name))
else:
# Unsupported data type: dst_path would be undefined and
# crash some lines later, so log the problem and bail out.
log.error("data type not supported: {}".format(data_type))
return None
return dst_path
def copy_to_dst(self, src_path, dst_path):
with open(src_path, 'rb') as file:
data = file.read()
decoded = hippod.hasher.decode_base64_data(data)
with open(dst_path, 'wb') as file:
file.write(decoded)
def store_data(self, app, data, sub_dir):
src_path = os.path.join(app['DB_DATA_PATH'], data['data-id'], 'blob.bin')
src_path_snippet = os.path.join(app['DB_SNIPPET_PATH'], '{}.png'.format(data['data-id']))
if not os.path.isdir(sub_dir):
os.mkdir(sub_dir)
# check whether data is a image or description or snippet
if 'type' not in data:
head, tail = os.path.split(data['name'])
name, data_type = os.path.splitext(tail)
dst_path = self.get_dst_path(data_type, sub_dir, name)
if not dst_path:
return None
self.copy_to_dst(src_path, dst_path)
elif data['type'] == 'description':
dst_path = os.path.join(sub_dir, 'description.md')
shutil.copyfile(src_path, dst_path)
elif data['type'] == 'snippet':
# in case of snippet root of src is snippet_db
if 'name' in data:
head, tail = os.path.split(data['name'])
name, data_type = os.path.splitext(tail)
else:
name = data['data-id']
data_type = self.get_format_snippet(app, data['data-id'])
src_path = os.path.join(app['DB_SNIPPET_PATH'], '{}{}'.format(data['data-id'], data_type))
dst_path = self.get_dst_path(data_type, sub_dir, name)
if not dst_path:
return None
self.copy_to_dst(src_path, dst_path)
# else:
# FIXME: error handling
return dst_path
def store_achievement(self, app, achievement_path, sub_dir):
with open(achievement_path, 'r') as achiev:
content = json.load(achiev)
if not os.path.isdir(sub_dir):
os.mkdir(sub_dir)
# check whether data is achievement like
if 'result' in content:
dst_path = os.path.join(sub_dir, 'achievement.db')
with open(dst_path, 'w') as dst:
content = json.dumps(content, sort_keys=True,indent=4,
separators=(',', ': '))
dst.write(content)
return dst_path
else:
return None
def store_attachment(self, app, attachment_path, sub_dir):
with open(attachment_path, 'r') as attach:
content = json.load(attach)
if not os.path.isdir(sub_dir):
os.mkdir(sub_dir)
dst_path = os.path.join(sub_dir, 'attachment.db')
with open(dst_path, 'w') as dst:
content = json.dumps(content, sort_keys=True,indent=4,
separators=(',', ': '))
dst.write(content)
return dst_path
def get_achievement_content(self, achievement_path):
with open(achievement_path) as achievement:
content = json.load(achievement)
return content
def get_attachment_content(self, attachment_path):
default_attach = dict()
default_attach['responsible'] = 'anonymous'
if not attachment_path:
return default_attach
with open(attachment_path) as attach:
content = json.load(attach)
return content
def add_data(self, description_path, file_path):
with open(description_path, 'r') as file:
description = file.read()
with open(description_path, 'w') as file:
description = str(description) + '\n' + '![]({})'.format(file_path) + '\n'
file.write(description)
def design_description(self, achievement_content, categories, attach_content, title):
result = achievement_content['result']
submitter = achievement_content['submitter']
test_date = achievement_content['test-date']
categories = categories
responsible = attach_content['responsible']
description = '# {} #\n\n'.format(title)
description += '----------------------- ----------\n'
description += '**Test Result** {}\n'.format(result)
description += '**Categories** {}\n'.format(categories)
description += '**Submitter** {}\n'.format(submitter)
description += '**Responsible** {}\n'.format(responsible)
description += '**Test-Date** {}\n'.format(test_date)
description += '----------------------- ----------\n\n'
for data in achievement_content['data-references']:
description += '\n![]({})\n'.format(data)
return description
def add_achievement(self, description_path, achievement_path, title, \
achievement_data, attachment_path, categories):
attach_content = self.get_attachment_content(attachment_path)
achievement_content = self.get_achievement_content(achievement_path)
achievement_content['data-references'] = achievement_data
if description_path == None:
# remove '/achievement.db' of the path and create a 'description.md' file in this directory
tmp_item_path = os.path.dirname(achievement_path)
description_path = os.path.join(tmp_item_path, 'description.md')
with open(description_path, 'w') as file:
descr = self.design_description(achievement_content, categories, attach_content, title)
file.write(descr)
return description_path
else:
with open(description_path, 'r') as file:
description_only = file.read()
with open(description_path, 'w') as file:
descr = self.design_description(achievement_content, categories, attach_content, title)
descr += str(description_only)
file.write(descr)
return description_path
def sanitize_description(self, description_path):
with open(description_path, 'r') as input_file:
descr_lines = input_file.readlines()
with open(description_path, 'w') as output_file:
for line in descr_lines:
match = re.search(r'^#[#]*', line)
p = re.compile(r'(#[#]*)')
if match != None:
newline = p.sub('{}#'.format(match.group(0)), line)
output_file.write(newline)
else:
output_file.write(line)
def adjust_image_reference(self, description_path, attach_path, data_type):
# looks for available references and arrange the path in the refeferences to the images
# stored in the tmp file, then returns bool whether image is referenced or not
data_type = data_type.replace(".", "")
reference_available = False
head, tail = os.path.split(attach_path)
with open(description_path, 'r') as input_file:
in_descr = input_file.readlines()
with open(description_path, 'w') as output_file:
# search for 'xxx(xxx.data_type)'
regex = r'(\()(.*[.]' + '{})'.format(data_type)
# regex_compile is a pattern which only looks for the part after the caption
# in the reference
regex_compile = r'\(.*[.]' + '{}'.format(data_type)
p = re.compile(regex_compile)
for line in in_descr:
match = re.search(regex, line)
if match:
# check whether match 'xxx.xxx' is the wanted image data like 'image.png'
if match.group(2) == tail:
reference_available = True
# exchange only the file path in the refernce(after the caption) with the
# new tmp file path
newline = p.sub('({}'.format(attach_path), line)
output_file.write(newline)
else:
output_file.write(line)
else:
output_file.write(line)
return reference_available
def fetch_data_list_subcontainer(self, subcontainer_path):
with open(subcontainer_path, 'r') as subc:
content = json.load(subc)
if 'data' not in content['object-item'] or len(content['object-item']['data']) == 0:
return None
data_list = content['object-item']['data']
return data_list
def fetch_data_list_achievement(self, achievement_path):
with open(achievement_path, 'r') as achievement:
content = json.load(achievement)
if 'data' not in content or len(content['data']) == 0:
return None
data_list = content['data']
return data_list
def store_data_subcontainer(self, app, data_list, sub_dir):
stored_paths = list()
for i, data in enumerate(data_list):
stored_data_path = self.store_data(app, data, sub_dir)
            if stored_data_path is None:
continue
stored_paths.append(stored_data_path)
return stored_paths
def store_data_achievement(self, app, data_list, sub_dir):
stored_paths = list()
for i, data in enumerate(data_list):
stored_data_path = self.store_data(app, data, sub_dir)
            if stored_data_path is not None:
stored_paths.append(stored_data_path)
return stored_paths
def store_data_in_tmp(self, app):
db_path = app['DB_OBJECT_PATH']
files_catalog = dict()
for j, item in enumerate(self.list_of_lists):
sub_dir = os.path.join(self.tmp_path, 'item{}'.format(j))
files_catalog[sub_dir] = dict()
files_catalog[sub_dir]['data'] = dict()
files_catalog[sub_dir]['data']['achievements'] = list()
files_catalog[sub_dir]['data']['subcontainer'] = list()
sha_major = item[0]
sha_minor = item[1]
achievement_id = item[2]
title = item[3]
last_attachment = item[4]
categories = item[5]
files_catalog[sub_dir]['title'] = title
files_catalog[sub_dir]['categories'] = categories
subcontainer = os.path.join(db_path, sha_major[0:2], sha_major, sha_minor, 'subcontainer.db')
achievement = os.path.join(db_path, sha_major[0:2], sha_major, sha_minor, 'achievements', '{}.db'.format(achievement_id))
if not last_attachment:
files_catalog[sub_dir]['attachment'] = None
else:
attachment = os.path.join(db_path, sha_major[0:2], sha_major, 'attachments', last_attachment)
stored_data_path = self.store_attachment(app, attachment, sub_dir)
files_catalog[sub_dir]['attachment'] = stored_data_path
stored_data_path = self.store_achievement(app, achievement, sub_dir)
files_catalog[sub_dir]['achievement'] = stored_data_path
data_list_achievement = self.fetch_data_list_achievement(achievement)
            if data_list_achievement is not None:
stored_paths = self.store_data_achievement(app, data_list_achievement, sub_dir)
for path in stored_paths: files_catalog[sub_dir]['data']['achievements'].append(path)
data_list_subcontainer = self.fetch_data_list_subcontainer(subcontainer)
            if data_list_subcontainer is None:
continue
stored_paths = self.store_data_subcontainer(app, data_list_subcontainer, sub_dir)
for path in stored_paths: files_catalog[sub_dir]['data']['subcontainer'].append(path)
return files_catalog
def _pandoc_generate(self, app, markdown_in_path, pdf_out_path):
assert(os.path.isfile(markdown_in_path))
cmd = "pandoc "
cmd += "--latex-engine xelatex "
if "REPORT-PDF-TEMPLATE" in app:
cmd += "--template {} ".format(app["REPORT-PDF-TEMPLATE"])
cmd += "--listings "
cmd += "--toc "
cmd += "{} ".format(markdown_in_path)
cmd += " -o \"{}\" ".format(pdf_out_path)
log.debug("executing: \"{}\"".format(cmd))
os.system(cmd)
def generate_pdf(self, app, pdf_out_path, tmp_data):
sub_reports = list()
for key, item in tmp_data.items():
title = item['title']
categories = item['categories']
achievement_data = item['data']['achievements']
attachment_path = item['attachment']
counter = 0
for d in item['data']['subcontainer']:
counter += 1
name, data_type = os.path.splitext(d)
if data_type == '.md':
self.sanitize_description(d)
description_path = d
if 'achievement' in item:
achievement_path = item['achievement']
self.add_achievement(description_path, achievement_path, title, \
achievement_data, attachment_path, categories)
                    counter = 0  # reset so the fallback branch below does not also fire
# if no '.md' found --> use at least title and test result for the report
elif counter == len(item['data']['subcontainer']):
if 'achievement' in item:
achievement_path = item['achievement']
description_path = self.add_achievement(None, achievement_path, title, \
achievement_data, attachment_path, categories)
else:
continue
for d in item['data']['subcontainer']:
name, data_type = os.path.splitext(d)
if data_type == '.png':
attach_path = d
elif data_type == '.jpg':
attach_path = d
elif data_type == '.jpeg':
attach_path = d
elif data_type == '.gif':
attach_path = d
else:
continue
ok = self.adjust_image_reference(description_path, attach_path, data_type)
if not ok:
self.add_data(description_path, attach_path)
if len(item['data']['subcontainer']) == 0:
achievement_path = item['achievement']
description_path = self.add_achievement(None, achievement_path, title, \
achievement_data, attachment_path, categories)
sub_reports.append(description_path)
for i in range(len(sub_reports) - 1):
with open(sub_reports[i+1], 'r') as file2:
description2 = file2.read()
with open(sub_reports[0], 'r') as file1:
description1 = file1.read()
description1 = str(description1) + '\n \n \n' + str(description2)
with open(sub_reports[0], 'w') as file1:
file1.write(description1)
# FIXME, need arguments
self._pandoc_generate(app, sub_reports[0], pdf_out_path)
# shutil.rmtree(self.tmp_path)
class ReportGeneratorCollector(object):
@staticmethod
def null_func(data):
pass
@staticmethod
def search(app, filter_type, filter_meta):
object_index_data = hippod.api_shared.object_index_read(app)
if not object_index_data:
return None
# maybe specify limit in filter?
search_list = list()
# list_sort_func = (null_func(), reversed)[bool(True)] # here variable reversed instead of hardcoded True
# for cont in list_sort_func(object_index_data):
for cont in object_index_data:
ok, cont_obj = hippod.api_shared.read_cont_obj_by_id(app, cont['object-item-id'])
if not ok:
log.error("cannot read container {} by sha although it's in object-index.db".format(cont['object-item-id']))
continue
title = cont_obj['title']
categories = cont_obj['categories']
if filter_type == ReportGenerator.LAST_ACHIEVEMENTS:
last_achiev_list = ReportGenerator.ReportGeneratorCollector.search_last_achievements(app, cont['object-item-id'], cont_obj)
last_attach = ReportGenerator.ReportGeneratorCollector.search_last_attachment(app, cont['object-item-id'])
last_achiev_list.append(title)
last_achiev_list.append(last_attach)
last_achiev_list.append(categories)
search_list.append(last_achiev_list)
elif filter_type == ReportGenerator.FILTER_BY_ANCHOR:
ReportGenerator.ReportGeneratorCollector.search_anchored_achievements(app, cont['object-item-id'], cont_obj)
elif filter_type == ReportGenerator.FILTER_BY_CHOICE:
choiced_achiev_list = ReportGenerator.ReportGeneratorCollector.search_choiced_achievements(app, cont['object-item-id'], cont_obj, filter_meta)
if not choiced_achiev_list:
continue
for sub_list in choiced_achiev_list:
last_attach = ReportGenerator.ReportGeneratorCollector.search_last_attachment(app, cont['object-item-id'])
# last attachment?
sub_list.append(title)
sub_list.append(last_attach)
sub_list.append(categories)
search_list.append(sub_list)
return search_list
@staticmethod
def search_last_achievements(app, sha_major, cont_obj):
ret_list = list()
buff_dict = dict()
# fetch latest subcontainer (subcontainer with latest achievement) and related meta
for sub_cont in cont_obj['subcontainer-list']:
sc = sub_cont['sha-minor']
ok, full_sub_cont = hippod.api_shared.read_subcont_obj_by_id(app, sha_major, sc)
if not ok:
                log.error('cannot read subcontainer {}/{} by sha although sha-minor is in subcontainer-list'.format(sha_major, sc))
continue
data = hippod.api_object_get_full.get_all_achievement_data(app, sha_major, sc, full_sub_cont)
if data:
buff_dict[sc] = data[0]['date-added']
if data:
latest_sha_minor = max(buff_dict, key=lambda key: buff_dict[key])
latest_index = next(index for (index,d) in enumerate(cont_obj['subcontainer-list']) if d['sha-minor'] == latest_sha_minor)
ret_list.append(sha_major)
if not data:
sub_cont_last = cont_obj['subcontainer-list'][0]
latest_sha_minor = sub_cont_last['sha-minor']
else:
sub_cont_last = cont_obj['subcontainer-list'][latest_index]
ret_list.append(sub_cont_last['sha-minor'])
db_root_path = app['DB_OBJECT_PATH']
subcntr_path = os.path.join(db_root_path, sha_major[0:2], sha_major,\
latest_sha_minor, 'subcontainer.db')
with open(subcntr_path) as file:
full_sub_cont_last = json.load(file)
data = hippod.api_object_get_detail.get_last_achievement_data(app, sha_major, latest_sha_minor, full_sub_cont_last)
ret_list.append(data['id'])
return ret_list
@staticmethod
def search_choiced_achievements(app, sha_major, cont_obj, filter_meta):
ret_list = list()
if 'anchors' in filter_meta:
anchors_filter = filter_meta['anchors']
if 'submitter' in filter_meta:
submitter_filter = filter_meta['submitter']
# FIXME: multiple entries shouldn't be in the ret_list!
for sub_cont in cont_obj['subcontainer-list']:
sub_ret_list = list()
sha_minor = sub_cont['sha-minor']
ok, full_sub_cont = hippod.api_shared.read_subcont_obj_by_id(app, sha_major, sha_minor)
achievements = full_sub_cont['achievements']
achievement_data_list = hippod.api_object_get_full.get_all_achievement_data(app, sha_major, sha_minor, full_sub_cont)
for achievement in achievement_data_list:
sub_ret_list1 = list()
submitter = achievement['submitter']
if 'anchors' in filter_meta and 'submitter' in filter_meta:
if 'anchor' in achievement:
anchor = achievement['anchor']
if submitter in submitter_filter and anchor in anchors_filter:
sub_ret_list1.append(sha_major)
sub_ret_list1.append(sha_minor)
sub_ret_list1.append(str(achievement['id']))
else:
continue
else:
continue
elif 'anchors' in filter_meta:
if 'anchor' in achievement:
anchor = achievement['anchor']
if anchor in anchors_filter:
sub_ret_list1.append(sha_major)
sub_ret_list1.append(sha_minor)
sub_ret_list1.append(str(achievement['id']))
else:
continue
else:
continue
elif 'submitter' in filter_meta and submitter in submitter_filter:
sub_ret_list1.append(sha_major)
sub_ret_list1.append(sha_minor)
sub_ret_list1.append(str(achievement['id']))
else:
continue
sub_ret_list.append(sub_ret_list1)
for sub in sub_ret_list:
ret_list.append(sub)
return ret_list
@staticmethod
def search_anchored_achievements(app, sha_major, cont_obj):
pass
@staticmethod
def search_last_attachment(app, sha_major):
obj_path = os.path.join(app['DB_OBJECT_PATH'])
attach_path = os.path.join(obj_path, sha_major[0:2], sha_major, 'attachments')
attach_files = os.path.join(attach_path, '*')
attach_list = glob.glob(attach_files)
if len(attach_list) == 0:
return None
last_attach = max(attach_list, key=os.path.getctime)
return last_attach
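# --- Illustrative sketch (not part of the original module) ---
# Standalone demo of the reference-rewriting regex used in
# adjust_image_reference() above; the markdown line and tmp path below are
# made-up examples, not values the module produces.
def _demo_image_reference_regex():
    import re
    line = ''
    data_type = 'png'
    match = re.search(r'(\()(.*[.]' + '{})'.format(data_type), line)
    assert match is not None and match.group(2) == 'old/shot.png'
    p = re.compile(r'\(.*[.]' + '{}'.format(data_type))
    newline = p.sub('({}'.format('/tmp/item0/shot.png'), line)
    assert newline == ''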
|
mit
| 7,860,579,975,598,945,000
| 45.932409
| 162
| 0.518778
| false
| 4.197644
| true
| false
| false
|
anovak10/plots
|
DDTmethod/DDT.py
|
1
|
3479
|
#
import os
import ROOT
from ROOT import *
from array import array
import math
from math import *
import sys
import pdb
def ComputeDDT(name, point, nPtBins, nRhoBins, H):
DDT = TH2F(name, "", nRhoBins, 50, 250 , nPtBins, 380, 1000)
DDT.SetStats(0)
nXb = H.GetXaxis().GetNbins()
nYb = H.GetYaxis().GetNbins()
for x in range(nXb):
for y in range(nYb):
proj = H.ProjectionZ("H3"+str(x)+str(y),x+1,x+1,y+1,y+1)
print str(x+1) + "," + str(y+1) + ": "+ str(proj.Integral())
p = array('d', [point*0.01])
q = array('d', [0.0]*len(p))
proj.GetQuantiles(len(p), q, p)
DDT.SetBinContent( x+1, y+1, q[0] )
return DDT
def DisplayDDT(DDT, toretain, SaveName, cut=""):
CMSLABL = TLatex()
CMSLABL.SetNDC()
CMSLABL.SetTextSize(0.045)
PRELABL = TLatex()
PRELABL.SetNDC()
PRELABL.SetTextSize(0.04)
THILABL = TLatex()
THILABL.SetNDC()
THILABL.SetTextSize(0.035)
CUTLABL = TLatex()
CUTLABL.SetNDC()
CUTLABL.SetTextSize(0.02)
C = TCanvas("TempCanvas", "Title", 800, 600)
plot = TPad("pad1", "The pad 80% of the height",0.02,0,0.95,1)
plot.Draw()
plot.cd()
DDT.SetStats(0)
DDT.GetXaxis().SetTitle("TAGM")
DDT.GetXaxis().SetTitleSize(0.045)
DDT.GetZaxis().SetTitle("TAGTau_{32}")
DDT.GetZaxis().SetTitleSize(0.045)
DDT.GetZaxis().SetRangeUser(0.5,0.75)
DDT.SetTitle("DDT at "+str(toretain)+"% efficinecy")
if SaveName.startswith("DDTdiff") == True:
DDT.GetZaxis().SetRangeUser(-0.1,0.1)
DDT.GetZaxis().SetTitle("#Delta TAGTau_{32}")
DDT.SetTitle("#Delta DDT at "+str(toretain)+"% efficinecy")
DDT.GetYaxis().SetTitle("TAGp_{T}")
DDT.GetYaxis().SetTitleSize(0.045)
DDT.GetYaxis().SetTitleOffset(1.145)
DDT.Draw("COLZ")
CMSLABL.DrawLatex(0.1465,0.85,"CMS")
THILABL.DrawLatex(0.81,0.91,"#bf{13 TeV}")
PRELABL.DrawLatex(0.1465,0.812,"#bf{#it{Simulation Preliminary}}")
CUTLABL.DrawLatex(0.1465,0.780,cut)
C.Print("MAP_"+SaveName+".png")
def histo(Bkgs, cut="T.lepJetCSV<100"):
H3 = TH3F("H3", "", 9, 50, 250, 12, 380, 1000, 500, 0, 1)
H3.SetStats(0)
for B in Bkgs:
F = TFile(B)
T = F.Get("tree_T1")
n = T.GetEntries()
for j in range(0, n): # Here is where we loop over all events.
T.GetEntry(j)
if T.TAGTau32 > 0.001:
if eval(cut):
weight = T.weight
PT = T.TAGPt
M = T.TAGM
H3.Fill(M, PT, T.TAGTau32, weight)
return H3
# Fetch samples
pwd = "/home/storage/andrzejnovak/March/"
Bkgs =[]
Bkgs.append(pwd+"WJetsToQQ.root")
for w in ["100To200", "200To400", "400To600", "600To800", "800To1200", "1200To2500", "2500ToInf"]:
Bkgs.append(pwd+"WJetsToLNu_HT-"+w+".root")
def study(cut,savename, toretain=20):
#Set-up
#toretain = toretain # Percentage to keep
if cut != "": Tcut = "T."+cut+" and " #Preselection cuts on data in pythonic form e.g. T.LepPt < 50 and ..
else: Tcut = ""
nbins1, nbins2 = 12, 9
# Signal Region
H3 = histo(Bkgs, cut=Tcut +"T.lepJetCSV >0.46")
DDT_sr = ComputeDDT("DDT_sr", toretain, nbins1, nbins2, H3)
DisplayDDT(DDT_sr, toretain, "DDT_SR"+cut, cut=cut)
# Sidebands
H3 = histo(Bkgs, cut=Tcut +"T.lepJetCSV <0.46")
DDT_br = ComputeDDT("DDT_sb", toretain, nbins1, nbins2, H3)
DisplayDDT(DDT_sb, toretain, "DDT_SB"+cut, cut=cut)
# Difference
DisplayDDT(DDT_sr-DDT_sb, toretain, "DDTdiff"+cut, cut=cut)
# Saving a file
Fout = TFile(savename+".root", "recreate")
Fout.cd()
DDT_sr.Write()
DDT_sb.Write()
Fout.Close()
study("", "DDT")
clist = ["LepPt<400", "LepTightness>2.9", "WPt>500"]
for i, c in enumerate(clist):
study(c, "DDT"+str(i))
|
mit
| 8,447,145,614,358,003,000
| 27.516393
| 109
| 0.652774
| false
| 2.117468
| false
| false
| false
|
fredokun/TikZ-Editor
|
tikz_editor/tools/documentIO/template.py
|
1
|
1780
|
# Copyright 2012 (C) Mickael Menu <mickael.menu@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from string import Template
from .tags import *
class FileTemplate(object):
"""
The file template tool generates a full LaTeX/TikZ source from a template, preamble
and source.
"""
def __init__(self, template, preamble, source):
assert preamble is not None and source is not None
super(FileTemplate, self).__init__()
self.content = ""
self.preamble = preamble
self.source = source
self.latex_template = Template(template)
def buildFileContent(self):
"""
Builds the TikZ document with given preamble and source and the document template.
"""
self._buildPreambleChunk()
self._buildSourceChunk()
self._buildContentFromTemplate()
return self.content
def _buildPreambleChunk(self):
self.preamble = "%s\n%s\n%s\n" % (PREAMBLE_BEGIN_TAG, self.preamble, PREAMBLE_END_TAG)
def _buildSourceChunk(self):
self.source = "%s\n%s\n%s\n" % (SOURCE_BEGIN_TAG, self.source, SOURCE_END_TAG)
def _buildContentFromTemplate(self):
self.content = TIKZ_TAG + "\n"
self.content += self.latex_template.safe_substitute(PREAMBLE=self.preamble, SOURCE=self.source)
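# --- Illustrative sketch (not part of the original module) ---
# A hypothetical template showing the $PREAMBLE/$SOURCE placeholders consumed
# by safe_substitute() above; the TikZ snippet is a made-up example.
def _demo_file_template():
	template = ("\\documentclass{article}\n$PREAMBLE\n"
		"\\begin{document}\n$SOURCE\n\\end{document}\n")
	ft = FileTemplate(template, "\\usepackage{tikz}", "\\tikz \\draw (0,0) -- (1,1);")
	return ft.buildFileContent()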
|
gpl-2.0
| 7,310,290,482,443,180,000
| 33.901961
| 97
| 0.738764
| false
| 3.449612
| false
| false
| false
|
MISP/misp-modules
|
misp_modules/modules/expansion/joesandbox_query.py
|
1
|
2330
|
# -*- coding: utf-8 -*-
import jbxapi
import json
from joe_parser import JoeParser
misperrors = {'error': 'Error'}
inputSource = ['link']
moduleinfo = {'version': '0.2', 'author': 'Christian Studer',
'description': 'Query Joe Sandbox API with a report URL to get the parsed data.',
'module-type': ['expansion']}
moduleconfig = ['apiurl', 'apikey', 'import_pe', 'import_mitre_attack']
def handler(q=False):
if q is False:
return False
request = json.loads(q)
apiurl = request['config'].get('apiurl') or 'https://jbxcloud.joesecurity.org/api'
apikey = request['config'].get('apikey')
parser_config = {
"import_pe": request["config"].get('import_pe', "false") == "true",
"mitre_attack": request["config"].get('import_mitre_attack', "false") == "true",
}
if not apikey:
return {'error': 'No API key provided'}
url = request['attribute']['value']
if "/submissions/" not in url:
return {'error': "The URL does not point to a Joe Sandbox analysis."}
submission_id = url.split('/')[-1] # The URL has the format https://example.net/submissions/12345
joe = jbxapi.JoeSandbox(apiurl=apiurl, apikey=apikey, user_agent='MISP joesandbox_query')
try:
joe_info = joe.submission_info(submission_id)
except jbxapi.ApiError as e:
return {'error': str(e)}
if joe_info["status"] != "finished":
return {'error': "The analysis has not finished yet."}
if joe_info['most_relevant_analysis'] is None:
return {'error': "No analysis belongs to this submission."}
analysis_webid = joe_info['most_relevant_analysis']['webid']
joe_parser = JoeParser(parser_config)
joe_data = json.loads(joe.analysis_download(analysis_webid, 'jsonfixed')[1])
joe_parser.parse_data(joe_data['analysis'])
joe_parser.finalize_results()
return {'results': joe_parser.results}
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['input'] = inputSource
except NameError:
pass
modulesetup['format'] = 'misp_standard'
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
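# --- Illustrative sketch (not part of the original module) ---
# How MISP exercises this module: introspection()/version() for metadata and
# handler() with a JSON payload. The payload below is a placeholder; with no
# API key the handler returns an error dict without contacting Joe Sandbox.
if __name__ == '__main__':
    print(introspection())
    print(version())
    fake_query = json.dumps({
        'config': {'apikey': None},
        'attribute': {'value': 'https://jbxcloud.joesecurity.org/submissions/12345'},
    })
    print(handler(fake_query))  # -> {'error': 'No API key provided'}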
|
agpl-3.0
| 5,023,869,576,487,750,000
| 29.657895
| 102
| 0.632189
| false
| 3.568147
| true
| false
| false
|
Twangist/log_calls
|
log_calls/proxy_descriptors.py
|
1
|
4911
|
__author__ = 'brianoneill'
__doc__ = """
Module version = '0.1.14'
See docstrings for install_proxy_descriptor and ClassInstanceAttrProxy.
"""
from itertools import product, chain
__all__ = ['install_proxy_descriptor', 'ClassInstanceAttrProxy' ]
def install_proxy_descriptor(proxy_obj, attr_name_proxied_instance, descr_name, data=True, readonly=False):
"""
Create and install (setattr) on proxy_obj a descriptor named descr_name,
assuming proxy_obj has an attribute named attr_name_proxied_instance
which 'points' to an object that already has an attr/descr named descr_name;
the created descriptor will then just defer to that anterior attr/descr.
Suppose a, b are instances of classes A, B resp.,
and suppose b has an attr 'my_a' that points to a:
assert b.my_a is a
Thus proxy_obj == b,
attr_name_proxied_instance == 'my_a'.
Suppose a has an attribute 'x' which b wants to reflect
aka proxy, so that the value of b.x will be (will invoke) a.x.
b can set this up as follows:
install_proxy_descriptor(b, 'my_a', 'x') # b: b itself would say, self
data: True iff we should create & install a data descriptor,
else create & install a non-data-descriptor.
readonly: True iff created data descriptor should be readonly
(i.e. raise AttributeError on attempted 'set' operations).
"""
class ProxyDataDescr():
def __get__(this_descr, proxy, owner):
"todo"
### print("**** descriptor %s __get__ called" % descr_name)
return getattr(
getattr(proxy, attr_name_proxied_instance),
descr_name)
def __set__(this_descr, proxy, value):
"todo"
if not readonly:
setattr(
getattr(proxy, attr_name_proxied_instance),
descr_name,
value)
else:
# no can do:
raise AttributeError("%s is read-only on %r" % (descr_name, proxy))
class ProxyMethodDescr():
def __get__(this_descr, proxy, owner):
"todo"
### print("**** descriptor %s __get__ called" % descr_name)
return getattr(
getattr(proxy, attr_name_proxied_instance),
descr_name)
proxy_descr = (ProxyDataDescr if data else ProxyMethodDescr)()
setattr(proxy_obj.__class__, descr_name, proxy_descr)
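# --- Illustrative sketch (not part of the original module) ---
# The docstring's a/b example in runnable form; classes A and B and the
# attribute 'x' are assumptions made up for the demo.
def _demo_install_proxy_descriptor():
    class A():
        def __init__(self):
            self.x = 42
    class B():
        def __init__(self, a):
            self.my_a = a
    b = B(A())
    install_proxy_descriptor(b, 'my_a', 'x', data=True, readonly=True)
    assert b.x == b.my_a.x == 42    # reads defer to a.x
    b.my_a.x = 99
    assert b.x == 99                # b.x tracks a.x
    try:
        b.x = 0                     # readonly data descriptor rejects writes
    except AttributeError:
        pass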
class ClassInstanceAttrProxy():
"""Attributes on (instances of) some other class ==>
descriptors on (instances of) this class
(data descriptors are readonly).
The transform '==>' is accomplished by install_proxy_descriptor.
This class keeps a record of which other classes it has already created
descriptors for (_classes_and_attrs_proxied, initially empty set)
-- a little naively,
classname + marker + tuple(data_descriptor_names) + marker + tuple(method_descriptor_names).
Note that the attributes of instances of other class that are exposed
in this way can themselves be descriptors (e.g. properties).
"""
# Only create descriptors on the class once,
# for class of class_instance + these attributes/descr names:
# for a given descr_name (attr name) they'd be the same :)
_classes_and_attrs_proxied = set()
def __init__(self, *, class_instance, data_descriptor_names, method_descriptor_names):
"""What makes these work is the class_instance arg,
which a descriptor uses to access a class_instance
and from that its attr of the same name."""
self._proxied_instance_ = class_instance
class_and_descr_names = (
class_instance.__class__.__name__
+ '|'
+ ','.join(data_descriptor_names)
+ '|'
+ ','.join(method_descriptor_names)
)
if class_and_descr_names not in self._classes_and_attrs_proxied:
# Create descriptors *** on the class ***, once only per class.
# Same __get__/__set__ functions, called on different instances.
# It doesn't work to create them on instances:
# setattr(self, ... ) doesn't fly.
class_descr_names = chain(product(data_descriptor_names, {True}),
product(method_descriptor_names, {False})
)
for descr_name, is_data in list(class_descr_names):
# Create & add descriptor to this class. readonly only matters if is_data
install_proxy_descriptor(self, '_proxied_instance_', descr_name,
data=is_data, readonly=is_data)
# Record this class as 'already (successfully!) handled'
self._classes_and_attrs_proxied.add(class_and_descr_names)
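# --- Illustrative sketch (not part of the original module) ---
# Hypothetical usage: expose one data attribute and one method of a tracked
# object through a proxy; the Tracked class is made up for the demo.
def _demo_class_instance_attr_proxy():
    class Tracked():
        def __init__(self):
            self.count = 0
        def bump(self):
            self.count += 1
    tracked = Tracked()
    proxy = ClassInstanceAttrProxy(class_instance=tracked,
                                   data_descriptor_names=['count'],
                                   method_descriptor_names=['bump'])
    proxy.bump()                    # dispatches to tracked.bump
    assert proxy.count == tracked.count == 1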
|
mit
| -3,673,459,081,506,548,700
| 43.243243
| 107
| 0.598656
| false
| 4.270435
| false
| false
| false
|
neuroidss/nupic.vision
|
parameters.py
|
1
|
5662
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import random
def linearRange(start, stop, step):
"""Make a list of allowed parameter values."""
pval = start
plist = [pval]
while pval < stop:
pval = pval + step
plist.append(pval)
return plist
class Parameters(object):
"""
This class provides methods for searching ranges of parameters to see how
they affect performance.
"""
def __init__(self):
"""
Have to keep track of the names and valid values of each parameter
defined by the user.
"""
# list of parameter names
self._names = []
# list of allowed parameter values
self._allowedValues = []
# list of past and present parameter value indexes
self._valueIndexes = []
# list of past and present results that correspond to each set of parameter
# values
self._results = []
# the number of possible combinations of parameter values for all parameters
self.numCombinations = 1
def define(self, name, allowedValues):
"""
This method allows users to define a parameter by providing its name and
a list of values for the parameter.
"""
if name not in self._names:
self._names.append(name)
self._allowedValues.append(allowedValues)
self.numCombinations = self.numCombinations * len(allowedValues)
else:
print "Parameter: ", name, " is already defined!"
def getNames(self):
"""
This method returns the names of all defined parameters.
"""
return self._names
def getValue(self, name):
"""
This method returns the current value of the parameter specified by name.
"""
assert name in self._names
i = self._names.index(name)
assert len(self._valueIndexes[-1]) > i
return self._allowedValues[i][self._valueIndexes[-1][i]]
def getAllValues(self):
"""
This method returns the current values of all defined parameters.
"""
return [self._allowedValues[i][j] for i,j in
enumerate(self._valueIndexes[-1])]
def appendResults(self,item):
"""
This method adds an item to the results list.
"""
print "Just completed parameter Combination: ", self.getAllValues()
self._results.append(item)
print
print "Parameter combinations completed: ",
print len(self._results), "/", self.numCombinations
print
def getNumResults(self):
"""
This method returns the number of items in the results list.
"""
return len(self._results)
def printResults(self, resultNames, formatStrings):
"""
This method prints a summary of all the results.
"""
print
print "Summary of Results"
print
    headerList = list(self.getNames())  # copy, so resultNames aren't appended to self._names
headerList.extend(resultNames)
headerString = ", ".join(headerList)
print headerString
for i, result in enumerate(self._results):
valueString = str([self._allowedValues[j][k] for j,k in
enumerate(self._valueIndexes[i])])[1:-1]
for j,formatString in enumerate(formatStrings):
valueString += formatString % result[j]
print valueString
def nextRandomCombination(self):
"""
This method randomly selects a value for each parameter from its list of
allowed parameter values. If the resulting combination has already been
used then it tries again.
"""
    randomValueIndexes = [random.randrange(len(self._allowedValues[i]))
                          for i in range(len(self._names))]
    if randomValueIndexes in self._valueIndexes:
      self.nextRandomCombination()
    else:
      self._valueIndexes.append(randomValueIndexes)
print "Parameter Combination: ", self.getAllValues()
print
def nextCombination(self):
"""
This method finds the next combination of parameter values using the
allowed value lists for each parameter.
"""
if len(self._valueIndexes) == 0:
# list of value indexes is empty so this is the first combination,
# each parameter gets the first value in its list of allowed values
self._valueIndexes.append([0 for i in range(len(self._names))])
else:
newValueIndexes = self._valueIndexes[-1][:]
i = 0
while i < len(self._names):
# if current value is not the last in the list
if self._valueIndexes[-1][i] != len(self._allowedValues[i]) - 1:
# change parameter to next value in allowed value list and return
newValueIndexes[i] += 1
break
else:
# change parameter to first value in allowed value list
newValueIndexes[i] = 0
# move next parameter to next value in its allowed value list
i = i + 1
self._valueIndexes.append(newValueIndexes)
print "Parameter Combination: ", self.getAllValues()
print
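# --- Illustrative sketch (not part of the original module) ---
# Hypothetical exhaustive sweep over two parameters; the parameter names and
# the fake "score" below are made up for the demo.
def _demoParameterSweep():
  params = Parameters()
  params.define("threshold", linearRange(0.1, 0.3, 0.1))
  params.define("radius", [1, 2])
  for _ in range(params.numCombinations):
    params.nextCombination()
    score = params.getValue("threshold") * params.getValue("radius")
    params.appendResults((score,))
  params.printResults(["score"], [", %.3f"])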
|
gpl-3.0
| -6,478,117,063,875,853,000
| 29.771739
| 80
| 0.656658
| false
| 4.289394
| false
| false
| false
|
YudinYury/Python_Netology_homework
|
les16_hw_2.py
|
1
|
3666
|
'''lesson_1_6 homework "Breaking down algorithmic problems from job interviews"
solving problems from HackerRank
'''
#TODO 2:
# CSS colors are defined using a hexadecimal (HEX) notation for the combination of Red, Green, and Blue color values (RGB).
# Specifications of HEX Color Code
#
# ■ It must start with a '#' symbol.
# ■ It can have 3 or 6 digits.
# ■ Each digit is in the range of 0 to F. (1,...,9,0,A,B,C,D,E and F).
# ■ A-F letters can be lower case. (a,b,c,d,e and f are also valid digits).
#
# Valid Hex Color Codes
# #FFF
# #025
# #F0A1FB
#
# Invalid Hex Color Codes
# #fffabg
# #abcf
# #12365erff
#
# You are given N lines of CSS code. Your task is to print all valid Hex Color Codes, in order of their occurrence from top to bottom.
#
# CSS Code Pattern
# Selector
# {
# Property: Value;
# }
#
# Input Format
# The first line contains N, the number of code lines.
# The next N lines contains CSS Codes.
#
# Constraints
# 0 < N < 50
#
# Output Format
# Output the color codes with '#' symbols on separate lines.
#
# Sample Input
# 11
# #BED
# {
# color: #FfFdF8; background-color:#aef;
# font-size: 123px;
# background: -webkit-linear-gradient(top, #f9f9f9, #fff);
# }
# #Cab
# {
# background-color: #ABC;
# border: 2px dashed #fff;
# }
# Sample Output
# #FfFdF8
# #aef
# #f9f9f9
# #fff
# #ABC
# #fff
signature_list = ['0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f']
hex_list=[] # list for the result
ss_list=[
'#BED',
'{',
' color: #FfFdF8; background-color:#aef;',
' font-size: 123px;', '',
'}',
'#Cab',
'{',
' background-color: #ABC;',
' border: 2px dashed #fff;',
'}'
]
sstr='#BED{ color: #FfFdF8; background-color:#aef; font-size: 123px;}#Cab{ background-color: #ABC; border: 2px dashed #fff;}'
# n = int(input())
n=11
ss=''
it=0
body_of_tag=0
for ss in ss_list:
step = 0
if ss.find('{') != -1:
body_of_tag=1
        # print('entered a rule body')
continue
if ss.find('}') != -1:
body_of_tag=0
        # print('left the rule body')
continue
while body_of_tag:
it=ss.find('#',it,)
if it == -1:
it=0
break
it+=1
# print('begin find in ', ss[it::1])
new_num_str = '#'
for single in ss[it::1]:
if single.lower() in signature_list:
new_num_str+=single
# print('new_num_str=',new_num_str)
            else: # the run of hex digits (0...9, A...F) has ended
                # print('end of find')
                it += len(new_num_str) # skip the substring we just checked and continue searching from the new position
step += len(new_num_str)
# print('it=', it, ' ss()=',ss[it::1])
# print('body_of_tag =', body_of_tag)
if len(new_num_str)==4 or len(new_num_str)==7:
hex_list.append(new_num_str)
# print('hex_list', hex_list)
new_num_str = ''
break
else:
new_num_str=''
break
for out in hex_list:
print(out)
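# --- Illustrative sketch (not part of the original solution) ---
# An alternative, regex-based take on the same task, for comparison: it only
# collects 6- or 3-digit codes found inside rule bodies.
def find_hex_colors(lines):
    import re
    colors, in_body = [], False
    for line in lines:
        if '{' in line:
            in_body = True
            continue
        if '}' in line:
            in_body = False
            continue
        if in_body:
            colors.extend(re.findall(r'#(?:[0-9a-fA-F]{6}|[0-9a-fA-F]{3})\b', line))
    return colors
# print(find_hex_colors(ss_list))  # should reproduce the output above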
# for i in range(11):
# # sstr+=input()
# # # ss.append(input())
# ss=ss_list.pop([0])
# print(ss)
# print(sstr)
# si=sstr.find('#')
# if si==-1:
# print('Not found "#"')
# exit()
# hex_num_str=''
# for ch in sstr:
# print(ch)
# if not ch.isdigit():
# continue
# if
|
gpl-3.0
| -8,185,796,143,026,287,000
| 22.369128
| 137
| 0.525847
| false
| 2.635882
| false
| false
| false
|
xpostudio4/red-de-emprendimiento
|
app/institutions/forms.py
|
1
|
7474
|
# -*- encoding: utf-8 -*-
#core Django Imports
from django import forms
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField, AuthenticationForm
from django.forms.extras import widgets
#Third party apps
#Project apps import
from .models import UserProfile, Category, Event, MailingList, Organization
class CustomUserCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password',
widget=forms.PasswordInput(attrs={'placeholder':'Contraseña'}))
password2 = forms.CharField(label='Password confirmation',
widget=forms.PasswordInput(attrs={'placeholder':'Confirmar Contraseña'}))
username = forms.CharField(required=False, max_length=30)
full_name = forms.CharField(max_length=30,
widget=forms.TextInput(attrs={"placeholder":'Nombre Completo'}))
email = forms.CharField(max_length=30,
widget=forms.TextInput(attrs={"placeholder":'Email'}))
class Meta:
model = UserProfile
fields = ('email', 'full_name')
def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def __init__(self, *args, **kwargs):
super(CustomUserCreationForm, self).__init__(*args, **kwargs)
#change the html class of all the elements
for field in self.fields:
#of the form to get bootstrap 3 styling
self.fields[field].widget.attrs.update({'class':'form-control'})
self.fields.keyOrder = ['email', 'full_name', 'password1', 'password2']
def save(self, commit=True):
# Save the provided password in hashed format
user = super(CustomUserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class DashboardUserCreationForm(forms.ModelForm):
"""
This form is used to create a user from the dashboard.
"""
class Meta:
model = UserProfile
fields = ('email', 'full_name')
def __init__(self, *args, **kwargs):
super(DashboardUserCreationForm, self).__init__(*args, **kwargs)
#change the html class of all the elements of the form to get
#bootstrap 3 styling
for field in self.fields:
self.fields[field].widget.attrs.update({'class':'form-control'})
class UserProfileChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = UserProfile
fields = ('full_name', 'email')
widgets = {'user_form': forms.HiddenInput()}
def __init__(self, *args, **kwargs):
super(UserProfileChangeForm, self).__init__(*args, **kwargs)
#change the html class of all the elements of
#the form to get bootstrap 3 styling
for field in self.fields:
self.fields[field].widget.attrs.update({'class':'form-control'})
class UserProfileLoginForm(AuthenticationForm):
"""A Form for user login."""
form_fields = ["username", "password"]
username = forms.CharField(max_length=254, label="Correo Electronico",
widget=forms.TextInput(attrs={"placeholder":'Usuario'}))
password = forms.CharField(label='Password',
widget=forms.PasswordInput(attrs={'placeholder': 'Contraseña'}))
def __init__(self, *args, **kwargs):
super(UserProfileLoginForm, self).__init__(*args, **kwargs)
#change the html class of all the elements of the form to get bootstrap 3 styling
for field in self.fields:
self.fields[field].widget.attrs.update({'class':'form-control'})
class OrganizationForm(forms.ModelForm):
"""Form used when creating a new organization"""
categories = forms.ModelMultipleChoiceField(queryset=Category.objects.all())
description = forms.CharField(label="Descripción", required=False,
widget=forms.Textarea(attrs={'rows':'2'}))
url = forms.URLField(max_length=255,
help_text='Por favor introduzca la URL de la pagina',
widget=forms.TextInput(attrs={'placeholder': 'http://www.website.com'}))
class Meta:
"""declaration of the inherited class"""
model = Organization
fields = ('name',
'description',
'phone',
'url',
'address',
'province',
'categories',
)
def __init__(self, *args, **kwargs):
super(OrganizationForm, self).__init__(*args, **kwargs)
#change the html class of all the elements
#of the form to get bootstrap 3 styling
for field in self.fields:
if field != 'categories':
self.fields[field].widget.attrs.update({'class':'form-control'})
else:
self.fields[field].widget.attrs.update({'class':'organization-category'})
class OrganizationPictureForm(forms.ModelForm):
picture = forms.ImageField()
class Meta:
model = Organization
fields = (
'logo',
)
class EventForm(forms.ModelForm):
"""Form to handle event forms"""
description = forms.CharField(label="Descripción", widget=forms.Textarea(attrs={'rows':'5'}))
from_date = forms.CharField(widget=forms.TextInput(attrs={
'class':'date',
})
)
to_date = forms.CharField(widget=forms.TextInput(attrs={
'class':'date',
})
)
class Meta:
"""Model inheritance settings"""
model = Event
fields = ('name',
'categories',
'cost',
'description',
'from_date',
'to_date',
'url',
)
def __init__(self, *args, **kwargs):
super(EventForm, self).__init__(*args, **kwargs)
#change the html class of all the elements of the form to get bootstrap 3 styling
for field in self.fields:
if field != 'categories':
self.fields[field].widget.attrs.update({'class':'form-control'})
else:
self.fields[field].widget.attrs.update({'class':'event-category'})
class MailingListForm(forms.ModelForm):
class Meta:
        model = MailingList
fields = (
'full_name',
'email',
'province',
)
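# --- Illustrative sketch (not part of the original module) ---
# Every __init__ above repeats the same Bootstrap-styling loop; a hypothetical
# mixin could factor that pattern out. The mixin name and skip_fields hook are
# assumptions made up for the demo.
class BootstrapFormMixin(object):
    skip_fields = ()
    def __init__(self, *args, **kwargs):
        super(BootstrapFormMixin, self).__init__(*args, **kwargs)
        for name, field in self.fields.items():
            if name not in self.skip_fields:
                field.widget.attrs.update({'class': 'form-control'})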
|
mit
| 2,371,676,960,138,874,400
| 35.975248
| 105
| 0.597269
| false
| 4.518451
| false
| false
| false
|
cmusatyalab/isr-next
|
vmnetx/define.py
|
1
|
2225
|
#
# vmnetx.define - Creation of a new VMNetX-compatible VM
#
# Copyright (C) 2013 Carnegie Mellon University
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of version 2 of the GNU General Public License as published
# by the Free Software Foundation. A copy of the GNU General Public License
# should have been distributed along with this program in the file
# COPYING.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
from contextlib import closing
import libvirt
import os
import subprocess
from .domain import DomainXML, DomainXMLError
from .util import DetailException
class MachineDefinitionError(DetailException):
pass
def define_machine(name, memory_mb, disk_gb):
with closing(libvirt.open('qemu:///session')) as conn:
# Ensure machine doesn't exist
try:
conn.lookupByName(name)
raise MachineDefinitionError('Machine already exists')
except libvirt.libvirtError:
pass
# Ensure disk doesn't exist
disk_dir = os.path.join(os.path.expanduser('~'), 'VirtualMachines')
disk_path = os.path.join(disk_dir, name + '.qcow')
if os.path.exists(disk_path):
raise MachineDefinitionError('%s already exists' % disk_path)
# Create disk
if not os.path.exists(disk_dir):
os.makedirs(disk_dir)
with open('/dev/null', 'r+') as null:
ret = subprocess.call(['qemu-img', 'create', '-f', 'qcow2',
disk_path, str(disk_gb) + 'G'], stdout=null)
if ret != 0:
raise MachineDefinitionError("Couldn't create disk image")
# Create machine
try:
domain_xml = DomainXML.get_template(conn, name, disk_path,
'qcow2', memory_mb)
conn.defineXML(domain_xml.xml)
except DomainXMLError, e:
raise MachineDefinitionError(str(e), e.detail)
except libvirt.libvirtError, e:
raise MachineDefinitionError(str(e))
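# --- Illustrative sketch (not part of the original module) ---
# Hypothetical invocation; the machine name and sizes are placeholders, and
# running this would define a real libvirt domain if it succeeds.
if __name__ == '__main__':
    try:
        define_machine('vmnetx-demo', memory_mb=1024, disk_gb=8)
    except MachineDefinitionError, e:
        print str(e)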
|
gpl-2.0
| 898,212,150,496,650,500
| 34.887097
| 77
| 0.659775
| false
| 4.090074
| false
| false
| false
|
pyro-ppl/numpyro
|
numpyro/contrib/funsor/infer_util.py
|
1
|
9920
|
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import defaultdict
from contextlib import contextmanager
import functools
import re
import funsor
import numpyro
from numpyro.contrib.funsor.enum_messenger import (
infer_config,
plate as enum_plate,
trace as packed_trace,
)
from numpyro.distributions.util import is_identically_one
from numpyro.handlers import substitute
funsor.set_backend("jax")
@contextmanager
def plate_to_enum_plate():
"""
A context manager to replace `numpyro.plate` statement by a funsor-based
:class:`~numpyro.contrib.funsor.enum_messenger.plate`.
This is useful when doing inference for the usual NumPyro programs with
`numpyro.plate` statements. For example, to get trace of a `model` whose discrete
latent sites are enumerated, we can use::
enum_model = numpyro.contrib.funsor.enum(model)
with plate_to_enum_plate():
model_trace = numpyro.contrib.funsor.trace(enum_model).get_trace(
*model_args, **model_kwargs)
"""
try:
numpyro.plate.__new__ = lambda cls, *args, **kwargs: enum_plate(*args, **kwargs)
yield
finally:
numpyro.plate.__new__ = lambda *args, **kwargs: object.__new__(numpyro.plate)
def config_enumerate(fn=None, default="parallel"):
"""
Configures enumeration for all relevant sites in a NumPyro model.
When configuring for exhaustive enumeration of discrete variables, this
configures all sample sites whose distribution satisfies
``.has_enumerate_support == True``.
This can be used as either a function::
model = config_enumerate(model)
or as a decorator::
@config_enumerate
def model(*args, **kwargs):
...
.. note:: Currently, only ``default='parallel'`` is supported.
:param callable fn: Python callable with NumPyro primitives.
:param str default: Which enumerate strategy to use, one of
"sequential", "parallel", or None. Defaults to "parallel".
"""
if fn is None: # support use as a decorator
return functools.partial(config_enumerate, default=default)
def config_fn(site):
if (
site["type"] == "sample"
and (not site["is_observed"])
and site["fn"].has_enumerate_support
):
return {"enumerate": site["infer"].get("enumerate", default)}
return {}
return infer_config(fn, config_fn)
def _get_shift(name):
"""helper function used internally in sarkka_bilmes_product"""
return len(re.search(r"^(_PREV_)*", name).group(0)) // 6
def _shift_name(name, t):
"""helper function used internally in sarkka_bilmes_product"""
if t >= 0:
return t * "_PREV_" + name
return name.replace("_PREV_" * -t, "", 1)
def compute_markov_factors(
time_to_factors,
time_to_init_vars,
time_to_markov_dims,
sum_vars,
prod_vars,
history,
sum_op,
prod_op,
):
"""
:param dict time_to_factors: a map from time variable to the log prob factors.
:param dict time_to_init_vars: a map from time variable to init discrete sites.
:param dict time_to_markov_dims: a map from time variable to dimensions at markov sites
(discrete sites that depend on previous steps).
:param frozenset sum_vars: all plate and enum dimensions in the trace.
:param frozenset prod_vars: all plate dimensions in the trace.
:param int history: The number of previous contexts visible from the current context.
:returns: a list of factors after eliminate time dimensions
"""
markov_factors = []
for time_var, log_factors in time_to_factors.items():
prev_vars = time_to_init_vars[time_var]
# we eliminate all plate and enum dimensions not available at markov sites.
eliminate_vars = (sum_vars | prod_vars) - time_to_markov_dims[time_var]
with funsor.interpretations.lazy:
lazy_result = funsor.sum_product.sum_product(
sum_op,
prod_op,
log_factors,
eliminate=eliminate_vars,
plates=prod_vars,
)
trans = funsor.optimizer.apply_optimizer(lazy_result)
if history > 1:
global_vars = frozenset(
set(trans.inputs)
- {time_var.name}
- prev_vars
- {_shift_name(k, -_get_shift(k)) for k in prev_vars}
)
markov_factors.append(
funsor.sum_product.sarkka_bilmes_product(
sum_op, prod_op, trans, time_var, global_vars
)
)
else:
# remove `_PREV_` prefix to convert prev to curr
prev_to_curr = {k: _shift_name(k, -_get_shift(k)) for k in prev_vars}
markov_factors.append(
funsor.sum_product.sequential_sum_product(
sum_op, prod_op, trans, time_var, prev_to_curr
)
)
return markov_factors
def _enum_log_density(model, model_args, model_kwargs, params, sum_op, prod_op):
"""Helper function to compute elbo and extract its components from execution traces."""
model = substitute(model, data=params)
with plate_to_enum_plate():
model_trace = packed_trace(model).get_trace(*model_args, **model_kwargs)
log_factors = []
time_to_factors = defaultdict(list) # log prob factors
time_to_init_vars = defaultdict(frozenset) # PP... variables
time_to_markov_dims = defaultdict(frozenset) # dimensions at markov sites
sum_vars, prod_vars = frozenset(), frozenset()
history = 1
log_measures = {}
for site in model_trace.values():
if site["type"] == "sample":
value = site["value"]
intermediates = site["intermediates"]
scale = site["scale"]
if intermediates:
log_prob = site["fn"].log_prob(value, intermediates)
else:
log_prob = site["fn"].log_prob(value)
if (scale is not None) and (not is_identically_one(scale)):
log_prob = scale * log_prob
dim_to_name = site["infer"]["dim_to_name"]
log_prob_factor = funsor.to_funsor(
log_prob, output=funsor.Real, dim_to_name=dim_to_name
)
time_dim = None
for dim, name in dim_to_name.items():
if name.startswith("_time"):
time_dim = funsor.Variable(name, funsor.Bint[log_prob.shape[dim]])
time_to_factors[time_dim].append(log_prob_factor)
history = max(
history, max(_get_shift(s) for s in dim_to_name.values())
)
time_to_init_vars[time_dim] |= frozenset(
s for s in dim_to_name.values() if s.startswith("_PREV_")
)
break
if time_dim is None:
log_factors.append(log_prob_factor)
if not site["is_observed"]:
log_measures[site["name"]] = log_prob_factor
sum_vars |= frozenset({site["name"]})
prod_vars |= frozenset(
f.name for f in site["cond_indep_stack"] if f.dim is not None
)
for time_dim, init_vars in time_to_init_vars.items():
for var in init_vars:
curr_var = _shift_name(var, -_get_shift(var))
dim_to_name = model_trace[curr_var]["infer"]["dim_to_name"]
if var in dim_to_name.values(): # i.e. _PREV_* (i.e. prev) in dim_to_name
time_to_markov_dims[time_dim] |= frozenset(
name for name in dim_to_name.values()
)
if len(time_to_factors) > 0:
markov_factors = compute_markov_factors(
time_to_factors,
time_to_init_vars,
time_to_markov_dims,
sum_vars,
prod_vars,
history,
sum_op,
prod_op,
)
log_factors = log_factors + markov_factors
with funsor.interpretations.lazy:
lazy_result = funsor.sum_product.sum_product(
sum_op,
prod_op,
log_factors,
eliminate=sum_vars | prod_vars,
plates=prod_vars,
)
result = funsor.optimizer.apply_optimizer(lazy_result)
if len(result.inputs) > 0:
raise ValueError(
"Expected the joint log density is a scalar, but got {}. "
"There seems to be something wrong at the following sites: {}.".format(
result.data.shape, {k.split("__BOUND")[0] for k in result.inputs}
)
)
return result, model_trace, log_measures
def log_density(model, model_args, model_kwargs, params):
"""
Similar to :func:`numpyro.infer.util.log_density` but works for models
with discrete latent variables. Internally, this uses :mod:`funsor`
to marginalize discrete latent sites and evaluate the joint log probability.
:param model: Python callable containing NumPyro primitives. Typically,
the model has been enumerated by using
:class:`~numpyro.contrib.funsor.enum_messenger.enum` handler::
def model(*args, **kwargs):
...
log_joint = log_density(enum(config_enumerate(model)), args, kwargs, params)
:param tuple model_args: args provided to the model.
:param dict model_kwargs: kwargs provided to the model.
:param dict params: dictionary of current parameter values keyed by site
name.
:return: log of joint density and a corresponding model trace
"""
result, model_trace, _ = _enum_log_density(
model, model_args, model_kwargs, params, funsor.ops.logaddexp, funsor.ops.add
)
return result.data, model_trace
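# --- Illustrative sketch (not part of the original module) ---
# A hypothetical model with one enumerable discrete site, wired up the way the
# docstring above prescribes; the probabilities, locations, and observed value
# are made-up numbers.
def _demo_enum_log_density():
    import jax.numpy as jnp
    import numpyro.distributions as dist
    from numpyro.contrib.funsor import enum

    def model():
        probs = jnp.array([0.3, 0.7])
        locs = jnp.array([-1.0, 1.0])
        c = numpyro.sample("c", dist.Categorical(probs))
        numpyro.sample("x", dist.Normal(locs[c], 1.0), obs=jnp.array(1.5))

    # the discrete site "c" is marginalized out of the returned log density
    log_joint, _ = log_density(enum(config_enumerate(model)), (), {}, {})
    return log_joint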
|
apache-2.0
| -2,505,284,325,910,366,000
| 35.336996
| 91
| 0.594758
| false
| 3.885625
| true
| false
| false
|
davidastephens/zipline
|
zipline/utils/tradingcalendar.py
|
1
|
11044
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pandas as pd
import pytz
from datetime import datetime, timedelta
from dateutil import rrule
start = pd.Timestamp('1990-01-01', tz='UTC')
end_base = pd.Timestamp('today', tz='UTC')
# Give an aggressive buffer for logic that needs to use the next trading
# day or minute.
end = end_base + timedelta(days=365)
def canonicalize_datetime(dt):
# Strip out any HHMMSS or timezone info in the user's datetime, so that
# all the datetimes we return will be 00:00:00 UTC.
return datetime(dt.year, dt.month, dt.day, tzinfo=pytz.utc)
def get_non_trading_days(start, end):
non_trading_rules = []
start = canonicalize_datetime(start)
end = canonicalize_datetime(end)
weekends = rrule.rrule(
rrule.YEARLY,
byweekday=(rrule.SA, rrule.SU),
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(weekends)
new_years = rrule.rrule(
rrule.MONTHLY,
byyearday=1,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(new_years)
new_years_sunday = rrule.rrule(
rrule.MONTHLY,
byyearday=2,
byweekday=rrule.MO,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(new_years_sunday)
mlk_day = rrule.rrule(
rrule.MONTHLY,
bymonth=1,
byweekday=(rrule.MO(+3)),
cache=True,
dtstart=datetime(1998, 1, 1, tzinfo=pytz.utc),
until=end
)
non_trading_rules.append(mlk_day)
presidents_day = rrule.rrule(
rrule.MONTHLY,
bymonth=2,
byweekday=(rrule.MO(3)),
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(presidents_day)
good_friday = rrule.rrule(
rrule.DAILY,
byeaster=-2,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(good_friday)
memorial_day = rrule.rrule(
rrule.MONTHLY,
bymonth=5,
byweekday=(rrule.MO(-1)),
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(memorial_day)
july_4th = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=4,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(july_4th)
july_4th_sunday = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=5,
byweekday=rrule.MO,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(july_4th_sunday)
july_4th_saturday = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=3,
byweekday=rrule.FR,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(july_4th_saturday)
labor_day = rrule.rrule(
rrule.MONTHLY,
bymonth=9,
byweekday=(rrule.MO(1)),
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(labor_day)
thanksgiving = rrule.rrule(
rrule.MONTHLY,
bymonth=11,
byweekday=(rrule.TH(4)),
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(thanksgiving)
christmas = rrule.rrule(
rrule.MONTHLY,
bymonth=12,
bymonthday=25,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(christmas)
christmas_sunday = rrule.rrule(
rrule.MONTHLY,
bymonth=12,
bymonthday=26,
byweekday=rrule.MO,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(christmas_sunday)
    # If Christmas falls on a Saturday, the preceding Friday (the 24th) is observed.
christmas_saturday = rrule.rrule(
rrule.MONTHLY,
bymonth=12,
bymonthday=24,
byweekday=rrule.FR,
cache=True,
dtstart=start,
until=end
)
non_trading_rules.append(christmas_saturday)
non_trading_ruleset = rrule.rruleset()
for rule in non_trading_rules:
non_trading_ruleset.rrule(rule)
non_trading_days = non_trading_ruleset.between(start, end, inc=True)
# Add September 11th closings
# http://en.wikipedia.org/wiki/Aftermath_of_the_September_11_attacks
# Due to the terrorist attacks, the stock market did not open on 9/11/2001
# It did not open again until 9/17/2001.
#
# September 2001
# Su Mo Tu We Th Fr Sa
# 1
# 2 3 4 5 6 7 8
# 9 10 11 12 13 14 15
# 16 17 18 19 20 21 22
# 23 24 25 26 27 28 29
# 30
for day_num in range(11, 17):
non_trading_days.append(
datetime(2001, 9, day_num, tzinfo=pytz.utc))
# Add closings due to Hurricane Sandy in 2012
# http://en.wikipedia.org/wiki/Hurricane_sandy
#
# The stock exchange was closed due to Hurricane Sandy's
# impact on New York.
# It closed on 10/29 and 10/30, reopening on 10/31
# October 2012
# Su Mo Tu We Th Fr Sa
# 1 2 3 4 5 6
# 7 8 9 10 11 12 13
# 14 15 16 17 18 19 20
# 21 22 23 24 25 26 27
# 28 29 30 31
for day_num in range(29, 31):
non_trading_days.append(
datetime(2012, 10, day_num, tzinfo=pytz.utc))
# Misc closings from NYSE listing.
# http://www.nyse.com/pdfs/closings.pdf
#
# National Days of Mourning
# - President Richard Nixon
non_trading_days.append(datetime(1994, 4, 27, tzinfo=pytz.utc))
# - President Ronald W. Reagan - June 11, 2004
non_trading_days.append(datetime(2004, 6, 11, tzinfo=pytz.utc))
# - President Gerald R. Ford - Jan 2, 2007
non_trading_days.append(datetime(2007, 1, 2, tzinfo=pytz.utc))
non_trading_days.sort()
return pd.DatetimeIndex(non_trading_days)
non_trading_days = get_non_trading_days(start, end)
trading_day = pd.tseries.offsets.CDay(holidays=non_trading_days)
def get_trading_days(start, end, trading_day=trading_day):
return pd.date_range(start=start.date(),
end=end.date(),
freq=trading_day).tz_localize('UTC')
trading_days = get_trading_days(start, end)
def get_early_closes(start, end):
# 1:00 PM close rules based on
# http://quant.stackexchange.com/questions/4083/nyse-early-close-rules-july-4th-and-dec-25th # noqa
# and verified against http://www.nyse.com/pdfs/closings.pdf
# These rules are valid starting in 1993
start = canonicalize_datetime(start)
end = canonicalize_datetime(end)
start = max(start, datetime(1993, 1, 1, tzinfo=pytz.utc))
end = max(end, datetime(1993, 1, 1, tzinfo=pytz.utc))
# Not included here are early closes prior to 1993
# or unplanned early closes
early_close_rules = []
day_after_thanksgiving = rrule.rrule(
rrule.MONTHLY,
bymonth=11,
# 4th Friday isn't correct if month starts on Friday, so restrict to
# day range:
byweekday=(rrule.FR),
bymonthday=range(23, 30),
cache=True,
dtstart=start,
until=end
)
early_close_rules.append(day_after_thanksgiving)
christmas_eve = rrule.rrule(
rrule.MONTHLY,
bymonth=12,
bymonthday=24,
byweekday=(rrule.MO, rrule.TU, rrule.WE, rrule.TH),
cache=True,
dtstart=start,
until=end
)
early_close_rules.append(christmas_eve)
friday_after_christmas = rrule.rrule(
rrule.MONTHLY,
bymonth=12,
bymonthday=26,
byweekday=rrule.FR,
cache=True,
dtstart=start,
# valid 1993-2007
until=min(end, datetime(2007, 12, 31, tzinfo=pytz.utc))
)
early_close_rules.append(friday_after_christmas)
day_before_independence_day = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=3,
byweekday=(rrule.MO, rrule.TU, rrule.TH),
cache=True,
dtstart=start,
until=end
)
early_close_rules.append(day_before_independence_day)
day_after_independence_day = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=5,
byweekday=rrule.FR,
cache=True,
dtstart=start,
# starting in 2013: wednesday before independence day
until=min(end, datetime(2012, 12, 31, tzinfo=pytz.utc))
)
early_close_rules.append(day_after_independence_day)
wednesday_before_independence_day = rrule.rrule(
rrule.MONTHLY,
bymonth=7,
bymonthday=3,
byweekday=rrule.WE,
cache=True,
# starting in 2013
dtstart=max(start, datetime(2013, 1, 1, tzinfo=pytz.utc)),
until=max(end, datetime(2013, 1, 1, tzinfo=pytz.utc))
)
early_close_rules.append(wednesday_before_independence_day)
early_close_ruleset = rrule.rruleset()
for rule in early_close_rules:
early_close_ruleset.rrule(rule)
early_closes = early_close_ruleset.between(start, end, inc=True)
# Misc early closings from NYSE listing.
# http://www.nyse.com/pdfs/closings.pdf
#
# New Year's Eve
nye_1999 = datetime(1999, 12, 31, tzinfo=pytz.utc)
if start <= nye_1999 and nye_1999 <= end:
early_closes.append(nye_1999)
early_closes.sort()
return pd.DatetimeIndex(early_closes)
early_closes = get_early_closes(start, end)
def get_open_and_closes(trading_days, early_closes, tz='US/Eastern'):
open_and_closes = pd.DataFrame(index=trading_days,
columns=('market_open', 'market_close'))
for day in trading_days:
market_open = pd.Timestamp(
datetime(
year=day.year,
month=day.month,
day=day.day,
hour=9,
minute=31),
tz='US/Eastern').tz_convert('UTC')
# 1 PM if early close, 4 PM otherwise
close_hour = 13 if day in early_closes else 16
market_close = pd.Timestamp(
datetime(
year=day.year,
month=day.month,
day=day.day,
hour=close_hour),
tz='US/Eastern').tz_convert('UTC')
open_and_closes.ix[day]['market_open'] = market_open
open_and_closes.ix[day]['market_close'] = market_close
return open_and_closes
open_and_closes = get_open_and_closes(trading_days, early_closes)
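# --- Illustrative sketch (not part of the original module) ---
# Querying the precomputed calendars; the date below (the Friday after
# Thanksgiving 2012) is an example chosen because it is a 1 PM early close.
def _demo_calendar_lookup():
    day = pd.Timestamp('2012-11-23', tz='UTC')
    assert day in trading_days
    assert day in early_closes
    # market_close should be 18:00 UTC (1 PM US/Eastern)
    return open_and_closes.ix[day]['market_close']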
|
apache-2.0
| -441,858,406,999,512,400
| 26.679198
| 103
| 0.605397
| false
| 3.196527
| false
| false
| false
|
monk-ee/puppetdb-python
|
puppetdb/v4/reports.py
|
1
|
3924
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Arcus, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
reports.py: A bunch of API methods for interacting with v4 reports in the PuppetDB API.
Operators
The only available OPERATOR is =.
Fields
FIELD may be any of the following. All fields support only the equality operator.
certname
the name of the node that the report was received from.
hash
the id of the report; these ids can be acquired via event queries (see the /events query endpoint).
The response is a JSON array of report summaries for all reports that matched the input parameters.
The summaries are sorted by the completion time of the report, in descending order:
[
{
"end-time": "2012-10-29T18:38:01.000Z",
"puppet-version": "3.0.1",
"receive-time": "2012-10-29T18:38:04.238Z",
"configuration-version": "1351535883",
"start-time": "2012-10-29T18:38:00.000Z",
"hash": "bd899b1ee825ec1d2c671fe5541b5c8f4a783472",
"certname": "foo.local",
"report-format": 4,
"transaction-uuid": "030c1717-f175-4644-b048-ac9ea328f221"
},
{
"end-time": "2012-10-26T22:39:32.000Z",
"puppet-version": "3.0.1",
"receive-time": "2012-10-26T22:39:35.305Z",
"configuration-version": "1351291174",
"start-time": "2012-10-26T22:39:31.000Z",
"hash": "cd4e5fd8846bac26d15d151664a40e0f2fa600b0",
"certname": "foo.local",
"report-format": 4,
"transaction-uuid": null
}
]
"""
__author__ = "monkee"
__version__ = "1.0.1"
__maintainer__ = "monk-ee"
__email__ = "magic.monkee.magic@gmail.com"
__status__ = "Development"
from puppetdb import utils
API_VERSION = 'v4'
def get_reports(api_url=None, query='', verify=False, cert=list()):
"""
    Returns report summaries matching the given query predicates.
:param api_url: Base PuppetDB API url
:param query: Required. A JSON array of query predicates, in prefix form. (The standard ["<OPERATOR>", "<FIELD>", "<VALUE>"] format.)
Response
[
{
"end-time": "2012-10-29T18:38:01.000Z",
"puppet-version": "3.0.1",
"receive-time": "2012-10-29T18:38:04.238Z",
"configuration-version": "1351535883",
"start-time": "2012-10-29T18:38:00.000Z",
"hash": "bd899b1ee825ec1d2c671fe5541b5c8f4a783472",
"certname": "foo.local",
"report-format": 4,
"transaction-uuid": "030c1717-f175-4644-b048-ac9ea328f221"
},
{
"end-time": "2012-10-26T22:39:32.000Z",
"puppet-version": "3.0.1",
"receive-time": "2012-10-26T22:39:35.305Z",
"configuration-version": "1351291174",
"start-time": "2012-10-26T22:39:31.000Z",
"hash": "cd4e5fd8846bac26d15d151664a40e0f2fa600b0",
"certname": "foo.local",
"report-format": 4,
"transaction-uuid": null
}
]
"""
return utils._make_api_request(api_url, '/reports', verify, cert, params={'query': query})
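# Hedged usage sketch: the api_url and certname below are illustrative, not
# part of this module. The query must be a JSON-encoded predicate array.
# import json
# query = json.dumps(["=", "certname", "foo.local"])
# reports = get_reports(api_url='http://puppetdb.example.com:8080/v4',
#                       query=query)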
|
mit
| 27,615,388,178,460,460
| 35.333333
| 137
| 0.675331
| false
| 3.169628
| false
| false
| false
|
laurentb/weboob
|
modules/serebii/module.py
|
1
|
2111
|
# -*- coding: utf-8 -*-
# Copyright(C) 2019-2020 Célande Adrien
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from weboob.tools.backend import Module
from weboob.capabilities.rpg import CapRPG
from .browser import SerebiiBrowser
__all__ = ['SerebiiModule']
class SerebiiModule(Module, CapRPG):
NAME = 'serebii'
    DESCRIPTION = 'This website collects data about Pokémon games.'
MAINTAINER = 'Célande Adrien'
EMAIL = 'celande.adrien@gmail.com'
LICENSE = 'LGPLv3+'
VERSION = '2.1'
BROWSER = SerebiiBrowser
def iter_characters(self):
return self.browser.iter_characters()
def get_character(self, character_id):
return self.browser.get_character(character_id)
def iter_skills(self, skill_type=None):
return self.browser.iter_skills(skill_type)
def get_skill(self, skill_id):
return self.browser.get_skill(skill_id)
def iter_skill_set(self, character_id, skill_type=None):
return self.browser.iter_skill_set(character_id, skill_type)
def iter_character_classes(self):
return self.browser.iter_character_classes()
def get_character_class(self, class_id):
"""
        List the weaknesses and strengths of a Pokémon type.
"""
return self.browser.get_character_class(class_id)
def iter_collectable_items(self):
return self.browser.iter_collectable_items()
|
lgpl-3.0
| -6,983,604,243,544,496,000
| 30.447761
| 77
| 0.710014
| false
| 3.583333
| false
| false
| false
|
jamiebull1/eppy
|
eppy/tests/test_IDF.py
|
1
|
2313
|
# Copyright (c) 2012 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""py.test for class IDF"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from six import StringIO
from eppy.iddcurrent import iddcurrent
from eppy.modeleditor import IDF
def test_IDF():
"""py.test for class IDF"""
stored_idd = IDF.iddname
IDF.iddname = None
    assert IDF.iddname is None
IDF.setiddname("gumby", testing=True)
assert IDF.iddname == "gumby"
IDF.setiddname("karamba", testing=True)
assert IDF.iddname != "karamba"
assert IDF.iddname == "gumby"
IDF.iddname = stored_idd
iddsnippet = iddcurrent.iddtxt
iddfhandle = StringIO(iddsnippet)
if IDF.getiddname() is None:
IDF.setiddname(iddfhandle)
class TestIDF(object):
"""py.test for IDF function"""
def test_removeidfobject(self):
"""py.test for IDF.removeidfobject """
idftxt = ""
idfhandle = StringIO(idftxt)
idf = IDF(idfhandle)
key = "BUILDING"
idf.newidfobject(key, Name="Building_remove")
idf.newidfobject(key, Name="Building1")
idf.newidfobject(key, Name="Building_remove")
idf.newidfobject(key, Name="Building2")
buildings = idf.idfobjects["building"]
removethis = buildings[-2]
idf.removeidfobject(removethis)
assert buildings[2].Name == "Building2"
assert idf.model.dt[key][2][1] == "Building2"
def test_popidfobject(self):
idftxt = ""
idfhandle = StringIO(idftxt)
idf = IDF(idfhandle)
key = "BUILDING"
idf.newidfobject(key, Name="Building_remove")
idf.newidfobject(key, Name="Building1")
idf.newidfobject(key, Name="Building_remove")
idf.newidfobject(key, Name="Building2")
buildings = idf.idfobjects["building"]
removethis = buildings[-2]
idf.popidfobject(key, 2)
assert buildings[2].Name == "Building2"
assert idf.model.dt[key][2][1] == "Building2"
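# Hedged usage sketch outside py.test, mirroring the calls exercised above
# (the Name values are illustrative; an IDD must already be set, as above).
# idf = IDF(StringIO(""))
# idf.newidfobject("BUILDING", Name="KeepMe")
# idf.newidfobject("BUILDING", Name="DropMe")
# dropme = idf.idfobjects["building"][-1]
# idf.removeidfobject(dropme)      # remove by object reference
# idf.popidfobject("BUILDING", 0)  # remove by object key and position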
|
mit
| -7,503,793,358,429,532,000
| 30.684932
| 73
| 0.609166
| false
| 3.262341
| true
| false
| false
|
oblalex/django-workflow
|
src/workflow/migrations/0004_auto__add_field_version_object_type.py
|
1
|
6324
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Version.object_type'
db.add_column('workflow_version', 'object_type',
self.gf('django.db.models.fields.CharField')(default=u'ADD', max_length=3),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Version.object_type'
db.delete_column('workflow_version', 'object_type')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'workflow.revision': {
'Meta': {'ordering': "[u'-date_created']", 'object_name': 'Revision'},
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'created_by_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_moderated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'moderated_by_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': "orm['workflow.Revision']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'DR'", 'max_length': '2'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'workflow.version': {
'Meta': {'object_name': 'Version'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'format': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.TextField', [], {}),
'object_id_int': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'object_repr': ('django.db.models.fields.TextField', [], {}),
'object_type': ('django.db.models.fields.CharField', [], {'default': "u'ADD'", 'max_length': '3'}),
'revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workflow.Revision']"}),
'serialized_data': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['workflow']
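# Hedged usage note: with South installed, this migration would typically be
# applied from the project root with (shown for illustration):
#   python manage.py migrate workflow 0004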
|
mit
| -7,054,477,247,670,684
| 70.067416
| 182
| 0.555503
| false
| 3.717813
| false
| false
| false
|
masahir0y/barebox-yamada
|
scripts/remote/ratp.py
|
6
|
21432
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import crcmod
import logging
import struct
from enum import Enum
from time import sleep
try:
from time import monotonic
except ImportError:
from .missing import monotonic
csum_func = crcmod.predefined.mkCrcFun('xmodem')
class RatpState(Enum):
listen = "listen" # 1
syn_sent = "syn-sent" # 2
syn_received = "syn-received" # 3
established = "established" # 4
fin_wait = "fin-wait" # 5
last_ack = "last-ack" # 6
closing = "closing" # 7
time_wait = "time-wait" # 8
closed = "closed" # 9
class RatpInvalidHeader(ValueError):
pass
class RatpInvalidPayload(ValueError):
pass
class RatpError(ValueError):
pass
class RatpPacket(object):
def __init__(self, data=None, flags=''):
self.payload = None
self.synch = 0x01
self._control = 0
self.length = 0
self.csum = 0
self.c_syn = False
self.c_ack = False
self.c_fin = False
self.c_rst = False
self.c_sn = 0
self.c_an = 0
self.c_eor = False
self.c_so = False
if data:
(self.synch, self._control, self.length, self.csum) = \
struct.unpack('!BBBB', data)
if self.synch != 0x01:
raise RatpInvalidHeader("invalid synch octet (%x != %x)" %
(self.synch, 0x01))
csum = (self._control + self.length + self.csum) & 0xff
if csum != 0xff:
raise RatpInvalidHeader("invalid csum octet (%x != %x)" %
(csum, 0xff))
self._unpack_control()
elif flags:
if 'S' in flags:
self.c_syn = True
if 'A' in flags:
self.c_ack = True
if 'F' in flags:
self.c_fin = True
if 'R' in flags:
self.c_rst = True
if 'E' in flags:
self.c_eor = True
def __repr__(self):
s = "RatpPacket("
if self.c_syn:
s += "SYN,"
if self.c_ack:
s += "ACK,"
if self.c_fin:
s += "FIN,"
if self.c_rst:
s += "RST,"
s += "SN=%i,AN=%i," % (self.c_sn, self.c_an)
if self.c_eor:
s += "EOR,"
if self.c_so:
s += "SO,DATA=%i)" % self.length
else:
s += "DATA=%i)" % self.length
return s
def _pack_control(self):
self._control = 0 | \
self.c_syn << 7 | \
self.c_ack << 6 | \
self.c_fin << 5 | \
self.c_rst << 4 | \
self.c_sn << 3 | \
self.c_an << 2 | \
self.c_eor << 1 | \
self.c_so << 0
def _unpack_control(self):
self.c_syn = bool(self._control & 1 << 7)
self.c_ack = bool(self._control & 1 << 6)
self.c_fin = bool(self._control & 1 << 5)
self.c_rst = bool(self._control & 1 << 4)
self.c_sn = bool(self._control & 1 << 3)
self.c_an = bool(self._control & 1 << 2)
self.c_eor = bool(self._control & 1 << 1)
self.c_so = bool(self._control & 1 << 0)
def pack(self):
self._pack_control()
self.csum = 0
self.csum = (self._control + self.length + self.csum)
self.csum = (self.csum & 0xff) ^ 0xff
return struct.pack('!BBBB', self.synch, self._control, self.length,
self.csum)
def unpack_payload(self, payload):
(c_recv,) = struct.unpack('!H', payload[-2:])
c_calc = csum_func(payload[:-2])
if c_recv != c_calc:
raise RatpInvalidPayload("bad checksum (%04x != %04x)" %
(c_recv, c_calc))
self.payload = payload[:-2]
def pack_payload(self):
c_calc = csum_func(self.payload)
return self.payload+struct.pack('!H', c_calc)
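# Hedged round-trip sketch: a 4-octet header built by pack() parses back
# into an equivalent packet (SN/AN default to 0 here).
# syn_ack = RatpPacket(flags='SA')
# wire = syn_ack.pack()            # synch, control, length, checksum octets
# parsed = RatpPacket(data=wire)
# assert parsed.c_syn and parsed.c_ack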
class RatpConnection(object):
def __init__(self):
self._state = RatpState.closed
self._passive = True
self._input = b''
self._s_sn = 0
self._r_sn = 0
self._retrans = None
self._retrans_counter = None
self._retrans_deadline = None
self._r_mdl = None
self._s_mdl = 0xff
self._rx_buf = [] # reassembly buffer
self._rx_queue = []
self._tx_queue = []
self._rtt_alpha = 0.8
self._rtt_beta = 2.0
self._srtt = 0.2
self._rto_min, self._rto_max = 0.2, 1
self._tx_timestamp = None
self.total_retransmits = 0
self.total_crc_errors = 0
def _update_srtt(self, rtt):
self._srtt = (self._rtt_alpha * self._srtt) + \
((1.0 - self._rtt_alpha) * rtt)
logging.info("SRTT: %r", self._srtt)
def _get_rto(self):
return min(self._rto_max,
max(self._rto_min, self._rtt_beta * self._srtt))
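    # Worked example (values illustrative): with srtt = 0.2 and a measured
    # rtt = 0.4, _update_srtt yields 0.8*0.2 + 0.2*0.4 = 0.24, and _get_rto
    # returns min(1, max(0.2, 2.0*0.24)) = 0.48 seconds.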
def _write(self, pkt):
if pkt.payload or pkt.c_so or pkt.c_syn or pkt.c_rst or pkt.c_fin:
self._s_sn = pkt.c_sn
if not self._retrans:
self._retrans = pkt
self._retrans_counter = 0
else:
self.total_retransmits += 1
self._retrans_counter += 1
if self._retrans_counter > 10:
raise RatpError("Maximum retransmit count exceeded")
self._retrans_deadline = monotonic()+self._get_rto()
logging.info("Write: %r", pkt)
self._write_raw(pkt.pack())
if pkt.payload:
self._write_raw(pkt.pack_payload())
self._tx_timestamp = monotonic()
def _check_rto(self):
if self._retrans is None:
return
if self._retrans_deadline < monotonic():
logging.debug("Retransmit...")
self._write(self._retrans)
def _check_time_wait(self):
if not self._state == RatpState.time_wait:
return
remaining = self._time_wait_deadline - monotonic()
if remaining < 0:
self._state = RatpState.closed
else:
logging.debug("Time-Wait: %.2f remaining" % remaining)
sleep(min(remaining, 0.1))
def _read(self):
if len(self._input) < 4:
self._input += self._read_raw(4-len(self._input))
if len(self._input) < 4:
return
try:
pkt = RatpPacket(data=self._input[:4])
except RatpInvalidHeader as e:
logging.info("%r", e)
self._input = self._input[1:]
return
self._input = self._input[4:]
logging.info("Read: %r", pkt)
if pkt.c_syn or pkt.c_rst or pkt.c_so or pkt.c_fin:
return pkt
if pkt.length == 0:
return pkt
while len(self._input) < pkt.length+2:
self._input += self._read_raw()
try:
pkt.unpack_payload(self._input[:pkt.length+2])
except RatpInvalidPayload as e:
self.total_crc_errors += 1
return
finally:
self._input = self._input[pkt.length+2:]
return pkt
def _close(self):
pass
def _a(self, r):
logging.info("A")
if r.c_rst:
return True
if r.c_ack:
s = RatpPacket(flags='R')
s.c_sn = r.c_an
self._write(s)
return False
if r.c_syn:
self._r_mdl = r.length
s = RatpPacket(flags='SA')
s.c_sn = 0
s.c_an = (r.c_sn + 1) % 2
s.length = self._s_mdl
self._write(s)
self._state = RatpState.syn_received
return False
return False
def _b(self, r):
logging.info("B")
if r.c_ack and r.c_an != (self._s_sn + 1) % 2:
if r.c_rst:
return False
else:
s = RatpPacket(flags='R')
s.c_sn = r.c_an
self._write(s)
return False
if r.c_rst:
if r.c_ack:
self._retrans = None
# FIXME: delete the TCB
self._state = RatpState.closed
return False
else:
return False
if r.c_syn:
if r.c_ack:
self._r_mdl = r.length
self._retrans = None
self._r_sn = r.c_sn
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._state = RatpState.established
return False
else:
self._retrans = None
s = RatpPacket(flags='SA')
s.c_sn = 0
s.c_an = (r.c_sn + 1) % 2
s.length = self._s_mdl
self._write(s)
self._state = RatpState.syn_received
return False
return False
def _c1(self, r):
logging.info("C1")
if r.c_sn != self._r_sn:
return True
if r.c_rst or r.c_fin:
return False
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
return False
def _c2(self, r):
logging.info("C2")
if r.c_sn != self._r_sn:
return True
if r.c_rst or r.c_fin:
return False
if r.c_syn:
s = RatpPacket(flags='RA')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._retrans = None
# FIXME: inform the user "Error: Connection reset"
self._state = RatpState.closed
return False
logging.info("C2: duplicate packet")
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
return False
def _d1(self, r):
logging.info("D1")
if not r.c_rst:
return True
if self._passive:
self._retrans = None
self._state = RatpState.listen
return False
else:
self._retrans = None
self._state = RatpState.closed
raise RatpError("Connection refused")
def _d2(self, r):
logging.info("D2")
if not r.c_rst:
return True
self._retrans = None
self._state = RatpState.closed
raise RatpError("Connection reset")
def _d3(self, r):
        logging.info("D3")
if not r.c_rst:
return True
self._state = RatpState.closed
return False
def _e(self, r):
logging.info("E")
if not r.c_syn:
return True
self._retrans = None
s = RatpPacket(flags='R')
if r.c_ack:
s.c_sn = r.c_an
else:
s.c_sn = 0
self._write(s)
self._state = RatpState.closed
raise RatpError("Connection reset")
def _f1(self, r):
logging.info("F1")
if not r.c_ack:
return False
if r.c_an == (self._s_sn + 1) % 2:
return True
if self._passive:
self._retrans = None
s = RatpPacket(flags='R')
s.c_sn = r.c_an
self._write(s)
self._state = RatpState.listen
return False
else:
self._retrans = None
s = RatpPacket(flags='R')
s.c_sn = r.c_an
self._write(s)
self._state = RatpState.closed
raise RatpError("Connection refused")
def _f2(self, r):
logging.info("F2")
if not r.c_ack:
return False
if r.c_an == (self._s_sn + 1) % 2:
if self._retrans:
self._retrans = None
self._update_srtt(monotonic()-self._tx_timestamp)
# FIXME: inform the user with an "Ok" if a buffer has been
# entirely acknowledged. Another packet containing data may
# now be sent.
return True
return True
def _f3(self, r):
logging.info("F3")
if not r.c_ack:
return False
if r.c_an == (self._s_sn + 1) % 2:
return True
return True
def _g(self, r):
logging.info("G")
if not r.c_rst:
return False
self._retrans = None
if r.c_ack:
s = RatpPacket(flags='R')
s.c_sn = r.c_an
self._write(s)
else:
s = RatpPacket(flags='RA')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
return False
def _h1(self, r):
logging.info("H1")
self._state = RatpState.established
return self._common_i1(r)
def _h2(self, r):
logging.info("H2")
if not r.c_fin:
return True
if self._retrans is not None:
# FIXME: inform the user "Warning: Data left unsent.", "Connection closing."
self._retrans = None
s = RatpPacket(flags='FA')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._state = RatpState.last_ack
raise RatpError("Connection closed by remote")
def _h3(self, r):
logging.info("H3")
if not r.c_fin:
# Our fin was lost, rely on retransmission
return False
if (r.length and not r.c_syn and not r.c_rst and not r.c_fin) or r.c_so:
self._retrans = None
s = RatpPacket(flags='RA')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._state = RatpState.closed
raise RatpError("Connection reset")
if r.c_an == (self._s_sn + 1) % 2:
self._retrans = None
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._time_wait_deadline = monotonic() + self._get_rto()
self._state = RatpState.time_wait
return False
else:
self._retrans = None
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._state = RatpState.closing
return False
def _h4(self, r):
logging.info("H4")
if r.c_an == (self._s_sn + 1) % 2:
self._retrans = None
self._time_wait_deadline = monotonic() + self._get_rto()
self._state = RatpState.time_wait
return False
return False
def _h5(self, r):
logging.info("H5")
if r.c_an == (self._s_sn + 1) % 2:
self._time_wait_deadline = monotonic() + self._get_rto()
self._state = RatpState.time_wait
return False
return False
def _h6(self, r):
logging.info("H6")
if not r.c_ack:
return False
if not r.c_fin:
return False
self._retrans = None
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
self._time_wait_deadline = monotonic() + self._get_rto()
return False
def _common_i1(self, r):
if r.c_so:
self._r_sn = r.c_sn
self._rx_buf.append(chr(r.length))
elif r.length and not r.c_syn and not r.c_rst and not r.c_fin:
self._r_sn = r.c_sn
self._rx_buf.append(r.payload)
else:
return False
# reassemble
if r.c_eor:
logging.info("Reassembling %i frames", len(self._rx_buf))
self._rx_queue.append(''.join(self._rx_buf))
self._rx_buf = []
s = RatpPacket(flags='A')
s.c_sn = r.c_an
s.c_an = (r.c_sn + 1) % 2
self._write(s)
return False
def _i1(self, r):
logging.info("I1")
return self._common_i1(r)
def _machine(self, pkt):
logging.info("State: %r", self._state)
if self._state == RatpState.listen:
self._a(pkt)
elif self._state == RatpState.syn_sent:
self._b(pkt)
elif self._state == RatpState.syn_received:
self._c1(pkt) and \
self._d1(pkt) and \
self._e(pkt) and \
self._f1(pkt) and \
self._h1(pkt)
elif self._state == RatpState.established:
self._c2(pkt) and \
self._d2(pkt) and \
self._e(pkt) and \
self._f2(pkt) and \
self._h2(pkt) and \
self._i1(pkt)
elif self._state == RatpState.fin_wait:
self._c2(pkt) and \
self._d2(pkt) and \
self._e(pkt) and \
self._f3(pkt) and \
self._h3(pkt)
elif self._state == RatpState.last_ack:
self._c2(pkt) and \
self._d3(pkt) and \
self._e(pkt) and \
self._f3(pkt) and \
self._h4(pkt)
elif self._state == RatpState.closing:
self._c2(pkt) and \
self._d3(pkt) and \
self._e(pkt) and \
self._f3(pkt) and \
self._h5(pkt)
elif self._state == RatpState.time_wait:
self._d3(pkt) and \
self._e(pkt) and \
self._f3(pkt) and \
self._h6(pkt)
elif self._state == RatpState.closed:
self._g(pkt)
def wait(self, deadline):
while deadline is None or deadline > monotonic():
pkt = self._read()
if pkt:
self._machine(pkt)
else:
self._check_rto()
self._check_time_wait()
if not self._retrans or self._rx_queue:
return
def wait1(self, deadline):
while deadline is None or deadline > monotonic():
pkt = self._read()
if pkt:
self._machine(pkt)
else:
self._check_rto()
self._check_time_wait()
if not self._retrans:
return
def listen(self):
logging.info("LISTEN")
self._state = RatpState.listen
def connect(self, timeout=5.0):
deadline = monotonic() + timeout
logging.info("CONNECT")
self._retrans = None
syn = RatpPacket(flags='S')
syn.length = self._s_mdl
self._write(syn)
self._state = RatpState.syn_sent
self.wait(deadline)
def send_one(self, data, eor=True, timeout=1.0):
deadline = monotonic() + timeout
logging.info("SEND_ONE (len=%i, eor=%r)", len(data), eor)
assert self._state == RatpState.established
assert self._retrans is None
snd = RatpPacket(flags='A')
snd.c_eor = eor
snd.c_sn = (self._s_sn + 1) % 2
snd.c_an = (self._r_sn + 1) % 2
snd.length = len(data)
snd.payload = data
self._write(snd)
self.wait1(deadline=None)
def send(self, data, timeout=1.0):
logging.info("SEND (len=%i)", len(data))
while len(data) > 255:
self.send_one(data[:255], eor=False, timeout=timeout)
data = data[255:]
self.send_one(data, eor=True, timeout=timeout)
def recv(self, timeout=1.0):
deadline = monotonic() + timeout
assert self._state == RatpState.established
if self._rx_queue:
return self._rx_queue.pop(0)
self.wait(deadline)
if self._rx_queue:
return self._rx_queue.pop(0)
def close(self, timeout=1.0):
deadline = monotonic() + timeout
logging.info("CLOSE")
if self._state == RatpState.established or self._state == RatpState.syn_received:
fin = RatpPacket(flags='FA')
fin.c_sn = (self._s_sn + 1) % 2
fin.c_an = (self._r_sn + 1) % 2
self._write(fin)
self._state = RatpState.fin_wait
while deadline > monotonic() and not self._state == RatpState.time_wait:
self.wait(deadline)
while self._state == RatpState.time_wait:
self.wait(None)
if self._state == RatpState.closed:
logging.info("CLOSE: success")
else:
logging.info("CLOSE: failure")
def abort(self):
logging.info("ABORT")
def status(self):
logging.info("STATUS")
return self._state
class SerialRatpConnection(RatpConnection):
def __init__(self, port):
super(SerialRatpConnection, self).__init__()
self.__port = port
self.__port.timeout = 0.01
self.__port.writeTimeout = None
self.__port.flushInput()
def _write_raw(self, data):
if data:
logging.debug("-> %r", bytearray(data))
return self.__port.write(data)
def _read_raw(self, size=1):
data = self.__port.read(size)
if data:
logging.debug("<- %r", bytearray(data))
return data
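# Hedged usage sketch (assumes pyserial; the device path and baud rate are
# illustrative):
# import serial
# port = serial.Serial('/dev/ttyUSB0', 115200)
# conn = SerialRatpConnection(port)
# conn.connect(timeout=5.0)
# conn.send(b'hello')
# reply = conn.recv(timeout=1.0)
# conn.close()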
|
gpl-2.0
| -2,351,006,769,777,201,700
| 27.015686
| 89
| 0.474524
| false
| 3.462918
| false
| false
| false
|
mstritt/orbit-image-analysis
|
src/main/python/deeplearn/export2tensorboard.py
|
1
|
1049
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.core.framework import graph_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.summary import summary
model_dir = 'D:/data/glomeruli/20180202_glomeruli_detection_noquant.pb'
log_dir = 'd:/temp/tf'
with session.Session(graph=ops.Graph()) as sess:
with gfile.FastGFile(model_dir, "rb") as f:
graph_def = graph_pb2.GraphDef()
graph_def.ParseFromString(f.read())
importer.import_graph_def(graph_def)
# pb_visual_writer = summary.FileWriter(log_dir)
# pb_visual_writer.add_graph(sess.graph)
file_writer = summary.FileWriter(log_dir, sess.graph)
print("Model Imported. Visualize by running: tensorboard --logdir={}".format(log_dir))
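# Hedged follow-up (shell command; the path mirrors log_dir above):
#   tensorboard --logdir d:/temp/tf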
|
gpl-3.0
| 359,202,195,924,344,060
| 32.83871
| 94
| 0.741659
| false
| 3.693662
| false
| false
| false
|
whitehorse-io/encarnia
|
pyenv/lib/python2.7/site-packages/hyperlink/_url.py
|
3
|
49264
|
# -*- coding: utf-8 -*-
u"""Hyperlink provides Pythonic URL parsing, construction, and rendering.
Usage is straightforward::
>>> from hyperlink import URL
>>> url = URL.from_text(u'http://github.com/mahmoud/hyperlink?utm_source=docs')
>>> url.host
u'github.com'
>>> secure_url = url.replace(scheme=u'https')
>>> secure_url.get('utm_source')[0]
u'docs'
As seen here, the API revolves around the lightweight and immutable
:class:`URL` type, documented below.
"""
import re
import string
import socket
from unicodedata import normalize
try:
from socket import inet_pton
except ImportError:
# based on https://gist.github.com/nnemkin/4966028
# this code only applies on Windows Python 2.7
import ctypes
class _sockaddr(ctypes.Structure):
_fields_ = [("sa_family", ctypes.c_short),
("__pad1", ctypes.c_ushort),
("ipv4_addr", ctypes.c_byte * 4),
("ipv6_addr", ctypes.c_byte * 16),
("__pad2", ctypes.c_ulong)]
WSAStringToAddressA = ctypes.windll.ws2_32.WSAStringToAddressA
WSAAddressToStringA = ctypes.windll.ws2_32.WSAAddressToStringA
def inet_pton(address_family, ip_string):
addr = _sockaddr()
ip_string = ip_string.encode('ascii')
addr.sa_family = address_family
addr_size = ctypes.c_int(ctypes.sizeof(addr))
if WSAStringToAddressA(ip_string, address_family, None, ctypes.byref(addr), ctypes.byref(addr_size)) != 0:
raise socket.error(ctypes.FormatError())
if address_family == socket.AF_INET:
return ctypes.string_at(addr.ipv4_addr, 4)
if address_family == socket.AF_INET6:
return ctypes.string_at(addr.ipv6_addr, 16)
raise socket.error('unknown address family')
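# Sanity check, valid for both the stdlib and the ctypes fallback above:
#   inet_pton(socket.AF_INET, '127.0.0.1') == b'\x7f\x00\x00\x01'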
unicode = type(u'')
try:
unichr
except NameError:
unichr = chr # py3
NoneType = type(None)
# from boltons.typeutils
def make_sentinel(name='_MISSING', var_name=None):
"""Creates and returns a new **instance** of a new class, suitable for
usage as a "sentinel", a kind of singleton often used to indicate
a value is missing when ``None`` is a valid input.
Args:
name (str): Name of the Sentinel
var_name (str): Set this name to the name of the variable in
            its respective module to enable pickleability.
>>> make_sentinel(var_name='_MISSING')
_MISSING
The most common use cases here in boltons are as default values
for optional function arguments, partly because of its
less-confusing appearance in automatically generated
documentation. Sentinels also function well as placeholders in queues
and linked lists.
.. note::
By design, additional calls to ``make_sentinel`` with the same
values will not produce equivalent objects.
>>> make_sentinel('TEST') == make_sentinel('TEST')
False
>>> type(make_sentinel('TEST')) == type(make_sentinel('TEST'))
False
"""
class Sentinel(object):
def __init__(self):
self.name = name
self.var_name = var_name
def __repr__(self):
if self.var_name:
return self.var_name
return '%s(%r)' % (self.__class__.__name__, self.name)
if var_name:
def __reduce__(self):
return self.var_name
def __nonzero__(self):
return False
__bool__ = __nonzero__
return Sentinel()
_unspecified = _UNSET = make_sentinel('_UNSET')
# RFC 3986 Section 2.3, Unreserved URI Characters
# https://tools.ietf.org/html/rfc3986#section-2.3
_UNRESERVED_CHARS = frozenset('~-._0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'abcdefghijklmnopqrstuvwxyz')
# URL parsing regex (based on RFC 3986 Appendix B, with modifications)
_URL_RE = re.compile(r'^((?P<scheme>[^:/?#]+):)?'
r'((?P<_netloc_sep>//)'
r'(?P<authority>[^/?#]*))?'
r'(?P<path>[^?#]*)'
r'(\?(?P<query>[^#]*))?'
r'(#(?P<fragment>.*))?$')
_SCHEME_RE = re.compile(r'^[a-zA-Z0-9+-.]*$')
_AUTHORITY_RE = re.compile(r'^(?:(?P<userinfo>[^@/?#]*)@)?'
r'(?P<host>'
r'(?:\[(?P<ipv6_host>[^[\]/?#]*)\])'
r'|(?P<plain_host>[^:/?#[\]]*)'
r'|(?P<bad_host>.*?))?'
r'(?::(?P<port>.*))?$')
_HEX_CHAR_MAP = dict([((a + b).encode('ascii'),
unichr(int(a + b, 16)).encode('charmap'))
for a in string.hexdigits for b in string.hexdigits])
_ASCII_RE = re.compile('([\x00-\x7f]+)')
# RFC 3986 section 2.2, Reserved Characters
# https://tools.ietf.org/html/rfc3986#section-2.2
_GEN_DELIMS = frozenset(u':/?#[]@')
_SUB_DELIMS = frozenset(u"!$&'()*+,;=")
_ALL_DELIMS = _GEN_DELIMS | _SUB_DELIMS
_USERINFO_SAFE = _UNRESERVED_CHARS | _SUB_DELIMS
_USERINFO_DELIMS = _ALL_DELIMS - _USERINFO_SAFE
_PATH_SAFE = _UNRESERVED_CHARS | _SUB_DELIMS | set(u':@%')
_PATH_DELIMS = _ALL_DELIMS - _PATH_SAFE
_SCHEMELESS_PATH_SAFE = _PATH_SAFE - set(':')
_SCHEMELESS_PATH_DELIMS = _ALL_DELIMS - _SCHEMELESS_PATH_SAFE
_FRAGMENT_SAFE = _UNRESERVED_CHARS | _PATH_SAFE | set(u'/?')
_FRAGMENT_DELIMS = _ALL_DELIMS - _FRAGMENT_SAFE
_QUERY_SAFE = _UNRESERVED_CHARS | _FRAGMENT_SAFE - set(u'&=+')
_QUERY_DELIMS = _ALL_DELIMS - _QUERY_SAFE
def _make_decode_map(delims, allow_percent=False):
ret = dict(_HEX_CHAR_MAP)
if not allow_percent:
delims = set(delims) | set([u'%'])
for delim in delims:
_hexord = '{0:02X}'.format(ord(delim)).encode('ascii')
_hexord_lower = _hexord.lower()
ret.pop(_hexord)
if _hexord != _hexord_lower:
ret.pop(_hexord_lower)
return ret
def _make_quote_map(safe_chars):
ret = {}
# v is included in the dict for py3 mostly, because bytestrings
# are iterables of ints, of course!
for i, v in zip(range(256), range(256)):
c = chr(v)
if c in safe_chars:
ret[c] = ret[v] = c
else:
ret[c] = ret[v] = '%{0:02X}'.format(i)
return ret
_USERINFO_PART_QUOTE_MAP = _make_quote_map(_USERINFO_SAFE)
_USERINFO_DECODE_MAP = _make_decode_map(_USERINFO_DELIMS)
_PATH_PART_QUOTE_MAP = _make_quote_map(_PATH_SAFE)
_SCHEMELESS_PATH_PART_QUOTE_MAP = _make_quote_map(_SCHEMELESS_PATH_SAFE)
_PATH_DECODE_MAP = _make_decode_map(_PATH_DELIMS)
_QUERY_PART_QUOTE_MAP = _make_quote_map(_QUERY_SAFE)
_QUERY_DECODE_MAP = _make_decode_map(_QUERY_DELIMS)
_FRAGMENT_QUOTE_MAP = _make_quote_map(_FRAGMENT_SAFE)
_FRAGMENT_DECODE_MAP = _make_decode_map(_FRAGMENT_DELIMS)
_ROOT_PATHS = frozenset(((), (u'',)))
def _encode_path_part(text, maximal=True):
"Percent-encode a single segment of a URL path."
if maximal:
bytestr = normalize('NFC', text).encode('utf8')
return u''.join([_PATH_PART_QUOTE_MAP[b] for b in bytestr])
return u''.join([_PATH_PART_QUOTE_MAP[t] if t in _PATH_DELIMS else t
for t in text])
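# Illustrative behavior: a space is percent-encoded while a sub-delimiter
# such as ';' is preserved:
#   _encode_path_part(u'a b;c') == u'a%20b;c'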
def _encode_schemeless_path_part(text, maximal=True):
"""Percent-encode the first segment of a URL path for a URL without a
scheme specified.
"""
if maximal:
bytestr = normalize('NFC', text).encode('utf8')
return u''.join([_SCHEMELESS_PATH_PART_QUOTE_MAP[b] for b in bytestr])
return u''.join([_SCHEMELESS_PATH_PART_QUOTE_MAP[t]
if t in _SCHEMELESS_PATH_DELIMS else t for t in text])
def _encode_path_parts(text_parts, rooted=False, has_scheme=True,
has_authority=True, joined=True, maximal=True):
"""
Percent-encode a tuple of path parts into a complete path.
Setting *maximal* to False percent-encodes only the reserved
characters that are syntactically necessary for serialization,
preserving any IRI-style textual data.
Leaving *maximal* set to its default True percent-encodes
everything required to convert a portion of an IRI to a portion of
a URI.
RFC 3986 3.3:
If a URI contains an authority component, then the path component
must either be empty or begin with a slash ("/") character. If a URI
does not contain an authority component, then the path cannot begin
with two slash characters ("//"). In addition, a URI reference
(Section 4.1) may be a relative-path reference, in which case the
first path segment cannot contain a colon (":") character.
"""
if not text_parts:
return u'' if joined else text_parts
if rooted:
text_parts = (u'',) + text_parts
# elif has_authority and text_parts:
# raise Exception('see rfc above') # TODO: too late to fail like this?
encoded_parts = []
if has_scheme:
encoded_parts = [_encode_path_part(part, maximal=maximal)
if part else part for part in text_parts]
else:
encoded_parts = [_encode_schemeless_path_part(text_parts[0])]
encoded_parts.extend([_encode_path_part(part, maximal=maximal)
if part else part for part in text_parts[1:]])
if joined:
return u'/'.join(encoded_parts)
return tuple(encoded_parts)
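# Illustrative behavior: rooting prepends an empty segment, so joining
# produces a leading slash:
#   _encode_path_parts((u'a b', u'c'), rooted=True) == u'/a%20b/c'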
def _encode_query_part(text, maximal=True):
"""
Percent-encode a single query string key or value.
"""
if maximal:
bytestr = normalize('NFC', text).encode('utf8')
return u''.join([_QUERY_PART_QUOTE_MAP[b] for b in bytestr])
return u''.join([_QUERY_PART_QUOTE_MAP[t] if t in _QUERY_DELIMS else t
for t in text])
def _encode_fragment_part(text, maximal=True):
"""Quote the fragment part of the URL. Fragments don't have
subdelimiters, so the whole URL fragment can be passed.
"""
if maximal:
bytestr = normalize('NFC', text).encode('utf8')
return u''.join([_FRAGMENT_QUOTE_MAP[b] for b in bytestr])
return u''.join([_FRAGMENT_QUOTE_MAP[t] if t in _FRAGMENT_DELIMS else t
for t in text])
def _encode_userinfo_part(text, maximal=True):
"""Quote special characters in either the username or password
section of the URL.
"""
if maximal:
bytestr = normalize('NFC', text).encode('utf8')
return u''.join([_USERINFO_PART_QUOTE_MAP[b] for b in bytestr])
return u''.join([_USERINFO_PART_QUOTE_MAP[t] if t in _USERINFO_DELIMS
else t for t in text])
# This port list painstakingly curated by hand searching through
# https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
# and
# https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml
SCHEME_PORT_MAP = {'acap': 674, 'afp': 548, 'dict': 2628, 'dns': 53,
'file': None, 'ftp': 21, 'git': 9418, 'gopher': 70,
'http': 80, 'https': 443, 'imap': 143, 'ipp': 631,
'ipps': 631, 'irc': 194, 'ircs': 6697, 'ldap': 389,
'ldaps': 636, 'mms': 1755, 'msrp': 2855, 'msrps': None,
'mtqp': 1038, 'nfs': 111, 'nntp': 119, 'nntps': 563,
'pop': 110, 'prospero': 1525, 'redis': 6379, 'rsync': 873,
'rtsp': 554, 'rtsps': 322, 'rtspu': 5005, 'sftp': 22,
'smb': 445, 'snmp': 161, 'ssh': 22, 'steam': None,
'svn': 3690, 'telnet': 23, 'ventrilo': 3784, 'vnc': 5900,
'wais': 210, 'ws': 80, 'wss': 443, 'xmpp': None}
# This list of schemes that don't use authorities is also from the link above.
NO_NETLOC_SCHEMES = set(['urn', 'about', 'bitcoin', 'blob', 'data', 'geo',
'magnet', 'mailto', 'news', 'pkcs11',
'sip', 'sips', 'tel'])
# As of Mar 11, 2017, there were 44 netloc schemes, and 13 non-netloc
def register_scheme(text, uses_netloc=True, default_port=None):
"""Registers new scheme information, resulting in correct port and
slash behavior from the URL object. There are dozens of standard
schemes preregistered, so this function is mostly meant for
proprietary internal customizations or stopgaps on missing
standards information. If a scheme seems to be missing, please
`file an issue`_!
Args:
text (unicode): Text representing the scheme.
(the 'http' in 'http://hatnote.com')
uses_netloc (bool): Does the scheme support specifying a
network host? For instance, "http" does, "mailto" does
not. Defaults to True.
default_port (int): The default port, if any, for netloc-using
schemes.
.. _file an issue: https://github.com/mahmoud/hyperlink/issues
"""
text = text.lower()
if default_port is not None:
try:
default_port = int(default_port)
except (ValueError, TypeError):
raise ValueError('default_port expected integer or None, not %r'
% (default_port,))
if uses_netloc is True:
SCHEME_PORT_MAP[text] = default_port
elif uses_netloc is False:
if default_port is not None:
raise ValueError('unexpected default port while specifying'
' non-netloc scheme: %r' % default_port)
NO_NETLOC_SCHEMES.add(text)
else:
raise ValueError('uses_netloc expected bool, not: %r' % uses_netloc)
return
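# Hedged usage sketch (the scheme and port below are illustrative, not part
# of the preregistered tables above):
#   register_scheme(u'gemini', uses_netloc=True, default_port=1965)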
def scheme_uses_netloc(scheme, default=None):
"""Whether or not a URL uses :code:`:` or :code:`://` to separate the
scheme from the rest of the URL depends on the scheme's own
standard definition. There is no way to infer this behavior
from other parts of the URL. A scheme either supports network
locations or it does not.
The URL type's approach to this is to check for explicitly
registered schemes, with common schemes like HTTP
preregistered. This is the same approach taken by
:mod:`urlparse`.
URL adds two additional heuristics if the scheme as a whole is
not registered. First, it attempts to check the subpart of the
scheme after the last ``+`` character. This adds intuitive
behavior for schemes like ``git+ssh``. Second, if a URL with
an unrecognized scheme is loaded, it will maintain the
separator it sees.
"""
if not scheme:
return False
scheme = scheme.lower()
if scheme in SCHEME_PORT_MAP:
return True
if scheme in NO_NETLOC_SCHEMES:
return False
if scheme.split('+')[-1] in SCHEME_PORT_MAP:
return True
return default
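# Illustrative behavior:
#   scheme_uses_netloc(u'http')    -> True   (registered with a default port)
#   scheme_uses_netloc(u'mailto')  -> False  (registered as non-netloc)
#   scheme_uses_netloc(u'git+ssh') -> True   (falls back to the 'ssh' subpart)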
class URLParseError(ValueError):
"""Exception inheriting from :exc:`ValueError`, raised when failing to
parse a URL. Mostly raised on invalid ports and IPv6 addresses.
"""
pass
def _optional(argument, default):
if argument is _UNSET:
return default
else:
return argument
def _typecheck(name, value, *types):
"""
Check that the given *value* is one of the given *types*, or raise an
exception describing the problem using *name*.
"""
if not types:
raise ValueError('expected one or more types, maybe use _textcheck?')
if not isinstance(value, types):
raise TypeError("expected %s for %s, got %r"
% (" or ".join([t.__name__ for t in types]),
name, value))
return value
def _textcheck(name, value, delims=frozenset(), nullable=False):
if not isinstance(value, unicode):
if nullable and value is None:
return value # used by query string values
else:
str_name = "unicode" if bytes is str else "str"
exp = str_name + ' or NoneType' if nullable else str_name
raise TypeError('expected %s for %s, got %r' % (exp, name, value))
if delims and set(value) & set(delims): # TODO: test caching into regexes
raise ValueError('one or more reserved delimiters %s present in %s: %r'
% (''.join(delims), name, value))
return value
def _decode_userinfo_part(text):
return _percent_decode(text, _decode_map=_USERINFO_DECODE_MAP)
def _decode_path_part(text):
return _percent_decode(text, _decode_map=_PATH_DECODE_MAP)
def _decode_query_part(text):
return _percent_decode(text, _decode_map=_QUERY_DECODE_MAP)
def _decode_fragment_part(text):
return _percent_decode(text, _decode_map=_FRAGMENT_DECODE_MAP)
def _percent_decode(text, _decode_map=_HEX_CHAR_MAP):
"""Convert percent-encoded text characters to their normal,
human-readable equivalents.
All characters in the input text must be valid ASCII. All special
characters underlying the values in the percent-encoding must be
valid UTF-8.
Only called by field-tailored variants, e.g.,
:func:`_decode_path_part`, as every percent-encodable part of the
URL has characters which should not be percent decoded.
>>> _percent_decode(u'abc%20def')
u'abc def'
Args:
text (unicode): The ASCII text with percent-encoding present.
Returns:
unicode: The percent-decoded version of *text*, with UTF-8
decoding applied.
"""
try:
quoted_bytes = text.encode("ascii")
except UnicodeEncodeError:
return text
bits = quoted_bytes.split(b'%')
if len(bits) == 1:
return text
res = [bits[0]]
append = res.append
for item in bits[1:]:
try:
append(_decode_map[item[:2]])
append(item[2:])
except KeyError:
append(b'%')
append(item)
unquoted_bytes = b''.join(res)
try:
return unquoted_bytes.decode("utf-8")
except UnicodeDecodeError:
return text
def _resolve_dot_segments(path):
"""Normalize the URL path by resolving segments of '.' and '..'. For
more details, see `RFC 3986 section 5.2.4, Remove Dot Segments`_.
Args:
path (list): path segments in string form
Returns:
list: a new list of path segments with the '.' and '..' elements
removed and resolved.
.. _RFC 3986 section 5.2.4, Remove Dot Segments: https://tools.ietf.org/html/rfc3986#section-5.2.4
"""
segs = []
for seg in path:
if seg == u'.':
pass
elif seg == u'..':
if segs:
segs.pop()
else:
segs.append(seg)
if list(path[-1:]) in ([u'.'], [u'..']):
segs.append(u'')
return segs
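# Illustrative behavior: '..' pops a segment, '.' is dropped, and a trailing
# dot segment leaves an empty final element:
#   _resolve_dot_segments([u'a', u'b', u'..', u'c', u'.']) == [u'a', u'c', u'']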
def parse_host(host):
"""Parse the host into a tuple of ``(family, host)``, where family
is the appropriate :mod:`socket` module constant when the host is
an IP address. Family is ``None`` when the host is not an IP.
Will raise :class:`URLParseError` on invalid IPv6 constants.
Returns:
tuple: family (socket constant or None), host (string)
>>> parse_host('googlewebsite.com') == (None, 'googlewebsite.com')
True
>>> parse_host('::1') == (socket.AF_INET6, '::1')
True
>>> parse_host('192.168.1.1') == (socket.AF_INET, '192.168.1.1')
True
"""
if not host:
return None, u''
if u':' in host:
try:
inet_pton(socket.AF_INET6, host)
except socket.error as se:
raise URLParseError('invalid IPv6 host: %r (%r)' % (host, se))
except UnicodeEncodeError:
pass # TODO: this can't be a real host right?
else:
family = socket.AF_INET6
return family, host
try:
inet_pton(socket.AF_INET, host)
except (socket.error, UnicodeEncodeError):
family = None # not an IP
else:
family = socket.AF_INET
return family, host
class URL(object):
"""From blogs to billboards, URLs are so common, that it's easy to
overlook their complexity and power. With hyperlink's
:class:`URL` type, working with URLs doesn't have to be hard.
URLs are made of many parts. Most of these parts are officially
named in `RFC 3986`_ and this diagram may prove handy in identifying
them::
foo://user:pass@example.com:8042/over/there?name=ferret#nose
\_/ \_______/ \_________/ \__/\_________/ \_________/ \__/
| | | | | | |
scheme userinfo host port path query fragment
While :meth:`~URL.from_text` is used for parsing whole URLs, the
:class:`URL` constructor builds a URL from the individual
components, like so::
>>> from hyperlink import URL
>>> url = URL(scheme=u'https', host=u'example.com', path=[u'hello', u'world'])
>>> print(url.to_text())
https://example.com/hello/world
The constructor runs basic type checks. All strings are expected
to be decoded (:class:`unicode` in Python 2). All arguments are
optional, defaulting to appropriately empty values. A full list of
constructor arguments is below.
Args:
scheme (unicode): The text name of the scheme.
host (unicode): The host portion of the network location
port (int): The port part of the network location. If
``None`` or no port is passed, the port will default to
the default port of the scheme, if it is known. See the
``SCHEME_PORT_MAP`` and :func:`register_default_port`
for more info.
path (tuple): A tuple of strings representing the
slash-separated parts of the path.
query (tuple): The query parameters, as a tuple of
key-value pairs.
fragment (unicode): The fragment part of the URL.
rooted (bool): Whether or not the path begins with a slash.
userinfo (unicode): The username or colon-separated
username:password pair.
uses_netloc (bool): Indicates whether two slashes appear
between the scheme and the host (``http://eg.com`` vs
``mailto:e@g.com``). Set automatically based on scheme.
All of these parts are also exposed as read-only attributes of
URL instances, along with several useful methods.
.. _RFC 3986: https://tools.ietf.org/html/rfc3986
.. _RFC 3987: https://tools.ietf.org/html/rfc3987
"""
def __init__(self, scheme=None, host=None, path=(), query=(), fragment=u'',
port=None, rooted=None, userinfo=u'', uses_netloc=None):
if host is not None and scheme is None:
scheme = u'http' # TODO: why
if port is None:
port = SCHEME_PORT_MAP.get(scheme)
if host and query and not path:
# per RFC 3986 6.2.3, "a URI that uses the generic syntax
# for authority with an empty path should be normalized to
# a path of '/'."
path = (u'',)
# Now that we're done detecting whether they were passed, we can set
# them to their defaults:
if scheme is None:
scheme = u''
if host is None:
host = u''
if rooted is None:
rooted = bool(host)
# Set attributes.
self._scheme = _textcheck("scheme", scheme)
if self._scheme:
if not _SCHEME_RE.match(self._scheme):
                raise ValueError('invalid scheme: %r. Only alphanumeric, "+",'
                                 ' "-", and "." allowed. Did you mean to call'
                                 ' %s.from_text()?'
                                 % (self._scheme, self.__class__.__name__))
_, self._host = parse_host(_textcheck('host', host, '/?#@'))
if isinstance(path, unicode):
raise TypeError("expected iterable of text for path, not: %r"
% (path,))
self._path = tuple((_textcheck("path segment", segment, '/?#')
for segment in path))
self._query = tuple(
(_textcheck("query parameter name", k, '&=#'),
_textcheck("query parameter value", v, '&#', nullable=True))
for (k, v) in query
)
self._fragment = _textcheck("fragment", fragment)
self._port = _typecheck("port", port, int, NoneType)
self._rooted = _typecheck("rooted", rooted, bool)
self._userinfo = _textcheck("userinfo", userinfo, '/?#@')
uses_netloc = scheme_uses_netloc(self._scheme, uses_netloc)
self._uses_netloc = _typecheck("uses_netloc",
uses_netloc, bool, NoneType)
return
@property
def scheme(self):
"""The scheme is a string, and the first part of an absolute URL, the
part before the first colon, and the part which defines the
semantics of the rest of the URL. Examples include "http",
"https", "ssh", "file", "mailto", and many others. See
:func:`~hyperlink.register_scheme()` for more info.
"""
return self._scheme
@property
def host(self):
"""The host is a string, and the second standard part of an absolute
URL. When present, a valid host must be a domain name, or an
IP (v4 or v6). It occurs before the first slash, or the second
colon, if a :attr:`~hyperlink.URL.port` is provided.
"""
return self._host
@property
def port(self):
"""The port is an integer that is commonly used in connecting to the
:attr:`host`, and almost never appears without it.
When not present in the original URL, this attribute defaults
to the scheme's default port. If the scheme's default port is
not known, and the port is not provided, this attribute will
be set to None.
>>> URL.from_text(u'http://example.com/pa/th').port
80
>>> URL.from_text(u'foo://example.com/pa/th').port
>>> URL.from_text(u'foo://example.com:8042/pa/th').port
8042
.. note::
            Per the standard, when the port is the same as the scheme's
default port, it will be omitted in the text URL.
"""
return self._port
@property
def path(self):
"""A tuple of strings, created by splitting the slash-separated
hierarchical path. Started by the first slash after the host,
terminated by a "?", which indicates the start of the
:attr:`~hyperlink.URL.query` string.
"""
return self._path
@property
def query(self):
"""Tuple of pairs, created by splitting the ampersand-separated
mapping of keys and optional values representing
non-hierarchical data used to identify the resource. Keys are
always strings. Values are strings when present, or None when
missing.
For more operations on the mapping, see
:meth:`~hyperlink.URL.get()`, :meth:`~hyperlink.URL.add()`,
:meth:`~hyperlink.URL.set()`, and
:meth:`~hyperlink.URL.delete()`.
"""
return self._query
@property
def fragment(self):
"""A string, the last part of the URL, indicated by the first "#"
after the :attr:`~hyperlink.URL.path` or
:attr:`~hyperlink.URL.query`. Enables indirect identification
of a secondary resource, like an anchor within an HTML page.
"""
return self._fragment
@property
def rooted(self):
"""Whether or not the path starts with a forward slash (``/``).
This is taken from the terminology in the BNF grammar,
        specifically the "path-rootless" rule, since "absolute path"
and "absolute URI" are somewhat ambiguous. :attr:`path` does
not contain the implicit prefixed ``"/"`` since that is
somewhat awkward to work with.
"""
return self._rooted
@property
def userinfo(self):
"""The colon-separated string forming the username-password
combination.
"""
return self._userinfo
@property
    def uses_netloc(self):
        """Whether two slashes appear between the scheme and the host
        (``http://eg.com`` vs ``mailto:e@g.com``), as determined by the
        scheme's registration.
        """
        return self._uses_netloc
@property
def user(self):
"""
The user portion of :attr:`~hyperlink.URL.userinfo`.
"""
return self.userinfo.split(u':')[0]
def authority(self, with_password=False, **kw):
"""Compute and return the appropriate host/port/userinfo combination.
>>> url = URL.from_text(u'http://user:pass@localhost:8080/a/b?x=y')
>>> url.authority()
u'user:@localhost:8080'
>>> url.authority(with_password=True)
u'user:pass@localhost:8080'
Args:
with_password (bool): Whether the return value of this
                method includes the password in the URL, if it is
set. Defaults to False.
Returns:
str: The authority (network location and user information) portion
of the URL.
"""
# first, a bit of twisted compat
with_password = kw.pop('includeSecrets', with_password)
if kw:
raise TypeError('got unexpected keyword arguments: %r' % kw.keys())
host = self.host
if ':' in host:
hostport = ['[' + host + ']']
else:
hostport = [self.host]
if self.port != SCHEME_PORT_MAP.get(self.scheme):
hostport.append(unicode(self.port))
authority = []
if self.userinfo:
userinfo = self.userinfo
if not with_password and u":" in userinfo:
userinfo = userinfo[:userinfo.index(u":") + 1]
authority.append(userinfo)
authority.append(u":".join(hostport))
return u"@".join(authority)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
for attr in ['scheme', 'userinfo', 'host', 'query',
'fragment', 'port', 'uses_netloc']:
if getattr(self, attr) != getattr(other, attr):
return False
if self.path == other.path or (self.path in _ROOT_PATHS
and other.path in _ROOT_PATHS):
return True
return False
def __ne__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return not self.__eq__(other)
def __hash__(self):
return hash((self.__class__, self.scheme, self.userinfo, self.host,
self.path, self.query, self.fragment, self.port,
self.rooted, self.uses_netloc))
@property
def absolute(self):
"""Whether or not the URL is "absolute". Absolute URLs are complete
enough to resolve to a network resource without being relative
to a base URI.
>>> URL.from_text(u'http://wikipedia.org/').absolute
True
>>> URL.from_text(u'?a=b&c=d').absolute
False
Absolute URLs must have both a scheme and a host set.
"""
return bool(self.scheme and self.host)
def replace(self, scheme=_UNSET, host=_UNSET, path=_UNSET, query=_UNSET,
fragment=_UNSET, port=_UNSET, rooted=_UNSET, userinfo=_UNSET,
uses_netloc=_UNSET):
""":class:`URL` objects are immutable, which means that attributes
are designed to be set only once, at construction. Instead of
modifying an existing URL, one simply creates a copy with the
desired changes.
If any of the following arguments is omitted, it defaults to
the value on the current URL.
Args:
scheme (unicode): The text name of the scheme.
host (unicode): The host portion of the network location
port (int): The port part of the network location.
path (tuple): A tuple of strings representing the
slash-separated parts of the path.
query (tuple): The query parameters, as a tuple of
key-value pairs.
fragment (unicode): The fragment part of the URL.
rooted (bool): Whether or not the path begins with a slash.
userinfo (unicode): The username or colon-separated
username:password pair.
uses_netloc (bool): Indicates whether two slashes appear
between the scheme and the host (``http://eg.com`` vs
``mailto:e@g.com``)
Returns:
URL: a copy of the current :class:`URL`, with new values for
parameters passed.
"""
return self.__class__(
scheme=_optional(scheme, self.scheme),
host=_optional(host, self.host),
path=_optional(path, self.path),
query=_optional(query, self.query),
fragment=_optional(fragment, self.fragment),
port=_optional(port, self.port),
rooted=_optional(rooted, self.rooted),
userinfo=_optional(userinfo, self.userinfo),
uses_netloc=_optional(uses_netloc, self.uses_netloc)
)
@classmethod
def from_text(cls, text):
"""Whereas the :class:`URL` constructor is useful for constructing
URLs from parts, :meth:`~URL.from_text` supports parsing whole
URLs from their string form::
>>> URL.from_text(u'http://example.com')
URL.from_text(u'http://example.com')
>>> URL.from_text(u'?a=b&x=y')
URL.from_text(u'?a=b&x=y')
As you can see above, it's also used as the :func:`repr` of
:class:`URL` objects. The natural counterpart to
:func:`~URL.to_text()`. This method only accepts *text*, so be
sure to decode those bytestrings.
Args:
text (unicode): A valid URL string.
Returns:
URL: The structured object version of the parsed string.
.. note::
Somewhat unexpectedly, URLs are a far more permissive
format than most would assume. Many strings which don't
look like URLs are still valid URLs. As a result, this
method only raises :class:`URLParseError` on invalid port
and IPv6 values in the host portion of the URL.
"""
um = _URL_RE.match(_textcheck('text', text))
try:
gs = um.groupdict()
except AttributeError:
raise URLParseError('could not parse url: %r' % text)
au_text = gs['authority'] or u''
au_m = _AUTHORITY_RE.match(au_text)
try:
au_gs = au_m.groupdict()
except AttributeError:
raise URLParseError('invalid authority %r in url: %r'
% (au_text, text))
if au_gs['bad_host']:
            raise URLParseError('invalid host %r in url: %r'
                                % (au_gs['bad_host'], text))
userinfo = au_gs['userinfo'] or u''
host = au_gs['ipv6_host'] or au_gs['plain_host']
port = au_gs['port']
if port is not None:
try:
port = int(port)
except ValueError:
if not port: # TODO: excessive?
raise URLParseError('port must not be empty: %r' % au_text)
raise URLParseError('expected integer for port, not %r' % port)
scheme = gs['scheme'] or u''
fragment = gs['fragment'] or u''
uses_netloc = bool(gs['_netloc_sep'])
if gs['path']:
path = gs['path'].split(u"/")
if not path[0]:
path.pop(0)
rooted = True
else:
rooted = False
else:
path = ()
rooted = bool(au_text)
if gs['query']:
query = ((qe.split(u"=", 1) if u'=' in qe else (qe, None))
for qe in gs['query'].split(u"&"))
else:
query = ()
return cls(scheme, host, path, query, fragment, port,
rooted, userinfo, uses_netloc)
def child(self, *segments):
"""Make a new :class:`URL` where the given path segments are a child
of this URL, preserving other parts of the URL, including the
query string and fragment.
For example::
>>> url = URL.from_text(u'http://localhost/a/b?x=y')
>>> child_url = url.child(u"c", u"d")
>>> child_url.to_text()
u'http://localhost/a/b/c/d?x=y'
Args:
segments (unicode): Additional parts to be joined and added to
the path, like :func:`os.path.join`. Special characters
in segments will be percent encoded.
Returns:
URL: A copy of the current URL with the extra path segments.
"""
segments = [_textcheck('path segment', s) for s in segments]
new_segs = _encode_path_parts(segments, joined=False, maximal=False)
new_path = self.path[:-1 if (self.path and self.path[-1] == u'')
else None] + new_segs
return self.replace(path=new_path)
def sibling(self, segment):
"""Make a new :class:`URL` with a single path segment that is a
sibling of this URL path.
Args:
segment (unicode): A single path segment.
Returns:
URL: A copy of the current URL with the last path segment
replaced by *segment*. Special characters such as
``/?#`` will be percent encoded.
"""
_textcheck('path segment', segment)
new_path = self.path[:-1] + (_encode_path_part(segment),)
return self.replace(path=new_path)
def click(self, href=u''):
"""Resolve the given URL relative to this URL.
The resulting URI should match what a web browser would
generate if you visited the current URL and clicked on *href*.
>>> url = URL.from_text(u'http://blog.hatnote.com/')
>>> url.click(u'/post/155074058790').to_text()
u'http://blog.hatnote.com/post/155074058790'
>>> url = URL.from_text(u'http://localhost/a/b/c/')
>>> url.click(u'../d/./e').to_text()
u'http://localhost/a/b/d/e'
Args:
href (unicode): A string representing a clicked URL.
Return:
URL: A copy of the current URL with navigation logic applied.
For more information, see `RFC 3986 section 5`_.
.. _RFC 3986 section 5: https://tools.ietf.org/html/rfc3986#section-5
"""
_textcheck("relative URL", href)
if href:
clicked = URL.from_text(href)
if clicked.absolute:
return clicked
else:
clicked = self
query = clicked.query
if clicked.scheme and not clicked.rooted:
# Schemes with relative paths are not well-defined. RFC 3986 calls
# them a "loophole in prior specifications" that should be avoided,
# or supported only for backwards compatibility.
raise NotImplementedError('absolute URI with rootless path: %r'
% (href,))
else:
if clicked.rooted:
path = clicked.path
elif clicked.path:
path = self.path[:-1] + clicked.path
else:
path = self.path
if not query:
query = self.query
return self.replace(scheme=clicked.scheme or self.scheme,
host=clicked.host or self.host,
port=clicked.port or self.port,
path=_resolve_dot_segments(path),
query=query,
fragment=clicked.fragment)
def to_uri(self):
u"""Make a new :class:`URL` instance with all non-ASCII characters
appropriately percent-encoded. This is useful to do in preparation
for sending a :class:`URL` over a network protocol.
For example::
>>> URL.from_text(u'https://→example.com/foo⇧bar/').to_uri()
URL.from_text(u'https://xn--example-dk9c.com/foo%E2%87%A7bar/')
Returns:
URL: A new instance with its path segments, query parameters, and
hostname encoded, so that they are all in the standard
US-ASCII range.
"""
new_userinfo = u':'.join([_encode_userinfo_part(p) for p in
self.userinfo.split(':', 1)])
new_path = _encode_path_parts(self.path, has_scheme=bool(self.scheme),
rooted=False, joined=False, maximal=True)
return self.replace(
userinfo=new_userinfo,
host=self.host.encode("idna").decode("ascii"),
path=new_path,
query=tuple([tuple(_encode_query_part(x, maximal=True)
if x is not None else None
for x in (k, v))
for k, v in self.query]),
fragment=_encode_fragment_part(self.fragment, maximal=True)
)
def to_iri(self):
u"""Make a new :class:`URL` instance with all but a few reserved
characters decoded into human-readable format.
Percent-encoded Unicode and IDNA-encoded hostnames are
decoded, like so::
>>> url = URL.from_text(u'https://xn--example-dk9c.com/foo%E2%87%A7bar/')
>>> print(url.to_iri().to_text())
https://→example.com/foo⇧bar/
.. note::
As a general Python issue, "narrow" (UCS-2) builds of
Python may not be able to fully decode certain URLs, and
            in those cases, this method will return a best-effort,
partially-decoded, URL which is still valid. This issue
does not affect any Python builds 3.4+.
Returns:
URL: A new instance with its path segments, query parameters, and
hostname decoded for display purposes.
"""
new_userinfo = u':'.join([_decode_userinfo_part(p) for p in
self.userinfo.split(':', 1)])
try:
asciiHost = self.host.encode("ascii")
except UnicodeEncodeError:
textHost = self.host
else:
try:
textHost = asciiHost.decode("idna")
except ValueError:
# only reached on "narrow" (UCS-2) Python builds <3.4, see #7
textHost = self.host
return self.replace(userinfo=new_userinfo,
host=textHost,
path=[_decode_path_part(segment)
for segment in self.path],
query=[tuple(_decode_query_part(x)
if x is not None else None
for x in (k, v))
for k, v in self.query],
fragment=_decode_fragment_part(self.fragment))
def to_text(self, with_password=False):
"""Render this URL to its textual representation.
By default, the URL text will *not* include a password, if one
is set. RFC 3986 considers using URLs to represent such
sensitive information as deprecated. Quoting from RFC 3986,
`section 3.2.1`:
"Applications should not render as clear text any data after the
first colon (":") character found within a userinfo subcomponent
unless the data after the colon is the empty string (indicating no
password)."
Args:
with_password (bool): Whether or not to include the
password in the URL text. Defaults to False.
Returns:
str: The serialized textual representation of this URL,
such as ``u"http://example.com/some/path?some=query"``.
The natural counterpart to :class:`URL.from_text()`.
.. _section 3.2.1: https://tools.ietf.org/html/rfc3986#section-3.2.1
"""
scheme = self.scheme
authority = self.authority(with_password)
path = _encode_path_parts(self.path,
rooted=self.rooted,
has_scheme=bool(scheme),
has_authority=bool(authority),
maximal=False)
query_string = u'&'.join(
u'='.join((_encode_query_part(x, maximal=False)
for x in ([k] if v is None else [k, v])))
for (k, v) in self.query)
fragment = self.fragment
parts = []
_add = parts.append
if scheme:
_add(scheme)
_add(':')
if authority:
_add('//')
_add(authority)
elif (scheme and path[:2] != '//' and self.uses_netloc):
_add('//')
if path:
if scheme and authority and path[:1] != '/':
_add('/') # relpaths with abs authorities auto get '/'
_add(path)
if query_string:
_add('?')
_add(query_string)
if fragment:
_add('#')
_add(fragment)
return u''.join(parts)
def __repr__(self):
"""Convert this URL to an representation that shows all of its
constituent parts, as well as being a valid argument to
:func:`eval`.
"""
return '%s.from_text(%r)' % (self.__class__.__name__, self.to_text())
# # Begin Twisted Compat Code
asURI = to_uri
asIRI = to_iri
@classmethod
def fromText(cls, s):
return cls.from_text(s)
def asText(self, includeSecrets=False):
return self.to_text(with_password=includeSecrets)
def __dir__(self):
try:
ret = object.__dir__(self)
except AttributeError:
# object.__dir__ == AttributeError # pdw for py2
ret = dir(self.__class__) + list(self.__dict__.keys())
ret = sorted(set(ret) - set(['fromText', 'asURI', 'asIRI', 'asText']))
return ret
# # End Twisted Compat Code
def add(self, name, value=None):
"""Make a new :class:`URL` instance with a given query argument,
*name*, added to it with the value *value*, like so::
>>> URL.from_text(u'https://example.com/?x=y').add(u'x')
URL.from_text(u'https://example.com/?x=y&x')
>>> URL.from_text(u'https://example.com/?x=y').add(u'x', u'z')
URL.from_text(u'https://example.com/?x=y&x=z')
Args:
name (unicode): The name of the query parameter to add. The
part before the ``=``.
value (unicode): The value of the query parameter to add. The
part after the ``=``. Defaults to ``None``, meaning no
value.
Returns:
URL: A new :class:`URL` instance with the parameter added.
"""
return self.replace(query=self.query + ((name, value),))
def set(self, name, value=None):
"""Make a new :class:`URL` instance with the query parameter *name*
        set to *value*. All existing occurrences, if any, are replaced
        by the single name-value pair.
>>> URL.from_text(u'https://example.com/?x=y').set(u'x')
URL.from_text(u'https://example.com/?x')
>>> URL.from_text(u'https://example.com/?x=y').set(u'x', u'z')
URL.from_text(u'https://example.com/?x=z')
Args:
name (unicode): The name of the query parameter to set. The
part before the ``=``.
value (unicode): The value of the query parameter to set. The
part after the ``=``. Defaults to ``None``, meaning no
value.
Returns:
URL: A new :class:`URL` instance with the parameter set.
"""
# Preserve the original position of the query key in the list
q = [(k, v) for (k, v) in self.query if k != name]
idx = next((i for (i, (k, v)) in enumerate(self.query)
if k == name), -1)
q[idx:idx] = [(name, value)]
return self.replace(query=q)
def get(self, name):
"""Get a list of values for the given query parameter, *name*::
>>> url = URL.from_text(u'?x=1&x=2')
>>> url.get('x')
[u'1', u'2']
>>> url.get('y')
[]
If the given *name* is not set, an empty list is returned. A
list is always returned, and this method raises no exceptions.
Args:
name (unicode): The name of the query parameter to get.
Returns:
list: A list of all the values associated with the key, in
string form.
"""
return [value for (key, value) in self.query if name == key]
def remove(self, name):
"""Make a new :class:`URL` instance with all occurrences of the query
parameter *name* removed. No exception is raised if the
parameter is not already set.
Args:
name (unicode): The name of the query parameter to remove.
Returns:
URL: A new :class:`URL` instance with the parameter removed.
"""
return self.replace(query=((k, v) for (k, v) in self.query
if k != name))
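A quick usage sketch for the class above, assuming URL is importable from this module; the expected outputs follow from the docstring examples already shown.
# Sketch only: exercises child/sibling and the query helpers defined above.
base = URL.from_text(u'http://example.com/a/b?x=1')
print(base.child(u'c').to_text())      # -> http://example.com/a/b/c?x=1
print(base.sibling(u'z').to_text())    # -> http://example.com/a/z?x=1
print(base.set(u'x', u'2').to_text())  # -> http://example.com/a/b?x=2
print(base.get(u'x'))                  # -> [u'1']
print(base.remove(u'x').to_text())     # -> http://example.com/a/b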
|
mit
| -4,986,171,367,355,831,000
| 36.006762
| 114
| 0.571382
| false
| 3.945846
| false
| false
| false
|
disqus/sentry-graphite
|
sentry_graphite/__init__.py
|
1
|
3332
|
"""
sentry_graphite
~~~~~~~~~~~~~~~
:copyright: (c) 2012 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from django import forms
from sentry.conf import settings
from sentry.plugins import Plugin
from pystatsd import Client
NOTSET = object()
class GraphiteConfigurationForm(forms.Form):
host = forms.CharField(max_length=64, widget=forms.TextInput(attrs={
'placeholder': 'graphite.local',
}))
    port = forms.IntegerField(max_value=65535, widget=forms.TextInput(attrs={
'placeholder': '8125',
}))
prefix = forms.CharField(max_length=64, widget=forms.TextInput(attrs={
'placeholder': 'sentry',
}))
class GraphiteProcessor(Plugin):
title = 'Graphite'
conf_key = 'graphite'
project_conf_form = GraphiteConfigurationForm
def __init__(self, min_level=NOTSET, include_loggers=NOTSET, exclude_loggers=NOTSET,
host=NOTSET, port=NOTSET, prefix=NOTSET, *args, **kwargs):
super(GraphiteProcessor, self).__init__(*args, **kwargs)
if min_level is NOTSET:
            min_level = settings.GRAPHITE_LEVEL
if include_loggers is NOTSET:
include_loggers = settings.GRAPHITE_INCLUDE_LOGGERS
if exclude_loggers is NOTSET:
exclude_loggers = settings.GRAPHITE_EXCLUDE_LOGGERS
if host is NOTSET:
host = settings.GRAPHITE_HOST
if port is NOTSET:
port = settings.GRAPHITE_PORT
if prefix is NOTSET:
prefix = settings.GRAPHITE_PREFIX
self.min_level = min_level
self.include_loggers = include_loggers
self.exclude_loggers = exclude_loggers
self.host = host
self.port = port
self.prefix = prefix
self.client = Client(host=self.host, port=self.port)
def record_event(self, group, event, fail_silently=True):
project = group.project
host = self.get_option('host', project) or self.host
port = self.get_option('port', project) or self.port
prefix = self.get_option('prefix', project) or self.prefix
key = '.'.join([prefix, event.message_top])
self.client.increment(key)
def should_record(self, group, event):
project = group.project
host = self.get_option('host', project) or self.host
if not host:
return False
port = self.get_option('port', project) or self.port
if not port:
return False
prefix = self.get_option('prefix', project) or self.prefix
if not prefix:
return False
min_level = self.get_option('min_level', project) or self.min_level
if min_level is not None and int(group.level) < min_level:
return False
include_loggers = self.get_option('include_loggers', project) or self.include_loggers
if include_loggers is not None and group.logger not in include_loggers:
return False
exclude_loggers = self.get_option('exclude_loggers', project) or self.exclude_loggers
if exclude_loggers and group.logger in exclude_loggers:
return False
return True
def post_process(self, group, event, is_new, is_sample, **kwargs):
if not self.should_record(group, event):
return
self.record_event(group, event)
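A minimal sketch of what record_event above emits, assuming a reachable statsd host; the host, port, and message name are illustrative stand-ins.
# Sketch only: mirrors the Client(...) / increment(...) calls used above.
from pystatsd import Client

client = Client(host='graphite.local', port=8125)
prefix = 'sentry'
message_top = 'ValueError'  # stand-in for event.message_top
client.increment('.'.join([prefix, message_top]))  # bumps "sentry.ValueError"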
|
apache-2.0
| -4,973,079,646,618,907,000
| 31.990099
| 93
| 0.634754
| false
| 3.95255
| false
| false
| false
|
dionbosschieter/NetworkMonitor
|
samples/sniff.py
|
1
|
3578
|
#! /usr/bin/env python3
"""
Example to sniff all HTTP traffic on eth0 interface:
sudo ./sniff.py eth0 "port 80"
"""
import sys
import pcap
import time
import socket
import struct
if sys.version_info[0] > 2:
IPPROTO = bytes ((0x08, 0x00))
bord = int
else:
IPPROTO = '\x08\x00'
bord = ord
protocols={socket.IPPROTO_TCP:'tcp',
socket.IPPROTO_UDP:'udp',
socket.IPPROTO_ICMP:'icmp'}
def decode_ip_packet(s):
d={}
d['version']=(bord(s[0]) & 0xf0) >> 4
d['header_len']=bord(s[0]) & 0x0f
d['tos']=bord(s[1])
d['total_len']=socket.ntohs(struct.unpack('H',s[2:4])[0])
d['id']=socket.ntohs(struct.unpack('H',s[4:6])[0])
d['flags']=(bord(s[6]) & 0xe0) >> 5
    d['fragment_offset']=socket.ntohs(struct.unpack('H',s[6:8])[0]) & 0x1fff
d['ttl']=bord(s[8])
d['protocol']=bord(s[9])
d['checksum']=socket.ntohs(struct.unpack('H',s[10:12])[0])
d['source_address']=pcap.ntoa(struct.unpack('i',s[12:16])[0])
d['destination_address']=pcap.ntoa(struct.unpack('i',s[16:20])[0])
if d['header_len']>5:
        d['options']=s[20:4*d['header_len']]
else:
d['options']=None
d['data']=s[4*d['header_len']:]
return d
def dumphex(s):
    bytes = map(lambda x: '%.2x' % x, map(bord, s))
    if sys.version_info[0] > 2:
        bytes = list (bytes)
    for i in range(0, len(bytes)//16):
        print ('    %s' % ' '.join(bytes[i*16:(i+1)*16]))
    print ('    %s' % ' '.join(bytes[(len(bytes)//16)*16:]))
def print_packet(pktlen, data, timestamp):
if not data:
return
if data[12:14]==IPPROTO:
decoded=decode_ip_packet(data[14:])
print ('\n%s.%f %s > %s' % (time.strftime('%H:%M',
time.localtime(timestamp)),
timestamp % 60,
decoded['source_address'],
decoded['destination_address']))
for key in ['version', 'header_len', 'tos', 'total_len', 'id',
'flags', 'fragment_offset', 'ttl']:
print (' %s: %d' % (key, decoded[key]))
print (' protocol: %s' % protocols[decoded['protocol']])
print (' header checksum: %d' % decoded['checksum'])
print (' data:')
dumphex(decoded['data'])
if __name__=='__main__':
if len(sys.argv) < 3:
print ('usage: sniff.py <interface> <expr>')
sys.exit(0)
p = pcap.pcapObject()
#dev = pcap.lookupdev()
dev = sys.argv[1]
net, mask = pcap.lookupnet(dev)
# note: to_ms does nothing on linux
p.open_live(dev, 1600, 0, 100)
#p.dump_open('dumpfile')
p.setfilter(' '.join(sys.argv[2:]), 0, 0)
# try-except block to catch keyboard interrupt. Failure to shut
# down cleanly can result in the interface not being taken out of promisc.
# mode
#p.setnonblock(1)
try:
while 1:
p.dispatch(1, print_packet)
# specify 'None' to dump to dumpfile, assuming you have called
# the dump_open method
# p.dispatch(0, None)
# the loop method is another way of doing things
# p.loop(1, print_packet)
# as is the next() method
# p.next() returns a (pktlen, data, timestamp) tuple
# apply(print_packet,p.next())
except KeyboardInterrupt:
    print ('%s' % sys.exc_info()[0])
print ('shutting down')
print ('%d packets received, %d packets dropped, %d packets dropped by interface' % p.stats())
# vim:set ts=4 sw=4 et:
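A self-contained sketch of the parsing logic in decode_ip_packet, using only the standard library (no pcap): pack a 20-byte IPv4 header, then read a few fields back the same way the function does. All field values are illustrative.
import socket
import struct

hdr = struct.pack('!BBHHHBBH4s4s',
                  (4 << 4) | 5,               # version 4, header_len 5
                  0, 40, 0x1234, 0,           # tos, total_len, id, flags/frag
                  64, socket.IPPROTO_TCP, 0,  # ttl, protocol, checksum
                  socket.inet_aton('10.0.0.1'),
                  socket.inet_aton('10.0.0.2'))
version = hdr[0] >> 4                         # same shift as decode_ip_packet
header_len = hdr[0] & 0x0f
total_len = socket.ntohs(struct.unpack('H', hdr[2:4])[0])
print(version, header_len, total_len)         # -> 4 5 40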
|
mit
| -4,214,800,356,882,184,000
| 30.672566
| 102
| 0.536333
| false
| 3.166372
| false
| false
| false
|
butla/PyDAS
|
tests/integrated/test_service.py
|
1
|
6259
|
import copy
import json
import time
from urllib.parse import urljoin
import requests
from data_acquisition.consts import ACQUISITION_PATH, UPLOADER_REQUEST_PATH
from data_acquisition.resources import get_download_callback_url, get_metadata_callback_url
from data_acquisition.acquisition_request import AcquisitionRequest
from tests.consts import (TEST_AUTH_HEADER, TEST_DOWNLOAD_REQUEST, TEST_ACQUISITION_REQ,
TEST_DOWNLOAD_CALLBACK, TEST_METADATA_CALLBACK, TEST_ORG_UUID)
from tests.utils import dict_is_part_of
def test_acquisition_request(das_client, req_store_real, downloader_imposter):
resp_object = das_client.rest.submitAcquisitionRequest(
body=TEST_DOWNLOAD_REQUEST,
_request_options={'headers': {'authorization': TEST_AUTH_HEADER}}).result()
assert req_store_real.get(resp_object.id).state == 'VALIDATED'
request_to_imposter = downloader_imposter.wait_for_requests()[0]
assert json.loads(request_to_imposter.body) == {
'source': TEST_DOWNLOAD_REQUEST['source'],
'callback': get_download_callback_url('https://das.example.com', resp_object.id)
}
assert dict_is_part_of(request_to_imposter.headers, {'authorization': TEST_AUTH_HEADER})
def test_download_callback(req_store_real, das, metadata_parser_imposter):
# arrange
req_store_real.put(TEST_ACQUISITION_REQ)
req_id = TEST_ACQUISITION_REQ.id
# act
response = requests.post(
get_download_callback_url(das.url, req_id=req_id),
json=TEST_DOWNLOAD_CALLBACK,
headers={'Authorization': TEST_AUTH_HEADER})
# assert
assert response.status_code == 200
assert req_store_real.get(req_id).state == 'DOWNLOADED'
request_to_imposter = metadata_parser_imposter.wait_for_requests()[0]
proper_metadata_req = {
'orgUUID': TEST_ACQUISITION_REQ.orgUUID,
'publicRequest': TEST_ACQUISITION_REQ.publicRequest,
'source': TEST_ACQUISITION_REQ.source,
'category': TEST_ACQUISITION_REQ.category,
'title': TEST_ACQUISITION_REQ.title,
'id': req_id,
'idInObjectStore': TEST_DOWNLOAD_CALLBACK['savedObjectId'],
'callbackUrl': get_metadata_callback_url('https://das.example.com', req_id)
}
assert json.loads(request_to_imposter.body) == proper_metadata_req
assert dict_is_part_of(request_to_imposter.headers, {'authorization': TEST_AUTH_HEADER})
def test_metadata_callback(req_store_real, das):
req_store_real.put(TEST_ACQUISITION_REQ)
req_id = TEST_ACQUISITION_REQ.id
response = requests.post(
get_metadata_callback_url(das.url, req_id=req_id),
json=TEST_METADATA_CALLBACK,
headers={'Authorization': TEST_AUTH_HEADER})
assert response.status_code == 200
assert req_store_real.get(req_id).state == 'FINISHED'
def test_uploader_request(req_store_real, das, metadata_parser_imposter):
# arrange
test_uploader_req = dict(TEST_DOWNLOAD_REQUEST)
test_uploader_req.update({
'idInObjectStore': 'fake-guid/000000_1',
'objectStoreId': 'hdfs://some-fake-hdfs-path',
})
# act
response = requests.post(
urljoin(das.url, UPLOADER_REQUEST_PATH),
json=test_uploader_req,
headers={'Authorization': TEST_AUTH_HEADER})
# assert
assert response.status_code == 200
stored_request = req_store_real.get_for_org(test_uploader_req['orgUUID'])[0]
assert stored_request.state == 'DOWNLOADED'
request_to_imposter = metadata_parser_imposter.wait_for_requests()[0]
proper_metadata_req = {
'orgUUID': TEST_ACQUISITION_REQ.orgUUID,
'publicRequest': TEST_ACQUISITION_REQ.publicRequest,
'source': TEST_ACQUISITION_REQ.source,
'category': TEST_ACQUISITION_REQ.category,
'title': TEST_ACQUISITION_REQ.title,
'id': stored_request.id,
'idInObjectStore': test_uploader_req['idInObjectStore'],
'callbackUrl': get_metadata_callback_url('https://das.example.com', stored_request.id)
}
assert json.loads(request_to_imposter.body) == proper_metadata_req
assert dict_is_part_of(request_to_imposter.headers, {'authorization': TEST_AUTH_HEADER})
def test_get_requests(req_store_real, das):
test_requests = [copy.deepcopy(TEST_ACQUISITION_REQ) for _ in range(3)]
test_requests[1].id = 'qzawx'
test_requests[2].orgUUID = 'some-other-org-uuid'
for test_request in test_requests:
req_store_real.put(test_request)
response = requests.get(
urljoin(das.url, ACQUISITION_PATH),
params={'orgs': TEST_ACQUISITION_REQ.orgUUID},
headers={'Authorization': TEST_AUTH_HEADER})
assert response.status_code == 200
returned_requests = [AcquisitionRequest(**req_json) for req_json in response.json()]
assert set(returned_requests) == set(test_requests[:-1])
def test_access_to_forbidden_org(das):
    # Only one organization is allowed by the User Management imposter (bound to "das" fixture).
# That's why this should fail.
response = requests.get(
urljoin(das.url, ACQUISITION_PATH),
params={'orgs': 'org-the-user-has-no-access-to'},
headers={'Authorization': TEST_AUTH_HEADER})
assert response.status_code == 403
def test_access_with_invalid_token(das):
header_with_invalid_signature = TEST_AUTH_HEADER[:-1] + 'P'
response = requests.get(
urljoin(das.url, ACQUISITION_PATH),
params={'orgs': TEST_ORG_UUID},
headers={'Authorization': header_with_invalid_signature})
assert response.status_code == 401
def test_mark_request_failed_on_failed_connection_to_external_service(
das, downloader_imposter, req_store_real):
# simulating that the external service is unavailable
downloader_imposter.destroy()
response = requests.post(
das.url + ACQUISITION_PATH,
json=TEST_DOWNLOAD_REQUEST,
headers={'Authorization': TEST_AUTH_HEADER})
req_id = response.json()['id']
start_time = time.perf_counter()
while True:
if time.perf_counter() - start_time >= 2.0:
assert False, "Request state didn't change to ERROR after some time."
elif req_store_real.get(req_id).state == 'ERROR':
break
time.sleep(0.001)
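The dict_is_part_of helper is imported from tests.utils and not shown in this file; a plausible minimal implementation, consistent with how the assertions above use it, would be a subset check over key/value pairs.
# Hypothetical sketch of tests.utils.dict_is_part_of (not shown above).
def dict_is_part_of(superset, subset):
    """Return True if every key/value pair of `subset` appears in `superset`."""
    return all(superset.get(key) == value for key, value in subset.items())

assert dict_is_part_of({'authorization': 'Bearer x', 'accept': '*/*'},
                       {'authorization': 'Bearer x'})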
|
mit
| 4,670,217,954,056,264,000
| 37.635802
| 96
| 0.681099
| false
| 3.448485
| true
| false
| false
|
pearkes/stripe-hooks
|
shared/parser.py
|
1
|
2270
|
import stripe
from .app import app
from shared.mail import send_notification, send_receipt
from shared.helpers import CleanParseException, format_stripe_object
def parse_hook(payload):
"""Parses a dictionary representation of the stripe webhook
    by requesting a new version of the event by its ID from the stripe
API. This is done for security reasons.
See https://github.com/pearkes/stripe-hooks#security
"""
# Request the event from Stripe, raises stripe.InvalidRequestError if
# not found
event = stripe.Event.retrieve(payload.get("id"))
    # Determine what type of event it is and send any notifications/receipts
determine_event_type(event)
def determine_event_type(event):
"Determines what type of hook an event is"
config = app.config['email']
if config['notifications'].get(event.type):
parse_notification(event)
if config['receipts'].get(event.type):
parse_receipt(event)
def parse_notification(event):
"Parse the details of an event for a notification"
# Format the data for the email
data = format_stripe_object(event.data.object)
send_notification(event.type, data)
def parse_receipt(event):
"Parse the details of an event for a receipt"
    recipient = find_email_address(event.data.object)
    # A CleanParseException tells the webhook to respond
    # successfully with a message back to the stripe dashboard
    if not recipient:
        raise CleanParseException(
            "Can't find customer email address for receipt")
    # Format the data for the email
    data = format_stripe_object(event.data.object)
    send_receipt(event.type, recipient, data)
def find_email_address(stripe_object):
"""Looks for an email in a stripe object, returns an email or None
if there wasn't one found, which may be the case sometimes."""
# Some objects have an "email" field, this makes it easy
email = stripe_object.get("email")
if email:
return email
# Others have a customer ID, we'll need to request
# it from Stripe in this case.
customer = stripe_object.get("customer")
if customer:
full_customer = stripe.Customer.retrieve(customer)
if full_customer.email:
return full_customer.email
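A quick sketch of find_email_address's two easy paths; stripe objects expose a dict-like .get(), so plain dicts stand in for them here (illustrative only).
charge_like = {'email': 'customer@example.com'}
assert find_email_address(charge_like) == 'customer@example.com'

no_contact = {'description': 'no contact info'}   # no email, no customer id
assert find_email_address(no_contact) is None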
|
mit
| 6,396,329,389,875,048,000
| 29.266667
| 73
| 0.701322
| false
| 3.975482
| false
| false
| false
|
thewisenerd/pymoviedb
|
src/pymoviedb/__main__.py
|
1
|
2130
|
#! /usr/bin/env python3
# Copyright (c) 2015 - thewisenerd <thewisenerd@protonmail.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
import argparse
import json
import signal
import sys
from operator import itemgetter, attrgetter
import __cfg
import __pymoviedb
from __pymoviedb import __pymoviedb_init, __pymoviedb_check, __pymoviedb_do
from __helpers import _cfg_list_file, _cfg_err_file
def sigint_handler(signum, frame):
# sort back movies
n = sorted(__pymoviedb.movies.values(), key=itemgetter('base'))
__pymoviedb.movies = {}
for v in n:
__pymoviedb.movies[v['imdbID']] = v
    # write movies
with open(_cfg_list_file(), "w") as f:
json.dump(n, f, indent=2)
# write err
with open(_cfg_err_file(), "w") as f:
f.writelines(sorted(__pymoviedb.err_lines))
# exit gracefully.
exit()
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGTSTP, sigint_handler)
signal.signal(signal.SIGTERM, sigint_handler)
if (__name__ == "__main__"):
global args
parser = argparse.ArgumentParser()
parser.add_argument("action", help="action", choices=["init", "check", "do"])
parser.add_argument("-v", "--verbose", help="be more verbose", action="store_true")
args = parser.parse_args()
if args.verbose:
__cfg.__verbose = True
if args.action == "init":
__pymoviedb_init()
elif args.action == "check":
__pymoviedb_check()
elif args.action == "do":
__pymoviedb_do()
exit()
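The sigint_handler above re-sorts the movie dict by each entry's 'base' path before writing it out; a tiny standalone demo of that rebuild idiom (sample entries are illustrative):
from operator import itemgetter

movies = {'tt2': {'imdbID': 'tt2', 'base': 'b.mkv'},
          'tt1': {'imdbID': 'tt1', 'base': 'a.mkv'}}
ordered = sorted(movies.values(), key=itemgetter('base'))
movies = {v['imdbID']: v for v in ordered}   # rebuilt in sorted order
print(list(movies))   # -> ['tt1', 'tt2']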
|
gpl-2.0
| 1,112,315,185,582,330,800
| 26.662338
| 85
| 0.696244
| false
| 3.317757
| false
| false
| false
|
rush2catch/algorithms-leetcode
|
Trees/leet_104_MaxDepthOfBinaryTree.py
|
1
|
1493
|
# Problem: Maximum Depth of Binary Tree
# Difficulty: Easy
# Category: Tree
# Leetcode 104: https://leetcode.com/problems/maximum-depth-of-binary-tree/#/description
# Description:
"""
Given a binary tree, find its maximum depth.
The maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node.
_______3______
/ \
___5__ ___1__
/ \ / \
6 _2_ 0 8
/ \
7 4
"""
from Tree import BinaryTree
class Solution(object):
def max_depth(self, root):
if root is None:
return 0
return self.get_max(root)
def get_max(self, root):
if root is None:
return 0
if root.leftChild is None and root.rightChild is None:
return 1
return max(self.get_max(root.leftChild), self.get_max(root.rightChild)) + 1
# Construct a binary tree to test
Node_3 = BinaryTree(3)
Node_3.insertLeft(5)
Node_3.insertRight(1)
Node_5 = Node_3.getLeftChild()
Node_1 = Node_3.getRightChild()
Node_5.insertLeft(6)
Node_6 = Node_5.getLeftChild()
Node_5.insertRight(2)
Node_2 = Node_5.getRightChild()
Node_2.insertLeft(7)
Node_7 = Node_2.getLeftChild()
Node_2.insertRight(4)
Node_4 = Node_2.getRightChild()
Node_1.insertLeft(0)
Node_0 = Node_1.getLeftChild()
Node_1.insertRight(8)
Node_8 = Node_1.getRightChild()
obj = Solution()
print(obj.max_depth(Node_3))
print(obj.max_depth(Node_5))
print(obj.max_depth(Node_6))
print(obj.max_depth(Node_6.getLeftChild()))
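An equivalent iterative take on the same problem, level-order with a queue; it reuses the leftChild/rightChild attributes assumed by the Tree module above and the sample tree already built.
from collections import deque

def max_depth_bfs(root):
    if root is None:
        return 0
    depth, queue = 0, deque([root])
    while queue:
        depth += 1
        for _ in range(len(queue)):        # drain one full level
            node = queue.popleft()
            if node.leftChild is not None:
                queue.append(node.leftChild)
            if node.rightChild is not None:
                queue.append(node.rightChild)
    return depth

print(max_depth_bfs(Node_3))   # -> 4, matching the recursive answer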
|
mit
| 8,618,353,277,831,722,000
| 23.491803
| 114
| 0.656397
| false
| 2.827652
| false
| false
| false
|
AlfredoSequeida/sendanywherecli
|
sendanywherecli.py
|
1
|
3713
|
from splinter import Browser
from selenium import webdriver
from pyvirtualdisplay import Display
import pyvirtualdisplay
import pyqrcode
import argparse
import os
import time
import urllib.request
import sys
# allow for graceful exit
try:
# file to send
file_to_send = ''
# file receive code
file_receive_code = ''
# save directory for received file
    save_receiving_dir = ''
# display setup
display = Display(visible=0, size = (1024, 768))
# display start
display.start()
# setting up browser
    browser = Browser(profile = './sendanywherecliprofile')
browser.driver.set_window_size(1024, 768)
# print help if no arguments are given
if len(sys.argv) == 1:
sys.argv.append('-h')
# required arguments
    parser = argparse.ArgumentParser(description='command line interface for using send-anywhere.com')
parser.add_argument('-s','--send', help='send file(s) <file(s)>', required=False)
parser.add_argument('-r','--receive', help='receive a file <key>', required=False)
args = vars(parser.parse_args())
#functions
# waiting for file key
def wait_for_key():
print("Retrieving Share Key ...")
while not browser.find_by_id('key').text:
pass
# countdown timer
def countdown_and_check(t):
while t:
mins, secs = divmod(t, 60)
            # formatting timer
timeformat = '{:02d}:{:02d}'.format(mins, secs)
print(timeformat, end='\r')
time.sleep(1)
t -= 1
            # check if file has been sent, if so, quit
check_for_sent()
def receive_file(key):
#load page
load_page()
receive_key_element = browser.find_by_id('receive-key').first
receive_key_element.click()
receive_key_element.type(key, slowly = False)
browser.find_by_id('receive-btn').first.click()
receive_frame = browser.find_by_id('receive-frame').value
print ('receive_frame: ' + receive_frame)
        # NOTE: the original left the download URL blank; assuming the frame
        # value holds it, fetch with the py3 urllib API into the save dir.
        urllib.request.urlretrieve(receive_frame,
                                   os.path.join(save_receiving_dir, 'file.gz'))
def send_file(file):
# loading page
load_page()
print('Sending File ...')
file_path = os.path.abspath(file)
# uploading file
browser.attach_file('file[]', file_path)
browser.find_by_id('send-btn').first.click()
# waiting for key
wait_for_key()
# getting file retrieval key
file_receive_code = browser.find_by_id('key').first.text
# displaying file retrieval key for user
print ('key: ' + file_receive_code)
# qr code
render_qr(file_receive_code)
# waiting for user to retrieve file
print ('press CTRL + C at any time to exit')
print ('file available for:')
countdown_and_check(600)
def render_qr(receive_key):
qr_code = pyqrcode.create('http://sendanywhe.re/' + receive_key)
print('qr code:' + qr_code.terminal())
# check if file has been sent to quit
def check_for_sent():
sent_result = browser.find_by_id('sent-result').text
if sent_result == 'Completed':
print ("Complete")
quit();
# load web widget
def load_page():
print ('Establishing Connection With Server ...')
browser.visit('https://send-anywhere.com')
if args ['send']:
file_to_send = args ['send']
send_file(file_to_send)
elif args ['receive']:
receive_file(args ['receive'])
    # stopping display
display.stop()
# allow for graceful exit
except KeyboardInterrupt:
quit()
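A standalone sketch of the QR step from render_qr above; pyqrcode renders the share link as a terminal-printable code (the key here is a placeholder, not a real one).
import pyqrcode

receive_key = '123456'   # placeholder; real keys come from send-anywhere
qr = pyqrcode.create('http://sendanywhe.re/' + receive_key)
print(qr.terminal())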
|
mit
| 8,244,624,096,845,569,000
| 23.427632
| 102
| 0.595475
| false
| 3.871741
| false
| false
| false
|
bt3gl/Numerical-Methods-for-Physics
|
homework6_rayleigh-taylor_instability/compressible/unsplitFluxes.py
|
1
|
20308
|
"""
Implementation of the Colella 2nd order unsplit Godunov scheme. This
is a 2-dimensional implementation only. We assume that the grid is
uniform, but it is relatively straightforward to relax this
assumption.
There are several different options for this solver (they are all
discussed in the Colella paper).
limiter = 0 to use no limiting
= 1 to use the 2nd order MC limiter
= 2 to use the 4th order MC limiter
riemann = HLLC to use the HLLC solver
= CGF to use the Colella, Glaz, and Ferguson solver
use_flattening = 1 to use the multidimensional flattening
algorithm at shocks
delta, z0, z1 these are the flattening parameters. The default
are the values listed in Colella 1990.
j+3/2--+---------+---------+---------+
| | | |
j+1 _| | | |
| | | |
| | | |
j+1/2--+---------XXXXXXXXXXX---------+
| X X |
j _| X X |
| X X |
| X X |
j-1/2--+---------XXXXXXXXXXX---------+
| | | |
j-1 _| | | |
| | | |
| | | |
j-3/2--+---------+---------+---------+
| | | | | | |
i-1 i i+1
i-3/2 i-1/2 i+1/2 i+3/2
We wish to solve
U_t + F^x_x + F^y_y = H
we want U_{i+1/2}^{n+1/2} -- the interface values that are input to
the Riemann problem through the faces for each zone.
Taylor expanding yields
n+1/2 dU dU
U = U + 0.5 dx -- + 0.5 dt --
i+1/2,j,L i,j dx dt
dU dF^x dF^y
= U + 0.5 dx -- - 0.5 dt ( ---- + ---- - H )
i,j dx dx dy
dU dF^x dF^y
= U + 0.5 ( dx -- - dt ---- ) - 0.5 dt ---- + 0.5 dt H
i,j dx dx dy
dt dU dF^y
= U + 0.5 dx ( 1 - -- A^x ) -- - 0.5 dt ---- + 0.5 dt H
i,j dx dx dy
dt _ dF^y
= U + 0.5 ( 1 - -- A^x ) DU - 0.5 dt ---- + 0.5 dt H
i,j dx dy
+----------+-----------+ +----+----+ +---+---+
| | |
this is the monotonized this is the source term
central difference term transverse
flux term
There are two components, the central difference in the normal to the
interface, and the transverse flux difference. This is done for the
left and right sides of all 4 interfaces in a zone, which are then
used as input to the Riemann problem, yielding the 1/2 time interface
values,
n+1/2
U
i+1/2,j
Then, the zone average values are updated in the usual finite-volume
way:
n+1 n dt x n+1/2 x n+1/2
U = U + -- { F (U ) - F (U ) }
i,j i,j dx i-1/2,j i+1/2,j
dt y n+1/2 y n+1/2
+ -- { F (U ) - F (U ) }
dy i,j-1/2 i,j+1/2
Updating U_{i,j}:
-- We want to find the state to the left and right (or top and
bottom) of each interface, ex. U_{i+1/2,j,[lr]}^{n+1/2}, and use
them to solve a Riemann problem across each of the four
interfaces.
-- U_{i+1/2,j,[lr]}^{n+1/2} is comprised of two parts, the
computation of the monotonized central differences in the normal
direction (eqs. 2.8, 2.10) and the computation of the transverse
derivatives, which requires the solution of a Riemann problem in
the transverse direction (eqs. 2.9, 2.14).
-- the monotonized central difference part is computed using
the primitive variables.
-- We compute the central difference part in both directions
before doing the transverse flux differencing, since for the
high-order transverse flux implementation, we use these as
the input to the transverse Riemann problem.
"""
import numpy
import vars
import eos
import mesh.reconstruction_f as reconstruction_f
from util import runparams
from util import profile
from util import msg
import interface_f
def unsplitFluxes(myData, dt):
"""
unsplitFluxes returns the fluxes through the x and y interfaces by
doing an unsplit reconstruction of the interface values and then
solving the Riemann problem through all the interfaces at once
currently we assume a gamma-law EOS
grav is the gravitational acceleration in the y-direction
"""
pf = profile.timer("unsplitFluxes")
pf.begin()
myg = myData.grid
#=========================================================================
# compute the primitive variables
#=========================================================================
# Q = (rho, u, v, p)
dens = myData.getVarPtr("density")
xmom = myData.getVarPtr("x-momentum")
ymom = myData.getVarPtr("y-momentum")
ener = myData.getVarPtr("energy")
r = dens
# get the velocities
u = xmom/dens
v = ymom/dens
# get the pressure
e = (ener - 0.5*(xmom**2 + ymom**2)/dens)/dens
p = eos.pres(dens, e)
smallp = 1.e-10
p = p.clip(smallp) # apply a floor to the pressure
#=========================================================================
# compute the flattening coefficients
#=========================================================================
# there is a single flattening coefficient (xi) for all directions
use_flattening = runparams.getParam("compressible.use_flattening")
if (use_flattening):
smallp = 1.e-10
delta = runparams.getParam("compressible.delta")
z0 = runparams.getParam("compressible.z0")
z1 = runparams.getParam("compressible.z1")
xi_x = reconstruction_f.flatten(1, p, u, myg.qx, myg.qy, myg.ng, smallp, delta, z0, z1)
xi_y = reconstruction_f.flatten(2, p, v, myg.qx, myg.qy, myg.ng, smallp, delta, z0, z1)
xi = reconstruction_f.flatten_multid(xi_x, xi_y, p, myg.qx, myg.qy, myg.ng)
else:
xi = 1.0
#=========================================================================
# x-direction
#=========================================================================
# monotonized central differences in x-direction
pfa = profile.timer("limiting")
pfa.begin()
limiter = runparams.getParam("compressible.limiter")
if (limiter == 0):
limitFunc = reconstruction_f.nolimit
elif (limiter == 1):
limitFunc = reconstruction_f.limit2
else:
limitFunc = reconstruction_f.limit4
ldelta_r = xi*limitFunc(1, r, myg.qx, myg.qy, myg.ng)
ldelta_u = xi*limitFunc(1, u, myg.qx, myg.qy, myg.ng)
ldelta_v = xi*limitFunc(1, v, myg.qx, myg.qy, myg.ng)
ldelta_p = xi*limitFunc(1, p, myg.qx, myg.qy, myg.ng)
pfa.end()
# left and right primitive variable states
pfb = profile.timer("interfaceStates")
pfb.begin()
gamma = runparams.getParam("eos.gamma")
V_l = numpy.zeros((myg.qx, myg.qy, vars.nvar), dtype=numpy.float64)
V_r = numpy.zeros((myg.qx, myg.qy, vars.nvar), dtype=numpy.float64)
(V_l, V_r) = interface_f.states(1, myg.qx, myg.qy, myg.ng, myg.dx, dt,
vars.nvar,
gamma,
r, u, v, p,
ldelta_r, ldelta_u, ldelta_v, ldelta_p)
pfb.end()
# transform interface states back into conserved variables
U_xl = numpy.zeros((myg.qx, myg.qy, myData.nvar), dtype=numpy.float64)
U_xr = numpy.zeros((myg.qx, myg.qy, myData.nvar), dtype=numpy.float64)
U_xl[:,:,vars.idens] = V_l[:,:,vars.irho]
U_xl[:,:,vars.ixmom] = V_l[:,:,vars.irho]*V_l[:,:,vars.iu]
U_xl[:,:,vars.iymom] = V_l[:,:,vars.irho]*V_l[:,:,vars.iv]
U_xl[:,:,vars.iener] = eos.rhoe(V_l[:,:,vars.ip]) + \
0.5*V_l[:,:,vars.irho]*(V_l[:,:,vars.iu]**2 + V_l[:,:,vars.iv]**2)
U_xr[:,:,vars.idens] = V_r[:,:,vars.irho]
U_xr[:,:,vars.ixmom] = V_r[:,:,vars.irho]*V_r[:,:,vars.iu]
U_xr[:,:,vars.iymom] = V_r[:,:,vars.irho]*V_r[:,:,vars.iv]
U_xr[:,:,vars.iener] = eos.rhoe(V_r[:,:,vars.ip]) + \
0.5*V_r[:,:,vars.irho]*(V_r[:,:,vars.iu]**2 + V_r[:,:,vars.iv]**2)
#=========================================================================
# y-direction
#=========================================================================
# monotonized central differences in y-direction
pfa.begin()
ldelta_r = xi*limitFunc(2, r, myg.qx, myg.qy, myg.ng)
ldelta_u = xi*limitFunc(2, u, myg.qx, myg.qy, myg.ng)
ldelta_v = xi*limitFunc(2, v, myg.qx, myg.qy, myg.ng)
ldelta_p = xi*limitFunc(2, p, myg.qx, myg.qy, myg.ng)
pfa.end()
# left and right primitive variable states
pfb.begin()
(V_l, V_r) = interface_f.states(2, myg.qx, myg.qy, myg.ng, myg.dy, dt,
vars.nvar,
gamma,
r, u, v, p,
ldelta_r, ldelta_u, ldelta_v, ldelta_p)
pfb.end()
# transform interface states back into conserved variables
U_yl = numpy.zeros((myg.qx, myg.qy, myData.nvar), dtype=numpy.float64)
U_yr = numpy.zeros((myg.qx, myg.qy, myData.nvar), dtype=numpy.float64)
U_yl[:,:,vars.idens] = V_l[:,:,vars.irho]
U_yl[:,:,vars.ixmom] = V_l[:,:,vars.irho]*V_l[:,:,vars.iu]
U_yl[:,:,vars.iymom] = V_l[:,:,vars.irho]*V_l[:,:,vars.iv]
U_yl[:,:,vars.iener] = eos.rhoe(V_l[:,:,vars.ip]) + \
0.5*V_l[:,:,vars.irho]*(V_l[:,:,vars.iu]**2 + V_l[:,:,vars.iv]**2)
U_yr[:,:,vars.idens] = V_r[:,:,vars.irho]
U_yr[:,:,vars.ixmom] = V_r[:,:,vars.irho]*V_r[:,:,vars.iu]
U_yr[:,:,vars.iymom] = V_r[:,:,vars.irho]*V_r[:,:,vars.iv]
U_yr[:,:,vars.iener] = eos.rhoe(V_r[:,:,vars.ip]) + \
0.5*V_r[:,:,vars.irho]*(V_r[:,:,vars.iu]**2 + V_r[:,:,vars.iv]**2)
#=========================================================================
# apply source terms
#=========================================================================
grav = runparams.getParam("compressible.grav")
# ymom_xl[i,j] += 0.5*dt*dens[i-1,j]*grav
U_xl[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iymom] += \
0.5*dt*dens[myg.ilo-2:myg.ihi+1,myg.jlo-1:myg.jhi+2]*grav
U_xl[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iener] += \
0.5*dt*ymom[myg.ilo-2:myg.ihi+1,myg.jlo-1:myg.jhi+2]*grav
# ymom_xr[i,j] += 0.5*dt*dens[i,j]*grav
U_xr[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iymom] += \
0.5*dt*dens[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2]*grav
U_xr[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iener] += \
0.5*dt*ymom[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2]*grav
# ymom_yl[i,j] += 0.5*dt*dens[i,j-1]*grav
U_yl[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iymom] += \
0.5*dt*dens[myg.ilo-1:myg.ihi+2,myg.jlo-2:myg.jhi+1]*grav
U_yl[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iener] += \
0.5*dt*ymom[myg.ilo-1:myg.ihi+2,myg.jlo-2:myg.jhi+1]*grav
# ymom_yr[i,j] += 0.5*dt*dens[i,j]*grav
U_yr[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iymom] += \
0.5*dt*dens[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2]*grav
U_yr[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2,vars.iener] += \
0.5*dt*ymom[myg.ilo-1:myg.ihi+2,myg.jlo-1:myg.jhi+2]*grav
#=========================================================================
# compute transverse fluxes
#=========================================================================
pfc = profile.timer("riemann")
pfc.begin()
riemann = runparams.getParam("compressible.riemann")
if (riemann == "HLLC"):
riemannFunc = interface_f.riemann_hllc
elif (riemann == "CGF"):
riemannFunc = interface_f.riemann_cgf
else:
msg.fail("ERROR: Riemann solver undefined")
F_x = riemannFunc(1, myg.qx, myg.qy, myg.ng,
vars.nvar, vars.idens, vars.ixmom, vars.iymom, vars.iener,
gamma, U_xl, U_xr)
F_y = riemannFunc(2, myg.qx, myg.qy, myg.ng,
vars.nvar, vars.idens, vars.ixmom, vars.iymom, vars.iener,
gamma, U_yl, U_yr)
pfc.end()
#=========================================================================
# construct the interface values of U now
#=========================================================================
"""
finally, we can construct the state perpendicular to the interface
    by adding the central difference part to the transverse flux
difference.
The states that we represent by indices i,j are shown below
(1,2,3,4):
j+3/2--+----------+----------+----------+
| | | |
| | | |
j+1 -+ | | |
| | | |
| | | | 1: U_xl[i,j,:] = U
j+1/2--+----------XXXXXXXXXXXX----------+ i-1/2,j,L
| X X |
| X X |
j -+ 1 X 2 X | 2: U_xr[i,j,:] = U
| X X | i-1/2,j,R
| X 4 X |
j-1/2--+----------XXXXXXXXXXXX----------+
| | 3 | | 3: U_yl[i,j,:] = U
| | | | i,j-1/2,L
j-1 -+ | | |
| | | |
| | | | 4: U_yr[i,j,:] = U
j-3/2--+----------+----------+----------+ i,j-1/2,R
| | | | | | |
i-1 i i+1
i-3/2 i-1/2 i+1/2 i+3/2
remember that the fluxes are stored on the left edge, so
F_x[i,j,:] = F_x
i-1/2, j
F_y[i,j,:] = F_y
i, j-1/2
"""
pfd = profile.timer("transverse flux addition")
pfd.begin()
# U_xl[i,j,:] = U_xl[i,j,:] - 0.5*dt/dy * (F_y[i-1,j+1,:] - F_y[i-1,j,:])
U_xl[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:] += \
- 0.5*dt/myg.dy * (F_y[myg.ilo-3:myg.ihi+1,myg.jlo-1:myg.jhi+3,:] - \
F_y[myg.ilo-3:myg.ihi+1,myg.jlo-2:myg.jhi+2,:])
# U_xr[i,j,:] = U_xr[i,j,:] - 0.5*dt/dy * (F_y[i,j+1,:] - F_y[i,j,:])
U_xr[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:] += \
- 0.5*dt/myg.dy * (F_y[myg.ilo-2:myg.ihi+2,myg.jlo-1:myg.jhi+3,:] - \
F_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:])
# U_yl[i,j,:] = U_yl[i,j,:] - 0.5*dt/dx * (F_x[i+1,j-1,:] - F_x[i,j-1,:])
U_yl[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:] += \
- 0.5*dt/myg.dx * (F_x[myg.ilo-1:myg.ihi+3,myg.jlo-3:myg.jhi+1,:] - \
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-3:myg.jhi+1,:])
# U_yr[i,j,:] = U_yr[i,j,:] - 0.5*dt/dx * (F_x[i+1,j,:] - F_x[i,j,:])
U_yr[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:] += \
- 0.5*dt/myg.dx * (F_x[myg.ilo-1:myg.ihi+3,myg.jlo-2:myg.jhi+2,:] - \
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,:])
pfd.end()
#=========================================================================
# construct the fluxes normal to the interfaces
#=========================================================================
# up until now, F_x and F_y stored the transverse fluxes, now we
# overwrite with the fluxes normal to the interfaces
pfc.begin()
F_x = riemannFunc(1, myg.qx, myg.qy, myg.ng,
vars.nvar, vars.idens, vars.ixmom, vars.iymom, vars.iener,
gamma, U_xl, U_xr)
F_y = riemannFunc(2, myg.qx, myg.qy, myg.ng,
vars.nvar, vars.idens, vars.ixmom, vars.iymom, vars.iener,
gamma, U_yl, U_yr)
pfc.end()
#=========================================================================
# apply artificial viscosity
#=========================================================================
cvisc = runparams.getParam("compressible.cvisc")
(avisco_x, avisco_y) = interface_f.artificial_viscosity( \
myg.qx, myg.qy, myg.ng, myg.dx, myg.dy, \
cvisc, u, v)
# F_x = F_x + avisco_x * (U(i-1,j) - U(i,j))
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.idens] += \
avisco_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(dens[myg.ilo-3:myg.ihi+1,myg.jlo-2:myg.jhi+2] - \
dens[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.ixmom] += \
avisco_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(xmom[myg.ilo-3:myg.ihi+1,myg.jlo-2:myg.jhi+2] - \
xmom[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.iymom] += \
avisco_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(ymom[myg.ilo-3:myg.ihi+1,myg.jlo-2:myg.jhi+2] - \
ymom[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.iener] += \
avisco_x[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(ener[myg.ilo-3:myg.ihi+1,myg.jlo-2:myg.jhi+2] - \
ener[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
# F_y = F_y + avisco_y * (U(i,j-1) - U(i,j))
F_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.idens] += \
avisco_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(dens[myg.ilo-2:myg.ihi+2,myg.jlo-3:myg.jhi+1] - \
dens[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.ixmom] += \
avisco_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(xmom[myg.ilo-2:myg.ihi+2,myg.jlo-3:myg.jhi+1] - \
xmom[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.iymom] += \
avisco_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(ymom[myg.ilo-2:myg.ihi+2,myg.jlo-3:myg.jhi+1] - \
ymom[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
F_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2,vars.iener] += \
avisco_y[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2]* \
(ener[myg.ilo-2:myg.ihi+2,myg.jlo-3:myg.jhi+1] - \
ener[myg.ilo-2:myg.ihi+2,myg.jlo-2:myg.jhi+2])
pf.end()
return F_x, F_y
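How a caller would apply the returned fluxes -- the conservative zone-average update written out in the docstring at the top of this file. A sketch only: the myData.data layout and the loop are assumptions, but the index shifts follow from fluxes being stored on the left edge of each zone.
# F_x[i,j] = F at i-1/2, so U[i,j] += dt/dx (F_x[i,j] - F_x[i+1,j]), etc.
F_x, F_y = unsplitFluxes(myData, dt)
for n in range(vars.nvar):
    U = myData.data[:,:,n]      # assumed layout of the conserved state
    U[myg.ilo:myg.ihi+1,myg.jlo:myg.jhi+1] += \
        dt/myg.dx * (F_x[myg.ilo  :myg.ihi+1,myg.jlo:myg.jhi+1] - \
                     F_x[myg.ilo+1:myg.ihi+2,myg.jlo:myg.jhi+1])[...,n] + \
        dt/myg.dy * (F_y[myg.ilo:myg.ihi+1,myg.jlo  :myg.jhi+1] - \
                     F_y[myg.ilo:myg.ihi+1,myg.jlo+1:myg.jhi+2])[...,n]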
|
apache-2.0
| -1,976,983,250,665,921,000
| 38.664063
| 111
| 0.429043
| false
| 2.826051
| false
| false
| false
|
pmoleri/memorize-accesible
|
speak/voice.py
|
1
|
3767
|
# Speak.activity
# A simple front end to the espeak text-to-speech engine on the XO laptop
# http://wiki.laptop.org/go/Speak
#
# Copyright (C) 2008 Joshua Minor
# This file is part of Speak.activity
#
# Parts of Speak.activity are based on code from Measure.activity
# Copyright (C) 2007 Arjun Sarwal - arjun@laptop.org
#
# Speak.activity is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Speak.activity is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Speak.activity. If not, see <http://www.gnu.org/licenses/>.
import re, os
from gettext import gettext as _
import logging
logger = logging.getLogger('speak')
import espeak
# Lets trick gettext into generating entries for the voice names we expect espeak to have
# If espeak actually has new or different names then they won't get translated, but they
# should still show up in the interface.
expectedVoiceNames = [
_("Brazil"),
_("Swedish"),
_("Icelandic"),
_("Romanian"),
_("Swahili"),
_("Hindi"),
_("Dutch"),
_("Latin"),
_("Hungarian"),
_("Macedonian"),
_("Welsh"),
_("French"),
_("Norwegian"),
_("Russian"),
_("Afrikaans"),
_("Finnish"),
_("Default"),
_("Cantonese"),
_("Scottish"),
_("Greek"),
_("Vietnam"),
_("English"),
_("Lancashire"),
_("Italian"),
_("Portugal"),
_("German"),
_("Whisper"),
_("Croatian"),
_("Czech"),
_("Slovak"),
_("Spanish"),
_("Polish"),
_("Esperanto")
]
_allVoices = {}
_defaultVoice = None
class Voice:
def __init__(self, language, name):
self.language = language
self.name = name
friendlyname = name
friendlyname = friendlyname.replace('-test','')
friendlyname = friendlyname.replace('_test','')
friendlyname = friendlyname.replace('en-','')
friendlyname = friendlyname.replace('english-wisper','whisper')
friendlyname = friendlyname.capitalize()
self.friendlyname = _(friendlyname)
def allVoices():
if _allVoices:
return _allVoices
for language, name in espeak.voices():
voice = Voice(language, name)
_allVoices[voice.friendlyname] = voice
return _allVoices
def by_name(name):
return allVoices().get(name, defaultVoice())
def defaultVoice():
"""Try to figure out the default voice, from the current locale ($LANG).
Fall back to espeak's voice called Default."""
global _defaultVoice
if _defaultVoice:
return _defaultVoice
voices = allVoices()
    def fit(a,b):
        "Compare two language ids to see if they are similar."
        as_ = re.split(r'[^a-z]+', a.lower())
        bs = re.split(r'[^a-z]+', b.lower())
        count = -1
        for count in range(0, min(len(as_),len(bs))):
            if as_[count] != bs[count]:
                count -= 1
                break
        return count
try:
lang = os.environ["LANG"]
except:
lang = ""
best = voices[_("Default")]
for voice in voices.values():
voiceMetric = fit(voice.language, lang)
bestMetric = fit(best.language, lang)
if voiceMetric > bestMetric:
best = voice
print "Best voice for LANG %s seems to be %s %s" % (lang, best.language, best.friendlyname)
_defaultVoice = best
return best
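A standalone demo of the fit() metric above: it counts how many leading language-id components two locale strings share, which is how defaultVoice() picks the voice closest to $LANG. Inputs are illustrative.
import re

def fit_demo(a, b):
    # same comparison as fit() inside defaultVoice(), lifted out to run alone
    as_ = re.split(r'[^a-z]+', a.lower())
    bs = re.split(r'[^a-z]+', b.lower())
    count = -1
    for count in range(0, min(len(as_), len(bs))):
        if as_[count] != bs[count]:
            count -= 1
            break
    return count

print(fit_demo('pt-br', 'pt_BR.UTF-8'))   # -> 1 (language and region match)
print(fit_demo('en-uk', 'es_ES'))         # -> -1 (nothing shared)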
|
gpl-2.0
| -5,171,734,326,967,312,000
| 27.11194
| 95
| 0.617202
| false
| 3.604785
| false
| false
| false
|
all-of-us/raw-data-repository
|
rdr_service/lib_fhir/fhirclient_3_0_0/models/searchparameter.py
|
1
|
7798
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 (http://hl7.org/fhir/StructureDefinition/SearchParameter) on 2017-03-22.
# 2017, SMART Health IT.
from . import domainresource
class SearchParameter(domainresource.DomainResource):
""" Search Parameter for a resource.
A search parameter that defines a named search item that can be used to
search/filter on a resource.
"""
resource_type = "SearchParameter"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.base = None
""" The resource type(s) this search parameter applies to.
List of `str` items. """
self.chain = None
""" Chained names supported.
List of `str` items. """
self.code = None
""" Code used in URL.
Type `str`. """
self.comparator = None
""" eq | ne | gt | lt | ge | le | sa | eb | ap.
List of `str` items. """
self.component = None
""" For Composite resources to define the parts.
List of `SearchParameterComponent` items (represented as `dict` in JSON). """
self.contact = None
""" Contact details for the publisher.
List of `ContactDetail` items (represented as `dict` in JSON). """
self.date = None
""" Date this was last changed.
Type `FHIRDate` (represented as `str` in JSON). """
self.derivedFrom = None
""" Original Definition for the search parameter.
Type `str`. """
self.description = None
""" Natural language description of the search parameter.
Type `str`. """
self.experimental = None
""" For testing purposes, not real usage.
Type `bool`. """
self.expression = None
""" FHIRPath expression that extracts the values.
Type `str`. """
self.jurisdiction = None
""" Intended jurisdiction for search parameter (if applicable).
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.modifier = None
""" missing | exact | contains | not | text | in | not-in | below |
above | type.
List of `str` items. """
self.name = None
""" Name for this search parameter (computer friendly).
Type `str`. """
self.publisher = None
""" Name of the publisher (organization or individual).
Type `str`. """
self.purpose = None
""" Why this search parameter is defined.
Type `str`. """
self.status = None
""" draft | active | retired | unknown.
Type `str`. """
self.target = None
""" Types of resource (if a resource reference).
List of `str` items. """
self.type = None
""" number | date | string | token | reference | composite | quantity |
uri.
Type `str`. """
self.url = None
""" Logical URI to reference this search parameter (globally unique).
Type `str`. """
self.useContext = None
""" Context the content is intended to support.
List of `UsageContext` items (represented as `dict` in JSON). """
self.version = None
""" Business version of the search parameter.
Type `str`. """
self.xpath = None
""" XPath that extracts the values.
Type `str`. """
self.xpathUsage = None
""" normal | phonetic | nearby | distance | other.
Type `str`. """
super(SearchParameter, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(SearchParameter, self).elementProperties()
js.extend([
("base", "base", str, True, None, True),
("chain", "chain", str, True, None, False),
("code", "code", str, False, None, True),
("comparator", "comparator", str, True, None, False),
("component", "component", SearchParameterComponent, True, None, False),
("contact", "contact", contactdetail.ContactDetail, True, None, False),
("date", "date", fhirdate.FHIRDate, False, None, False),
("derivedFrom", "derivedFrom", str, False, None, False),
("description", "description", str, False, None, True),
("experimental", "experimental", bool, False, None, False),
("expression", "expression", str, False, None, False),
("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
("modifier", "modifier", str, True, None, False),
("name", "name", str, False, None, True),
("publisher", "publisher", str, False, None, False),
("purpose", "purpose", str, False, None, False),
("status", "status", str, False, None, True),
("target", "target", str, True, None, False),
("type", "type", str, False, None, True),
("url", "url", str, False, None, True),
("useContext", "useContext", usagecontext.UsageContext, True, None, False),
("version", "version", str, False, None, False),
("xpath", "xpath", str, False, None, False),
("xpathUsage", "xpathUsage", str, False, None, False),
])
return js
from . import backboneelement
class SearchParameterComponent(backboneelement.BackboneElement):
""" For Composite resources to define the parts.
Used to define the parts of a composite search parameter.
"""
resource_type = "SearchParameterComponent"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.definition = None
""" Defines how the part works.
Type `FHIRReference` referencing `SearchParameter` (represented as `dict` in JSON). """
self.expression = None
""" Subexpression relative to main expression.
Type `str`. """
super(SearchParameterComponent, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(SearchParameterComponent, self).elementProperties()
js.extend([
("definition", "definition", fhirreference.FHIRReference, False, None, True),
("expression", "expression", str, False, None, True),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import contactdetail
except ImportError:
contactdetail = sys.modules[__package__ + '.contactdetail']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import usagecontext
except ImportError:
usagecontext = sys.modules[__package__ + '.usagecontext']
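# --- Usage sketch (illustrative, not part of the generated model) ---
# Round-trips a minimal SearchParameter through the generated class; every
# field value below is made up for demonstration.
if __name__ == '__main__':
    example = {
        'resourceType': 'SearchParameter',
        'url': 'http://example.org/fhir/SearchParameter/example',
        'name': 'example',
        'status': 'draft',
        'description': 'Illustrative search parameter',
        'code': 'example',
        'base': ['Patient'],
        'type': 'string',
    }
    sp = SearchParameter(jsondict=example, strict=True)
    print(sp.name, sp.type)        # example string
    print(sp.as_json()['url'])     # round-trip back to JSON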
|
bsd-3-clause
| -2,993,350,882,617,828,400
| 35.269767
| 107
| 0.574506
| false
| 4.484186
| false
| false
| false
|
gammapy/gamma-sky
|
make.py
|
1
|
2880
|
#!/usr/bin/env python
"""Make gamma-sky.net input data.
"""
import click
import gammasky
@click.group()
def cli():
"""The gamma-sky.net Python cli"""
pass
@cli.group()
def cat():
"""Dump catalog to JSON"""
@cli.group()
def source():
"""Dump source objects to JSON"""
@cat.command('all')
@click.pass_context
def cat_all(ctx):
"""Dump all catalogs to JSON"""
ctx.invoke(cat_tev)
ctx.invoke(cat_3fhl)
ctx.invoke(cat_3fgl)
ctx.invoke(cat_snrcat)
@cat.command('tev')
def cat_tev():
"""Dump TeV catalog to JSON"""
gammasky.make_tev_catalog_data()
@cat.command('3fhl')
def cat_3fhl():
"""Dump 3FHL catalog to JSON"""
gammasky.make_3fhl_catalog_data()
@cat.command('3fgl')
def cat_3fgl():
"""Dump 3FGL catalog to JSON"""
gammasky.make_3fgl_catalog_data()
@cat.command('snrcat')
def cat_snrcat():
"""Dump SNRCat catalog to JSON"""
gammasky.make_snrcat_catalog_data()
@source.command('all')
@click.pass_context
def source_all(ctx):
"""Dump all source objects to JSON"""
ctx.invoke(source_tev)
ctx.invoke(source_3fhl)
ctx.invoke(source_3fgl)
@source.command('tev')
@click.option('--sources', default='all', help='Either "all" or comma-separated string of source IDs')
def source_tev(sources):
"""Dump TeV source objects to JSON"""
gammasky.make_tev_source_data(sources)
@source.command('3fhl')
@click.option('--sources', default='all', help='Either "all" or comma-separated string of source IDs')
def source_3fhl(sources):
"""Dump 3FHL source objects to JSON"""
gammasky.make_3fhl_source_data(sources)
@source.command('3fgl')
@click.option('--sources', default='all', help='Either "all" or comma-separated string of source IDs')
def source_3fgl(sources):
"""Dump 3FGL source objects to JSON"""
gammasky.make_3fgl_source_data(sources)
@cli.command()
def maps():
"""Make map data"""
gammasky.make_maps_data()
@cli.group()
def fetch():
"""Fetch input data files"""
@fetch.command('cats')
def fetch_cats():
"""Fetch all source catalog files"""
gammasky.fetch_all_cats()
@fetch.command('maps')
def fetch_maps():
    """Fetch all input files to make maps"""
    # fetch_all_maps is assumed here as the map-input counterpart of
    # fetch_all_cats (which fetch_cats above already covers).
    gammasky.fetch_all_maps()
@fetch.command('all')
def fetch_all():
"""Fetch all data files"""
gammasky.fetch_all_data()
@cli.command()
@click.pass_context
def all(ctx):
"""Generate all data for the webpage"""
ctx.invoke(cat_all)
ctx.invoke(source_all)
ctx.invoke(maps)
@cli.command('test-dataset')
@click.option('--sources', default='0')
@click.pass_context
def test_dataset(ctx, sources):
"""Dump all data needed for testing."""
ctx.invoke(cat_all)
ctx.forward(source_tev)
ctx.forward(source_3fhl)
ctx.forward(source_3fgl)
if __name__ == '__main__':
import logging
logging.basicConfig(level=logging.INFO)
cli()
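# Usage sketch (shell commands, shown as comments; assumes the gammasky
# package and its input files are available):
#
#   python make.py fetch all                    # download catalogs and map inputs
#   python make.py cat tev                      # dump only the TeV catalog to JSON
#   python make.py source 3fhl --sources=23,42  # dump two 3FHL sources by ID
#   python make.py all                          # regenerate everything for the webpage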
|
mit
| -9,101,641,954,529,314,000
| 19.425532
| 102
| 0.655556
| false
| 2.990654
| false
| false
| false
|
jualjiman/knowledge-base
|
src/knowledge_base/settings/staging.py
|
1
|
2155
|
# -*- coding: utf-8 -*-
"""
Django staging settings for knowledge_base project.
"""
import os
import urlparse
from . import * # noqa
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [
'kb.pythonballz.com'
]
# Application definition
INSTALLED_APPS += (
'opbeat.contrib.django',
)
MIDDLEWARE_CLASSES += (
'opbeat.contrib.django.middleware.OpbeatAPMMiddleware',
)
# Database settings
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ['DATABASE_URL'])
DATABASES = {
'default': {
'ENGINE': {
'postgres': 'django.db.backends.postgresql_psycopg2'
}[url.scheme],
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port
}
}
# Static files and uploads
MEDIA_ROOT = os.path.realpath(os.path.join(
os.environ['DATA_DIR'], 'uploads'))
STATIC_ROOT = os.path.realpath(os.path.join(
os.environ['DATA_DIR'], 'assets'))
MEDIA_URL = '/uploads/'
STATIC_URL = '/static/'
# Opbeat
OPBEAT = {
'ORGANIZATION_ID': os.environ['OPBEAT_ORGANIZATION_ID'],
'APP_ID': os.environ['OPBEAT_APP_ID'],
'SECRET_TOKEN': os.environ['OPBEAT_SECRET_TOKEN'],
'INSTRUMENT_DJANGO_MIDDLEWARE': True,
}
# Email
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = os.environ['EMAIL_HOST']
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_PORT = int(os.environ['EMAIL_HOST_PORT'])
EMAIL_USE_TLS = os.environ['EMAIL_USE_TLS'] == 'True'
DEFAULT_FROM_EMAIL = os.environ['DEFAULT_FROM_EMAIL']
# Haystack Connections
if 'HAYSTACK_CONNECTION_URL' in os.environ:
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': os.environ['HAYSTACK_CONNECTION_URL']
},
}
# Cache
if 'MEMCACHED_URL' in os.environ:
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': os.environ['MEMCACHED_URL'],
'KEY_PREFIX': 'kb::'
}
}
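# Illustration (comments only): with a hypothetical environment value
#   DATABASE_URL=postgres://kb_user:s3cret@db.internal:5432/knowledge_base
# the urlparse-based block above yields
#   DATABASES['default'] == {
#       'ENGINE': 'django.db.backends.postgresql_psycopg2',
#       'NAME': 'knowledge_base', 'USER': 'kb_user',
#       'PASSWORD': 's3cret', 'HOST': 'db.internal', 'PORT': 5432,
#   }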
|
apache-2.0
| -418,241,262,382,939,200
| 22.423913
| 77
| 0.634339
| false
| 3.197329
| false
| false
| false
|
kapilt/cloud-custodian
|
tools/c7n_azure/tests_azure/test_actions_mark-for-op.py
|
1
|
2151
|
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
from c7n_azure import utils
from c7n_azure.actions.tagging import TagDelayedAction
from mock import patch, Mock
from . import tools_tags as tools
from .azure_common import BaseTest
class ActionsMarkForOpTest(BaseTest):
existing_tags = {'pre-existing-1': 'unmodified', 'pre-existing-2': 'unmodified'}
DAYS = 10
def _get_action(self, data):
return TagDelayedAction(data=data, manager=Mock())
def test_schema_validate(self):
self.assertTrue(
self.load_policy(
tools.get_policy([
{'type': 'mark-for-op',
'op': 'delete',
'days': 10},
]),
validate=True))
@patch('c7n_azure.tags.TagHelper.update_resource_tags')
def test_mark_for_op(self, update_resource_tags):
self.patch(TagDelayedAction, 'type', 'mark-for-op')
action = self._get_action({'op': 'stop', 'days': self.DAYS})
resource = tools.get_resource(self.existing_tags)
action.process([resource])
tags = tools.get_tags_parameter(update_resource_tags)
date = (utils.now(tz=action.tz) + datetime.timedelta(days=self.DAYS)).strftime('%Y/%m/%d')
expected_value = TagDelayedAction.default_template.format(op='stop', action_date=date)
expected_tags = self.existing_tags.copy()
expected_tags.update({'custodian_status': expected_value})
self.assertEqual(tags, expected_tags)
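# For reference (hypothetical policy snippet, comments only): the action under
# test corresponds to Cloud Custodian policy YAML along the lines of
#   policies:
#     - name: mark-stopped-vms
#       resource: azure.vm
#       actions:
#         - type: mark-for-op
#           op: stop
#           days: 10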
|
apache-2.0
| 8,334,312,290,813,750,000
| 35.457627
| 98
| 0.670386
| false
| 3.834225
| false
| false
| false
|
RianFuro/vint
|
vint/linting/policy/prohibit_encoding_opt_after_scriptencoding.py
|
1
|
1158
|
import re
from vint.ast.node_type import NodeType
from vint.linting.level import Level
from vint.linting.policy.abstract_policy import AbstractPolicy
from vint.linting.policy_registry import register_policy
@register_policy
class ProhibitEncodingOptionAfterScriptEncoding(AbstractPolicy):
def __init__(self):
super(ProhibitEncodingOptionAfterScriptEncoding, self).__init__()
self.description = 'Set encoding before setting scriptencoding'
self.reference = ':help :scriptencoding'
self.level = Level.WARNING
self.was_scriptencoding_found = False
self.has_encoding_opt_after_scriptencoding = False
def listen_node_types(self):
return [NodeType.EXCMD]
def is_valid(self, excmd_node, lint_context):
""" Whether the specified node is valid.
        This policy prohibits setting the 'encoding' option after
        ':scriptencoding' has appeared in the script.
"""
cmd_str = excmd_node['str']
if re.match(r':*scripte', cmd_str):
self.was_scriptencoding_found = True
if re.match(r':*set? +enc', cmd_str) and self.was_scriptencoding_found:
return False
return True
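# Illustrative Vim script fragments (as comments) showing what this policy
# flags; the file names are hypothetical:
#
#   " ok.vim - encoding is set first, so is_valid() stays True
#   set encoding=utf-8
#   scriptencoding utf-8
#
#   " bad.vim - 'set encoding' after scriptencoding makes is_valid() False
#   scriptencoding utf-8
#   set encoding=utf-8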
|
mit
| -3,076,817,734,591,732,700
| 29.473684
| 79
| 0.684801
| false
| 3.847176
| false
| false
| false
|
wziyong/casperfpga
|
src/casperfpga.py
|
1
|
17912
|
"""
Created on Feb 28, 2013
@author: paulp
"""
import logging
import struct
import time
import register
import sbram
import snap
import tengbe
import qdr
from attribute_container import AttributeContainer
from utils import parse_fpg
LOGGER = logging.getLogger(__name__)
# known CASPER memory-accessible devices and their associated classes and containers
CASPER_MEMORY_DEVICES = {
'xps:bram': {'class': sbram.Sbram, 'container': 'sbrams'},
'xps:qdr': {'class': qdr.Qdr, 'container': 'qdrs'},
'xps:sw_reg': {'class': register.Register, 'container': 'registers'},
'xps:tengbe_v2': {'class': tengbe.TenGbe, 'container': 'tengbes'},
'casper:snapshot': {'class': snap.Snap, 'container': 'snapshots'},}
# other devices - blocks that aren't memory devices, but about which we'd like to know
# tagged in the simulink diagram
CASPER_OTHER_DEVICES = {
'casper:bitsnap': 'bitsnap',
'casper:dec_fir': 'dec_fir',
'casper:fft': 'fft',
'casper:fft_biplex_real_2x': 'fft_biplex_real_2x',
'casper:fft_biplex_real_4x': 'fft_biplex_real_4x',
'casper:fft_wideband_real': 'fft_wideband_real',
'casper:info': 'info',
'casper:pfb_fir': 'pfb_fir',
'casper:pfb_fir_async': 'pfb_fir_async',
'casper:pfb_fir_generic': 'pfb_fir_generic',
'casper:pfb_fir_real': 'pfb_fir_real',
'casper:spead_pack': 'spead_pack',
'casper:spead_unpack': 'spead_unpack',
'casper:vacc': 'vacc',
'casper:xeng': 'xeng',
'xps:xsg': 'xps',
'xps:katadc': 'katadc',
}
class CasperFpga(object):
"""
    An FPGA host board that has a CASPER design running on it, or soon will.
"""
def __init__(self, host):
"""
:param host: the hostname of this CasperFpga
:return:
"""
self.host = host
self.__reset_device_info()
LOGGER.debug('%s: now a CasperFpga' % self.host)
def read(self, device_name, size, offset=0):
raise NotImplementedError
def blindwrite(self, device_name, data, offset=0):
raise NotImplementedError
def listdev(self):
"""
Get a list of the memory bus items in this design.
:return: a list of memory devices
"""
raise NotImplementedError
def deprogram(self):
"""
        The child class will deprogram the FPGA; here we just reset our device information
:return:
"""
self.__reset_device_info()
def __reset_device_info(self):
"""
Reset information of devices this FPGA knows about.
"""
# device dictionaries:
# devices: all of them
# memory_devices: only devices on the bus
# other_devices: anything not on the bus
self.devices = {}
self.memory_devices = {}
self.other_devices = {}
# containers
for container_ in CASPER_MEMORY_DEVICES.values():
setattr(self, container_['container'], AttributeContainer())
# hold misc information about the bof file, program time, etc
self.system_info = {}
self.rcs_info = {}
def test_connection(self):
"""
Write to and read from the scratchpad to test the connection to the FPGA.
"""
for val in [0xa5a5a5, 0x000000]:
self.write_int('sys_scratchpad', val)
rval = self.read_int('sys_scratchpad')
if rval != val:
raise RuntimeError('%s: cannot write scratchpad? %i != %i' % (self.host, rval, val))
return True
# def __getattribute__(self, name):
# if name == 'registers':
# return {self.memory_devices[r].name: self.memory_devices[r] for r in self.memory_devices_memory['register']['items']}
# return object.__getattribute__(self, name)
def read_dram(self, size, offset=0):
"""
        Reads data from a ROACH's DRAM. Reads are chunked at the 64MB
        indirect-page boundaries.
The 64MB indirect address register is automatically incremented as necessary.
It returns a string, as per the normal 'read' function.
ROACH has a fixed device name for the DRAM (dram memory).
Uses bulkread internally.
:param size: amount of data to read, in bytes
:param offset: offset at which to read, in bytes
:return: binary data string
"""
data = []
n_reads = 0
last_dram_page = -1
dram_indirect_page_size = (64*1024*1024)
#read_chunk_size = (1024*1024)
LOGGER.debug('%s: reading a total of %8i bytes from offset %8i...' %
(self.host, size, offset))
while n_reads < size:
dram_page = (offset + n_reads) / dram_indirect_page_size
local_offset = (offset + n_reads) % dram_indirect_page_size
#local_reads = min(read_chunk_size, size-n_reads, dram_indirect_page_size-(offset%dram_indirect_page_size))
local_reads = min(size - n_reads, dram_indirect_page_size - (offset % dram_indirect_page_size))
if last_dram_page != dram_page:
self.write_int('dram_controller', dram_page)
last_dram_page = dram_page
local_data = (self.bulkread('dram_memory', local_reads, local_offset))
data.append(local_data)
LOGGER.debug('%s: reading %8i bytes from indirect '
'address %4i at local offset %8i... done.' %
(self.host, local_reads, dram_page, local_offset))
n_reads += local_reads
return ''.join(data)
def write_dram(self, data, offset=0):
"""
Writes data to a ROACH's DRAM. Writes are done up to 512KiB at a time.
The 64MB indirect address register is automatically incremented as necessary.
ROACH has a fixed device name for the DRAM (dram memory) and so the user does not need to specify the write
register.
:param data: packed binary string data to write
:param offset: the offset at which to write
:return:
"""
size = len(data)
n_writes = 0
last_dram_page = -1
dram_indirect_page_size = (64*1024*1024)
write_chunk_size = (1024*512)
LOGGER.debug('%s: writing a total of %8i bytes from offset %8i...' %
(self.host, size, offset))
while n_writes < size:
dram_page = (offset+n_writes)/dram_indirect_page_size
local_offset = (offset+n_writes) % dram_indirect_page_size
local_writes = min(write_chunk_size, size-n_writes,
dram_indirect_page_size-(offset % dram_indirect_page_size))
LOGGER.debug('%s: writing %8i bytes from indirect address %4i at local offset %8i...' %
(self.host, local_writes, dram_page, local_offset))
if last_dram_page != dram_page:
self.write_int('dram_controller', dram_page)
last_dram_page = dram_page
self.blindwrite('dram_memory', data[n_writes:n_writes+local_writes], local_offset)
n_writes += local_writes
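        # e.g. a write of 3 MiB starting at offset 66 MiB lands on indirect
        # page 1 (66 MiB // 64 MiB) at local offset 2 MiB, and is issued in
        # 512 KiB chunks.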
def write(self, device_name, data, offset=0):
"""
Write data, then read it to confirm a successful write.
:param device_name: memory device name to write
:param data: packed binary data string to write
:param offset: offset at which to write, in bytes
:return:
"""
self.blindwrite(device_name, data, offset)
new_data = self.read(device_name, len(data), offset)
if new_data != data:
unpacked_wrdata = struct.unpack('>L', data[0:4])[0]
unpacked_rddata = struct.unpack('>L', new_data[0:4])[0]
LOGGER.error('%s: verification of write to %s at offset %d failed. Wrote 0x%08x... '
'but got back 0x%08x...' % (self.host, device_name, offset,
unpacked_wrdata, unpacked_rddata))
raise ValueError('%s: verification of write to %s at offset %d failed. Wrote 0x%08x... '
'but got back 0x%08x...' % (self.host, device_name, offset,
unpacked_wrdata, unpacked_rddata))
def read_int(self, device_name, word_offset=0):
"""
Read an integer from memory device.
i.e. calls self.read(device_name, size=4, offset=0) and uses struct to unpack it into an integer
:param device_name: device from which to read
:param word_offset: the 32-bit word offset at which to read
:return: signed 32-bit integer
"""
data = self.read(device_name, 4, word_offset*4)
return struct.unpack('>i', data)[0]
def read_uint(self, device_name, word_offset=0):
"""
Read an unsigned integer from memory device.
:param device_name: device from which to read
:param word_offset: the 32-bit word offset at which to read
:return: unsigned 32-bit integer
"""
data = self.read(device_name, 4, word_offset*4)
return struct.unpack('>I', data)[0]
def write_int(self, device_name, integer, blindwrite=False, word_offset=0):
"""
Writes an integer to the device specified at the offset specified.
A blind write is optional.
:param device_name: device to be written
:param integer: the integer to write
:param blindwrite: True for blind write, default False
:param word_offset: the offset at which to write, in 32-bit words
:return:
"""
        # careful of packing input data into 32 bit - check range: if
        # negative, pack as a signed int; if positive (values >= 2**31 only
        # fit unsigned), pack as an unsigned int.
        data = struct.pack('>i' if integer < 0 else '>I', integer)
if blindwrite:
self.blindwrite(device_name, data, word_offset*4)
else:
self.write(device_name, data, word_offset*4)
LOGGER.debug('%s: write_int %8x to register %s at word offset %d okay%s.' %
(self.host, integer, device_name,
word_offset, ' (blind)' if blindwrite else ''))
def get_rcs(self, rcs_block_name='rcs'):
"""Retrieves and decodes a revision control block."""
raise NotImplementedError
rv = {'user': self.read_uint(rcs_block_name + '_user')}
app = self.read_uint(rcs_block_name+'_app')
lib = self.read_uint(rcs_block_name+'_lib')
if lib & (1 << 31):
rv['compile_timestamp'] = lib & ((2 ** 31)-1)
else:
if lib & (1 << 30):
#type is svn
rv['lib_rcs_type'] = 'svn'
else:
#type is git
rv['lib_rcs_type'] = 'git'
if lib & (1 << 28):
#dirty bit
rv['lib_dirty'] = True
else:
rv['lib_dirty'] = False
rv['lib_rev'] = lib & ((2 ** 28)-1)
if app & (1 << 31):
rv['app_last_modified'] = app & ((2 ** 31)-1)
else:
if app & (1 << 30):
#type is svn
rv['app_rcs_type'] = 'svn'
else:
#type is git
rv['app_rcs_type'] = 'git'
if app & (1 << 28):
#dirty bit
rv['app_dirty'] = True
else:
                rv['app_dirty'] = False
rv['app_rev'] = app & ((2 ** 28)-1)
return rv
def __create_memory_devices(self, device_dict, memorymap_dict):
"""
Create memory devices from dictionaries of design information.
:param device_dict: raw dictionary of information from tagged blocks in Simulink design, keyed on device name
:param memorymap_dict: dictionary of information that would have been in coreinfo.tab - memory bus information
:return:
"""
# create and add memory devices to the memory device dictionary
for device_name, device_info in device_dict.items():
if device_name == '':
raise NameError('There\'s a problem somewhere, got a blank device name?')
if device_name in self.memory_devices.keys():
raise NameError('Memory device %s already exists.' % device_name)
# get the class from the known devices, if it exists there
tag = device_info['tag']
try:
known_device_class = CASPER_MEMORY_DEVICES[tag]['class']
known_device_container = CASPER_MEMORY_DEVICES[tag]['container']
except KeyError:
pass
else:
if not callable(known_device_class):
raise TypeError('%s is not a callable Memory class - that\'s a problem.' % known_device_class)
new_device = known_device_class.from_device_info(self, device_name, device_info, memorymap_dict)
if new_device.name in self.memory_devices.keys():
raise NameError('Device called %s of type %s already exists in devices list.' %
(new_device.name, type(new_device)))
self.devices[device_name] = new_device
self.memory_devices[device_name] = new_device
container = getattr(self, known_device_container)
setattr(container, device_name, new_device)
assert id(getattr(container, device_name)) == id(new_device) == id(self.memory_devices[device_name])
# allow created devices to update themselves with full device info
# link control registers, etc
for name, device in self.memory_devices.items():
try:
device.post_create_update(device_dict)
except AttributeError: # the device may not have an update function
pass
def __create_other_devices(self, device_dict):
"""
Store non-memory device information in a dictionary
:param device_dict: raw dictionary of information from tagged blocks in Simulink design, keyed on device name
:return:
"""
for device_name, device_info in device_dict.items():
if device_name == '':
raise NameError('There\'s a problem somewhere, got a blank device name?')
if device_name in self.other_devices.keys():
raise NameError('Other device %s already exists.' % device_name)
if device_info['tag'] in CASPER_OTHER_DEVICES.keys():
self.devices[device_name] = device_info
self.other_devices[device_name] = device_info
def device_names_by_container(self, container_name):
"""Return a list of devices in a certain container.
"""
return [devname for devname, container in self.memory_devices.iteritems() if container == container_name]
def devices_by_container(self, container):
"""Get devices using container type.
"""
return getattr(self, container)
def get_system_information(self, filename=None, fpg_info=None):
"""
Get information about the design running on the FPGA.
If filename is given, get it from there, otherwise query the host via KATCP.
:param filename: fpg filename
:param fpg_info: a tuple containing device_info and coreinfo dictionaries
:return: <nothing> the information is populated in the class
"""
if (filename is None) and (fpg_info is None):
raise RuntimeError('Either filename or parsed fpg data must be given.')
if filename is not None:
device_dict, memorymap_dict = parse_fpg(filename)
else:
device_dict = fpg_info[0]
memorymap_dict = fpg_info[1]
# add system registers
device_dict.update(self.__add_sys_registers())
# reset current devices and create new ones from the new design information
self.__reset_device_info()
self.__create_memory_devices(device_dict, memorymap_dict)
self.__create_other_devices(device_dict)
# populate some system information
try:
self.system_info.update(device_dict['77777'])
except KeyError:
LOGGER.warn('%s: no sys info key in design info!' % self.host)
# and RCS information if included
if '77777_git' in device_dict:
self.rcs_info['git'] = device_dict['77777_git']
if '77777_svn' in device_dict:
self.rcs_info['svn'] = device_dict['77777_svn']
def estimate_fpga_clock(self):
"""
Get the estimated clock of the running FPGA, in Mhz.
"""
firstpass = self.read_uint('sys_clkcounter')
time.sleep(2.0)
secondpass = self.read_uint('sys_clkcounter')
if firstpass > secondpass:
secondpass += (2**32)
return (secondpass - firstpass) / 2000000.0
@staticmethod
def __add_sys_registers():
standard_reg = {'tag': 'xps:sw_reg', 'mode': 'one value', 'io_dir': 'To Processor',
'io_delay': '1', 'sample_period': '1', 'sim_port': 'off', 'show_format': 'off',
'names': 'reg', 'bitwidths': '32', 'arith_types': '0', 'bin_pts': '0'}
sys_registers = {'sys_board_id': standard_reg.copy(),
'sys_rev': standard_reg.copy(),
'sys_rev_rcs': standard_reg.copy(),
'sys_scratchpad': standard_reg.copy(),
'sys_clkcounter': standard_reg.copy()}
return sys_registers
# end
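# --- Usage sketch (hypothetical, not part of the library) ---
# CasperFpga is abstract: read/blindwrite (and listdev, omitted here) must
# come from a transport-specific subclass. An in-memory stub shows the
# write/read-back flow that write_int and read_uint rely on:
class _DummyFpga(CasperFpga):
    def __init__(self, host):
        super(_DummyFpga, self).__init__(host)
        self._mem = {}
    def blindwrite(self, device_name, data, offset=0):
        # back each named device with a 1 KiB zeroed buffer
        buf = self._mem.setdefault(device_name, b'\x00' * 1024)
        self._mem[device_name] = buf[:offset] + data + buf[offset + len(data):]
    def read(self, device_name, size, offset=0):
        return self._mem.setdefault(device_name, b'\x00' * 1024)[offset:offset + size]

if __name__ == '__main__':
    fpga = _DummyFpga('localhost')
    fpga.write_int('sys_scratchpad', 0xa5a5a5)   # write() verifies by read-back
    assert fpga.read_uint('sys_scratchpad') == 0xa5a5a5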
|
gpl-2.0
| 1,421,237,457,292,584,200
| 42.581509
| 130
| 0.566771
| false
| 3.904098
| false
| false
| false
|
frippe12573/geonode
|
geonode/catalogue/models.py
|
1
|
4616
|
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import errno
import logging
from django.conf import settings
from django.db.models import signals
from geonode.layers.models import Layer
from geonode.catalogue import get_catalogue
from geonode.base.models import Link
LOGGER = logging.getLogger(__name__)
def catalogue_pre_delete(instance, sender, **kwargs):
"""Removes the layer from the catalogue
"""
catalogue = get_catalogue()
catalogue.remove_record(instance.uuid)
def catalogue_post_save(instance, sender, **kwargs):
"""Get information from catalogue
"""
try:
catalogue = get_catalogue()
catalogue.create_record(instance)
record = catalogue.get_record(instance.uuid)
except EnvironmentError, err:
        msg = 'Could not connect to catalogue ' \
              'to save information for layer "%s"' % (instance.name)
if err.reason.errno == errno.ECONNREFUSED:
LOGGER.warn(msg, err)
return
else:
raise err
msg = ('Metadata record for %s does not exist,'
' check the catalogue signals.' % instance.name)
assert record is not None, msg
msg = ('Metadata record for %s should contain links.' % instance.name)
assert hasattr(record, 'links'), msg
# Create the different metadata links with the available formats
for mime, name, metadata_url in record.links['metadata']:
Link.objects.get_or_create(resource=instance.resourcebase_ptr,
url=metadata_url,
defaults=dict(
name=name,
extension='xml',
mime=mime,
link_type='metadata',
)
)
# generate and save CSW specific fields
signals.post_save.disconnect(catalogue_post_save, sender=Layer)
# generate an XML document (GeoNode's default is ISO)
md_doc = catalogue.catalogue.csw_gen_xml(instance,
'catalogue/full_metadata.xml')
instance.metadata_xml = md_doc
instance.csw_anytext = \
catalogue.catalogue.csw_gen_anytext(instance.metadata_xml)
instance.csw_wkt_geometry = instance.geographic_bounding_box.split(';')[-1]
instance.save()
signals.post_save.connect(catalogue_post_save, sender=Layer)
def catalogue_pre_save(instance, sender, **kwargs):
"""Send information to catalogue
"""
record = None
try:
catalogue = get_catalogue()
record = catalogue.get_record(instance.uuid)
except EnvironmentError, err:
        msg = 'Could not connect to catalogue ' \
              'to save information for layer "%s"' % (instance.name)
if err.reason.errno == errno.ECONNREFUSED:
LOGGER.warn(msg, err)
else:
raise err
if record is None:
return
# Fill in the url for the catalogue
if hasattr(record.distribution, 'online'):
onlineresources = [r for r in record.distribution.online \
if r.protocol == "WWW:LINK-1.0-http--link"]
if len(onlineresources) == 1:
res = onlineresources[0]
instance.distribution_url = res.url
instance.distribution_description = res.description
else:
durl = settings.SITEURL
if durl[-1] == '/': # strip trailing slash
durl = durl[:-1]
durl = '%s%s' % (durl, instance.get_absolute_url())
instance.distribution_url = durl
instance.distribution_description = \
'Online link to the \'%s\' description on GeoNode ' % instance.title
if 'geonode.catalogue' in settings.INSTALLED_APPS:
signals.pre_save.connect(catalogue_pre_save, sender=Layer)
signals.post_save.connect(catalogue_post_save, sender=Layer)
signals.pre_delete.connect(catalogue_pre_delete, sender=Layer)
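# Flow sketch (comments only): saving a Layer while 'geonode.catalogue' is
# installed runs catalogue_pre_save (pulls distribution info from an existing
# CSW record, if any) and catalogue_post_save (creates/refreshes the record,
# attaches metadata Links, then regenerates the ISO XML with the post_save
# signal temporarily disconnected so the final instance.save() cannot recurse).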
|
gpl-3.0
| -4,810,848,301,429,058,000
| 34.236641
| 80
| 0.6276
| false
| 4.219378
| false
| false
| false
|
bitmazk/django-multilingual-news
|
multilingual_news/south_migrations/0004_auto__add_field_newsentry_image_width__add_field_newsentry_image_heigh.py
|
1
|
13838
|
# flake8: noqa
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'NewsEntry.image_width'
db.add_column(u'multilingual_news_newsentry', 'image_width',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'NewsEntry.image_height'
db.add_column(u'multilingual_news_newsentry', 'image_height',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
# Adding field 'NewsEntry.image_source_url'
db.add_column(u'multilingual_news_newsentry', 'image_source_url',
self.gf('django.db.models.fields.CharField')(default='', max_length=1024, blank=True),
keep_default=False)
# Adding field 'NewsEntry.image_source_text'
db.add_column(u'multilingual_news_newsentry', 'image_source_text',
self.gf('django.db.models.fields.CharField')(default='', max_length=1024, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'NewsEntry.image_width'
db.delete_column(u'multilingual_news_newsentry', 'image_width')
# Deleting field 'NewsEntry.image_height'
db.delete_column(u'multilingual_news_newsentry', 'image_height')
# Deleting field 'NewsEntry.image_source_url'
db.delete_column(u'multilingual_news_newsentry', 'image_source_url')
# Deleting field 'NewsEntry.image_source_text'
db.delete_column(u'multilingual_news_newsentry', 'image_source_text')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'multilingual_news.newsentry': {
'Meta': {'ordering': "('-pub_date',)", 'object_name': 'NewsEntry'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'blank': 'True'}),
'image_float': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'image_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'image_source_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_source_url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'placeholders': ('djangocms_utils.fields.M2MPlaceholderField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'multilingual_news.newsentrytitle': {
'Meta': {'object_name': 'NewsEntryTitle'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['multilingual_news.NewsEntry']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '512'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
u'multilingual_news.recentplugin': {
'Meta': {'object_name': 'RecentPlugin', 'db_table': "u'cmsplugin_recentplugin'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'current_language_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'limit': ('django.db.models.fields.PositiveIntegerField', [], {})
}
}
complete_apps = ['multilingual_news']
|
mit
| -2,912,040,837,918,217,000
| 76.307263
| 192
| 0.561497
| false
| 3.634883
| false
| false
| false
|
google/apis-client-generator
|
src/googleapis/codegen/utilities/__init__.py
|
1
|
3526
|
#!/usr/bin/python2.7
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2011 Google Inc. All Rights Reserved.
"""Assorted utility methods for the code generator."""
__author__ = 'aiuto@google.com (Tony Aiuto)'
import re
_WORD_SPLIT_PATTERN = re.compile(r'[\._/-]+')
def CamelCase(s):
"""CamelCase a string so that it is more readable as a variable name.
  Camelcases a string, beginning new words after any instances of '.', '_',
'/', or '-'.
Args:
s: (str) A string.
Returns:
s, with the first letter of each word capitalized.
"""
title = lambda x: x[0].upper() + x[1:] if x else x
return ''.join([title(x) for x in _WORD_SPLIT_PATTERN.split(s)])
def UnCamelCase(phrase, separator='_'):
"""Convert CamelCased phrase into lower-case delimited words.
Args:
phrase: CamelCased phrase.
separator: The word separator to inject between lowercased words.
Returns:
lower case phrase with separators between case changes from lower
to upper or acronyms (all upper) to lower case.
"""
phrase_len = len(phrase)
if not phrase_len:
return ''
ch = phrase[0]
text_run = ch.isalnum()
last_was_separator = ch.isupper() or not text_run
caps_run = False
result = ch.lower()
# already did first index
for i in range(phrase_len - 1):
ch = phrase[i + 1]
if ch.isupper():
caps_run = text_run and last_was_separator
text_run = True
if not last_was_separator:
result += separator
last_was_separator = True
elif not ch.isalnum():
caps_run = False
text_run = False
last_was_separator = True
else:
text_run = True
last_was_separator = False
if caps_run:
result += separator
last_was_separator = True
caps_run = False
result += ch.lower()
return result
def SanitizeDomain(s):
"""Sanitize a domain name to ch aracters suitable for use in code.
We only want text characters, digits, and '.'. For now, we only allow ASCII,
characters but we may revisit that in the future if there is demand from
Endpoints customers.
Since the pattern 'my-custom-app.appspot.com' is a popular one, preserve the
'-' in a useful way.
Args:
s: (str) A domain name
Returns:
(str) A version of the domain name suitable for use in code structures
like Java package names. None if s is None.
"""
if s is None:
return None
s = s.lower().replace('-', '_')
return ''.join([c for c in s
if (c.isalnum() and ord(c) < 128) or c in ['.', '_']])
def ReversedDomainComponents(s):
"""Returns a list of domain components in reverse order.
Args:
s: (str) A string of the form "a.b.c"
Returns:
list(s) E.g. ['c', 'b', 'a']
"""
if not s:
return []
parts = s.split('.')
parts.reverse()
return parts
def NoSpaces(s):
"""Remove spaces from a string, but preserves None-ness."""
if s:
return s.replace(' ', '')
return s
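# Illustrative behaviour (hypothetical quick checks, not part of the module):
#   CamelCase('content_type.sub-part')      -> 'ContentTypeSubPart'
#   UnCamelCase('FooBar')                   -> 'foo_bar'
#   SanitizeDomain('My-App.appspot.com')    -> 'my_app.appspot.com'
#   ReversedDomainComponents('a.b.c')       -> ['c', 'b', 'a']
#   NoSpaces('a b c')                       -> 'abc'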
|
apache-2.0
| 7,937,206,098,041,840,000
| 26.123077
| 78
| 0.650879
| false
| 3.609007
| false
| false
| false
|
django-de/django-de-v2
|
django_de/apps/aggregator/views.py
|
1
|
2922
|
# -*- coding: utf-8 -*-
from django.db.models import get_model
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.template.defaultfilters import mark_safe
from django.utils.translation import ugettext as _
from django.core.mail import mail_admins
from django.views.generic.list_detail import object_list
from django_de.apps.aggregator.models import Feed, Item
from django_de.apps.aggregator.forms import NewFeedForm
def overview(request):
params = {
'queryset': Item.objects.latest_public(),
'allow_empty': True,
'template_object_name': 'item',
'template_name': 'aggregator/overview.html',
'extra_context': {
'feed_list': Feed.objects.public().order_by('title'),
},
}
return object_list(request, **params)
def add_feed(request):
if request.POST:
form = NewFeedForm(request.POST)
if form.is_valid():
form.save()
message = _('A new feed has been added and awaits activation: %s') % form.cleaned_data.get('feed_url', '')
mail_admins(_('Community: New feed added.'), message, True)
return HttpResponseRedirect('/community/add/thankyou/')
else:
form = NewFeedForm()
template_context = {
'form': form,
'feed_list': Feed.objects.public().order_by('title'),
}
return render_to_response(
'aggregator/add_feed.html',
template_context,
RequestContext(request),
)
def admin_actions(request, modelname, appname):
if not request.user.is_superuser:
return HttpResponseForbidden('Superuser only!')
model = get_model(modelname, appname)
id_list = request.POST.getlist('item_id_list')
if id_list:
for id in id_list:
obj = model.objects.get(pk=id)
# Delete Item
if request.POST.has_key('_delete'):
obj.delete()
request.user.message_set.create(message=_('"%s" was deleted') % mark_safe(obj.title))
# Open Item
elif request.POST.has_key('_markopen'):
obj.public = True
obj.save()
request.user.message_set.create(message=_('"%s" was opened') % mark_safe(obj.title))
# Close Item
elif request.POST.has_key('_markclosed'):
obj.public = False
obj.save()
request.user.message_set.create(message=_('"%s" was closed') % mark_safe(obj.title))
# Wrong Action Parameter
else:
request.user.message_set.create(message='Wrong Action Parameter')
# None Checkbox checked
else:
request.user.message_set.create(message=_('Nothing to do...'))
return HttpResponseRedirect('/admin/%s/%s/' % (modelname, appname))
|
bsd-3-clause
| 4,984,823,657,608,090,000
| 35.525
| 118
| 0.616016
| false
| 4.075314
| false
| false
| false
|
erikrose/more-itertools
|
setup.py
|
1
|
1967
|
from re import sub
from setuptools import setup
from more_itertools import __version__
def get_long_description():
# Fix display issues on PyPI caused by RST markup
readme = open('README.rst').read()
version_lines = []
with open('docs/versions.rst') as infile:
next(infile)
for line in infile:
line = line.rstrip().replace('.. automodule:: more_itertools', '')
version_lines.append(line)
version_history = '\n'.join(version_lines)
version_history = sub(r':func:`([a-zA-Z0-9._]+)`', r'\1', version_history)
ret = readme + '\n\n' + version_history
return ret
setup(
name='more-itertools',
version=__version__,
description='More routines for operating on iterables, beyond itertools',
long_description=get_long_description(),
author='Erik Rose',
author_email='erikrose@grinchcentral.com',
license='MIT',
packages=['more_itertools'],
package_data={'more_itertools': ['py.typed', '*.pyi']},
include_package_data=True,
python_requires='>=3.5',
test_suite='tests',
url='https://github.com/erikrose/more-itertools',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries'],
keywords=['itertools', 'iterator', 'iteration', 'filter', 'peek',
'peekable', 'collate', 'chunk', 'chunked'],
)
|
mit
| 1,240,349,848,939,387,100
| 34.763636
| 78
| 0.614133
| false
| 4.080913
| false
| false
| false
|
anish/buildbot
|
worker/buildbot_worker/__init__.py
|
1
|
4662
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
#
# Keep in sync with master/buildbot/__init__.py
#
# We can't put this method in utility modules, because they import dependency packages
#
from __future__ import division
from __future__ import print_function
import datetime
import os
import re
from subprocess import PIPE
from subprocess import STDOUT
from subprocess import Popen
def gitDescribeToPep440(version):
    # git describe produces versions in the form: v0.9.8-20-gf0f45ca
    # where 20 is the number of commits since the last release, and gf0f45ca is the short commit id preceded by 'g'
    # we parse this and transform it into a PEP 440-style dev version such as 0.9.9-dev20 (increment the patch digit and append the commit count)
VERSION_MATCH = re.compile(r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\.post(?P<post>\d+))?(-(?P<dev>\d+))?(-g(?P<commit>.+))?')
v = VERSION_MATCH.search(version)
if v:
major = int(v.group('major'))
minor = int(v.group('minor'))
patch = int(v.group('patch'))
if v.group('dev'):
patch += 1
dev = int(v.group('dev'))
return "{0}.{1}.{2}-dev{3}".format(major, minor, patch, dev)
if v.group('post'):
return "{0}.{1}.{2}.post{3}".format(major, minor, patch, v.group('post'))
return "{0}.{1}.{2}".format(major, minor, patch)
return v
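# Illustrative conversions (comments only):
#   gitDescribeToPep440('v0.9.8')             -> '0.9.8'
#   gitDescribeToPep440('v0.9.8-20-gf0f45ca') -> '0.9.9-dev20'
#   gitDescribeToPep440('v1.0.0.post3')       -> '1.0.0.post3'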
def mTimeVersion(init_file):
cwd = os.path.dirname(os.path.abspath(init_file))
m = 0
for root, dirs, files in os.walk(cwd):
for f in files:
m = max(os.path.getmtime(os.path.join(root, f)), m)
d = datetime.datetime.utcfromtimestamp(m)
return d.strftime("%Y.%m.%d")
def getVersionFromArchiveId(git_archive_id='$Format:%ct %d$'):
""" Extract the tag if a source is from git archive.
When source is exported via `git archive`, the git_archive_id init value is modified
and placeholders are expanded to the "archived" revision:
%ct: committer date, UNIX timestamp
%d: ref names, like the --decorate option of git-log
See man gitattributes(5) and git-log(1) (PRETTY FORMATS) for more details.
"""
# mangle the magic string to make sure it is not replaced by git archive
if not git_archive_id.startswith('$For''mat:'):
# source was modified by git archive, try to parse the version from
# the value of git_archive_id
match = re.search(r'tag:\s*v([^,)]+)', git_archive_id)
if match:
# archived revision is tagged, use the tag
return gitDescribeToPep440(match.group(1))
# archived revision is not tagged, use the commit date
tstamp = git_archive_id.strip().split()[0]
d = datetime.datetime.utcfromtimestamp(int(tstamp))
return d.strftime('%Y.%m.%d')
return None
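# Illustration (comments only): when exported via `git archive`, the
# placeholder above expands to e.g. '1514764800  (HEAD -> master, tag: v1.8.0)',
# so the tag branch yields '1.8.0'; an untagged export such as '1514764800'
# falls back to the commit date, '2018.01.01'.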
def getVersion(init_file):
"""
Return BUILDBOT_VERSION environment variable, content of VERSION file, git
tag or 'latest'
"""
try:
return os.environ['BUILDBOT_VERSION']
except KeyError:
pass
try:
cwd = os.path.dirname(os.path.abspath(init_file))
fn = os.path.join(cwd, 'VERSION')
with open(fn) as f:
return f.read().strip()
except IOError:
pass
version = getVersionFromArchiveId()
if version is not None:
return version
try:
p = Popen(['git', 'describe', '--tags', '--always'], stdout=PIPE, stderr=STDOUT, cwd=cwd)
out = p.communicate()[0]
if (not p.returncode) and out:
v = gitDescribeToPep440(str(out))
if v:
return v
except OSError:
pass
try:
# if we really can't find the version, we use the date of modification of the most recent file
# docker hub builds cannot use git describe
return mTimeVersion(init_file)
except Exception:
# bummer. lets report something
return "latest"
version = getVersion(__file__)
__version__ = version
|
gpl-2.0
| 2,448,081,944,275,590,700
| 33.029197
| 138
| 0.639211
| false
| 3.765751
| false
| false
| false
|
kevin-coder/tensorflow-fork
|
tensorflow/python/tpu/tensor_tracer.py
|
1
|
67356
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
"""A utility to trace tensor values on TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import os.path
import re
import sys
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.tpu import tpu
from tensorflow.python.tpu.ops import tpu_ops
_TRACER_LOG_PREFIX = ' [>>>TT>>>]'
_DEVICE_TYPE_TPU = 'tpu'
_DEVICE_TYPE_CPU = 'cpu'
_TRACE_MODE_NAN_INF = 'nan-inf'
_TRACE_MODE_PART_TENSOR = 'part-tensor'
_TRACE_MODE_PART_TENSOR_SIZE = 3
_TRACE_MODE_FULL_TENSOR = 'full-tensor'
_TRACE_MODE_NORM = 'norm'
_TRACE_MODE_MAX_ABS = 'max-abs'
_SUBMODE_BRIEF = 'brief'
_SUBMODE_DETAILED = 'detailed'
_REASON_OUTSIDE_OP_RANGE = 'not-traced-outside-op-range'
_REASON_UNSAFE_OP = 'not-traced-unsafe-op'
_REASON_WHILELOOP_OP = 'not-traced-special-whileloop-op'
_REASON_UNSAFE_SCALAR = 'not-traced-unsafe-scalar'
_REASON_SKIP_SCALAR = 'not-traced-scalar'
_REASON_LESS_INTERESTING_OP = 'not-traced-less-interesting-op'
_REASON_DEVICE_MISMATCH = 'not-traced-device-mismatch'
_REASON_DYNAMIC_SHAPE = 'not-traced-dynamic-shape'
_REASON_SCALAR_GET_TRACED = 'traced-scalar'
_REASON_TENSOR_GET_TRACED = 'traced-tensor'
_REASON_USER_INCLUDED = 'traced-user-included'
_REASON_USER_EXCLUDED = 'not-traced-user-excluded'
_REASON_NOT_EXECUTED = 'not-traced-not-in-exec-path'
_REASON_NON_NUMERIC_TENSOR = 'not-traced-non-numeric-tensor'
_REASON_FEEDS_WHILELOOP_OP = 'not-traced-feeds-special-whileloop-op'
_MARKER_SECTION_BEGIN = '!!!!!!! section-begin:'
_MARKER_SECTION_END = '!!!!!!! section-end:'
_SECTION_NAME_CONFIG = 'configuration'
_SECTION_NAME_REASON = 'reason'
_SECTION_NAME_OP_LIST = 'op-list'
_SECTION_NAME_TENSOR_LIST = 'tensor-list'
_SECTION_NAME_CACHE_INDEX_MAP = 'cache-index-map'
_SECTION_NAME_GRAPH = 'graph'
_FIELD_NAME_VERSION = 'version:'
_FIELD_NAME_DEVICE = 'device:'
_FIELD_NAME_TRACE_MODE = 'trace-mode:'
_FIELD_NAME_SUBMODE = 'submode:'
_FIELD_NAME_NUM_REPLICAS = 'num-replicas:'
_FIELD_NAME_NUM_REPLICAS_PER_HOST = 'num-replicas-per-host:'
_FIELD_NAME_NUM_HOSTS = 'num-hosts:'
_FIELD_NAME_NUM_OPS = 'number-of-ops:'
_FIELD_NAME_NUM_TENSORS = 'number-of-tensors:'
_FIELD_NAME_NUM_CACHE_INDICES = 'number-of-indices:'
_FIELD_NAME_TOPOLOGICAL_SORT_SUCCEED = 'topological-sort-succeed:'
_FLAGS_ENV_VAR = 'TENSOR_TRACER_FLAGS'
_FLAG_SINGLE_QUOTE_PAT = re.compile(r"\s*--([^=]+)='([^']*)'")
_FLAG_DOUBLE_QUOTE_PAT = re.compile(r'\s*--([^=]+)="([^"]*)"')
_FLAG_NO_QUOTE_PAT = re.compile(r'\s*--([^=]+)=(\S*)')
_FLAG_NO_EQUAL_PAT = re.compile(r'\s*--([^=]+)\s*')
_FLAG_NAME_ENABLE = 'enable'
_FLAG_NAME_TRACE_MODE = 'trace_mode'
_FLAG_NAME_USE_COMPACT_TRACE = 'compact_trace'
_FLAG_NAME_TRACE_SCALAR_OPS = 'trace_scalar'
_FLAG_NAME_TRACE_BEFORE_OPS = 'trace_before_included_ops'
_FLAG_NAME_TRACE_AFTER_OPS = 'trace_after_included_ops'
_FLAG_NAME_SUBMODE = 'submode'
_FLAG_NAME_INCLUDE_LESS_INTERESTING_OPS = 'include_less_interesting_ops'
_FLAG_NAME_EXCLUDED_OPNAMES = 'excluded_opnames'
_FLAG_NAME_EXCLUDED_OPTYPES = 'excluded_optypes'
_FLAG_NAME_INCLUDED_OPNAMES = 'included_opnames'
_FLAG_NAME_INCLUDED_OPTYPES = 'included_optypes'
_FLAG_NAME_TRACE_DIR = 'trace_dir'
_FLAG_NAME_REPORT_FILE = 'report_file'
_FLAG_NAME_USE_TEST_UNDECLARED_OUTPUTS_DIR = 'use_test_undeclared_outputs_dir'
_FLAG_NAME_OP_RANGE = 'op_range'
# Folder to dump the pre (before tensor tracer updates) and post graphs (after
# tensor tracer updates).
_FLAG_DUMP_BEFORE_AFTER_GRAPHS = 'dump_graphs'
_OP_RANGE_PAT = re.compile(r'(\d+):(\d+)')
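# Example (illustrative): setting --op_range=10:100 restricts tracing to ops
# whose index in graph.get_operations() falls within [10, 100]; see
# TensorTracer._set_op_range() below.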
_OUTPUT_STREAM_ESCAPE = 'file://'
_TEST_UNDECLARED_OUTPUTS_DIR_ENV_VAR = 'TEST_UNDECLARED_OUTPUTS_DIR'
_TENSOR_TRACER_COLLECTION = 'tensor_tracer_variables'
_TENSOR_TRACER_CHECKPOINT = 'tensor_tracer_checkpoint'
_TRACE_FILE_NAME = 'trace.all'
_COMPACT_TRACE_FILE_PREFIX = 'compact_trace.'
_COMPACT_TRACE_ENTRY_INIT_VALUE = -1.0
_TENSOR_TRACER_STORAGE = 'tensor_tracer_storage'
_TENSOR_VALUES_CACHE = 'tensor_values_cache'
_REPLICA_ID_TAG = '#replica-id: '
def tensor_tracepoint(tensor, checkpoint_name):
"""Adds a checkpoint with the given checkpoint name for the given tensor.
The tensor will be added to the list of tensors that will be traced by the
tensor tracer.
Args:
tensor: the tensor object for which the tracing is requested.
checkpoint_name: a string name for the checkpoint. This name has to be a
      unique name if used within model comparison. Tensors that share the
      same checkpoint identifier are compared in model comparison.
Returns:
The provided tensor.
"""
tensor.graph.get_collection(_TENSOR_TRACER_COLLECTION)
tensor.graph.add_to_collection(_TENSOR_TRACER_COLLECTION,
(tensor, checkpoint_name))
return tensor
def keras_layer_tracepoint(layer, checkpoint_name):
"""An interface for adding the tensor outputs of a keras layer.
Encapsulates tensor_tracepoint.
Args:
layer: A keras layer.
checkpoint_name: a string name for the checkpoint. This name has to be a
      unique name if used within model comparison. Tensors that share the
      same checkpoint identifier are compared in model comparison.
Returns:
The provided layer.
"""
try:
outputs = layer.output
if tensor_util.is_tensor(outputs):
tensor_tracepoint(outputs, '%s' % (checkpoint_name))
else:
idx = 0
for output_tensor in outputs:
        if tensor_util.is_tensor(output_tensor):
tensor_tracepoint(output_tensor, '%s_%d' % (checkpoint_name, idx))
idx += 1
except AttributeError:
pass
except RuntimeError:
pass
return layer
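# A minimal usage sketch for the two tracepoint helpers above (illustrative
# only; the layer and tensor names are made up):
#   dense = tf.keras.layers.Dense(10, name='logits')
#   logits = dense(features)
#   keras_layer_tracepoint(dense, 'logits_ckpt')  # trace every layer output
#   loss = tensor_tracepoint(loss, 'loss_ckpt')   # trace a single tensor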
def _trace_files_need_precreated(output_dir):
"""Return True if trace files must be pre-created by users."""
if not output_dir.startswith('/'):
return False
if len(output_dir) < 5:
return False
if output_dir[2] != 'n':
return False
if output_dir[3] != 's':
return False
if output_dir[1] != 'c':
return False
if output_dir[4] != '/':
return False
return True
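# Note: the character checks above amount to testing for a '/cns/' path
# prefix, i.e. an output filesystem where the trace files are expected to
# exist before they are written to (see _check_trace_files below).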
def _get_tensor_values_cache(graph=None):
"""Returns the variable that implements tensor-value caching."""
graph = graph or ops.get_default_graph()
collection = graph.get_collection(_TENSOR_TRACER_STORAGE)
if len(collection) == 1:
return collection[0]
elif not collection:
raise RuntimeError('%s has not been created'%_TENSOR_VALUES_CACHE)
else:
raise RuntimeError('Multiple %s created'%_TENSOR_VALUES_CACHE)
def _create_tensor_values_cache(graph, num_tensors):
"""Creates a variable as the cache to store intermediate tensor values."""
graph = graph or ops.get_default_graph()
# Create in proper graph and base name_scope.
with graph.as_default() as g, g.name_scope(None):
return variable_scope.get_variable(
_TENSOR_VALUES_CACHE,
shape=[num_tensors],
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
_COMPACT_TRACE_ENTRY_INIT_VALUE),
trainable=False,
use_resource=True,
collections=[_TENSOR_TRACER_STORAGE, ops.GraphKeys.LOCAL_VARIABLES])
class TensorTracer(object):
"""A software construct for tracing tensor values in a TF graph on TPU.
This utility is disabled by default. It can be enabled by setting
the TENSOR_TRACER_FLAGS env variable as:
export TENSOR_TRACER_FLAGS="--enable=1"
If it is enabled, it will trace the output tensor values of
selected Ops in the graph. It has two outputs: (1) the traces and (2)
a report. The traces are dumped to a specified local file on the TPU
host. The report is printed to the log.info of the TPU job.
By passing options via the env variable, users can change:
(1) the trace mode (e.g., detecting NaN/Inf, printing partial or
full tensor values)
(2) which Ops to be traced (via op.name or op.type)
(3) output trace file path.
"""
# The set of graphs that are rewritten by tensor tracer.
_traced_graphs = set()
@staticmethod
def _match_next_flag(flags, pos):
"""Returns the match for the next TensorTracer flag.
Args:
flags: a string that contains the flags.
pos: where in flags to start the search.
Returns:
A pair where the first element is the regular-expression
match found and the second element indicates if the match
has a value.
"""
match = _FLAG_DOUBLE_QUOTE_PAT.match(flags, pos)
if match:
return match, True
match = _FLAG_SINGLE_QUOTE_PAT.match(flags, pos)
if match:
return match, True
match = _FLAG_NO_QUOTE_PAT.match(flags, pos)
if match:
return match, True
match = _FLAG_NO_EQUAL_PAT.match(flags, pos)
if match:
# The flag is found but is not given a value.
return match, False
# The flag is not found.
return None, False
@staticmethod
def validate_flag_names():
"""Validates if the TensorTrace flags passed are valid."""
valid_flag_names = [_FLAG_NAME_ENABLE, _FLAG_NAME_TRACE_MODE,
_FLAG_NAME_USE_COMPACT_TRACE,
_FLAG_NAME_TRACE_SCALAR_OPS,
_FLAG_NAME_TRACE_BEFORE_OPS,
_FLAG_NAME_TRACE_AFTER_OPS,
_FLAG_NAME_SUBMODE,
_FLAG_NAME_EXCLUDED_OPNAMES,
_FLAG_NAME_EXCLUDED_OPTYPES,
_FLAG_NAME_INCLUDED_OPNAMES,
_FLAG_NAME_INCLUDED_OPTYPES,
_FLAG_NAME_TRACE_DIR,
_FLAG_NAME_REPORT_FILE,
_FLAG_NAME_USE_TEST_UNDECLARED_OUTPUTS_DIR,
_FLAG_NAME_INCLUDE_LESS_INTERESTING_OPS,
_FLAG_NAME_OP_RANGE,
_FLAG_DUMP_BEFORE_AFTER_GRAPHS]
tensor_tracer_flags = os.environ.get(_FLAGS_ENV_VAR)
if not tensor_tracer_flags:
return
pos = 0
while True:
match, _ = TensorTracer._match_next_flag(tensor_tracer_flags, pos)
if not match:
break
flag_name = match.group(1)
if flag_name not in valid_flag_names:
raise ValueError(
'The flag name "%s" passed via the environment variable "%s" '
'is invalid. Valid flag names are:'
'\n%s'%(flag_name, _FLAGS_ENV_VAR, valid_flag_names))
pos = match.end()
@staticmethod
def print_flag_values():
"""Prints all TensorTracer flags passed via environment variables."""
tensor_tracer_flags = os.environ.get(_FLAGS_ENV_VAR)
if not tensor_tracer_flags:
return 'Env variable "%s" is not set'%_FLAGS_ENV_VAR
result = 'Env variable "%s" is set to "%s"\n'%(_FLAGS_ENV_VAR,
tensor_tracer_flags)
result += 'Individual flag value:\n'
pos = 0
while True:
match, has_value = TensorTracer._match_next_flag(
tensor_tracer_flags, pos)
if not match:
break
flag_name = match.group(1)
if has_value:
flag_value = match.group(2)
else:
flag_value = None
result += ' %s: %s\n'%(flag_name, flag_value)
pos = match.end()
result += '\n'
return result
@staticmethod
def flag_value_as_int(wanted_flag_name, default_value):
"""Returns the int value of a TensorTracer flag.
Args:
wanted_flag_name: the name of the flag we are looking for.
default_value: the default value for the flag, if not provided.
Returns:
the value of the flag.
Raises:
      RuntimeError: If supposedly dead code is reached.
"""
flag_int_value = default_value
found, flag_value = TensorTracer.get_flag_value(wanted_flag_name)
if found:
try:
flag_int_value = int(flag_value)
except ValueError:
        logging.warning('Cannot convert %s to int for flag %s' % (
            flag_value, wanted_flag_name))
return flag_int_value
@staticmethod
def get_flag_value(wanted_flag_name):
"""Returns the value of a TensorTracer flags.
Args:
wanted_flag_name: the name of the flag we are looking for.
Returns:
A pair where the first element indicates if the flag is
found and the second element is the value of the flag.
Raises:
      RuntimeError: If supposedly dead code is reached.
"""
tensor_tracer_flags = os.getenv(_FLAGS_ENV_VAR)
if not tensor_tracer_flags:
return False, None
pos = 0
while True:
match, has_value = TensorTracer._match_next_flag(
tensor_tracer_flags, pos)
if not match:
return False, None
flag_name = match.group(1)
if has_value:
flag_value = match.group(2)
else:
flag_value = None
if flag_name == wanted_flag_name:
return True, flag_value
pos = match.end()
raise RuntimeError('Should not reach here.')
@staticmethod
def flag_value_to_re_list(flag_name):
"""Converts list of strings to compiled RE."""
re_list = []
found, flag_value = TensorTracer.get_flag_value(flag_name)
if not found or not flag_value:
return re_list
list_of_values = flag_value.split()
for v in list_of_values:
r = re.compile(v)
re_list.append(r)
return re_list
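  # For example (illustrative), --included_opnames='dense.* bias.*' yields two
  # compiled patterns, since the flag value is split on whitespace.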
@staticmethod
def _is_flag_on(flag_name):
"""Returns True if the given flag is on."""
found, flag_value = TensorTracer.get_flag_value(flag_name)
if not found:
return False
if flag_value is None:
return True
# Depends on the flag value.
flag_value = flag_value.lower()
enabled = flag_value in ['1', 't', 'true', 'y', 'yes']
return enabled
@staticmethod
def is_enabled():
"""Returns True if TensorTracer is enabled."""
return TensorTracer._is_flag_on(_FLAG_NAME_ENABLE)
@staticmethod
def use_test_undeclared_outputs_dir():
"""Decides the output directory of the report and trace files.
Args:
None.
Returns:
True if the output files should be written to the
test-undeclared-outputs-directory defined via an
env variable.
"""
return TensorTracer._is_flag_on(
_FLAG_NAME_USE_TEST_UNDECLARED_OUTPUTS_DIR)
@staticmethod
def use_compact_trace():
return TensorTracer._is_flag_on(
_FLAG_NAME_USE_COMPACT_TRACE)
@staticmethod
def check_device_type(device_type):
"""Checks if the given device type is valid."""
if device_type not in [_DEVICE_TYPE_TPU, _DEVICE_TYPE_CPU]:
raise ValueError('Invalid device_type "%s"'%device_type)
@staticmethod
def check_trace_mode(trace_mode):
"""Checks if the given trace mode is valid."""
valid_trace_modes = [_TRACE_MODE_NAN_INF, _TRACE_MODE_PART_TENSOR,
_TRACE_MODE_FULL_TENSOR, _TRACE_MODE_NORM,
_TRACE_MODE_MAX_ABS]
if trace_mode not in valid_trace_modes:
      raise ValueError('Invalid trace mode "%s" given to the Tensor_Tracer. '
                       'Valid trace modes are: %s'%(trace_mode,
                                                    valid_trace_modes))
@staticmethod
def check_submode(submode):
"""Checks if the given submode is valid."""
if not submode:
return
valid_submodes = [_SUBMODE_DETAILED, _SUBMODE_BRIEF]
if submode not in valid_submodes:
      raise ValueError('Invalid submode "%s" given to the Tensor_Tracer. '
                       'Valid submodes are: %s'%(submode,
                                                 valid_submodes))
@staticmethod
def loop_cond_op(op):
return op.type in ('LoopCond', 'RefLoopCond')
@staticmethod
def while_loop_op(op):
"""Returns true if op is one of the special ops of in a while loop.
Args:
op: A tf.Operation.
Returns:
True if the given op is one of [Switch, Merge, Enter, Exit,
NextIteration, LoopCond], which are all building blocks for TF while
loops.
"""
return (control_flow_util.IsLoopSwitch(op) or
control_flow_util.IsLoopMerge(op) or
control_flow_util.IsLoopEnter(op) or
control_flow_util.IsLoopExit(op) or
TensorTracer.loop_cond_op(op) or
op.type in ('RefNextIteration', 'NextIteration'))
@staticmethod
def unsafe_op(op):
"""Returns True if this op is not safe to be traced."""
if control_flow_util.IsInCond(op):
return True
# Reasons for not including following op types:
# Assign: cause incorrect result with CPU tracing.
if op.type in ['Assign']:
return True
return False
@staticmethod
def device_mismatch(device_type, op):
if device_type == _DEVICE_TYPE_TPU:
# pylint: disable=protected-access
return tpu._TPU_REPLICATE_ATTR not in op.node_def.attr
# pylint: enable=protected-access
return False
@staticmethod
def unsafe_scalar_trace(op):
"""Return true if scalar output tensor from Op is not safe to be traced."""
    # Tracing the following causes a cycle in the graph on TPU.
if op.type in ['LoopCond', 'Enter', 'Merge', 'Const',
'Switch', 'Less', 'ReadVariableOp']:
return True
    # Tracing the following will cause casting issues with the norm tracing
    # mode, or other compilation issues on CPU.
if op.type in ['VarHandleOp', 'IteratorToStringHandle',
'IteratorGetNext', 'OneShotIterator',
'IteratorV2', 'MakeIterator',
'BatchDatasetV2', 'MapDataset',
'FixedLengthRecordDataset', 'TakeDataset', 'ZipDataset',
'Placeholder', 'PlaceholderWithDefault', 'StridedSlice']:
return True
return False
@staticmethod
def less_interesting_op(op):
"""Returns True if the given Op is not an interesting one to be traced."""
found, _ = TensorTracer.get_flag_value(
_FLAG_NAME_INCLUDE_LESS_INTERESTING_OPS)
if found:
      # The user forced inclusion of all ops.
      return False
    # The following ops are highly unlikely to cause bugs.
return op.type in ['Const', 'Identity', 'Cast', 'Shape']
@staticmethod
def reason(op_idx, details):
"""Returns reason why the Op at op_idx is traced or not."""
return '%d %s'%(op_idx, details)
@staticmethod
def topological_sort(g):
"""Performs topological sort on the given graph.
Args:
g: the graph.
Returns:
A pair where the first element indicates if the topological
sort succeeded (True if there is no cycle found; False if a
cycle is found) and the second element is either the sorted
list of nodes or the cycle of nodes found.
"""
def _is_loop_edge(op):
"""Returns true if the op is the end of a while-loop creating a cycle."""
return op.type in ['NextIteration']
def _in_op_degree(op):
"""Returns the number of incoming edges to the given op.
The edge calculation skips the edges that come from 'NextIteration' ops.
NextIteration creates a cycle in the graph. We break cycles by treating
this op as 'sink' and ignoring all outgoing edges from it.
Args:
op: Tf.Operation
Returns:
the number of incoming edges.
"""
count = 0
      for inp in op.control_inputs + [in_tensor.op for in_tensor in op.inputs]:
        if not _is_loop_edge(inp):
count += 1
return count
sorted_ops = []
op_in_degree = {op: _in_op_degree(op) for op in g.get_operations()}
frontier = [op for (op, degree) in op_in_degree.items() if degree == 0]
while frontier:
op = frontier.pop()
# Remove the op from graph, and remove its outgoing edges.
sorted_ops.append(op)
if _is_loop_edge(op):
continue
# pylint: disable=protected-access
consumers = list(op._control_outputs)
# pylint: enable=protected-access
for out_tensor in op.outputs:
consumers += [consumer_op for consumer_op in out_tensor.consumers()]
for consumer in consumers:
# For each deleted edge shift the bucket of the vertex.
op_in_degree[consumer] -= 1
if op_in_degree[consumer] == 0:
frontier.append(consumer)
if op_in_degree[consumer] < 0:
raise ValueError('consumer:%s degree mismatch'%consumer.name)
left_ops = set([op for (op, degree) in op_in_degree.items() if degree > 0])
if left_ops:
return (False, left_ops)
else:
assert len(g.get_operations()) == len(sorted_ops)
return (True, sorted_ops)
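  # The sort above is Kahn's algorithm with 'NextIteration' back-edges
  # ignored, so graphs that contain TF while-loops can still be sorted.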
@staticmethod
def _make_op_and_tensor_maps(op_list):
"""Creates various maps and lists from op_list.
Args:
op_list: a list of Ops
Returns:
opname_idx_map: a map from Op's name to its index in op_list.
tensor_list: a list of output tensors of the Ops in op_list.
tensorname_idx_map: a map from output tensor name to its index
in tensor_list.
"""
opname_idx_map = {}
tensor_list = []
tensorname_idx_map = {}
for op_id, op in enumerate(op_list):
if op.name in opname_idx_map:
raise ValueError('Duplicated Op name: %s'%op.name)
opname_idx_map[op.name] = op_id
for output_tensor in op.outputs:
if output_tensor.name not in tensorname_idx_map:
tensor_list.append(output_tensor)
tensorname_idx_map[output_tensor.name] = len(tensor_list)-1
return (opname_idx_map, tensor_list, tensorname_idx_map)
def __init__(self):
"""Initializes a TensorTracer.
Sets the various member fields from the flags (if given) or the defaults.
"""
self._version = 'use-outside-compilation'
self._device_type = None
TensorTracer.validate_flag_names()
found, self._trace_mode = TensorTracer.get_flag_value(_FLAG_NAME_TRACE_MODE)
if not found or not self._trace_mode:
self._trace_mode = _TRACE_MODE_NAN_INF
TensorTracer.check_trace_mode(self._trace_mode)
found, self._submode = TensorTracer.get_flag_value(_FLAG_NAME_SUBMODE)
if not found or not self._submode:
self._submode = _SUBMODE_DETAILED
TensorTracer.check_submode(self._submode)
self._part_tensor_size = _TRACE_MODE_PART_TENSOR_SIZE
self._instrument_records = {}
self._set_trace_dir()
self._set_report_file()
self._set_op_range()
self._set_excluded_opnames()
self._set_excluded_optypes()
self._set_included_opnames()
self._set_included_optypes()
self._num_replicas = None
self._num_replicas_per_host = None
self._num_hosts = None
self._replica_id = None
self._included_op_full_names = set()
self._trace_scalar_ops = TensorTracer._is_flag_on(
_FLAG_NAME_TRACE_SCALAR_OPS)
    # _trace_ops_before_included and _trace_ops_after_included denote the
    # depth of tracing relative to the ops given in --included_opnames or
    # --included_optypes.
# For example, in the below graph
# op1 --> op2 --> op3 --> op4 --> op5
# If --included_opnames=op3 then only op3 will be traced.
# If also --trace_before_included_ops=2 (_trace_ops_before_included), then
# op1 and op2 will be traced as they are at most 2 hops apart from an
# included op. Similarly, if --trace_after_included_ops=2, then op4 and op5
# will also be traced.
self._trace_ops_before_included = TensorTracer.flag_value_as_int(
_FLAG_NAME_TRACE_BEFORE_OPS, 0)
self._trace_ops_after_included = TensorTracer.flag_value_as_int(
_FLAG_NAME_TRACE_AFTER_OPS, 0)
_, self._graph_dump_path = TensorTracer.get_flag_value(
_FLAG_DUMP_BEFORE_AFTER_GRAPHS)
def _add_replica_id_to_graph(self):
"""Adds nodes for computing the replica ID to the graph."""
if self._num_replicas:
with ops.control_dependencies(None):
# Uses None as dependency to run outside of TPU graph rewrites.
self._replica_id = tpu_ops.tpu_replicated_input(
list(range(self._num_replicas)),
name='tt_replica_id')
else:
self._replica_id = 'unknown'
def _set_trace_dir(self):
found, self._trace_dir = TensorTracer.get_flag_value(_FLAG_NAME_TRACE_DIR)
if found and self._trace_dir \
and TensorTracer.use_test_undeclared_outputs_dir():
      raise ValueError('Cannot use --%s and --%s at the same time'
%(_FLAG_NAME_TRACE_DIR,
_FLAG_NAME_USE_TEST_UNDECLARED_OUTPUTS_DIR))
if TensorTracer.use_test_undeclared_outputs_dir():
self._trace_dir = os.environ.get(_TEST_UNDECLARED_OUTPUTS_DIR_ENV_VAR)
def _set_report_file(self):
"""Sets the path of the output report file."""
found, self._report_file_path = TensorTracer.get_flag_value(
_FLAG_NAME_REPORT_FILE)
if found and self._report_file_path \
and TensorTracer.use_test_undeclared_outputs_dir():
if os.path.isabs(self._report_file_path):
        raise ValueError('If use_test_undeclared_outputs_dir is set, '
                         'report_file_path cannot be an absolute path (%s)'
%self._report_file_path)
outputs_dir = os.environ.get(_TEST_UNDECLARED_OUTPUTS_DIR_ENV_VAR)
self._report_file_path = os.path.join(outputs_dir,
self._report_file_path)
if not self._report_file_path:
self._report_file = None
return
    self._report_file = gfile.Open(self._report_file_path, 'w')
def _close_report_file(self):
if self._report_file:
self._report_file.close()
def _set_op_range(self):
"""Sets the index range of the Ops that we will consider tracing."""
found, op_range = TensorTracer.get_flag_value(_FLAG_NAME_OP_RANGE)
if not found or not op_range:
self._op_range = (-1, -1) # this means including all ops.
return
match = _OP_RANGE_PAT.match(op_range)
if not match:
self._op_range = (-1, -1) # this means including all ops.
return
self._op_range = (int(match.group(1)), int(match.group(2)))
def _inside_op_range(self, idx):
"""Return True if the given index is inside the selected range."""
if idx < self._op_range[0]:
return False
return self._op_range[1] < 0 or idx <= self._op_range[1]
def _set_excluded_opnames(self):
self._excluded_opname_re_list = TensorTracer.flag_value_to_re_list(
_FLAG_NAME_EXCLUDED_OPNAMES)
def _set_excluded_optypes(self):
self._excluded_optype_re_list = TensorTracer.flag_value_to_re_list(
_FLAG_NAME_EXCLUDED_OPTYPES)
def _set_included_opnames(self):
self._included_opname_re_list = TensorTracer.flag_value_to_re_list(
_FLAG_NAME_INCLUDED_OPNAMES)
def _set_included_optypes(self):
self._included_optype_re_list = TensorTracer.flag_value_to_re_list(
_FLAG_NAME_INCLUDED_OPTYPES)
def _is_user_included_op(self, op):
"""Checks whether the op is included in the tensor tracer flags.
Args:
op: tf Operation
Returns:
True, if the op is included.
An op is included if:
- Its op name is given in included_opnames
- Its op type is given in included_optypes
- The op is at most _trace_ops_before_included hops before an included op
- The op is at most _trace_ops_after_included hops after an included op
"""
def _is_op_or_any_neighbor_included(op, check_before=0, check_after=0):
"""Helper function to check if op is included or not."""
if op.name in self._included_op_full_names:
return True
for opname_re in self._included_opname_re_list:
if opname_re.match(op.name):
self._included_op_full_names.add(op.name)
return True
if check_after > 0:
for out_tensor in op.outputs:
for consumer in out_tensor.consumers():
if _is_op_or_any_neighbor_included(consumer, check_after - 1, 0):
self._included_op_full_names.add(op.name)
return True
if check_before > 0:
for input_tensor in op.inputs:
if _is_op_or_any_neighbor_included(input_tensor.op,
0,
check_before - 1):
self._included_op_full_names.add(op.name)
return True
return False
# check_after and check_before are swapped below, as below operation
# checks the distance from an arbitrary op to included ops.
return _is_op_or_any_neighbor_included(op,
self._trace_ops_after_included,
self._trace_ops_before_included)
def _is_user_excluded_op(self, op):
for opname_re in self._excluded_opname_re_list:
if opname_re.match(op.name):
return True
for optype_re in self._excluded_optype_re_list:
if optype_re.match(op.type):
return True
return False
def _use_tensor_values_cache(self):
"""Returns True if immediate tensors should be first saved to a cache."""
if self._trace_mode not in set([_TRACE_MODE_NAN_INF,
_TRACE_MODE_NORM, _TRACE_MODE_MAX_ABS]):
return False
if self._trace_dir and _trace_files_need_precreated(self._trace_dir):
return True
if TensorTracer.use_compact_trace():
return True
return False
def _save_tensor_value_to_cache_op(self, graph, cache_idx, updates):
"""Returns an Op that will save the given updates to an entry in the cache."""
cache = _get_tensor_values_cache(graph)
indices = constant_op.constant([cache_idx])
return state_ops.scatter_update(cache, indices, updates).op
def _write_report(self, content):
"""Writes the given content to the report."""
line = '%s %s'%(_TRACER_LOG_PREFIX, content)
if self._report_file:
self._report_file.write(line)
else:
logging.info(line)
def _write_config_section(self):
"""Writes the config section of the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN, _SECTION_NAME_CONFIG))
self._write_report('%s %s\n'%(_FIELD_NAME_VERSION, self._version))
self._write_report('%s %s\n'%(_FIELD_NAME_DEVICE, self._device_type))
self._write_report('%s %s\n'%(_FIELD_NAME_TRACE_MODE, self._trace_mode))
self._write_report('%s %s\n'%(_FIELD_NAME_SUBMODE, self._submode))
self._write_report('%s %s\n'%(_FIELD_NAME_NUM_REPLICAS, self._num_replicas))
self._write_report('%s %s\n'%(_FIELD_NAME_NUM_REPLICAS_PER_HOST,
self._num_replicas_per_host))
self._write_report('%s %s\n'%(_FIELD_NAME_NUM_HOSTS, self._num_hosts))
self._write_report('%s %s\n'%(_MARKER_SECTION_END, _SECTION_NAME_CONFIG))
def _write_reason_section(self):
"""Writes the reason section of the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN, _SECTION_NAME_REASON))
for key in sorted(self._instrument_records):
self._write_report('"%s" %s\n'%(key, self._instrument_records[key]))
self._write_report('%s %s\n'%(_MARKER_SECTION_END, _SECTION_NAME_REASON))
def _write_op_list_section(self, op_list):
"""Writes the Op-list section of the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN, _SECTION_NAME_OP_LIST))
self._write_report('%s %d\n'%(_FIELD_NAME_NUM_OPS, len(op_list)))
for i in range(0, len(op_list)):
op = op_list[i]
line = '%d "%s" %s'%(i, op.name, op.type)
for out_tensor in op.outputs:
if out_tensor.name not in self._tensorname_idx_map:
raise ValueError(
'out_tensor %s is not in tensorname_idx_map'%out_tensor.name)
line += ' %d'%self._tensorname_idx_map[out_tensor.name]
line += '\n'
self._write_report(line)
self._write_report('%s %s\n'%(_MARKER_SECTION_END, _SECTION_NAME_OP_LIST))
def _write_tensor_list_section(self, tensor_list, opname_idx_map):
"""Writes the tensor-list section of the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN,
_SECTION_NAME_TENSOR_LIST))
self._write_report('%s %d\n'%(_FIELD_NAME_NUM_TENSORS, len(tensor_list)))
for i in range(0, len(tensor_list)):
tensor = tensor_list[i]
line = '%d "%s"'%(i, tensor.name)
for consumer_op in tensor.consumers():
if consumer_op.name not in opname_idx_map:
raise ValueError(
'consumer_op %s is not in opname_idx_map'%consumer_op.name)
line += ' %d'%opname_idx_map[consumer_op.name]
line += '\n'
self._write_report(line)
self._write_report('%s %s\n'%(_MARKER_SECTION_END,
_SECTION_NAME_TENSOR_LIST))
def _write_cache_index_map_section(self):
"""Writes the mapping from cache index to tensor index to the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN,
_SECTION_NAME_CACHE_INDEX_MAP))
self._write_report('%s %d\n'%(_FIELD_NAME_NUM_CACHE_INDICES,
len(self._cache_idx_to_tensor_idx)))
for cache_idx in range(0, len(self._cache_idx_to_tensor_idx)):
tensor_idx = self._cache_idx_to_tensor_idx[cache_idx]
line = '%d %d\n'%(cache_idx, tensor_idx)
self._write_report(line)
self._write_report('%s %s\n'%(_MARKER_SECTION_END,
_SECTION_NAME_CACHE_INDEX_MAP))
def _write_graph_section(self, succeed, sorted_or_cycle):
"""Writes the graph section of the report."""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN, _SECTION_NAME_GRAPH))
self._write_report('%s %s\n'%(_FIELD_NAME_TOPOLOGICAL_SORT_SUCCEED,
succeed))
l = list(sorted_or_cycle)
for i in range(0, len(l)):
self._write_report('%d "%s"\n'%(i, l[i].name))
self._write_report('%s %s\n'%(_MARKER_SECTION_END, _SECTION_NAME_GRAPH))
def _preprocess_traced_tensor(self, tensor):
"""Computes NAN/Norm/Max on TPUs before sending to CPU.
Args:
tensor: The tensor to be traced.
Returns:
A tensor that should be input to the trace_function.
Raises:
RuntimeError: If the trace mode is invalid.
"""
def _detect_nan_inf(tensor):
"""Trace function for detecting any NaN/Inf in the tensor."""
if tensor.dtype.is_floating:
mask = math_ops.reduce_any(
gen_math_ops.logical_or(
gen_math_ops.is_nan(tensor), gen_math_ops.is_inf(tensor)))
output_tensor = control_flow_ops.cond(mask,
lambda: constant_op.constant(1.0),
lambda: constant_op.constant(0.0))
else:
output_tensor = constant_op.constant(0.0)
      # The output shape has to be [1]; reshape in case it is not statically known.
output_tensor = array_ops.reshape(output_tensor, [1])
return output_tensor
def _show_norm(tensor):
tensor = math_ops.cast(tensor, dtypes.float32)
output_tensor = linalg_ops.norm(tensor)
      # The output shape has to be [1]; reshape in case it is not statically known.
output_tensor = array_ops.reshape(output_tensor, [1])
return output_tensor
def _show_max_abs(tensor):
tensor = math_ops.cast(tensor, dtypes.float32)
output_tensor = math_ops.reduce_max(math_ops.abs(tensor))
zero = constant_op.constant(0, dtypes.float32)
output_tensor = gen_math_ops.maximum(zero, output_tensor)
      # The output shape has to be [1]; reshape in case it is not statically known.
output_tensor = array_ops.reshape(output_tensor, [1])
return output_tensor
if self._trace_mode == _TRACE_MODE_NAN_INF:
return _detect_nan_inf(tensor)
if self._trace_mode == _TRACE_MODE_PART_TENSOR:
return tensor
if self._trace_mode == _TRACE_MODE_FULL_TENSOR:
return tensor
if self._trace_mode == _TRACE_MODE_NORM:
return _show_norm(tensor)
if self._trace_mode == _TRACE_MODE_MAX_ABS:
return _show_max_abs(tensor)
raise RuntimeError(
'Tensor trace fun for %s is not yet implemented' % self._trace_mode)
def _make_tensor_trace_fun(self, tensor_name):
"""Makes the tensor tracing function called by outside compilation.
Args:
tensor_name: name of the tensor being traced.
Returns:
A function to be passed as the first argument to outside compilation.
Raises:
RuntimeError: If the trace mode is invalid.
"""
def _print_tensor(tensor_name, num_elements, tensor, output_tensor):
"""Prints a tensor value to a file.
Args:
tensor_name: name of the tensor being traced.
num_elements: number of elements to print (-1 means print all).
        tensor: the tensor to be returned.
        output_tensor: the tensor to be printed.
Returns:
The same tensor passed via the "tensor" argument.
Raises:
ValueError: If tensor_name is not already in
self._tensorname_idx_map.
"""
if self._submode == _SUBMODE_BRIEF:
if tensor_name not in self._tensorname_idx_map:
raise ValueError(
'Tensor name %s is not in the tensorname_idx_map'%tensor_name)
msg = '%d'%self._tensorname_idx_map[tensor_name]
else:
msg = '"%s"'%tensor_name
if self._trace_dir:
output_path = os.path.join(self._trace_dir, _TRACE_FILE_NAME)
output_stream = _OUTPUT_STREAM_ESCAPE + output_path
else:
output_stream = sys.stderr
return logging_ops.print_v2(msg, array_ops.shape(output_tensor),
'@', self._replica_id,
'\n', output_tensor, '\n',
summarize=num_elements,
output_stream=output_stream)
def _show_part_tensor(tensor):
"""Trace function for printing part of the tensor."""
return _print_tensor(tensor_name, self._part_tensor_size,
tensor, tensor)
def _show_full_tensor(tensor):
"""Trace function for printing the entire tensor."""
return _print_tensor(tensor_name, -1, tensor, tensor)
if self._trace_mode == _TRACE_MODE_PART_TENSOR:
return _show_part_tensor
# The input tensor has a shape of "[1]" for _TRACE_MODE_NAN_INF,
# _TRACE_MODE_NORM, and _TRACE_MODE_MAX_ABS, as related computations are
# performed within TPUs and only their results are transferred to CPU.
# Simply, print the full tensor for these trace modes.
if self._trace_mode in [
_TRACE_MODE_NAN_INF, _TRACE_MODE_NORM, _TRACE_MODE_FULL_TENSOR,
_TRACE_MODE_MAX_ABS
]:
return _show_full_tensor
raise RuntimeError('Tensor trace fun for %s is not yet implemented'
%self._trace_mode)
def _skip_op(self, op_id, op, user_included, user_excluded,
in_exec_path=True):
"""Returns True if we should not trace Op."""
if TensorTracer.while_loop_op(op):
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_WHILELOOP_OP)
return True
if TensorTracer.unsafe_op(op):
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_UNSAFE_OP)
return True
if TensorTracer.device_mismatch(self._device_type, op):
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_DEVICE_MISMATCH)
return True
if not in_exec_path:
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_NOT_EXECUTED)
return True
if not self._inside_op_range(op_id):
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_OUTSIDE_OP_RANGE)
return True
if TensorTracer.less_interesting_op(op):
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_LESS_INTERESTING_OP)
return True
if user_included:
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_USER_INCLUDED)
return False
if user_excluded:
self._instrument_records[op.name] = TensorTracer.reason(
op_id, _REASON_USER_EXCLUDED)
return True
return False
def _skip_tensor(self, op_id, out_tensor, user_included,
user_excluded):
"""Returns True if we should not trace out_tensor."""
# Skips a tensor if the tensor has a non-numeric type.
# Note: we cannot use check_ops.is_numeric_tensor(out_tensor)
    # because it also excludes tensors with dtypes bool and float32_ref,
    # which we actually want to trace.
non_numeric_tensor_types = set([dtypes.variant, dtypes.resource,
dtypes.string])
if out_tensor.dtype in non_numeric_tensor_types:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_NON_NUMERIC_TENSOR)
return True
# Skip a tensor if it feeds a special while loop op.
if [consumer for consumer in out_tensor.consumers() if
TensorTracer.while_loop_op(consumer)]:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_FEEDS_WHILELOOP_OP)
return True
if user_included:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_USER_INCLUDED)
return False
if user_excluded:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_USER_EXCLUDED)
return True
if not out_tensor.get_shape().is_fully_defined():
# If trace mode is nan-inf, norm or max, then the tensor will be reduced
# to a scalar before the outside compilation call.
if self._trace_mode in [
_TRACE_MODE_NAN_INF, _TRACE_MODE_NORM, _TRACE_MODE_MAX_ABS
]:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_TENSOR_GET_TRACED)
return False
else:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_DYNAMIC_SHAPE)
return True
rank = len(out_tensor.shape)
if rank < 1:
# scalar
if self._trace_scalar_ops:
if TensorTracer.unsafe_scalar_trace(out_tensor.op):
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_UNSAFE_SCALAR)
return True
else:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_SCALAR_GET_TRACED)
return False
else:
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_SKIP_SCALAR)
return True
else:
# tensor
self._instrument_records[out_tensor.name] = TensorTracer.reason(
op_id, _REASON_TENSOR_GET_TRACED)
return False
def _filter_execution_path_operations(self, operations, fetches):
"""Returns the set of ops in the execution path to compute given fetches."""
# If no fetch provided, then return all operations.
if fetches is None:
return set(operations)
# Convert to list, if a single element is provided.
if not isinstance(fetches, (list, tuple)):
fetches = [fetches]
# If a tensor is given as fetch, convert it to op.
op_fetches = []
for fetch in fetches:
if isinstance(fetch, ops.Operation):
op_fetches.append(fetch)
elif isinstance(fetch, ops.Tensor):
op_fetches.append(fetch.op)
else:
raise RuntimeError('Given fetch:%s is neither a tensor nor an op.'
%fetch)
execution_path_operations = set(op_fetches)
traverse_stack = list(op_fetches)
while True:
if not traverse_stack:
break
head_op = traverse_stack.pop()
input_ops = [tensor_input.op for tensor_input in head_op.inputs]
input_ops.extend(head_op.control_inputs)
for input_op in input_ops:
if input_op not in execution_path_operations:
# Filter out loop condition operations, tracing them causes a cycle.
# Trace only the loop-body.
if TensorTracer.loop_cond_op(input_op):
continue
execution_path_operations.add(input_op)
traverse_stack.append(input_op)
return execution_path_operations
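  # The loop above is a backwards depth-first traversal from the fetches;
  # LoopCond inputs are skipped so that only loop bodies get traced.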
def _determine_traced_tensors(self, graph, ops_in_exec_path):
"""Determines the tensors that will be traced."""
self._traced_tensorname_to_cache_idx_map = {}
self._cache_idx_to_tensor_idx = []
operations = graph.get_operations()
checkpoint_operations = self._get_checkpoints(graph)
for op_id, op in enumerate(operations):
if checkpoint_operations and op.name not in checkpoint_operations:
continue
user_included = self._is_user_included_op(op)
user_excluded = self._is_user_excluded_op(op)
in_exec_path = op in ops_in_exec_path
if self._skip_op(op_id, op, user_included, user_excluded, in_exec_path):
continue
for i in range(len(op.outputs)):
out_tensor = op.outputs[i]
if self._skip_tensor(op_id, out_tensor, user_included,
user_excluded):
continue
tensor_name = out_tensor.name
if tensor_name in self._traced_tensorname_to_cache_idx_map:
raise ValueError(
'Tensor name %s should not be already in '
'traced_tensorname_to_cache_idx_map'%tensor_name)
if tensor_name not in self._tensorname_idx_map:
raise ValueError(
'Tensor name %s is not in the tensorname_idx_map'%tensor_name)
tensor_idx = self._tensorname_idx_map[tensor_name]
cache_idx = len(self._traced_tensorname_to_cache_idx_map)
self._traced_tensorname_to_cache_idx_map[tensor_name] = cache_idx
self._cache_idx_to_tensor_idx.append(tensor_idx)
if len(self._traced_tensorname_to_cache_idx_map) != len(
self._cache_idx_to_tensor_idx):
      raise RuntimeError('len(self._traced_tensorname_to_cache_idx_map) != '
                         'len(self._cache_idx_to_tensor_idx)')
def _check_trace_files(self):
"""Checks if any requirements for trace files are satisfied."""
if not self._trace_dir:
# traces will be written to stderr. No need to check trace files.
return
if _trace_files_need_precreated(self._trace_dir):
for replica_id in range(0, self._num_replicas):
trace_file_path = os.path.join(
self._trace_dir,
_COMPACT_TRACE_FILE_PREFIX) + '%d'%replica_id
if not gfile.Exists(trace_file_path):
raise RuntimeError(
'%s must be pre-created with the '
'appropriate properties.'%trace_file_path)
else:
if not gfile.Exists(self._trace_dir):
gfile.MkDir(self._trace_dir)
if not gfile.Exists(self._trace_dir):
raise RuntimeError('Failed to create %s'%self._trace_dir)
def _pre_tracing(self, graph, fetches):
"""Work needs to be done prior to TPU or CPU tracing."""
self._check_trace_files()
operations = graph.get_operations()
(opname_idx_map, tensor_list, self._tensorname_idx_map) = (
TensorTracer._make_op_and_tensor_maps(operations))
self._write_config_section()
self._write_op_list_section(operations)
self._write_tensor_list_section(tensor_list, opname_idx_map)
# Filter out the operations that won't be executed.
# if fetches=None, then ops_in_exec_path = set(operations)
ops_in_exec_path = self._filter_execution_path_operations(operations,
fetches)
self._determine_traced_tensors(graph, ops_in_exec_path)
self._write_cache_index_map_section()
# Does the topological sort before adding any nodes to the graph.
(succeed, sorted_or_cycle) = TensorTracer.topological_sort(graph)
if self._use_tensor_values_cache():
_create_tensor_values_cache(graph,
len(self._cache_idx_to_tensor_idx))
return (ops_in_exec_path, succeed, sorted_or_cycle)
def _post_tracing(self, succeed, sorted_or_cycle):
"""Work needs to be done after TPU or CPU tracing."""
self._write_reason_section()
self._write_graph_section(succeed, sorted_or_cycle)
self._close_report_file()
def _get_checkpoints(self, graph):
"""Returns the list of Ops that produce the tensors traced with API.
Args:
graph: the graph of Ops.
Returns:
A set of operation names which should be traced.
"""
self._write_report('%s %s\n'%(_MARKER_SECTION_BEGIN,
_TENSOR_TRACER_CHECKPOINT))
checkpoint_operations = set()
tensor_tracer_variables = graph.get_collection(_TENSOR_TRACER_COLLECTION)
for (tensor, checkpoint_name) in tensor_tracer_variables:
self._write_report('%s %s\n'%(tensor.name, checkpoint_name))
checkpoint_operations.add(tensor.op.name)
self._write_report('%s %s\n'%(_MARKER_SECTION_END,
_TENSOR_TRACER_CHECKPOINT))
return checkpoint_operations
def _generate_flush_cache_op(self, graph, start_replica, on_tpu):
"""Generates an Op that will flush the cache to file.
Args:
graph: the graph of Ops
start_replica: the ID of the first replica being flushed by this Op.
on_tpu: if the graph is executed on TPU.
Returns:
The Op to flush the cache to file.
"""
def _make_flush_fun(replica_id):
"""Makes a function for flushing the cache for the given replica."""
def _fun():
"""A function that flushes the cache to a file."""
def _flush_fun(cache):
"""Flushes the cache to a file."""
if isinstance(replica_id, str):
replica_id_str = replica_id
else:
replica_id_str = '%d'%replica_id
if self._trace_dir:
output_path = os.path.join(self._trace_dir,
_COMPACT_TRACE_FILE_PREFIX) \
+ replica_id_str
output_stream = _OUTPUT_STREAM_ESCAPE + output_path
else:
output_stream = sys.stderr
new_step_line = _REPLICA_ID_TAG + replica_id_str
print_op = logging_ops.print_v2(
new_step_line, '\n',
cache, '\n',
summarize=-1,
output_stream=output_stream)
with ops.control_dependencies([print_op]):
return constant_op.constant(0).op
cache = _get_tensor_values_cache(graph)
if on_tpu:
flush_op = tpu.outside_compilation(_flush_fun, cache.value())
else:
flush_op = _flush_fun(cache.value())
with ops.control_dependencies([flush_op]):
reset_value = constant_op.constant(_COMPACT_TRACE_ENTRY_INIT_VALUE,
dtype=cache.dtype,
shape=cache.shape)
assign_op = state_ops.assign(cache, reset_value).op
with ops.control_dependencies([assign_op]):
return flush_op.outputs[0]
return _fun
def _f(replica_id):
return _make_flush_fun(replica_id)
def _eq(x):
return math_ops.equal(x, self._replica_id)
def _do_nothing():
return constant_op.constant(0)
    return control_flow_ops.case(
        {_eq(start_replica): _f(start_replica),
         _eq(start_replica + 1): _f(start_replica + 1),
         _eq(start_replica + 2): _f(start_replica + 2),
         _eq(start_replica + 3): _f(start_replica + 3),
         _eq(start_replica + 4): _f(start_replica + 4),
         _eq(start_replica + 5): _f(start_replica + 5),
         _eq(start_replica + 6): _f(start_replica + 6),
         _eq(start_replica + 7): _f(start_replica + 7)},
        default=_do_nothing,
        exclusive=True).op
def _flush_tensor_values_cache(self, graph, tensor_fetches, op_fetches,
on_tpu):
"""Flushes the intermediate tensor values in the graph to the cache.
Args:
graph: the graph of Ops
tensor_fetches: list of tensor results returned by the model_fn.
op_fetches: list of ops that are returned by the model_fn, e.g., train_op.
on_tpu: if the graph is executed on TPU.
Returns:
An identical copy of tensor_fetches.
"""
# Add a dependency to op and tensor fetches to make sure that all tracing
# ops are executed before flushing trace results.
with ops.control_dependencies(op_fetches +
[tensor.op for tensor in tensor_fetches]):
flush_cache_op_list = []
for host in range(self._num_hosts):
start_replica = host * 8
flush_op = self._generate_flush_cache_op(graph, start_replica, on_tpu)
flush_cache_op_list.append(flush_op)
return control_flow_ops.tuple(tensor_fetches,
control_inputs=flush_cache_op_list)
def _process_tensor_fetches(self, tensor_fetches):
"""Check that tensor_fetches is not empty and have valid tensors."""
# If none or empty list.
if tensor_fetches is None:
raise RuntimeError('tensor_fetches provided to tensor_tracer cannot be '
'None.')
if not isinstance(tensor_fetches, (list, tuple)):
tensor_fetches = [tensor_fetches]
elif not tensor_fetches:
raise RuntimeError('tensor_fetches provided to tensor_tracer cannot be '
'empty list.')
fetches = []
for fetch in tensor_fetches:
if isinstance(fetch, ops.Tensor):
fetches.append(fetch)
else:
raise RuntimeError('Given tensor_fetch:%s is not a tensor.' % fetch)
return fetches
def _process_op_fetches(self, op_fetches):
"""Check that op_fetches have valid ops."""
if op_fetches is None:
return []
if not isinstance(op_fetches, (list, tuple)):
op_fetches = [op_fetches]
fetches = []
for fetch in op_fetches:
if isinstance(fetch, ops.Operation):
fetches.append(fetch)
else:
logging.warning('Ignoring the given op_fetch:%s, which is not an op.' %
fetch)
return fetches
def _convert_fetches_to_input_format(self, input_fetches, current_fetches):
"""Changes current_fetches' format, so that it matches input_fetches."""
if isinstance(input_fetches, ops.Tensor):
if len(current_fetches) != 1:
raise RuntimeError('Tensor tracer input/output fetches do not match.')
return current_fetches[0]
else:
      if len(input_fetches) != len(current_fetches):
raise RuntimeError('Tensor tracer input/output fetches do not match.')
elif isinstance(input_fetches, tuple):
return tuple(current_fetches)
else:
return current_fetches
def _get_op_control_flow_context(self, op):
"""Returns the control flow of the given op.
Args:
op: tf.Operation for which the control flow context is requested.
Returns:
      op_control_flow_context: the control flow context of the given
op. If the operation type is LoopExit, returns the outer control flow
context.
"""
# pylint: disable=protected-access
op_control_flow_context = op._control_flow_context
# pylint: enable=protected-access
if control_flow_util.IsLoopExit(op):
op_control_flow_context = op_control_flow_context.outer_context
return op_control_flow_context
def _trace_execution(self, graph,
tensor_fetches,
op_fetches=None,
on_tpu=True):
"""Commong tracing function for both CPU and TPUs.
The caller function should set _device_type, _num_replicas,
_num_replicas_per_host, _num_hosts and _replica_id before calling
_trace_execution.
Args:
graph: the graph of Ops executed on the TPU.
tensor_fetches: a (list,tuple,or a single object) of tensor fetches
returned by model_fn given to session.run. Function must be provided
        with at least one tensor to fetch.
op_fetches: A list of op fetches returned by model_fn given to
session.run. op_fetches and tensor_fetches are used to determine the
nodes that will be executed. Can be None.
on_tpu: True if executing on TPU.
Returns:
tensor_fetches: an exact copy of tensor_fetches that has additional
dependencies.
Raises:
RuntimeError: If tensor_fetches is None or empty.
"""
def _cast_unsupported_dtypes(tensor):
"""Casts tensor to a supported type."""
if tensor.dtype.__eq__(dtypes.int64):
# outside-compilation doesn't support int64 input yet.
return math_ops.cast(tensor, dtypes.int32)
if tensor.dtype.__eq__(dtypes.bfloat16) or tensor.dtype.__eq__(
dtypes.float16):
# Since host can't handle bf16, convert tensor to f32.
return math_ops.cast(tensor, dtypes.float32)
return tensor
TensorTracer.check_device_type(self._device_type)
# Check in_tensor_fetches, and op_fetches and convert them to lists.
processed_t_fetches = self._process_tensor_fetches(tensor_fetches)
op_fetches = self._process_op_fetches(op_fetches)
all_fetches = op_fetches + [tensor.op for tensor in processed_t_fetches]
# Filter the set of ops that will be executed, and topological sort.
(exec_op_set, succeed, sorted_or_cycle) = self._pre_tracing(graph,
all_fetches)
tensor_fetch_set = set(processed_t_fetches)
tracing_ops = []
# pylint: disable=protected-access
current_control_flow_context = graph._get_control_flow_context()
# pylint: enable=protected-access
sorted_exec_op_list = list(exec_op_set)
sorted_exec_op_list.sort(key=lambda op: op.name)
# Trace ops only if they are in the execution path.
for op in sorted_exec_op_list:
for i in range(len(op.outputs)):
out_tensor = op.outputs[i]
tensor_name = out_tensor.name
if tensor_name not in self._traced_tensorname_to_cache_idx_map:
continue
# Create the list of consumers before calling _preprocess_traced_tensor.
# Otherwise, adding control input below, will introduce a cycle in the
# graph.
consumers = out_tensor.consumers()
# Not all consumers may be in the exec path. Filter out the consumers
# to keep the graph simpler.
consumers = [cop for cop in consumers if cop in exec_op_set]
# If there is no consumer of the tensor, there is no need to trace it;
# unless the tensor itself is one of the fetches.
is_a_fetched_tensor = out_tensor in tensor_fetch_set
if (not consumers) and (not is_a_fetched_tensor):
continue
op_control_flow_context = self._get_op_control_flow_context(op)
# pylint: disable=protected-access
graph._set_control_flow_context(op_control_flow_context)
# pylint: enable=protected-access
processed_out_tensor = self._preprocess_traced_tensor(out_tensor)
if on_tpu:
processed_out_tensor = _cast_unsupported_dtypes(processed_out_tensor)
if self._use_tensor_values_cache():
cache_idx = self._traced_tensorname_to_cache_idx_map[tensor_name]
trace_op = self._save_tensor_value_to_cache_op(graph,
cache_idx,
processed_out_tensor)
elif on_tpu:
trace_op = tpu.outside_compilation(
self._make_tensor_trace_fun(tensor_name), processed_out_tensor)
else:
trace_fun = self._make_tensor_trace_fun(tensor_name)
trace_op = trace_fun(processed_out_tensor)
if is_a_fetched_tensor:
tracing_ops.append(trace_op)
continue
# Add it to all consumers, as some consumers may not be executed if they
# are in a control flow.
for consumer_op in consumers:
# pylint: disable=protected-access
consumer_op._add_control_input(trace_op)
# pylint: enable=protected-access
# pylint: disable=protected-access
graph._set_control_flow_context(current_control_flow_context)
# pylint: enable=protected-access
if tracing_ops:
# If we are tracing a fetched tensor, their dependency is stored in
# tracing_ops.
processed_t_fetches = control_flow_ops.tuple(processed_t_fetches,
control_inputs=tracing_ops)
if self._use_tensor_values_cache():
processed_t_fetches = self._flush_tensor_values_cache(graph,
processed_t_fetches,
op_fetches,
on_tpu=on_tpu)
self._post_tracing(succeed, sorted_or_cycle)
# processed_t_fetches is a list at this point. Convert it to the same
# format as given in tensor_fetches.
return self._convert_fetches_to_input_format(tensor_fetches,
processed_t_fetches)
def trace_tpu(self, graph,
tensor_fetches,
op_fetches=None,
num_replicas=None,
num_replicas_per_host=None,
num_hosts=None):
"""Traces the tensors generated by TPU Ops in a TF graph.
Args:
graph: the graph of Ops executed on the TPU.
tensor_fetches: a (list,tuple,or a single object) of tensor fetches
returned by model_fn given to session.run. Function must be provided
        with at least one tensor to fetch.
op_fetches: A list of op fetches returned by model_fn given to
session.run. op_fetches and tensor_fetches are used to determine the
nodes that will be executed. Can be None.
num_replicas: number of replicas used on the TPU.
num_replicas_per_host: number of replicas per TPU host.
num_hosts: total number of TPU hosts.
Returns:
tensor_fetches: an exact copy of tensor_fetches that has additional
dependencies.
Raises:
RuntimeError: If num_replicas_per_host > 8.
RuntimeError: If tensor_fetches is None or empty.
"""
if graph in TensorTracer._traced_graphs:
logging.warning('Graph is already rewritten with tensor tracer, ignoring '
'multiple calls.')
return tensor_fetches
else:
TensorTracer._traced_graphs.add(graph)
self._device_type = _DEVICE_TYPE_TPU
self._num_replicas = num_replicas
self._num_replicas_per_host = num_replicas_per_host
self._num_hosts = num_hosts
if self._num_replicas is not None:
if self._num_replicas_per_host is None:
self._num_replicas_per_host = 8
if self._num_hosts is None:
self._num_hosts = num_replicas // self._num_replicas_per_host + \
(num_replicas % self._num_replicas_per_host > 0)
if self._num_replicas_per_host > 8:
# Checks for the assumption in _generate_flush_cache_op().
raise RuntimeError('num_replicas_per_host (%d) is '
'greater than 8'%self._num_replicas_per_host)
if self._graph_dump_path:
graph_io.write_graph(graph, self._graph_dump_path,
'graph_before_tt.pbtxt')
with graph.as_default():
self._add_replica_id_to_graph()
tensor_fetches = self._trace_execution(graph, tensor_fetches, op_fetches,
on_tpu=True)
if self._graph_dump_path:
graph_io.write_graph(graph, self._graph_dump_path,
'graph_after_tt.pbtxt')
return tensor_fetches
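  # A minimal TPU usage sketch (illustrative; the fetch names are made up):
  #   tt = TensorTracer()
  #   if TensorTracer.is_enabled():
  #     loss = tt.trace_tpu(ops.get_default_graph(), loss,
  #                         op_fetches=train_op, num_replicas=8)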
def trace_cpu(self, graph, tensor_fetches, op_fetches=None):
"""Traces the tensors generated by CPU Ops in a TF graph.
Args:
graph: the graph of Ops executed on the CPU.
tensor_fetches: a (list,tuple,or a single object) of tensor fetches
returned by model_fn given to session.run. Function must be provided
        with at least one tensor to fetch.
op_fetches: A list of op fetches returned by model_fn given to
session.run. op_fetches and tensor_fetches are used to determine the
nodes that will be executed. Can be None.
Returns:
tensor_fetches: an exact copy of tensor_fetches that has additional
dependencies.
Raises:
RuntimeError: If tensor_fetches is None or empty.
"""
if graph in TensorTracer._traced_graphs:
logging.warning('Graph is already rewritten with tensor tracer, ignoring '
'multiple calls.')
return tensor_fetches
else:
TensorTracer._traced_graphs.add(graph)
self._device_type = _DEVICE_TYPE_CPU
self._num_replicas = 1
self._num_replicas_per_host = 1
self._num_hosts = 1
self._replica_id = 0
if self._graph_dump_path:
graph_io.write_graph(graph, self._graph_dump_path,
'graph_before_tt.pbtxt')
with graph.as_default():
tensor_fetches = self._trace_execution(graph, tensor_fetches, op_fetches,
on_tpu=False)
if self._graph_dump_path:
graph_io.write_graph(graph, self._graph_dump_path,
'graph_after_tt.pbtxt')
return tensor_fetches
|
apache-2.0
| 968,700,902,319,309,300
| 37.911612
| 82
| 0.626032
| false
| 3.658068
| false
| false
| false
|
pamfilos/invenio
|
modules/websearch/lib/websearchadminlib.py
|
1
|
168088
|
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0301
"""Invenio WebSearch Administrator Interface."""
__revision__ = "$Id$"
import cgi
import random
import time
import sys
from invenio.dateutils import strftime
import os
import traceback
if sys.hexversion < 0x2040000:
# pylint: disable=W0622
from sets import Set as set
# pylint: enable=W0622
from invenio.config import \
CFG_CACHEDIR, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_URL,\
CFG_WEBCOMMENT_ALLOW_COMMENTS, \
CFG_WEBSEARCH_SHOW_COMMENT_COUNT, \
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBSEARCH_SHOW_REVIEW_COUNT, \
CFG_BIBRANK_SHOW_CITATION_LINKS, \
CFG_INSPIRE_SITE, \
CFG_CERN_SITE
from invenio.bibrankadminlib import \
write_outcome, \
modify_translations, \
get_def_name, \
get_name, \
get_languages, \
addadminbox, \
tupletotable, \
createhiddenform
from invenio.dbquery import \
run_sql, \
get_table_update_time
from invenio.websearch_external_collections import \
external_collections_dictionary, \
external_collection_sort_engine_by_name, \
external_collection_get_state, \
external_collection_get_update_state_list, \
external_collection_apply_changes
from invenio.websearch_external_collections_utils import \
get_collection_descendants
from invenio.websearch_external_collections_config import CFG_EXTERNAL_COLLECTION_STATES_NAME
#from invenio.bibformat_elements import bfe_references
#from invenio.bibformat_engine import BibFormatObject
from invenio.bibdocfile import BibRecDocs
from invenio.messages import gettext_set_language
#from invenio.bibrank_citation_searcher import get_cited_by
from invenio.access_control_admin import acc_get_action_id
from invenio.access_control_config import VIEWRESTRCOLL
from invenio.errorlib import register_exception
from invenio.intbitset import intbitset
from invenio.bibrank_citation_searcher import get_cited_by, get_cited_by_count
from invenio.bibrecord import record_get_field_instances
def getnavtrail(previous = ''):
"""Get the navtrail"""
navtrail = """<a class="navtrail" href="%s/help/admin">Admin Area</a> """ % (CFG_SITE_URL,)
navtrail = navtrail + previous
return navtrail
def fix_collection_scores():
"""
    Re-calculate and re-normalize the scores of the collection relationships.
"""
for id_dad in intbitset(run_sql("SELECT id_dad FROM collection_collection")):
for index, id_son in enumerate(run_sql("SELECT id_son FROM collection_collection WHERE id_dad=%s ORDER BY score DESC", (id_dad, ))):
run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s AND id_son=%s", (index * 10 + 10, id_dad, id_son[0]))
def perform_modifytranslations(colID, ln, sel_type='', trans=[], confirm=-1, callback='yes'):
"""Modify the translations of a collection
sel_type - the nametype to modify
trans - the translations in the same order as the languages from get_languages()"""
output = ''
subtitle = ''
sitelangs = get_languages()
if sel_type in ('r', 'v', 'l'):
table = 'collectionbox'
identifier_column = "id_collection"
else:
table = 'collection'
identifier_column = None
if type(trans) is str:
trans = [trans]
if confirm in ["2", 2] and colID:
finresult = modify_translations(colID, sitelangs, sel_type, trans, table, identifier_column)
col_dict = dict(get_def_name('', "collection"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="3">3. Modify translations for collection '%s'</a> <small>[<a href="%s/help/admin/websearch-admin-guide#3.3">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
if sel_type == '':
sel_type = get_col_nametypes()[0][0]
header = ['Language', 'Translation']
actions = []
types = get_col_nametypes()
types.extend([('v', '"Focus on" box'), ('r', '"Narrow by" box'), ('l', '"Latest additions" box')])
if len(types) > 1:
text = """
<span class="adminlabel">Name type</span>
<select name="sel_type" class="admin_w200">
"""
for (key, value) in types:
text += """<option value="%s" %s>%s""" % (key, key == sel_type and 'selected="selected"' or '', value)
trans_names = get_name(colID, ln, key, "collection")
if trans_names and trans_names[0][0]:
text += ": %s" % trans_names[0][0]
text += "</option>"
text += """</select>"""
output += createhiddenform(action="modifytranslations#3",
text=text,
button="Select",
colID=colID,
ln=ln,
confirm=0)
if confirm in [-1, "-1", 0, "0"]:
trans = []
for (key, value) in sitelangs:
try:
trans_names = get_name(colID, key, sel_type, table, identifier_column)
trans.append(trans_names[0][0])
except StandardError, e:
trans.append('')
for nr in range(0, len(sitelangs)):
actions.append(["%s" % (sitelangs[nr][1],)])
actions[-1].append('<input type="text" name="trans" size="30" value="%s"/>' % trans[nr])
text = tupletotable(header=header, tuple=actions)
output += createhiddenform(action="modifytranslations#3",
text=text,
button="Modify",
colID=colID,
sel_type=sel_type,
ln=ln,
confirm=2)
if sel_type and len(trans) and confirm in ["2", 2]:
output += write_outcome(finresult)
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifytranslations", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
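# Usage sketch (hypothetical values, not part of the original module): the
# admin web interface calls this handler with request parameters, e.g.
#
#     html = perform_modifytranslations(colID=2, ln='en', sel_type='ln',
#                                       trans=['Articles', 'Articoli'],
#                                       confirm=2)
#
# where 'ln' is an assumed name type and the translations are passed in the
# same order as get_languages() returns the site languages.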
def perform_modifyrankmethods(colID, ln, func='', rnkID='', confirm=0, callback='yes'):
"""Modify which rank methods is visible to the collection
func - remove or add rank method
rnkID - the id of the rank method."""
output = ""
subtitle = ""
col_dict = dict(get_def_name('', "collection"))
rnk_dict = dict(get_def_name('', "rnkMETHOD"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
if func in ["0", 0] and confirm in ["1", 1]:
finresult = attach_rnk_col(colID, rnkID)
elif func in ["1", 1] and confirm in ["1", 1]:
finresult = detach_rnk_col(colID, rnkID)
subtitle = """<a name="9">9. Modify rank options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.9">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """
<dl>
    <dt>The rank methods enabled for the collection '%s' are:</dt>
""" % col_dict[colID]
rnkmethods = get_col_rnk(colID, ln)
output += """<dd>"""
if not rnkmethods:
output += """No rank methods"""
else:
for id, name in rnkmethods:
output += """%s, """ % name
output += """</dd>
</dl>
"""
rnk_list = get_def_name('', "rnkMETHOD")
rnk_dict_in_col = dict(get_col_rnk(colID, ln))
rnk_list = filter(lambda x: not rnk_dict_in_col.has_key(x[0]), rnk_list)
if rnk_list:
text = """
<span class="adminlabel">Enable:</span>
<select name="rnkID" class="admin_w200">
<option value="-1">- select rank method -</option>
"""
for (id, name) in rnk_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["0", 0] and confirm in ["0", 0] and int(rnkID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifyrankmethods#9",
text=text,
button="Enable",
colID=colID,
ln=ln,
func=0,
confirm=1)
if confirm in ["1", 1] and func in ["0", 0] and int(rnkID) != -1:
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["0", 0]:
output += """<b><span class="info">Please select a rank method.</span></b>"""
coll_list = get_col_rnk(colID, ln)
if coll_list:
text = """
<span class="adminlabel">Disable:</span>
<select name="rnkID" class="admin_w200">
<option value="-1">- select rank method-</option>
"""
for (id, name) in coll_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["1", 1] and confirm in ["0", 0] and int(rnkID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifyrankmethods#9",
text=text,
button="Disable",
colID=colID,
ln=ln,
func=1,
confirm=1)
if confirm in ["1", 1] and func in ["1", 1] and int(rnkID) != -1:
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["1", 1]:
output += """<b><span class="info">Please select a rank method.</span></b>"""
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifyrankmethods", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
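# Usage sketch (hypothetical ids): attaching rank method 1 to collection 2
# goes through func=0 with confirm=1; func=1 detaches it instead:
#
#     html = perform_modifyrankmethods(colID=2, ln='en', func=0, rnkID='1',
#                                      confirm=1)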
def perform_addcollectiontotree(colID, ln, add_dad='', add_son='', rtype='', mtype='', callback='yes', confirm=-1):
"""Form to add a collection to the tree.
add_dad - the dad to add the collection to
add_son - the collection to add
    rtype - add it as a regular or virtual relationship
mtype - add it to the regular or virtual tree."""
output = ""
output2 = ""
subtitle = """Attach collection to tree <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.2">?</a>]</small>""" % (CFG_SITE_URL)
col_dict = dict(get_def_name('', "collection"))
if confirm not in [-1, "-1"] and not (add_son and add_dad and rtype):
output2 += """<b><span class="info">All fields must be filled.</span></b><br /><br />
"""
elif add_son and add_dad and rtype:
add_son = int(add_son)
add_dad = int(add_dad)
if confirm not in [-1, "-1"]:
if add_son == add_dad:
output2 += """<b><span class="info">Cannot add a collection as a pointer to itself.</span></b><br /><br />
"""
elif check_col(add_dad, add_son):
res = add_col_dad_son(add_dad, add_son, rtype)
output2 += write_outcome(res)
if res[0] == 1:
output2 += """<b><span class="info"><br /> The collection will appear on your website after the next webcoll run. You can either run it manually or wait until bibsched does it for you.</span></b><br /><br />
"""
else:
output2 += """<b><span class="info">Cannot add the collection '%s' as a %s subcollection of '%s' since it will either create a loop, or the association already exists.</span></b><br /><br />
""" % (col_dict[add_son], (rtype=="r" and 'regular' or 'virtual'), col_dict[add_dad])
add_son = ''
add_dad = ''
rtype = ''
tree = get_col_tree(colID)
col_list = col_dict.items()
col_list.sort(compare_on_val)
output = show_coll_not_in_tree(colID, ln, col_dict)
text = """
<span class="adminlabel">Attach collection:</span>
<select name="add_son" class="admin_w200">
<option value="">- select collection -</option>
"""
for (id, name) in col_list:
if id != colID:
text += """<option value="%s" %s>%s</option>""" % (id, str(id)==str(add_son) and 'selected="selected"' or '', name)
text += """
</select><br />
<span class="adminlabel">to parent collection:</span>
<select name="add_dad" class="admin_w200">
<option value="">- select parent collection -</option>
"""
for (id, name) in col_list:
text += """<option value="%s" %s>%s</option>
""" % (id, str(id)==add_dad and 'selected="selected"' or '', name)
text += """</select><br />
"""
text += """
<span class="adminlabel">with relationship:</span>
<select name="rtype" class="admin_w200">
<option value="">- select relationship -</option>
<option value="r" %s>Regular (Narrow by...)</option>
<option value="v" %s>Virtual (Focus on...)</option>
</select>
""" % ((rtype=="r" and 'selected="selected"' or ''), (rtype=="v" and 'selected="selected"' or ''))
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/addcollectiontotree" % CFG_SITE_URL,
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
output += output2
#output += perform_showtree(colID, ln)
body = [output]
if callback:
return perform_index(colID, ln, mtype="perform_addcollectiontotree", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
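# Usage sketch (hypothetical ids): attaching collection 5 under collection 1
# as a regular ("Narrow by...") subcollection; rtype='v' would make it a
# virtual ("Focus on...") subcollection instead:
#
#     html = perform_addcollectiontotree(colID=1, ln='en', add_dad='1',
#                                        add_son='5', rtype='r', confirm=1)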
def perform_addcollection(colID, ln, colNAME='', dbquery='', callback="yes", confirm=-1):
"""form to add a new collection.
colNAME - the name of the new collection
dbquery - the dbquery of the new collection"""
output = ""
subtitle = """Create new collection <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.1">?</a>]</small>""" % (CFG_SITE_URL)
text = """
<span class="adminlabel">Default name</span>
<input class="admin_w200" type="text" name="colNAME" value="%s" /><br />
""" % colNAME
output = createhiddenform(action="%s/admin/websearch/websearchadmin.py/addcollection" % CFG_SITE_URL,
text=text,
colID=colID,
ln=ln,
button="Add collection",
confirm=1)
if colNAME and confirm in ["1", 1]:
res = add_col(colNAME, '')
output += write_outcome(res)
if res[0] == 1:
output += perform_addcollectiontotree(colID=colID, ln=ln, add_son=res[1], callback='')
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please give the collection a name.</span></b>"""
body = [output]
if callback:
return perform_index(colID, ln=ln, mtype="perform_addcollection", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifydbquery(colID, ln, dbquery='', callback='yes', confirm=-1):
"""form to modify the dbquery of the collection.
dbquery - the dbquery of the collection."""
subtitle = ''
output = ""
col_dict = dict(get_def_name('', "collection"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="1">1. Modify collection query for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.1">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
if confirm == -1:
res = run_sql("SELECT dbquery FROM collection WHERE id=%s" , (colID,))
dbquery = res[0][0]
if not dbquery:
dbquery = ''
reg_sons = len(get_col_tree(colID, 'r'))
vir_sons = len(get_col_tree(colID, 'v'))
if reg_sons > 1:
if dbquery:
output += "Warning: This collection got subcollections, and should because of this not have a collection query, for further explanation, check the WebSearch Guide<br />"
elif reg_sons <= 1:
if not dbquery:
output += "Warning: This collection does not have any subcollections, and should because of this have a collection query, for further explanation, check the WebSearch Guide<br />"
text = """
<span class="adminlabel">Query</span>
<input class="admin_w200" type="text" name="dbquery" value="%s" /><br />
""" % cgi.escape(dbquery, 1)
output += createhiddenform(action="modifydbquery",
text=text,
button="Modify",
colID=colID,
ln=ln,
confirm=1)
if confirm in ["1", 1]:
res = modify_dbquery(colID, dbquery)
if res:
if dbquery == "":
text = """<b><span class="info">Query removed for this collection.</span></b>"""
else:
text = """<b><span class="info">Query set for this collection.</span></b>"""
else:
text = """<b><span class="info">Sorry, could not change query.</span></b>"""
output += text
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifydbquery", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
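# Usage sketch (hypothetical query): setting the collection query that
# defines which records belong to collection 2; an empty dbquery submitted
# with confirm=1 removes the query instead:
#
#     html = perform_modifydbquery(colID=2, ln='en',
#                                  dbquery='collection:ARTICLE', confirm=1)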
def perform_modifycollectiontree(colID, ln, move_up='', move_down='', move_from='', move_to='', delete='', rtype='', callback='yes', confirm=0):
"""to modify the collection tree: move a collection up and down, delete a collection, or change the father of the collection.
colID - the main collection of the tree, the root
move_up - move this collection up (is not the collection id, but the place in the tree)
move_up - move this collection down (is not the collection id, but the place in the tree)
move_from - move this collection from the current positon (is not the collection id, but the place in the tree)
move_to - move the move_from collection and set this as it's father. (is not the collection id, but the place in the tree)
delete - delete this collection from the tree (is not the collection id, but the place in the tree)
rtype - the type of the collection in the tree, regular or virtual"""
colID = int(colID)
tree = get_col_tree(colID, rtype)
col_dict = dict(get_def_name('', "collection"))
subtitle = """Modify collection tree: %s <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.3">?</a>] <a href="%s/admin/websearch/websearchadmin.py/showtree?colID=%s&ln=%s">Printer friendly version</a></small>""" % (col_dict[colID], CFG_SITE_URL, CFG_SITE_URL, colID, ln)
fin_output = ""
output = ""
try:
if move_up:
move_up = int(move_up)
switch = find_last(tree, move_up)
if switch and switch_col_treescore(tree[move_up], tree[switch]):
output += """<b><span class="info">Moved the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]], col_dict[tree[switch][0]])
else:
output += """<b><span class="info">Could not move the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]], col_dict[tree[switch][0]])
elif move_down:
move_down = int(move_down)
switch = find_next(tree, move_down)
if switch and switch_col_treescore(tree[move_down], tree[switch]):
output += """<b><span class="info">Moved the %s collection '%s' down and '%s' up.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_down][0]], col_dict[tree[switch][0]])
else:
output += """<b><span class="info">Could not move the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]],col_dict[tree[switch][0]])
elif delete:
delete = int(delete)
if confirm in [0, "0"]:
if col_dict[tree[delete][0]] != col_dict[tree[delete][3]]:
text = """<b>Do you want to remove the %s collection '%s' and its subcollections in the %s collection '%s'.</b>
""" % ((tree[delete][4]=="r" and 'regular' or 'virtual'), col_dict[tree[delete][0]], (rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
else:
text = """<b>Do you want to remove all subcollections of the %s collection '%s'.</b>
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Confirm",
colID=colID,
delete=delete,
rtype=rtype,
ln=ln,
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text="<b>To cancel</b>",
button="Cancel",
colID=colID,
ln=ln)
else:
if remove_col_subcol(tree[delete][0], tree[delete][3], rtype):
if col_dict[tree[delete][0]] != col_dict[tree[delete][3]]:
output += """<b><span class="info">Removed the %s collection '%s' and its subcollections in subdirectory '%s'.</span></b><br /><br />
""" % ((tree[delete][4]=="r" and 'regular' or 'virtual'), col_dict[tree[delete][0]], col_dict[tree[delete][3]])
else:
output += """<b><span class="info">Removed the subcollections of the %s collection '%s'.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
else:
output += """<b><span class="info">Could not remove the collection from the tree.</span></b><br /><br />
"""
delete = ''
elif move_from and not move_to:
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
text = """<b>Select collection to place the %s collection '%s' under.</b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_from_id][0]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Cancel",
colID=colID,
ln=ln)
elif move_from and move_to:
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
move_to_rtype = move_to[0]
move_to_id = int(move_to[1:len(move_to)])
tree_from = get_col_tree(colID, move_from_rtype)
tree_to = get_col_tree(colID, move_to_rtype)
if confirm in [0, '0']:
if move_from_id == move_to_id and move_from_rtype == move_to_rtype:
output += """<b><span class="info">Cannot move to itself.</span></b><br /><br />
"""
elif tree_from[move_from_id][3] == tree_to[move_to_id][0] and move_from_rtype==move_to_rtype:
output += """<b><span class="info">The collection is already there.</span></b><br /><br />
"""
elif check_col(tree_to[move_to_id][0], tree_from[move_from_id][0]) or (tree_to[move_to_id][0] == 1 and tree_from[move_from_id][3] == tree_to[move_to_id][0] and move_from_rtype != move_to_rtype):
text = """<b>Move %s collection '%s' to the %s collection '%s'.</b>
""" % ((tree_from[move_from_id][4]=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (tree_to[move_to_id][4]=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Confirm",
colID=colID,
move_from=move_from,
move_to=move_to,
ln=ln,
rtype=rtype,
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text="""<b>To cancel</b>""",
button="Cancel",
colID=colID,
ln=ln)
else:
output += """<b><span class="info">Cannot move the collection '%s' and set it as a subcollection of '%s' since it will create a loop.</span></b><br /><br />
""" % (col_dict[tree_from[move_from_id][0]], col_dict[tree_to[move_to_id][0]])
else:
if (move_to_id != 0 and move_col_tree(tree_from[move_from_id], tree_to[move_to_id])) or (move_to_id == 0 and move_col_tree(tree_from[move_from_id], tree_to[move_to_id], move_to_rtype)):
output += """<b><span class="info">Moved %s collection '%s' to the %s collection '%s'.</span></b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (move_to_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
else:
output += """<b><span class="info">Could not move %s collection '%s' to the %s collection '%s'.</span></b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (move_to_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
move_from = ''
move_to = ''
else:
output += """
"""
except StandardError, e:
register_exception()
return """<b><span class="info">An error occured.</span></b>
"""
output += """<table border ="0" width="100%">
<tr><td width="50%">
<b>Narrow by collection:</b>
</td><td width="50%">
<b>Focus on...:</b>
</td></tr><tr><td valign="top">
"""
tree = get_col_tree(colID, 'r')
output += create_colltree(tree, col_dict, colID, ln, move_from, move_to, 'r', "yes")
output += """</td><td valign="top">
"""
tree = get_col_tree(colID, 'v')
output += create_colltree(tree, col_dict, colID, ln, move_from, move_to, 'v', "yes")
output += """</td>
</tr>
</table>
"""
body = [output]
if callback:
return perform_index(colID, ln, mtype="perform_modifycollectiontree", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
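# Usage sketch (hypothetical positions): move_from/move_to encode the tree
# type ('r' regular or 'v' virtual) followed by the node's place in that
# tree, not the collection id. E.g. moving the regular node at position 2
# under the regular node at position 0:
#
#     html = perform_modifycollectiontree(colID=1, ln='en', move_from='r2',
#                                         move_to='r0', confirm=1)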
def perform_showtree(colID, ln):
"""create collection tree/hiarchy"""
col_dict = dict(get_def_name('', "collection"))
subtitle = "Collection tree: %s" % col_dict[int(colID)]
output = """<table border ="0" width="100%">
<tr><td width="50%">
<b>Narrow by collection:</b>
</td><td width="50%">
<b>Focus on...:</b>
</td></tr><tr><td valign="top">
"""
tree = get_col_tree(colID, 'r')
output += create_colltree(tree, col_dict, colID, ln, '', '', 'r', '')
output += """</td><td valign="top">
"""
tree = get_col_tree(colID, 'v')
output += create_colltree(tree, col_dict, colID, ln, '', '', 'v', '')
output += """</td>
</tr>
</table>
"""
body = [output]
return addadminbox(subtitle, body)
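# Usage sketch (hypothetical id): rendering the printer-friendly tree of
# collection 1 in English:
#
#     html = perform_showtree(colID=1, ln='en')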
def perform_addportalbox(colID, ln, title='', body='', callback='yes', confirm=-1):
"""form to add a new portalbox
title - the title of the portalbox
body - the body of the portalbox"""
col_dict = dict(get_def_name('', "collection"))
colID = int(colID)
subtitle = """<a name="5.1"></a>Create new portalbox"""
text = """
<span class="adminlabel">Title</span>
<textarea cols="50" rows="1" class="admin_wvar" type="text" name="title">%s</textarea><br />
<span class="adminlabel">Body</span>
<textarea cols="50" rows="10" class="admin_wvar" type="text" name="body">%s</textarea><br />
""" % (cgi.escape(title), cgi.escape(body))
output = createhiddenform(action="addportalbox#5.1",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
if body and confirm in [1, "1"]:
res = add_pbx(title, body)
output += write_outcome(res)
if res[1] == 1:
output += """<b><span class="info"><a href="addexistingportalbox?colID=%s&ln=%s&pbxID=%s#5">Add portalbox to collection</a></span></b>""" % (colID, ln, res[1])
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Body field must be filled.</span></b>
"""
body = [output]
return perform_showportalboxes(colID, ln, content=addadminbox(subtitle, body))
def perform_addexistingportalbox(colID, ln, pbxID=-1, score=0, position='', sel_ln='', callback='yes', confirm=-1):
"""form to add an existing portalbox to a collection.
colID - the collection to add the portalbox to
pbxID - the portalbox to add
score - the importance of the portalbox.
position - the position of the portalbox on the page
sel_ln - the language of the portalbox"""
subtitle = """<a name="5.2"></a>Add existing portalbox to collection"""
output = ""
colID = int(colID)
res = get_pbx()
pos = get_pbx_pos()
lang = dict(get_languages())
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx(colID)
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if len(res) > 0:
text = """
<span class="adminlabel">Portalbox</span>
<select name="pbxID" class="admin_w200">
<option value="-1">- Select portalbox -</option>
"""
for (id, t_title, t_body) in res:
text += """<option value="%s" %s>%s - %s...</option>\n""" % \
(id, id == int(pbxID) and 'selected="selected"' or '',
t_title[:40], cgi.escape(t_body[0:40 - min(40, len(t_title))]))
text += """</select><br />
<span class="adminlabel">Language</span>
<select name="sel_ln" class="admin_w200">
<option value="">- Select language -</option>
"""
listlang = lang.items()
listlang.sort()
for (key, name) in listlang:
text += """<option value="%s" %s>%s</option>
""" % (key, key == sel_ln and 'selected="selected"' or '', name)
text += """</select><br />
<span class="adminlabel">Position</span>
<select name="position" class="admin_w200">
<option value="">- Select position -</option>
"""
listpos = pos.items()
listpos.sort()
for (key, name) in listpos:
text += """<option value="%s" %s>%s</option>""" % (key, key==position and 'selected="selected"' or '', name)
text += "</select>"
output += createhiddenform(action="addexistingportalbox#5.2",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
else:
output = """No existing portalboxes to add, please create a new one.
"""
if pbxID > -1 and position and sel_ln and confirm in [1, "1"]:
pbxID = int(pbxID)
res = add_col_pbx(colID, pbxID, sel_ln, position, '')
output += write_outcome(res)
elif pbxID > -1 and confirm not in [-1, "-1"]:
output += """<b><span class="info">All fields must be filled.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_deleteportalbox(colID, ln, pbxID=-1, callback='yes', confirm=-1):
"""form to delete a portalbox which is not in use.
colID - the current collection.
pbxID - the id of the portalbox"""
subtitle = """<a name="5.3"></a>Delete an unused portalbox"""
output = ""
colID = int(colID)
if pbxID not in [-1, "-1"] and confirm in [1, "1"]:
ares = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), ares))
if pbx_dict.has_key(int(pbxID)):
pname = pbx_dict[int(pbxID)]
ares = delete_pbx(int(pbxID))
else:
return """<b><span class="info">This portalbox does not exist</span></b>"""
res = get_pbx()
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx()
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if len(res) > 0:
text = """
<span class="adminlabel">Portalbox</span>
<select name="pbxID" class="admin_w200">
"""
text += """<option value="-1">- Select portalbox -"""
for (id, t_title, t_body) in res:
if not col_pbx.has_key(id):
text += """<option value="%s" %s>%s - %s...""" % (id, id == int(pbxID) and 'selected="selected"' or '', t_title, cgi.escape(t_body[0:10]))
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="deleteportalbox#5.3",
text=text,
button="Delete",
colID=colID,
ln=ln,
confirm=1)
if pbxID not in [-1, "-1"]:
pbxID = int(pbxID)
if confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Choose a portalbox to delete.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_modifyportalbox(colID, ln, pbxID=-1, score='', position='', sel_ln='', title='', body='', callback='yes', confirm=-1):
"""form to modify a portalbox in a collection, or change the portalbox itself.
colID - the id of the collection.
pbxID - the portalbox to change
score - the score of the portalbox connected to colID which should be changed.
position - the position of the portalbox in collection colID to change."""
subtitle = ""
output = ""
colID = int(colID)
res = get_pbx()
pos = get_pbx_pos()
lang = dict(get_languages())
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx(colID)
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if pbxID not in [-1, "-1"]:
pbxID = int(pbxID)
subtitle = """<a name="5.4"></a>Modify portalbox '%s' for this collection""" % pbx_dict[pbxID]
col_pbx = get_col_pbx(colID)
if not (score and position) and not (body and title):
for (id_pbx, id_collection, tln, score, position, title, body) in col_pbx:
if id_pbx == pbxID:
break
output += """Collection (presentation) specific values (Changes implies only to this collection.)<br />"""
text = """
<span class="adminlabel">Position</span>
<select name="position" class="admin_w200">
"""
listpos = pos.items()
listpos.sort()
for (key, name) in listpos:
text += """<option value="%s" %s>%s""" % (key, key==position and 'selected="selected"' or '', name)
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="modifyportalbox#5.4",
text=text,
button="Modify",
colID=colID,
pbxID=pbxID,
score=score,
title=title,
body=cgi.escape(body, 1),
sel_ln=sel_ln,
ln=ln,
confirm=3)
if pbxID > -1 and score and position and confirm in [3, "3"]:
pbxID = int(pbxID)
res = modify_pbx(colID, pbxID, sel_ln, score, position, '', '')
res2 = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res2))
output += write_outcome(res)
output += """<br />Portalbox (content) specific values (any changes appears everywhere the portalbox is used.)"""
text = """
<span class="adminlabel">Title</span>
<textarea cols="50" rows="1" class="admin_wvar" type="text" name="title">%s</textarea><br />
""" % cgi.escape(title)
text += """
<span class="adminlabel">Body</span>
<textarea cols="50" rows="10" class="admin_wvar" type="text" name="body">%s</textarea><br />
""" % cgi.escape(body)
output += createhiddenform(action="modifyportalbox#5.4",
text=text,
button="Modify",
colID=colID,
pbxID=pbxID,
sel_ln=sel_ln,
score=score,
position=position,
ln=ln,
confirm=4)
if pbxID > -1 and confirm in [4, "4"]:
pbxID = int(pbxID)
res = modify_pbx(colID, pbxID, sel_ln, '', '', title, body)
output += write_outcome(res)
else:
output = """No portalbox to modify."""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_switchpbxscore(colID, id_1, id_2, sel_ln, ln):
"""Switch the score of id_1 and id_2 in collection_portalbox.
colID - the current collection
id_1/id_2 - the id's to change the score for.
sel_ln - the language of the portalbox"""
output = ""
res = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
res = switch_pbx_score(colID, id_1, id_2, sel_ln)
output += write_outcome(res)
return perform_showportalboxes(colID, ln, content=output)
def perform_showportalboxes(colID, ln, callback='yes', content='', confirm=-1):
"""show the portalboxes of this collection.
colID - the portalboxes to show the collection for."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
subtitle = """<a name="5">5. Modify portalboxes for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.5">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = ""
pos = get_pbx_pos()
output = """<dl>
<dt>Portalbox actions (not related to this collection)</dt>
<dd><a href="addportalbox?colID=%s&ln=%s#5.1">Create new portalbox</a></dd>
<dd><a href="deleteportalbox?colID=%s&ln=%s#5.3">Delete an unused portalbox</a></dd>
<dt>Collection specific actions</dt>
<dd><a href="addexistingportalbox?colID=%s&ln=%s#5.2">Add existing portalbox to collection</a></dd>
</dl>
""" % (colID, ln, colID, ln, colID, ln)
header = ['Position', 'Language', '', 'Title', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
pos_list = pos.items()
pos_list.sort()
if len(get_col_pbx(colID)) > 0:
for (key, value) in sitelangs:
for (pos_key, pos_value) in pos_list:
res = get_col_pbx(colID, key, pos_key)
i = 0
for (pbxID, colID_pbx, tln, score, position, title, body) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchpbxscore?colID=%s&ln=%s&id_1=%s&id_2=%s&sel_ln=%s&rand=%s#5"><img border="0" src="%s/img/smallup.gif" title="Move portalbox up" alt="up" /></a>""" % (CFG_SITE_URL, colID, ln, pbxID, res[i - 1][0], tln, random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchpbxscore?colID=%s&ln=%s&id_1=%s&id_2=%s&sel_ln=%s&rand=%s#5"><img border="0" src="%s/img/smalldown.gif" title="Move portalbox down" alt="down" /></a>""" % (CFG_SITE_URL, colID, ln, pbxID, res[i][0], tln, random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append(["%s" % (i==1 and pos[position] or ''), "%s" % (i==1 and lang[tln] or ''), move, "%s" % title])
for col in [(('Modify', 'modifyportalbox'), ('Remove', 'removeportalbox'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&pbxID=%s&sel_ln=%s#5.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, pbxID, tln, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&pbxID=%s&sel_ln=%s#5.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, pbxID, tln, str)
output += tupletotable(header=header, tuple=actions)
else:
output += """No portalboxes exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showportalboxes", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_removeportalbox(colID, ln, pbxID='', sel_ln='', callback='yes', confirm=0):
"""form to remove a portalbox from a collection.
colID - the current collection, remove the portalbox from this collection.
sel_ln - remove the portalbox with this language
pbxID - remove the portalbox with this id"""
subtitle = """<a name="5.5"></a>Remove portalbox"""
output = ""
col_dict = dict(get_def_name('', "collection"))
res = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and pbxID and sel_ln:
colID = int(colID)
pbxID = int(pbxID)
if confirm in ["0", 0]:
text = """Do you want to remove the portalbox '%s' from the collection '%s'.""" % (pbx_dict[pbxID], col_dict[colID])
output += createhiddenform(action="removeportalbox#5.5",
text=text,
button="Confirm",
colID=colID,
pbxID=pbxID,
sel_ln=sel_ln,
confirm=1)
elif confirm in ["1", 1]:
res = remove_pbx(colID, pbxID, sel_ln)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_switchfmtscore(colID, type, id_1, id_2, ln):
"""Switch the score of id_1 and id_2 in the table type.
colID - the current collection
id_1/id_2 - the id's to change the score for.
type - like "format" """
fmt_dict = dict(get_def_name('', "format"))
res = switch_score(colID, id_1, id_2, type)
output = write_outcome(res)
return perform_showoutputformats(colID, ln, content=output)
def perform_switchfldscore(colID, id_1, id_2, fmeth, ln):
"""Switch the score of id_1 and id_2 in collection_field_fieldvalue.
colID - the current collection
id_1/id_2 - the id's to change the score for."""
fld_dict = dict(get_def_name('', "field"))
res = switch_fld_score(colID, id_1, id_2)
output = write_outcome(res)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
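# Usage sketch (hypothetical ids): swapping the scores of two search fields
# and re-rendering the page the swap came from; fmeth selects that page:
# 'soo' sort options, 'sew' search fields, 'seo' search options.
#
#     html = perform_switchfldscore(colID=2, id_1=5, id_2=7, fmeth='sew',
#                                   ln='en')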
def perform_switchfldvaluescore(colID, id_1, id_fldvalue_1, id_fldvalue_2, ln):
"""Switch the score of id_1 and id_2 in collection_field_fieldvalue.
colID - the current collection
id_1/id_2 - the id's to change the score for."""
name_1 = run_sql("SELECT name from fieldvalue where id=%s", (id_fldvalue_1, ))[0][0]
name_2 = run_sql("SELECT name from fieldvalue where id=%s", (id_fldvalue_2, ))[0][0]
res = switch_fld_value_score(colID, id_1, id_fldvalue_1, id_fldvalue_2)
output = write_outcome(res)
return perform_modifyfield(colID, fldID=id_1, ln=ln, content=output)
def perform_addnewfieldvalue(colID, fldID, ln, name='', value='', callback="yes", confirm=-1):
"""form to add a new fieldvalue.
name - the name of the new fieldvalue
value - the value of the new fieldvalue
"""
output = ""
subtitle = """<a name="7.4"></a>Add new value"""
text = """
<span class="adminlabel">Display name</span>
<input class="admin_w200" type="text" name="name" value="%s" /><br />
<span class="adminlabel">Search value</span>
<input class="admin_w200" type="text" name="value" value="%s" /><br />
""" % (name, value)
output = createhiddenform(action="%s/admin/websearch/websearchadmin.py/addnewfieldvalue" % CFG_SITE_URL,
text=text,
colID=colID,
fldID=fldID,
ln=ln,
button="Add",
confirm=1)
if name and value and confirm in ["1", 1]:
res = add_fldv(name, value)
output += write_outcome(res)
if res[0] == 1:
res = add_col_fld(colID, fldID, 'seo', res[1])
if res[0] == 0:
output += "<br />" + write_outcome(res)
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please fill in name and value.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_modifyfieldvalue(colID, fldID, fldvID, ln, name='', value='', callback="yes", confirm=-1):
"""form to modify a fieldvalue.
name - the name of the fieldvalue
value - the value of the fieldvalue
"""
if confirm in [-1, "-1"]:
res = get_fld_value(fldvID)
(id, name, value) = res[0]
output = ""
subtitle = """<a name="7.4"></a>Modify existing value"""
output = """<dl>
<dt><b><span class="info">Warning: Modifications done below will also inflict on all places the modified data is used.</span></b></dt>
</dl>"""
text = """
<span class="adminlabel">Display name</span>
<input class="admin_w200" type="text" name="name" value="%s" /><br />
<span class="adminlabel">Search value</span>
<input class="admin_w200" type="text" name="value" value="%s" /><br />
""" % (name, value)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifyfieldvalue" % CFG_SITE_URL,
text=text,
colID=colID,
fldID=fldID,
fldvID=fldvID,
ln=ln,
button="Update",
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifyfieldvalue" % CFG_SITE_URL,
text="Delete value and all associations",
colID=colID,
fldID=fldID,
fldvID=fldvID,
ln=ln,
button="Delete",
confirm=2)
if name and value and confirm in ["1", 1]:
res = update_fldv(fldvID, name, value)
output += write_outcome(res)
elif confirm in ["2", 2]:
res = delete_fldv(fldvID)
output += write_outcome(res)
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please fill in name and value.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_removefield(colID, ln, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
"""form to remove a field from a collection.
colID - the current collection, remove the field from this collection.
sel_ln - remove the field with this language
fldID - remove the field with this id"""
if fmeth == "soo":
field = "sort option"
elif fmeth == "sew":
field = "search field"
elif fmeth == "seo":
field = "search option"
else:
field = "field"
subtitle = """<a name="6.4"><a name="7.4"><a name="8.4"></a>Remove %s""" % field
output = ""
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
res = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and fldID:
colID = int(colID)
fldID = int(fldID)
if fldvID and fldvID != "None":
fldvID = int(fldvID)
if confirm in ["0", 0]:
text = """Do you want to remove the %s '%s' %s from the collection '%s'.""" % (field, fld_dict[fldID], (fldvID not in["", "None"] and "with value '%s'" % fldv_dict[fldvID] or ''), col_dict[colID])
output += createhiddenform(action="removefield#6.5",
text=text,
button="Confirm",
colID=colID,
fldID=fldID,
fldvID=fldvID,
fmeth=fmeth,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fld(colID, fldID, fldvID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_removefieldvalue(colID, ln, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
"""form to remove a field from a collection.
colID - the current collection, remove the field from this collection.
sel_ln - remove the field with this language
fldID - remove the field with this id"""
subtitle = """<a name="7.4"></a>Remove value"""
output = ""
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
res = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and fldID:
colID = int(colID)
fldID = int(fldID)
if fldvID and fldvID != "None":
fldvID = int(fldvID)
if confirm in ["0", 0]:
text = """Do you want to remove the value '%s' from the search option '%s'.""" % (fldv_dict[fldvID], fld_dict[fldID])
output += createhiddenform(action="removefieldvalue#7.4",
text=text,
button="Confirm",
colID=colID,
fldID=fldID,
fldvID=fldvID,
fmeth=fmeth,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fld(colID, fldID, fldvID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_rearrangefieldvalue(colID, fldID, ln, callback='yes', confirm=-1):
"""rearrang the fieldvalues alphabetically
colID - the collection
fldID - the field to rearrange the fieldvalue for
"""
subtitle = "Order values alphabetically"
output = ""
col_fldv = get_col_fld(colID, 'seo', fldID)
col_fldv = dict(map(lambda x: (x[1], x[0]), col_fldv))
fldv_names = get_fld_value()
fldv_names = map(lambda x: (x[0], x[1]), fldv_names)
if not col_fldv.has_key(None):
vscore = len(col_fldv)
for (fldvID, name) in fldv_names:
if col_fldv.has_key(fldvID):
run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (vscore, colID, fldID, fldvID))
vscore -= 1
output += write_outcome((1, ""))
else:
output += write_outcome((0, (0, "No values to order")))
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID, ln, content=output)
def perform_rearrangefield(colID, ln, fmeth, callback='yes', confirm=-1):
"""rearrang the fields alphabetically
colID - the collection
"""
subtitle = "Order fields alphabetically"
output = ""
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, fmeth)))
fld_names = get_def_name('', "field")
if len(col_fld) > 0:
score = len(col_fld)
for (fldID, name) in fld_names:
if col_fld.has_key(fldID):
run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (score, colID, fldID))
score -= 1
output += write_outcome((1, ""))
else:
output += write_outcome((0, (0, "No fields to order")))
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
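# Usage sketch (hypothetical values): alphabetizing the search fields of
# collection 2. The highest score goes to the first (id, name) pair yielded
# by get_def_name, which is assumed here to iterate alphabetically by name,
# so scores descend along the alphabet:
#
#     html = perform_rearrangefield(colID=2, ln='en', fmeth='sew')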
def perform_addexistingfieldvalue(colID, fldID, fldvID=-1, ln=CFG_SITE_LANG, callback='yes', confirm=-1):
"""form to add an existing fieldvalue to a field.
colID - the collection
fldID - the field to add the fieldvalue to
fldvID - the fieldvalue to add"""
subtitle = """</a><a name="7.4"></a>Add existing value to search option"""
output = ""
if fldvID not in [-1, "-1"] and confirm in [1, "1"]:
fldvID = int(fldvID)
ares = add_col_fld(colID, fldID, 'seo', fldvID)
colID = int(colID)
fldID = int(fldID)
lang = dict(get_languages())
res = get_def_name('', "field")
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(res)
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, 'seo')))
fld_value = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), fld_value))
text = """
<span class="adminlabel">Value</span>
<select name="fldvID" class="admin_w200">
<option value="-1">- Select value -</option>
"""
res = run_sql("SELECT id,name,value FROM fieldvalue ORDER BY name")
for (id, name, value) in res:
text += """<option value="%s" %s>%s - %s</option>
""" % (id, id == int(fldvID) and 'selected="selected"' or '', name, value)
text += """</select><br />"""
output += createhiddenform(action="addexistingfieldvalue#7.4",
text=text,
button="Add",
colID=colID,
fldID=fldID,
ln=ln,
confirm=1)
if fldvID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm in [1, "1"]:
output += """<b><span class="info">Select a value to add and try again.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID, ln, content=output)
def perform_addexistingfield(colID, ln, fldID=-1, fldvID=-1, fmeth='', callback='yes', confirm=-1):
"""form to add an existing field to a collection.
colID - the collection to add the field to
fldID - the field to add
sel_ln - the language of the field"""
subtitle = """<a name="6.2"></a><a name="7.2"></a><a name="8.2"></a>Add existing field to collection"""
output = ""
if fldID not in [-1, "-1"] and confirm in [1, "1"]:
fldID = int(fldID)
ares = add_col_fld(colID, fldID, fmeth, fldvID)
colID = int(colID)
lang = dict(get_languages())
res = get_def_name('', "field")
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(res)
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, fmeth)))
fld_value = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), fld_value))
if fldvID:
fldvID = int(fldvID)
text = """
<span class="adminlabel">Field</span>
<select name="fldID" class="admin_w200">
<option value="-1">- Select field -</option>
"""
for (id, var) in res:
if fmeth == 'seo' or (fmeth != 'seo' and not col_fld.has_key(id)):
text += """<option value="%s" %s>%s</option>
""" % (id, '', fld_dict[id])
text += """</select><br />"""
output += createhiddenform(action="addexistingfield#6.2",
text=text,
button="Add",
colID=colID,
fmeth=fmeth,
ln=ln,
confirm=1)
if fldID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif fldID in [-1, "-1"] and confirm not in [-1, "-1"]:
output += """<b><span class="info">Select a field.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_showsortoptions(colID, ln, callback='yes', content='', confirm=-1):
"""show the sort fields of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="8">8. Modify sort options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.8">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available sort options</dd>
<dt>Collection specific actions
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=soo#8.2">Add sort option to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=soo#8.2">Order sort options alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Sort option', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
if len(get_col_fld(colID, 'soo')) > 0:
res = get_col_fld(colID, 'soo')
i = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=soo&rand=%s#8"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=soo&rand=%s#8"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, fld_dict[int(fldID)]])
for col in [(('Remove sort option', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=soo#8.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=soo#8.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, str)
output += tupletotable(header=header, tuple=actions)
else:
output += """No sort options exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsortoptions", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_showsearchfields(colID, ln, callback='yes', content='', confirm=-1):
"""show the search fields of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="6">6. Modify search fields for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.6">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available search fields</dd>
<dt>Collection specific actions
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=sew#6.2">Add search field to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=sew#6.2">Order search fields alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Search field', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
if len(get_col_fld(colID, 'sew')) > 0:
res = get_col_fld(colID, 'sew')
i = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=sew&rand=%s#6"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=sew&rand=%s#6"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>' % (CFG_SITE_URL, colID, ln, fldID, res[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, fld_dict[int(fldID)]])
for col in [(('Remove search field', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=sew#6.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s#6.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, str)
output += tupletotable(header=header, tuple=actions)
else:
output += """No search fields exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsearchfields", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_showsearchoptions(colID, ln, callback='yes', content='', confirm=-1):
"""show the sort and search options of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="7">7. Modify search options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.7">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available search options</dd>
<dt>Collection specific actions
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=seo#7.2">Add search option to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=seo#7.2">Order search options alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Search option', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
fld_distinct = run_sql("SELECT distinct(id_field) FROM collection_field_fieldvalue WHERE type='seo' AND id_collection=%s ORDER by score desc", (colID, ))
if len(fld_distinct) > 0:
i = 0
for (id) in fld_distinct:
fldID = id[0]
col_fld = get_col_fld(colID, 'seo', fldID)
move = ""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=seo&rand=%s#7"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fld_distinct[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
i += 1
if i != len(fld_distinct):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=seo&rand=%s#7"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>' % (CFG_SITE_URL, colID, ln, fldID, fld_distinct[i][0], random.randint(0, 1000), CFG_SITE_URL)
actions.append([move, "%s" % fld_dict[fldID]])
for col in [(('Modify values', 'modifyfield'), ('Remove search option', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s#7.3">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=seo#7.3">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, str)
output += tupletotable(header=header, tuple=actions)
else:
output += """No search options exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsearchoptions", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifyfield(colID, fldID, fldvID='', ln=CFG_SITE_LANG, content='', callback='yes', confirm=0):
"""Modify the fieldvalues for a field"""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
fldID = int(fldID)
subtitle = """<a name="7.3">Modify values for field '%s'</a>""" % (fld_dict[fldID])
output = """<dl>
<dt>Value specific actions
<dd><a href="addexistingfieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Add existing value to search option</a></dd>
<dd><a href="addnewfieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Add new value to search option</a></dd>
<dd><a href="rearrangefieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Order values alphabetically</a></dd>
</dl>
""" % (colID, ln, fldID, colID, ln, fldID, colID, ln, fldID)
header = ['', 'Value name', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
col_fld = list(get_col_fld(colID, 'seo', fldID))
if len(col_fld) == 1 and col_fld[0][1] is None:
output += """<b><span class="info">No values added for this search option yet</span></b>"""
else:
j = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in col_fld:
fieldvalue = get_fld_value(fldvID)
move = ""
if j != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldvaluescore?colID=%s&ln=%s&id_1=%s&id_fldvalue_1=%s&id_fldvalue_2=%s&rand=%s#7.3"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fldvID, col_fld[j - 1][1], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
j += 1
if j != len(col_fld):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldvaluescore?colID=%s&ln=%s&id_1=%s&id_fldvalue_1=%s&id_fldvalue_2=%s&rand=%s#7.3"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fldvID, col_fld[j][1], random.randint(0, 1000), CFG_SITE_URL)
if fieldvalue[0][1] != fieldvalue[0][2] and fldvID is not None:
actions.append([move, "%s - %s" % (fieldvalue[0][1], fieldvalue[0][2])])
elif fldvID is not None:
actions.append([move, "%s" % fieldvalue[0][1]])
move = ''
for col in [(('Modify value', 'modifyfieldvalue'), ('Remove value', 'removefieldvalue'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fldvID=%s&fmeth=seo#7.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, fldvID, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fldvID=%s#7.4">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, fldvID, str)
output += tupletotable(header=header, tuple=actions)
output += content
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if len(col_fld) == 0:
output = content
return perform_showsearchoptions(colID, ln, content=output)
def perform_showoutputformats(colID, ln, callback='yes', content='', confirm=-1):
"""shows the outputformats of the current collection
colID - the collection id."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
subtitle = """<a name="10">10. Modify output formats for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.10">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """
<dl>
<dt>Output format actions (not specific to the chosen collection)
    <dd>Go to the BibFormat interface to modify the available output formats</dd>
<dt>Collection specific actions
<dd><a href="addexistingoutputformat?colID=%s&ln=%s#10.2">Add existing output format to collection</a></dd>
</dl>
""" % (colID, ln)
header = ['', 'Code', 'Output format', 'Actions']
actions = []
col_fmt = get_col_fmt(colID)
fmt_dict = dict(get_def_name('', "format"))
i = 0
if len(col_fmt) > 0:
for (id_format, colID_fld, code, score) in col_fmt:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfmtscore?colID=%s&ln=%s&type=format&id_1=%s&id_2=%s&rand=%s#10"><img border="0" src="%s/img/smallup.gif" title="Move format up"></a>""" % (CFG_SITE_URL, colID, ln, id_format, col_fmt[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(col_fmt):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfmtscore?colID=%s&ln=%s&type=format&id_1=%s&id_2=%s&rand=%s#10"><img border="0" src="%s/img/smalldown.gif" title="Move format down"></a>' % (CFG_SITE_URL, colID, ln, id_format, col_fmt[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, code, fmt_dict[int(id_format)]])
for col in [(('Remove', 'removeoutputformat'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fmtID=%s#10">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, id_format, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fmtID=%s#10">%s</a>' % (CFG_SITE_URL, function, colID, ln, id_format, str)
output += tupletotable(header=header, tuple=actions)
else:
output += """No output formats exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showoutputformats", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def external_collections_build_select(colID, external_collection):
output = '<select name="state" class="admin_w200">'
if external_collection.parser:
max_state = 4
else:
max_state = 2
num_selected = external_collection_get_state(external_collection, colID)
for num in range(max_state):
state_name = CFG_EXTERNAL_COLLECTION_STATES_NAME[num]
if num == num_selected:
selected = ' selected'
else:
selected = ''
output += '<option value="%(num)d"%(selected)s>%(state_name)s</option>' % {'num': num, 'selected': selected, 'state_name': state_name}
output += '</select>\n'
return output
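# Illustrative note (not part of the original code): for a parser-enabled
# engine whose stored state for this collection is 1, the generated markup
# would look roughly like
#
#   <select name="state" class="admin_w200">
#     <option value="0">...</option>
#     <option value="1" selected>...</option>
#     ...
#   </select>
#
# with the option labels taken from CFG_EXTERNAL_COLLECTION_STATES_NAME.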
def perform_manage_external_collections(colID, ln, callback='yes', content='', confirm=-1):
"""Show the interface to configure external collections to the user."""
colID = int(colID)
subtitle = """<a name="11">11. Configuration of related external collections</a>
<small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.11">?</a>]</small>""" % CFG_SITE_URL
output = '<form action="update_external_collections" method="POST"><input type="hidden" name="colID" value="%(colID)d">' % {'colID': colID}
table_header = ['External collection', 'Mode', 'Apply also to daughter collections?']
table_content = []
external_collections = external_collection_sort_engine_by_name(external_collections_dictionary.values())
for external_collection in external_collections:
collection_name = external_collection.name
select = external_collections_build_select(colID, external_collection)
recurse = '<input type=checkbox name="recurse" value="%(collection_name)s">' % {'collection_name': collection_name}
table_content.append([collection_name, select, recurse])
output += tupletotable(header=table_header, tuple=table_content)
output += '<input class="adminbutton" type="submit" value="Modify"/>'
output += '</form>'
return addadminbox(subtitle, [output])
def perform_update_external_collections(colID, ln, state_list, recurse_list):
colID = int(colID)
changes = []
output = ""
if not state_list:
        return 'Warning: No state found.<br />' + perform_manage_external_collections(colID, ln)
external_collections = external_collection_sort_engine_by_name(external_collections_dictionary.values())
if len(external_collections) != len(state_list):
        return 'Warning: the size of state_list differs from the number of external collections!<br />' + perform_manage_external_collections(colID, ln)
for (external_collection, state) in zip(external_collections, state_list):
state = int(state)
collection_name = external_collection.name
recurse = recurse_list and collection_name in recurse_list
oldstate = external_collection_get_state(external_collection, colID)
if oldstate != state or recurse:
changes += external_collection_get_update_state_list(external_collection, colID, state, recurse)
external_collection_apply_changes(changes)
return output + '<br /><br />' + perform_manage_external_collections(colID, ln)
def perform_showdetailedrecordoptions(colID, ln, callback='yes', content='', confirm=-1):
"""Show the interface to configure detailed record page to the user."""
colID = int(colID)
subtitle = """<a name="12">12. Configuration of detailed record page</a>
<small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.12">?</a>]</small>""" % CFG_SITE_URL
output = '''<form action="update_detailed_record_options" method="post">
<table><tr><td>
<input type="hidden" name="colID" value="%(colID)d">
<dl>
<dt><b>Show tabs:</b></dt>
<dd>
''' % {'colID': colID}
for (tab_id, tab_info) in get_detailed_page_tabs(colID).iteritems():
if tab_id == 'comments' and \
not CFG_WEBCOMMENT_ALLOW_REVIEWS and \
not CFG_WEBCOMMENT_ALLOW_COMMENTS:
continue
check = ''
output += '''<input type="checkbox" id="id%(tabid)s" name="tabs" value="%(tabid)s" %(check)s />
<label for="id%(tabid)s"> %(label)s</label><br />
''' % {'tabid':tab_id,
'check':((tab_info['visible'] and 'checked="checked"') or ''),
'label':tab_info['label']}
output += '</dd></dl></td><td>'
output += '</td></tr></table><input class="adminbutton" type="submit" value="Modify"/>'
output += '''<input type="checkbox" id="recurse" name="recurse" value="1" />
<label for="recurse"> Also apply to subcollections</label>'''
output += '</form>'
return addadminbox(subtitle, [output])
def perform_update_detailed_record_options(colID, ln, tabs, recurse):
"""Update the preferences for the tab to show/hide in the detailed record page."""
colID = int(colID)
changes = []
output = '<b><span class="info">Operation successfully completed.</span></b>'
if '' in tabs:
tabs.remove('')
tabs.append('metadata')
def update_settings(colID, tabs, recurse):
run_sql("DELETE FROM collectiondetailedrecordpagetabs WHERE id_collection=%s", (colID, ))
run_sql("REPLACE INTO collectiondetailedrecordpagetabs" + \
" SET id_collection=%s, tabs=%s", (colID, ';'.join(tabs)))
## for enabled_tab in tabs:
## run_sql("REPLACE INTO collectiondetailedrecordpagetabs" + \
## " SET id_collection='%s', tabs='%s'" % (colID, ';'.join(tabs)))
if recurse:
for descendant_id in get_collection_descendants(colID):
update_settings(descendant_id, tabs, recurse)
update_settings(colID, tabs, recurse)
#if callback:
return perform_editcollection(colID, ln, "perform_modifytranslations",
'<br /><br />' + output + '<br /><br />' + \
perform_showdetailedrecordoptions(colID, ln))
#else:
# return addadminbox(subtitle, body)
#return output + '<br /><br />' + perform_showdetailedrecordoptions(colID, ln)
def perform_addexistingoutputformat(colID, ln, fmtID=-1, callback='yes', confirm=-1):
"""form to add an existing output format to a collection.
colID - the collection the format should be added to
fmtID - the format to add."""
subtitle = """<a name="10.2"></a>Add existing output format to collection"""
output = ""
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
ares = add_col_fmt(colID, fmtID)
colID = int(colID)
res = get_def_name('', "format")
fmt_dict = dict(res)
col_dict = dict(get_def_name('', "collection"))
col_fmt = get_col_fmt(colID)
col_fmt = dict(map(lambda x: (x[0], x[2]), col_fmt))
if len(res) > 0:
text = """
<span class="adminlabel">Output format</span>
<select name="fmtID" class="admin_w200">
<option value="-1">- Select output format -</option>
"""
for (id, name) in res:
if not col_fmt.has_key(id):
text += """<option value="%s" %s>%s</option>
""" % (id, id == int(fmtID) and 'selected="selected"' or '', name)
text += """</select><br />
"""
output += createhiddenform(action="addexistingoutputformat#10.2",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
else:
output = """No existing output formats to add, please create a new one."""
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif fmtID in [-1, "-1"] and confirm not in [-1, "-1"]:
output += """<b><span class="info">Please select output format.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_deleteoutputformat(colID, ln, fmtID=-1, callback='yes', confirm=-1):
"""form to delete an output format not in use.
colID - the collection id of the current collection.
fmtID - the format id to delete."""
subtitle = """<a name="10.3"></a>Delete an unused output format"""
output = """
<dl>
    <dd>Deleting an output format will also delete the associated translations.</dd>
</dl>
"""
colID = int(colID)
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
fmt_dict = dict(get_def_name('', "format"))
old_colNAME = fmt_dict[int(fmtID)]
ares = delete_fmt(int(fmtID))
res = get_def_name('', "format")
fmt_dict = dict(res)
col_dict = dict(get_def_name('', "collection"))
col_fmt = get_col_fmt()
col_fmt = dict(map(lambda x: (x[0], x[2]), col_fmt))
if len(res) > 0:
text = """
<span class="adminlabel">Output format</span>
<select name="fmtID" class="admin_w200">
"""
text += """<option value="-1">- Select output format -"""
for (id, name) in res:
if not col_fmt.has_key(id):
text += """<option value="%s" %s>%s""" % (id, id == int(fmtID) and 'selected="selected"' or '', name)
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="deleteoutputformat#10.3",
text=text,
button="Delete",
colID=colID,
ln=ln,
confirm=0)
if fmtID not in [-1, "-1"]:
fmtID = int(fmtID)
if confirm in [0, "0"]:
text = """<b>Do you want to delete the output format '%s'.</b>
""" % fmt_dict[fmtID]
output += createhiddenform(action="deleteoutputformat#10.3",
text=text,
button="Confirm",
colID=colID,
fmtID=fmtID,
ln=ln,
confirm=1)
elif confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Choose a output format to delete.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_removeoutputformat(colID, ln, fmtID='', callback='yes', confirm=0):
"""form to remove an output format from a collection.
colID - the collection id of the current collection.
fmtID - the format id.
"""
subtitle = """<a name="10.5"></a>Remove output format"""
output = ""
col_dict = dict(get_def_name('', "collection"))
fmt_dict = dict(get_def_name('', "format"))
if colID and fmtID:
colID = int(colID)
fmtID = int(fmtID)
if confirm in ["0", 0]:
text = """Do you want to remove the output format '%s' from the collection '%s'.""" % (fmt_dict[fmtID], col_dict[colID])
output += createhiddenform(action="removeoutputformat#10.5",
text=text,
button="Confirm",
colID=colID,
fmtID=fmtID,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fmt(colID, fmtID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_index(colID=1, ln=CFG_SITE_LANG, mtype='', content='', confirm=0):
"""The index method, calling methods to show the collection tree, create new collections and add collections to tree.
"""
subtitle = "Overview"
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
output = ""
fin_output = ""
if not col_dict.has_key(1):
res = add_col(CFG_SITE_NAME, '')
if res:
fin_output += """<b><span class="info">Created root collection.</span></b><br />"""
else:
return "Cannot create root collection, please check database."
if CFG_SITE_NAME != run_sql("SELECT name from collection WHERE id=1")[0][0]:
res = run_sql("update collection set name=%s where id=1", (CFG_SITE_NAME, ))
if res:
fin_output += """<b><span class="info">The name of the root collection has been modified to be the same as the %(sitename)s installation name given prior to installing %(sitename)s.</span><b><br />""" % {'sitename' : CFG_SITE_NAME}
else:
return "Error renaming root collection."
fin_output += """
<table>
<tr>
<td>0. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_showall">Show all</a></small></td>
<td>1. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_addcollection">Create new collection</a></small></td>
<td>2. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_addcollectiontotree">Attach collection to tree</a></small></td>
<td>3. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_modifycollectiontree">Modify collection tree</a></small></td>
<td>4. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkwebcollstatus">Webcoll Status</a></small></td>
</tr><tr>
<td>5. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkcollectionstatus">Collection Status</a></small></td>
<td>6. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkexternalcollections">Check external collections</a></small></td>
<td>7. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checksearchservices">Search services</a></small></td>
<td>8. <small><a href="%s/help/admin/websearch-admin-guide?ln=%s">Guide</a></small></td>
</tr>
</table>
""" % (CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, ln)
if mtype == "":
fin_output += """<br /><br /><b><span class="info">To manage the collections, select an item from the menu.</span><b><br />"""
if mtype == "perform_addcollection" and content:
fin_output += content
elif mtype == "perform_addcollection" or mtype == "perform_showall":
fin_output += perform_addcollection(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_addcollectiontotree" and content:
fin_output += content
elif mtype == "perform_addcollectiontotree" or mtype == "perform_showall":
fin_output += perform_addcollectiontotree(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_modifycollectiontree" and content:
fin_output += content
elif mtype == "perform_modifycollectiontree" or mtype == "perform_showall":
fin_output += perform_modifycollectiontree(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_checkwebcollstatus" and content:
fin_output += content
elif mtype == "perform_checkwebcollstatus" or mtype == "perform_showall":
fin_output += perform_checkwebcollstatus(colID, ln, callback='')
if mtype == "perform_checkcollectionstatus" and content:
fin_output += content
elif mtype == "perform_checkcollectionstatus" or mtype == "perform_showall":
fin_output += perform_checkcollectionstatus(colID, ln, callback='')
if mtype == "perform_checkexternalcollections" and content:
fin_output += content
elif mtype == "perform_checkexternalcollections" or mtype == "perform_showall":
fin_output += perform_checkexternalcollections(colID, ln, callback='')
if mtype == "perform_checksearchservices" and content:
fin_output += content
elif mtype == "perform_checksearchservices" or mtype == "perform_showall":
fin_output += perform_checksearchservices(colID, ln, callback='')
    body = [fin_output]
return addadminbox('<b>Menu</b>', body)
def show_coll_not_in_tree(colID, ln, col_dict):
"""Returns collections not in tree"""
tree = get_col_tree(colID)
in_tree = {}
output = "These collections are not in the tree, and should be added:<br />"
for (id, up, down, dad, reltype) in tree:
in_tree[id] = 1
in_tree[dad] = 1
res = run_sql("SELECT id from collection")
if len(res) != len(in_tree):
for id in res:
if not in_tree.has_key(id[0]):
output += """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s" title="Edit collection">%s</a> ,
""" % (CFG_SITE_URL, id[0], ln, col_dict[id[0]])
output += "<br /><br />"
else:
output = ""
return output
def create_colltree(tree, col_dict, colID, ln, move_from='', move_to='', rtype='', edit=''):
"""Creates the presentation of the collection tree, with the buttons for modifying it.
    tree - the tree to present, from get_col_tree()
col_dict - the name of the collections in a dictionary
colID - the collection id to start with
move_from - if a collection to be moved has been chosen
move_to - the collection which should be set as father of move_from
rtype - the type of the tree, regular or virtual
edit - if the method should output the edit buttons."""
if move_from:
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
tree_from = get_col_tree(colID, move_from_rtype)
tree_to = get_col_tree(colID, rtype)
tables = 0
tstack = []
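    # tstack keeps one (id_son, id_dad, table_depth) triple per rendered node so
    # that, when the loop reaches a sibling of an earlier node, it can close the
    # right number of nested <table> elements before emitting the next row.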
i = 0
text = """
<table border ="0" cellspacing="0" cellpadding="0">"""
for i in range(0, len(tree)):
id_son = tree[i][0]
up = tree[i][1]
down = tree[i][2]
dad = tree[i][3]
reltype = tree[i][4]
tmove_from = ""
j = i
while j > 0:
j = j - 1
try:
if tstack[j][1] == dad:
table = tstack[j][2]
for k in range(0, tables - table):
tables = tables - 1
text += """</table></td></tr>
"""
break
except StandardError, e:
pass
text += """<tr><td>
"""
if i > 0 and tree[i][1] == 0:
tables = tables + 1
text += """</td><td></td><td></td><td></td><td><table border="0" cellspacing="0" cellpadding="0"><tr><td>
"""
if i == 0:
tstack.append((id_son, dad, 1))
else:
tstack.append((id_son, dad, tables))
if up == 1 and edit:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_up=%s&rtype=%s#%s"><img border="0" src="%s/img/smallup.gif" title="Move collection up"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
else:
text += """ """
text += "</td><td>"
if down == 1 and edit:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_down=%s&rtype=%s#%s"><img border="0" src="%s/img/smalldown.gif" title="Move collection down"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
else:
text += """ """
text += "</td><td>"
if edit:
if move_from and move_to:
tmove_from = move_from
move_from = ''
if not (move_from == "" and i == 0) and not (move_from != "" and int(move_from[1:len(move_from)]) == i and rtype == move_from[0]):
check = "true"
if move_from:
#if tree_from[move_from_id][0] == tree_to[i][0] or not check_col(tree_to[i][0], tree_from[move_from_id][0]):
# check = ''
#elif not check_col(tree_to[i][0], tree_from[move_from_id][0]):
# check = ''
#if not check and (tree_to[i][0] == 1 and tree_from[move_from_id][3] == tree_to[i][0] and move_from_rtype != rtype):
# check = "true"
if check:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_from=%s&move_to=%s%s&rtype=%s#tree"><img border="0" src="%s/img/move_to.gif" title="Move '%s' to '%s'"></a>
""" % (CFG_SITE_URL, colID, ln, move_from, rtype, i, rtype, CFG_SITE_URL, col_dict[tree_from[int(move_from[1:len(move_from)])][0]], col_dict[tree_to[i][0]])
else:
try:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_from=%s%s&rtype=%s#%s"><img border="0" src="%s/img/move_from.gif" title="Move '%s' from this location."></a>""" % (CFG_SITE_URL, colID, ln, rtype, i, rtype, tree[i][0], CFG_SITE_URL, col_dict[tree[i][0]])
except KeyError:
pass
else:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
else:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
text += """
</td>
<td>"""
if edit:
try:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&delete=%s&rtype=%s#%s"><img border="0" src="%s/img/iconcross.gif" title="Remove colletion from tree"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
except KeyError:
pass
elif i != 0:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
text += """</td><td>
"""
if tmove_from:
move_from = tmove_from
try:
text += """<a name="%s"></a>%s<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s" title="Edit collection">%s</a>%s%s%s""" % (tree[i][0], (reltype=="v" and '<i>' or ''), CFG_SITE_URL, tree[i][0], ln, col_dict[id_son], (move_to=="%s%s" %(rtype, i) and ' <img border="0" src="%s/img/move_to.gif">' % CFG_SITE_URL or ''), (move_from=="%s%s" % (rtype, i) and ' <img border="0" src="%s/img/move_from.gif">' % CFG_SITE_URL or ''), (reltype=="v" and '</i>' or ''))
except KeyError:
pass
text += """</td></tr>
"""
while tables > 0:
text += """</table></td></tr>
"""
tables = tables - 1
text += """</table>"""
return text
def perform_deletecollection(colID, ln, confirm=-1, callback='yes'):
"""form to delete a collection
colID - id of collection
"""
subtitle =''
output = """
<span class="warning">
<strong>
<dl>
<dt>WARNING:</dt>
    <dd>When deleting a collection, you also delete all data related to the collection, such as translations, relations to other collections and information about which rank methods to use.
<br />For more information, please go to the <a title="See guide" href="%s/help/admin/websearch-admin-guide">WebSearch guide</a> and read the section regarding deleting a collection.</dd>
</dl>
</strong>
</span>
""" % CFG_SITE_URL
col_dict = dict(get_def_name('', "collection"))
if colID != 1 and colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="4">4. Delete collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.4">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
res = run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_dad=%s", (colID, ))
res2 = run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_son=%s", (colID, ))
if not res and not res2:
if confirm in ["-1", -1]:
text = """Do you want to delete this collection."""
output += createhiddenform(action="deletecollection#4",
text=text,
colID=colID,
button="Delete",
confirm=0)
elif confirm in ["0", 0]:
text = """Are you sure you want to delete this collection."""
output += createhiddenform(action="deletecollection#4",
text=text,
colID=colID,
button="Confirm",
confirm=1)
elif confirm in ["1", 1]:
                result = delete_col(colID)
                if not result[0]:
                    raise Exception
else:
output = """<b><span class="info">Can not delete a collection that is a part of the collection tree, remove collection from the tree and try again.</span></b>"""
else:
subtitle = """4. Delete collection"""
output = """<b><span class="info">Not possible to delete the root collection</span></b>"""
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_deletecollection", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_editcollection(colID=1, ln=CFG_SITE_LANG, mtype='', content=''):
"""interface to modify a collection. this method is calling other methods which again is calling this and sending back the output of the method.
if callback, the method will call perform_editcollection, if not, it will just return its output.
colID - id of the collection
mtype - the method that called this method.
content - the output from that method."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
if not col_dict.has_key(colID):
return """<b><span class="info">Collection deleted.</span></b>
"""
fin_output = """
<table>
<tr>
<td><b>Menu</b></td>
</tr>
<tr>
<td>0. <small><a href="editcollection?colID=%s&ln=%s">Show all</a></small></td>
<td>1. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifydbquery">Modify collection query</a></small></td>
<td>2. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifyrestricted">Modify access restrictions</a></small></td>
<td>3. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifytranslations">Modify translations</a></small></td>
<td>4. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_deletecollection">Delete collection</a></small></td>
</tr><tr>
<td>5. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showportalboxes">Modify portalboxes</a></small></td>
<td>6. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsearchfields#6">Modify search fields</a></small></td>
<td>7. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsearchoptions#7">Modify search options</a></small></td>
<td>8. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsortoptions#8">Modify sort options</a></small></td>
<td>9. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifyrankmethods#9">Modify rank options</a></small></td>
</tr><tr>
<td>10. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showoutputformats#10">Modify output formats</a></small></td>
<td>11. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_manage_external_collections#11">Configuration of related external collections</a></small></td>
<td>12. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showdetailedrecordoptions#12">Detailed record page options</a></small></td>
</tr>
</table>
""" % (colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln)
if mtype == "perform_modifydbquery" and content:
fin_output += content
elif mtype == "perform_modifydbquery" or not mtype:
fin_output += perform_modifydbquery(colID, ln, callback='')
if mtype == "perform_modifyrestricted" and content:
fin_output += content
elif mtype == "perform_modifyrestricted" or not mtype:
fin_output += perform_modifyrestricted(colID, ln, callback='')
if mtype == "perform_modifytranslations" and content:
fin_output += content
elif mtype == "perform_modifytranslations" or not mtype:
fin_output += perform_modifytranslations(colID, ln, callback='')
if mtype == "perform_deletecollection" and content:
fin_output += content
elif mtype == "perform_deletecollection" or not mtype:
fin_output += perform_deletecollection(colID, ln, callback='')
if mtype == "perform_showportalboxes" and content:
fin_output += content
elif mtype == "perform_showportalboxes" or not mtype:
fin_output += perform_showportalboxes(colID, ln, callback='')
if mtype == "perform_showsearchfields" and content:
fin_output += content
elif mtype == "perform_showsearchfields" or not mtype:
fin_output += perform_showsearchfields(colID, ln, callback='')
if mtype == "perform_showsearchoptions" and content:
fin_output += content
elif mtype == "perform_showsearchoptions" or not mtype:
fin_output += perform_showsearchoptions(colID, ln, callback='')
if mtype == "perform_showsortoptions" and content:
fin_output += content
elif mtype == "perform_showsortoptions" or not mtype:
fin_output += perform_showsortoptions(colID, ln, callback='')
if mtype == "perform_modifyrankmethods" and content:
fin_output += content
elif mtype == "perform_modifyrankmethods" or not mtype:
fin_output += perform_modifyrankmethods(colID, ln, callback='')
if mtype == "perform_showoutputformats" and content:
fin_output += content
elif mtype == "perform_showoutputformats" or not mtype:
fin_output += perform_showoutputformats(colID, ln, callback='')
if mtype == "perform_manage_external_collections" and content:
fin_output += content
elif mtype == "perform_manage_external_collections" or not mtype:
fin_output += perform_manage_external_collections(colID, ln, callback='')
if mtype == "perform_showdetailedrecordoptions" and content:
fin_output += content
elif mtype == "perform_showdetailedrecordoptions" or not mtype:
fin_output += perform_showdetailedrecordoptions(colID, ln, callback='')
return addadminbox("Overview of edit options for collection '%s'" % col_dict[colID], [fin_output])
def perform_checkwebcollstatus(colID, ln, confirm=0, callback='yes'):
"""Check status of the collection tables with respect to the webcoll cache."""
subtitle = """<a name="11"></a>Webcoll Status [<a href="%s/help/admin/websearch-admin-guide#5">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
output += """<br /><b>Last updates:</b><br />"""
collection_table_update_time = ""
collection_web_update_time = ""
collection_table_update_time = get_table_update_time('collection')
output += "Collection table last updated: %s<br />" % collection_table_update_time
try:
file = open("%s/collections/last_updated" % CFG_CACHEDIR)
collection_web_update_time = file.readline().strip()
output += "Collection cache last updated: %s<br />" % collection_web_update_time
file.close()
except:
pass
# reformat collection_web_update_time to the format suitable for comparisons
try:
collection_web_update_time = strftime("%Y-%m-%d %H:%M:%S",
time.strptime(collection_web_update_time, "%d %b %Y %H:%M:%S"))
except ValueError, e:
pass
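    # Illustrative example (assumed cache format): a cache line such as
    # "23 Apr 2013 10:32:01" is rewritten to "2013-04-23 10:32:01", so that the
    # lexicographic comparison with the MySQL-style table update time below is
    # meaningful.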
if collection_table_update_time > collection_web_update_time:
output += """<br /><b><span class="info">Warning: The collections have been modified since last time Webcoll was executed, to process the changes, Webcoll must be executed.</span></b><br />"""
header = ['ID', 'Name', 'Time', 'Status', 'Progress']
actions = []
output += """<br /><b>Last BibSched tasks:</b><br />"""
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='webcoll' and runtime< now() ORDER by runtime")
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[len(res) - 1]
        webcoll_update_time = runtime
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'webcoll', '', '', 'Not executed yet'])
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='bibindex' and runtime< now() ORDER by runtime")
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[len(res) - 1]
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'bibindex', '', '', 'Not executed yet'])
output += tupletotable(header=header, tuple=actions)
output += """<br /><b>Next scheduled BibSched run:</b><br />"""
actions = []
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='webcoll' and runtime > now() ORDER by runtime")
webcoll_future = ""
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[0]
        webcoll_update_time = runtime
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
webcoll_future = "yes"
else:
actions.append(['', 'webcoll', '', '', 'Not scheduled'])
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='bibindex' and runtime > now() ORDER by runtime")
bibindex_future = ""
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[0]
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
bibindex_future = "yes"
else:
actions.append(['', 'bibindex', '', '', 'Not scheduled'])
output += tupletotable(header=header, tuple=actions)
if webcoll_future == "":
output += """<br /><b><span class="info">Warning: Webcoll is not scheduled for a future run by bibsched, any updates to the collection will not be processed.</span></b><br />"""
if bibindex_future == "":
output += """<br /><b><span class="info">Warning: Bibindex is not scheduled for a future run by bibsched, any updates to the records will not be processed.</span></b><br />"""
body = [output]
if callback:
return perform_index(colID, ln, "perform_checkwebcollstatus", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifyrestricted(colID, ln, rest='', callback='yes', confirm=-1):
"""modify which apache group is allowed to access the collection.
rest - the groupname"""
subtitle = ''
output = ""
col_dict = dict(get_def_name('', "collection"))
action_id = acc_get_action_id(VIEWRESTRCOLL)
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="2">2. Modify access restrictions for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.2">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<p>Please note that Invenio versions greater than <em>0.92.1</em> manage collection restriction via the standard
<strong><a href="/admin/webaccess/webaccessadmin.py/showactiondetails?id_action=%i">WebAccess Admin Interface</a></strong> (action '%s').</p>
""" % (action_id, VIEWRESTRCOLL)
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifyrestricted", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_checkcollectionstatus(colID, ln, confirm=0, callback='yes'):
"""Check the configuration of the collections."""
from invenio.search_engine import collection_restricted_p, restricted_collection_cache
subtitle = """<a name="11"></a>Collection Status [<a href="%s/help/admin/websearch-admin-guide#6">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
collections = run_sql("SELECT id, name, dbquery, nbrecs FROM collection "
"ORDER BY id")
header = ['ID', 'Name','Query', 'Subcollections', 'Restricted', 'Hosted',
'I18N', 'Status', 'Number of records']
rnk_list = get_def_name('', "rnkMETHOD")
actions = []
restricted_collection_cache.recreate_cache_if_needed()
for (id, name, dbquery, nbrecs) in collections:
reg_sons = col_has_son(id, 'r')
vir_sons = col_has_son(id, 'v')
status = ""
hosted = ""
        if str(dbquery).startswith("hostedcollection:"):
            hosted = """<b><span class="info">Yes</span></b>"""
        else:
            hosted = """<b><span class="info">No</span></b>"""
langs = run_sql("SELECT ln from collectionname where id_collection=%s", (id, ))
i8n = ""
if len(langs) > 0:
for lang in langs:
i8n += "%s, " % lang
else:
i8n = """<b><span class="info">None</span></b>"""
if reg_sons and dbquery:
status = """<b><span class="warning">1:Conflict</span></b>"""
elif not dbquery and not reg_sons:
status = """<b><span class="warning">2:Empty</span></b>"""
if (reg_sons or vir_sons):
subs = """<b><span class="info">Yes</span></b>"""
else:
subs = """<b><span class="info">No</span></b>"""
if dbquery is None:
dbquery = """<b><span class="info">No</span></b>"""
restricted = collection_restricted_p(name, recreate_cache_if_needed=False)
if restricted:
restricted = """<b><span class="warning">Yes</span></b>"""
if status:
status += """<b><span class="warning">,3:Restricted</span></b>"""
else:
status += """<b><span class="warning">3:Restricted</span></b>"""
else:
restricted = """<b><span class="info">No</span></b>"""
if status == "":
status = """<b><span class="info">OK</span></b>"""
actions.append([id, """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s">%s</a>""" % (CFG_SITE_URL, id, ln, name), dbquery, subs, restricted, hosted, i8n, status, nbrecs])
output += tupletotable(header=header, tuple=actions)
body = [output]
if callback:
return perform_index(colID, ln, "perform_checkcollectionstatus", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_checkexternalcollections(colID, ln, icl=None, update="", confirm=0, callback='yes'):
"""Check the external collections for inconsistencies."""
subtitle = """<a name="7"></a>Check external collections [<a href="%s/help/admin/websearch-admin-guide#7">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
if icl:
if update == "add":
# icl : the "inconsistent list" comes as a string, it has to be converted back into a list
icl = eval(icl)
#icl = icl[1:-1].split(',')
for collection in icl:
#collection = str(collection[1:-1])
query_select = "SELECT name FROM externalcollection WHERE name like '%(name)s';" % {'name': collection}
results_select = run_sql(query_select)
if not results_select:
query_insert = "INSERT INTO externalcollection (name) VALUES ('%(name)s');" % {'name': collection}
run_sql(query_insert)
output += """<br /><span class=info>New collection \"%s\" has been added to the database table \"externalcollection\".</span><br />""" % (collection)
else:
output += """<br /><span class=info>Collection \"%s\" has already been added to the database table \"externalcollection\" or was already there.</span><br />""" % (collection)
elif update == "del":
# icl : the "inconsistent list" comes as a string, it has to be converted back into a list
icl = eval(icl)
#icl = icl[1:-1].split(',')
for collection in icl:
#collection = str(collection[1:-1])
query_select = "SELECT id FROM externalcollection WHERE name like '%(name)s';" % {'name': collection}
results_select = run_sql(query_select)
if results_select:
query_delete = "DELETE FROM externalcollection WHERE id like '%(id)s';" % {'id': results_select[0][0]}
query_delete_states = "DELETE FROM collection_externalcollection WHERE id_externalcollection like '%(id)s';" % {'id': results_select[0][0]}
run_sql(query_delete)
run_sql(query_delete_states)
output += """<br /><span class=info>Collection \"%s\" has been deleted from the database table \"externalcollection\".</span><br />""" % (collection)
else:
output += """<br /><span class=info>Collection \"%s\" has already been delete from the database table \"externalcollection\" or was never there.</span><br />""" % (collection)
external_collections_file = []
external_collections_db = []
for coll in external_collections_dictionary.values():
external_collections_file.append(coll.name)
external_collections_file.sort()
query = """SELECT name from externalcollection"""
results = run_sql(query)
for result in results:
external_collections_db.append(result[0])
external_collections_db.sort()
number_file = len(external_collections_file)
number_db = len(external_collections_db)
if external_collections_file == external_collections_db:
output += """<br /><span class="info">External collections are consistent.</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections""" % {
"number_db" : number_db,
"number_file" : number_file}
elif len(external_collections_file) > len(external_collections_db):
external_collections_diff = list(set(external_collections_file) - set(external_collections_db))
external_collections_db.extend(external_collections_diff)
external_collections_db.sort()
if external_collections_file == external_collections_db:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections
(<span class="warning">missing: %(diff)s</span>)<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><a href="%(site_url)s/admin/websearch/websearchadmin.py/checkexternalcollections?colID=%(colID)s&icl=%(diff)s&update=add&ln=%(ln)s">
Click here</a> to update your database adding the missing collections. If the problem persists please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file,
"diff" : external_collections_diff,
"site_url" : CFG_SITE_URL,
"colID" : colID,
"ln" : ln}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The external collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
elif len(external_collections_file) < len(external_collections_db):
external_collections_diff = list(set(external_collections_db) - set(external_collections_file))
external_collections_file.extend(external_collections_diff)
external_collections_file.sort()
if external_collections_file == external_collections_db:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections
(<span class="warning">extra: %(diff)s</span>)<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><a href="%(site_url)s/admin/websearch/websearchadmin.py/checkexternalcollections?colID=%(colID)s&icl=%(diff)s&update=del&ln=%(ln)s">
Click here</a> to force remove the extra collections from your database (warning: use with caution!). If the problem persists please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file,
"diff" : external_collections_diff,
"site_url" : CFG_SITE_URL,
"colID" : colID,
"ln" : ln}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The external collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The number of external collections is the same but the collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
body = [output]
if callback:
return perform_index(colID, ln, "perform_checkexternalcollections", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_checksearchservices(colID, ln, icl=None, update="", confirm=0, callback='yes'):
"""Check the enabled search services, and possible errors"""
from invenio.pluginutils import PluginContainer
from invenio.websearch_services import CFG_SEARCH_SERVICES_PATH, \
__required_plugin_API_version__, \
SearchService
subtitle = """<a name="10"></a>Check search services [<a href="%s/help/admin/websearch-admin-guide#10">?</a>]""" % CFG_SITE_URL
output = ""
output += "<p>You can enable a search service by dropping the corresonpding plugin at <code>%s</code>.</p>" % \
cgi.escape(CFG_SEARCH_SERVICES_PATH)
search_service_plugins = PluginContainer(os.path.join(CFG_SEARCH_SERVICES_PATH, '*Service.py'),
api_version=__required_plugin_API_version__,
plugin_signature=SearchService)
output += "<br /><b>Enabled search services:</b><br />"
header = ['Service', 'Description', 'Status']
actions = []
for name, plugin in search_service_plugins.get_enabled_plugins().iteritems():
description = plugin().get_description()
        actions.append((name, description, '<span style="color:#080">OK</span>'))
if actions:
output += tupletotable(header=header, tuple=actions)
else:
output += '<em style="color:#f80;font-size:small">No search service enabled</em>'
output += "<br /><b>Search services with errors:</b><br />"
header = ['Service', 'Error']
actions = []
for name, error in search_service_plugins.get_broken_plugins().iteritems():
actions.append((name, '<pre style="color:#800">' + cgi.escape(repr(error[0]) + " " + repr(error[1]) + "\n" + "\n".join(traceback.format_tb(error[2]))) + '</pre>'))
if actions:
output += tupletotable(header=header, tuple=actions)
else:
output += '<em style="color:#080;font-size:small">No error found</em>'
body = [output]
if callback:
return perform_index(colID, ln, "perform_checksearchservices", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def col_has_son(colID, rtype='r'):
"""Return True if the collection has at least one son."""
return run_sql("SELECT id_son FROM collection_collection WHERE id_dad=%s and type=%s LIMIT 1", (colID, rtype)) != ()
def get_col_tree(colID, rtype=''):
"""Returns a presentation of the tree as a list. TODO: Add loop detection
colID - startpoint for the tree
rtype - get regular or virtual part of the tree"""
try:
colID = int(colID)
stack = [colID]
ssize = 0
tree = [(colID, 0, 0, colID, 'r')]
while len(stack) > 0:
ccolID = stack.pop()
if ccolID == colID and rtype:
res = run_sql("SELECT id_son, score, type FROM collection_collection WHERE id_dad=%s AND type=%s ORDER BY score ASC,id_son", (ccolID, rtype))
else:
res = run_sql("SELECT id_son, score, type FROM collection_collection WHERE id_dad=%s ORDER BY score ASC,id_son", (ccolID, ))
ssize += 1
ntree = []
for i in range(0, len(res)):
id_son = res[i][0]
score = res[i][1]
rtype = res[i][2]
stack.append(id_son)
if i == (len(res) - 1):
up = 0
else:
up = 1
if i == 0:
down = 0
else:
down = 1
ntree.insert(0, (id_son, up, down, ccolID, rtype))
tree = tree[0:ssize] + ntree + tree[ssize:len(tree)]
return tree
except StandardError, e:
register_exception()
return ()
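# Illustrative note (not part of the original code): get_col_tree() returns a
# flat list of (id_son, up, down, id_dad, rtype) tuples in presentation order.
# up/down are 1 when the tree renderer should draw a move-up/move-down arrow
# for the node, id_dad is the parent collection id, and rtype is 'r' for a
# regular or 'v' for a virtual attachment.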
def add_col_dad_son(add_dad, add_son, rtype):
"""Add a son to a collection (dad)
add_dad - add to this collection id
add_son - add this collection id
rtype - either regular or virtual"""
try:
res = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s ORDER BY score ASC", (add_dad, ))
highscore = 0
for score in res:
if int(score[0]) > highscore:
highscore = int(score[0])
highscore += 1
res = run_sql("INSERT INTO collection_collection(id_dad,id_son,score,type) values(%s,%s,%s,%s)", (add_dad, add_son, highscore, rtype))
return (1, highscore)
except StandardError, e:
register_exception()
return (0, e)
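# Minimal usage sketch (hypothetical ids): attach collection 5 as a regular son
# of collection 2, placing it after the existing sons:
#
#   (ok, score_or_err) = add_col_dad_son(2, 5, 'r')
#
# The highscore loop above is equivalent to max(existing scores) + 1, with 1
# used when the dad has no sons yet.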
def compare_on_val(first, second):
"""Compare the two values"""
return cmp(first[1], second[1])
def get_col_fld(colID=-1, type = '', id_field=''):
"""Returns either all portalboxes associated with a collection, or based on either colID or language or both.
colID - collection id
ln - language id"""
sql = "SELECT id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue, field WHERE id_field=field.id"
params = []
if colID > -1:
sql += " AND id_collection=%s"
params.append(colID)
if id_field:
sql += " AND id_field=%s"
params.append(id_field)
if type:
sql += " AND type=%s"
params.append(type)
sql += " ORDER BY type, score desc, score_fieldvalue desc"
res = run_sql(sql, tuple(params))
return res
def get_col_pbx(colID=-1, ln='', position = ''):
"""Returns either all portalboxes associated with a collection, or based on either colID or language or both.
colID - collection id
ln - language id"""
sql = "SELECT id_portalbox, id_collection, ln, score, position, title, body FROM collection_portalbox, portalbox WHERE id_portalbox = portalbox.id"
params = []
if colID > -1:
sql += " AND id_collection=%s"
params.append(colID)
if ln:
sql += " AND ln=%s"
params.append(ln)
if position:
sql += " AND position=%s"
params.append(position)
sql += " ORDER BY position, ln, score desc"
res = run_sql(sql, tuple(params))
return res
def get_col_fmt(colID=-1):
"""Returns all formats currently associated with a collection, or for one specific collection
colID - the id of the collection"""
if colID not in [-1, "-1"]:
res = run_sql("SELECT id_format, id_collection, code, score FROM collection_format, format WHERE id_format = format.id AND id_collection=%s ORDER BY score desc", (colID, ))
else:
res = run_sql("SELECT id_format, id_collection, code, score FROM collection_format, format WHERE id_format = format.id ORDER BY score desc")
return res
def get_col_rnk(colID, ln):
""" Returns a list of the rank methods the given collection is attached to
colID - id from collection"""
try:
res1 = dict(run_sql("SELECT id_rnkMETHOD, '' FROM collection_rnkMETHOD WHERE id_collection=%s", (colID, )))
res2 = get_def_name('', "rnkMETHOD")
result = filter(lambda x: res1.has_key(x[0]), res2)
return result
except StandardError, e:
return ()
def get_pbx():
"""Returns all portalboxes"""
res = run_sql("SELECT id, title, body FROM portalbox ORDER by title,body")
return res
def get_fld_value(fldvID = ''):
"""Returns fieldvalue"""
sql = "SELECT id, name, value FROM fieldvalue"
params = []
if fldvID:
sql += " WHERE id=%s"
params.append(fldvID)
sql += " ORDER BY name"
res = run_sql(sql, tuple(params))
return res
def get_pbx_pos():
"""Returns a list of all the positions for a portalbox"""
position = {}
position["rt"] = "Right Top"
position["lt"] = "Left Top"
position["te"] = "Title Epilog"
position["tp"] = "Title Prolog"
position["ne"] = "Narrow by coll epilog"
position["np"] = "Narrow by coll prolog"
return position
def get_sort_nametypes():
"""Return a list of the various translationnames for the fields"""
type = {}
type['soo'] = 'Sort options'
type['seo'] = 'Search options'
type['sew'] = 'Search within'
return type
def get_fmt_nametypes():
"""Return a list of the various translationnames for the output formats"""
type = []
type.append(('ln', 'Long name'))
return type
def get_fld_nametypes():
"""Return a list of the various translationnames for the fields"""
type = []
type.append(('ln', 'Long name'))
return type
def get_col_nametypes():
"""Return a list of the various translationnames for the collections"""
type = []
type.append(('ln', 'Collection name'))
return type
def find_last(tree, start_son):
"""Find the previous collection in the tree with the same father as start_son"""
id_dad = tree[start_son][3]
while start_son > 0:
start_son -= 1
if tree[start_son][3] == id_dad:
return start_son
def find_next(tree, start_son):
"""Find the next collection in the tree with the same father as start_son"""
id_dad = tree[start_son][3]
    while start_son < len(tree) - 1:
start_son += 1
if tree[start_son][3] == id_dad:
return start_son
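# Illustrative example (hypothetical tree): with
#   tree = [(1, 0, 0, 1, 'r'), (2, 1, 0, 1, 'r'), (3, 0, 1, 1, 'r')]
# find_next(tree, 1) returns index 2 (the entry for collection 3) because it
# shares dad 1 with the entry at index 1, and find_last(tree, 2) walks
# backwards and returns index 1.  Both return None when no such sibling exists.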
def remove_col_subcol(id_son, id_dad, type):
"""Remove a collection as a son of another collection in the tree, if collection isn't used elsewhere in the tree, remove all registered sons of the id_son.
id_son - collection id of son to remove
id_dad - the id of the dad"""
try:
if id_son != id_dad:
tree = get_col_tree(id_son)
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (id_son, id_dad))
else:
tree = get_col_tree(id_son, type)
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s and type=%s", (id_son, id_dad, type))
if not run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_son=%s and type=%s", (id_son, type)):
for (id, up, down, dad, rtype) in tree:
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (id, dad))
return (1, "")
except StandardError, e:
return (0, e)
def check_col(add_dad, add_son):
"""Check if the collection can be placed as a son of the dad without causing loops.
add_dad - collection id
add_son - collection id"""
try:
stack = [add_dad]
res = run_sql("SELECT id_dad FROM collection_collection WHERE id_dad=%s AND id_son=%s", (add_dad, add_son))
if res:
raise StandardError
while len(stack) > 0:
colID = stack.pop()
res = run_sql("SELECT id_dad FROM collection_collection WHERE id_son=%s", (colID, ))
for id in res:
if int(id[0]) == int(add_son):
                    # raise StandardError  # this was the original but it didn't work
                    return 0
else:
stack.append(id[0])
return (1, "")
except StandardError, e:
return (0, e)
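# Minimal usage sketch (hypothetical ids): before attaching collection 7 under
# collection 4, verify that the move cannot create a cycle:
#
#   if check_col(4, 7) == (1, ""):
#       add_col_dad_son(4, 7, 'r')
#
# check_col() walks upwards from add_dad; if add_son turns up among the
# ancestors, attaching it would close a loop.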
def attach_rnk_col(colID, rnkID):
"""attach rank method to collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("INSERT INTO collection_rnkMETHOD(id_collection, id_rnkMETHOD) values (%s,%s)", (colID, rnkID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def detach_rnk_col(colID, rnkID):
"""detach rank method from collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_collection=%s AND id_rnkMETHOD=%s", (colID, rnkID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def switch_col_treescore(col_1, col_2):
try:
res1 = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s and id_son=%s", (col_1[3], col_1[0]))
res2 = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s and id_son=%s", (col_2[3], col_2[0]))
res = run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s and id_son=%s", (res2[0][0], col_1[3], col_1[0]))
res = run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s and id_son=%s", (res1[0][0], col_2[3], col_2[0]))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def move_col_tree(col_from, col_to, move_to_rtype=''):
"""Move a collection from one point in the tree to another. becomes a son of the endpoint.
col_from - move this collection from current point
col_to - and set it as a son of this collection.
move_to_rtype - either virtual or regular collection"""
try:
res = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s ORDER BY score asc", (col_to[0], ))
highscore = 0
for score in res:
if int(score[0]) > highscore:
highscore = int(score[0])
highscore += 1
if not move_to_rtype:
move_to_rtype = col_from[4]
res = run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (col_from[0], col_from[3]))
res = run_sql("INSERT INTO collection_collection(id_dad,id_son,score,type) values(%s,%s,%s,%s)", (col_to[0], col_from[0], highscore, move_to_rtype))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
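# Minimal usage sketch (hypothetical tree entries): make the node stored in
# tree[i] the last son of the node stored in tree[j]:
#
#   (ok, err) = move_col_tree(tree[i], tree[j])
#
# Both arguments are (id, up, down, dad, rtype) tuples as returned by
# get_col_tree(); the son keeps its relation type unless move_to_rtype is
# given.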
def remove_pbx(colID, pbxID, ln):
"""Removes a portalbox from the collection given.
colID - the collection the format is connected to
pbxID - the portalbox which should be removed from the collection.
ln - the language of the portalbox to be removed"""
try:
res = run_sql("DELETE FROM collection_portalbox WHERE id_collection=%s AND id_portalbox=%s AND ln=%s", (colID, pbxID, ln))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def remove_fmt(colID, fmtID):
"""Removes a format from the collection given.
colID - the collection the format is connected to
fmtID - the format which should be removed from the collection."""
try:
res = run_sql("DELETE FROM collection_format WHERE id_collection=%s AND id_format=%s", (colID, fmtID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def remove_fld(colID, fldID, fldvID=''):
"""Removes a field from the collection given.
colID - the collection the format is connected to
fldID - the field which should be removed from the collection."""
try:
sql = "DELETE FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s"
params = [colID, fldID]
if fldvID:
if fldvID != "None":
sql += " AND id_fieldvalue=%s"
params.append(fldvID)
else:
sql += " AND id_fieldvalue is NULL"
res = run_sql(sql, tuple(params))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_fldv(fldvID):
"""Deletes all data for the given fieldvalue
fldvID - delete all data in the tables associated with fieldvalue and this id"""
try:
res = run_sql("DELETE FROM collection_field_fieldvalue WHERE id_fieldvalue=%s", (fldvID, ))
res = run_sql("DELETE FROM fieldvalue WHERE id=%s", (fldvID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_pbx(pbxID):
"""Deletes all data for the given portalbox
pbxID - delete all data in the tables associated with portalbox and this id """
try:
res = run_sql("DELETE FROM collection_portalbox WHERE id_portalbox=%s", (pbxID, ))
res = run_sql("DELETE FROM portalbox WHERE id=%s", (pbxID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_fmt(fmtID):
"""Deletes all data for the given format
fmtID - delete all data in the tables associated with format and this id """
try:
res = run_sql("DELETE FROM format WHERE id=%s", (fmtID, ))
res = run_sql("DELETE FROM collection_format WHERE id_format=%s", (fmtID, ))
res = run_sql("DELETE FROM formatname WHERE id_format=%s", (fmtID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_col(colID):
"""Deletes all data for the given collection
colID - delete all data in the tables associated with collection and this id """
try:
res = run_sql("DELETE FROM collection WHERE id=%s", (colID, ))
res = run_sql("DELETE FROM collectionname WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_collection WHERE id_dad=%s", (colID, ))
res = run_sql("DELETE FROM collection_collection WHERE id_son=%s", (colID, ))
res = run_sql("DELETE FROM collection_portalbox WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_format WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_field_fieldvalue WHERE id_collection=%s", (colID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
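# Note: the deletes in delete_col() run as independent statements, so a failure
# midway can leave partial rows behind; wrapping them in a single transaction
# would be safer where the database layer supports it.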
def add_fmt(code, name, rtype):
"""Add a new output format. Returns the id of the format.
code - the code for the format, max 6 chars.
name - the default name for the default language of the format.
rtype - the default nametype"""
try:
res = run_sql("INSERT INTO format (code, name) values (%s,%s)", (code, name))
fmtID = run_sql("SELECT id FROM format WHERE code=%s", (code,))
res = run_sql("INSERT INTO formatname(id_format, type, ln, value) VALUES (%s,%s,%s,%s)",
(fmtID[0][0], rtype, CFG_SITE_LANG, name))
return (1, fmtID)
except StandardError, e:
register_exception()
return (0, e)
def update_fldv(fldvID, name, value):
"""Modify existing fieldvalue
fldvID - id of fieldvalue to modify
value - the value of the fieldvalue
name - the name of the fieldvalue."""
try:
res = run_sql("UPDATE fieldvalue set name=%s where id=%s", (name, fldvID))
res = run_sql("UPDATE fieldvalue set value=%s where id=%s", (value, fldvID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_fldv(name, value):
"""Add a new fieldvalue, returns id of fieldvalue
value - the value of the fieldvalue
name - the name of the fieldvalue."""
try:
res = run_sql("SELECT id FROM fieldvalue WHERE name=%s and value=%s", (name, value))
if not res:
res = run_sql("INSERT INTO fieldvalue (name, value) values (%s,%s)", (name, value))
res = run_sql("SELECT id FROM fieldvalue WHERE name=%s and value=%s", (name, value))
if res:
return (1, res[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
def add_pbx(title, body):
    """Add a new portalbox. Returns the id of the portalbox.
    title - the title of the portalbox
    body - the body (content) of the portalbox."""
try:
res = run_sql("INSERT INTO portalbox (title, body) values (%s,%s)", (title, body))
res = run_sql("SELECT id FROM portalbox WHERE title=%s AND body=%s", (title, body))
if res:
return (1, res[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
def add_col(colNAME, dbquery=None):
"""Adds a new collection to collection table
colNAME - the default name for the collection, saved to collection and collectionname
dbquery - query related to the collection"""
    # BTW, sometimes '' is passed instead of None, so change it to None
if not dbquery:
dbquery = None
try:
rtype = get_col_nametypes()[0][0]
colID = run_sql("SELECT id FROM collection WHERE id=1")
if colID:
res = run_sql("INSERT INTO collection (name,dbquery) VALUES (%s,%s)",
(colNAME,dbquery))
else:
res = run_sql("INSERT INTO collection (id,name,dbquery) VALUES (1,%s,%s)",
(colNAME,dbquery))
colID = run_sql("SELECT id FROM collection WHERE name=%s", (colNAME,))
res = run_sql("INSERT INTO collectionname(id_collection, type, ln, value) VALUES (%s,%s,%s,%s)",
(colID[0][0], rtype, CFG_SITE_LANG, colNAME))
if colID:
return (1, colID[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
def add_col_pbx(colID, pbxID, ln, position, score=''):
"""add a portalbox to the collection.
colID - the id of the collection involved
pbxID - the portalbox to add
ln - which language the portalbox is for
score - decides which portalbox is the most important
position - position on page the portalbox should appear."""
try:
if score:
res = run_sql("INSERT INTO collection_portalbox(id_portalbox, id_collection, ln, score, position) values (%s,%s,'%s',%s,%s)", (real_escape_string(pbxID), real_escape_string(colID), real_escape_string(ln), real_escape_string(score), real_escape_string(position))) # kwalitee: disable=sql
else:
res = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and ln=%s and position=%s ORDER BY score desc, ln, position", (colID, ln, position))
if res:
score = int(res[0][0])
else:
score = 0
res = run_sql("INSERT INTO collection_portalbox(id_portalbox, id_collection, ln, score, position) values (%s,%s,%s,%s,%s)", (pbxID, colID, ln, (score + 1), position))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_col_fmt(colID, fmtID, score=''):
"""Add a output format to the collection.
colID - the id of the collection involved
fmtID - the id of the format.
score - the score of the format, decides sorting, if not given, place the format on top"""
try:
if score:
res = run_sql("INSERT INTO collection_format(id_format, id_collection, score) values (%s,%s,%s)", (fmtID, colID, score))
else:
res = run_sql("SELECT score FROM collection_format WHERE id_collection=%s ORDER BY score desc", (colID, ))
if res:
score = int(res[0][0])
else:
score = 0
res = run_sql("INSERT INTO collection_format(id_format, id_collection, score) values (%s,%s,%s)", (fmtID, colID, (score + 1)))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_col_fld(colID, fldID, type, fldvID=''):
"""Add a sort/search/field to the collection.
colID - the id of the collection involved
fldID - the id of the field.
fldvID - the id of the fieldvalue.
type - which type, seo, sew...
score - the score of the format, decides sorting, if not given, place the format on top"""
try:
if fldvID and fldvID not in [-1, "-1"]:
run_sql("DELETE FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s and id_fieldvalue is NULL", (colID, fldID, type))
res = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s ORDER BY score desc", (colID, fldID, type))
if res:
score = int(res[0][0])
res = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s ORDER BY score_fieldvalue desc", (colID, fldID, type))
else:
res = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and type=%s ORDER BY score desc", (colID, type))
if res:
score = int(res[0][0]) + 1
else:
score = 1
res = run_sql("SELECT id_collection,id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue where id_field=%s and id_collection=%s and type=%s and id_fieldvalue=%s", (fldID, colID, type, fldvID))
if not res:
run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=score_fieldvalue+1 WHERE id_field=%s AND id_collection=%s and type=%s", (fldID, colID, type))
res = run_sql("INSERT INTO collection_field_fieldvalue(id_field, id_fieldvalue, id_collection, type, score, score_fieldvalue) values (%s,%s,%s,%s,%s,%s)", (fldID, fldvID, colID, type, score, 1))
else:
return (0, (1, "Already exists"))
else:
res = run_sql("SELECT id_collection,id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s AND type=%s and id_field=%s and id_fieldvalue is NULL", (colID, type, fldID))
if res:
return (0, (1, "Already exists"))
else:
run_sql("UPDATE collection_field_fieldvalue SET score=score+1")
res = run_sql("INSERT INTO collection_field_fieldvalue(id_field, id_collection, type, score,score_fieldvalue) values (%s,%s,%s,%s, 0)", (fldID, colID, type, 1))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def modify_dbquery(colID, dbquery=None):
"""Modify the dbquery of an collection.
colID - the id of the collection involved
dbquery - the new dbquery"""
# BTW, sometimes '' is passed instead of None, so change it to None
if not dbquery:
dbquery = None
try:
res = run_sql("UPDATE collection SET dbquery=%s WHERE id=%s", (dbquery, colID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def modify_pbx(colID, pbxID, sel_ln, score='', position='', title='', body=''):
"""Modify a portalbox
colID - the id of the collection involved
pbxID - the id of the portalbox that should be modified
sel_ln - the language of the portalbox that should be modified
title - the title
body - the content
score - if several portalboxes in one position, who should appear on top.
position - position on page"""
try:
if title:
res = run_sql("UPDATE portalbox SET title=%s WHERE id=%s", (title, pbxID))
if body:
res = run_sql("UPDATE portalbox SET body=%s WHERE id=%s", (body, pbxID))
if score:
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (score, colID, pbxID, sel_ln))
if position:
res = run_sql("UPDATE collection_portalbox SET position=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (position, colID, pbxID, sel_ln))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_fld_score(colID, id_1, id_2):
"""Switch the scores of id_1 and id_2 in collection_field_fieldvalue
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s", (colID, id_1))
res2 = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s", (colID, id_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
else:
res = run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (res2[0][0], colID, id_1))
res = run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (res1[0][0], colID, id_2))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def switch_fld_value_score(colID, id_1, fldvID_1, fldvID_2):
"""Switch the scores of two field_value
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (colID, id_1, fldvID_1))
res2 = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (colID, id_1, fldvID_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
else:
res = run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (res2[0][0], colID, id_1, fldvID_1))
res = run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (res1[0][0], colID, id_1, fldvID_2))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_pbx_score(colID, id_1, id_2, sel_ln):
"""Switch the scores of id_1 and id_2 in the table given by the argument.
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and id_portalbox=%s and ln=%s", (colID, id_1, sel_ln))
res2 = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and id_portalbox=%s and ln=%s", (colID, id_2, sel_ln))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (res2[0][0], colID, id_1, sel_ln))
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (res1[0][0], colID, id_2, sel_ln))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_score(colID, id_1, id_2, table):
"""Switch the scores of id_1 and id_2 in the table given by the argument.
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_%s WHERE id_collection=%%s and id_%s=%%s"% (wash_table_column_name(table), wash_table_column_name(table)), (colID, id_1))
res2 = run_sql("SELECT score FROM collection_%s WHERE id_collection=%%s and id_%s=%%s"% (wash_table_column_name(table), wash_table_column_name(table)), (colID, id_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
res = run_sql("UPDATE collection_%s SET score=%%s WHERE id_collection=%%s and id_%s=%%s"% (wash_table_column_name(table), wash_table_column_name(table)), (res2[0][0], colID, id_1))
res = run_sql("UPDATE collection_%s SET score=%%s WHERE id_collection=%%s and id_%s=%%s"% (wash_table_column_name(table), wash_table_column_name(table)), (res1[0][0], colID, id_2))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
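# Assumed call-site sketch (not from the original source): the table argument
# names the suffix of a collection_* link table, so switching the ordering of
# two output formats attached to a collection would look like:
#
#   switch_score(colID, fmt_id_1, fmt_id_2, "format")
#
# wash_table_column_name() guards the interpolated table name, which is why
# the query templates use %% escaping for the run_sql() bind placeholders.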
def get_detailed_page_tabs(colID=None, recID=None, ln=CFG_SITE_LANG):
"""
Returns the complete list of tabs to be displayed in the
detailed record pages.
Returned structured is a dict with
- key : last component of the url that leads to detailed record tab: http:www.../CFG_SITE_RECORD/74/key
- values: a dictionary with the following keys:
- label: *string* label to be printed as tab (Not localized here)
- visible: *boolean* if False, tab should not be shown
        - enabled: *boolean* if False, tab should be disabled
- order: *int* position of the tab in the list of tabs
- ln: language of the tab labels
returns dict
"""
_ = gettext_set_language(ln)
tabs = {'metadata' : {'label': _('Information'), 'visible': False, 'enabled': True, 'order': 1},
'references': {'label': _('References'), 'visible': False, 'enabled': True, 'order': 2},
'citations' : {'label': _('Citations'), 'visible': False, 'enabled': True, 'order': 3},
'keywords' : {'label': _('Keywords'), 'visible': False, 'enabled': True, 'order': 4},
'comments' : {'label': _('Discussion'), 'visible': False, 'enabled': True, 'order': 5},
'usage' : {'label': _('Usage statistics'), 'visible': False, 'enabled': True, 'order': 6},
'files' : {'label': _('Files'), 'visible': False, 'enabled': True, 'order': 7},
'plots' : {'label': _('Plots'), 'visible': False, 'enabled': True, 'order': 8},
'holdings' : {'label': _('Holdings'), 'visible': False, 'enabled': True, 'order': 9},
'linkbacks' : {'label': _('Linkbacks'), 'visible': False, 'enabled': True, 'order': 10},
'hepdata' : {'label': _('HepData'), 'visible': False, 'enabled': True, 'order': 11}
}
res = run_sql("SELECT tabs FROM collectiondetailedrecordpagetabs " + \
"WHERE id_collection=%s", (colID, ))
if len(res) > 0:
tabs_state = res[0][0].split(';')
for tab_state in tabs_state:
if tabs.has_key(tab_state):
                tabs[tab_state]['visible'] = True
else:
# no preference set for this collection.
# assume all tabs are displayed
for key in tabs.keys():
tabs[key]['visible'] = True
if not CFG_WEBCOMMENT_ALLOW_COMMENTS and \
not CFG_WEBCOMMENT_ALLOW_REVIEWS:
tabs['comments']['visible'] = False
tabs['comments']['enabled'] = False
if recID is not None:
# Disable references if no references found
#bfo = BibFormatObject(recID)
#if bfe_references.format_element(bfo, '', '') == '':
# tabs['references']['enabled'] = False
## FIXME: the above was commented out because bfe_references
## may be too slow. And we do not really need this anyway
## because we can disable tabs in WebSearch Admin on a
## collection-by-collection basis. If we need this, then we
## should probably call bfo.fields('999') here that should be
## much faster than calling bfe_references.
        # Disable citations if no citations found
#if len(get_cited_by(recID)) == 0:
# tabs['citations']['enabled'] = False
## FIXME: the above was commented out because get_cited_by()
## may be too slow. And we do not really need this anyway
## because we can disable tags in WebSearch Admin on a
## collection-by-collection basis.
# Disable Files tab if no file found except for Plots:
disable_files_tab_p = True
for abibdoc in BibRecDocs(recID).list_bibdocs():
abibdoc_type = abibdoc.get_type()
if abibdoc_type == 'Plot':
continue # ignore attached plots
else:
if CFG_INSPIRE_SITE and not \
abibdoc_type in ('', 'INSPIRE-PUBLIC', 'Supplementary Material'):
# ignore non-empty, non-INSPIRE-PUBLIC, non-suppl doctypes for INSPIRE
continue
# okay, we found at least one non-Plot file:
disable_files_tab_p = False
break
if disable_files_tab_p:
tabs['files']['enabled'] = False
#Disable holdings tab if collection != Books
collection = run_sql("""select name from collection where id=%s""", (colID, ))
if collection[0][0] != 'Books':
tabs['holdings']['enabled'] = False
# Disable Plots tab if no docfile of doctype Plot found
brd = BibRecDocs(recID)
if len(brd.list_bibdocs('Plot')) == 0:
tabs['plots']['enabled'] = False
if CFG_CERN_SITE:
from invenio.search_engine import get_collection_reclist
if recID in get_collection_reclist("Books & Proceedings"):
tabs['holdings']['visible'] = True
tabs['holdings']['enabled'] = True
# now treating the HEP data -> we have to check if there is HepData
# associated with the record and if so, make the tab visible and enabled
has_hepdata = record_has_hepdata_attached(recID)
tabs['hepdata']['visible'] = has_hepdata
tabs['hepdata']['enabled'] = has_hepdata
tabs[''] = tabs['metadata']
del tabs['metadata']
return tabs
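# Illustrative shape of the returned structure (hypothetical values):
#
#   tabs = get_detailed_page_tabs(colID=2, recID=74, ln='en')
#   tabs['references'] -> {'label': 'References', 'visible': True,
#                          'enabled': True, 'order': 2}
#
# The 'metadata' entry is re-keyed to '' above, so the Information tab is
# served at the bare record URL with no trailing tab component.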
def record_has_hepdata_attached(recID):
"""returns True or False depending if there is HepData attached or not"""
from invenio.search_engine import search_pattern
return len(search_pattern(p="786__w:%s" % (str(recID)))) > 0
def get_detailed_page_tabs_counts(recID):
"""
Returns the number of citations, references and comments/reviews
that have to be shown on the corresponding tabs in the
detailed record pages
@param recID: record id
@return: dictionary with following keys
'Citations': number of citations to be shown in the "Citations" tab
'References': number of references to be shown in the "References" tab
'Discussions': number of comments and reviews to be shown in the "Discussion" tab
"""
    num_comments = 0   # number of comments
    num_reviews = 0    # number of reviews
tabs_counts = {'Citations' : 0,
'References' : -1,
'Discussions' : 0
}
from invenio.search_engine import get_field_tags, get_record
if CFG_BIBRANK_SHOW_CITATION_LINKS:
if CFG_INSPIRE_SITE:
from invenio.search_engine import search_unit
citers_recids = intbitset(get_cited_by(recID))
citeable_recids = search_unit(p='citeable', f='collection')
tabs_counts['Citations'] = len(citers_recids & citeable_recids)
else:
tabs_counts['Citations'] = get_cited_by_count(recID)
    if not CFG_CERN_SITE:  # FIXME: should be replaced by something like CFG_SHOW_REFERENCES
reftag = ""
reftags = get_field_tags("reference")
if reftags:
reftag = reftags[0]
tmprec = get_record(recID)
if reftag and len(reftag) > 4:
tabs_counts['References'] = len(record_get_field_instances(tmprec, reftag[0:3], reftag[3], reftag[4]))
# obtain number of comments/reviews
from invenio.webcommentadminlib import get_nb_reviews, get_nb_comments
if CFG_WEBCOMMENT_ALLOW_COMMENTS and CFG_WEBSEARCH_SHOW_COMMENT_COUNT:
num_comments = get_nb_comments(recID, count_deleted=False)
if CFG_WEBCOMMENT_ALLOW_REVIEWS and CFG_WEBSEARCH_SHOW_REVIEW_COUNT:
num_reviews = get_nb_reviews(recID, count_deleted=False)
if num_comments or num_reviews:
tabs_counts['Discussions'] = num_comments + num_reviews
return tabs_counts
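# Illustrative usage sketch (hypothetical record id and counts):
#
#   counts = get_detailed_page_tabs_counts(recID=74)
#   # e.g. {'Citations': 12, 'References': 34, 'Discussions': 2}
#
# 'References' stays at its -1 default when reference counting is skipped,
# which callers can use to suppress the count in the tab label.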
|
gpl-2.0
| 325,945,467,182,998,340
| 45.626352
| 507
| 0.570987
| false
| 3.60851
| false
| false
| false
|
coinchon/crc-dabmod
|
src/crc-dwap.py
|
1
|
21796
|
#!/usr/bin/env python
# Copyright (C) 2006, 2007, 2008, 2009, 2010 Her Majesty the Queen in
# Right of Canada (Communications Research Center Canada)
# This file is part of CRC-DABMOD.
#
# CRC-DABMOD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CRC-DABMOD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CRC-DABMOD. If not, see <http://www.gnu.org/licenses/>.
from wxPython.wx import *
from optparse import OptionParser
from gnuradio import gr
from gnuradio import usrp
from gnuradio import blks  # assumed GNU Radio 3.0-era module; provides rational_resampler_ccc used below
from gnuradio.wxgui import fftsink, scopesink
from gnuradio.eng_notation import num_to_str
from gnuradio.eng_option import *
ID_ABOUT = wxNewId()
ID_EXIT = wxNewId()
ID_GAIN_SLIDER0 = wxNewId()
ID_FREQ_SLIDER0 = wxNewId()
ID_GAIN_SLIDER1 = wxNewId()
ID_FREQ_SLIDER1 = wxNewId()
ID_START = wxNewId()
ID_STOP = wxNewId()
def gcd(a, b) :
if b == 0 :
return a
return gcd(b, a % b)
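# Worked example (illustrative): gcd() reduces the resampling fraction when
# the interpolation factor would exceed 512. With the default 128e6 clock and
# a 48 kHz input:
#   gcd(128e6 / 512, 48e3) = gcd(250000, 48000) = 2000
# giving a rational resampler ratio of 125 / 24 further below.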
def appendFrequency(option, opt, value, parser):
if parser.values.frequency is None :
parser.values.frequency = [ value ]
else :
parser.values.frequency.append(value)
def listUsrp(option, opt, value, parser):
id = 0
    while True:
try:
version = usrp._look_for_usrp(id)
print "USRP #%i" % id
print " Rev: %i" % version
dst = usrp.sink_c(id)
src = usrp.source_c(id)
print " Tx"
for db in dst.db:
if (db[0].dbid() != -1):
print " %s" % db[0].side_and_name()
(min, max, offset) = db[0].freq_range()
print " Frequency"
print " Min: %sHz" % num_to_str(min)
print " Max: %sHz" % num_to_str(max)
print " Offset: %sHz" % num_to_str(offset)
(min, max, offset) = db[0].gain_range()
print " Gain"
print " Min: %sdB" % num_to_str(min)
print " Max: %sdB" % num_to_str(max)
print " Offset: %sdB" % num_to_str(offset)
print " Rx"
for db in src.db:
if (db[0].dbid() != -1):
print " %s" % db[0].side_and_name()
(min, max, offset) = db[0].freq_range()
print " Frequency"
print " Min: %sHz" % num_to_str(min)
print " Max: %sHz" % num_to_str(max)
print " Offset: %sHz" % num_to_str(offset)
(min, max, offset) = db[0].gain_range()
print " Gain"
print " Min: %sdB" % num_to_str(min)
print " Max: %sdB" % num_to_str(max)
print " Offset: %sdB" % num_to_str(offset)
except RuntimeError:
break
id += 1
raise SystemExit
class MyFrame(wxFrame):
def __init__(self, parent, ID, title):
wxFrame.__init__(self, parent, ID, title,
wxDefaultPosition)
self.pga = 0
self.pgaMin = -20
self.pgaMax = 0
self.pgaStep = 0.25
# Parsing options
parser = OptionParser(option_class=eng_option,
usage="usage: %prog [options] filename1" \
" [-f frequency2 filename2 [...]]")
parser.add_option("-a", "--agc", action="store_true",
help="enable agc")
parser.add_option("-c", "--clockrate", type="eng_float", default=128e6,
help="set USRP clock rate (128e6)")
parser.add_option("--copy", action="store_true",
help="enable real to imag data copy when in real mode")
parser.add_option("-e", "--encoding", type="choice", choices=["s", "f"],
default="f", help="choose data encoding: [s]igned or [f]loat.")
parser.add_option("-f", "--frequency", type="eng_float",
action="callback", callback=appendFrequency,
help="set output frequency (222.064e6)")
parser.add_option("-g", "--gain", type="float",
help="set output pga gain")
parser.add_option("-l", "--list", action="callback", callback=listUsrp,
help="list USRPs and daugtherboards")
parser.add_option("-m", "--mode", type="eng_float", default=2,
help="mode: 1: real, 2: complex (2)")
parser.add_option("-o", "--osc", action="store_true",
help="enable oscilloscope")
parser.add_option("-r", "--samplingrate", type="eng_float",
default=3.2e6,
help="set input sampling rate (3200000)")
parser.add_option("-s", "--spectrum", action="store_true",
help="enable spectrum analyzer")
# parser.add_option("-t", "--tx", type="choice", choices=["A", "B"],
# default="A", help="choose USRP tx A|B output (A)")
parser.add_option("-u", "--usrp", action="store_true",
help="enable USRP output")
(options, args) = parser.parse_args()
if len(args) == 0 :
options.filename = [ "/dev/stdin" ]
else :
options.filename = args
# Setting default frequency
if options.frequency is None :
options.frequency = [ 222.064e6 ]
if len(options.filename) != len(options.frequency) :
parser.error("Nb input file != nb frequency!")
# Status bar
# self.CreateStatusBar(3, 0)
# msg = "PGA: %.2f dB" % (self.pga * self.pgaStep)
# self.SetStatusText(msg, 1)
# msg = "Freq: %.3f mHz" % (options.frequency[0] / 1000000.0)
# self.SetStatusText(msg, 2)
# Menu bar
menu = wxMenu()
menu.Append(ID_ABOUT, "&About",
"More information about this program")
menu.AppendSeparator()
menu.Append(ID_EXIT, "E&xit", "Terminate the program")
menuBar = wxMenuBar()
menuBar.Append(menu, "&File")
self.SetMenuBar(menuBar)
# Main windows
mainSizer = wxFlexGridSizer(0, 1)
sliderSizer = wxFlexGridSizer(0, 2)
buttonSizer = wxBoxSizer(wxHORIZONTAL)
if options.usrp :
# TX d'board 0
gainLabel = wxStaticText(self, -1, "PGA 0")
gainSlider = wxSlider(self, ID_GAIN_SLIDER0, self.pga,
self.pgaMin / self.pgaStep, self.pgaMax / self.pgaStep,
style = wxSL_HORIZONTAL | wxSL_AUTOTICKS)
gainSlider.SetSize((400, -1))
sliderSizer.Add(gainLabel, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
sliderSizer.Add(gainSlider, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
freqLabel = wxStaticText(self, -1, "Frequency 0")
freqSlider = wxSlider(self, ID_FREQ_SLIDER0,
options.frequency[0] / 16000, 0, 20e3,
style = wxSL_HORIZONTAL | wxSL_AUTOTICKS)
freqSlider.SetSize((400, -1))
sliderSizer.Add(freqLabel, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
sliderSizer.Add(freqSlider, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
if len(options.frequency) > 1 :
# TX d'board 1
gainLabel = wxStaticText(self, -1, "PGA 1")
gainSlider = wxSlider(self, ID_GAIN_SLIDER1, self.pga,
self.pgaMin / self.pgaStep, self.pgaMax / self.pgaStep,
style = wxSL_HORIZONTAL | wxSL_AUTOTICKS)
gainSlider.SetSize((400, -1))
sliderSizer.Add(gainLabel, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
sliderSizer.Add(gainSlider, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
freqLabel = wxStaticText(self, -1, "Frequency 1")
freqSlider = wxSlider(self, ID_FREQ_SLIDER1,
options.frequency[1] / 16000, 0, 20e3,
style = wxSL_HORIZONTAL | wxSL_AUTOTICKS)
freqSlider.SetSize((400, -1))
sliderSizer.Add(freqLabel, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
sliderSizer.Add(freqSlider, 0,
wxALIGN_CENTER_VERTICAL | wxFIXED_MINSIZE, 0)
mainSizer.Add(sliderSizer, 1, wxEXPAND, 0)
start = wxButton(self, ID_START, "Start")
stop = wxButton(self, ID_STOP, "Stop")
buttonSizer.Add(start, 1, wxALIGN_CENTER, 0)
buttonSizer.Add(stop, 1, wxALIGN_CENTER, 0)
mainSizer.Add(buttonSizer, 1, wxEXPAND, 0)
# GnuRadio
self.fg = gr.flow_graph()
if options.mode == 1 :
print "Source: real"
if (options.encoding == "s") :
print "Source encoding: short"
src = gr.file_source(gr.sizeof_short, options.filename[0], 1)
if (options.copy) :
print "Imag: copy"
imag = src
else :
print "Imag: null"
imag = gr.null_source(gr.sizeof_short)
interleaver = gr.interleave(gr.sizeof_short)
self.fg.connect(src, (interleaver, 0))
self.fg.connect(imag, (interleaver, 1))
tail = interleaver
elif (options.encoding == "f") :
print "Source encoding: float"
src = gr.file_source(gr.sizeof_gr_complex,
options.filename[0], 1)
tail = src
elif (options.mode == 2) :
print "Source: complex"
if len(options.frequency) == 1 :
if (options.encoding == "s") :
print "Source encoding: short"
src = gr.file_source(gr.sizeof_short,
options.filename[0], 1)
elif (options.encoding == "f") :
print "Source encoding: float"
src = gr.file_source(gr.sizeof_gr_complex,
options.filename[0], 1)
else :
parser.error("Invalid encoding type for complex data!")
tail = src
elif (len(options.frequency) == 2) :
src0 = gr.file_source(gr.sizeof_gr_complex,
options.filename[0], 1)
src1 = gr.file_source(gr.sizeof_gr_complex,
options.filename[1], 1)
interleaver = gr.interleave(gr.sizeof_gr_complex)
self.fg.connect(src0, (interleaver, 0))
self.fg.connect(src1, (interleaver, 1))
tail = interleaver
else :
parser.error(
"Invalid number of source (> 2) with complex input!")
else :
parser.error("Invalid mode!")
# Interpolation
dac_freq = options.clockrate
interp = int(dac_freq / options.samplingrate)
if interp == 0 :
parser.error("Invalid sampling rate!")
if options.mode == 2 :
print "Input sampling rate: %s complex samples/s" % \
num_to_str(options.samplingrate)
else :
print "Input sampling rate: %s samples/s" % \
num_to_str(options.samplingrate)
print "Interpolation rate: int(%s / %s) = %sx" % \
(num_to_str(dac_freq), num_to_str(options.samplingrate), interp)
if interp > 512 :
factor = gcd(dac_freq / 512, options.samplingrate)
num = int((dac_freq / 512) / factor)
den = int(options.samplingrate / factor)
print "Resampling by %i / %i" % (num, den)
resampler = blks.rational_resampler_ccc(self.fg, num, den)
self.fg.connect(tail, resampler)
tail = resampler
interp = 512
options.samplingrate = dac_freq / 512
# AGC
if options.agc :
agc = gr.agc_cc()
self.fg.connect(tail, agc)
tail = agc
# USRP
if options.usrp :
nchan = len(options.frequency)
if len(options.frequency) == 1 :
if options.mode == 1 :
mux = 0x00000098
elif options.mode == 2 :
mux = 0x00000098
else :
parser.error("Unsupported mode for USRP mux!")
elif len(options.frequency) == 2 :
if options.mode == 1 :
mux = 0x0000ba98
elif options.mode == 2 :
mux = 0x0000ba98
else :
parser.error("Unsupported mode for USRP mux!")
else :
parser.error("Invalid number of frequency [0..2]!")
# if options.tx == "A" :
# mux = 0x00000098
# else :
# mux = 0x00009800
print "Nb channels: ", nchan
print "Mux: 0x%x" % mux
if options.encoding == 's' :
dst = usrp.sink_s(0, interp, nchan, mux)
elif options.encoding == 'f' :
dst = usrp.sink_c(0, interp, nchan, mux)
else :
parser.error("Unsupported data encoding for USRP!")
dst.set_verbose(1)
for i in range(len(options.frequency)) :
if options.gain is None :
print "Setting gain to %f" % dst.pga_max()
dst.set_pga(i << 1, dst.pga_max())
else :
print "Setting gain to %f" % options.gain
dst.set_pga(i << 1, options.gain)
            tune = False
for dboard in dst.db:
if (dboard[0].dbid() != -1):
device = dboard[0]
print "Tuning TX d'board %s to %sHz" % \
(device.side_and_name(),
num_to_str(options.frequency[i]))
device.lo_offset = 38e6
(min, max, offset) = device.freq_range()
print " Frequency"
print " Min: %sHz" % num_to_str(min)
print " Max: %sHz" % num_to_str(max)
print " Offset: %sHz" % num_to_str(offset)
#device.set_gain(device.gain_range()[1])
device.set_enable(True)
tune = \
dst.tune(device._which, device,
options.frequency[i] * 128e6 / dac_freq)
if tune:
print " Baseband frequency: %sHz" % \
num_to_str(tune.baseband_freq)
print " DXC frequency: %sHz" % \
num_to_str(tune.dxc_freq)
print " Residual Freqency: %sHz" % \
num_to_str(tune.residual_freq)
print " Inverted: ", \
tune.inverted
mux = usrp.determine_tx_mux_value(dst,
(device._which, 0))
dst.set_mux(mux)
break
else:
print " Failed!"
if not tune:
print " Failed!"
raise SystemExit
# int nunderruns ()
print "USRP"
print " Rx halfband: ", dst.has_rx_halfband()
print " Tx halfband: ", dst.has_tx_halfband()
print " Nb DDC: ", dst.nddc()
print " Nb DUC: ", dst.nduc()
#dst._write_9862(0, 14, 224)
print " DAC frequency: %s samples/s" % num_to_str(dst.dac_freq())
print " Fpga decimation rate: %s -> %s samples/s" % \
(num_to_str(dst.interp_rate()),
num_to_str(dac_freq / dst.interp_rate()))
print " Nb channels:",
if hasattr(dst, "nchannels()") :
print dst.nchannels()
else:
print "N/A"
print " Mux:",
if hasattr(dst, "mux()") :
print "0x%x" % dst.mux()
else :
print "N/A"
print " FPGA master clock frequency:",
if hasattr(dst, "fpga_master_clock_freq()") :
print "%sHz" % num_to_str(dst.fpga_master_clock_freq())
else :
print "N/A"
print " Converter rate:",
if hasattr(dst, "converter_rate()") :
print "%s" % num_to_str(dst.converter_rate())
else :
print "N/A"
print " DAC rate:",
if hasattr(dst, "dac_rate()") :
print "%s sample/s" % num_to_str(dst.dac_rate())
else :
print "N/A"
print " Interp rate: %sx" % num_to_str(dst.interp_rate())
print " DUC frequency 0: %sHz" % num_to_str(dst.tx_freq(0))
print " DUC frequency 1: %sHz" % num_to_str(dst.tx_freq(1))
print " Programmable Gain Amplifier 0: %s dB" % \
num_to_str(dst.pga(0))
print " Programmable Gain Amplifier 1: %s dB" % \
num_to_str(dst.pga(2))
else :
dst = gr.null_sink(gr.sizeof_gr_complex)
# AGC
if options.agc :
agc = gr.agc_cc()
self.fg.connect(tail, agc)
tail = agc
self.fg.connect(tail, dst)
# oscilloscope
if options.osc :
oscPanel = wxPanel(self, -1)
if (options.encoding == "s") :
converter = gr.interleaved_short_to_complex()
self.fg.connect(tail, converter)
signal = converter
elif (options.encoding == "f") :
signal = tail
else :
parser.error("Unsupported data encoding for oscilloscope!")
#block = scope_sink_f(fg, parent, title=label, sample_rate=input_rate)
#return (block, block.win)
oscWin = scopesink.scope_sink_c(self.fg, oscPanel, "Signal",
options.samplingrate)
self.fg.connect(signal, oscWin)
mainSizer.Add(oscPanel, 1, wxEXPAND)
# spectrometer
if options.spectrum :
ymin = 0
ymax = 160
fftPanel = wxPanel(self, -1)
if (options.encoding == "s") :
converter = gr.interleaved_short_to_complex()
self.fg.connect(tail, converter)
signal = converter
elif (options.encoding == "f") :
signal = tail
else :
parser.error("Unsupported data encoding for oscilloscope!")
fftWin = fftsink.fft_sink_c(self.fg, fftPanel,
title="Spectrum",
fft_size=2048,
sample_rate=options.samplingrate,
y_per_div=(ymax - ymin) / 8,
ref_level=ymax,
fft_rate=50,
average=True
)
self.fg.connect(signal, fftWin)
mainSizer.Add(fftPanel, 1, wxEXPAND)
# Events
EVT_MENU(self, ID_ABOUT, self.OnAbout)
EVT_MENU(self, ID_EXIT, self.TimeToQuit)
EVT_SLIDER(self, ID_GAIN_SLIDER0, self.slideEvent)
EVT_SLIDER(self, ID_FREQ_SLIDER0, self.slideEvent)
EVT_SLIDER(self, ID_GAIN_SLIDER1, self.slideEvent)
EVT_SLIDER(self, ID_FREQ_SLIDER1, self.slideEvent)
EVT_BUTTON(self, ID_START, self.onClick)
EVT_BUTTON(self, ID_STOP, self.onClick)
#Layout sizers
self.SetSizer(mainSizer)
self.SetAutoLayout(1)
mainSizer.Fit(self)
self.fg.start()
def OnAbout(self, event):
dlg = wxMessageDialog(self, "This sample program shows off\n"
"frames, menus, statusbars, and this\n"
"message dialog.",
"About Me", wxOK | wxICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
def TimeToQuit(self, event):
        self.Close(True)
def slideEvent(self, evt):
value = evt.GetInt()
id = evt.GetId()
        if id in (ID_GAIN_SLIDER0, ID_GAIN_SLIDER1):
            msg = "PGA: %.2f dB" % (value * self.pgaStep)
            self.SetStatusText(msg, 1)
        elif id in (ID_FREQ_SLIDER0, ID_FREQ_SLIDER1):
            msg = "Freq: %.3f MHz" % (value * 16.0 / 1000)
            self.SetStatusText(msg, 2)
else:
print "Slider event not yet coded!"
self.Close(True)
def onClick(self, event):
id = event.GetId()
if id == ID_START:
self.fg.start()
elif id == ID_STOP:
self.fg.stop()
else:
print "Click event not yet coded!"
self.Close(True)
class MyApp(wxApp):
def OnInit(self):
frame = MyFrame(NULL, -1, "Digital WAve Player")
        frame.Show(True)
        self.SetTopWindow(frame)
        return True
app = MyApp(0)
app.MainLoop()
|
gpl-3.0
| -4,136,697,583,841,737,000
| 38.846435
| 80
| 0.49376
| false
| 3.782714
| false
| false
| false
|
madmax983/h2o-3
|
h2o-py/tests/testdir_algos/deeplearning/pyunit_weights_and_distributions_deeplearning.py
|
1
|
1284
|
import sys, os
sys.path.insert(1, os.path.join("..",".."))
import h2o
from tests import pyunit_utils
from h2o.estimators.deeplearning import H2ODeepLearningEstimator
def weights_and_distributions():
htable = h2o.upload_file(pyunit_utils.locate("smalldata/gbm_test/moppe.csv"))
htable["premiekl"] = htable["premiekl"].asfactor()
htable["moptva"] = htable["moptva"].asfactor()
htable["zon"] = htable["zon"]
# gamma
dl = H2ODeepLearningEstimator(distribution="gamma")
dl.train(x=range(3),y="medskad",training_frame=htable, weights_column="antskad")
predictions = dl.predict(htable)
# gaussian
dl = H2ODeepLearningEstimator(distribution="gaussian")
dl.train(x=range(3),y="medskad",training_frame=htable, weights_column="antskad")
predictions = dl.predict(htable)
# poisson
dl = H2ODeepLearningEstimator(distribution="poisson")
dl.train(x=range(3),y="medskad",training_frame=htable, weights_column="antskad")
predictions = dl.predict(htable)
# tweedie
dl = H2ODeepLearningEstimator(distribution="tweedie")
dl.train(x=range(3),y="medskad",training_frame=htable, weights_column="antskad")
predictions = dl.predict(htable)
if __name__ == "__main__":
pyunit_utils.standalone_test(weights_and_distributions)
else:
weights_and_distributions()
|
apache-2.0
| -2,314,508,829,679,752,700
| 32.815789
| 82
| 0.732087
| false
| 2.993007
| false
| false
| false
|
uschille/FabSim
|
python/lib/DataAnalysis.py
|
1
|
3679
|
import numpy as np
from scipy.optimize import leastsq
def derivatives(x, y):
num_x = len(x);
deriv = np.zeros((len(x)))
    # If there are only two input points, use a straight line as the derivative.
if num_x == 2:
deriv[0] = (y[1] - y[0]) / (x[1] - x[0])
deriv[1] = deriv[0]
return deriv
# Calculate the derivatives for the interior points. This loop uses
# a total of 6 points to calculate the derivative at any one
# point. And when the loop moves along in increasing array
# position, the same data point is used three times. So instead of
# reading the correct value from the array three times, just shift
# the values down by copying them from one variable to the next.
xi = 2*x[0]-x[1] # 0.0
xj = x[0]
xk = x[1]
yi = 2*y[0]-y[1] # 0.0
yj = y[0]
yk = y[1]
for i in xrange(1, num_x-1):
xi = xj
xj = xk
xk = x[i+1]
yi = yj
yj = yk
yk = y[i+1]
r1 = (xk - xj)*(xk - xj) + (yk - yj)*(yk - yj)
r2 = (xj - xi)*(xj - xi) + (yj - yi)*(yj - yi)
deriv[i] = ( (yj - yi)*r1 + (yk - yj)*r2 ) / ( (xj - xi)*r1 + (xk - xj)*r2 )
# Calculate the derivative at the first point, (x(0),y(0)).
slope = (y[1] - y[0]) / (x[1] - x[0])
if ((slope >= 0) and (slope >= deriv[1])) or ((slope <= 0) and (slope <= deriv[1])):
deriv[0] = 2 * slope - deriv[1]
else:
deriv[0] = slope + (abs(slope) * (slope - deriv[1])) / (abs(slope) + abs(slope - deriv[1]))
# Calculate the derivative at the last point.
slope = (y[num_x-1] - y[num_x-2]) / (x[num_x-1] - x[num_x-2])
if ((slope >= 0) and (slope >= deriv[num_x-2])) or ((slope <= 0) and (slope <= deriv[num_x-2])):
deriv[num_x-1] = 2 * slope - deriv[num_x-2]
else:
deriv[num_x-1] = slope + (abs(slope) * (slope - deriv[num_x-2])) / (abs(slope) + abs(slope - deriv[num_x-2]) )
return deriv
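# Illustrative usage sketch (hypothetical data): for collinear samples the
# weighted scheme above reproduces the slope exactly at every point.
#
#   x = np.array([0.0, 1.0, 2.0, 3.0])
#   y = 2.0 * x + 1.0
#   derivatives(x, y)   # -> array([2., 2., 2., 2.])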
def get_centre_of_mass(molecule_particles, bounds):
# calculate centre of mass of a sheet in a periodic box.
# Becomes incorrect if any structure extends beyond 0.5 of the box size.
cm_rel = np.array(([0.0, 0.0, 0.0 ]))
rp = molecule_particles[0] #reference particle
for p in molecule_particles:
for i in xrange(0,3):
a = p[i] - rp[i]
if a > 0.5 * bounds[i]:
a = p[i] - rp[i] - bounds[i]
elif a < -0.5 * bounds[i]:
a = p[i] - rp[i] + bounds[i]
cm_rel[i] += a
cm_rel = cm_rel / len(molecule_particles)
cm = rp + cm_rel
cm[0] = cm[0] %bounds[0]
cm[1] = cm[1] %bounds[1]
cm[2] = cm[2] %bounds[2]
#print cm
#import sys
#sys.exit()
return cm
def f_min(X,p):
plane_xyz = p[0:3]
distance = (plane_xyz*X.T).sum(axis=1) + p[3]
return distance / np.linalg.norm(plane_xyz)
def residuals(params, signal, X):
return f_min(X, params)
def get_fitting_plane(points):
# returns a,b,c,d in ax+by+cz+d=0. a,b,c are also the normal.
pointsT = points.transpose()
# Inital guess of the plane
diff = points[0] - points[-1]
p0 = np.array(([diff[0], diff[1], diff[2], 1.]))
sol = leastsq(residuals, p0, args=(None, pointsT))[0]
#print "Solution: ", sol
#print "Old Error: ", (f_min(pointsT, p0)**2).sum()
#print "New Error: ", (f_min(pointsT, sol)**2).sum()
return sol
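# Illustrative usage sketch (hypothetical points): the solution packs the
# plane as ax + by + cz + d = 0, so sol[0:3] is the (unnormalised) normal.
#
#   points = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [1., 1., 0.]])
#   sol = get_fitting_plane(points)
#   normal = unit_vector(sol[0:3])  # should point along z, up to sign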
def unit_vector(vector):
""" Returns the unit vector of the vector. """
return vector / np.linalg.norm(vector)
def angle_between(v1, v2):
    # Returns the angle in radians between vectors 'v1' and 'v2'.
v1_u = unit_vector(v1)
v2_u = unit_vector(v2)
angle = np.arccos(np.dot(v1_u, v2_u))
if np.isnan(angle):
if (v1_u == v2_u).all():
return 0.0
else:
return np.pi
return angle
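# Quick sanity checks (illustrative):
#   angle_between(np.array([1, 0, 0]), np.array([0, 1, 0]))   # -> pi / 2
#   angle_between(np.array([1, 0, 0]), np.array([1, 0, 0]))   # -> 0.0
#   angle_between(np.array([1, 0, 0]), np.array([-1, 0, 0]))  # -> pi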
|
lgpl-3.0
| 584,378,390,467,305,900
| 28.44
| 114
| 0.568905
| false
| 2.660159
| false
| false
| false
|
gentimouton/swarch
|
server-master/server.py
|
1
|
4410
|
"""
Server master:
The server is almighty.
Every frame, it receives player inputs from clients,
executes these inputs to update the game state,
and sends the whole game state to all the clients for display.
"""
from __future__ import division  # make division float instead of int by default
from network import Listener, Handler, poll_for
from random import randint
import time
import uuid
##################### game logic #############
TICK_DURATION = 0.05 # seconds
# game state
borders = [[0, 0, 2, 300], [0, 0, 400, 2], [398, 0, 2, 300], [0, 298, 400, 2]]
pellets = [[randint(10, 390), randint(10, 290), 5, 5] for _ in range(4)]
players = {} # map a client handler to a player object
# map inputs received from clients to directions
input_dir = {'up': [0, -1], 'down': [0, 1],
'left': [-1, 0], 'right': [1, 0]}
class Player:
def __init__(self):
self.name = str(uuid.uuid4())
self.revive()
def revive(self):
self.box = [randint(10, 380), randint(10, 280), 10, 10]
self.dir = input_dir['down'] # original direction: downwards
self.speed = 2
def change_dir(self, inputt):
self.dir = input_dir[inputt]
def move(self):
self.box[0] += self.dir[0] * self.speed
self.box[1] += self.dir[1] * self.speed
def grow_and_slow(self, qty=2):
self.box[2] += qty
self.box[3] += qty
self.speed -= self.speed/6
def collide_borders(self):
        for border in borders:
            if collide_boxes(self.box, border):
                self.revive()
def collide_other_players(self):
for p in players.values():
# only the player with lowest id of the pair detects the collision
if self.name < p.name and collide_boxes(self.box, p.box):
playerw, pw = self.box[2], p.box[2] # widths
if playerw > pw:
self.grow_and_slow(pw)
p.revive()
elif playerw < pw:
p.grow_and_slow(playerw)
self.revive()
else: # they have same width: kill both
p.revive()
self.revive()
def collide_pellets(self):
for index, pellet in enumerate(pellets):
if collide_boxes(self.box, pellet):
self.grow_and_slow()
pellets[index] = [randint(10, 390), randint(10, 290), 5, 5]
def update(self):
self.move()
self.collide_borders()
self.collide_other_players()
self.collide_pellets()
def collide_boxes(box1, box2):
x1, y1, w1, h1 = box1
x2, y2, w2, h2 = box2
return x1 < x2 + w2 and y1 < y2 + h2 and x2 < x1 + w1 and y2 < y1 + h1
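# Worked example (illustrative): boxes are [x, y, w, h]. A 10x10 player at
# (5, 5) overlaps a 5x5 pellet at (12, 12), since 5 < 12 + 5, 12 < 5 + 10,
# and likewise on y; a pellet at (20, 20) does not, because 20 < 5 + 10 fails.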
################### network ##############
event_queue = [] # list of ('event', handler)
# 'event' can be 'quit', 'join', 'up', 'down', 'left', 'right'
class MyHandler(Handler):
def on_open(self):
event_queue.append(('join', self))
def on_close(self):
event_queue.append(('quit', self))
def on_msg(self, data):
event_queue.append((data['input'], self))
server = Listener(8888, MyHandler)
######################### loop #######################
def apply_events():
# apply events onto game state
global event_queue
for event, handler in event_queue:
if event == 'quit':
del players[handler]
elif event == 'join':
players[handler] = Player()
else: # movement input
players[handler].change_dir(event)
event_queue = []
def update_simulation():
    for player in players.values():
        player.update()
def broadcast_state():
# Send to all players 1) the whole game state, and 2) their own name,
# so players can draw themselves differently from the other players.
serialized_players = {p.name: p.box for p in players.values()}
for handler, player in players.items():
msg = {'borders': borders,
'pellets': pellets,
'myname': player.name,
'players': serialized_players}
handler.do_send(msg)
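# Illustrative shape of one broadcast message (hypothetical values):
#   {'borders': [[0, 0, 2, 300], ...],
#    'pellets': [[123, 45, 5, 5], ...],
#    'myname': 'f47ac10b-58cc-...',
#    'players': {'f47ac10b-58cc-...': [50, 60, 10, 10], ...}}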
while 1:
loop_start = time.time()
apply_events()
update_simulation()
broadcast_state()
poll_for(TICK_DURATION - (time.time() - loop_start)) # poll until tick is over
|
mit
| 2,149,018,377,298,858,800
| 30.726619
| 82
| 0.547846
| false
| 3.539326
| false
| false
| false
|
bebound/linovel
|
novel.py
|
1
|
2503
|
from abc import ABC, abstractmethod, abstractstaticmethod
import requests
from bs4 import BeautifulSoup
class AbstractNovel(ABC):
"""
abstract novel class
Attributes:
url: The novel url
        single_thread: A bool representing whether to use a single thread to grab novel information
        volume_name: A string representing the volume name
        volume_number: A string representing the volume number
        book_name: A string representing the book name
        author: A string representing the author
        illustrator: A string representing the illustrator
        introduction: A string representing the introduction
        chapters: A list representing the chapters
        cover_url: A string representing the cover_url
        date: A string representing the date the book was last updated (as specified in ISO 8601)
        novel_information: A list of dicts representing the novel information
"""
_HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:19.0) Gecko/20100101 Firefox/19.0'}
def __init__(self, url, single_thread=False):
self.url = url
self.single_thread = single_thread
self.volume_name = ''
self.volume_number = ''
self.author = ''
self.illustrator = ''
self.introduction = ''
self.chapters = []
self.cover_url = ''
self.date = ''
self.novel_information = []
def __str__(self):
        return '{}:{}'.format(type(self).__name__, self.url)
@abstractstaticmethod
def check_url(url):
"""check whether the url match this website"""
pass
def parse_page(self, url, encoding=''):
"""
parse page with BeautifulSoup
Args:
url: A string represent the url to be parsed
encoding: A string represent the encoding of the html
Return:
            A BeautifulSoup element
"""
r = requests.get(url, headers=self._HEADERS)
r.encoding = 'utf-8' if not encoding else encoding
return BeautifulSoup(r.text, 'lxml')
@abstractmethod
def extract_novel_information(self):
"""extract novel information"""
pass
@abstractmethod
def get_novel_information(self):
"""
return the novel information
Return:
A list contains dict, dict usually has these information: volume_name, volume_number, book_name,
author, illustrator, introduction, chapters, cover_url, date, source
"""
pass
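# Illustrative shape of one entry of the returned list (hypothetical values;
# concrete subclasses fill these in from the scraped pages):
#   {'volume_name': 'Vol. 1', 'volume_number': '1',
#    'book_name': 'Some Light Novel', 'author': 'Some Author',
#    'illustrator': 'Some Illustrator', 'introduction': '...',
#    'chapters': [...], 'cover_url': 'http://example.com/cover.jpg',
#    'date': '2017-01-01', 'source': 'http://example.com/book/1'}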
|
mit
| 2,969,921,192,952,360,000
| 31.089744
| 108
| 0.623252
| false
| 4.445826
| false
| false
| false
|
zzw922cn/Automatic_Speech_Recognition
|
speechvalley/models/dynamic_brnn.py
|
1
|
7057
|
# encoding: utf-8
# ******************************************************
# Author : zzw922cn
# Last modified: 2017-12-09 11:00
# Email : zzw922cn@gmail.com
# Filename : dynamic_brnn.py
# Description : Dynamic Bidirectional RNN model for Automatic Speech Recognition
# ******************************************************
import argparse
import time
import datetime
import os
from six.moves import cPickle
from functools import wraps
import numpy as np
import tensorflow as tf
from tensorflow.python.ops.rnn import bidirectional_dynamic_rnn
from speechvalley.utils import load_batched_data, describe, setAttrs, list_to_sparse_tensor, dropout, get_edit_distance
from speechvalley.utils import lnBasicRNNCell, lnGRUCell, lnBasicLSTMCell
def build_multi_dynamic_brnn(args,
maxTimeSteps,
inputX,
cell_fn,
seqLengths,
time_major=True):
hid_input = inputX
for i in range(args.num_layer):
scope = 'DBRNN_' + str(i + 1)
forward_cell = cell_fn(args.num_hidden, activation=args.activation)
backward_cell = cell_fn(args.num_hidden, activation=args.activation)
# tensor of shape: [max_time, batch_size, input_size]
outputs, output_states = bidirectional_dynamic_rnn(forward_cell, backward_cell,
inputs=hid_input,
dtype=tf.float32,
sequence_length=seqLengths,
time_major=True,
scope=scope)
        # forward output, backward output
# tensor of shape: [max_time, batch_size, input_size]
output_fw, output_bw = outputs
# forward states, backward states
output_state_fw, output_state_bw = output_states
# output_fb = tf.concat(2, [output_fw, output_bw])
output_fb = tf.concat([output_fw, output_bw], 2)
shape = output_fb.get_shape().as_list()
output_fb = tf.reshape(output_fb, [shape[0], shape[1], 2, int(shape[2] / 2)])
hidden = tf.reduce_sum(output_fb, 2)
hidden = dropout(hidden, args.keep_prob, (args.mode == 'train'))
if i != args.num_layer - 1:
hid_input = hidden
else:
outputXrs = tf.reshape(hidden, [-1, args.num_hidden])
# output_list = tf.split(0, maxTimeSteps, outputXrs)
output_list = tf.split(outputXrs, maxTimeSteps, 0)
fbHrs = [tf.reshape(t, [args.batch_size, args.num_hidden]) for t in output_list]
return fbHrs
class DBiRNN(object):
def __init__(self, args, maxTimeSteps):
self.args = args
self.maxTimeSteps = maxTimeSteps
if args.layerNormalization is True:
if args.rnncell == 'rnn':
self.cell_fn = lnBasicRNNCell
elif args.rnncell == 'gru':
self.cell_fn = lnGRUCell
elif args.rnncell == 'lstm':
self.cell_fn = lnBasicLSTMCell
else:
raise Exception("rnncell type not supported: {}".format(args.rnncell))
else:
if args.rnncell == 'rnn':
self.cell_fn = tf.contrib.rnn.BasicRNNCell
elif args.rnncell == 'gru':
self.cell_fn = tf.contrib.rnn.GRUCell
elif args.rnncell == 'lstm':
self.cell_fn = tf.contrib.rnn.BasicLSTMCell
else:
raise Exception("rnncell type not supported: {}".format(args.rnncell))
self.build_graph(args, maxTimeSteps)
@describe
def build_graph(self, args, maxTimeSteps):
self.graph = tf.Graph()
with self.graph.as_default():
self.inputX = tf.placeholder(tf.float32,
shape=(maxTimeSteps, args.batch_size, args.num_feature)) # [maxL,32,39]
inputXrs = tf.reshape(self.inputX, [-1, args.num_feature])
# self.inputList = tf.split(0, maxTimeSteps, inputXrs) #convert inputXrs from [32*maxL,39] to [32,maxL,39]
self.inputList = tf.split(inputXrs, maxTimeSteps, 0) # convert inputXrs from [32*maxL,39] to [32,maxL,39]
self.targetIxs = tf.placeholder(tf.int64)
self.targetVals = tf.placeholder(tf.int32)
self.targetShape = tf.placeholder(tf.int64)
self.targetY = tf.SparseTensor(self.targetIxs, self.targetVals, self.targetShape)
self.seqLengths = tf.placeholder(tf.int32, shape=(args.batch_size))
self.config = {'name': args.model,
'rnncell': self.cell_fn,
'num_layer': args.num_layer,
'num_hidden': args.num_hidden,
'num_class': args.num_class,
'activation': args.activation,
'optimizer': args.optimizer,
'learning rate': args.learning_rate,
'keep prob': args.keep_prob,
'batch size': args.batch_size}
fbHrs = build_multi_dynamic_brnn(self.args, maxTimeSteps, self.inputX, self.cell_fn, self.seqLengths)
with tf.name_scope('fc-layer'):
with tf.variable_scope('fc'):
weightsClasses = tf.Variable(
tf.truncated_normal([args.num_hidden, args.num_class], name='weightsClasses'))
biasesClasses = tf.Variable(tf.zeros([args.num_class]), name='biasesClasses')
logits = [tf.matmul(t, weightsClasses) + biasesClasses for t in fbHrs]
logits3d = tf.stack(logits)
self.loss = tf.reduce_mean(tf.nn.ctc_loss(self.targetY, logits3d, self.seqLengths))
self.var_op = tf.global_variables()
self.var_trainable_op = tf.trainable_variables()
if args.grad_clip == -1:
# not apply gradient clipping
self.optimizer = tf.train.AdamOptimizer(args.learning_rate).minimize(self.loss)
else:
# apply gradient clipping
grads, _ = tf.clip_by_global_norm(tf.gradients(self.loss, self.var_trainable_op), args.grad_clip)
opti = tf.train.AdamOptimizer(args.learning_rate)
self.optimizer = opti.apply_gradients(zip(grads, self.var_trainable_op))
self.predictions = tf.to_int32(
tf.nn.ctc_beam_search_decoder(logits3d, self.seqLengths, merge_repeated=False)[0][0])
if args.level == 'cha':
self.errorRate = tf.reduce_sum(tf.edit_distance(self.predictions, self.targetY, normalize=True))
self.initial_op = tf.global_variables_initializer()
self.saver = tf.train.Saver(tf.global_variables(), max_to_keep=5, keep_checkpoint_every_n_hours=1)
|
mit
| 5,569,468,753,389,374,000
| 49.769784
| 119
| 0.552784
| false
| 3.888154
| false
| false
| false
|
ljcooke/dotfiles
|
python/pythonrc.py
|
1
|
2186
|
# -----------------------------------------------------------------------------
# Python repl config
# -----------------------------------------------------------------------------
import datetime
import math
import os
import random
import re
import sys
from math import *
from pydoc import pager
try:
from see import see
except ImportError:
see = dir
PY = sys.version_info[0]
class Term:
RESET = "\x1b[0m"
BOLD = "\x1b[1m"
DIM = "\x1b[2m"
UNBOLD = "\x1b[21m"
UNDIM = "\x1b[22m"
RED = "\x1b[31m"
GREEN = "\x1b[32m"
YELLOW = "\x1b[33m"
BLUE = "\x1b[34m"
PINK = "\x1b[35m"
CYAN = "\x1b[36m"
@classmethod
def color(cls, string, color, uncolor=RESET):
return ''.join((color, string, uncolor))
@classmethod
def dim(cls, string):
return ''.join((cls.DIM, string, cls.UNDIM))
@classmethod
def setup_prompt(cls):
version = '.'.join(str(s) for s in sys.version_info[:2])
sys.ps1 = '(py%s)> ' % version
sys.ps2 = '%s ' % ('.' * 8)
Term.setup_prompt()
if PY < 3:
try:
import rlcompleter
if 'libedit' in rlcompleter.readline.__doc__:
rlcompleter.readline.parse_and_bind('bind ^I rl_complete')
else:
rlcompleter.readline.parse_and_bind('tab: complete')
except ImportError:
print("Install readline for tab completion.")
print('')
def take(seq, count=1):
queue = []
for item in seq:
queue.append(item)
if len(queue) == count:
yield tuple(queue)
queue = []
if queue:
yield tuple(queue)
def pairs(seq):
return take(seq, 2)
def enhex(seq):
if isinstance(seq, str):
seq = seq.encode('utf-8')
return ' '.join(hex(b)[2:].zfill(2) for b in seq).upper()
def dehex(s, utf8=True):
s = ''.join(s.lower().split())
if not all(c in '0123456789abcdef' for c in s):
raise ValueError('Not a hex string')
byteseq = bytes(int(''.join(p), 16) for p in pairs(s))
if utf8:
try:
return byteseq.decode('utf-8')
except UnicodeDecodeError:
pass
return byteseq
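# Quick examples (illustrative, Python 3):
#   enhex('hi')      # -> '68 69'
#   dehex('68 69')   # -> 'hi'
#   dehex('ff fe')   # -> b'\xff\xfe' (raw bytes when not valid UTF-8)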
|
mit
| -3,096,192,023,812,464,000
| 21.306122
| 79
| 0.522873
| false
| 3.383901
| false
| false
| false
|
edx/edx-e2e-tests
|
regression/pages/enterprise/enterprise_const.py
|
1
|
1142
|
"""
URLs and constants for enterprise stuff
"""
import os
ENTERPRISE_PORTAL_LOGIN_URL = "https://pmsalesdemo8.successfactors.com/login?company=SFPART011327#/login"
DEFAULT_ENTERPRISE_NAME = 'SuccessFactors'
ENTERPRISE_NAME = os.environ.get('ENTERPRISE_NAME', DEFAULT_ENTERPRISE_NAME)
DEFAULT_IDP_CSS_ID = 'bestrun'
IDP_CSS_ID = os.environ.get('IDP_CSS_ID', DEFAULT_IDP_CSS_ID)
ENT_CUSTOMER_UUID = os.environ.get('ENT_CUSTOMER_UUID', '')
ENT_CUSTOMER_CATALOG_UUID = os.environ.get('ENT_CUSTOMER_CATALOG_UUID')
ENT_COURSE_ID = os.environ.get('ENT_COURSE_ID', 'course-v1:Mattx+TCE2E+2018')
ENT_PORTAL_USERNAME = os.environ.get('ENT_PORTAL_USERNAME')
ENT_PORTAL_PASSWORD = os.environ.get('ENT_PORTAL_PASSWORD')
ENT_PORTAL_EDX_LINKED_USERNAME = os.environ.get('ENT_PORTAL_EDX_LINKED_USERNAME')
ENT_PORTAL_EDX_LINKED_PASSWORD = os.environ.get('ENT_PORTAL_EDX_LINKED_PASSWORD')
ENT_COURSE_TITLE = os.environ.get('ENT_COURSE_TITLE')
ENT_COURSE_ORG = os.environ.get('ENT_COURSE_ORG')
ENT_COURSE_PRICE = os.environ.get('ENT_COURSE_PRICE')
ENT_COURSE_START_DATE = os.environ.get('ENT_COURSE_START_DATE')
DEFAULT_COURSE_PRICE = 100.0
|
agpl-3.0
| 7,302,774,754,136,636,000
| 27.55
| 105
| 0.744308
| false
| 2.572072
| false
| false
| false
|
amillar2/light-django
|
esp/mqtt_init.py
|
1
|
2256
|
import paho.mqtt.client as mqtt
import json
from .models import Switch, Device, PWM
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
#subscribe to status and discovery topics
client.subscribe("+/status")
client.subscribe("discovery")
#subscribe to switch topics
client.subscribe("+/switch/#")
# The callback for when a PUBLISH message is received from the server.
"""
def on_message(client, userdata, msg):
print(msg.topic+": "+str(msg.payload))
if "status" in msg.topic:
print("Received status message on " + msg.topic + " : " + msg.payload)
if "discovery" in msg.topic:
print("Received discovery message: " + msg.payload)
def on_publish(client, userdata, mid):
print("Published message")
"""
def on_status(client, userdata, msg):
#grab espID from <espID>/status topic string
espID = msg.topic.split('/')[0]
print(msg.payload)
if msg.payload:
statusData = json.loads(msg.payload)
d = Device.objects.filter(espID=espID)
#if device exists, update status
if d:
d = d[0]
d.update_status(statusData)
else:
print("Received status from unknown device: %s"%espID)
def on_discovery(client, userdata, msg):
    print('received discovery message')
#get espID
espID = msg.payload
#if espID exists, configure
d = Device.objects.filter(espID=espID)
if d:
d[0].config_device()
#if espID does not exist, make new object and save
else:
Device.objects.create(espID=espID, name=espID)
def on_switch(client, userdata, msg):
print("received switch input")
sw = Switch.objects.filter(topic=msg.topic)
#if switch exists, toggle pwms
print(sw)
if sw:
sw[0].toggle_pwm()
def mqtt_init():
client = mqtt.Client()
client.on_connect = on_connect
#client.on_message = on_message #for test/debug. uncomment func defs if used
#client.on_publish = on_publish
#add topic callbacks here
client.message_callback_add("+/status", on_status)
client.message_callback_add("discovery", on_discovery)
client.message_callback_add("+/switch/#", on_switch)
client.username_pw_set('test', password='testpass')
client.connect("localhost",port=1883, keepalive=60)
client.loop_start()
return client
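# Illustrative usage sketch (assumes the local broker and 'test' account
# configured above; 'esp-01' is a hypothetical espID):
#
#   client = mqtt_init()
#   client.publish('discovery', 'esp-01')   # registers/configures a device
#   client.publish('esp-01/switch/1', '')   # fires the on_switch callback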
|
mit
| -4,793,063,382,847,781,000
| 30.774648
| 79
| 0.719858
| false
| 3.016043
| false
| false
| false
|
felix9064/python
|
Demo/utils/route.py
|
1
|
1139
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# macOS route setup for using the internal and external networks at the same
# time: save this code as a .py file and run it with sudo
import os
import re
def route():
    # get the gateway IP addresses from the routing table
data = os.popen("netstat -rn|awk '{print $2}'").readlines()
    # regular expression for the external gateway IP
re_ip1 = re.compile(r'172.16.\d{1,3}.\d{1,3}')
    # regular expression for the internal gateway IP
re_ip2 = re.compile(r'198.98.\d{1,3}.\d{1,3}')
ip1 = ""
ip2 = ""
for x in data:
print(x)
if re_ip1.match(x):
            # capture the external gateway IP
ip1 = re_ip1.findall(x)[0]
if re_ip2.match(x):
            # capture the internal gateway IP
ip2 = re_ip2.findall(x)[0]
print(ip1, ip2)
    if ip1 and ip2:
        # delete the default external route
        os.popen('route delete 0.0.0.0')
        # add the internal network route
        os.popen('route -n add -net 198.98.0.0 -netmask 255.0.0.0 %s' % ip2)
        # add the external network route
        os.popen('route -n add -net 0.0.0.0 -netmask 0.0.0.0 %s' % ip1)
if __name__ == "__main__":
route()
|
mit
| 3,120,792,054,979,646,500
| 20.311111
| 76
| 0.514077
| false
| 1.957143
| false
| false
| false
|
btxgit/gazee
|
specproc.py
|
1
|
3211
|
import sqlite3
import os
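# Walks the all_comics table and emits shell commands that normalize cover
# thumbnails to 225x300: landscape covers (ratio >= 1.2) first get an
# ImageMagick rotate + thumbnail pass, then every cover is padded via the
# imageborder script so the result fills the target box.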
if __name__ == '__main__':
# with open('/tmp/out.txt', 'rb') as fd:
# s = fd.read()
# ll = s.split('\n')
# otl = []
# s = '''UPDATE all_comics SET width=?, height=?, ratio=? WHERE comicid=?;'''
#
# with sqlite3.connect('data/gazee_comics.db') as con:
#
# for l in ll:
# if l.strip() == '':
# continue
# tl = l.strip().split(',')
# id = int(tl[0].split('-')[0], 10)
# w = int(tl[1], 10)
# h = int(tl[2], 10)
# ratio = (1.0) * w / h
# otl.append( (w,h,ratio,id) )
#
# print "Committing %d records..." % len(otl)
# con.executemany(s, otl)
# con.commit()
tgtw = 225
tgth = 300
with sqlite3.connect('data/gazee_comics.db') as con:
sql = '''SELECT comicid, width, height, ratio FROM all_comics;'''
for row in con.execute(sql):
cid, w, h, ratio = row
if w == 0 or h == 0:
continue
part = (cid // 512)
if ratio >= 1.2:
rot = 90
tw = h
h = w
w = tw
ratio = (1.0) * w / h
print("convert data/cache/%d/%d-native.jpg -rotate 90 -thumbnail %dx%d data/cache/%d/%d-%dx%d.jpg" % (part, cid, tgtw, tgth, part, cid, tgtw, tgth))
# continue
else:
rot = 0
# print("%d [ %d x %d ] (%.4f)" % (cid, w, h, ratio))
h1 = tgth
w1 = int(h1 * ratio)
w2 = tgtw
h2 = int(w2 / ratio)
# print("Opt1: %d x %d Opt2: %d x %d" % (w1, h1, w2, h2))
if (w1 > tgtw):
infn = "data/cache/%d/%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
ofn = "data/cache/%d/p%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
# print("convert data/cache/%d/p%d-%dx%d.jpg -rotate 90 -thumbnail %dx%d %s" % (part, cid, tgtw, tgth, tgtw, tgth, infn))
pw = w2
ph = h2
fixh = tgth - ph
origfixh = fixh
if ((fixh %2) == 1):
fixh += 1
fixwh = fixh // 2
# print("w1, h1 (%d, %d) w2, h2 (%d, %d)" % (w1, h1, w2, h2))
if rot == 90 or not os.path.exists(ofn):
print("bash imageborder -s 0x%d -p 20 -e edge -b 2 %s %s" % (fixwh, infn, ofn))
else:
pw = w1
ph = h1
fixw = tgtw - pw
origfixw = fixw
if ((fixw % 2) == 1):
fixw += 1
fixwb = fixw//2
ofn = "data/cache/%d/p%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
if rot == 90 or not os.path.exists(ofn):
print("bash imageborder -s %dx0 -p 20 -e edge -b 2 data/cache/%d/%d-300x400.jpg %s" % (fixwb, part, cid, ofn))
print("echo %d..." % cid)
|
gpl-3.0
| 2,409,360,383,570,491,400
| 33.526882
| 164
| 0.379321
| false
| 3.211
| false
| false
| false
|
bponsler/icsv
|
icsv/tests/writeReadTests.py
|
1
|
1580
|
from os import unlink
from os.path import exists
from unittest import TestCase
from icsv import icsv, Row
class WriteReadTests(TestCase):
def setUp(self):
pass
def test_filter(self):
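        """Round-trip a CSV: build it in memory, write it to disk, read it
        back, and verify the rows survive (compared as strings)."""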
filename = "/tmp/testCsv.csv"
headers = ["one", "two", "three"]
csv = icsv(headers)
self.assertTrue(csv is not None)
self.assertEqual(csv.headers(), headers)
self.assertEqual(csv.delimiter(), ',')
rows = [
[0, 1, 2],
[3, 4, 5],
["hello", 1, True],
[1, False, "world"],
]
        # Add all of the rows to the CSV object (written to disk below)
for row in rows:
csv.addRow(row)
self.assertEqual(csv.numRows(), 4)
# Save the file
writer = csv.write(filename)
self.assertTrue(writer is not None)
# Read the same CSV
reader = csv.fromFile(filename, headers)
self.assertTrue(reader is not None)
# Compare the read data to the original
self.assertEqual(reader.numRows(), csv.numRows())
self.assertEqual(reader.numCols(), csv.numCols())
self.assertEqual(reader.headers(), csv.headers())
for index in range(len(rows)):
read = reader.getRow(index)
# Read data will be all strings
original = list(map(str, csv.getRow(index).list()))
expected = list(map(str, rows[index]))
            for col in range(len(original)):
                self.assertEqual(original[col], expected[col])
self.assertEqual(read.list(), expected)
|
mit
| 7,776,429,499,276,794,000
| 27.214286
| 63
| 0.570886
| false
| 4.061697
| true
| false
| false
|
SomewhatDamaged/Damaged-Cogs
|
persistentname.py
|
1
|
2155
|
import discord
from discord.ext import commands
import asyncio
import os
from .utils import checks
from .utils.dataIO import fileIO
class Persistentname:
"""When a user changes their account name, and no nickname is set, this will set their nickname to their old account name."""
def __init__(self, bot):
self.bot = bot
self.data = fileIO("data/persistentname/servers.json", "load")
@checks.admin_or_permissions(manage_server=True)
@commands.command(pass_context=True, name="persistentname", no_pm=True)
async def _toggle(self, context):
"""Toggles persistent names on/off for this server.
When a user changes their account name, and they have no nickname set, this will force their old account name to be their nickname."""
if context.message.server.id in self.data:
self.data.remove(context.message.server.id)
await self.bot.say("I will no longer persist usernames on this server.")
else:
self.data.append(context.message.server.id)
await self.bot.say("I will now persist usernames on this server.")
fileIO("data/persistentname/servers.json", "save", self.data)
async def listener(self, old_member, new_member):
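        # on_member_update hook: if this server opted in, the account name
        # changed, and no nickname is set, pin the old name as the nickname.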
if new_member.server.id not in self.data:
return
if old_member.name == new_member.name:
return
if new_member.nick is not None:
return
await self.bot.change_nickname(new_member, old_member.name)
def check_folder():
if not os.path.exists("data/persistentname"):
print("Creating data/persistentname folder...")
os.makedirs("data/persistentname")
def check_files():
default_servers = [ ]
if not fileIO("data/persistentname/servers.json", "check"):
print("Creating default persistentname servers.json...")
fileIO("data/persistentname/servers.json", "save", default_servers)
def setup(bot):
check_folder()
check_files()
n = Persistentname(bot)
bot.add_cog(n)
bot.add_listener(n.listener, 'on_member_update')
|
mit
| 9,112,897,618,856,068,000
| 38.698113
| 142
| 0.646868
| false
| 3.983364
| false
| false
| false
|
Manolaru/Python_train
|
Les_2/Task_6/fixture/user.py
|
1
|
2775
|
class UserHelper:
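    """Selenium page helper that fills the "add new" user form field by
    field (the repetitive click/clear/send_keys pattern comes from recorded
    browser steps) and can delete the first listed user."""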
def __init__(self,app):
self.app = app
def create_user(self, user):
wd = self.app.wd
self.app.open_home_page()
# open user form
wd.find_element_by_link_text("add new").click()
# fill user form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(user.fname)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(user.lname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(user.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys()
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(user.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(user.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(user.phoneh)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(user.email)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[7]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[7]").click()
wd.find_element_by_name("byear").click()
wd.find_element_by_name("byear").clear()
wd.find_element_by_name("byear").send_keys(user.byear)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[3]").click()
# submit user creation
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
def delete_first_user(self):
wd = self.app.wd
self.app.open_home_page()
        # select first user
        wd.find_element_by_name("selected[]").click()
        # submit deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
|
apache-2.0
| 1,534,537,264,706,128,100
| 46.844828
| 104
| 0.606486
| false
| 3.215527
| false
| false
| false
|
simonspa/django-datacollect
|
datacollect/questionnaire/migrations/0015_auto_20170324_1600.py
|
1
|
2504
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-24 15:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questionnaire', '0014_auto_20170323_2344'),
]
operations = [
migrations.AlterField(
model_name='followup',
name='familiarity',
field=models.IntegerField(choices=[(1, 'I am very familiar with the case.'), (2, 'I have information but it might be incomplete.'), (3, 'I only have little information.'), (4, "I don't have any information.")], default=4, null=True, verbose_name='Level of familiarity'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_1',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_2',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_3',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_4',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_5',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='language',
field=models.CharField(choices=[(b'en', 'English'), (b'es', 'Spanish'), (b'fr', 'French')], default='en', max_length=7, verbose_name='Language'),
),
migrations.AlterField(
model_name='followup',
name='rating',
field=models.IntegerField(choices=[(1, 'Much better'), (2, 'Somewhat better'), (3, 'Situation stayed the same overall'), (4, 'Somewhat worse'), (5, 'Much worse'), (6, "I don't know")], default=6, null=True, verbose_name='Rating'),
),
]
|
gpl-3.0
| 2,766,687,761,227,599,000
| 44.527273
| 282
| 0.594249
| false
| 3.906396
| false
| false
| false
|
redhat-openstack/manila
|
manila/tests/share/drivers/test_glusterfs.py
|
1
|
29189
|
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import socket
import ddt
import mock
from oslo_config import cfg
from manila import context
from manila import exception
from manila.share import configuration as config
from manila.share.drivers import ganesha
from manila.share.drivers import glusterfs
from manila.share.drivers.glusterfs import layout
from manila import test
from manila.tests import fake_share
from manila.tests import fake_utils
CONF = cfg.CONF
fake_gluster_manager_attrs = {
'export': '127.0.0.1:/testvol',
'host': '127.0.0.1',
'qualified': 'testuser@127.0.0.1:/testvol',
'user': 'testuser',
'volume': 'testvol',
'path_to_private_key': '/fakepath/to/privatekey',
'remote_server_password': 'fakepassword',
}
fake_share_name = 'fakename'
NFS_EXPORT_DIR = 'nfs.export-dir'
NFS_EXPORT_VOL = 'nfs.export-volumes'
NFS_RPC_AUTH_ALLOW = 'nfs.rpc-auth-allow'
NFS_RPC_AUTH_REJECT = 'nfs.rpc-auth-reject'
@ddt.ddt
class GlusterfsShareDriverTestCase(test.TestCase):
"""Tests GlusterfsShareDriver."""
def setUp(self):
super(GlusterfsShareDriverTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
self._execute = fake_utils.fake_execute
self._context = context.get_admin_context()
self.addCleanup(fake_utils.fake_execute_set_repliers, [])
self.addCleanup(fake_utils.fake_execute_clear_log)
CONF.set_default('reserved_share_percentage', 50)
CONF.set_default('driver_handles_share_servers', False)
self.fake_conf = config.Configuration(None)
self._driver = glusterfs.GlusterfsShareDriver(
execute=self._execute,
configuration=self.fake_conf)
self.share = fake_share.fake_share(share_proto='NFS')
def test_do_setup(self):
self.mock_object(self._driver, '_get_helper')
self.mock_object(layout.GlusterfsShareDriverBase, 'do_setup')
_context = mock.Mock()
self._driver.do_setup(_context)
self._driver._get_helper.assert_called_once_with()
layout.GlusterfsShareDriverBase.do_setup.assert_called_once_with(
_context)
@ddt.data(True, False)
def test_setup_via_manager(self, has_parent):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock()
share_mgr_parent = mock.Mock() if has_parent else None
nfs_helper = mock.Mock()
nfs_helper.get_export = mock.Mock(return_value='host:/vol')
self._driver.nfs_helper = mock.Mock(return_value=nfs_helper)
ret = self._driver._setup_via_manager(
{'manager': gmgr, 'share': self.share},
share_manager_parent=share_mgr_parent)
gmgr.gluster_call.assert_called_once_with(
'volume', 'set', gmgr.volume, 'nfs.export-volumes', 'off')
self._driver.nfs_helper.assert_called_once_with(
self._execute, self.fake_conf, gluster_manager=gmgr)
nfs_helper.get_export.assert_called_once_with(self.share)
self.assertEqual('host:/vol', ret)
@ddt.data({'helpercls': None, 'path': '/fakepath'},
{'helpercls': None, 'path': None},
{'helpercls': glusterfs.GlusterNFSHelper, 'path': '/fakepath'},
{'helpercls': glusterfs.GlusterNFSHelper, 'path': None})
@ddt.unpack
def test_setup_via_manager_path(self, helpercls, path):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock()
gmgr.path = path
if not helpercls:
helper = mock.Mock()
helper.get_export = mock.Mock(return_value='host:/vol')
helpercls = mock.Mock(return_value=helper)
self._driver.nfs_helper = helpercls
if helpercls == glusterfs.GlusterNFSHelper and path is None:
gmgr.get_gluster_vol_option = mock.Mock(return_value='on')
self._driver._setup_via_manager(
{'manager': gmgr, 'share': self.share})
if helpercls == glusterfs.GlusterNFSHelper and path is None:
gmgr.get_gluster_vol_option.assert_called_once_with(
NFS_EXPORT_VOL)
args = (NFS_RPC_AUTH_REJECT, '*')
else:
args = (NFS_EXPORT_VOL, 'off')
gmgr.gluster_call.assert_called_once_with(
'volume', 'set', gmgr.volume, *args)
@ddt.data(exception.ProcessExecutionError, RuntimeError)
def test_setup_via_manager_exception(self, _exception):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock(side_effect=_exception)
gmgr.volume = 'somevol'
self.assertRaises(
{exception.ProcessExecutionError:
exception.GlusterfsException}.get(
_exception, _exception), self._driver._setup_via_manager,
{'manager': gmgr, 'share': self.share})
@ddt.data('off', 'no', '0', 'false', 'disable', 'foobarbaz')
def test_setup_via_manager_export_volumes_on(self, export_vol):
gmgr = mock.Mock()
gmgr.path = None
gmgr.get_gluster_vol_option = mock.Mock(return_value=export_vol)
self._driver.nfs_helper = glusterfs.GlusterNFSHelper
self.assertRaises(exception.GlusterfsException,
self._driver._setup_via_manager,
{'manager': gmgr, 'share': self.share})
gmgr.get_gluster_vol_option.assert_called_once_with(NFS_EXPORT_VOL)
def test_check_for_setup_error(self):
self._driver.check_for_setup_error()
def test_update_share_stats(self):
self.mock_object(layout.GlusterfsShareDriverBase,
'_update_share_stats')
self._driver._update_share_stats()
(layout.GlusterfsShareDriverBase._update_share_stats.
assert_called_once_with({'storage_protocol': 'NFS',
'vendor_name': 'Red Hat',
'share_backend_name': 'GlusterFS',
'reserved_percentage': 50}))
def test_get_network_allocations_number(self):
self.assertEqual(0, self._driver.get_network_allocations_number())
def test_get_helper(self):
ret = self._driver._get_helper()
self.assertIsInstance(ret, self._driver.nfs_helper)
@ddt.data({'path': '/fakepath', 'helper': glusterfs.GlusterNFSHelper},
{'path': None, 'helper': glusterfs.GlusterNFSVolHelper})
@ddt.unpack
def test_get_helper_vol(self, path, helper):
self._driver.nfs_helper = glusterfs.GlusterNFSHelper
gmgr = mock.Mock(path=path)
ret = self._driver._get_helper(gmgr)
self.assertIsInstance(ret, helper)
@ddt.data({'op': 'allow', 'kwargs': {}},
{'op': 'allow', 'kwargs': {'share_server': None}},
{'op': 'deny', 'kwargs': {}},
{'op': 'deny', 'kwargs': {'share_server': None}})
@ddt.unpack
def test_allow_deny_access_via_manager(self, op, kwargs):
self.mock_object(self._driver, '_get_helper')
gmgr = mock.Mock()
ret = getattr(self._driver, "_%s_access_via_manager" % op
)(gmgr, self._context, self.share,
fake_share.fake_access, **kwargs)
self._driver._get_helper.assert_called_once_with(gmgr)
getattr(
self._driver._get_helper(),
"%s_access" % op).assert_called_once_with(
'/', self.share, fake_share.fake_access)
self.assertIsNone(ret)
@ddt.ddt
class GlusterNFSHelperTestCase(test.TestCase):
"""Tests GlusterNFSHelper."""
def setUp(self):
super(GlusterNFSHelperTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self.fake_conf = config.Configuration(None)
self._helper = glusterfs.GlusterNFSHelper(
self._execute, self.fake_conf, gluster_manager=gluster_manager)
def test_get_export(self):
ret = self._helper.get_export(mock.Mock())
self.assertEqual(fake_gluster_manager_attrs['export'], ret)
@ddt.data({'output_str': '/foo(10.0.0.1|10.0.0.2),/bar(10.0.0.1)',
'expected': {'foo': ['10.0.0.1', '10.0.0.2'],
'bar': ['10.0.0.1']}},
{'output_str': None, 'expected': {}})
@ddt.unpack
def test_get_export_dir_dict(self, output_str, expected):
self.mock_object(self._helper.gluster_manager,
'get_gluster_vol_option',
mock.Mock(return_value=output_str))
ret = self._helper._get_export_dir_dict()
self.assertEqual(expected, ret)
(self._helper.gluster_manager.get_gluster_vol_option.
assert_called_once_with(NFS_EXPORT_DIR))
def test_manage_access_bad_access_type(self):
cbk = None
access = {'access_type': 'bad', 'access_to': None}
self.assertRaises(exception.InvalidShareAccess,
self._helper._manage_access, fake_share_name,
access['access_type'], access['access_to'], cbk)
def test_manage_access_noop(self):
cbk = mock.Mock(return_value=True)
access = fake_share.fake_access()
export_dir_dict = mock.Mock()
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self._helper._get_export_dir_dict.assert_called_once_with()
cbk.assert_called_once_with(export_dir_dict, fake_share_name,
access['access_to'])
self.assertIsNone(ret)
def test_manage_access_adding_entry(self):
def cbk(d, key, value):
d[key].append(value)
access = fake_share.fake_access()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.2'],
}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.2|10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_manage_access_adding_entry_cmd_fail(self):
def cbk(d, key, value):
d[key].append(value)
def raise_exception(*args, **kwargs):
raise exception.ProcessExecutionError()
access = fake_share.fake_access()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.2'],
}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.2|10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self.mock_object(self._helper.gluster_manager, 'gluster_call',
mock.Mock(side_effect=raise_exception))
self.mock_object(glusterfs.LOG, 'error')
self.assertRaises(exception.ProcessExecutionError,
self._helper._manage_access,
fake_share_name, access['access_type'],
access['access_to'], cbk)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
glusterfs.LOG.error.assert_called_once_with(mock.ANY, mock.ANY)
def test_manage_access_removing_last_entry(self):
def cbk(d, key, value):
d.pop(key)
access = fake_share.fake_access()
args = ('volume', 'reset', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR)
export_dir_dict = {'fakename': ['10.0.0.1']}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_allow_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {'example.com': ['10.0.0.1']}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.1)'
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.allow_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
'volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
def test_allow_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {'fakename': ['10.0.0.1']}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.allow_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.deny_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.1'],
}
export_str = '/example.com(10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.deny_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
@ddt.ddt
class GlusterNFSVolHelperTestCase(test.TestCase):
"""Tests GlusterNFSVolHelper."""
def setUp(self):
super(GlusterNFSVolHelperTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self.fake_conf = config.Configuration(None)
self._helper = glusterfs.GlusterNFSVolHelper(
self._execute, self.fake_conf, gluster_manager=gluster_manager)
@ddt.data({'output_str': '10.0.0.1,10.0.0.2',
'expected': ['10.0.0.1', '10.0.0.2']},
{'output_str': None, 'expected': []})
@ddt.unpack
def test_get_vol_exports(self, output_str, expected):
self.mock_object(self._helper.gluster_manager,
'get_gluster_vol_option',
mock.Mock(return_value=output_str))
ret = self._helper._get_vol_exports()
self.assertEqual(expected, ret)
(self._helper.gluster_manager.get_gluster_vol_option.
assert_called_once_with(NFS_RPC_AUTH_ALLOW))
def test_manage_access_bad_access_type(self):
cbk = None
access = {'access_type': 'bad', 'access_to': None}
self.assertRaises(exception.InvalidShareAccess,
self._helper._manage_access,
access['access_type'], access['access_to'], cbk)
def test_manage_access_noop(self):
cbk = mock.Mock(return_value=True)
access = fake_share.fake_access()
export_list = mock.Mock()
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self._helper._get_vol_exports.assert_called_once_with()
cbk.assert_called_once_with(export_list, access['access_to'])
self.assertIsNone(ret)
def test_manage_access_adding_entry(self):
def cbk(li, v):
li.append(v)
access = fake_share.fake_access()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_manage_access_adding_entry_cmd_fail(self):
def cbk(li, v):
li.append(v)
def raise_exception(*args, **kwargs):
raise exception.ProcessExecutionError()
access = fake_share.fake_access()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self.mock_object(self._helper.gluster_manager, 'gluster_call',
mock.Mock(side_effect=raise_exception))
self.assertRaises(exception.ProcessExecutionError,
self._helper._manage_access,
access['access_type'],
access['access_to'], cbk)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str)
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_manage_access_removing_last_entry(self):
def cbk(li, v):
li.remove(v)
access = fake_share.fake_access()
export_list = ['10.0.0.1']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_vol_exports.assert_called_once_with()
argseq = (('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW),
('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT, '*'))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_allow_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.allow_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_allow_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.1']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.allow_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = []
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.deny_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.1', '10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.deny_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
class GaneshaNFSHelperTestCase(test.TestCase):
"""Tests GaneshaNFSHelper."""
def setUp(self):
super(GaneshaNFSHelperTestCase, self).setUp()
self.gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self._root_execute = mock.Mock(return_value=('', ''))
self.access = fake_share.fake_access()
self.fake_conf = config.Configuration(None)
self.fake_template = {'key': 'value'}
self.share = fake_share.fake_share()
self.mock_object(glusterfs.ganesha_utils, 'RootExecutor',
mock.Mock(return_value=self._root_execute))
self.mock_object(glusterfs.ganesha.GaneshaNASHelper, '__init__',
mock.Mock())
socket.gethostname = mock.Mock(return_value='example.com')
self._helper = glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
self._helper.tag = 'GLUSTER-Ganesha-localhost'
def test_init_local_ganesha_server(self):
glusterfs.ganesha_utils.RootExecutor.assert_called_once_with(
self._execute)
socket.gethostname.assert_has_calls([mock.call()])
glusterfs.ganesha.GaneshaNASHelper.__init__.assert_has_calls(
[mock.call(self._root_execute, self.fake_conf,
tag='GLUSTER-Ganesha-example.com')])
def test_get_export(self):
ret = self._helper.get_export(self.share)
self.assertEqual('example.com:/fakename', ret)
def test_init_remote_ganesha_server(self):
ssh_execute = mock.Mock(return_value=('', ''))
CONF.set_default('glusterfs_ganesha_server_ip', 'fakeip')
self.mock_object(glusterfs.ganesha_utils, 'SSHExecutor',
mock.Mock(return_value=ssh_execute))
glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
glusterfs.ganesha_utils.SSHExecutor.assert_called_once_with(
'fakeip', 22, None, 'root', password=None, privatekey=None)
glusterfs.ganesha.GaneshaNASHelper.__init__.assert_has_calls(
[mock.call(ssh_execute, self.fake_conf,
tag='GLUSTER-Ganesha-fakeip')])
def test_init_helper(self):
ganeshelper = mock.Mock()
exptemp = mock.Mock()
def set_attributes(*a, **kw):
self._helper.ganesha = ganeshelper
self._helper.export_template = exptemp
self.mock_object(ganesha.GaneshaNASHelper, 'init_helper',
mock.Mock(side_effect=set_attributes))
self.assertEqual({}, glusterfs.GaneshaNFSHelper.shared_data)
self._helper.init_helper()
ganesha.GaneshaNASHelper.init_helper.assert_called_once_with()
self.assertEqual(ganeshelper, self._helper.ganesha)
self.assertEqual(exptemp, self._helper.export_template)
self.assertEqual({
'GLUSTER-Ganesha-localhost': {
'ganesha': ganeshelper,
'export_template': exptemp}},
glusterfs.GaneshaNFSHelper.shared_data)
other_helper = glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
other_helper.tag = 'GLUSTER-Ganesha-localhost'
other_helper.init_helper()
self.assertEqual(ganeshelper, other_helper.ganesha)
self.assertEqual(exptemp, other_helper.export_template)
def test_default_config_hook(self):
fake_conf_dict = {'key': 'value1'}
mock_ganesha_utils_patch = mock.Mock()
def fake_patch_run(tmpl1, tmpl2):
mock_ganesha_utils_patch(
copy.deepcopy(tmpl1), tmpl2)
tmpl1.update(tmpl2)
self.mock_object(glusterfs.ganesha.GaneshaNASHelper,
'_default_config_hook',
mock.Mock(return_value=self.fake_template))
self.mock_object(glusterfs.ganesha_utils, 'path_from',
mock.Mock(return_value='/fakedir/glusterfs/conf'))
self.mock_object(self._helper, '_load_conf_dir',
mock.Mock(return_value=fake_conf_dict))
self.mock_object(glusterfs.ganesha_utils, 'patch',
mock.Mock(side_effect=fake_patch_run))
ret = self._helper._default_config_hook()
glusterfs.ganesha.GaneshaNASHelper._default_config_hook.\
assert_called_once_with()
glusterfs.ganesha_utils.path_from.assert_called_once_with(
glusterfs.__file__, 'conf')
self._helper._load_conf_dir.assert_called_once_with(
'/fakedir/glusterfs/conf')
glusterfs.ganesha_utils.patch.assert_called_once_with(
self.fake_template, fake_conf_dict)
self.assertEqual(fake_conf_dict, ret)
def test_fsal_hook(self):
self._helper.gluster_manager.path = '/fakename'
output = {
'Hostname': '127.0.0.1',
'Volume': 'testvol',
'Volpath': '/fakename'
}
ret = self._helper._fsal_hook('/fakepath', self.share, self.access)
self.assertEqual(output, ret)
|
apache-2.0
| -4,747,165,154,843,611,000
| 39.48405
| 78
| 0.590908
| false
| 3.541065
| true
| false
| false
|
boxed/python-terminal-menu
|
terminal_menu.py
|
1
|
2331
|
import os
import sys
import termios
import fcntl
from blessings import Terminal
def getch():
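    # Read one character from stdin without waiting for Enter: temporarily
    # disable canonical mode and echo, make the fd non-blocking, then poll
    # until a byte arrives. The terminal state is restored in all cases.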
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
oldflags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
c = None
try:
while 1:
try:
c = sys.stdin.read(1)
break
except IOError:
pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags)
return c
prefix = '\x1b\x5b'
lookup = {
'\x1b\x5b\x41': 'up',
'\x1b\x5b\x42': 'down',
'\x1b\x5b\x44': 'left',
'\x1b\x5b\x43': 'right',
}
def get_arrow_key_or_character():
buf = ''
while True:
buf += getch()
if buf in lookup:
return lookup[buf]
if buf and not prefix.startswith(buf):
return buf
def menu(menu_items):
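    """Draw menu_items on the last rows of the terminal, move the highlight
    with the up/down arrow keys, and return the item confirmed with Enter
    (or None for an empty list)."""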
if not menu_items:
return None
# hide cursor
sys.stdout.write("\033[?25l")
sys.stdout.flush()
try:
term = Terminal()
print '\n' * (len(menu_items) - 2)
focus = 0
while True:
for i, line in enumerate(menu_items):
with term.location(0, term.height - len(menu_items) + i):
if i == focus:
print term.on_red(term.bright_white(line)),
else:
print line,
k = get_arrow_key_or_character()
if k == 'down':
focus += 1
elif k == 'up':
focus -= 1
elif k == '\n':
break
# make sure we don't go outside menu
if focus < 0:
focus = 0
if focus == len(menu_items):
focus = len(menu_items) - 1
finally:
# show cursor again
sys.stdout.write("\033[?25h")
sys.stdout.flush()
print '' # Write a newline to avoid next output writing over the last line of the menu
return menu_items[focus]
m = menu(['foo 1', 'foo 2', 'foo 3', 'foo 4', 'foo 5', 'foo 6'])
print 'chosen:', m
|
mit
| 3,803,221,867,195,180,500
| 24.075269
| 91
| 0.510511
| false
| 3.5
| false
| false
| false
|
pingdynasty/OwlProgram
|
Tools/Heavy/uploader.py
|
1
|
24501
|
#!/usr/bin/env python
# Copyright (c) 2015-2017 Enzien Audio, Ltd. (info@enzienaudio.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import base64
import datetime
import getpass
import json
import os
import requests
import shutil
import stat
import sys
import tempfile
import time
import urlparse
import zipfile
class Colours:
purple = "\033[95m"
cyan = "\033[96m"
dark_cyan = "\033[36m"
blue = "\033[94m"
green = "\033[92m"
yellow = "\033[93m"
red = "\033[91m"
bold = "\033[1m"
underline = "\033[4m"
end = "\033[0m"
class ErrorCodes(object):
    # NOTE(mhroth): this class could inherit from Enum, but we choose not to,
    # so as not to require an additional dependency
# http://www.tldp.org/LDP/abs/html/exitcodes.html
# http://stackoverflow.com/questions/1101957/are-there-any-standard-exit-status-codes-in-linux
CODE_OK = 0 # success!
CODE_MAIN_NOT_FOUND = 3 # _main.pd not found
CODE_HEAVY_COMPILE_ERRORS = 4 # heavy returned compiler errors
CODE_UPLOAD_ASSET_TOO_LARGE = 5 # the size of the uploadable asset is too large
CODE_RELEASE_NOT_AVAILABLE = 6 # the requested release is not available
CODE_CONNECTION_ERROR = 7 # HTTPS connection could not be made to the server
CODE_CONNECTION_TIMEOUT = 8 # HTTPS connection has timed out
CODE_CONNECTION_400_500 = 9 # a 400 or 500 error has occured
CODE_INVALID_TOKEN = 10 # the user token could not be parsed
CODE_NEW_PATCH_FAIL = 11 # a new patch could not be made
    CODE_EXCEPTION = 125 # a generic exception has occurred
class UploaderException(Exception):
def __init__(self, code, message=None, e=None):
self.code = code
self.message = message
self.e = e
# the maximum file upload size of 1MB
__HV_MAX_UPLOAD_SIZE = 1 * 1024*1024
__HV_UPLOADER_SERVICE_TOKEN = \
"eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9." \
"eyJzdGFydERhdGUiOiAiMjAxNi0xMi0xNVQyMzoyNToxMC4wOTU2MjIiLCAic2VydmljZSI6ICJoZWF2eV91cGxvYWRlciJ9." \
"w2o1_RttJUAiq6WyN0J7MhDsaSseISzgDAQ9aP9Di6M="
__SUPPORTED_GENERATOR_SET = {
"c-src",
"web-local", "web-js",
"fabric-src", "fabric-macos-x64", "fabric-win-x86", "fabric-win-x64", "fabric-linux-x64", "fabric-android-armv7a",
"unity-src", "unity-macos-x64", "unity-win-x86", "unity-win-x64", "unity-linux-x64", "unity-android-armv7a",
"wwise-src", "wwise-macos-x64", "wwise-win-x86", "wwise-win-x64", "wwise-linux-x64", "wwise-ios-armv7a"
"vst2-src", "vst2-macos-x64", "vst2-win-x86", "vst2-win-x64", "vst2-linux-x64"
}
def __zip_dir(in_dir, zip_path, file_filter=None):
""" Recursively zip an entire directory with an optional file filter
"""
zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED)
for subdir, dirs, files in os.walk(in_dir):
for f in files:
if (file_filter is None) or (f.lower().split(".")[-1] in file_filter):
zf.write(
filename=os.path.join(subdir,f),
arcname=os.path.relpath(os.path.join(subdir,f), start=in_dir))
return zip_path
def __unzip(zip_path, target_dir):
""" Unzip a file to a given directory. All destination files are overwritten.
"""
zipfile.ZipFile(zip_path).extractall(target_dir)
def __get_file_url_stub_for_generator(json_api, g):
""" Returns the file link for a specific generator.
Returns None if no link could be found.
"""
for i in json_api["included"]:
if (i["type"] == "file") and (g == i["data"]["buildId"]):
return i["links"]["self"]
return None # by default, return None
def upload(input_dir, output_dirs=None, name=None, owner=None, generators=None, b=False, y=False, release=None, release_override=False, domain=None, verbose=False, token=None, clear_token=False, service_token=None, force_new_patch=False):
""" Upload a directory to the Heavy Cloud Service.
Parameters
----------
input_dir : str
Directory containing _main.pd file.
output_dirs : list, optional
List of directories where the output should be placed. Usually the output directory list has only one element.
If no argument is given, the input directory will be used.
name : str, optional
The name of the patch.
If no argument is given, the name "heavy" is used.
owner : str, optional
The name of the owner of the patch. Usually this is an organisation.
If no argument is given, the submitting user name is used.
generators : list, optional
A list of generators e.g. 'c', 'unity', or 'vst2-x86'
b : bool, optional
If True, puts the results of each generator into its own directory.
False by default in which case all files are put into the same directory.
y : bool, optional
If True, extract only generated C files, static files are deleted. False by default.
release : str, optional
The name of the release to use for compiling.
release_override : bool, optional
Disable the validity check for a requested release. Forces sending a
release request to the server.
verbose : bool, optional
False by default.
token : str, optional
The token used to identify the user to Heavy Cloud Service.
By default the stored token will be used.
clear_token : bool, optional
Clears and ignores any existing stored tokens. Requests a new one from the command line.
service_token : str, optional
Pass an optional service token to be used instead of the default heavy_uploader.
force_new_patch : bool, optional
Indicate that a new patch should be created with the given name, if it does not yet exist.
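    Examples
    --------
    A minimal sketch; the patch name and paths here are illustrative:
    >>> exit_code, reply_json = upload(
    ...     input_dir="./my_patch",
    ...     output_dirs=["./out"],
    ...     name="my_patch",
    ...     generators=["c-src"])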
"""
# https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
try:
# set default values
name = name or "heavy"
domain = domain or "https://enzienaudio.com"
exit_code = ErrorCodes.CODE_OK
reply_json = {}
temp_dir = None
post_data = {}
# token should be stored in ~/.heavy/token
token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
if token is None:
if os.path.exists(token_path):
if clear_token:
os.remove(token_path)
else:
with open(token_path, "r") as f:
token = f.read()
if token is None:
print "Please provide a user token from enzienaudio.com. " \
"Create or copy one from https://enzienaudio.com/h/<username>/settings."
token = getpass.getpass("Enter user token: ")
# write token to file
if not os.path.exists(os.path.dirname(token_path)):
# ensure that the .heavy directory exists
os.makedirs(os.path.dirname(token_path))
with open(token_path, "w") as f:
f.write(token)
os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) # force rw------- permissions on the file
tick = time.time()
# check the validity of the token
try:
            # check the validity of the token
payload = json.loads(base64.urlsafe_b64decode(token.split(".")[1]))
payload["startDate"] = datetime.datetime.strptime(payload["startDate"], "%Y-%m-%dT%H:%M:%S.%f")
# ensure that the token is valid
now = datetime.datetime.utcnow()
assert payload["startDate"] <= now
if owner is None:
# if an owner is not supplied, default to the user name in the token
owner = payload["name"]
except Exception as e:
print "The user token is invalid. Generate a new one at https://enzienaudio.com/h/<username>/settings/."
exit_code = ErrorCodes.CODE_INVALID_TOKEN
raise e
# if there is a user-supplied service token, do a basic validity check
if service_token:
try:
                # check the validity of the token
payload = json.loads(base64.urlsafe_b64decode(token.split(".")[1]))
payload["startDate"] = datetime.datetime.strptime(payload["startDate"], "%Y-%m-%dT%H:%M:%S.%f")
# ensure that the token is valid
now = datetime.datetime.utcnow()
assert payload["startDate"] <= now
assert "service" in payload, "'service' field required in service token payload."
except Exception as e:
print "The supplied service token is invalid. A default token will be used."
service_token = __HV_UPLOADER_SERVICE_TOKEN
else:
service_token = __HV_UPLOADER_SERVICE_TOKEN
# parse the optional release argument
if release:
if not release_override:
# check the validity of the current release
releases_json = requests.get(urlparse.urljoin(domain, "/a/releases/")).json()
if release in releases_json:
today = datetime.datetime.now()
valid_until = datetime.datetime.strptime(releases_json[release]["validUntil"], "%Y-%m-%d")
if today > valid_until:
print "{0}Warning:{1} The release \"{2}\" expired on {3}. It may be removed at any time!".format(
Colours.yellow, Colours.end,
release,
releases_json[release]["validUntil"])
elif (valid_until - today) <= datetime.timedelta(weeks=4):
print "{0}Warning:{1} The release \"{2}\" will expire soon on {3}.".format(
Colours.yellow, Colours.end,
release,
releases_json[release]["validUntil"])
else:
print "{0}Error:{1} The release \"{2}\" is not available. Available releases are:".format(
Colours.red, Colours.end,
release)
for k,v in releases_json.items():
print "* {0} ({1})".format(
k,
v["releaseDate"])
raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE)
post_data["release"] = release
# make a temporary directory
temp_dir = tempfile.mkdtemp(prefix="lroyal-")
# zip up the pd directory into the temporary directory
if not os.path.exists(os.path.join(input_dir, "_main.pd")):
raise UploaderException(
ErrorCodes.CODE_MAIN_NOT_FOUND,
"Root Pd directory does not contain a file named _main.pd.")
zip_path = __zip_dir(
input_dir,
os.path.join(temp_dir, "archive.zip"),
file_filter={"pd"})
if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE:
raise UploaderException(
ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE,
"The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.".format(
os.stat(zip_path).st_size))
# the outputs to generate
generators = list({s.lower() for s in set(generators or [])} & __SUPPORTED_GENERATOR_SET)
# check if the patch exists already. Ask to create it if it doesn't exist
r = requests.get(
urlparse.urljoin(domain, "/a/patches/{0}/{1}/".format(owner, name)),
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
})
r.raise_for_status()
reply_json = r.json()
if "errors" in reply_json:
if reply_json["errors"][0]["status"] == "404":
# the patch does not exist
if force_new_patch:
create_new_patch = True
else:
create_new_patch = raw_input("A patch called \"{0}\" does not exist for owner \"{1}\". Create it? (y/n):".format(name, owner))
create_new_patch = (create_new_patch == "y")
if create_new_patch:
r = requests.post(
urlparse.urljoin(domain, "/a/patches/"),
data={"owner_name":owner, "name":name},
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
})
r.raise_for_status()
reply_json = r.json()
if "errors" in reply_json:
raise UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
reply_json["errors"][0]["detail"])
else:
pass # no errors? everything is cool! Proceed.
else:
UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
"A patch called \"{0}\" does not exist for owner \"{1}\"".format(owner, name))
else:
raise UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
reply_json["errors"][0]["detail"])
else:
pass # the patch exists, move on
# upload the job, get the response back
r = requests.post(
urlparse.urljoin(domain, "/a/patches/{0}/{1}/jobs/".format(owner, name)),
data=post_data,
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
},
files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")})
r.raise_for_status()
# decode the JSON API response (See below for an example response)
reply_json = r.json()
if verbose:
print json.dumps(reply_json, sort_keys=True, indent=2, separators=(",", ": "))
# print any warnings
for i,x in enumerate(reply_json.get("warnings",[])):
print "{3}) {0}Warning:{1} {2}".format(
Colours.yellow, Colours.end, x["detail"], i+1)
# check for errors
if len(reply_json.get("errors",[])) > 0:
for i,x in enumerate(reply_json["errors"]):
print "{3}) {0}Error:{1} {2}".format(
Colours.red, Colours.end, x["detail"], i+1)
raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS)
print "Job URL:", urlparse.urljoin(domain, reply_json["data"]["links"]["html"])
print "Heavy release:", reply_json["data"]["attributes"]["release"]
if len(generators) > 0:
print "Downloaded files placed in:"
# retrieve all requested files
for i,g in enumerate(generators):
file_url = urlparse.urljoin(
domain,
"/".join([
reply_json["data"]["links"]["html"],
g.replace("-", "/"),
"archive.zip"
])
)
if file_url and (len(output_dirs) > i or b):
r = requests.get(
file_url,
headers={
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
},
timeout=None # some builds can take a very long time
)
r.raise_for_status()
# write the reply to a temporary file
c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
with open(c_zip_path, "wb") as f:
f.write(r.content)
# unzip the files to where they belong
if b:
target_dir = os.path.join(os.path.abspath(os.path.expanduser(output_dirs[0])), g)
else:
target_dir = os.path.abspath(os.path.expanduser(output_dirs[i]))
if not os.path.exists(target_dir):
os.makedirs(target_dir) # ensure that the output directory exists
__unzip(c_zip_path, target_dir)
if g == "c-src" and y:
keep_files = ("_{0}.h".format(name), "_{0}.hpp".format(name), "_{0}.cpp".format(name))
for f in os.listdir(target_dir):
if not f.endswith(keep_files):
                            os.remove(os.path.join(target_dir, f))
print " * {0}: {1}".format(g, target_dir)
else:
print " * {0}Warning:{1} {2} files could not be retrieved.".format(
Colours.yellow, Colours.end,
g)
print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
except UploaderException as e:
exit_code = e.code
if e.message:
print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e.message)
except requests.ConnectionError as e:
print "{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\n{2}".format(Colours.red, Colours.end, e)
exit_code = ErrorCodes.CODE_CONNECTION_ERROR
except requests.Timeout as e:
print "{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\n{2}".format(Colours.red, Colours.end, e)
exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT
except requests.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized:
print "{0}Error:{1} Unknown username or password.".format(Colours.red, Colours.end)
else:
print "{0}Error:{1} An HTTP error has occurred with URL {2}\n{3}".format(Colours.red, Colours.end, e.request.path_url, e)
exit_code = ErrorCodes.CODE_CONNECTION_400_500
except Exception as e:
# a generic catch for any other exception
exit_code = exit_code if exit_code != ErrorCodes.CODE_OK else ErrorCodes.CODE_EXCEPTION
print "{0}Error:{1} ({2}) {3}".format(Colours.red, Colours.end, e.__class__, e)
print "Getting a weird error? Get the latest version with 'pip install hv-uploader -U', or check for issues at https://github.com/enzienaudio/heavy/issues."
finally:
if temp_dir:
shutil.rmtree(temp_dir) # delete the temporary directory no matter what
return exit_code, reply_json
def main():
parser = argparse.ArgumentParser(
description="Compiles a Pure Data file.")
parser.add_argument(
"input_dir",
help="A directory containing _main.pd. All .pd files in the directory structure will be uploaded.")
parser.add_argument(
"-n", "--name",
default="heavy",
help="Patch name. If it doesn't exist on the Heavy site, the uploader will fail.")
parser.add_argument(
"--owner",
help="The name of the owner of patch. Usually this is of an organisation.")
parser.add_argument(
"-g", "--gen",
nargs="+",
help="List of generator outputs. Currently supported generators are '" + "', '".join(sorted(__SUPPORTED_GENERATOR_SET)) + "'.")
parser.add_argument(
"-b",
help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.",
action="count")
parser.add_argument(
"-y",
help="Extract only the generated C files. Static files are deleted. "
"Only effective for the 'c' generator.",
action="count")
parser.add_argument(
"-o", "--out",
nargs="+",
default=["./"], # by default
help="List of destination directories for retrieved files. Order should be the same as for --gen.")
parser.add_argument(
"-r", "--release",
help="Optionally request a specific release of Heavy to use while compiling.")
parser.add_argument(
"-rr",
help="Send a request for a specific release to the server without checking for validity first.",
action="count")
parser.add_argument(
"-v", "--verbose",
help="Show debugging information.",
action="count")
parser.add_argument(
"-t", "--token",
help="Use the specified token.")
parser.add_argument(
"--clear_token",
help="Clears the exsiting token and asks for a new one from the command line.",
action="count")
parser.add_argument(
"--service_token",
help="Use a custom service token.")
parser.add_argument(
"-d", "--domain",
default="https://enzienaudio.com",
help="Domain. Default is https://enzienaudio.com.")
parser.add_argument(
"--force_new_patch",
help="Create a new patch if the given name doesn't already exist.",
action="count")
args = parser.parse_args()
    exit_code, response_obj = upload(
input_dir=args.input_dir,
output_dirs=args.out,
name=args.name,
owner=args.owner,
generators=args.gen,
b=args.b,
y=args.y,
release=args.release,
release_override=args.rr,
domain=args.domain,
verbose=args.verbose,
token=args.token,
clear_token=args.clear_token,
service_token=args.service_token,
force_new_patch=args.force_new_patch)
# exit and return the exit code
sys.exit(exit_code)
if __name__ == "__main__":
main()
"""
An example of the server response:
{
"data": {
"attributes": {
"compileTime": 0.266899,
"index": 188,
"release": "r2016.11",
"submittedAt": "2016-12-23T12:49:04.500000",
"warnings": []
},
"id": "mhroth/test_osc/188",
"links": {
"html": "/h/mhroth/test_osc/188",
"self": "/a/jobs/mhroth/test_osc/188"
},
"relationships": {
"files": {
"data": [
{
"id": "mhroth/test_osc/188/c/src",
"type": "file"
}
]
},
"patch": {
"links": {
"html": "/h/mhroth/test_osc",
"self": "/a/patches/mhroth/test_osc"
}
},
"submittedBy": {
"links": {
"html": "/h/mhroth",
"self": "/a/users/mhroth"
}
}
},
"type": "job"
},
"included": [
{
"data": {
"buildId": "c-src",
"compileTime": 0.266899,
"date": "2016-12-23T12:49:04.500000",
"mime": "application/zip",
"size": 51484
},
"id": "mhroth/test_osc/188/c/src",
"links": {
"self": "/h/mhroth/test_osc/188/c/src/archive.zip"
},
"type": "file"
}
]
}
"""
|
gpl-2.0
| -762,648,957,300,477,000
| 39.835
| 238
| 0.5621
| false
| 3.961358
| false
| false
| false
|
erikabarros/naguil
|
backend/appengine/routes/formacaos/edit.py
|
1
|
1215
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from gaepermission.decorator import login_not_required
from tekton import router
from gaecookie.decorator import no_csrf
from formacao_app import formacao_facade
from routes import formacaos
from tekton.gae.middleware.redirect import RedirectResponse
@login_not_required
@no_csrf
def index(formacao_id):
formacao = formacao_facade.get_formacao_cmd(formacao_id)()
formacao_form = formacao_facade.formacao_form()
context = {'save_path': router.to_path(save, formacao_id), 'formacao': formacao_form.fill_with_model(formacao)}
return TemplateResponse(context, 'formacaos/formacao_form.html')
@login_not_required
def save(formacao_id, **formacao_properties):
cmd = formacao_facade.update_formacao_cmd(formacao_id, **formacao_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors, 'formacao': formacao_properties}
return TemplateResponse(context, 'formacaos/formacao_form.html')
return RedirectResponse(router.to_path(formacaos))
|
mit
| 1,807,663,158,633,435,000
| 39.5
| 115
| 0.766255
| false
| 3.248663
| false
| false
| false
|
ClaudioNahmad/Servicio-Social
|
Parametros/CosmoMC/prerrequisitos/plc-2.0/.waf-1.8.8-eab538dea2e33915d3770ff3f393b18b/waflib/Tools/cxx.py
|
1
|
1212
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
from waflib import TaskGen,Task,Utils
from waflib.Tools import c_preproc
from waflib.Tools.ccroot import link_task,stlink_task
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self,node):
return self.create_compiled_task('cxx',node)
if not'.c'in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
class cxx(Task.Task):
run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
vars=['CXXDEPS']
ext_in=['.h']
scan=c_preproc.scan
class cxxprogram(link_task):
run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
vars=['LINKDEPS']
ext_out=['.bin']
inst_to='${BINDIR}'
class cxxshlib(cxxprogram):
inst_to='${LIBDIR}'
class cxxstlib(stlink_task):
pass
|
gpl-3.0
| 5,481,147,303,242,751,000
| 45.615385
| 298
| 0.695545
| false
| 2.589744
| false
| false
| false
|
Oxygem/canaryd
|
canaryd/__main__.py
|
1
|
3515
|
# canaryd
# File: canaryd/__main__.py
# Desc: entry point for canaryd
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
backoff(
ping, settings,
error_message='Could not ping',
max_wait=settings.collect_interval_s,
)
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings)
# Get the initial state
logger.info('Getting initial state...')
start_time = time()
states = get_plugin_states(plugins, settings)
    # Filter out the non-working plugins and wrap each as a (command, data)
    # tuple. We don't track errors on the initial sync because canaryd often
    # starts early in a server's boot, when some services aren't up yet; the
    # next state collection will collect and sync these.
working_states = []
for plugin, status_data in states:
status, data = status_data
if status is not True:
continue
working_states.append((plugin, ('SYNC', data)))
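    # working_states now looks like [(plugin, ('SYNC', data)), ...]: each
    # working plugin paired with a SYNC command and its collected data.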
# Sync this state and get settings
logger.info('Syncing initial state...')
remote_settings = backoff(
sync_states, working_states, settings,
error_message='Could not sync state',
max_wait=settings.collect_interval_s,
)
# Update settings w/remote ones
settings.update(remote_settings)
# Run the loop
logger.info('Starting daemon loop...')
# Make previous states dict
previous_states = dict(
(plugin, status_data[1])
for plugin, status_data in working_states
)
# Now that we've settings - setup graceful (clean shutdown) exit handling
signal.signal(signal.SIGTERM, handle_graceful_quit)
signal.signal(signal.SIGINT, handle_graceful_quit)
try:
run_daemon(previous_states, settings, start_time=start_time)
except GracefulExitRequested:
shutdown(settings) # we're exiting, so only one shot at this
try:
main()
except Exception:
# TODO: public Sentry logging
raise
|
mit
| -8,296,029,549,149,108,000
| 26.460938
| 80
| 0.675676
| false
| 3.84153
| false
| false
| false
|
DavideCanton/Python3
|
quadtree/gui.py
|
1
|
1615
|
__author__ = 'davide'
from random import randint
from quadtree import QuadTree, Rect
import pygame
from pygame.constants import *
from pygame.color import THECOLORS
from pygame.draw import rect, circle, line
W = 800
H = 600
R = 2
N = 100
def col(name):
"""
@type name: str
@return the color as a tuple
"""
return THECOLORS[name]
def draw(surf, qt):
"""
@param surf: the surface
@type surf: pygame.Surface
@param qt: quadtree
@type qt: QuadTree
"""
for node in qt:
rb = node.bounds
rect_ = pygame.Rect(rb.x, rb.y, rb.w, rb.h)
if node.val:
circle(surf, col("red"), node.val[0], R)
rect(surf, col("black"), rect_, 1)
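# draw() outlines each node's bounding rect and, when a node stores a point,
# marks it with a red circle of radius R (see the loop above).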
def main():
pygame.init()
screen = pygame.display.set_mode((W, H))
clock = pygame.time.Clock()
data = [(randint(0, W), randint(0, H)) for _ in range(N)]
qt = QuadTree([], W, H)
i = 0
going = True
while True:
for event in pygame.event.get():
if event.type == QUIT:
going = False
elif event.type == KEYDOWN and event.key == K_ESCAPE:
going = False
elif (event.type == KEYDOWN
and event.key == K_F4
and event.mod & KMOD_ALT):
going = False
if not going:
break
if i < len(data):
qt.add_node(data[i])
qt.assert_correct()
screen.fill(col("white"))
draw(screen, qt)
pygame.display.flip()
clock.tick(10)
i += 1
if __name__ == "__main__":
main()
|
gpl-3.0
| -9,208,982,842,127,232,000
| 19.987013
| 65
| 0.52322
| false
| 3.549451
| false
| false
| false
|
joshuamckenty/yolo-octo-wookie
|
nova/cloudpipe/pipelib.py
|
1
|
3846
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CloudPipe - Build a user-data payload zip file, and launch
an instance with it.
"""
import logging
import os
import tempfile
import base64
from zipfile import ZipFile, ZIP_DEFLATED
from nova import exception
from nova import flags
from nova.auth import users
from nova import utils
from nova.endpoint import api
FLAGS = flags.FLAGS
flags.DEFINE_string('boot_script_template',
utils.abspath('cloudpipe/bootscript.sh'),
'Template for script to run on cloudpipe instance boot')
class CloudPipe(object):
def __init__(self, cloud_controller):
self.controller = cloud_controller
self.manager = users.UserManager.instance()
def launch_vpn_instance(self, project_id):
logging.debug( "Launching VPN for %s" % (project_id))
project = self.manager.get_project(project_id)
# Make a payload.zip
tmpfolder = tempfile.mkdtemp()
filename = "payload.zip"
zippath = os.path.join(tmpfolder, filename)
z = ZipFile(zippath, "w", ZIP_DEFLATED)
        z.write(FLAGS.boot_script_template, 'autorun.sh')
z.close()
key_name = self.setup_keypair(project.project_manager_id, project_id)
        zippy = open(zippath, "rb")  # binary read so the base64 payload isn't corrupted
context = api.APIRequestContext(handler=None, user=project.project_manager, project=project)
reservation = self.controller.run_instances(context,
            # run_instances expects encoded user data; it is decoded in the
            # get_metadata call. autorun.sh also decodes the zip file, hence
            # the double encoding
user_data=zippy.read().encode("base64").encode("base64"),
max_count=1,
min_count=1,
instance_type='m1.tiny',
image_id=FLAGS.vpn_image_id,
key_name=key_name,
security_groups=["vpn-secgroup"])
zippy.close()
def setup_keypair(self, user_id, project_id):
key_name = '%s%s' % (project_id, FLAGS.vpn_key_suffix)
try:
private_key, fingerprint = self.manager.generate_key_pair(user_id, key_name)
try:
key_dir = os.path.join(FLAGS.keys_path, user_id)
if not os.path.exists(key_dir):
os.makedirs(key_dir)
with open(os.path.join(key_dir, '%s.pem' % key_name),'w') as f:
f.write(private_key)
except:
pass
except exception.Duplicate:
pass
return key_name
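    # NOTE (inferred from the except clauses above): exception.Duplicate means
    # the keypair already exists, and the bare except swallows errors while
    # writing the private key file, so repeated VPN launches stay idempotent.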
# def setup_secgroups(self, username):
# conn = self.euca.connection_for(username)
# try:
# secgroup = conn.create_security_group("vpn-secgroup", "vpn-secgroup")
# secgroup.authorize(ip_protocol = "udp", from_port = "1194", to_port = "1194", cidr_ip = "0.0.0.0/0")
# secgroup.authorize(ip_protocol = "tcp", from_port = "80", to_port = "80", cidr_ip = "0.0.0.0/0")
# secgroup.authorize(ip_protocol = "tcp", from_port = "22", to_port = "22", cidr_ip = "0.0.0.0/0")
# except:
# pass
|
apache-2.0
| 5,067,773,451,058,629,000
| 37.46
| 114
| 0.629485
| false
| 3.694524
| false
| false
| false
|
imec-myhdl/pycontrol-gui
|
BlockEditor/supsisim/pysim.py
|
1
|
1404
|
#!/usr/bin/python
# aim for python 2/3 compatibility
from __future__ import (division, print_function, absolute_import,
unicode_literals)
from Qt import QtGui, QtWidgets, QtCore # see https://github.com/mottosso/Qt.py
import sys
import os
#try:
# sip.setapi('QString', 1)
#except ValueError:
# sip.setapi('QString', 2)
import threading
#import autopep8
from supsisim.pyEdit import SupsiSimMainWindow
from supsisim.library import Library
class supsisimul(threading.Thread):
def __init__(self, filename = 'untitled', runflag = False):
threading.Thread.__init__(self)
if filename!='untitled':
self.fname = QtCore.QFileInfo(filename)
self.mypath = str(self.fname.absolutePath())
self.fname = str(self.fname.baseName())
else:
self.fname = 'untitled'
self.mypath = os.getcwd()
self.runflag = runflag
def run(self):
app = QtWidgets.QApplication(sys.argv)
library = Library()
library.setGeometry(20, 100, 400, 768)
library.show()
main = SupsiSimMainWindow(library, self.fname, self.mypath, self.runflag)
main.setGeometry(500,100,1024,768)
main.show()
ret = app.exec_()
app.deleteLater()
def supsim(fn = 'untitled'):
th = supsisimul(fn)
th.start()
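# Illustrative entry point (usage assumed, not part of the module's API docs):
#   from supsisim.pysim import supsim
#   supsim('mymodel')  # opens the block library and the editor for 'mymodel'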
|
lgpl-2.1
| 6,907,061,759,329,544,000
| 26.529412
| 81
| 0.608974
| false
| 3.646753
| false
| false
| false
|
ddaan/django-arctic
|
tests/test_layout_mixin.py
|
1
|
4936
|
import pytest
from collections import OrderedDict
from arctic.mixins import LayoutMixin
from articles.forms import ArticleForm
from tests.conftest import get_form
from tests.factories import ArticleFactory
@pytest.fixture
def layout():
class Layout(LayoutMixin):
layout = None
def __init__(self):
self.object = ArticleFactory()
self.form = ArticleForm(instance=self.object)
self.get_form = get_form(self.form)
return Layout()
pytestmark = pytest.mark.django_db
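# Layout strings use the 'field|columns' form (e.g. 'title|8' renders the
# title field in an 8-column cell); fields without an explicit width share
# the remaining columns, as the assertions below illustrate (a 12-column
# grid is assumed).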
def test_layout_example_1(layout):
layout.layout = ['title|8']
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '8'
def test_layout_example_2(layout):
layout.layout = [['title|3', 'title', 'title']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '4'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '5'
def test_layout_example_3a(layout):
layout.layout = [['title|3', 'title', 'title', 'category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '2'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '2'
assert layout[0]['rows'][0][3]['name'] == 'category'
assert layout[0]['rows'][0][3]['column'] == '2'
assert layout[0]['rows'][0][4]['name'] == 'category'
assert layout[0]['rows'][0][4]['column'] == '3'
def test_layout_example_3b(layout):
layout.layout = ['title|3', 'title', 'title', ['category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '3'
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2]['name'] == 'title'
assert layout[0]['rows'][2]['column'] is None
assert layout[0]['rows'][3][0]['name'] == 'category'
assert layout[0]['rows'][3][0]['column'] == '6'
assert layout[0]['rows'][3][1]['name'] == 'category'
assert layout[0]['rows'][3][1]['column'] == '6'
def test_layout_example_4(layout):
layout.layout = OrderedDict([('-fieldset',
['title',
'title',
['category', 'updated_at|4']]),
('fieldset2|test description',
[['title|7', 'category']]),
('fieldset3',
['published'])])
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] == 'fieldset'
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is True
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] is None
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2][0]['name'] == 'category'
assert layout[0]['rows'][2][0]['column'] == '8'
assert layout[0]['rows'][2][1]['name'] == 'updated_at'
assert layout[0]['rows'][2][1]['column'] == '4'
assert layout[1]['fieldset']['title'] == 'fieldset2'
assert layout[1]['fieldset']['description'] == 'test description'
assert layout[1]['fieldset']['collapsible'] is False
assert layout[1]['rows'][0][0]['name'] == 'title'
assert layout[1]['rows'][0][0]['column'] == '7'
assert layout[1]['rows'][0][1]['name'] == 'category'
assert layout[1]['rows'][0][1]['column'] == '5'
assert layout[2]['fieldset']['title'] == 'fieldset3'
assert layout[2]['fieldset']['description'] is None
assert layout[2]['fieldset']['collapsible'] is False
assert layout[2]['rows'][0]['name'] == 'published'
assert layout[2]['rows'][0]['column'] is None
|
mit
| 3,694,713,306,793,887,000
| 38.174603
| 75
| 0.569692
| false
| 3.556196
| true
| false
| false
|
rdo-management/tuskar-ui
|
tuskar_ui/utils/utils.py
|
1
|
4279
|
# -*- coding: utf8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import csv
from itertools import izip
import re
from django.utils.translation import ugettext_lazy as _
CAMEL_RE = re.compile(r'([A-Z][a-z]+|[A-Z]+(?=[A-Z\s]|$))')
def de_camel_case(text):
"""Convert CamelCase names to human-readable format."""
return ' '.join(w.strip() for w in CAMEL_RE.split(text) if w.strip())
def list_to_dict(object_list, key_attribute='id'):
"""Converts an object list to a dict
:param object_list: list of objects to be put into a dict
:type object_list: list
:param key_attribute: object attribute used as index by dict
:type key_attribute: str
:return: dict containing the objects in the list
:rtype: dict
"""
return dict((getattr(o, key_attribute), o) for o in object_list)
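# Illustrative use of list_to_dict (object names hypothetical):
#   nodes_by_uuid = list_to_dict(api.node.list(request), key_attribute='uuid')
#   nodes_by_uuid['some-uuid']  # -> the node object with that uuid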
def length(iterator):
"""A length function for iterators
Returns the number of items in the specified iterator. Note that this
function consumes the iterator in the process.
"""
return sum(1 for _item in iterator)
def check_image_type(image, type):
"""Check if image 'type' property matches passed-in type.
If image has no 'type' property' return True, as we cannot
be sure what type of image it is.
"""
return (image.properties.get('type', type) == type)
def filter_items(items, **kwargs):
"""Filters the list of items and returns the filtered list.
Example usage:
>>> class Item(object):
... def __init__(self, index):
... self.index = index
... def __repr__(self):
... return '<Item index=%d>' % self.index
>>> items = [Item(i) for i in range(7)]
>>> list(filter_items(items, index=1))
[<Item index=1>]
>>> list(filter_items(items, index__in=(1, 2, 3)))
[<Item index=1>, <Item index=2>, <Item index=3>]
>>> list(filter_items(items, index__not_in=(1, 2, 3)))
[<Item index=0>, <Item index=4>, <Item index=5>, <Item index=6>]
"""
for item in items:
for name, value in kwargs.items():
if name.endswith('__in'):
if getattr(item, name[:-len('__in')]) not in value:
break
elif name.endswith('__not_in'):
if getattr(item, name[:-len('__not_in')]) in value:
break
else:
if getattr(item, name) != value:
break
else:
yield item
def safe_int_cast(value):
try:
return int(value)
except (TypeError, ValueError):
return 0
def parse_csv_file(csv_file):
"""Parses given CSV file.
If there is no error, it returns list of dicts. When something went wrong,
list is empty, but warning contains appropriate information about
possible problems.
"""
parsed_data = []
for row in csv.reader(csv_file):
try:
driver = row[0].strip()
except IndexError:
raise ValueError(_("Unable to parse the CSV file."))
if driver in ('pxe_ssh', 'pxe_ipmitool'):
node_keys = (
'mac_addresses', 'cpu_arch', 'cpus', 'memory_mb', 'local_gb')
if driver == 'pxe_ssh':
driver_keys = (
'driver', 'ssh_address', 'ssh_username',
'ssh_key_contents'
)
elif driver == 'pxe_ipmitool':
driver_keys = (
'driver', 'ipmi_address', 'ipmi_username',
'ipmi_password'
)
node = dict(izip(driver_keys+node_keys, row))
parsed_data.append(node)
else:
raise ValueError(_("Unknown driver: %s.") % driver)
return parsed_data
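# Example row layout parse_csv_file expects (values illustrative):
#   pxe_ipmitool,10.0.0.5,admin,secret,00:11:22:33:44:55,x86_64,4,8192,100
# i.e. the driver fields first, followed by mac_addresses, cpu_arch, cpus,
# memory_mb and local_gb, matching driver_keys + node_keys above.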
|
apache-2.0
| -1,079,681,464,923,149,700
| 29.564286
| 78
| 0.580743
| false
| 3.875906
| false
| false
| false
|
nonZero/OpenCommunity
|
src/issues/views.py
|
1
|
44889
|
from django.contrib.auth.views import redirect_to_login
from django.db.models.aggregates import Max
from django.http.response import HttpResponse, HttpResponseBadRequest, \
HttpResponseForbidden
from django.shortcuts import get_object_or_404, render, redirect
from django.template import RequestContext
from django.template.loader import render_to_string
from django.views.generic import ListView
from django.views.generic.base import View
from django.views.generic.detail import DetailView, SingleObjectMixin
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from issues import models, forms
from issues.forms import CreateIssueForm, CreateProposalForm, EditProposalForm, \
UpdateIssueForm, EditProposalTaskForm, AddAttachmentForm, \
UpdateIssueAbstractForm, CreateProposalVoteArgumentForm
from issues.models import ProposalType, Issue, IssueStatus, ProposalVote, \
Proposal, ProposalVoteBoard, ProposalVoteValue, VoteResult, ProposalVoteArgument, ProposalVoteArgumentRanking
from meetings.models import Meeting
from oc_util.templatetags.opencommunity import minutes, board_voters_on_proposal
from ocd.base_views import AjaxFormView, json_response, CommitteeMixin
from ocd.validation import enhance_html
from ocd.base_managers import ConfidentialSearchQuerySet
from shultze_vote import send_issue_ranking
from acl.default_roles import DefaultGroups
from users.permissions import has_community_perm, has_committee_perm
from haystack.inputs import AutoQuery
import json
import mimetypes
from datetime import date
class IssueMixin(CommitteeMixin):
model = models.Issue
def get_queryset(self):
return self.model.objects.object_access_control(
user=self.request.user,
committee=self.committee).filter(committee=self.committee,
active=True)
class ProposalMixin(IssueMixin):
model = models.Proposal
def get_queryset(self):
return self.model.objects.object_access_control(
user=self.request.user,
committee=self.committee).filter(issue=self.issue,
active=True)
@property
def issue(self):
return get_object_or_404(models.Issue, committee=self.committee,
pk=self.kwargs['issue_id'])
def _can_complete_task(self):
o = self.get_object()
if self.request.user == o.assigned_to_user:
return True
return has_community_perm(self.request.user, self.committee.community,
'issues.edittask_proposal')
class IssueList(IssueMixin, ListView):
required_permission = 'viewopen_issue'
def get_queryset(self):
return super(IssueList, self).get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-created_at')
def get_context_data(self, **kwargs):
d = super(IssueList, self).get_context_data(**kwargs)
available_ids = set([x.id for x in self.get_queryset()])
if d['committee'].issue_ranking_enabled:
d['sorted_issues'] = super(IssueList, self).get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-order_by_votes')
if 'vote_ranking' in d['cperms'] and self.request.user.is_authenticated():
my_ranking = models.IssueRankingVote.objects.filter(
voted_by=self.request.user,
issue__committee_id=d['committee'].id) \
.order_by('rank')
d['my_vote'] = [i.issue for i in my_ranking if i.issue.active and \
i.issue.status != IssueStatus.ARCHIVED]
d['my_non_ranked'] = [i for i in self.get_queryset() \
if i not in d['my_vote']]
for obj in self.object_list:
obj.restricted_proposals = \
obj.proposals.object_access_control(
user=self.request.user, committee=self.committee)
for ai in obj.agenda_items.all():
ai.restricted_proposals = ai.proposals(
user=self.request.user, committee=self.committee)
ai.restricted_accepted_proposals = ai.accepted_proposals(
user=self.request.user, committee=self.committee)
return d
required_permission_for_post = 'vote_ranking'
def post(self, request, *args, **kwargs):
# TODO: check post permission for user and for each issue
send_issue_ranking(request)
return json_response({'res': 'ok', })
class IssueDetailView(IssueMixin, DetailView):
def get_required_permission(self):
o = self.get_object()
return 'viewclosed_issue' if o.is_published else 'viewopen_issue'
def get_context_data(self, **kwargs):
d = super(IssueDetailView, self).get_context_data(**kwargs)
m_id = self.request.GET.get('m_id', None)
d['form'] = forms.CreateIssueCommentForm()
d['proposal_form'] = forms.CreateProposalForm(committee=self.committee)
if m_id:
d['meeting'] = get_object_or_404(Meeting, id=m_id,
committee=self.committee)
a = d['meeting'].agenda.object_access_control(
user=self.request.user, committee=self.committee).all()
d['meeting_active_issues'] = [ai.issue for ai in a if
ai.issue.active]
else:
d['meeting'] = None
if self.request.GET.get('s', None) == '1':
d['all_issues'] = self.get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-created_at')
o = self.get_object()
if o.is_current and self.request.user in o.committee.upcoming_meeting_participants.all() and has_committee_perm(
self.request.user, self.committee, 'proposal_board_vote_self'):
d['can_board_vote_self'] = True
d['proposals'] = self.object.proposals.object_access_control(
user=self.request.user, committee=self.committee).open()
d['upcoming_issues'] = self.object.committee.upcoming_issues(
user=self.request.user, committee=self.committee)
d['agenda_items'] = self.object.agenda_items.all()
for ai in d['agenda_items']:
ai.accepted_proposals = ai.accepted_proposals(
user=self.request.user, committee=self.committee)
ai.rejected_proposals = ai.rejected_proposals(
user=self.request.user, committee=self.committee)
ai.proposals = ai.proposals(
user=self.request.user, committee=self.committee)
return d
required_permission_for_post = 'add_issuecomment'
def post(self, request, *args, **kwargs):
form = forms.CreateIssueCommentForm(request.POST)
if not form.is_valid():
return HttpResponseBadRequest()
i = self.get_object()
comment_id = request.POST.get('comment_id', None)
try:
c = i.comments.get(pk=int(comment_id))
c.content = enhance_html(form.cleaned_data['content'])
c.save()
return json_response({'comment_id': c.id})
except:
c = i.comments.create(content=enhance_html(form.cleaned_data['content']),
created_by=request.user)
return json_response({'comment_id': c.id})
# if comment_id == '':
# c = i.comments.create(content=enhance_html(form.cleaned_data['content']),
# created_by=request.user)
#
# self.object = i # this makes the next line work
# context = self.get_context_data(object=i, c=c)
# return render(request, 'issues/_comment.html', context)
# else:
# c = i.comments.get(pk=int(comment_id))
# c.content=enhance_html(form.cleaned_data['content'])
# return json_response({'comment_id': c.id})
class IssueCommentMixin(CommitteeMixin):
model = models.IssueComment
def get_required_permission(self):
o = self.get_object()
return 'editopen_issuecomment' if o.issue.is_upcoming else 'editclosed_issuecomment'
def get_queryset(self):
return models.IssueComment.objects.filter(issue__committee=self.committee)
class IssueCommentDeleteView(IssueCommentMixin, DeleteView):
def post(self, request, *args, **kwargs):
o = self.get_object()
o.active = 'undelete' in request.POST
o.save()
return HttpResponse(int(o.active))
class IssueCommentEditView(IssueCommentMixin, UpdateView):
form_class = forms.EditIssueCommentForm
def form_valid(self, form):
c = self.get_object()
c.update_content(form.instance.version, self.request.user,
form.cleaned_data['content'])
context = self.get_context_data(object=c.issue, c=c)
return render(self.request, 'issues/_comment.html', context)
def form_invalid(self, form):
return HttpResponse("")
def get_form_kwargs(self):
d = super(IssueCommentEditView, self).get_form_kwargs()
d['prefix'] = 'ic%d' % self.get_object().id
return d
class IssueCreateView(AjaxFormView, IssueMixin, CreateView):
form_class = CreateIssueForm
template_name = "issues/issue_create_form.html"
reload_on_success = True
def get_required_permission(self):
return 'editagenda_community' if self.upcoming else 'add_issue'
upcoming = False
def form_valid(self, form):
form.instance.committee = self.committee
form.instance.created_by = self.request.user
form.instance.status = IssueStatus.IN_UPCOMING_MEETING if \
self.upcoming else IssueStatus.OPEN
if self.upcoming:
max_upcoming = Issue.objects.filter(
committee=self.committee).aggregate(x=Max(
'order_in_upcoming_meeting'))['x']
form.instance.order_in_upcoming_meeting = max_upcoming + 1 \
if max_upcoming else 1
return super(IssueCreateView, self).form_valid(form)
def get_form_kwargs(self):
kwargs = super(IssueCreateView, self).get_form_kwargs()
kwargs.update({'committee': self.committee})
return kwargs
def get_success_url(self):
url = super(IssueCreateView, self).get_success_url()
if not self.upcoming:
url += '?s=1'
return url
class IssueEditView(AjaxFormView, IssueMixin, UpdateView):
required_permission = 'editopen_issue'
form_class = UpdateIssueForm
reload_on_success = True
def form_valid(self, form):
if self.reload_on_success:
return super(IssueEditView, self).form_valid(form)
else:
self.object = form.save()
return render(self.request, 'issues/_issue_title.html',
self.get_context_data())
def get_form_kwargs(self):
kwargs = super(IssueEditView, self).get_form_kwargs()
kwargs.update({'committee': self.committee})
return kwargs
class IssueEditAbstractView(AjaxFormView, IssueMixin, UpdateView):
required_permission = 'editopen_issue'
form_class = UpdateIssueAbstractForm
def form_valid(self, form):
self.object = form.save()
return render(self.request, 'issues/_issue-abstract.html',
self.get_context_data())
class IssueCompleteView(IssueMixin, SingleObjectMixin, View):
required_permission = 'add_meeting'
def post(self, request, *args, **kwargs):
o = self.get_object()
# TODO: verify that issue is in active meeting
if request.POST.get('complete') == '1':
o.completed = True
elif request.POST.get('undo_complete') == '1':
o.completed = False
if o.status == IssueStatus.ARCHIVED:
o.status = o.statuses.OPEN
elif request.POST.get('archive') == '1':
# TODO: check if issue can be closed
o.completed = True
o.status = IssueStatus.ARCHIVED
o.save()
return HttpResponse("-")
class IssueSetLengthView(IssueMixin, SingleObjectMixin, View):
required_permission = 'editagenda_community'
def post(self, request, *args, **kwargs):
o = self.get_object()
s = request.POST.get('length', '').strip()
if s:
try:
t = int(s)
if not 0 <= t <= 360:
raise ValueError('Illegal Value')
except ValueError:
return HttpResponseBadRequest("Bad Request")
else:
t = None
o.length_in_minutes = t
o.save()
return HttpResponse(minutes(t) or "--:--")
class IssueDeleteView(AjaxFormView, IssueMixin, DeleteView):
def get_required_permission(self):
o = self.get_object()
if o.is_published:
return 'editclosed_issue'
return 'add_issue' if o.created_by == self.request.user else 'editopen_issue'
def get_success_url(self):
return "" if self.issue.active else "-"
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.active = False
o.save()
o.active_proposals().update(active=False)
return HttpResponse("-")
class AttachmentCreateView(AjaxFormView, IssueMixin, CreateView):
model = models.IssueAttachment
form_class = AddAttachmentForm
required_permission = 'editopen_issue'
reload_on_success = True
@property
def issue(self):
return get_object_or_404(models.Issue, committee=self.committee, pk=self.kwargs['pk'])
def form_valid(self, form):
form.instance.created_by = self.request.user
form.instance.issue = self.issue
return super(AttachmentCreateView, self).form_valid(form)
class AttachmentDeleteView(AjaxFormView, CommitteeMixin, DeleteView):
model = models.IssueAttachment
required_permission = 'editopen_issue'
@property
def issue(self):
return get_object_or_404(models.Issue, pk=self.kwargs['issue_id'])
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.file.delete(save=False)
o.delete()
return HttpResponse("")
class AttachmentDownloadView(CommitteeMixin, SingleObjectMixin, View):
model = models.IssueAttachment
def get_required_permission(self):
o = self.get_object().issue
return 'viewclosed_issue' if o.is_published else 'viewopen_issue'
def get(self, request, *args, **kwargs):
o = self.get_object()
filename = o.file.name.split('/')[-1]
mime_type = mimetypes.guess_type(filename, True)[0] or "text/plain"
response = HttpResponse(o.file, content_type=mime_type)
response['Content-Disposition'] = 'attachment; filename=%s' % filename.encode('utf-8')
return response
class ProposalCreateView(AjaxFormView, ProposalMixin, CreateView):
reload_on_success = True
def get_required_permission(self):
return 'editclosedproposal' if self.issue.status == IssueStatus.ARCHIVED else 'add_proposal'
form_class = CreateProposalForm
def get_context_data(self, **kwargs):
context = super(ProposalCreateView, self).get_context_data(**kwargs)
context['issue'] = self.issue
return context
def form_valid(self, form):
form.instance.created_by = self.request.user
form.instance.issue = self.issue
if self.reload_on_success:
return super(ProposalCreateView, self).form_valid(form)
else:
self.object = form.save()
return render(self.request, 'issues/_proposal.html',
self.get_context_data(proposal=self.object))
def get_success_url(self):
return self.issue.get_absolute_url()
def get_form_kwargs(self):
d = super(ProposalCreateView, self).get_form_kwargs()
d['prefix'] = 'proposal'
d['committee'] = self.committee
d['initial'] = {'issue': self.issue}
return d
class ProposalDetailView(ProposalMixin, DetailView):
def get_required_permission(self):
p = self.get_object()
return 'viewclosed_proposal' if p.decided_at_meeting else 'viewopen_proposal'
def get_required_permission_for_post(self):
p = self.get_object()
return 'acceptclosed_proposal' if p.decided_at_meeting else 'acceptopen_proposal'
def board_votes_dict(self):
total_votes = 0
votes_dict = {'sums': {}, 'total': total_votes, 'per_user': {}}
pro_count = 0
con_count = 0
neut_count = 0
# Board vote permission
board_attending = self.get_object().issue.committee.meeting_participants()
for u in board_attending:
# check u has perm for board vote
vote = ProposalVoteBoard.objects.filter(proposal=self.get_object(), user=u)
if vote.exists():
votes_dict['per_user'][u] = vote[0]
if vote[0].value == 1:
pro_count += 1
total_votes += 1
elif vote[0].value == -1:
con_count += 1
total_votes += 1
elif vote[0].value == 0:
neut_count += 1
else:
votes_dict['per_user'][u] = None
neut_count += 1
votes_dict['sums']['pro_count'] = pro_count
votes_dict['sums']['con_count'] = con_count
votes_dict['sums']['neut_count'] = neut_count
votes_dict['total'] = total_votes
return votes_dict
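    # Shape of the dict board_votes_dict returns (derived from the code above):
    #   {'sums': {'pro_count': int, 'con_count': int, 'neut_count': int},
    #    'total': int, 'per_user': {user: ProposalVoteBoard or None}}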
def _init_board_votes(self, board_attending):
p = self.get_object()
for b in board_attending:
ProposalVoteBoard.objects.create(proposal=p, user=b,
voted_by_chairman=True)
def get_context_data(self, **kwargs):
"""add meeting for the latest straw voting result
add 'previous_res' var if found previous registered results for this meeting
"""
context = super(ProposalDetailView, self).get_context_data(**kwargs)
m_id = self.request.GET.get('m_id', None)
o = self.get_object()
if m_id:
context['meeting_context'] = get_object_or_404(Meeting, id=m_id,
committee=self.committee)
participants = context['meeting_context'].participants.all()
else:
context['meeting_context'] = None
participants = o.issue.committee.upcoming_meeting_participants.all()
try:
group = self.request.user.memberships.get(committee=self.issue.committee).default_group_name
except:
group = ""
board_votes = ProposalVoteBoard.objects.filter(proposal=o)
board_attending = board_voters_on_proposal(o)
is_current = o.issue.is_current
context['res'] = o.get_straw_results()
results = VoteResult.objects.filter(proposal=o) \
.order_by('-meeting__held_at')
if o.issue.is_upcoming and \
self.committee.upcoming_meeting_is_published and \
self.committee.straw_vote_ended:
context['meeting'] = self.committee.draft_meeting()
else:
if results.count():
context['meeting'] = results[0].meeting
else:
context['meeting'] = None
if not board_votes.exists():
self._init_board_votes(board_attending)
show_to_member = group == DefaultGroups.MEMBER and o.decided_at_meeting
show_to_board = (group == DefaultGroups.BOARD or \
group == DefaultGroups.SECRETARY) and \
(is_current or o.decided_at_meeting)
show_to_chairman = group == DefaultGroups.CHAIRMAN and o.decided
show_board_vote_result = o.register_board_votes and \
board_votes.exclude(
value=ProposalVoteValue.NEUTRAL).count() and \
(show_to_member or show_to_board or show_to_chairman)
context['issue_frame'] = self.request.GET.get('s', None)
context['board_attending'] = board_attending
context['user_vote'] = o.board_vote_by_member(self.request.user.id)
context['show_board_vote_result'] = show_board_vote_result
context['chairman_can_vote'] = is_current and not o.decided
context['board_votes'] = self.board_votes_dict()
context['can_board_vote_self'] = is_current and not o.decided and has_committee_perm(self.request.user,
self.committee,
'proposal_board_vote_self')\
and self.request.user in board_attending
rel_proposals = self.object.issue.proposals
context['proposals'] = rel_proposals.object_access_control(
user=self.request.user, committee=self.committee)
return context
def post(self, request, *args, **kwargs):
""" Used to change a proposal status (accept/reject)
or a proposal's property completed/not completed
"""
p = self.get_object()
v = request.POST.get('accepted', None)
if v:
v = int(v)
if v not in [
p.statuses.ACCEPTED,
p.statuses.REJECTED,
p.statuses.IN_DISCUSSION
]:
return HttpResponseBadRequest("Bad value for accepted POST parameter")
p.status = v
p.save()
return redirect(p)
class ProposalEditView(AjaxFormView, ProposalMixin, UpdateView):
form_class = EditProposalForm
reload_on_success = True
def get_required_permission(self):
o = self.get_object()
return 'editclosed_proposal' if o.decided_at_meeting else 'edittask_proposal'
def get_form_kwargs(self):
d = super(ProposalEditView, self).get_form_kwargs()
d['prefix'] = 'proposal'
d['committee'] = self.committee
return d
class ProposalEditTaskView(ProposalMixin, UpdateView):
form_class = EditProposalTaskForm
def get_queryset(self):
return super(ProposalEditTaskView, self).get_queryset().filter(type=ProposalType.TASK)
def get_required_permission(self):
o = self.get_object()
return 'editclosed_proposal' if o.decided_at_meeting else 'editopen_proposal'
class ProposalCompletedTaskView(ProposalMixin, UpdateView):
""" update a task as completed / un-completed
"""
def post(self, request, *args, **kwargs):
if not self._can_complete_task():
return HttpResponseForbidden("403 Unauthorized")
p = self.get_object()
completed = request.POST.get('completed', None)
if completed:
p.task_completed = completed == '1'
p.save()
return redirect(p)
class ProposalDeleteView(AjaxFormView, ProposalMixin, DeleteView):
def get_required_permission(self):
o = self.get_object()
if o.decided_at_meeting:
return 'editclosed_issue'
return 'add_proposal' if o.created_by == self.request.user else 'editopen_proposal'
def get_success_url(self):
return "" if self.issue.active else "-"
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.active = False
o.save()
return HttpResponse("-")
class VoteResultsView(CommitteeMixin, DetailView):
model = models.Proposal
def get(self, request, *args, **kwargs):
meeting = None
meeting_id = request.GET.get('meeting_id', None)
p = self.get_object()
if meeting_id:
meeting = get_object_or_404(Meeting, id=int(meeting_id))
res = p.get_straw_results(meeting.id)
else:
meeting = self.committee.draft_meeting()
res = p.get_straw_results()
panel = render_to_string('issues/_proposal_vote_results.html',
RequestContext(request, {
'meeting': meeting,
'res': res,
'proposal': p,
}))
return HttpResponse(panel)
class ProposalVoteMixin(CommitteeMixin):
VOTE_OK = 0
VOTE_VER_ERR = 1
VOTE_OVERRIDE_ERR = 2
def _do_vote(self, vote_class, proposal, user_id, value, is_board, voter_group):
if is_board:
# verify
if not voter_group or voter_group == DefaultGroups.MEMBER \
or proposal.decided:
return (None, self.VOTE_VER_ERR)
by_chairman = voter_group == DefaultGroups.CHAIRMAN
vote, created = vote_class.objects.get_or_create(proposal_id=proposal.id,
user_id=user_id)
if not created and by_chairman and not vote.voted_by_chairman:
# don't allow chairman vote override a board member existing vote!
return (vote, self.VOTE_OVERRIDE_ERR)
vote.value = value
if is_board:
vote.voted_by_chairman = by_chairman
vote.save()
return (vote, self.VOTE_OK)
def _vote_values_map(self, key):
vote_map = {
'pro': 1,
'con': -1,
'neut': 0,
'reset': -2,
}
        if not isinstance(key, int):
try:
return vote_map[key]
except KeyError:
return None
else:
for k, val in vote_map.items():
if key == val:
return k
return None
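    # _vote_values_map is deliberately bidirectional: 'pro' -> 1 and 1 -> 'pro',
    # so the same helper serializes stored vote values back to strings when
    # reporting override failures (see 'override_fail' below).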
class ProposalVoteView(ProposalVoteMixin, DetailView):
required_permission_for_post = 'vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
is_board = request.POST.get('board', False)
user_id = request.POST.get('user', request.user.id)
voter_id = request.user.id
voter_group = 'board' if has_committee_perm(request.user, self.committee, 'proposal_board_vote') else ''
# voter_group = request.user.get_default_group(self.committee.community) \
# if request.user.is_authenticated() \
# else ''
val = request.POST['val']
if is_board:
# vote for board member by chairman or board member
vote_class = ProposalVoteBoard
else:
# straw vote by member
vote_class = ProposalVote
proposal = self.get_object()
pid = proposal.id
vote_panel_tpl = 'issues/_vote_panel.html' if val == 'reset' \
else 'issues/_vote_reset_panel.html'
res_panel_tpl = 'issues/_board_vote_res.html' if is_board \
else 'issues/_vote_reset_panel.html'
vote_response = {
'result': 'ok',
'html': render_to_string(res_panel_tpl,
{
'proposal': proposal,
'committee': self.committee,
'vote_status': val,
}),
}
value = ''
if val == 'reset':
vote = get_object_or_404(vote_class,
proposal_id=pid, user_id=user_id)
vote.delete()
related_arguments = ProposalVoteArgumentRanking.objects.filter(user=request.user,
argument__proposal_vote__proposal=proposal)
if related_arguments.count():
related_arguments.delete()
vote_response['html'] = render_to_string(vote_panel_tpl,
{
'proposal': proposal,
'committee': self.committee
})
return json_response(vote_response)
else:
value = self._vote_values_map(val)
            if value is None:
return HttpResponseBadRequest('vote value not valid')
vote, valid = self._do_vote(vote_class, proposal, user_id, value,
is_board, voter_group)
if valid == ProposalVoteMixin.VOTE_OK:
vote_response['html'] = render_to_string(res_panel_tpl,
{
'proposal': proposal,
'committee': self.committee,
'vote_status': val,
'user': self.request.user
})
if is_board and voter_group == DefaultGroups.CHAIRMAN:
vote_response['sum'] = render_to_string('issues/_member_vote_sum.html',
{
'proposal': proposal,
'committee': self.committee,
'board_attending': board_voters_on_proposal(proposal),
})
else:
vote_response['result'] = 'err'
if valid == ProposalVoteMixin.VOTE_OVERRIDE_ERR:
vote_response['override_fail'] = [{'uid': user_id,
'val': self._vote_values_map(vote.value),
}]
return json_response(vote_response)
def get(self, request, *args, **kwargs):
voter_id = request.user.id
if not request.user.is_authenticated():
return redirect_to_login(request.build_absolute_uri())
is_board = request.GET.get('board', False)
voter_group = DefaultGroups.MEMBER
val = request.GET['val']
vote_class = ProposalVote
proposal = self.get_object()
value = self._vote_values_map(val)
        if value is None:
return redirect(proposal)
vote, valid = self._do_vote(vote_class, proposal, voter_id, value,
is_board, voter_group)
return redirect(proposal)
class MultiProposalVoteView(ProposalVoteMixin, DetailView):
required_permission_for_post = 'chairman_vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
voted_ids = json.loads(request.POST['users'])
proposal = self.get_object()
pid = proposal.id
voter_group = request.user.get_default_group(self.committee.community) \
if request.user.is_authenticated() \
else ''
val = request.POST['val']
value = self._vote_values_map(val)
        if value is None:
return HttpResponseBadRequest('vote value not valid')
vote_failed = []
for user_id in voted_ids:
vote, valid = self._do_vote(ProposalVoteBoard, proposal,
user_id, value, True, voter_group)
if valid == ProposalVoteMixin.VOTE_OVERRIDE_ERR:
vote_failed.append({'uid': user_id, 'val': self._vote_values_map(vote.value), })
return json_response({
'result': 'ok',
'html': render_to_string('issues/_vote_reset_panel.html',
{
'proposal': proposal,
'committee': self.committee,
}),
'override_fail': vote_failed,
'sum': render_to_string('issues/_member_vote_sum.html',
{
'proposal': proposal,
'committee': self.committee,
'board_attending': board_voters_on_proposal(proposal),
})
})
class RankingVoteMixin(ProposalVoteMixin):
VOTE_OK = 0
VOTE_VER_ERR = 1
def _do_vote(self, vote_class, argument, user_id, value):
try:
vote = vote_class.objects.get(argument_id=argument.id,
user_id=user_id)
if argument.proposal_vote.value != ProposalVote.objects.get(user__id=user_id,
proposal=argument.proposal_vote.proposal).value:
return vote, self.VOTE_VER_ERR
if vote.value == value:
vote.delete()
else:
vote.value = value
vote.save()
except vote_class.DoesNotExist:
vote = vote_class.objects.create(argument_id=argument.id,
user_id=user_id,
value=value)
except vote_class.MultipleObjectsReturned:
# Should not happen
raise
except ProposalVote.DoesNotExist:
# Should not happen
raise
return vote, self.VOTE_OK
def _vote_values_map(self, key):
vote_map = {
'up': 1,
'down': -1,
}
        if not isinstance(key, int):
try:
return vote_map[key]
except KeyError:
return None
else:
for k, val in vote_map.items():
if key == val:
return k
return None
class ArgumentRankingVoteView(RankingVoteMixin, DetailView):
required_permission_for_post = 'vote'
model = models.ProposalVoteArgument
def post(self, request, *args, **kwargs):
user_id = request.POST.get('user', request.user.id)
val = request.POST['val']
vote_class = ProposalVoteArgumentRanking
argument = self.get_object()
vote_response = {
'result': 'ok',
}
value = self._vote_values_map(val)
        if value is None:  # 'down' maps to -1, which is truthy; only an unknown key yields None
return HttpResponseBadRequest('vote value not valid')
vote, valid = self._do_vote(vote_class, argument, user_id, value)
if valid != RankingVoteMixin.VOTE_OK:
vote_response['result'] = 'err'
return HttpResponse(argument.argument_score)
def up_down_vote(request, committee_id, arg_id):
if request.method != "POST":
raise Exception("Must be POST")
argument = models.ProposalVoteArgument.objects.get(pk=arg_id)
val = request.POST['val']
value = 1 if val == 'up' else -1
try:
vote = models.ProposalVoteArgumentRanking.objects.get(argument=argument, user=request.user)
if vote.value == value:
vote.delete()
else:
vote.value = value
vote.save()
except ProposalVoteArgumentRanking.DoesNotExist:
obj = models.ProposalVoteArgumentRanking(argument=argument, user=request.user, value=value)
obj.save()
up_votes = ProposalVoteArgumentRanking.objects.filter(argument=argument, value=1).count()
down_votes = ProposalVoteArgumentRanking.objects.filter(argument=argument, value=-1).count()
total_votes = up_votes - down_votes
return HttpResponse(total_votes)
class ProposalVoteArgumentCreateView(CreateView):
model = models.ProposalVoteArgument
form_class = CreateProposalVoteArgumentForm
fields = ['argument', 'proposal_vote', 'created_by']
template_name = 'issues/proposal_vote_argument_form.html'
def get_success_url(self):
return ""
# def form_valid(self, form):
# form.instance.proposal_vote = ProposalVote.objects.get(pk=self.kwargs['vote_id'])
# form.instance.created_by = self.request.user
# return super(ProposalVoteArgumentCreateView, self).form_valid(form)
#
# def form_invalid(self, form):
# return HttpResponse("000")
def post(self, request, *args, **kwargs):
form = forms.CreateProposalVoteArgumentForm(request.POST)
if not form.is_valid():
return HttpResponseBadRequest()
proposal_vote = ProposalVote.objects.get(pk=self.kwargs['vote_id'])
a = ProposalVoteArgument.objects.create(argument=form.cleaned_data['argument'],
created_by=request.user, proposal_vote=proposal_vote)
self.object = a
context = self.get_context_data(arg=a, proposal=proposal_vote.proposal)
if proposal_vote.value == 1:
return render(request, 'issues/_pro_argument.html', context)
else:
return render(request, 'issues/_con_argument.html', context)
class ProposalMoreArgumentsView(DetailView):
model = models.Proposal
template_name = 'issues/_more_arguments_box.html'
def get_context_data(self, **kwargs):
d = super(ProposalMoreArgumentsView, self).get_context_data(**kwargs)
d['proposal'] = self.get_object()
d['user'] = self.request.user
return d
class ProposalArgumentsView(DetailView):
model = models.Proposal
template_name = 'issues/_vote_arguments.html'
context_object_name = 'proposal'
def get_context_data(self, **kwargs):
context = super(ProposalArgumentsView, self).get_context_data(**kwargs)
context['proposal'] = self.get_object()
context['user'] = self.request.user
return context
class ProposalVoteArgumentUpdateView(UpdateView):
model = models.ProposalVoteArgument
fields = ['argument', ]
def post(self, request, *args, **kwargs):
a = self.get_object()
if request.POST.get('argument', None):
a.argument = request.POST.get('argument')
a.save()
return HttpResponse(a.argument)
else:
return HttpResponse("")
class ProposalVoteArgumentDeleteView(DeleteView):
model = models.ProposalVoteArgument
success_url = ""
def post(self, request, *args, **kwargs):
o = self.get_object()
arg_id = o.id
o.delete()
return HttpResponse(arg_id)
def get_argument_value(request, committee_id, arg_id):
""" Return the value of the argument for editing """
arg_value = models.ProposalVoteArgument.objects.get(pk=arg_id)
return HttpResponse(arg_value.argument)
class ChangeBoardVoteStatusView(ProposalMixin, UpdateView):
required_permission_for_post = 'chairman_vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
p = self.get_object()
if request.POST.get('val', None):
p.register_board_votes = request.POST.get('val') == '1'
p.save()
return json_response({'result': 'ok'})
else:
return json_response({'result': 'err'})
class AssignmentsView(ProposalMixin, ListView):
required_permission = 'viewopen_issue'
template_name = 'issues/assignment_list.html'
paginate_by = 75
def __init__(self, **kwargs):
super(AssignmentsView, self).__init__(**kwargs)
self.status = ''
def _get_order(self):
order_by = self.request.GET.get('ord', 'date')
if order_by == 'date':
order_by = '-due_by'
return order_by
def _add_status_qs(self, sqs):
self.status = self.request.GET.get('status', '')
if self.status:
if self.status == 'completed':
sqs = sqs.filter(task_completed=True)
else:
sqs = sqs.filter(task_completed=False)
if self.status == 'opened':
sqs = sqs.exclude(due_by__lt=date.today())
elif self.status == 'late':
sqs = sqs.filter(due_by__lt=date.today())
return sqs
def get_queryset(self):
term = self.request.GET.get('q', '').strip()
sqs = ConfidentialSearchQuerySet().models(Proposal).object_access_control(
user=self.request.user, committee=self.committee).filter(
active=True, committee=self.committee.id,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.TASK).order_by(self._get_order())
sqs = self._add_status_qs(sqs)
if term:
sqs = sqs.filter(content=AutoQuery(term)) \
.filter_or(assignee__contains=term)
return sqs.load_all()
def get_context_data(self, **kwargs):
d = super(AssignmentsView, self).get_context_data(**kwargs)
search_query = self.request.GET.get('q', '').strip()
d['late'] = [p.id for p in list(self.get_queryset()) \
if not p.object.task_completed and p.due_by \
and p.due_by.date() < date.today()]
d['query'] = search_query
d['ord'] = self._get_order()
d['status'] = self.status
d['filter_as_link'] = d['is_paginated'] or d['status']
d['extra_arg'] = '&ord=' + d['ord'] + '&q=' + d['query'] + '&status=' + self.status
return d
class RulesMixin(CommitteeMixin):
def _get_rule_queryset(self):
qs = Proposal.objects.object_access_control(user=self.request.user,
committee=self.committee).filter(
active=True, issue__committee=self.committee,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.RULE)
return qs
class ProceduresView(RulesMixin, ProposalMixin, ListView):
required_permission = 'viewopen_issue'
template_name = 'issues/procedure_list.html'
context_object_name = 'procedure_list'
paginate_by = 75
def __init__(self, **kwargs):
self.order_by = 'date'
super(ProceduresView, self).__init__(**kwargs)
def get_queryset(self):
term = self.request.GET.get('q', '').strip()
if not term:
# try search by tag
term = self.request.GET.get('t', '').strip()
self.order_by = self.request.GET.get('ord', 'date')
ord_term = '-decided_at' if self.order_by == 'date' else 'title'
sqs = ConfidentialSearchQuerySet().object_access_control(
user=self.request.user, committee=self.committee).filter(
active=True, committee=self.committee.id,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.RULE).order_by(ord_term)
if term:
sqs = sqs.filter(content=AutoQuery(term))
return sqs.load_all()
def get_context_data(self, **kwargs):
def _sort_by_popularity(a, b):
return cmp(a[1], b[1])
d = super(ProceduresView, self).get_context_data(**kwargs)
alltags = {}
for p in self._get_rule_queryset():
for t in p.tags.names():
n = alltags.setdefault(t, 0)
alltags[t] = n + 1
sorted_tags = sorted(alltags.items(), _sort_by_popularity, reverse=True)
search_query = self.request.GET.get('q', '').strip()
tag_query = self.request.GET.get('t', '').strip()
d['sorted_tags'] = sorted_tags
d['query'] = search_query or tag_query
d['extra_arg'] = '&ord=' + self.order_by + '&q=' + d['query']
d['ord'] = self.order_by
d['active_tag'] = tag_query
d['tags_as_links'] = (not search_query and d['is_paginated']) or len(d['object_list']) == 0
return d
class AutoCompleteTagView(CommitteeMixin, View):
required_permission = 'editopen_issue'
def get(self, request, *args, **kwargs):
tag = request.GET.get('tag', '')
tag = tag.split(',')[-1].strip()
        # print 'T: ', tag  # debug leftover; disabled
if not tag:
return HttpResponse(json.dumps([]))
json_tags = []
tags = set()
proposals = Proposal.objects.filter(
active=True, issue__committee=self.committee,
type=ProposalType.RULE)
for p in proposals:
tags.update(t for t in p.tags.names() if t.startswith(tag))
for t in tags:
json_tags.append({'tokens': [t, ], 'value': t})
# context = self.get_context_data(object_list=proposals)
        return HttpResponse(json.dumps(json_tags), content_type='application/json')
|
bsd-3-clause
| 5,895,209,078,495,821,000
| 37.366667
| 121
| 0.57239
| false
| 3.975996
| false
| false
| false
|
ctogle/dilapidator
|
test/geometry/vec3_tests.py
|
1
|
16823
|
import dilap.geometry.tools as dpr
from dilap.geometry.vec3 import vec3
from dilap.geometry.quat import quat
import dilap.core.plotting as dtl
import matplotlib.pyplot as plt
import unittest,numpy,math,random
#python3 -m unittest discover -v ./ "*tests.py"
class test_vec3(unittest.TestCase):
# given a vec3, an op, and a res, verify that the op
# does not return an independent object, and that the result is correct
# NOTE: this is for methods which return vec3 objects
def same(self,op,one,res,*args,**kwargs):
self.assertTrue(one is one)
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertTrue(opres is one)
self.assertEqual(one,res)
# given a vec3, an op, and a res, verify that the op
# does return an independent object, and that the result is correct
# NOTE: this is for methods which return vec3 objects
def diff(self,op,one,res,*args,**kwargs):
self.assertTrue(one is one)
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertFalse(opres is one)
self.assertEqual(opres,res)
# given a vec3, an op, and a res,
# verify that the op does return the correct result
# verify the op does not modify the input vector
def comp(self,op,one,res,*args,**kwargs):
cp = one.cp()
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertTrue(dpr.isnear(opres,res))
self.assertEqual(one,cp)
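    # e.g. self.diff('cp', v, v) asserts cp returns a new-but-equal object,
    # self.comp('d', v, 0, v) asserts d leaves its operand untouched, and
    # self.same('flp', vec3(1,0,0), vec3(-1,0,0)) would assert an in-place op
    # (flp's in-place behavior inferred from its chained use in test_crs).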
def setUp(self):
self.origin = vec3(0,0,0)
self.one = vec3(1,1,1)
self.x = vec3(1,0,0)
self.y = vec3(0,1,0)
self.z = vec3(0,0,1)
self.basis = [self.x,self.y,self.z]
rd = random.random
self.r1 = vec3(rd()*10,rd()*10,rd()*10)
self.r2 = vec3(rd()*10,rd()*10,rd()*10)
self.r3 = vec3(rd()*10,rd()*10,rd()*10)
self.r4 = vec3(rd()*10,rd()*10,rd()*10)
self.rds = [self.r1,self.r2,self.r3,self.r4]
self.each = [self.origin,self.one]+self.basis+self.rds
def test_cp(self):
for e in self.each:self.diff('cp',e,e)
def test_cpxy(self):
for e in self.each:self.diff('cpxy',e,vec3(e.x,e.y,0))
#def test_cpr(self):
#def test_cpf(self):
def test_d(self):
for e in self.each:self.comp('d',e,e.mag(),self.origin)
for e in self.each:self.comp('d',e,0,e)
self.comp('d',self.x,math.sqrt(2),self.y)
self.comp('d',self.y,math.sqrt(2),self.z)
self.comp('d',self.z,math.sqrt(2),self.x)
def test_dxy(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,1,2),vec3(1,2,1),vec3(1,2,4)
self.assertTrue(dpr.isnear(v1.dxy(v1),0))
self.assertTrue(dpr.isnear(v1.dxy(v2),0))
self.assertTrue(dpr.isnear(v1.dxy(v3),1))
self.assertTrue(dpr.isnear(v1.dxy(v4),1))
self.assertTrue(dpr.isnear(v2.dxy(v3),1))
self.assertTrue(dpr.isnear(v2.dxy(v4),1))
self.assertTrue(dpr.isnear(v3.dxy(v4),0))
def test_ang(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertTrue(dpr.isnear(v1.ang(v1),0))
self.assertTrue(dpr.isnear(v1.ang(v2),dpr.PI2))
self.assertTrue(dpr.isnear(v2.ang(v1),dpr.PI2))
self.assertTrue(dpr.isnear(v1.ang(v3),3*dpr.PI4))
self.assertTrue(dpr.isnear(v3.ang(v1),3*dpr.PI4))
self.assertTrue(dpr.isnear(v1.ang(v4),numpy.arctan(1.0/math.sqrt(2))))
self.assertTrue(dpr.isnear(v4.ang(v1),numpy.arctan(1.0/math.sqrt(2))))
v1.ang(vec3(0,0,0))
def test_sang(self):
p1,p2,p3,p4 = vec3(1,1,0),vec3(0,1,0),vec3(0,-1,0),vec3(0,0,1)
pn = vec3(0,0,1)
self.assertEqual(dpr.isnear(p1.sang(p2,pn), dpr.PI4),1)
self.assertEqual(dpr.isnear(p2.sang(p1,pn), dpr.PI4),0)
self.assertEqual(dpr.isnear(p2.sang(p1,pn),-dpr.PI4),1)
self.assertEqual(dpr.isnear(p2.sang(p3,pn), dpr.PI ),1)
self.assertEqual(dpr.isnear(p3.sang(p1,pn),dpr.threePI4),1)
def test_angxy(self):
v1,v2,v3,v4 = vec3(1,1,2),vec3(-1,1,-1),vec3(-1,0,0),vec3(1,1,1)
self.assertTrue(dpr.isnear(v1.angxy(v1),0))
self.assertTrue(dpr.isnear(v1.angxy(v2),dpr.PI2))
self.assertTrue(dpr.isnear(v2.angxy(v1),dpr.PI2))
self.assertTrue(dpr.isnear(v1.angxy(v3),3*dpr.PI4))
self.assertTrue(dpr.isnear(v3.angxy(v1),3*dpr.PI4))
self.assertTrue(dpr.isnear(v1.angxy(v4),0))
self.assertTrue(dpr.isnear(v4.angxy(v1),0))
'''#
def test_sang_xy(self):
p1,p2 = vec3(1,1,1),vec3(0,1,0)
meth,nr = gtl.sang_xy,gtl.isnear
self.assertEqual(nr(meth(p1,p2), gtl.PI4),1)
self.assertEqual(nr(meth(p2,p1), gtl.PI4),0)
self.assertEqual(nr(meth(p2,p1),-gtl.PI4),1)
def test_xang_xy(self):
p1,p2,p3 = vec3(1, 1,1),vec3(0, 1,0),vec3(0,-1,0)
meth,nr = gtl.xang_xy,gtl.isnear
self.assertEqual(nr(meth(p1),gtl.PI4),1)
self.assertEqual(nr(meth(p2),gtl.PI4),0)
self.assertEqual(nr(meth(p2),gtl.PI2),1)
self.assertEqual(nr(meth(p3),gtl.threePI2),1)
'''#
def test_dot(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertEqual(dpr.isnear(v1.dot(v1), 2),1)
self.assertEqual(dpr.isnear(v1.dot(v2), 0),1)
self.assertEqual(dpr.isnear(v1.dot(v3),-1),1)
self.assertEqual(dpr.isnear(v1.dot(v4), 2),1)
self.assertEqual(dpr.isnear(v2.dot(v2), 2),1)
self.assertEqual(dpr.isnear(v2.dot(v3), 1),1)
self.assertEqual(dpr.isnear(v2.dot(v4), 0),1)
self.assertEqual(dpr.isnear(v3.dot(v3), 1),1)
self.assertEqual(dpr.isnear(v3.dot(v4),-1),1)
self.assertEqual(dpr.isnear(v4.dot(v4), 3),1)
def test_crs(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertEqual(v1.crs(v1),vec3(0,0,0))
self.assertEqual(v2.crs(v2),vec3(0,0,0))
self.assertEqual(v3.crs(v3),vec3(0,0,0))
self.assertEqual(v4.crs(v4),vec3(0,0,0))
self.assertEqual(v1.crs(v2),vec3(0,0,2))
self.assertEqual(v1.crs(v3),vec3(0,0,1))
self.assertEqual(v1.crs(v4),vec3(1,-1,0))
self.assertEqual(v2.crs(v3),vec3(0,0,1))
self.assertEqual(v2.crs(v4),vec3(1,1,-2))
self.assertEqual(v3.crs(v4),vec3(0,1,-1))
self.assertEqual(v1.crs(v2),v2.crs(v1).flp())
self.assertEqual(v1.crs(v3),v3.crs(v1).flp())
self.assertEqual(v1.crs(v4),v4.crs(v1).flp())
def test_prj(self):
p1,pn1 = vec3(-1,1,0),vec3(-1,0,0)
p2,pn2 = vec3(3,-5,2),vec3(0,1,0)
v1,v2,v3 = vec3(1,1,0),vec3(2,-10,5),vec3(0,1,-1)
self.assertEqual(v1.cp().prj(p1,pn1),vec3(-1,1,0))
self.assertEqual(v2.cp().prj(p1,pn1),vec3(-1,-10,5))
self.assertEqual(v3.cp().prj(p1,pn1),vec3(-1,1,-1))
self.assertEqual(v1.cp().prj(p2,pn2),vec3(1,-5,0))
self.assertEqual(v2.cp().prj(p2,pn2),vec3(2,-5,5))
self.assertEqual(v3.cp().prj(p2,pn2),vec3(0,-5,-1))
self.assertTrue(v1.prj(p1,pn1) is v1)
#def test_prjps(self):
#def test_baryxy(self):
def test_inneighborhood(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,2,0),vec3(1,2,1),vec3(1,1,1)
self.assertEqual(v1.inneighborhood(v2,1.00),0)
self.assertEqual(v1.inneighborhood(v2,1.01),1)
self.assertEqual(v1.inneighborhood(v3,1.00),0)
self.assertEqual(v1.inneighborhood(v3,1.01),0)
self.assertEqual(v1.inneighborhood(v4,1.00),0)
self.assertEqual(v1.inneighborhood(v4,1.01),1)
def test_isnear(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,1,0.1),vec3(1,1,1),vec3(1.000001,1,1)
self.assertEqual(v1.isnear(v1),1)
self.assertEqual(v3.isnear(v3),1)
self.assertEqual(v1.isnear(v2),0)
self.assertEqual(v2.isnear(v1),0)
self.assertEqual(v1.isnear(v3),0)
self.assertEqual(v2.isnear(v3),0)
self.assertEqual(v2.isnear(v4),0)
self.assertEqual(v3.isnear(v4),1)
def test_isnearxy(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1.1,1,0.1),vec3(1,1,1),vec3(1.000001,1,1)
self.assertEqual(v1.isnearxy(v1),1)
self.assertEqual(v3.isnearxy(v3),1)
self.assertEqual(v1.isnearxy(v2),0)
self.assertEqual(v2.isnearxy(v1),0)
self.assertEqual(v1.isnearxy(v3),1)
self.assertEqual(v2.isnearxy(v3),0)
self.assertEqual(v2.isnearxy(v4),0)
self.assertEqual(v3.isnearxy(v4),1)
def test_inbxy(self):
py = vec3(1,1,0).sq(2,2)
self.assertFalse(vec3(-1,-1,0).inbxy(py))
self.assertFalse(vec3(0,0,0).inbxy(py))
self.assertFalse(vec3(1,0,0).inbxy(py))
self.assertTrue(vec3(1,1,0).inbxy(py))
def test_intrixy(self):
a = vec3(-93.6169662475586, 46.23309326171875, 0.0)
b = vec3(28.083663940429688, 48.28422546386719, 0.0)
c = vec3(25.696874618530273, 48.28422546386719, 0.0)
p = vec3(-93.34214782714844, 43.73178482055664, 0.0)
i = p.intrixy(a,b,c)
self.assertFalse(i)
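        # the plot below is for visual inspection only; plt.show() blocks until the window is closed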
ax = dtl.plot_axes_xy(100)
ax = dtl.plot_points_xy((a,b,c,p),ax)
ax = dtl.plot_polygon_xy((a,b,c),ax,lw = 2,col = 'g')
plt.show()
#def test_onsxy(self):
'''#
def test_onseg_xy(self):
p1,p2,p3 = vec3(1,1,0),vec3(0,1,0),vec3(2,2,0)
s1,s2 = vec3(0,0,0),vec3(2,2,0)
self.assertEqual(gtl.onseg_xy(p1,s1,s2),1)
self.assertEqual(gtl.onseg_xy(p2,s1,s2),0)
self.assertEqual(gtl.onseg_xy(p3,s1,s2),1)
def test_inseg_xy(self):
p1,p2,p3 = vec3(1,1,0),vec3(0,1,0),vec3(2,2,0)
s1,s2 = vec3(0,0,0),vec3(2,2,0)
self.assertEqual(gtl.inseg_xy(p1,s1,s2),1)
self.assertEqual(gtl.inseg_xy(p2,s1,s2),0)
self.assertEqual(gtl.inseg_xy(p3,s1,s2),0)
'''#
def test_onbxy(self):
py = vec3(1,1,0).sq(2,2)
self.assertFalse(vec3(-1,-1,0).onbxy(py))
self.assertTrue(vec3(0,0,0).onbxy(py))
self.assertTrue(vec3(1,0,0).onbxy(py))
self.assertFalse(vec3(1,1,0).onbxy(py))
self.assertTrue(vec3(2,0,0).onbxy(py))
#def test_onpxy(self):
def test_mag2(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,1,1),vec3(2,5,11)
self.assertEqual(dpr.isnear(v1.mag2(),1),1)
self.assertEqual(dpr.isnear(v2.mag2(),3),1)
self.assertEqual(dpr.isnear(v3.mag2(),150),1)
def test_mag(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,1,1),vec3(2,5,11)
self.assertEqual(dpr.isnear(v1.mag(),1),1)
self.assertEqual(dpr.isnear(v2.mag(),math.sqrt(3)),1)
self.assertEqual(dpr.isnear(v3.mag(),math.sqrt(150)),1)
def test_nrm(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,2,5),vec3(10,20,50)
self.assertTrue(v1.nrm() == v1)
self.assertTrue(v1.nrm() is v1)
self.assertTrue(v2.nrm() == v3.nrm())
self.assertFalse(v2.nrm() is v3.nrm())
self.assertFalse(v2.nrm() == v1.nrm())
def test_trn(self):
v1,v2 = vec3(-1,2,5),vec3(-12,24,60)
self.assertEqual(v1.cp().trn(v2),vec3(-13,26,65))
self.assertEqual(v2.cp().trn(v1),vec3(-13,26,65))
self.assertTrue(v1.trn(v2) is v1)
def test_xtrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().xtrn(2),vec3(1,2,5))
self.assertEqual(v1.xtrn(2),vec3(1,2,5))
self.assertEqual(v1.xtrn(-5),vec3(-4,2,5))
self.assertTrue(v1.xtrn(2) is v1)
def test_ytrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().ytrn(2),vec3(-1,4,5))
self.assertEqual(v1.ytrn(2),vec3(-1,4,5))
self.assertEqual(v1.ytrn(-5),vec3(-1,-1,5))
self.assertTrue(v1.ytrn(2) is v1)
def test_ztrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().ztrn(2),vec3(-1,2,7))
self.assertEqual(v1.ztrn(2),vec3(-1,2,7))
self.assertEqual(v1.ztrn(-5),vec3(-1,2,2))
self.assertTrue(v1.ztrn(2) is v1)
def test_scl(self):
v1,v2,v3 = vec3(1,1,0),vec3(2,-10,5),vec3(0,1,-1)
self.assertEqual(v1*v1,vec3(1,1,0))
self.assertEqual(v2*v2,vec3(4,100,25))
self.assertEqual(v3*v3,vec3(0,1,1))
self.assertEqual(v1*v2,vec3(2,-10,0))
self.assertEqual(v1*v3,vec3(0,1,0))
self.assertEqual(v2*v1,vec3(2,-10,0))
self.assertEqual(v2*v3,vec3(0,-10,-5))
self.assertEqual(v3*v1,vec3(0,1,0))
self.assertEqual(v3*v2,vec3(0,-10,-5))
self.assertTrue(v1.scl(v2) is v1)
self.assertEqual(v1,vec3(2,-10,0))
def test_uscl(self):
v1,v2 = vec3(-1,2,5),vec3(-12,24,60)
self.assertTrue(v1.uscl(12) == v2)
self.assertTrue(v1.uscl(12) is v1)
self.assertFalse(v1.uscl(12) is v2)
self.assertTrue(v1.uscl(12) == v1)
def test_xscl(self):
self.same('xscl',self.one,vec3(4,1,1),4)
self.same('xscl',self.origin,vec3(0,0,0),4)
self.same('xscl',self.z,vec3(0,0,1),4)
def test_yscl(self):
self.same('yscl',self.one,vec3(1,4,1),4)
self.same('yscl',self.origin,vec3(0,0,0),4)
self.same('yscl',self.z,vec3(0,0,1),4)
def test_zscl(self):
self.same('zscl',self.one,vec3(1,1,4),4)
self.same('zscl',self.origin,vec3(0,0,0),4)
self.same('zscl',self.z,vec3(0,0,4),4)
def test_rot(self):
v1,v2 = vec3(0,2,0),vec3(-2,0,0)
q1 = quat(0,0,0,0).av(dpr.PI2,vec3(0,0,1))
q2 = quat(0,0,0,0).av(0,vec3(0,0,1))
self.assertEqual(v1.rot(q1),v2)
self.assertEqual(v1.cp().rot(q2),v1)
#def test_fulc(self):
#def test_cowxy(self):
def test_xrot(self):
self.same('xrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('xrot',self.one,vec3(1,-1,1),dpr.PI2)
self.same('xrot',self.x,vec3(1,0,0),dpr.PI2)
self.same('xrot',self.y,vec3(0,0,1),dpr.PI2)
self.same('xrot',self.z,vec3(0,-1,0),dpr.PI2)
def test_yrot(self):
self.same('yrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('yrot',self.one,vec3(1,1,-1),dpr.PI2)
self.same('yrot',self.x,vec3(0,0,-1),dpr.PI2)
self.same('yrot',self.y,vec3(0,1,0),dpr.PI2)
self.same('yrot',self.z,vec3(1,0,0),dpr.PI2)
def test_zrot(self):
self.same('zrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('zrot',self.one,vec3(-1,1,1),dpr.PI2)
self.same('zrot',self.x,vec3(0,1,0),dpr.PI2)
self.same('zrot',self.y,vec3(-1,0,0),dpr.PI2)
self.same('zrot',self.z,vec3(0,0,1),dpr.PI2)
def test_flp(self):
v1,v2 = vec3(-1,-2,-5),vec3(1,2,5)
self.assertTrue(v1.flp() == v1)
self.assertFalse(v1.cp() == v1.flp())
self.assertTrue(v1.flp() is v1)
self.assertFalse(v1.flp() is v2)
self.assertTrue(v1.flp() == v2)
def test_tov(self):
v1,v2 = vec3(1,-2,1),vec3(1,2,5)
self.assertEqual(v1.tov(v1),vec3(0,0,0))
self.assertEqual(v1.tov(v2),vec3(0, 4, 4))
self.assertEqual(v2.tov(v1),vec3(0,-4,-4))
self.assertEqual(v2.tov(v2),vec3(0,0,0))
def test_tovxy(self):
v1,v2 = vec3(1,-2,1),vec3(1,2,5)
self.assertEqual(v1.tovxy(v1),vec3(0,0,0))
self.assertEqual(v1.tovxy(v2),vec3(0, 4,0))
self.assertEqual(v2.tovxy(v1),vec3(0,-4,0))
self.assertEqual(v2.tovxy(v2),vec3(0,0,0))
def test_mid(self):
v1,v2 = vec3(0,2,0),vec3(-1,0,1)
v3,v4 = vec3(-0.5,1,0.5),vec3(-0.75,0.5,0.75)
self.assertEqual(v1.mid(v2),v3)
self.assertEqual(v2.mid(v3),v4)
def test_lerp(self):
v1,v2 = vec3(0,2,0),vec3(-1,0,1)
v3,v4 = vec3(-0.75,0.5,0.75),vec3(0,2,0)
self.assertEqual(v1.lerp(v2,0.75),v3)
self.assertFalse(v1.lerp(v2,0) is v1)
self.assertEqual(v1.lerp(v2,0),v1)
self.assertFalse(v1.lerp(v2,1) is v2)
self.assertEqual(v1.lerp(v2,1),v2)
def test_pline(self):
pline = self.origin.pline(self.one,2)
d1 = self.origin.d(pline[0])
d2 = pline[0].d(pline[1])
d3 = pline[1].d(self.one)
self.assertEqual(len(pline),2)
self.assertTrue(dpr.isnear(d1,d2))
self.assertTrue(dpr.isnear(d2,d3))
self.assertTrue(self.origin.mid(self.one),pline[0].mid(pline[1]))
def test_spline(self):
e = vec3(10,10,1)
t1,t2 = vec3(1,0,0),vec3(0,-1,0)
pline = self.origin.spline(e,t1,t2,5)
ax = dtl.plot_axes(10)
ax = dtl.plot_edges(pline,ax,lw = 3,col = 'b')
plt.show()
def test_pring(self):
p,r,n = vec3(0,0,0),4,8
ps = p.pring(r,n)
pm = ps[0].mid(ps[1])
alpha = numpy.pi*(2.0/n)
self.assertTrue(len(ps) == n)
        self.assertTrue(dpr.isnear(p.d(pm),r))
self.assertTrue(dpr.isnear(ps[0].d(ps[1]),2*r*numpy.tan(alpha/2.0)))
#def test_sq(self):
#def test_com(self):
if __name__ == '__main__':
unittest.main()
|
mit
| -3,662,958,737,619,010,600
| 36.88964
| 80
| 0.579326
| false
| 2.292587
| true
| false
| false
|
csadorf/signac
|
signac/common/validate.py
|
1
|
1660
|
# Copyright (c) 2017 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
import logging
from .configobj.validate import Validator
from .configobj.validate import VdtValueError
logger = logging.getLogger(__name__)
def version(value, *args, **kwargs):
try:
if isinstance(value, str):
return tuple((int(v) for v in value.split(',')))
else:
return tuple((int(v) for v in value))
except Exception:
raise VdtValueError(value)
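# For example, version('0,1,0') and version((0, 1, 0)) both return the tuple (0, 1, 0).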
def mongodb_uri(value, *args, **kwargs):
if isinstance(value, list):
value = ','.join(value)
if not value.startswith('mongodb://'):
value = 'mongodb://' + value
try:
import pymongo
except ImportError:
logger.debug("Install pymongo to validate database configurations!")
else:
try:
pymongo.uri_parser.parse_uri(value)
except pymongo.errors.InvalidURI:
raise VdtValueError(value)
return value
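# For example, mongodb_uri('localhost:27017') returns 'mongodb://localhost:27017';
# if pymongo is installed and rejects the URI, VdtValueError is raised instead.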
def password(value, *args, **kwargs):
return value
def get_validator():
return Validator({
'version': version,
'mongodb_uri': mongodb_uri,
'password': password,
})
cfg = """
workspace_dir = string(default='workspace')
project = string()
signac_version = version(default='0,1,0')
[General]
default_host = string()
[hosts]
[[__many__]]
url = mongodb_uri(default='localhost')
auth_mechanism = option('none', 'SCRAM-SHA-1', default='none')
username = string()
password = password()
db_auth = string(default='admin')
[[[password_config]]]
salt = string()
rounds = integer()
"""
|
bsd-3-clause
| 8,211,791,766,496,821,000
| 22.714286
| 76
| 0.640964
| false
| 3.747178
| false
| false
| false
|
zacharylawrence/ENEE408I-Team-9
|
pi/arduino.py
|
1
|
4379
|
#!/usr/bin/env python
# encoding: utf-8
"""
Control All Arduino Functions
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants
import constants
class Arduino():
# Define Pin Constants
# SPI (for pixy) uses pins 10-13
_MOTOR1 = 3
_MOTOR1_DIR_A = 2
_MOTOR1_DIR_B = 4
_MOTOR2 = 6
_MOTOR2_DIR_A = 7
_MOTOR2_DIR_B = 8
# Note: ping sensor shouldn't have to be PWM
_PING = 5
_SERVO = 9
# Analog Pins
_IR_LEFT = 0
_IR_MID = 1
_IR_RIGHT = 2
def __init__(self):
# Instantiate the pymata_core API
self.board = PyMata3(sleep_tune=0)
# Set the pin modes
self.board.set_pin_mode(self._MOTOR1, Constants.PWM)
self.board.set_pin_mode(self._MOTOR1_DIR_A, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR1_DIR_B, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR2, Constants.PWM)
self.board.set_pin_mode(self._MOTOR2_DIR_A, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR2_DIR_B, Constants.OUTPUT)
self.board.set_pin_mode(self._IR_LEFT, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_LEFT)
self.board.set_pin_mode(self._IR_MID, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_MID)
self.board.set_pin_mode(self._IR_RIGHT, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_RIGHT)
self.board.sonar_config(self._PING, self._PING)
self.board.pixy_init(max_blocks=constants.MAX_PIXY_BLOCKS)
self.board.keep_alive(period=2)
self.board.servo_config(self._SERVO)
# Set default state
self.set_motors(0, 0)
self.open_claw()
def set_motors(self, motor1, motor2):
if (motor1 < -1 or motor1 > 1 or motor2 < -1 or motor2 > 1):
raise ValueError("set_motor called with (motor1=" + str(motor1) + ") and (motor2=" + str(motor2) + ")")
# print("Setting Motor 1 to: " + str(motor1))
# print("Setting Motor 2 to: " + str(motor2))
# Set motor directions
self.board.digital_write(self._MOTOR1_DIR_A, 0 if (motor1 < 0) else 1)
self.board.digital_write(self._MOTOR1_DIR_B, 1 if (motor1 < 0) else 0)
self.board.digital_write(self._MOTOR2_DIR_A, 1 if (motor2 < 0) else 0)
self.board.digital_write(self._MOTOR2_DIR_B, 0 if (motor2 < 0) else 1)
# Set motor speeds
self.board.analog_write(self._MOTOR1, int(abs(motor1) * 255))
self.board.analog_write(self._MOTOR2, int(abs(motor2) * 255))
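        # e.g. set_motors(0.5, -0.5) drives motor 1 forward and motor 2 in
        # reverse, both at roughly half duty cycle (128/255)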
def set_servo(self, servo):
self.board.analog_write(self._SERVO, servo)
def close_claw(self):
self.board.analog_write(self._SERVO, 100) # Used to be 75
self.board.sleep(constants.CLOSE_CLAW_PAUSE)
def open_claw(self):
self.board.analog_write(self._SERVO, 150)
self.board.sleep(constants.OPEN_CLAW_PAUSE)
# Get the ping sensor's distance in cm
def get_ping(self):
return self.board.sonar_data_retrieve(self._PING)
# Returns the value from the pixy camera
def get_pixy_blocks(self):
blocks = self.board.pixy_get_blocks()
print(blocks)
if len(blocks) > 0 and not "signature" in blocks[0]:
print("Malformed pixy block!!")
return None
# for block_index in range(len(blocks)):
# block = blocks[block_index]
# print(" block {}: sig: {} x: {} y: {} width: {} height: {}".format(
# block_index, block["signature"], block["x"], block["y"], block["width"], block["height"]))
# print("\n")
return blocks
def print_ir(self):
print(str(self.board.analog_read(self._IR_LEFT)) + " | " +
str(self.board.analog_read(self._IR_MID)) + " | " +
str(self.board.analog_read(self._IR_RIGHT)) + " | " +
str(self.get_ping()))
def get_ir_left(self):
return self.board.analog_read(self._IR_LEFT)
def get_ir_mid(self):
return self.board.analog_read(self._IR_MID)
def get_ir_right(self):
return self.board.analog_read(self._IR_RIGHT)
def ir_wall(self):
return (self.get_ir_left() >= constants.IR_WALL_THRESHOLD or
self.get_ping() <= constants.PING_WALL_THRESHOLD or
self.get_ir_right() >= constants.IR_WALL_THRESHOLD)
def ir_wall_target(self):
return (self.get_ir_left() >= constants.IR_WALL_THRESHOLD or
self.get_ir_mid() >= constants.IR_WALL_THRESHOLD or
self.get_ir_right() >= constants.IR_WALL_THRESHOLD)
def shutdown(self):
# Reset the board and exit
self.board.shutdown()
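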
|
mit
| -2,159,931,330,257,782,800
| 29.622378
| 109
| 0.651975
| false
| 2.871475
| false
| false
| false
|
TeamSpen210/srctools
|
srctools/sndscript.py
|
1
|
12138
|
"""Reads and writes Soundscripts."""
from enum import Enum
from chunk import Chunk as WAVChunk
from srctools import Property, conv_float
from typing import (
Optional, Union, TypeVar, Callable,
List, Tuple, Dict,
TextIO, IO,
)
__all__ = [
'SND_CHARS', 'Pitch', 'VOL_NORM', 'Channel', 'Level',
'Sound', 'wav_is_looped',
]
# All the prefixes wavs can have.
SND_CHARS = '*@#<>^)}$!?'
class Pitch(float, Enum):
"""The constants permitted for sound pitches."""
PITCH_NORM = 100.0
PITCH_LOW = 95.0
PITCH_HIGH = 120.0
def __str__(self) -> str:
return self.name
class VOLUME(Enum):
"""Special value, substitutes default volume (usually 1)."""
VOL_NORM = 'VOL_NORM'
def __str__(self) -> str:
return self.name
VOL_NORM = VOLUME.VOL_NORM
# Old compatibility values, replaced by soundlevel.
ATTENUATION = {
'ATTN_NONE': 0,
'ATTN_NORM': 0.8,
'ATTN_IDLE': 2.0,
'ATTN_STATIC': 1.25,
'ATTN_RICOCHET': 1.5,
'ATTN_GUNFIRE': 0.27,
}
class Channel(Enum):
"""Different categories of sounds."""
DEFAULT = "CHAN_AUTO"
GUNFIRE = "CHAN_WEAPON"
VOICE = "CHAN_VOICE"
TF2_ANNOUNCER = "CHAN_VOICE2"
ITEMS = "CHAN_ITEM"
BODY = "CHAN_BODY"
STREAMING = "CHAN_STREAM"
CON_CMD = "CHAN_REPLACE"
BACKGROUND = "CHAN_STATIC"
PLAYER_VOICE = "CHAN_VOICE_BASE"
#CHAN_USER_BASE+<number>
#Custom channels can be defined here.
class Level(Enum):
"""Soundlevel constants - attenuation."""
SNDLVL_NONE = 'SNDLVL_NONE'
SNDLVL_20dB = 'SNDLVL_20dB'
SNDLVL_25dB = 'SNDLVL_25dB'
SNDLVL_30dB = 'SNDLVL_30dB'
SNDLVL_35dB = 'SNDLVL_35dB'
SNDLVL_40dB = 'SNDLVL_40dB'
SNDLVL_45dB = 'SNDLVL_45dB'
SNDLVL_50dB = 'SNDLVL_50dB'
SNDLVL_55dB = 'SNDLVL_55dB'
SNDLVL_IDLE = 'SNDLVL_IDLE'
SNDLVL_65dB = 'SNDLVL_65dB'
SNDLVL_STATIC = 'SNDLVL_STATIC'
SNDLVL_70dB = 'SNDLVL_70dB'
SNDLVL_NORM = 'SNDLVL_NORM'
SNDLVL_80dB = 'SNDLVL_80dB'
SNDLVL_TALKING = 'SNDLVL_TALKING'
SNDLVL_85dB = 'SNDLVL_85dB'
SNDLVL_90dB = 'SNDLVL_90dB'
SNDLVL_95dB = 'SNDLVL_95dB'
SNDLVL_100dB = 'SNDLVL_100dB'
SNDLVL_105dB = 'SNDLVL_105dB'
SNDLVL_110dB = 'SNDLVL_110dB'
SNDLVL_120dB = 'SNDLVL_120dB'
SNDLVL_125dB = 'SNDLVL_125dB'
SNDLVL_130dB = 'SNDLVL_130dB'
SNDLVL_GUNFIRE = 'SNDLVL_GUNFIRE'
SNDLVL_140dB = 'SNDLVL_140dB'
SNDLVL_145dB = 'SNDLVL_145dB'
SNDLVL_150dB = 'SNDLVL_150dB'
SNDLVL_180dB = 'SNDLVL_180dB'
def __str__(self) -> str:
return self.name
EnumType = TypeVar('EnumType', bound=Enum)
def split_float(
val: str,
enum: Callable[[str], Union[float, EnumType]],
default: Union[float, EnumType],
name: str,
) -> Tuple[Union[float, EnumType], Union[float, EnumType]]:
"""Handle values which can be either single or a low, high pair of numbers.
If single, low and high are the same.
    enum is an Enum with values to match text constants, or a converter function
returning enums or raising ValueError, KeyError or IndexError.
The name is used for error handling.
"""
if isinstance(val, list):
raise ValueError(f'Property block used for option in {name} sound!')
if ',' in val:
s_low, s_high = val.split(',')
try:
low = enum(s_low.upper())
except (LookupError, ValueError):
low = conv_float(s_low, default)
try:
high = enum(s_high.upper())
except (LookupError, ValueError):
high = conv_float(s_high, default)
return low, high
else:
try:
out = enum(val.upper())
except (LookupError, ValueError):
out = conv_float(val, default)
return out, out
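# For illustration: split_float('PITCH_LOW,PITCH_HIGH', Pitch.__getitem__, 100.0, 'demo')
# yields (Pitch.PITCH_LOW, Pitch.PITCH_HIGH), while split_float('95,120', ...) falls
# back to plain floats and yields (95.0, 120.0).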
def join_float(val: Tuple[float, float]) -> str:
"""Reverse split_float()."""
low, high = val
if low == high:
return str(low)
else:
return '{!s},{!s}'.format(low, high)
def wav_is_looped(file: IO[bytes]) -> bool:
"""Check if the provided wave file contains loop cue points.
This code is partially copied from wave.Wave_read.initfp().
"""
first = WAVChunk(file, bigendian=False)
if first.getname() != b'RIFF':
raise ValueError('File does not start with RIFF id.')
if first.read(4) != b'WAVE':
raise ValueError('Not a WAVE file.')
while True:
try:
chunk = WAVChunk(file, bigendian=False)
except EOFError:
return False
if chunk.getname() == b'cue ':
return True
chunk.skip()
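# Example usage (a sketch; the path is hypothetical):
#
#     with open('sound/loop.wav', 'rb') as f:
#         has_loop = wav_is_looped(f)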
class Sound:
"""Represents a single soundscript."""
stack_start: Property
stack_update: Property
stack_stop: Property
def __init__(
self,
name: str,
sounds: List[str],
volume: Union[Tuple[Union[float, VOLUME], Union[float, VOLUME]], float, VOLUME]=(VOL_NORM, VOL_NORM),
channel: Channel=Channel.DEFAULT,
level: Union[Tuple[Union[float, Level], Union[float, Level]], float, Level]=(Level.SNDLVL_NORM, Level.SNDLVL_NORM),
pitch: Union[Tuple[Union[float, Pitch], Union[float, Pitch]], float, Pitch]=(Pitch.PITCH_NORM, Pitch.PITCH_NORM),
# Operator stacks
stack_start: Optional[Property]=None,
stack_update: Optional[Property]=None,
stack_stop: Optional[Property]=None,
use_v2: bool=False,
) -> None:
"""Create a soundscript."""
self.name = name
self.sounds = sounds
self.channel = channel
self.force_v2 = use_v2
if isinstance(volume, tuple):
self.volume = volume
else:
self.volume = volume, volume
if isinstance(level, tuple):
self.level = level
else:
self.level = level, level
if isinstance(pitch, tuple):
self.pitch = pitch
else:
self.pitch = pitch, pitch
self.stack_start = Property('', []) if stack_start is None else stack_start
self.stack_update = Property('', []) if stack_update is None else stack_update
self.stack_stop = Property('', []) if stack_stop is None else stack_stop
def __repr__(self) -> str:
res = f'Sound({self.name!r}, {self.sounds}, volume={self.volume}, channel={self.channel}, level={self.level}, pitch={self.pitch}'
if self.force_v2 or self.stack_start or self.stack_update or self.stack_stop:
res += f', stack_start={self.stack_start!r}, stack_update={self.stack_update!r}, stack_stop={self.stack_stop!r})'
else:
res += ')'
return res
@classmethod
def parse(cls, file: Property) -> Dict[str, 'Sound']:
"""Parses a soundscript file.
This returns a dict mapping casefolded names to Sounds.
"""
sounds = {}
for snd_prop in file:
volume = split_float(
snd_prop['volume', '1'],
VOLUME,
1.0,
snd_prop.real_name,
)
pitch = split_float(
snd_prop['pitch', '100'],
Pitch.__getitem__,
100.0,
snd_prop.real_name,
)
if 'soundlevel' in snd_prop:
level = split_float(
snd_prop['soundlevel'],
Level.__getitem__,
Level.SNDLVL_NORM,
snd_prop.real_name,
)
elif 'attenuation' in snd_prop:
atten_min, atten_max = split_float(
snd_prop['attenuation'],
ATTENUATION.__getitem__,
ATTENUATION['ATTN_IDLE'],
snd_prop.real_name,
)
# Convert to a soundlevel.
# See source_sdk/public/soundflags.h:ATTN_TO_SNDLVL()
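                # e.g. ATTN_IDLE (2.0) maps to 50 + 20/2.0 = 60 dB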
level = (
(50.0 + 20.0 / atten_min) if atten_min else 0.0,
(50.0 + 20.0 / atten_max) if atten_max else 0.0,
)
else:
level = (Level.SNDLVL_NORM, Level.SNDLVL_NORM)
# Either 1 "wave", or multiple in "rndwave".
            wavs: List[str] = []
for prop in snd_prop:
if prop.name == 'wave':
wavs.append(prop.value)
elif prop.name == 'rndwave':
for subprop in prop:
wavs.append(subprop.value)
channel = Channel(snd_prop['channel', 'CHAN_AUTO'])
sound_version = snd_prop.int('soundentry_version', 1)
if 'operator_stacks' in snd_prop:
if sound_version == 1:
raise ValueError(
'Operator stacks used with version '
'less than 2 in "{}"!'.format(snd_prop.real_name))
start_stack, update_stack, stop_stack = [
Property(stack_name, [
prop.copy()
for prop in
snd_prop.find_children('operator_stacks', stack_name)
])
for stack_name in
['start_stack', 'update_stack', 'stop_stack']
]
else:
start_stack, update_stack, stop_stack = [
Property(stack_name, [])
for stack_name in
['start_stack', 'update_stack', 'stop_stack']
]
sounds[snd_prop.name] = Sound(
snd_prop.real_name,
wavs,
volume,
channel,
level,
pitch,
start_stack,
update_stack,
stop_stack,
sound_version == 2,
)
return sounds
def export(self, file: TextIO):
"""Write a sound to a file.
Pass a file-like object open for text writing.
"""
file.write('"{}"\n\t{{\n'.format(self.name))
file.write('\t' 'channel {}\n'.format(self.channel.value))
file.write('\t' 'soundlevel {}\n'.format(join_float(self.level)))
if self.volume != (1, 1):
file.write('\tvolume {}\n'.format(join_float(self.volume)))
if self.pitch != (100, 100):
file.write('\tpitch {}\n'.format(join_float(self.pitch)))
if len(self.sounds) > 1:
file.write('\trndwave\n\t\t{\n')
for wav in self.sounds:
file.write('\t\twave "{}"\n'.format(wav))
file.write('\t\t}\n')
else:
file.write('\twave "{}"\n'.format(self.sounds[0]))
if self.force_v2 or self.stack_start or self.stack_stop or self.stack_update:
file.write(
'\t' 'soundentry_version 2\n'
'\t' 'operator_stacks\n'
'\t\t' '{\n'
)
if self.stack_start:
file.write(
'\t\t' 'start_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_start:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
if self.stack_update:
file.write(
'\t\t' 'update_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_update:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
if self.stack_stop:
file.write(
'\t\t' 'stop_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_stop:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
file.write('\t\t}\n')
file.write('\t}\n')
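# Illustrative round-trip (a sketch; the file names are hypothetical):
#
#     with open('game_sounds.txt') as f:
#         sounds = Sound.parse(Property.parse(f))
#     with open('game_sounds_out.txt', 'w') as f:
#         for snd in sounds.values():
#             snd.export(f)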
|
unlicense
| -6,019,161,793,720,176,000
| 31.196286
| 137
| 0.511781
| false
| 3.509107
| false
| false
| false
|
allanlei/rhinocloud-utils
|
rhinocloud/views/mixins/formset.py
|
1
|
2115
|
from django.forms.models import modelformset_factory
from django.core.exceptions import ImproperlyConfigured
class ModelFormSetFactoryMixin(object):
    formset_model = None
    formset_fields = None
    formset_exclude = None
    formset_extra = 1
def get_formset_model(self):
if self.formset_model:
model = self.formset_model
else:
raise ImproperlyConfigured('Provide formset_model or override get_formset_model().')
return model
def get_formset_fields(self):
return self.formset_fields
def get_formset_exclude(self):
return self.formset_exclude
def get_formset_class_kwargs(self):
return {
'fields': self.get_formset_fields(),
'exclude': self.get_formset_exclude(),
'extra': int(self.formset_extra),
}
def get_formset_class(self):
        return modelformset_factory(self.get_formset_model(), **self.get_formset_class_kwargs())
def get_formset_queryset(self):
return self.get_formset_model().objects.all()
def get_formset_kwargs(self, **kwargs):
if 'queryset' not in kwargs:
kwargs['queryset'] = self.get_formset_queryset()
return kwargs
def get_formset(self, *args, **kwargs):
if not hasattr(self, 'formset') or self.formset is None:
self.formset = self.get_formset_class()(*args, **self.get_formset_kwargs(**kwargs))
return self.formset
def form_valid(self, form, **kwargs):
formset = self.get_formset(self.request.POST, self.request.FILES)
if formset.is_valid():
response = super(ModelFormSetFactoryMixin, self).form_valid(form, **kwargs)
self.formset_valid(formset, **kwargs)
return response
return self.form_invalid(form, **kwargs)
def form_invalid(self, form, **kwargs):
self.get_formset(self.request.POST, self.request.FILES).is_valid()
return super(ModelFormSetFactoryMixin, self).form_invalid(form, **kwargs)
def formset_valid(self, formset, **kwargs):
formset.save()
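# Illustrative usage (a sketch; the view, models and fields are hypothetical):
#
#     class AuthorUpdateView(ModelFormSetFactoryMixin, UpdateView):
#         model = Author
#         formset_model = Book
#         formset_fields = ('title', 'year')
#         formset_extra = 2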
|
bsd-3-clause
| -6,625,479,159,177,680,000
| 34.847458
| 96
| 0.631678
| false
| 4.130859
| false
| false
| false
|
balloob/pychromecast
|
pychromecast/controllers/youtube.py
|
1
|
3736
|
"""
Controller to interface with the YouTube-app.
Use the media controller to play, pause etc.
"""
import threading
from casttube import YouTubeSession
from . import BaseController
from ..error import UnsupportedNamespace
from ..config import APP_YOUTUBE
YOUTUBE_NAMESPACE = "urn:x-cast:com.google.youtube.mdx"
TYPE_GET_SCREEN_ID = "getMdxSessionStatus"
TYPE_STATUS = "mdxSessionStatus"
ATTR_SCREEN_ID = "screenId"
MESSAGE_TYPE = "type"
class YouTubeController(BaseController):
""" Controller to interact with Youtube."""
def __init__(self):
super(YouTubeController, self).__init__(YOUTUBE_NAMESPACE, APP_YOUTUBE)
self.status_update_event = threading.Event()
self._screen_id = None
self._session = None
def start_session_if_none(self):
"""
        Starts a session if one is not yet initialized.
"""
if not (self._screen_id and self._session):
self.update_screen_id()
self._session = YouTubeSession(screen_id=self._screen_id)
def play_video(self, video_id, playlist_id=None):
"""
Play video(video_id) now. This ignores the current play queue order.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
:param playlist_id: youtube.com/watch?v=video_id&list=playlist_id
"""
self.start_session_if_none()
self._session.play_video(video_id, playlist_id)
def add_to_queue(self, video_id):
"""
Add video(video_id) to the end of the play queue.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.add_to_queue(video_id)
def play_next(self, video_id):
"""
Play video(video_id) after the currently playing video.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.play_next(video_id)
def remove_video(self, video_id):
"""
        Remove video(video_id) from the queue.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.remove_video(video_id)
def clear_playlist(self):
"""
Clear the entire video queue
"""
self.start_session_if_none()
self._session.clear_playlist()
def update_screen_id(self):
"""
Sends a getMdxSessionStatus to get the screenId and waits for response.
        This function is blocking.
If connected we should always get a response
(send message will launch app if it is not running).
"""
self.status_update_event.clear()
# This gets the screenId but always throws. Couldn't find a better way.
try:
self.send_message({MESSAGE_TYPE: TYPE_GET_SCREEN_ID})
except UnsupportedNamespace:
pass
self.status_update_event.wait()
self.status_update_event.clear()
def receive_message(self, message, data: dict):
"""Called when a message is received."""
if data[MESSAGE_TYPE] == TYPE_STATUS:
self._process_status(data.get("data"))
return True
return False
def _process_status(self, status):
""" Process latest status update. """
self._screen_id = status.get(ATTR_SCREEN_ID)
self.status_update_event.set()
def quick_play(self, media_id=None, playlist_id=None, enqueue=False, **kwargs):
""" Quick Play """
if enqueue:
self.add_to_queue(media_id, **kwargs)
else:
self.play_video(media_id, playlist_id=playlist_id, **kwargs)
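# Illustrative usage (a sketch; device discovery and the device name are assumptions):
#
#     import pychromecast
#     chromecasts, browser = pychromecast.get_listed_chromecasts(
#         friendly_names=["Living Room"])
#     cast = chromecasts[0]
#     cast.wait()
#     yt = YouTubeController()
#     cast.register_handler(yt)
#     yt.play_video("dQw4w9WgXcQ")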
|
mit
| 7,168,021,507,368,404,000
| 32.963636
| 83
| 0.621253
| false
| 3.796748
| false
| false
| false
|
soplerproject/sopler
|
sopler/settings.py
|
1
|
7823
|
# -*- coding: utf-8 -*-
# Django settings for sopler project.
import os.path
PROJECT_DIR = os.path.normpath(os.path.dirname(os.path.abspath(__file__)))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SESSION_SAVE_EVERY_REQUEST = True
#Set "True" if all non-SSL requests should be permanently redirected to SSL.
SECURE_SSL_REDIRECT = False
# Set to an integer number of seconds.
# It is recommended to set the max-age to a large value like 31536000 (12 months) or 63072000 (24 months).
SECURE_HSTS_SECONDS = 31536000
# HTTP Strict Transport Security.
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
# Prevent framing of your pages and protect them from clickjacking.
SECURE_FRAME_DENY = True
# Prevent the browser from guessing asset content types.
SECURE_CONTENT_TYPE_NOSNIFF = True
# Enable the browser’s XSS filtering protections.
SECURE_BROWSER_XSS_FILTER = True
SOCIAL_FRIENDS_USING_ALLAUTH = True
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(PROJECT_DIR, "../database.db"), # Or path to database file if using sqlite3.
#'NAME': '',
# The following settings are not used with sqlite3:
#'USER': '',
#'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, "../static/"),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# Ensure that you’re using a long, random and unique "SECRET_KEY"
SECRET_KEY = 'Enter_Here_A_Long_Random_And_Unique_Key_&^%$&!!'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.request',
    'django.contrib.auth.context_processors.auth',
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.contrib.messages.context_processors.messages",
    #"allauth.account.context_processors.account",
    "allauth.socialaccount.context_processors.socialaccount",
)
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'djangosecure.middleware.SecurityMiddleware',
# Uncomment the next line for simple clickjacking protection:
#'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sopler.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sopler.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, "../templates/"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
#'django_admin_bootstrapped',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'allauth',
'allauth.account',
'allauth.socialaccount',
# Available Social Account Providers
'allauth.socialaccount.providers.twitter',
'allauth.socialaccount.providers.persona',
'allauth.socialaccount.providers.google',
'allauth.socialaccount.providers.facebook',
#'allauth.socialaccount.providers.openid',
'core',
# Extra Security Features.
'djangosecure',
# Data migration.
'south',
# Faving and unfaving lists.
'favit',
# Webservice API framework for Django.
#'tastypie',
# Fetches your friends from different social-networks.
'social_friends_finder',
'embed_video',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from local_settings import *
except ImportError:
pass
|
agpl-3.0
| -6,497,830,003,044,947,000
| 33.29386
| 127
| 0.698043
| false
| 3.719791
| false
| false
| false
|
Pbartek/pyobd-pi-TFT
|
obd_gui.py
|
1
|
17289
|
#!/usr/bin/env python
###########################################################################
# obd_gui.py
#
# Created by Paul Bartek (pbartek@cowfishstudios.com)
#
###########################################################################
#-------------------------------------------------------------------------------
import os
import wx
import time
from threading import Thread
from obd_capture import OBD_Capture
from obd_sensors import SENSORS
from obd_sensors import *
#-------------------------------------------------------------------------------
# OBD variable
BACKGROUND_FILENAME = "bg_black.jpg"
GAUGE_FILENAME = "frame_C1.jpg"
LOGO_FILENAME = "cowfish.png"
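# NOTE: OBDSplashScreen below references these two names, which are not defined
# anywhere else in this file; the values given here are placeholder assumptions.
SPLASHSCREEN_FILENAME = "splashscreen.jpg"  # assumed file name
SPLASHSCREEN_TIMEOUT = 5000  # milliseconds (assumed)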
#-------------------------------------------------------------------------------
def obd_connect(o):
o.connect()
class OBDConnection(object):
"""
Class for OBD connection. Use a thread for connection.
"""
def __init__(self):
self.c = OBD_Capture()
def get_capture(self):
return self.c
def connect(self):
self.t = Thread(target=obd_connect, args=(self.c,))
self.t.start()
def is_connected(self):
return self.c.is_connected()
def get_output(self):
if self.c and self.c.is_connected():
return self.c.capture_data()
return ""
def get_port(self):
return self.c.is_connected()
def get_port_name(self):
if self.c:
port = self.c.is_connected()
if port:
try:
return port.port.name
except:
pass
return None
def get_sensors(self):
sensors = []
if self.c:
sensors = self.c.getSupportedSensorList()
return sensors
#-------------------------------------------------------------------------------
class OBDText(wx.TextCtrl):
"""
Text display while loading OBD application.
"""
def __init__(self, parent):
"""
Constructor.
"""
style = wx.TE_READONLY | wx.TE_MULTILINE
wx.TextCtrl.__init__(self, parent, style=style)
self.SetBackgroundColour('#21211f')
self.SetForegroundColour(wx.WHITE)
font = wx.Font(12, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
self.SetFont(font)
def AddText(self, text):
self.AppendText(text)
#-------------------------------------------------------------------------------
class OBDStaticBox(wx.StaticBox):
"""
OBD StaticBox.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
wx.StaticBox.__init__(self, *args, **kwargs)
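    # NOTE: OnPaint/Paint below read self.bitmap, which is never assigned for this
    # class, and the handler is never bound, so this code path is effectively unused.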
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDPanelGauges(wx.Panel):
"""
Panel for gauges.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
super(OBDPanelGauges, self).__init__(*args, **kwargs)
# Background image
image = wx.Image(GAUGE_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
# Create an accelerator table
lid = wx.NewId()
cid = wx.NewId()
rid = wx.NewId()
self.Bind(wx.EVT_MENU, self.onCtrlC, id=cid)
self.Bind(wx.EVT_MENU, self.onLeft, id=lid)
self.Bind(wx.EVT_MENU, self.onRight, id=rid)
self.accel_tbl = wx.AcceleratorTable([
(wx.ACCEL_CTRL, ord('C'), cid),
(wx.ACCEL_NORMAL, wx.WXK_LEFT, lid),
(wx.ACCEL_NORMAL, wx.WXK_RIGHT, rid),
])
self.SetAcceleratorTable(self.accel_tbl)
# Handle events for mouse clicks
self.Bind(wx.EVT_LEFT_DOWN, self.onLeft)
self.Bind(wx.EVT_RIGHT_DOWN, self.onRight)
# Connection
self.connection = None
# Sensors
self.istart = 0
self.sensors = []
# Port
self.port = None
# List to hold children widgets
self.boxes = []
self.texts = []
def setConnection(self, connection):
self.connection = connection
def setSensors(self, sensors):
self.sensors = sensors
def setPort(self, port):
self.port = port
def getSensorsToDisplay(self, istart):
"""
Get at most 1 sensor to be displayed on screen.
"""
sensors_display = []
if istart<len(self.sensors):
iend = istart + 1
sensors_display = self.sensors[istart:iend]
return sensors_display
def ShowSensors(self):
"""
Display the sensors.
"""
sensors = self.getSensorsToDisplay(self.istart)
# Destroy previous widgets
for b in self.boxes: b.Destroy()
for t in self.texts: t.Destroy()
self.boxes = []
self.texts = []
# Main sizer
boxSizerMain = wx.BoxSizer(wx.VERTICAL)
# Grid sizer
nrows, ncols = 1, 1
vgap, hgap = 50, 50
gridSizer = wx.GridSizer(nrows, ncols, vgap, hgap)
# Create a box for each sensor
for index, sensor in sensors:
(name, value, unit) = self.port.sensor(index)
box = OBDStaticBox(self, wx.ID_ANY)
self.boxes.append(box)
boxSizer = wx.StaticBoxSizer(box, wx.VERTICAL)
# Text for sensor value
if type(value)==float:
value = str("%.2f"%round(value, 3))
t1 = wx.StaticText(parent=self, label=str(value), style=wx.ALIGN_CENTER)
t1.SetForegroundColour('WHITE')
font1 = wx.Font(30, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
t1.SetFont(font1)
boxSizer.Add(t1, 0, wx.ALIGN_CENTER | wx.ALL, 70)
boxSizer.AddStretchSpacer()
self.texts.append(t1)
# Text for sensor name
t2 = wx.StaticText(parent=self, label=name, style=wx.ALIGN_CENTER)
t2.SetForegroundColour('WHITE')
font2 = wx.Font(10, wx.ROMAN, wx.NORMAL, wx.BOLD, faceName="Monaco")
t2.SetFont(font2)
boxSizer.Add(t2, 0, wx.ALIGN_CENTER | wx.ALL, 45)
self.texts.append(t2)
gridSizer.Add(boxSizer, 1, wx.EXPAND | wx.ALL)
# Add invisible boxes if necessary
nsensors = len(sensors)
for i in range(1-nsensors):
box = OBDStaticBox(self)
boxSizer = wx.StaticBoxSizer(box, wx.VERTICAL)
self.boxes.append(box)
box.Show(False)
gridSizer.Add(boxSizer, 1, wx.EXPAND | wx.ALL)
# Layout
boxSizerMain.Add(gridSizer, 1, wx.EXPAND | wx.ALL, 0)
self.SetSizer(boxSizerMain)
self.Refresh()
self.Layout()
        # Timer for update; stop any previous timer so that repeated calls to
        # ShowSensors() do not leave several timers firing refresh()
        if getattr(self, 'timer', None) is not None and self.timer.IsRunning():
            self.timer.Stop()
        self.timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.refresh, self.timer)
        self.timer.Start(1500)
def refresh(self, event):
sensors = self.getSensorsToDisplay(self.istart)
itext = 0
for index, sensor in sensors:
(name, value, unit) = self.port.sensor(index)
if type(value)==float:
value = str("%.2f"%round(value, 3))
if itext<len(self.texts):
self.texts[itext*2].SetLabel(str(value))
itext += 1
def onCtrlC(self, event):
self.GetParent().Close()
    def onLeft(self, event):
        """
        Get data from 1 previous sensor in the list.
        """
        if self.sensors:
            # wrap around to the last sensor when stepping back from the first
            self.istart = (self.istart - 1) % len(self.sensors)
            self.ShowSensors()
    def onRight(self, event):
        """
        Get data from 1 next sensor in the list.
        """
        if self.sensors:
            # wrap around to the first sensor after the last one
            self.istart = (self.istart + 1) % len(self.sensors)
            self.ShowSensors()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDLoadingPanel(wx.Panel):
"""
Main panel for OBD application.
Show loading screen. Handle event from mouse/keyboard.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
super(OBDLoadingPanel, self).__init__(*args, **kwargs)
# Background image
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
# Logo
bitmap = wx.Bitmap(LOGO_FILENAME)
width, height = bitmap.GetSize()
image = wx.ImageFromBitmap(bitmap)
image = image.Scale(width/12, height/12, wx.IMAGE_QUALITY_HIGH)
bitmap = wx.BitmapFromImage(image)
control = wx.StaticBitmap(self, wx.ID_ANY, bitmap)
control.SetPosition((2, 2))
# Create an accelerator table
cid = wx.NewId()
self.Bind(wx.EVT_MENU, self.onCtrlC, id=cid)
self.accel_tbl = wx.AcceleratorTable([
(wx.ACCEL_CTRL, ord('C'), cid),
])
self.SetAcceleratorTable(self.accel_tbl)
# Connection
self.c = None
# Sensors list
self.sensors = []
# Port
self.port = None
def getConnection(self):
return self.c
def showLoadingScreen(self):
"""
Display the loading screen.
"""
boxSizer = wx.BoxSizer(wx.VERTICAL)
self.textCtrl = OBDText(self)
boxSizer.Add(self.textCtrl, 1, wx.EXPAND | wx.ALL, 40)
self.SetSizer(boxSizer)
font3 = wx.Font(10, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
self.textCtrl.SetFont(font3)
self.textCtrl.AddText(" Opening interface (serial port)\n")
self.textCtrl.AddText(" Trying to connect...\n")
self.timer0 = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.connect, self.timer0)
self.timer0.Start(1000)
def connect(self, event):
if self.timer0:
self.timer0.Stop()
# Connection
self.c = OBDConnection()
self.c.connect()
        connected = False
        while not connected:
            connected = self.c.is_connected()
            self.textCtrl.Clear()
            self.textCtrl.AddText(" Trying to connect ..." + time.asctime())
            if connected:
                break
            time.sleep(1)  # poll once per second instead of busy-waiting
        self.textCtrl.Clear()
        port_name = self.c.get_port_name()
        if port_name:
            self.textCtrl.AddText(" Connected on: " + port_name + "\n")
            self.textCtrl.AddText(" Please hold alt & esc to view terminal.")
        self.textCtrl.AddText(str(self.c.get_output()))
        self.sensors = self.c.get_sensors()
        self.port = self.c.get_port()
        self.GetParent().update(None)
def getSensors(self):
return self.sensors
def getPort(self):
return self.port
def onCtrlC(self, event):
self.GetParent().Close()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDFrame(wx.Frame):
"""
OBD frame.
"""
def __init__(self):
"""
Constructor.
"""
wx.Frame.__init__(self, None, wx.ID_ANY, "OBD-Pi")
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.panelLoading = OBDLoadingPanel(self)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.sizer.Add(self.panelLoading, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.panelLoading.showLoadingScreen()
self.panelLoading.SetFocus()
def update(self, event):
if self.panelLoading:
connection = self.panelLoading.getConnection()
sensors = self.panelLoading.getSensors()
port = self.panelLoading.getPort()
self.panelLoading.Destroy()
self.panelGauges = OBDPanelGauges(self)
if connection:
self.panelGauges.setConnection(connection)
if sensors:
self.panelGauges.setSensors(sensors)
self.panelGauges.setPort(port)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.sizer.Add(self.panelGauges, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.panelGauges.ShowSensors()
self.panelGauges.SetFocus()
self.Layout()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDFrame0(wx.Frame):
"""
OBD starting frame. Used only for full screen purpose at startup.
"""
def __init__(self):
"""
Constructor.
"""
wx.Frame.__init__(self, None, wx.ID_ANY, "")
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDSplashScreen(wx.SplashScreen):
"""
Splash screen.
"""
def __init__(self, parent=None, frame0=None):
"""
Constructor.
"""
self.frame0 = frame0
image = wx.Image(SPLASHSCREEN_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
bitmap = wx.BitmapFromImage(image)
splashStyle = wx.SPLASH_CENTRE_ON_SCREEN | wx.SPLASH_TIMEOUT
splashDuration = SPLASHSCREEN_TIMEOUT
wx.SplashScreen.__init__(self, bitmap, splashStyle, splashDuration, parent)
self.Bind(wx.EVT_CLOSE, self.OnExit)
wx.Yield()
def OnExit(self, evt):
"""
Exit splash screen and pass over other to main OBD frame.
"""
# Main frame
frame = OBDFrame()
app.SetTopWindow(frame)
frame.ShowFullScreen(True)
frame.Show(True)
# Delete frame0
if self.frame0:
self.frame0.Destroy()
del self.frame0
evt.Skip()
#-------------------------------------------------------------------------------
class OBDApp(wx.App):
"""
OBD Application.
"""
def __init__(self, redirect=False, filename=None, useBestVisual=False, clearSigInt=True):
"""
Constructor.
"""
wx.App.__init__(self, redirect, filename, useBestVisual, clearSigInt)
def OnInit(self):
"""
Initializer.
"""
# Main frame
frame = OBDFrame()
self.SetTopWindow(frame)
frame.ShowFullScreen(True)
frame.Show(True)
#frame.showLoadingPanel()
# This frame is used only to set the full screen mode
# for the splash screen display and for transition with
# the loading screen.
# This frame is not shown and will be deleted later on.
#frame0 = OBDFrame0()
#self.SetTopWindow(frame0)
#frame0.ShowFullScreen(True)
#self.SetTopWindow(frame0)
# Splash screen
#splash = OBDSplashScreen(frame0, frame0)
#self.SetTopWindow(splash)
#splash.Show(True)
#splash.ShowFullScreen(True)
return True
    def FilterEvent(self, event):
        if isinstance(event, wx.KeyEvent):
            pass
#-------------------------------------------------------------------------------
app = OBDApp(False)
app.MainLoop()
#-------------------------------------------------------------------------------
|
gpl-2.0
| -4,817,368,613,607,881,000
| 27.863105
| 93
| 0.515588
| false
| 3.920408
| false
| false
| false
|
skosukhin/spack
|
var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
|
1
|
1662
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class OptionalDepTest3(Package):
"""Depends on the optional-dep-test package"""
homepage = "http://www.example.com"
url = "http://www.example.com/optional-dep-test-3-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
variant('var', default=False)
depends_on('a', when='~var')
depends_on('b', when='+var')
def install(self, spec, prefix):
pass
|
lgpl-2.1
| -7,378,908,064,907,632,000
| 38.571429
| 78
| 0.661853
| false
| 4.073529
| false
| false
| false
|
enritoomey/DiagramaDeRafagasyManiobras
|
calculos.py
|
1
|
15303
|
# TODO: turn this into a set of functions
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fsolve
def plot_diagrama_de_rafagas(ax, Vb, Vc, Vd, n_25fts, n_50fts, n_60fts, dv, units, vel_label):
ax.plot(np.arange(0, Vb[units], dv), [1 + n_60fts(vel) for vel in np.arange(0, Vb[units], dv)], color='r')
ax.plot(np.arange(0, Vb[units], dv), [1 - n_60fts(vel) for vel in np.arange(0, Vb[units], dv)], color='r')
ax.plot(np.arange(0, Vc[units], dv), [1 + n_50fts(vel) for vel in np.arange(0, Vc[units], dv)], color='b')
ax.plot(np.arange(0, Vc[units], dv), [1 - n_50fts(vel) for vel in np.arange(0, Vc[units], dv)], color='b')
ax.plot(np.arange(0, Vd[units], dv), [1 + n_25fts(vel) for vel in np.arange(0, Vd[units], dv)], color='g')
ax.plot(np.arange(0, Vd[units], dv), [1 - n_25fts(vel) for vel in np.arange(0, Vd[units], dv)], color='g')
ax.plot([Vb[units], Vc[units]], [1 + n_60fts(Vb[units]), 1 + n_50fts(Vc[units])], color='m')
ax.plot([Vb[units], Vc[units]], [1 - n_60fts(Vb[units]), 1 - n_50fts(Vc[units])], color='m')
ax.plot([Vc[units], Vd[units]], [1 + n_50fts(Vc[units]), 1 + n_25fts(Vd[units])], color='m')
ax.plot([Vc[units], Vd[units]], [1 - n_50fts(Vc[units]), 1 - n_25fts(Vd[units])], color='m')
ax.plot([Vd[units], Vd[units]], [1 + n_25fts(Vd[units]), 1 - n_25fts(Vd[units])], color='m')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Gust Diagram")
def plot_diagrama_de_maniobras(ax, n_stall_pos, n_stall_neg, n_max, Vs1, Vs0, Va, dv, units, vel_label):
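    # NOTE: Vc and Vd are read from module scope below; they are not parameters of
    # this function, so it only works once those globals have been defined.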
ax.plot(np.arange(0, Vs1[units], dv), [n_stall_pos(vel) for vel in np.arange(0, Vs1[units], dv)], color='m',
linestyle='--')
ax.plot([Vs1[units], Vs1[units]], [0, n_stall_pos(Vs1[units])], color='m')
ax.plot(np.arange(Vs1[units], Va[units], dv), [n_stall_pos(vel) for vel in np.arange(Vs1[units], Va[units], dv)],
color='m', linestyle='-')
ax.plot(np.arange(0, Vs0[units] + dv, dv), [n_stall_neg(vel) for vel in np.arange(0, Vs0[units] + dv, dv)],
color='m', linestyle='--')
ax.plot([Vs0[units], Vs0[units]], [0, -1.0], color='m')
ax.plot([Vs1[units], Vs0[units]], [0.0, 0.0], color='m')
ax.plot([Va[units], Vd[units]], [n_max, n_max], color='m')
ax.plot([Vd[units], Vd[units]], [n_max, 0], color='m')
ax.plot([Vs0[units], Vc[units]], [-1.0, -1.0], color='m')
ax.plot([Vc[units], Vd[units]], [-1.0, 0.0], color='m')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Manoeuvre Diagram")
def plot_diagrama_de_maniobras_con_flap(ax, n_stall_flap, Vsf, Vf_n2, Vf, dv, units, vel_label):
ax.plot(np.arange(0, Vsf[units] + dv, dv), [n_stall_flap(vel) for vel in np.arange(0, Vsf[units] + dv, dv)],
color='b', linestyle='--')
ax.plot(np.arange(Vsf[units], Vf_n2 + dv, dv), [n_stall_flap(vel) for vel in np.arange(Vsf[units], Vf_n2 + dv, dv)],
color='b', linestyle='-')
ax.plot([Vsf[units], Vsf[units]], [0.0, n_stall_flap(Vsf[units])], color='b', linestyle='-')
ax.plot([Vf_n2, Vf[units]], [2.0, 2.0], color='b', linestyle='-')
ax.plot([Vf[units], Vf[units]], [0.0, 2.0], color='b', linestyle='-')
ax.plot([Vsf[units], Vf[units]], [0.0, 0.0], color='b', linestyle='-')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Manoeuvre Diagram")
def plot_diagrama_de_maniobras_y_rafagas(ax, n_stall_pos, n_stall_neg, n_gust_pos, n_gust_neg, n_manoeuvre_pos,
n_manoeuvre_neg, v_intersec_pos, v_intersec_neg, Vd, dv, units, vel_label):
ax.fill_between(np.arange(0, v_intersec_pos, dv), 0, [n_stall_pos(vel) for vel in np.arange(0, v_intersec_pos, dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(v_intersec_pos, Vd[units], dv), 0, [max(n_gust_pos(vel), n_manoeuvre_pos(vel))
for vel in np.arange(v_intersec_pos, Vd[units], dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(0, v_intersec_neg, dv), 0, [n_stall_neg(vel) for vel in np.arange(0, v_intersec_neg, dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(v_intersec_neg, Vd[units], dv), 0, [min(n_gust_neg(vel), n_manoeuvre_neg(vel))
for vel in np.arange(v_intersec_neg, Vd[units], dv)],
color='m', alpha=0.2)
ax.fill_between([Vd[units], Vd[units]], 0, [max(n_manoeuvre_pos(Vd[units]), n_gust_pos(Vd[units])),
min(n_manoeuvre_neg(Vd[units]), n_gust_neg(Vd[units]))], color='m',
alpha=0.2)
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Combined Gust & Manoeuvre Diagram")
if __name__ == '__main__':
# Units
units = 'IM'  # 'IM' or 'SI'
ft2m = 0.3048
lb2kg = 0.453592
slugcuft2kgm3 = 515.379
vel_label = {'IM': 'ft/s', 'SI': 'm/s'}
# Input Data:
CAM = {'SI': 2.461}
CAM['IM'] = CAM['SI']/ft2m
sw = {'SI': 60}
sw['IM'] = sw['SI']/ft2m/ft2m
a3D = 5.0037 #1/rad
MTOW = {'SI': 23000}
MTOW['IM'] = MTOW['SI']/lb2kg
MLW = {'SI': 23000}
MLW['IM'] = MLW['SI']/lb2kg
W0 = {'SI': 13766.0}
W0['IM'] = W0['SI']/lb2kg
MZFW = {'SI': 16376.0}
MZFW['IM'] = MZFW['SI']/lb2kg
Vc = {'SI': 151.93}
Vc['IM'] = Vc['SI']/ft2m
clmax = 1.2463
clmax_flap = 1.499
clmin = -0.75*clmax
Zmo = {'SI': 9999.2}
Zmo['IM'] = Zmo['SI']/ft2m
# Variables
W = {'SI': 20000}
W['IM'] = W['SI']/lb2kg
h = {'SI': 5000}
h['IM'] = h['SI']/ft2m
den = {'SI': 0.125}
den['IM'] = den['SI']/lb2kg*ft2m**3
# Constants
cte_fgz = {'IM': 250000}
cte_fgz['SI'] = cte_fgz['IM']*ft2m
s = {'IM': 100.015}
s['SI'] = s['IM']*ft2m
gravedad = {'SI': 9.81}
gravedad['IM'] = gravedad['SI']/ft2m  # m/s**2 -> ft/s**2
cte_nmax_1 = {'IM': 24000}
cte_nmax_1['SI'] = cte_nmax_1['IM']*lb2kg
cte_nmax_2 = {'IM': 10000}
cte_nmax_2['SI'] = cte_nmax_2['IM']*lb2kg
# Constants derived from the input data
carga_alar = {}
H = {}
Vs1 = {}
Vs0 = {}
Vsf = {}
Vd = {}
Va = {}
Vf = {}
Vb = {}
Uref = {}
Uds = {}  # separate dicts (a chained assignment would alias them)
U = {}
Ude_25fts = {}
Ude_50fts = {}
Ude_60fts = {}
carga_alar[units] = W[units] / sw[units]
mu_g = 2 * carga_alar[units] / (den[units] * CAM[units] * a3D)  # gravedad[units] factor omitted
Kg = 0.88 * (mu_g / (5.3 + mu_g))
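# Gust alleviation factor per FAR/CS 25.341: Kg = 0.88*mu_g/(5.3 + mu_g),
# with the mass ratio mu_g = 2*(W/S)/(rho*c*a) computed above. Note the
# regulatory formula includes g when the wing loading is in force units;
# here the gravity factor is omitted (see the note above), which assumes
# a mass-based wing loading.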
Vs1[units] = np.sqrt(carga_alar[units] / (0.5 * den[units] * clmax))
Vs0[units] = np.sqrt(-carga_alar[units] / (0.5 * den[units] * clmin))
Vsf[units] = np.sqrt(carga_alar[units] / (0.5 * den[units] * clmax_flap))
# n_max calculation
n_max = 2.1 + cte_nmax_1[units] / (MTOW[units] + cte_nmax_2[units])
if n_max < 2.5:
n_max = 2.5
elif n_max > 3.8:
n_max = 3.8
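# Limit manoeuvring load factor per FAR/CS 25.337(b):
# n_max = 2.1 + 24000/(MTOW + 10000) with MTOW in lb, clipped to the
# [2.5, 3.8] range, which is what the branches above implement.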
Va[units] = Vs1[units] * np.sqrt(n_max)
if Va[units] > Vc[units]:
Va[units] = Vc[units]
Vd[units] = Vc[units] / 0.85
Vf[units] = max(Vs1[units] * 1.6, Vsf[units] * 1.8)
cte_Uref_h1 = {'IM': 15000}
cte_Uref_h1['SI'] = cte_Uref_h1['IM'] * ft2m
cte_Uref_h2 = {'IM': 50000}
cte_Uref_h2['SI'] = cte_Uref_h2['IM'] * ft2m
cte_Uref_v1 = {'IM': 56}
cte_Uref_v1['SI'] = cte_Uref_v1['IM'] * ft2m
cte_Uref_v2 = {'IM': 56}
cte_Uref_v2['SI'] = cte_Uref_v2['IM'] * ft2m
cte_Uref_v3 = {'IM': 26}
cte_Uref_v3['SI'] = cte_Uref_v3['IM'] * ft2m
if h[units] < cte_Uref_h1[units]:
Uref[units] = cte_Uref_v1[units] - 12.0 * h[units] / cte_Uref_h1[units]
elif h[units] < cte_Uref_h2[units]:
Uref[units] = cte_Uref_v2[units] - 18.0 * (h[units] - cte_Uref_h1[units]) /\
(cte_Uref_h2[units] - cte_Uref_h1[units])
else:
Uref[units] = cte_Uref_v3[units]
# This constant is here because the slope a_cn = dCn/dalpha must be used instead of a_cl = dCl/dalpha, but the origin of this value is unknown
ad_CN = 0.59248
cte_Vb = {'IM': 498.0}# lb/s**2
cte_Vb['SI'] = cte_Vb['IM'] * ft2m**4 / lb2kg
Vb[units] = min(Vc[units], Vs1[units] * np.sqrt(1 + Kg * Uref[units] * Vc[units] * a3D * ad_CN /
(cte_Vb[units] * carga_alar[units])))
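# Vb is the design speed for maximum gust intensity ("rough air" speed),
# capped at Vc; the expression follows the CS 25.335(d) form with the
# Ude reference gust and the Kg alleviation factor.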
# Gust velocities
cte_Ude_h1 = {'IM': 20000}
cte_Ude_h1['SI'] = cte_Ude_h1['IM'] * ft2m
cte_Ude_h2 = {'IM': 50000}
cte_Ude_h2['SI'] = cte_Ude_h2['IM'] * ft2m
cte_25fts_v1 = {'IM': 25}
cte_25fts_v1['SI'] = cte_25fts_v1['IM'] * ft2m
cte_25fts_v2 = {'IM': 33.34}
cte_25fts_v2['SI'] = cte_25fts_v2['IM'] * ft2m
cte_25fts_m2 = 0.000417
cte_25fts_v3 = {'IM': 12.5}
cte_25fts_v3['SI'] = cte_25fts_v3['IM'] * ft2m
cte_50fts_v1 = {'IM': 50}
cte_50fts_v1['SI'] = cte_50fts_v1['IM'] * ft2m
cte_50fts_v2 = {'IM': 66.77}
cte_50fts_v2['SI'] = cte_50fts_v2['IM'] * ft2m
cte_50fts_m2 = 0.0008933
cte_50fts_v3 = {'IM': 25}
cte_50fts_v3['SI'] = cte_50fts_v3['IM'] * ft2m
cte_60fts_v1 = {'IM': 60}
cte_60fts_v1['SI'] = cte_60fts_v1['IM'] * ft2m
cte_60fts_v2 = {'IM': 60}
cte_60fts_v2['SI'] = cte_60fts_v2['IM'] * ft2m
cte_60fts_m2 = {'IM': 18}
cte_60fts_m2['SI'] = cte_60fts_m2['IM'] * ft2m
cte_60fts_v3 = {'IM': 38}
cte_60fts_v3['SI'] = cte_60fts_v3['IM'] * ft2m
if h[units] < cte_Ude_h1[units]:
Ude_25fts[units] = cte_25fts_v1[units]
Ude_50fts[units] = cte_50fts_v1[units]
Ude_60fts[units] = cte_60fts_v1[units]
elif h[units] < cte_Ude_h2[units]:
Ude_25fts[units] = cte_25fts_v2[units] - cte_25fts_m2 * h[units]
Ude_50fts[units] = cte_50fts_v2[units] - cte_50fts_m2 * h[units]
Ude_60fts[units] = cte_60fts_v2[units] - cte_60fts_m2[units] * (h[units] - cte_Ude_h1[units])\
/ (cte_Ude_h2[units] - cte_Ude_h1[units])
else:
Ude_25fts[units] = cte_25fts_v3[units]
Ude_50fts[units] = cte_50fts_v3[units]
Ude_60fts[units] = cte_60fts_v3[units]
def n_25fts(vel):
return fg * Ude_25fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_50fts(vel):
return Kg * Ude_50fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_60fts(vel):
return Kg * Ude_60fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_gust_pos(vel):
if 0 <= vel <= Vb[units]:
return 1 + n_60fts(vel)
elif vel <= Vc[units]:
m = (n_50fts(Vc[units]) - n_60fts(Vb[units])) / (Vc[units] - Vb[units])
b = n_50fts(Vc[units]) - m * Vc[units]
return 1 + m*vel + b
elif vel <= Vd[units]:
m = (n_25fts(Vd[units]) - n_50fts(Vc[units])) / (Vd[units] - Vc[units])
b = n_25fts(Vd[units]) - m * Vd[units]
return 1 + m * vel + b
return None
def n_gust_neg(vel):
if 0 <= vel <= Vb[units]:
return 1 - n_60fts(vel)
elif vel <= Vc[units]:
m = (n_50fts(Vc[units]) - n_60fts(Vb[units])) / (Vc[units] - Vb[units])
b = n_50fts(Vc[units]) - m * Vc[units]
return 1 - (m * vel + b)
elif vel <= Vd[units]:
m = (n_25fts(Vd[units]) - n_50fts(Vc[units])) / (Vd[units] - Vc[units])
b = n_25fts(Vd[units]) - m * Vd[units]
return 1 - (m * vel + b)
return None
# Variables defined but not used
# H[units] = 12.5*CAM[units]
R1 = MLW[units] / MTOW[units]
R2 = MZFW[units] / MTOW[units]
fgm = np.sqrt(R2 * np.tan(np.pi * R1 / 4.0))
fgz = 1 - Zmo[units] / cte_fgz[units]
fg = 0.5*(fgz + fgm)
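# Flight profile alleviation factor per CS 25.341(a)(6):
# fg = 0.5*(fgz + fgm), with fgz = 1 - Zmo/250000 ft and
# fgm = sqrt(R2*tan(pi*R1/4)), where R1 = MLW/MTOW and R2 = MZFW/MTOW,
# as computed above.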
# cte_Uds = {'IM':350}
# cte_Uds['SI'] = cte_Uds['IM']*ft2m
# Uds[units] = Uref[units]*fg*(H[units]/cte_Uds[units])**(1.0/6.0)
# U[units] = 0.5*Uds[units]*(1-np.cos(np.pi*s[units]/H[units]))
print("Kg = {}, Vb = {}, Vc = {}, Vd = {}".format(Kg, Vb, Vc, Vd))
dv = 1.0
# Gust diagram
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
plot_diagrama_de_rafagas(ax, Vb, Vc, Vd, n_25fts, n_50fts, n_60fts, dv, units, vel_label)
plt.grid(True)
plt.show()
def n_stall_pos(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmax / W[units]
def n_stall_neg(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmin / W[units]
def n_stall_flap(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmax_flap / W[units]
def n_manoeuvre_pos(vel):
if 0 <= vel <= Va[units]:
return n_stall_pos(vel)
elif vel <= Vd[units]:
return n_max
return None
def n_manoeuvre_neg(vel):
if 0 <= vel <= Vs0[units]:
return n_stall_neg(vel)
elif vel <= Vc[units]:
return -1.0
elif vel <= Vd[units]:
return -1 + 1 / (Vd[units] - Vc[units]) * (vel - Vc[units])
return None
# Manoeuvre diagram:
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
plot_diagrama_de_maniobras(ax, n_stall_pos, n_stall_neg, n_max, Vs1, Vs0, Va, dv, units, vel_label)
#plt.show()
# Manoeuvre diagram with flaps:
#fig, ax = plt.subplots(nrows=1, ncols= 1, sharex=True, sharey=True, squeeze=True)
Vf_n2 = np.sqrt(2 * W[units] / (0.5 * den[units] * clmax_flap * sw[units]))
plot_diagrama_de_maniobras_con_flap(ax, n_stall_flap, Vsf, Vf_n2, Vf, dv, units, vel_label)
plt.grid(True)
plt.show()
# Intersection calculations:
if n_gust_pos(Va[units]) > n_max:
# extend the stall curve up to its intersection with the gust line and start the comparison from that point
def func1(vel):
return n_gust_pos(vel) - n_stall_pos(vel)
v_intersec_pos = fsolve(func1, Va[units])[0]
else:
v_intersec_pos = Va[units]
if n_gust_neg(Vs0[units]) < -1.0:
# extend the stall curve up to its intersection with the gust line and start the comparison from that point
def func2(vel):
return n_gust_neg(vel) - n_stall_neg(vel)
v_intersec_neg = fsolve(func2, Vs0[units])[0]
else:
v_intersec_neg = Vs0[units]
# Plot intersection
# fig = plt.figure(facecolor='white')
# axescolor = '#f6f6f6' # the axes background color
# ax = fig.add_axes([0, 1, 0, 1], axisbg=axescolor)
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
plot_diagrama_de_maniobras_y_rafagas(ax, n_stall_pos, n_stall_neg, n_gust_pos, n_gust_neg, n_manoeuvre_pos,
n_manoeuvre_neg, v_intersec_pos, v_intersec_neg, Vd, dv, units, vel_label)
plt.grid(True)
plt.show()
|
mit
| 6,982,465,582,770,812,000
| 40.041209
| 135
| 0.52493
| false
| 2.373294
| false
| false
| false
|
hpcuantwerpen/easybuild-easyblocks
|
easybuild/easyblocks/b/boost.py
|
1
|
17638
|
##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for Boost, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Petar Forai (IMP/IMBA)
@author: Luca Marsella (CSCS)
@author: Guilherme Peretti-Pezzi (CSCS)
@author: Joachim Hein (Lund University)
@author: Michele Dolfi (ETH Zurich)
@author: Simon Branford (University of Birmingham)
"""
from distutils.version import LooseVersion
import fileinput
import glob
import os
import re
import sys
import easybuild.tools.toolchain as toolchain
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import ERROR
from easybuild.tools.filetools import apply_regex_substitutions, copy, mkdir, symlink, which, write_file
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import AARCH64, POWER, UNKNOWN
from easybuild.tools.systemtools import get_cpu_architecture, get_glibc_version, get_shared_lib_ext
class EB_Boost(EasyBlock):
"""Support for building Boost."""
def __init__(self, *args, **kwargs):
"""Initialize Boost-specific variables."""
super(EB_Boost, self).__init__(*args, **kwargs)
self.objdir = None
self.pyvers = []
if LooseVersion(self.version) >= LooseVersion("1.71.0"):
self.bjamcmd = 'b2'
else:
self.bjamcmd = 'bjam'
@staticmethod
def extra_options():
"""Add extra easyconfig parameters for Boost."""
extra_vars = {
'boost_mpi': [False, "Build mpi boost module", CUSTOM],
'boost_multi_thread': [None, "Build boost with multi-thread option (DEPRECATED)", CUSTOM],
'tagged_layout': [None, "Build with tagged layout on library names, default from version 1.69.0", CUSTOM],
'single_threaded': [None, "Also build single threaded libraries, requires tagged_layout, "
"default from version 1.69.0", CUSTOM],
'toolset': [None, "Toolset to use for Boost configuration ('--with-toolset' for bootstrap.sh)", CUSTOM],
'build_toolset': [None, "Toolset to use for Boost compilation "
"('toolset' for b2, default calculated from toolset)", CUSTOM],
'mpi_launcher': [None, "Launcher to use when running MPI regression tests", CUSTOM],
'only_python_bindings': [False, "Only install Boost.Python library providing Python bindings", CUSTOM],
'use_glibcxx11_abi': [None, "Use the GLIBCXX11 ABI", CUSTOM],
}
return EasyBlock.extra_options(extra_vars)
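# Illustrative easyconfig fragment exercising these custom options
# (hypothetical values, not taken from a real easyconfig):
#
#     boost_mpi = True
#     tagged_layout = True
#     toolset = 'gcc'
#     use_glibcxx11_abi = True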
def patch_step(self):
"""Patch Boost source code before building."""
super(EB_Boost, self).patch_step()
# TIME_UTC is also defined in recent glibc versions, so we need to rename it for old Boost versions (<= 1.49)
glibc_version = get_glibc_version()
recent_glibc = glibc_version is not UNKNOWN and LooseVersion(glibc_version) > LooseVersion("2.15")
if recent_glibc and LooseVersion(self.version) <= LooseVersion("1.49.0"):
self.log.info("Patching because the glibc version is too new")
files_to_patch = ["boost/thread/xtime.hpp"] + glob.glob("libs/interprocess/test/*.hpp")
files_to_patch += glob.glob("libs/spirit/classic/test/*.cpp") + glob.glob("libs/spirit/classic/test/*.inl")
for patchfile in files_to_patch:
try:
for line in fileinput.input("%s" % patchfile, inplace=1, backup='.orig'):
line = re.sub(r"TIME_UTC", r"TIME_UTC_", line)
sys.stdout.write(line)
except IOError as err:
raise EasyBuildError("Failed to patch %s: %s", patchfile, err)
def prepare_step(self, *args, **kwargs):
"""Prepare build environment."""
super(EB_Boost, self).prepare_step(*args, **kwargs)
# keep track of Python version(s) used during installation,
# so we can perform a complete sanity check
if get_software_root('Python'):
self.pyvers.append(get_software_version('Python'))
def configure_step(self):
"""Configure Boost build using custom tools"""
# boost_multi_thread is deprecated
if self.cfg['boost_multi_thread'] is not None:
self.log.deprecated("boost_multi_thread has been replaced by tagged_layout. "
"We build with tagged layout and both single and multi threading libraries "
"from version 1.69.0.", '5.0')
self.cfg['tagged_layout'] = True
# mpi sanity check
if self.cfg['boost_mpi'] and not self.toolchain.options.get('usempi', None):
raise EasyBuildError("When enabling building boost_mpi, also enable the 'usempi' toolchain option.")
# create build directory (Boost doesn't like being built in source dir)
self.objdir = os.path.join(self.builddir, 'obj')
mkdir(self.objdir)
# generate config depending on compiler used
toolset = self.cfg['toolset']
if toolset is None:
if self.toolchain.comp_family() == toolchain.INTELCOMP:
toolset = 'intel-linux'
elif self.toolchain.comp_family() == toolchain.GCC:
toolset = 'gcc'
else:
raise EasyBuildError("Unknown compiler used, don't know what to specify to --with-toolset, aborting.")
cmd = "%s ./bootstrap.sh --with-toolset=%s --prefix=%s %s"
tup = (self.cfg['preconfigopts'], toolset, self.objdir, self.cfg['configopts'])
run_cmd(cmd % tup, log_all=True, simple=True)
# Use build_toolset if specified or the bootstrap toolset without the OS suffix
self.toolset = self.cfg['build_toolset'] or re.sub('-linux$', '', toolset)
user_config = []
# Explicitly set the compiler path to avoid B2 checking some standard paths like /opt
cxx = os.getenv('CXX')
if cxx:
cxx = which(cxx, on_error=ERROR)
# Remove default toolset config which may lead to duplicate toolsets (e.g. for intel-linux)
apply_regex_substitutions('project-config.jam', [('using %s ;' % toolset, '')])
# Add our toolset config with no version and full path to compiler
user_config.append("using %s : : %s ;" % (self.toolset, cxx))
if self.cfg['boost_mpi']:
# configure the boost mpi module
# http://www.boost.org/doc/libs/1_47_0/doc/html/mpi/getting_started.html
# let Boost.Build know to look here for the config file
# Check if using a Cray toolchain and configure MPI accordingly
if self.toolchain.toolchain_family() == toolchain.CRAYPE:
if self.toolchain.PRGENV_MODULE_NAME_SUFFIX == 'gnu':
craympichdir = os.getenv('CRAY_MPICH2_DIR')
craygccversion = os.getenv('GCC_VERSION')
# We configure the gcc toolchain below, so make sure the EC doesn't use another toolset
if self.toolset != 'gcc':
raise EasyBuildError("For the cray toolchain the 'gcc' toolset must be used.")
# Remove the previous "using gcc" line added above (via self.toolset) if present
user_config = [x for x in user_config if not x.startswith('using gcc :')]
user_config.extend([
'local CRAY_MPICH2_DIR = %s ;' % craympichdir,
'using gcc ',
': %s' % craygccversion,
': CC ',
': <compileflags>-I$(CRAY_MPICH2_DIR)/include ',
r' <linkflags>-L$(CRAY_MPICH2_DIR)/lib \ ',
'; ',
'using mpi ',
': CC ',
': <find-shared-library>mpich ',
': %s' % self.cfg['mpi_launcher'],
';',
'',
])
else:
raise EasyBuildError("Bailing out: only PrgEnv-gnu supported for now")
else:
user_config.append("using mpi : %s ;" % os.getenv("MPICXX"))
write_file('user-config.jam', '\n'.join(user_config), append=True)
def build_boost_variant(self, bjamoptions, paracmd):
"""Build Boost library with specified options for bjam."""
# build with specified options
cmd = "%s ./%s %s %s %s" % (self.cfg['prebuildopts'], self.bjamcmd, bjamoptions, paracmd, self.cfg['buildopts'])
run_cmd(cmd, log_all=True, simple=True)
# install built Boost library
cmd = "%s ./%s %s install %s %s" % (
self.cfg['preinstallopts'], self.bjamcmd, bjamoptions, paracmd, self.cfg['installopts'])
run_cmd(cmd, log_all=True, simple=True)
# clean up before proceeding with next build
run_cmd("./%s %s --clean-all" % (self.bjamcmd, bjamoptions), log_all=True, simple=True)
def build_step(self):
"""Build Boost with bjam tool."""
self.bjamoptions = " --prefix=%s --user-config=user-config.jam" % self.objdir
if 'toolset=' not in self.cfg['buildopts']:
self.bjamoptions += " toolset=" + self.toolset
cxxflags = os.getenv('CXXFLAGS')
# only disable -D_GLIBCXX_USE_CXX11_ABI if use_glibcxx11_abi was explicitly set to False
# None value is the default, which corresponds to default setting (=1 since GCC 5.x)
if self.cfg['use_glibcxx11_abi'] is not None:
cxxflags += ' -D_GLIBCXX_USE_CXX11_ABI='
if self.cfg['use_glibcxx11_abi']:
cxxflags += '1'
else:
cxxflags += '0'
if cxxflags is not None:
self.bjamoptions += " cxxflags='%s'" % cxxflags
ldflags = os.getenv('LDFLAGS')
if ldflags is not None:
self.bjamoptions += " linkflags='%s'" % ldflags
# specify path for bzip2/zlib if module is loaded
for lib in ["bzip2", "zlib"]:
libroot = get_software_root(lib)
if libroot:
self.bjamoptions += " -s%s_INCLUDE=%s/include" % (lib.upper(), libroot)
self.bjamoptions += " -s%s_LIBPATH=%s/lib" % (lib.upper(), libroot)
self.paracmd = ''
if self.cfg['parallel']:
self.paracmd = "-j %s" % self.cfg['parallel']
if self.cfg['only_python_bindings']:
# magic incantation to only install Boost Python bindings is... --with-python
# see http://boostorg.github.io/python/doc/html/building/installing_boost_python_on_your_.html
self.bjamoptions += " --with-python"
# Default threading since at least 1.47.0 is multi with system layout
threading = " threading=multi"
layout = " --layout=system"
if LooseVersion(self.version) >= LooseVersion("1.69.0"):
# As of 1.69.0 we build with layout tagged and both single and multi threading
# Linking default libraries to multi-threaded versions.
if self.cfg['tagged_layout'] is None:
self.cfg['tagged_layout'] = True
if self.cfg['single_threaded'] is None:
self.cfg['single_threaded'] = True
if self.cfg['tagged_layout']:
layout = " --layout=tagged"
if self.cfg['single_threaded']:
if not self.cfg['tagged_layout']:
raise EasyBuildError("Singled threaded build requires tagged layout.")
threading = " threading=single,multi"
self.bjamoptions += threading + layout
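# At this point bjamoptions typically resembles the following
# (illustrative only, not captured from a real build):
#   --prefix=<objdir> --user-config=user-config.jam toolset=gcc
#   cxxflags='...' linkflags='...' threading=single,multi --layout=tagged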
if self.cfg['boost_mpi']:
self.log.info("Building boost_mpi library")
mpi_bjamoptions = " --with-mpi"
self.build_boost_variant(self.bjamoptions + mpi_bjamoptions, self.paracmd)
self.log.info("Building Boost libraries")
# build with specified options
cmd = ' '.join([
self.cfg['prebuildopts'],
os.path.join('.', self.bjamcmd),
self.bjamoptions,
self.paracmd,
self.cfg['buildopts'],
])
run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
"""Install Boost by copying files to install dir."""
# install boost libraries
self.log.info("Installing Boost libraries")
cmd = ' '.join([
self.cfg['preinstallopts'],
os.path.join('.', self.bjamcmd),
self.bjamoptions,
'install',
self.paracmd,
self.cfg['installopts'],
])
run_cmd(cmd, log_all=True, simple=True)
self.log.info("Copying %s to installation dir %s", self.objdir, self.installdir)
if self.cfg['only_python_bindings'] and 'Python' in self.cfg['multi_deps'] and self.iter_idx > 0:
self.log.info("Main installation should already exist, only copying over missing Python libraries.")
copy(glob.glob(os.path.join(self.objdir, 'lib', 'libboost_python*')), os.path.join(self.installdir, 'lib'),
symlinks=True)
else:
copy(glob.glob(os.path.join(self.objdir, '*')), self.installdir, symlinks=True)
if self.cfg['tagged_layout']:
if LooseVersion(self.version) >= LooseVersion("1.69.0") or not self.cfg['single_threaded']:
# Link tagged multi threaded libs as the default libs
lib_glob = 'lib*-mt*.*'
mt_replace = re.compile(r'-[^.]*\.')
for source_lib in glob.glob(os.path.join(self.installdir, 'lib', lib_glob)):
target_lib = mt_replace.sub('.', os.path.basename(source_lib))
symlink(os.path.basename(source_lib), os.path.join(self.installdir, 'lib', target_lib),
use_abspath_source=False)
def sanity_check_step(self):
"""Custom sanity check for Boost."""
shlib_ext = get_shared_lib_ext()
custom_paths = {
'files': [],
'dirs': ['include/boost']
}
if self.cfg['only_python_bindings']:
for pyver in self.pyvers:
pymajorver = pyver.split('.')[0]
pyminorver = pyver.split('.')[1]
if LooseVersion(self.version) >= LooseVersion("1.67.0"):
suffix = '%s%s' % (pymajorver, pyminorver)
elif int(pymajorver) >= 3:
suffix = pymajorver
else:
suffix = ''
custom_paths['files'].append(os.path.join('lib', 'libboost_python%s.%s' % (suffix, shlib_ext)))
else:
custom_paths['files'].append(os.path.join('lib', 'libboost_system.%s' % shlib_ext))
if self.cfg['tagged_layout']:
lib_mt_suffix = '-mt'
# MT libraries gained an extra suffix from v1.69.0 onwards
if LooseVersion(self.version) >= LooseVersion("1.69.0"):
if get_cpu_architecture() == AARCH64:
lib_mt_suffix += '-a64'
elif get_cpu_architecture() == POWER:
lib_mt_suffix += '-p64'
else:
lib_mt_suffix += '-x64'
custom_paths['files'].append(os.path.join('lib', 'libboost_thread%s.%s' % (lib_mt_suffix, shlib_ext)))
if self.cfg['boost_mpi']:
custom_paths['files'].append(os.path.join('lib', 'libboost_mpi.%s' % shlib_ext))
if self.cfg['tagged_layout']:
custom_paths['files'].append(os.path.join('lib', 'libboost_mpi%s.%s' % (lib_mt_suffix, shlib_ext)))
super(EB_Boost, self).sanity_check_step(custom_paths=custom_paths)
def make_module_extra(self):
"""Set up a BOOST_ROOT environment variable to e.g. ease Boost handling by cmake"""
txt = super(EB_Boost, self).make_module_extra()
if not self.cfg['only_python_bindings']:
txt += self.module_generator.set_environment('BOOST_ROOT', self.installdir)
return txt
|
gpl-2.0
| 8,498,834,797,141,491,000
| 45.785146
| 120
| 0.589806
| false
| 3.856987
| true
| false
| false
|
pacoqueen/ginn
|
ginn/formularios/partes_de_fabricacion_bolsas.py
|
1
|
117840
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2020 Francisco José Rodríguez Bogado, #
# Diego Muñoz Escalante. #
# (pacoqueen@users.sourceforge.net, escalant3@users.sourceforge.net) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
"""
###################################################################
# # partes_de_fabricacion_bolsas.py - Bagging production reports (cement fibre).
###################################################################
# NOTES:
##
# ----------------------------------------------------------------
##
###################################################################
# Changelog:
# 5 May 2009 -> Started
##
###################################################################
# NOTES:
##
###################################################################
"""
# pylint: disable=import-error,too-many-lines,wrong-import-position
from __future__ import print_function
import time # noqa
import datetime # noqa
import gtk # noqa
import pygtk # noqa
pygtk.require('2.0')
from ventana import Ventana # noqa
from formularios import utils # noqa
from formularios.reports import mandar_a_imprimir_con_ghostscript # noqa
from framework import pclases # noqa
import mx.DateTime # noqa
from informes import geninformes # noqa
# pylint: disable=redefined-builtin
from utils import _float as float # noqa
from ventana_progreso import VentanaActividad, VentanaProgreso # noqa
from partes_de_fabricacion_balas import verificar_solapamiento # noqa
from partes_de_fabricacion_balas import entran_en_turno # noqa
from partes_de_fabricacion_rollos import descontar_material_adicional # noqa
try:
from api import murano
MURANO = True
except ImportError:
MURANO = False
# pylint:disable=unused-argument
def copy2(entry1, evento, entry2, sumar=0):
"""
Simply copies the contents of entry1 into entry2.
If sumar is non-zero, it tries to convert the contents of entry1 to
an integer and writes it into the second entry, as a string, after
adding the number in question (modulo 24).
"""
# This only exists to save typing the end date on 2 out of every 3
# reports.
if not sumar:
entry2.set_text(entry1.get_text())
else:
# And now the hour. Less typing, more speed.
try:
num = int(entry1.get_text())
except (ValueError, TypeError):
entry2.set_text(entry1.get_text())
else:
entry2.set_text(str((num + sumar) % 24))
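# Example (sketch): connected with sumar=8 as done below, the hour wraps
# modulo 24, so "14" becomes "22" and "20" becomes "4"; this prefills the
# end time eight hours after the start time.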
MEMENTO_MORI = {'tipo': None,
'que_imprimir': None}  # Label type and what to print
# (box, pallet or pallet + boxes) until the window is closed or the report changes.
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class PartesDeFabricacionBolsas(Ventana):
"""
Class encapsulating the production report window for bagged fibre.
"""
def __init__(self, objeto=None, usuario=None):
# pylint: disable=invalid-name,consider-using-ternary
self.SHOW_PALES_COMPLETOS = False  # If True, boxes and bags are also
# loaded into the treeview. Probably much slower.
self.NIVEL_POR_LOTES = 2  # Minimum level (or maximum, depending on
# how you look at it) required to create pallets by batch.
self.objeto = objeto
if not isinstance(usuario, pclases.Usuario):
try:
usuario = pclases.Usuario.selectBy(usuario=usuario)[0]
except IndexError:
usuario = None
self.usuario = usuario
self.producto = None  # Product linked to the report.
self.__lecturaescritura = objeto and objeto.id or None
if usuario:
nombreventana = "partes_de_fabricacion_bolsas.py"
try:
ventana = pclases.Ventana.selectBy(fichero=nombreventana)[0]
self.__permisos = usuario.get_permiso(ventana)
except IndexError:
txt = "WARNING: partes_de_fabricacion_bolsas.py::__init__ -> "\
"No se pudieron determinar permisos de %s para la venta"\
"na %s." % (self.usuario.usuario, nombreventana)
print(txt)
self.logger.error(txt)
else:
self.__lecturaescritura = (self.__permisos.escritura
and self.objeto and self.objeto.id
or None)
else:
class FakePermisos:
# pylint: disable=too-few-public-methods
"""Clase para habilitar temporalmente permisos."""
def __init__(self):
self.nuevo = True
self.lectura = self.escritura = self.permiso = self.nuevo
self.__permisos = FakePermisos()
Ventana.__init__(self, 'partes_de_fabricacion_bolsas.glade', objeto,
usuario=usuario)
connections = {'b_salir/clicked': self._salir,
'ventana/delete_event': self._salir,
'b_add_empleado/clicked': self.add_empleado,
'b_drop_empleado/clicked': self.drop_empleado,
"b_partida/clicked": self.seleccionar_partida,
"b_producto/clicked": self.seleccionar_producto,
'b_actualizar/clicked': self.actualizar_ventana,
'b_guardar/clicked': self.guardar,
'b_nuevo/clicked': self.crear_nuevo_partedeproduccion,
'b_borrar/clicked': self.borrar_parte,
'b_buscar/clicked': self.buscar_partedeproduccion,
'ch_bloqueado/clicked': self.bloquear,
'b_add_consumo/clicked': self.add_consumo,
'b_add_bigbag/clicked': self.add_bigbag,
'b_drop_consumo/clicked': self.drop_consumo,
'b_add_incidencia/clicked': self.add_incidencia,
'b_drop_incidencia/clicked': self.drop_incidencia,
'b_add_pale/clicked': self.add_pale,
'b_drop_pale/clicked': self.drop_pale,
'b_etiquetar/clicked': self.etiquetar,
'b_next/clicked': self.siguiente,
'b_back/clicked': self.anterior
}
self.add_connections(connections)
self.wids['e_fechaini'].connect("key-release-event",
copy2,
self.wids['e_fechafin'])
self.wids['e_horaini'].connect("key-release-event",
copy2,
self.wids['e_horafin'],
8)
try:
linea = pclases.LineaDeProduccion.select(
pclases.LineaDeProduccion.q.nombre.contains(
'de embolsado'))[0]
except IndexError:
print("WARNING: La línea de embolsado no está correctamente "
"dada a de alta. La creo sobre la marcha.")
linea = pclases.LineaDeProduccion(
formulacion=None,
nombre="Línea de embolsado",
descripcion="Línea de embolsado de fibra de cemento.",
observaciones="Produce bolsas de fibra de cemento a partir"
" de bigbags fabricados en la línea de fibra.")
pclases.Auditoria.nuevo(linea, self.usuario, __file__)
self.linea = linea
self.formulacion = linea.formulacion
self.inicializar_ventana()
if self.objeto is None:
self.ir_a_primero()
else:
self.ir_a(objeto)
gtk.main()
def anterior(self, boton=None):
"""Va al parte de embolsado anterior al actual."""
if self.objeto:
anterior = self.objeto.anterior()
if anterior:
self.objeto = anterior
# Reset label preferences.
global MEMENTO_MORI # pylint:disable=global-statement
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
else:
utils.dialogo_info(
titulo="NO MÁS PARTES",
texto="No hay partes de producción anteriores "
"al actual.",
padre=self.wids['ventana'])
def siguiente(self, boton=None):
"""Va al siguiente parte de producción."""
if self.objeto:
siguiente = self.objeto.siguiente()
if siguiente:
self.objeto = siguiente
# Reset label preferences.
global MEMENTO_MORI # pylint: disable=global-statement
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
else:
utils.dialogo_info(
titulo="NO MÁS PARTES",
texto="No hay partes de producción posteriores "
"al actual.",
padre=self.wids['ventana'])
# --------------- Helper functions ------------------------------
def leer_valores_ventana(self):
"""
Returns a dictionary with the object's field names as keys and the
window values, already parsed, as values.
"""
res = {}
try:
fecha = utils.parse_fecha(self.wids['e_fechaini'].get_text())
except (TypeError, ValueError):
fecha = mx.DateTime.localtime()
# self.wids['e_fechaini'].set_text(utils.str_fecha(fecha))
# Which part of READ didn't you understand? Why change the entry?
res["fecha"] = fecha
try:
hora = utils.parse_hora(self.wids['e_horaini'].get_text())
except (TypeError, ValueError):
hora = mx.DateTime.DateTimeDelta(0.0)
res["horainicio"] = hora
try:
hora = utils.parse_hora(self.wids['e_horafin'].get_text())
except (TypeError, ValueError):
hora = mx.DateTime.DateTimeDelta(0.0)
res["horafin"] = hora
res["prodestandar"] = 0 # No se usa
res["merma"] = 0.0 # Tampoco se usa
res["bloqueado"] = self.wids['ch_bloqueado'].get_active()
buff = self.wids['txt_observaciones'].get_buffer()
txt = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
res["observaciones"] = txt
res["fechahorainicio"] = res["fecha"] + res["horainicio"]
try:
fechafin = utils.parse_fecha(self.wids['e_fechafin'].get_text())
except (TypeError, ValueError):
fechafin = mx.DateTime.localtime()
# self.wids['e_fechafin'].set_text(utils.str_fecha(fechafin))
res["fechahorafin"] = fechafin + res["horafin"]
codpartida = self.wids['e_partida'].get_text()
try:
partida = pclases.PartidaCem.selectBy(codigo=codpartida)[0]
res["partidaCemID"] = partida.id
except IndexError:
print("partes_de_fabricacion_bolsas.py::leer_valores_ventana -> "
"No se encontró partida con código '%s'. Probablemente "
"no se haya terminado de cargar la ventana." % codpartida)
partida = None
res["partidaCemID"] = None
return res
def es_diferente(self):
"""
Returns True if the information on screen differs from the object
in memory.
"""
partedeproduccion = self.objeto
if partedeproduccion is None:
return False  # If there is no active partedeproduccion, report
# no change with respect to the window
condicion = True
valores = self.leer_valores_ventana()
for campo in valores:
valor_objeto = getattr(self.objeto, campo)
# The new psycopg2 returns datetimes. Aaaargh!
if "hora" in campo and "fecha" not in campo:
valor_objeto = utils.DateTime2DateTimeDelta(valor_objeto)
# X X X
valor_ventana = valores[campo]
condicion = condicion and valor_ventana == valor_objeto
if not condicion:
if pclases.DEBUG:
print("partes_de_fabricacion_bolsas.py::es_diferente -> ",
campo,
"ventana", type(valor_ventana), valor_ventana,
"objeto", valor_objeto, type(valor_objeto))
break
return not condicion  # condicion checks that they are equal
def colorear_tabla_empleados(self):
"""
Prepares and attaches the function that highlights employees whose
worked hours are below or above the duration of the report.
"""
def cell_func(column, cell, model, itr, numcol):
idht = model[itr][-1]
horastrab = pclases.HorasTrabajadas.get(idht)
duracion_parte = self.objeto.get_duracion()
ht_horas = horastrab.horas
try:
supera_parte = ht_horas > duracion_parte
except TypeError:  # horastrab.horas is datetime.time
ht_horas = utils.DateTime2DateTimeDelta(ht_horas)
supera_parte = ht_horas > duracion_parte
if supera_parte:
color = "orange"
elif ht_horas < duracion_parte:
color = "red"
else:
color = "black"
cell.set_property("foreground", color)
cols = self.wids['tv_empleados'].get_columns()
numcol = len(cols) - 1
column = cols[numcol]
cells = column.get_cell_renderers()
for cell in cells:
column.set_cell_data_func(cell, cell_func, numcol)
def inicializar_ventana(self):
"""
Initialises the window controls, setting their default values,
disabling the unnecessary ones, filling the combos, formatting the
TreeView -if any-...
"""
# Initially NOTHING is shown. The user is only given the option
# to search or create a new one.
self.activar_widgets(False)
self.wids['b_actualizar'].set_sensitive(False)
self.wids['b_guardar'].set_sensitive(False)
self.wids['b_nuevo'].set_sensitive(True)
self.wids['b_buscar'].set_sensitive(True)
# Initialisation of the remaining widgets:
# (Name, type, editable, sortable, searchable, update_function)
cols = (('Nº. de Palet', 'gobject.TYPE_STRING',
False, True, True, None),
('# cajas/palé', 'gobject.TYPE_STRING',
False, True, False, None),
('# bolsas/caja', 'gobject.TYPE_STRING',
True, True, False, self.cambiar_numbolsas),
('Peso neto', 'gobject.TYPE_STRING',
False, True, False, None),
('B', "gobject.TYPE_BOOLEAN",
True, True, False, self.pasar_pale_a_B),
('Observaciones', 'gobject.TYPE_STRING',
True, False, False, self.cambiar_observaciones),
('PUID', 'gobject.TYPE_STRING', False, False, False, None)
)
utils.preparar_treeview(self.wids['tv_produccion'], cols)
self.wids['tv_produccion'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
self.wids['tv_produccion'].add_events(gtk.gdk.BUTTON_PRESS_MASK)
cols = (('Código', 'gobject.TYPE_INT64', False, True, False, None),
('Nombre', 'gobject.TYPE_STRING', False, True, False, None),
('Apellidos', 'gobject.TYPE_STRING', False, True, True, None),
('Horas', 'gobject.TYPE_STRING', True, True, False,
self.cambiar_horas_trabajadas),
('ID', 'gobject.TYPE_INT64', False, False, False, None))
utils.preparar_listview(self.wids['tv_empleados'], cols)
self.colorear_tabla_empleados()
cols = (('Producto', 'gobject.TYPE_STRING', False, True, True, None),
('Cantidad', 'gobject.TYPE_STRING', False, True, False, None),
('ID', 'gobject.TYPE_INT64', False, False, False, None))
utils.preparar_listview(self.wids['tv_consumos'], cols)
self.wids['tv_consumos'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
cols = (("Tipo de incidencia", "gobject.TYPE_STRING",
False, True, False, None),
("Hora de inicio", "gobject.TYPE_STRING",
False, True, False, None),
("Hora de finalización", "gobject.TYPE_STRING",
False, True, False, None),
("Observaciones", "gobject.TYPE_STRING",
False, True, True, None),
("ID", "gobject.TYPE_INT64", False, False, False, None))
utils.preparar_listview(self.wids['tv_incidencias'], cols)
self.wids['tv_incidencias'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
self.wids['ventana'].maximize()
def cambiar_horas_trabajadas(self, cell, path, newtext):
"""Cambia las horas trabajadas por el operario seleccionado."""
newtext = newtext.replace(".", ":").replace(",", ":")
if ":" not in newtext:
if len(newtext) < 4:
newtext = ("0" * (4 - len(newtext))) + newtext
newtext = "%s:%s" % (newtext[:-2], newtext[-2:])
model = self.wids['tv_empleados'].get_model()
iid = model[path][-1]
horastrab = pclases.HorasTrabajadas.get(iid)
try:
try:
dtdelta = mx.DateTime.DateTimeDelta(
0,
float(newtext.split(':')[0]),
float(newtext.split(':')[1]),
0)
except IndexError:
dtdelta = mx.DateTime.DateTimeDelta(0, int(newtext), 0)
newtext = utils.str_hora_corta(dtdelta)
if dtdelta > self.objeto.get_duracion():
utils.dialogo_info(
titulo="TIEMPO INCORRECTO",
texto="El tiempo trabajado no puede superar la\n"
"duración del parte de producción.",
padre=self.wids['ventana'])
return
horastrab.horas = newtext
horastrab.sync()
horastrab.syncUpdate()
model[path][3] = horastrab.horas.strftime('%H:%M')
except (ValueError, TypeError):
utils.dialogo_info(
titulo="ERROR",
texto='El texto "%s" no representa el formato horario.' % (
newtext),
padre=self.wids['ventana'])
def activar_widgets(self, sens):
"""
Enables or disables (sensitive=True/False) all the window widgets
that depend on the displayed object.
Input: sens must be True or False; in any case it is evaluated
as a boolean.
"""
sens = sens and ((self.usuario and self.usuario.nivel <= 2)
or not self.objeto.bloqueado or not self.usuario)
if self.objeto:
sens = sens or self.objeto.id == self.__lecturaescritura
wids = ('hbox1', 'hbox2', 'hbox3', 'tv_produccion', 'hbox7',
'tv_incidencias', 'hbox8', 'tv_consumos', 'hbox9',
'table1', 'hbox6')
for wid in wids:
self.wids[wid].set_sensitive(sens)
if self.usuario and self.usuario.nivel > 3:
# Do not allow (un)locking.
self.wids['ch_bloqueado'].set_sensitive(False)
# if self.usuario:
# self.wids['b_partida'].set_sensitive(
# sens and self.usuario.nivel < 3)
def ir_a_primero(self):
"""
Meant to offer creating a new report, so a report can be started
quickly as soon as the window opens; the prompt is currently
disabled (nuevo = False below), so it goes to the _last_ record
of the table instead.
"""
nuevo = False
if nuevo:
self.crear_nuevo_partedeproduccion(None)
else:
partedeproduccion = self.objeto
try:
if partedeproduccion is not None:
partedeproduccion.notificador.desactivar()
# Cancel the update notification of the report that stops
# being the active one.
partesdeproduccion = pclases.ParteDeProduccion.select(
pclases.ParteDeProduccion.q.partidaCemID != None) # noqa
partesdeproduccion = partesdeproduccion.orderBy("-id")
partedeproduccion = partesdeproduccion[0]
partedeproduccion.notificador.activar(self.aviso_actualizacion)
# Enable the notification
except IndexError:
partedeproduccion = None
self.objeto = partedeproduccion
self.actualizar_ventana()
def refinar_resultados_busqueda(self, resultados):
"""
Shows all the records in "resultados" in a results window.
Returns the id (first column of the results window) of the
selected row, or None if it was cancelled.
"""
filas_res = []
for result in resultados:
filas_res.append((result.id,
utils.str_fecha(result.fecha),
utils.str_hora_corta(result.horainicio),
utils.str_hora_corta(result.horafin),
"CLIC PARA VER",
result.observaciones))
idpartedeproduccion = utils.dialogo_resultado(
filas_res,
titulo='Seleccione parte de línea de envasado',
cabeceras=('ID Interno',
'Fecha',
'Hora inicio',
'Hora fin',
'Partida',
'Observaciones'),
func_change=self.mostrar_info_parte,
padre=self.wids['ventana'])
if idpartedeproduccion < 0:
res = None
else:
res = idpartedeproduccion
return res
def mostrar_info_parte(self, treev):
"""Rellena la información general del parte."""
model, itr = treev.get_selection().get_selected()
if itr is not None and model[itr][-2] == "CLIC PARA VER":
parte = pclases.ParteDeProduccion.get(model[itr][0])  # In result
# dialogs the ID comes reversed.
if parte.es_de_bolsas() and parte.articulos:
# partida=parte.articulos[0].bolsa.caja.pale.partidaCem.codigo
partida = parte.partidaCem.codigo
else:
partida = 'VACIO'
producto = (parte.articulos != []
and parte.articulos[0].productoVenta.nombre or 'VACÍO')
model[itr][-2] = "%s (%s)" % (partida, producto)
def rellenar_widgets(self):
"""
Loads the information of the current partedeproduccion into
the widgets.
There is no check for != None, so be careful not to call this
function in that case.
"""
if not self.objeto:
self.activar_widgets(False)
return
partedeproduccion = self.objeto
self.wids['ch_bloqueado'].set_active(self.objeto.bloqueado)
# Global information:
if self.objeto.articulos != []:
self.producto = self.objeto.articulos[0].productoVenta
# And if there is none, keep using the previous one.
self.rellenar_datos_producto(self.producto)
self.wids['e_fechaini'].set_text(
utils.str_fecha(partedeproduccion.fechahorainicio))
self.wids['e_fechafin'].set_text(
utils.str_fecha(partedeproduccion.fechahorafin))
self.wids['e_horaini'].set_text(
partedeproduccion.horainicio.strftime('%H:%M'))
self.wids['e_horafin'].set_text(
partedeproduccion.horafin.strftime('%H:%M'))
self.wids['e_duracion'].set_text(
partedeproduccion.get_duracion().strftime('%H:%M'))
self.wids['txt_observaciones'].get_buffer().set_text(
partedeproduccion.observaciones)
self.rellenar_estadisticas()
# Detail information:
try:
e_partida = self.objeto.partidaCem.codigo
mostrar_mensaje_correccion_partidaCem = False
except AttributeError:
self.objeto._corregir_partidaCem_nula()
e_partida = self.objeto.partidaCem.codigo
mostrar_mensaje_correccion_partidaCem = True
self.wids['e_partida'].set_text(e_partida)
self.rellenar_tabla_empleados()
self.rellenar_tabla_bolsas()
self.rellenar_tabla_incidencias()
self.rellenar_tabla_consumos()
self.objeto.make_swap()
self.check_permisos()
if mostrar_mensaje_correccion_partidaCem:
utils.dialogo_info(
titulo="PARTIDA DE CEMENTO CORREGIDA",
texto="La partida de cemento del parte actual contenía "
"\nun error o era nula.\n"
"Se ha corregido automáticamente. Por favor, \n"
"verifique que se corresponde con la partida real.",
padre=self.wids['ventana'])
self.objeto.observaciones += "\nPartida corregida automáticamente."
self.wids['b_back'].set_sensitive(
self.objeto and self.objeto.anterior() and 1 or 0)
self.wids['b_next'].set_sensitive(
self.objeto and self.objeto.siguiente() and 1 or 0)
def rellenar_estadisticas(self):
partedeproduccion = self.objeto
# Statistics:
# numbolsas = len(self.objeto.articulos)
# numbolsas = pclases.ParteDeProduccion._queryOne("""
# SELECT COUNT(id)
# FROM articulo
# WHERE articulo.parte_de_produccion_id = %d""" % self.objeto.id)[0]
numbolsas = sum(
[a.caja.numbolsas for a in partedeproduccion.articulos])
self.wids['e_prod_bolsas'].set_text(str(numbolsas))
# kilos = sum([a.peso for a in self.objeto.articulos])
# Optimising (as the saying goes):
try:
# kilos = (len(self.objeto.articulos)
kilos = (numbolsas
* self.producto.camposEspecificosBala.gramosBolsa/1000.0)
except AttributeError:
kilos = 0.0
self.wids['e_prod_kg'].set_text(utils.float2str(kilos, autodec=True))
cajas = set([a.caja for a in self.objeto.articulos])  # NB: python > 2.3
numcajas = len(cajas)
# Optimisation:
# sqlpales = pclases.Pale.select(pclases.AND(
# pclases.Articulo.q.parteDeProduccionID == self.objeto.id,
# pclases.Articulo.q.bolsaID == pclases.Bolsa.q.id,
# pclases.Bolsa.q.cajaID == pclases.Caja.q.id,
# pclases.Caja.q.paleID == pclases.Pale.q.id))
# pales = set([p for p in sqlpales]) # Ojo: python > 2.3
# numcajas = sum(p.numcajas for p in pales)
self.wids['e_prod_cajas'].set_text(str(numcajas))
try:
bolsasminuto = str(numbolsas
/ partedeproduccion.get_duracion().minutes)
except ZeroDivisionError:
bolsasminuto = "inf."
self.wids['e_bolsasminuto'].set_text(bolsasminuto)
try:
kgh = utils.float2str(kilos/partedeproduccion.get_duracion().hours,
autodec=True)
except ZeroDivisionError:
kgh = "inf."
self.wids['e_kgh'].set_text(kgh)
pales = set([a.caja.pale
for a in self.objeto.articulos])  # NB: python > 2.3
# numpales = len(pales)
# Optimising:
# numpales = sqlpales.count() # Without groupBy you get as many as bags
numpales = len(pales)
self.wids['e_prodpales'].set_text(str(numpales))
try:
activo = partedeproduccion.get_horas_trabajadas()
except AssertionError:
# pylint: disable=protected-access
partedeproduccion._corregir_duracion_paradas()
activo = partedeproduccion.get_horas_trabajadas()
self.wids['e_activo'].set_text(activo.strftime("%H:%M"))
pasivo = partedeproduccion.get_horas_paradas()
self.wids['e_pasivo'].set_text(pasivo.strftime("%H:%M"))
self.wids['e_bbconsumidos'].set_text(
utils.float2str(len(self.objeto.bigbags), autodec=True))
self.wids['e_kgconsumidos'].set_text(
utils.float2str(sum([bigbag.pesobigbag for bigbag
in self.objeto.bigbags]),
autodec=True))
try:
palesa = len(self.objeto.partidaCem.get_pales_a())
palesb = len(self.objeto.partidaCem.get_pales_b())
except AttributeError:
palesa = palesb = 0
self.wids['e_palesa'].set_text(repr(palesa))
self.wids['e_palesb'].set_text(repr(palesb))
def rellenar_tabla_incidencias(self):
"""Rellena la tabla de paradas del parte."""
parte = self.objeto
treev = self.wids['tv_incidencias']
if parte is not None:
model = treev.get_model()
treev.set_model(None)
model.clear()
incidencias = pclases.Incidencia.select(
pclases.Incidencia.q.parteDeProduccionID == self.objeto.id,
orderBy="horainicio")
for incidencia in incidencias:
model.append((incidencia.tipoDeIncidencia.descripcion,
utils.str_fechahora(incidencia.horainicio),
utils.str_fechahora(incidencia.horafin),
incidencia.observaciones,
incidencia.id))
treev.set_model(model)
def rellenar_tabla_consumos(self):
"""
Fills the consumption table of the report.
"""
parte = self.objeto
if parte is not None:
model = self.wids['tv_consumos'].get_model()
self.wids['tv_consumos'].set_model(None)
model.clear()
consumos = parte.consumos[:]
try:
consumos.sort(lambda c1, c2: c1 is not None and c2 is not None
and int(c1.id - c2.id) or 0)
except TypeError as msg:
self.logger.error("partes_de_fabricacion_bolsas.py (rellenar"
"_tabla_consumos): Error ordenando consumo"
"s (%s):\n%s" % (msg, consumos))
for c in parte.consumos:
if c.productoCompraID is not None:
unidad = c.productoCompra.unidad
producto = c.productoCompra.descripcion
else:
unidad = ""
producto = ""
model.append((producto,
"%s %s" % (utils.float2str(c.cantidad), unidad),
c.id))
for bigbag in parte.bigbags:  # Cement fibre consumption:
str_bb = "{} ({}) {}".format(bigbag.codigo,
bigbag.articulo.productoVenta.nombre,
bigbag.api and "✔" or "✘")
str_bb = geninformes.sanitize_unicode(str_bb)
model.append((str_bb,
utils.float2str(bigbag.pesobigbag) + " kg",
-bigbag.id))
self.wids['tv_consumos'].set_model(model)
def check_permisos(self):
if self.__permisos.escritura:  # Can modify the reports:
self.activar_widgets(True)
else:  # Can only modify the report they created as new (if
# they actually created one)
if self.__lecturaescritura == (self.objeto.id or
not self.objeto.bloqueado):
self.activar_widgets(True)
else:
self.activar_widgets(False)
# Check this one first because it enables or disables all the
# buttons, even those depending on the other two permissions.
self.wids['b_buscar'].set_sensitive(self.__permisos.lectura)
self.wids['b_nuevo'].set_sensitive(self.__permisos.nuevo)
def rellenar_tabla_bolsas(self):
model = self.wids['tv_produccion'].get_model()
model.clear()
# detallesdeproduccion = self.objeto.articulos[:]
# detallesdeproduccion.sort(lambda x, y:
# utils.orden_por_campo_o_id(x,y,"fechahora"))
# detallesdeproduccion = self.objeto.articulos
detallesdeproduccion = pclases.Articulo.select(
pclases.Articulo.q.parteDeProduccionID == self.objeto.id,
orderBy="id")
# TreeView rows
pales = {}  # Dictionaries of parent (boxes) and grandparent (pallets) nodes.
cajas = {}
self.wids['tv_produccion'].freeze_child_notify()
self.wids['tv_produccion'].set_model(None)
for articulo in detallesdeproduccion:
pale = articulo.caja.pale
if pale not in pales:  # Insert pallet.
es_clase_b = pale.es_clase_b()
pale_api = pale.api
if pale_api is None:
volcado = ""
elif pale_api:
volcado = " ✔"
else:
volcado = " ✘"
volcado = geninformes.sanitize_unicode(volcado)
peso_neto = sum([c.articulo.peso_neto for c in pale.cajas])
numcajas = len(pale.cajas) # = pale.numcajas
pales[pale] = model.append(
None,
("Palé " + pale.codigo + volcado,
numcajas,
pale.numbolsas,
peso_neto,
es_clase_b,
pale.observaciones,
pale.get_puid()))
if not self.SHOW_PALES_COMPLETOS:
continue
caja = articulo.caja
if caja not in cajas:
if caja.articulo.api is None:
volcado = ""
elif caja.articulo.api:  # mirror of the pallet branch above
volcado = " ✔"
else:
volcado = " ✘"
volcado = geninformes.sanitize_unicode(volcado)
cajas[caja] = model.append(
pales[pale],
("Caja " + caja.codigo + volcado,
1,  # one box per box :)
caja.numbolsas,
caja.peso,
es_clase_b,
caja.observaciones,
caja.get_puid()))
# pesogramos = "%s gr" % utils.float2str(
# bolsa.peso * 1000, autodec = True)
# model.append(cajas[bolsa.caja],
# ("Bolsa " + bolsa.codigo,
# pesogramos,
# bolsa.claseb,
# bolsa.observaciones,
# bolsa.get_puid()))
self.wids['tv_produccion'].set_model(model)
self.wids['tv_produccion'].thaw_child_notify()
def seleccionar_producto(self, boton):
"""
Selects the product of the current report.
If it already has production, changes the product of the whole
production of the complete batch.
"""
a_buscar = utils.dialogo_entrada(titulo="BUSCAR PRODUCTO",
texto="Introduzca el texto a buscar:",
padre=self.wids['ventana'])
if a_buscar is not None:
pvs = utils.buscar_productos_venta(a_buscar)
pvs = [p for p in pvs if p.es_bolsa()]
if len(pvs):
if len(pvs) == 1:
pv = pvs[0]
elif len(pvs) > 1:
idpv = self.refinar_resultados_busqueda_producto(pvs)
if idpv:
pv = pclases.ProductoVenta.get(idpv)
else:
pv = None
if pv:
try:
pcem = self.objeto.partidaCem
producto_anterior = pcem.pales[0].productoVenta
except IndexError:
producto_anterior = None
if producto_anterior == pv:
return
if not producto_anterior:
producto_anterior = pv
if (producto_anterior.camposEspecificosBala.bolsasCaja !=
pv.camposEspecificosBala.bolsasCaja or
producto_anterior.camposEspecificosBala.cajasPale !=
pv.camposEspecificosBala.cajasPale):
utils.dialogo_info(
titulo="PRODUCTO INCOMPATIBLE",
texto="Seleccione un producto con el mismo"
"número de bolsas por caja\no elimine primero"
" la producción actual, cree una nueva "
"partida\n y vuelva a crearla con el nuevo "
"producto.",
padre=self.wids['ventana'])
return
titulo = "¿CAMBIAR PRODUCTO AL LOTE COMPLETO?"
texto = "Va a cambiar la producción del lote completo de"\
" %s\na %s. ¿Está seguro?\n\n"\
"(Puede durar bastante. "\
"No interrumpa el proceso)" % (
producto_anterior
and producto_anterior.descripcion or "",
pv.descripcion)
padre = self.wids['ventana']
if (not self.objeto.partidaCem.pales
or utils.dialogo(texto, titulo, padre)):
ceb = pv.camposEspecificosBala
for pale in self.objeto.partidaCem.pales:
pale.numcajas = ceb.cajasPale
pale.numbolsas = ceb.bolsasCaja
pale.sync()
for caja in pale.cajas:
caja.numbolsas = ceb.bolsasCaja
caja.peso = (ceb.bolsasCaja
* ceb.gramosBolsa / 1000)
a = caja.articulo
a.pesoReal = (caja.peso
+ pclases.PESO_EMBALAJE_CAJAS)
a.productoVenta = pv
a.syncUpdate()
self.producto = pv
self.rellenar_datos_producto(pv)
self.actualizar_ventana()
def rellenar_datos_producto(self, producto):
"""
From the received article, fills in the header information of
the form (width, etc...) based on the bag data.
It also checks whether the report has a manufacturing sheet. If
it does not, it sets the one of the received product.
"""
if producto is None:
self.wids['e_producto'].set_text('')
else:
nomproducto = "%s. Corte: %d mm. %d gr/bolsa" % (
producto.descripcion,
producto.camposEspecificosBala.corte,
producto.camposEspecificosBala.gramosBolsa)
self.wids['e_producto'].set_text(nomproducto)
# --------------- Event handlers ----------------------------
def add_pale(self, wid):
"""
Creates a new pallet with all the boxes and bags it contains.
If it is the first pallet of the batch, asks for the number of
bags that went into the first of the boxes; otherwise, takes the
count from the first pallet of the PARTIDA (it should match the
report, but in case it does not, play it safe and make sure from
the start that all pallets of the same partida are identical).
If the number of bags is below 40 it will be created as class B
by default.
"""
if not MURANO:
utils.dialogo_info(titulo="ERROR DE CONEXIÓN CON MURANO",
texto="No puede crear cajas. Solo consultas.",
padre=self.wids['ventana'])
return
if not self.producto:
utils.dialogo_info(
titulo="SELECCIONE UN PRODUCTO",
texto="Antes debe seleccionar un producto.",
padre=self.wids['ventana'])
return
partidaCem = self.objeto.partidaCem
try:
pale = partidaCem.pales[0]
defecto = pale.numbolsas
except IndexError:
defecto = self.producto.camposEspecificosBala.bolsasCaja
if not defecto:
defecto = pclases.Pale.NUMBOLSAS
texto = "Introduzca el número de bolsas de la primera caja:"
if self.usuario and self.usuario.nivel <= self.NIVEL_POR_LOTES:
texto += "\n\n<small>Si introduce una serie de números se\n"\
"crearán tantos palés como números haya tecleado;\n"\
"cada uno de ellos con las bolsas por caja indicadas."\
"</small>"
numbolsas = utils.dialogo_pedir_rango(
titulo="¿NÚMERO DE BOLSAS?",
texto=texto,
padre=self.wids['ventana'],
valor_por_defecto=defecto,
permitir_repetidos=True)
else:
numbolsas = utils.dialogo_entrada(
titulo="¿NÚMERO DE BOLSAS?",
texto=texto,
padre=self.wids['ventana'],
valor_por_defecto=defecto)
if not numbolsas:
return
if not self.usuario or self.usuario.nivel > self.NIVEL_POR_LOTES:
try:
numbolsas = [int(numbolsas)]
except (ValueError, TypeError):
utils.dialogo_info(titulo="NÚMERO INCORRECTO",
texto='El texto "%s" no es un número.' % (
numbolsas),
padre=self.wids['ventana'])
return
listanumbolsas = numbolsas
if pclases.DEBUG:
print(listanumbolsas)
pales_a_etiquetar = []
productoVenta = self.producto
for numbolsas in listanumbolsas:
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
icont = 0.0
# numcajasdefecto = pclases.Pale.NUMCAJAS
numcajasdefecto = productoVenta.camposEspecificosBala.cajasPale
# 1.- Create the pallet.
numpale, codigo = pclases.Pale.get_next_numpale(numbolsas)
ahora = mx.DateTime.localtime()
pale = pclases.Pale(partidaCem=partidaCem,
numpale=numpale,
codigo=codigo,
fechahora=None,
numbolsas=numbolsas,
numcajas=numcajasdefecto)
try:
pale.fechahora = ahora
except: # noqa
pale.fechahora = datetime.datetime.now()
pclases.Auditoria.nuevo(pale, self.usuario, __file__)
# 2.- Create the boxes.
tot = pale.numcajas
for i in range(pale.numcajas): # @UnusedVariable
numcaja, codigo = pclases.Caja.get_next_numcaja()
vpro.set_valor(icont / tot, "Creando caja %s..." % codigo)
try:
gramos = productoVenta.camposEspecificosBala.gramosBolsa
except AttributeError:
gramos = 0
peso_neto = (gramos * numbolsas) / 1000.0
# peso = peso_bruto = peso_neto + 0.150 + 0.100 # Pallet+cardboard
caja = pclases.Caja(pale=pale,
numcaja=numcaja,
codigo=codigo,
fechahora=None,
peso=peso_neto,
numbolsas=numbolsas)
try:
caja.fechahora = mx.DateTime.localtime()
except: # noqa
caja.fechahora = datetime.datetime.now()
pclases.Auditoria.nuevo(caja, self.usuario, __file__)
articulo = pclases.Articulo(
parteDeProduccion=self.objeto,
caja=caja,
rolloDefectuoso=None,
albaranSalida=None,
productoVenta=self.producto,
bala=None,
rollo=None,
bigbag=None,
almacen=pclases.Almacen.get_almacen_principal(),
rolloC=None,
balaCable=None)
pclases.Auditoria.nuevo(articulo, self.usuario, __file__)
# DONE: In the end it does upload after all. But box by box it
# goes so extremely slowly that it looks like a hang. Besides,
# console output is not enabled, so the error it finally gives
# comes from some debugging print lying around.
# try:
# murano.ops.create_articulo(articulo)
# except IOError:
# pass # Alguna movida con la salida por consola de
# # depuración y no está disponible.
icont += 1
            # 3.- Create the pallet in Murano.
            vpro.set_valor(icont / tot,
                           "Creando palé {}...".format(pale.codigo))
            # murano.ops.create_pale(pale, observaciones="")
            # NOTE: the last artículo is passed because this line's
            # formulation works on COMPLETE pallets.
pales_a_etiquetar.append(pale)
vpro.ocultar()
descontar_material_adicional(self, articulo)
self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
imprimir_etiquetas_pales(pales_a_etiquetar, self.wids['ventana'],
mostrar_dialogo=False)
self.rellenar_tabla_consumos()
self.rellenar_tabla_bolsas()
self.rellenar_estadisticas()
def seleccionar_partida(self, wid):
"""
        Wrapper for cambiar_partida.
"""
self.cambiar_partida(wid)
def _salir(self, wid, event=None):
if (self.__permisos.escritura
and self.objeto
and not self.objeto.bloqueado
and self.objeto.fecha < (mx.DateTime.localtime()
- mx.DateTime.oneDay)
and (not self.usuario or self.usuario.nivel <= 2)):
# Tiene permiso para bloquear el parte
res = utils.dialogo(titulo="DEBE VERIFICAR EL PARTE",
texto="Antes de cerrar el parte debe verifi"
"carlo.\n¿Marcar como verificado?",
padre=self.wids['ventana'],
bloq_temp=["Sí"])
self.objeto.bloqueado = res
self.wids['ch_bloqueado'].set_active(self.objeto.bloqueado)
# return True
if not self.salir(wid, mostrar_ventana=event is None):
# Devuelve True cuando se cancela el cierre de la ventana (por
# temas de event-chain).
try:
padre = self.wids['ventana']
except KeyError:
padre = None
vpro = VentanaActividad(texto="Comprobando disparo de alertas...",
padre=padre)
vpro.mostrar()
if not self.linea:
linea = pclases.LineaDeProduccion.select(
pclases.LineaDeProduccion.q.nombre.contains('de embolsado'))
self.linea = linea
vpro.mover()
if self.linea is None:
txt = "WARNING: La línea de embolsado no está correctamente "\
"dada de alta."
print(txt)
self.logger.warning(txt)
else:
vpro.mover()
formulacion = self.linea.formulacion
if not formulacion:
# TODO: Dar mensaje de error por logger
pass
else:
for ca in [ca_con_p for ca_con_p
in formulacion.consumosAdicionales
if ca_con_p.productoCompra is not None]:
vpro.mover()
                    # Check that no products are below minimum stock:
if (ca.productoCompra.existencias
< ca.productoCompra.minimo):
vpro.mover()
try:
v = pclases.Ventana.select(
pclases.Ventana.q.fichero
== "pedidos_de_compra.py")[0]
except IndexError:
txt = "WARNING: ¡La ventana de pedidos de "\
"compra SE HA PERDIDO!"
print(txt)
self.logger.warning(txt)
mensaje = "El producto %s tiene las existencias "\
"bajo mínimos. Considere hacer un "\
"pedido de compra." % (
ca.productoCompra.descripcion)
for u in [p.usuario
for p in v.permisos if p.nuevo]:
vpro.mover()
u.enviar_mensaje(mensaje)
                    # And check that no stocks are negative:
if ca.productoCompra.existencias < 0:
vpro.mover()
try:
v = pclases.Ventana.select(
pclases.Ventana.q.fichero
== "pedidos_de_compra.py")[0]
except IndexError:
print("WARNING: ¡La ventana de pedidos de "
"compra SE HA PERDIDO!")
self.logger.error(
"partes_de_fabricacion_rollos: ¡La "
"ventana de pedidos de compra "
"SE HA PERDIDO!")
vpro.mover()
mensaje = "El producto %s tiene existencias "
mensaje += "NEGATIVAS. Corrija el error lo antes"
mensaje += " posible." % (
ca.productoCompra.descripcion)
for u in [p.usuario
for p in v.permisos if p.nuevo]:
vpro.mover()
u.enviar_mensaje(mensaje)
vpro.mover()
vpro.ocultar()
def cambiar_observaciones(self, cell, path, newtext):
"""
        Only changes the object's observaciones. It does NOT switch the
        product to B.
"""
model = self.wids['tv_produccion'].get_model()
puid = model[path][-1]
clase, aidi = puid.split(":")
objeto = getattr(pclases, clase).get(int(aidi))
objeto.observaciones = newtext
model[path][5] = newtext
def crear_nuevo_partedeproduccion(self, widget):
"""
        Callback for the b_nuevo button.
        Asks for the basic data needed to create a new object.
        Once inserted in the DB it is made active in the window so that
        the remaining fields not asked for here can be edited.
"""
partedeproduccion = self.objeto
if not utils.dialogo('Se creará un nuevo parte de producción vacío.',
'NUEVO PARTE',
padre=self.wids['ventana']):
return
if partedeproduccion is not None:
partedeproduccion.notificador.desactivar()
partedeproduccion = pclases.ParteDeProduccion(
fecha=mx.DateTime.localtime(),
horainicio=time.struct_time(
time.localtime()[:4] + (0, 0) + time.localtime()[6:]),
horafin=time.struct_time(
time.localtime()[:3] + ((time.localtime()[3]+8) % 24, 0, 0)
+ time.localtime()[6:]),
prodestandar=0,
observaciones='',
bloqueado=False,
partidaCem=pclases.PartidaCem.get_nueva_o_ultima_vacia(),
merma=0.0)
pclases.Auditoria.nuevo(partedeproduccion, self.usuario, __file__)
# pylint: disable=protected-access
partedeproduccion._corregir_campos_fechahora()
self.objeto = partedeproduccion
self.wids['e_partida'].set_text(self.objeto.partidaCem.codigo)
self.add_empleados_calendario()
self.__lecturaescritura = self.objeto.id
self.actualizar_ventana()
self.objeto.notificador.activar(self.aviso_actualizacion)
verificar_solapamiento(partedeproduccion, self.wids['ventana'])
def refinar_resultados_busqueda_producto(self, resultados):
"""
        Shows every record in "resultados" in a results window.
        Returns the id (first column of the results window) of the
        selected row, or None if it was cancelled.
"""
filas_res = []
for result in resultados:
filas_res.append((result.id, result.codigo, result.nombre,
result.descripcion))
idproducto = utils.dialogo_resultado(
filas_res,
titulo='Seleccione producto',
cabeceras=('ID Interno', 'Código', 'Nombre', 'Descripción'),
padre=self.wids['ventana'])
if idproducto < 0:
res = None
else:
res = idproducto
return res
# pylint: disable=too-many-branches,too-many-statements
def buscar_partedeproduccion(self, widget):
"""
        Shows a search window followed by the results. The selected
        object becomes active in the window unless Cancel is pressed in
        the results window.
        PRECONDITION: bagging partes MUST always have a related cement
        partida.
"""
partedeproduccion = self.objeto
a_buscar = utils.dialogo_entrada(
titulo="BUSCAR PARTE",
texto="Introduzca fecha del parte o nombre del producto:",
padre=self.wids['ventana'])
if a_buscar is not None:
try:
if a_buscar != '':
a_buscar = a_buscar.replace("-", "/")
if a_buscar.count('/') == 1:
a_buscar = "%s/%d" % (a_buscar,
mx.DateTime.localtime().year)
if len(a_buscar.split('/')[-1]) == 2:
fecha = time.strptime(a_buscar, '%d/%m/%y')
else:
fecha = time.strptime(a_buscar, '%d/%m/%Y')
# pylint: disable=singleton-comparison
resultados = pclases.ParteDeProduccion.select(
pclases.AND(
pclases.ParteDeProduccion.q.fecha == fecha,
pclases.ParteDeProduccion.q.partidaCemID
!= None)) # noqa
else:
resultados = pclases.ParteDeProduccion.select(
# pylint: disable=singleton-comparison
pclases.ParteDeProduccion.q.partidaCemID
!= None) # noqa
except (TypeError, ValueError):
# pylint: disable=singleton-comparison
producto = pclases.ProductoVenta.select(pclases.AND(
pclases.ProductoVenta.q.nombre.contains(a_buscar),
pclases.ProductoVenta.q.camposEspecificosBalaID
!= None)) # noqa
producto = pclases.SQLtuple(
[p for p in producto if p.es_bolsa()])
resultados = pclases.ParteDeProduccion.select()
# Pongo la barra porque con muchos partes esto tarda
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
i = 0.0
tot = resultados.count()
partes = []
if producto.count() > 1:
idproducto = self.refinar_resultados_busqueda_producto(
producto)
if idproducto is not None:
for p in resultados:
if (p.articulos != []
and p.articulos[0].productoVentaID
== idproducto):
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
else:
vpro.ocultar()
return
elif producto.count() == 1:
for p in resultados:
if (p.articulos != []
and p.articulos[0].productoVentaID
== producto[0].id):
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
else:
for p in resultados:
if p.es_de_bolsas():
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
vpro.ocultar()
resultados = partes
try:
len_resultados = len(resultados)
except TypeError:
len_resultados = resultados.count()
if len_resultados > 1:
# Refinar los resultados
idpartedeproduccion = self.refinar_resultados_busqueda(
resultados)
if idpartedeproduccion is None:
return
resultados = [
pclases.ParteDeProduccion.get(idpartedeproduccion)]
elif len_resultados < 1:
# Sin resultados de búsqueda
utils.dialogo_info(
'SIN RESULTADOS',
'La búsqueda no produjo resultados.\nPruebe a cambiar'
' el texto buscado o déjelo en blanco para ver una '
'lista completa.\n(Atención: Ver la lista completa '
'puede resultar lento si el número de elementos es '
'muy alto)',
padre=self.wids['ventana'])
return
        # # Single result
        # First disable the update callback
        if partedeproduccion is not None:
            partedeproduccion.notificador.desactivar()
        # Make the object current
try:
partedeproduccion = resultados[0]
except IndexError:
utils.dialogo_info(
titulo="ERROR",
texto="Se produjo un error al recuperar la "
"información.\nCierre y vuelva a abrir la "
"aplicación antes de volver a intentarlo.",
padre=self.wids['ventana'])
return
# Y activo la función de notificación:
partedeproduccion.notificador.activar(self.aviso_actualizacion)
self.objeto = partedeproduccion
        # Reset label preferences.
global MEMENTO_MORI
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
def guardar(self, widget):
"""
        Saves the content of the entries and other input widgets into
        the object and syncs it with the DB.
"""
partedeproduccion = self.objeto
valores = self.leer_valores_ventana()
if valores["fechahorainicio"] > valores["fechahorafin"]:
self.wids['e_fechafin'].set_text(
self.wids['e_fechaini'].get_text())
self.wids['e_horafin'].set_text(
self.wids['e_horaini'].get_text())
valores = self.leer_valores_ventana()
ye_olde_fecha = partedeproduccion.fecha
ye_olde_horainicio = utils.str_hora_corta(partedeproduccion.horainicio)
ye_olde_horafin = utils.str_hora_corta(partedeproduccion.horafin)
        # Momentarily disable the notifier
        partedeproduccion.notificador.activar(lambda: None)
        # Update the object's data
for campo in valores:
try:
if (isinstance(valores[campo],
type(mx.DateTime.DateTimeDelta(0))) and
isinstance(getattr(self.objeto, campo),
type(datetime.time()))):
                    # There is a bug with mx under Python 2.7 on Windows,
                    # so this conversion has to be done by hand:
valores[campo] = datetime.time(valores[campo].hour,
valores[campo].minute)
setattr(self.objeto, campo, valores[campo])
except ValueError:
if isinstance(valores[campo], mx.DateTime.DateTimeDeltaType):
setattr(self.objeto, campo,
valores[campo].strftime("%H:%M"))
        # partedeproduccion._corregir_campos_fechahora() <-- Not needed here
        # Check that it does not overlap other partes:
verificar_solapamiento(partedeproduccion,
                               self.wids['ventana'],  # <- This is horrible.
ye_olde_fecha,
ye_olde_horainicio,
ye_olde_horafin)
        # Force the DB update instead of waiting for SQLObject to do it
        # for me:
partedeproduccion.sync()
        # Re-enable the notifier
partedeproduccion.notificador.activar(self.aviso_actualizacion)
self.actualizar_ventana()
self.wids['b_guardar'].set_sensitive(False)
def borrar_parte(self, boton):
if not self.objeto:
return
        if not utils.dialogo('Se va a intentar eliminar el parte actual.\n'
                             'Si hay operaciones complejas implicadas se '
                             'cancelará el borrado.\nDe cualquier forma, no '
                             'se aconseja eliminar ningún parte que ya tenga '
                             'producción relacionada.\n¿Está seguro de '
                             'borrar el parte actual?',
'ELIMINAR PARTE',
padre=self.wids['ventana']):
return
partedeproduccion = self.objeto
partedeproduccion.notificador.desactivar()
try:
partedeproduccion.destroy(ventana=__file__)
except Exception as msgexception:
utils.dialogo_info(
'PARTE NO BORRADO',
'El parte no se eliminó.\nSi tiene bolsas o empleados '
'asociados, trate primero de eliminarlos y vuelva a '
'intentarlo.\n\nExcepción capturada: {}'.format(
msgexception),
padre=self.wids['ventana'])
return
self.ir_a_primero()
def add_incidencia(self, boton):
ii = pclases.TipoDeIncidencia.select()
idincidencia = utils.dialogo_combo(
'SELECCIONE UN TIPO DE INCIDENCIA',
'Seleccione un tipo de incidencia del desplegable inferior',
[(i.id, i.descripcion) for i in ii],
padre=self.wids['ventana'])
if idincidencia is None:
return
utils.dialogo_info(
'HORA INICIO',
'A continuación seleccione la hora de inicio de la '
'incidencia.',
padre=self.wids['ventana'])
horaini = utils.mostrar_hora(time.localtime()[3], 0, 0, 'HORA INICIO')
if not horaini:
return
utils.dialogo_info(
'HORA FIN',
'A continuación seleccione la hora de finalización de la '
'incidencia.',
padre=self.wids['ventana'])
horafin = utils.mostrar_hora(time.localtime()[3], 0, 0, 'HORA FIN')
if not horafin:
return
self.objeto.sync()
horaini = mx.DateTime.DateTimeFrom(year=self.objeto.fecha.year,
month=self.objeto.fecha.month,
day=self.objeto.fecha.day,
hour=int(horaini.split(":")[0]),
minute=int(horaini.split(":")[1]))
horafin = mx.DateTime.DateTimeFrom(year=self.objeto.fecha.year,
month=self.objeto.fecha.month,
day=self.objeto.fecha.day,
hour=int(horafin.split(":")[0]),
minute=int(horafin.split(":")[1]))
if horaini > horafin:
horafin += mx.DateTime.oneDay
        while horaini < self.objeto.fechahorainicio:  # The parte spans
            # midnight and the incidence starts after 12, so it must
            # carry the next day's date.
            horaini += mx.DateTime.oneDay
            horafin += mx.DateTime.oneDay
if entran_en_turno(self.objeto, horaini, horafin):
observaciones = utils.dialogo_entrada(
titulo='OBSERVACIONES',
texto='Introduzca observaciones sobre la incidencia:',
padre=self.wids['ventana'])
if observaciones is None:
return
incidencia = pclases.Incidencia(
tipoDeIncidencia=pclases.TipoDeIncidencia.get(idincidencia),
horainicio=horaini,
horafin=horafin,
parteDeProduccion=self.objeto,
observaciones=observaciones)
pclases.Auditoria.nuevo(incidencia, self.usuario, __file__)
# self.actualizar_ventana()
self.rellenar_tabla_incidencias()
self.rellenar_estadisticas()
else:
utils.dialogo_info(
titulo='ERROR HORARIO',
texto='La franja horaria que ha seleccionado no entra en '
'el turno del parte.',
padre=self.wids['ventana'])
def drop_pale(self, boton):
"""
        Deletes the pallet, its boxes, bags and related consumptions.
"""
if not self.usuario or self.usuario.nivel > 1:
utils.dialogo_info(
titulo="PERMISOS INSUFICIENTES",
texto="No puede borrar artículos fabricados.\n\n"
"Solicite su eliminación por escrito indicando\n"
"claramente los motivos y el código de\n"
"trazabilidad del artículo en cuestión.",
padre=self.wids['ventana'])
return
if not MURANO:
utils.dialogo_info(
titulo="ERROR DE CONEXIÓN CON MURANO",
texto="No puede eliminar cajas. Solo consultas.",
padre=self.wids['ventana'])
return
model, paths = self.wids['tv_produccion'].get_selection(
).get_selected_rows()
if (not paths or
not utils.dialogo(
titulo="¿ESTÁ SEGURO?",
texto="Se van a eliminar %d líneas. ¿Desea continuar?" % (
len(paths)),
padre=self.wids['ventana'])):
return
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
icont = 0
tot = len(paths)
error = False
for path in paths:
puid = model[path][-1]
vpro.set_valor(icont / tot, "Eliminando %s..." % puid)
clase, aidi = puid.split(":")
objeto = getattr(pclases, clase).get(int(aidi))
if isinstance(objeto, pclases.Pale):
try:
articulo = objeto.cajas[0].articulo
except IndexError:
                    # If the pallet is empty, articulo will be None
articulo = None
elif isinstance(objeto, pclases.Caja):
articulo = objeto.articulo
if articulo:
                # NOTE: the last artículo is passed because this line's
                # formulation works on COMPLETE pallets.
descontar_material_adicional(self, articulo, restar=False)
try:
# murano.ops.delete_articulo(objeto)
objeto.destroy_en_cascada(ventana=__file__)
except IOError:
pass # No tenemos consola para sacar los mensajes de debug.
except Exception as msg:
vpro.ocultar()
error = True
utils.dialogo_info(
titulo="ERROR AL ELIMINAR",
texto="Ocurrió un error al eliminar la producción.\n"
"\n\nInformación de depuración:\n"
"PUID: %s\nMensaje de la excepción:\n"
"%s" % (objeto.get_puid(), msg),
padre=self.wids['ventana'])
break # Paso de seguir con los demás paths (si los hubiera)
icont += 1
if not error:
vpro.ocultar()
if paths:
self.rellenar_tabla_consumos()
self.rellenar_tabla_bolsas()
self.rellenar_estadisticas()
def drop_incidencia(self, boton):
model, paths = self.wids['tv_incidencias'].get_selection(
).get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'INCIDENCIA NO SELECCIONADA',
'Debe seleccionar la incidencia que desee eliminar del '
'parte.',
padre=self.wids['ventana'])
else:
if not utils.dialogo('¿Eliminar del parte?',
'BORRAR INCIDENCIAS DE CONTROL DE PRODUCCIÓN',
padre=self.wids['ventana']):
return
for path in paths:
aidi = model[path][-1]
incidencia = pclases.Incidencia.get(aidi)
incidencia.parteDeProduccion = None
try:
incidencia.destroy(ventana=__file__)
except Exception as msgexception:
utils.dialogo_info(titulo='INCIDENCIA NO ELIMINADA',
texto='Ocurrió un error al intentar '
'eliminar la incidencia.\n\n'
'Excepción capturada: {}'.format(
msgexception),
padre=self.wids['ventana'])
self.actualizar_ventana()
def add_empleado(self, wid):
empleados = pclases.Empleado.select(pclases.AND(
pclases.Empleado.q.activo == True, # noqa
pclases.Empleado.q.planta == True), # noqa
orderBy='apellidos')
empleados = [(e.id, e.nombre, e.apellidos) for e in empleados
if e.planta and e.activo and e.categoriaLaboral
and e.categoriaLaboral.planta]
# e.categoriaLaboral.planta and \
# e.categoriaLaboral.lineaDeProduccion == self.linea)]
ids = utils.dialogo_resultado(filas=empleados,
titulo='SELECCIONE EMPLEADOS',
cabeceras=('ID', 'Nombre', 'Apellidos'),
multi=True,
padre=self.wids['ventana'])
if ids == [-1]:
return
for ide in ids:
try:
e = pclases.Empleado.get(ide)
self.objeto.addEmpleado(e)
except Exception as msg:
utils.dialogo_info(
titulo='NÚMERO INCORRECTO',
texto='El empleado con código identificador %s no '
'existe o no se pudo agregar.\n\n'
'Información de depuración:\n'
'\t%s' % (ide, msg),
padre=self.wids['ventana'])
self.rellenar_tabla_empleados()
def drop_empleado(self, wid):
"""Quita un empleado del parte de producción."""
if self.wids['tv_empleados'].get_selection(
).count_selected_rows() == 0:
return
model, path = self.wids['tv_empleados'].get_selection().get_selected()
ide = model[path][0] # El ide del empleado es la columna 0
e = pclases.Empleado.get(ide)
self.objeto.removeEmpleado(e)
self.rellenar_tabla_empleados()
def rellenar_tabla_empleados(self):
"""Rellena la tabla de empleados."""
model = self.wids['tv_empleados'].get_model()
model.clear()
horas_parte = self.objeto.get_duracion()
for horastrab in self.objeto.horasTrabajadas:
try:
supera_duracion_parte = horastrab.horas > horas_parte
except TypeError:
supera_duracion_parte = (
utils.DateTime2DateTimeDelta(horastrab.horas)
> horas_parte)
if supera_duracion_parte:
horastrab.horas = horas_parte.strftime('%H:%M')
horastrab.sync()
model.append((horastrab.empleado.id,
horastrab.empleado.nombre,
horastrab.empleado.apellidos,
horastrab.horas.strftime('%H:%M'),
horastrab.id))
# pylint: disable=too-many-branches
def cambiar_partida(self, wid):
"""
        Asks for a partida number and switches the parte to it.
"""
texto = """
Al cambiar la partida del parte, se cambiará la partida de
todos los productos relacionados con él, así como el artículo
        al que pertenecen los productos.
Si quiere comenzar la producción de una nueva partida sin afectar
a los ya existentes, cree un nuevo parte."""
if (self.objeto.articulos != []
and not utils.dialogo(titulo='¿ESTÁ SEGURO?',
texto=texto,
padre=self.wids['ventana'])):
return
codigo = utils.dialogo_entrada(
titulo='¿NÚMERO DE PARTIDA?',
texto='Introduzca el número de partida de embolsado a '
'producir:',
padre=self.wids['ventana'])
if codigo is None: # Cancel
return
ultima_partida = pclases.PartidaCem.get_nueva_o_ultima_vacia()
try:
codigo = utils.parse_numero(codigo.upper().replace(
pclases.PREFIJO_PARTIDACEM, ""))
partida = pclases.PartidaCem.select(
pclases.PartidaCem.q.numpartida == codigo)[0]
if (self.usuario and self.usuario.nivel > 2
and partida.numpartida > ultima_partida):
utils.dialogo_info(
titulo="NÚMERO DE PARTIDA INCORRECTO",
texto="El número de partida %d es superior al de la "
"última partida válida para producir: %d\n"
"Vuelva a seleccionar partida." % (
ultima_partida.numpartida, codigo),
padre=self.wids['ventana'])
return
except (TypeError, ValueError) as msg:
self.logger.error("partes_de_fabricacion_bolsas::cambiar_partida "
"-> Código partida: %s. Excepción capturada: %s"
% (codigo, msg))
return
except IndexError:
if not self.usuario or self.usuario.nivel <= 2:
partida = pclases.PartidaCem(numpartida=codigo,
codigo="M-%d" % codigo)
pclases.Auditoria.nuevo(partida, self.usuario, __file__)
else:
danextone = ultima_partida
if danextone:
danextone = danextone.codigo
else:
danextone = "¡no encontrada!"
if utils.dialogo(
titulo="PARTIDA NO ENCONTRADA",
texto="No se encontró la partida.\n¿Continuar con la"
" siguiente partida de embolsado de cemento sin"
" \nproducción no asignada a ningún otro parte"
" (%s)?" % danextone,
padre=self.wids['ventana'],
defecto=True,
tiempo=15):
partida = ultima_partida
else:
return
        # Make the partida the current one.
self.objeto.partidaCem = partida
self.wids['e_partida'].set_text(partida.codigo)
        if partida.pales:  # It already has pallets from a previous parte.
            # To avoid mixing products, switch the current parte's one.
productoVenta = partida.pales[0].cajas[0].articulo.productoVenta
self.producto = productoVenta
self.rellenar_datos_producto(self.producto)
        # And move the artículos to the new partida and producto de venta.
pales = []
for a in self.objeto.articulos:
a.productoVenta = self.producto
pale = a.caja.pale
if pale not in pales:
pales.append(a.caja.pale)
pale.partidaCem = partida
self.actualizar_ventana()
def get_partida(self):
"""
        Returns the PartidaCem whose number is typed in the e_partida_gtx
        entry. It must already exist in the DB; otherwise the select
        below raises IndexError.
"""
numpartida = self.wids['e_partida_gtx'].get_text()
numpartida = numpartida.upper().replace(pclases.PREFIJO_PARTIDACEM, "")
numpartida = int(numpartida)
return pclases.PartidaCem.select(
pclases.PartidaCem.q.numpartida == numpartida)[0]
        # It is expected to exist in the DB, no matter what.
def _DEPRECATED_bloquear(self, ch, mostrar_alerta=True):
        # If the parte is less than one day old and is locked, anyone is
        # allowed to unlock it.
if (mx.DateTime.localtime() - self.objeto.fecha <= mx.DateTime.oneDay
and (self.objeto.bloqueado or ch.get_active())):
self.objeto.bloqueado = False
elif ch.get_active() != self.objeto.bloqueado:
            # NEW!: Locked partes can only be unlocked by users with
            # nivel <= 2 (see the check below).
if self.objeto.bloqueado:
if self.usuario and self.usuario.nivel <= 2:
# and self.objeto.bloqueado and not ch.get_active():
self.objeto.bloqueado = False
else:
if self.__permisos.escritura: # Tiene permiso para bloquear
# el parte
self.objeto.bloqueado = True
else:
if mostrar_alerta:
utils.dialogo_info(
titulo="USUARIO SIN PRIVILEGIOS",
texto="No tiene permisos suficientes para bloq"
"uear y verificar partes de producción.",
padre=self.wids['ventana'])
self.objeto.sync()
self.objeto.make_swap()
ch.set_active(self.objeto.bloqueado)
def bloquear(self, ch, mostrar_alerta=True):
"""
        - If the user lacks permissions and mostrar_alerta is set, warns
          that the parte verification cannot be modified.
        - If the user has permissions:
          - If the parte is verified and mostrar_alerta is set, reports
            that a parte already dumped to Murano cannot be unlocked.
          - If the parte is not verified, it is locked and both the
            production and the consumptions are dumped. If mostrar_alerta
            is set, warns that the operation cannot be undone.
        The user must have level 3 or lower (matching the check below).
"""
if self.objeto and ch.get_active() != self.objeto.bloqueado:
            # It is not the window itself ticking the box while showing a
            # locked parte: the user actually clicked it.
if (self.usuario and self.usuario.nivel <= 3
and self.__permisos.escritura):
if self.objeto.bloqueado:
                    # Already locked. **It cannot be unlocked.** The goods
                    # may even have been sold already in Murano.
utils.dialogo_info(
titulo="OPERACIÓN NO PERMITIDA",
texto="No se pueden desbloquear partes ya "
"volcados a Murano.",
padre=self.wids['ventana'])
else:
if mostrar_alerta:
seguro = utils.dialogo(
titulo="¿VERIFICAR PARTE?",
texto="Se verificará el parte y se bloqueará."
"\nToda la producción y consumos se "
"volcarán a Murano.\n\n"
"¿Está completamente seguro?\n\n"
"(Esta operación no se puede deshacer)",
padre=self.wids['ventana'])
else:
seguro = True
if seguro:
                        # Guard against users verifying the parte before
                        # it has actually finished being produced.
finparte = utils.convertir_a_fechahora(
self.objeto.fechahorafin)
ahora = mx.DateTime.now()
parte_terminado = ahora - finparte > 0
sensitive = self.wids['ch_bloqueado'].get_sensitive()
activo = sensitive and parte_terminado
                        # Forbid verifying while the parte is still open
                        # in production. At least 1 second must have
                        # elapsed since the parte's end time.
if not activo:
utils.dialogo_info(
titulo="HOLA, MARTY",
texto="No se puede cerrar un parte que "
"todavía no ha terminado de fabricarse.\n"
"\n\n(Y, por favor, si se te pregunta si "
"estás seguro, mejor que estés seguro "
"de verdad)",
padre=self.wids['ventana'])
else:
res = self.volcar_produccion()
if res:
self.objeto.bloqueado = True
self.objeto.sync()
self.objeto.make_swap()
else:
if mostrar_alerta:
str_error = "No se pudo volcar toda la "
str_error += "producción y consumos a "
str_error += "Murano.\n\nLos artículos no "
str_error += "volcados se han marcado con"
str_error += " el símbolo «✘».\nInténtelo"
str_error += " más tarde o contacte con el"
str_error += " administrador.\nEl parte "
str_error += "quedará pendiente de "
str_error += "verificar mientras tanto."
utils.dialogo_info(
titulo="ERROR VOLCADO",
texto=str_error,
padre=self.wids['ventana'])
self.rellenar_widgets()
else:
if mostrar_alerta:
utils.dialogo_info(
titulo="USUARIO SIN PRIVILEGIOS",
texto="No tiene permisos suficientes para "
"bloquear y verificar partes de "
"producción.\nPruebe a hacerlo desde "
"la ventana de partes pendientes de "
"verificar.",
padre=self.wids['ventana'])
ch.set_active(self.objeto.bloqueado)
def volcar_produccion(self):
"""
        Dumps every artículo of the parte and its related consumptions
        (materials and bigbags) to Murano.
        Returns True if everything went fine, False if any error occurred.
"""
res = True
if not MURANO:
utils.dialogo_info(
titulo="ERROR CONEXIÓN MURANO",
texto="No hay conexión con Murano. Se aborta operación.",
padre=self.wids['ventana'])
else:
# Producción ===
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
i = 0.0
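            # a.api flags artículos already dumped to Murano; the pending
            # ones are grouped by pallet because the dump goes pallet by
            # pallet.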
no_volcados = list(set([a.caja.pale for a in self.objeto.articulos
if not a.api]))
tot = len(no_volcados)
for pale in no_volcados:
i += 1
vpro.set_valor(i/tot, 'Volcando palé {} ({}/{})'.format(
pale.codigo, int(i), tot))
try:
volcado = murano.ops.create_pale(pale, observaciones="")
res = res and volcado
except Exception as errpale:
print("Error creando palé en Murano: ".format(errpale))
res = False
vpro.ocultar()
# Consumos ===
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
consumos = [c for c in self.objeto.consumos
if not c.api and c.actualizado]
i = 0.0
tot = len(consumos) + len(self.objeto.bigbags)
# # consumos materiales
for consumo in consumos:
i += 1
vpro.set_valor(i/tot, 'Consumiendo {} ({}/{})'.format(
consumo.productoCompra.descripcion, int(i), tot))
try:
consumido = murano.ops.consumir(consumo.productoCompra,
consumo.cantidad,
consumo=consumo)
res = res and consumido
except Exception as errconsum:
print("Error consumiendo en Murano: {}".format(errconsum))
res = False
# # consumos materia prima (bigbags)
for bigbag in self.objeto.bigbags:
i += 1
vpro.set_valor(i/tot, 'Consumiendo materia prima ({})'.format(
bigbag.codigo))
try:
consumido = murano.ops.consume_bigbag(bigbag)
res = res and consumido
except Exception as errconsbb:
print("Error consumiendo bigbag en Murano: {}".format(
errconsbb))
res = False
vpro.ocultar()
return res
def add_empleados_calendario(self):
"""
        Adds the employees scheduled by the labor calendar for the
        production line.
        1.- Get the calendar for self.linea.
        2.- Get the calendar's working days matching the object's date.
        3.- Filter those working days by the shift matching the object's
            time.
        4.- Get the employees of the resulting working day.
        5.- Remove the current employees. (ASKS BEFORE DOING IT)
        6.- Insert the new ones into the parte.
"""
if self.linea is not None:
idldp = self.linea.id
CAL = pclases.CalendarioLaboral
calendarios = CAL.select("""linea_de_produccion_id = %d AND
date_part('month', mes_anno) = %d AND
date_part('year', mes_anno) = %d"""
% (idldp, self.objeto.fecha.month,
self.objeto.fecha.year))
if calendarios.count() == 1:
calendario = calendarios[0]
empleados = self.get_empleados_de_calendario(calendario)
# Si hay empleados
if self.objeto.horasTrabajadas != []:
# Si no son los mismos del calendario y los quiere borrar.
if ([horastrab.empleado
for horastrab
in self.objeto.horasTrabajadas] != empleados and
utils.dialogo(
titulo="¿ELIMINAR OPERARIOS?",
texto="El parte ya tiene empleados relacionado"
"s.\n¿Desea eliminarlos y asociar los de"
"finidos en el turno?",
padre=self.wids['ventana'])):
for horastrab in self.objeto.horasTrabajadas:
self.objeto.removeEmpleado(horastrab.empleado)
else:
# Si no los quiere borrar, cancelo todo.
return
                # Either there were no employees, or they differed and
                # were removed. Add the employees of the working days that
                # match the shift and are production (non-recovery) ones.
for empleado in empleados:
self.objeto.addEmpleado(empleado)
elif calendarios.count() > 1:
self.logger.error(
"partes_de_fabricacion_bolsas.py -> Existe"
" más de un calendario laboral para el mes, año y "
"línea de producción: fecha %s - idldp %d - idparte "
"%s." % (self.objeto.fecha, idldp, self.objeto.id))
# pylint: disable=too-many-locals
def get_empleados_de_calendario(self, calendario):
"""
        Returns the employees scheduled to work on the current parte
        according to the given calendar.
"""
res = []
lab = pclases.Laborable
dia_lab_parte = self.objeto.fecha
seis_am = mx.DateTime.DateTimeDeltaFrom(hours=6)
medianoche = mx.DateTime.DateTimeDeltaFrom(hours=0)
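        # A parte running between midnight and 6 AM belongs to the night
        # shift of the previous labor day, so the calendar is looked up
        # on that day.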
if (self.objeto.horainicio >= medianoche and
self.objeto.horainicio <= seis_am and
                self.objeto.horafin <= seis_am):  # Shifts do not mix; this
            # last check might be unnecessary.
dia_lab_parte -= mx.DateTime.oneDay
laborables = lab.select(
"calendario_laboral_id = %d AND date_part('day', fecha) = %d"
% (calendario.id, dia_lab_parte.day))
for laborable in laborables:
turno = laborable.turno
if turno is None:
mensaje = "partes_de_fabricacion_bolsas.py::"\
"get_empleados_de_calendario -> Laborable ID %d no"\
" tiene turno relacionado. Intento eliminarlo de la"\
" BD." % (laborable.id)
print("ERROR: %s" % (mensaje))
self.logger.error(mensaje)
try:
laborable.destroy(ventana=__file__)
idlaborable = laborable.id
self.logger.warning(
"partes_de_fabricacion_bolsas.py::"
"get_empleados_de_calendario -> Registro "
"laborable ID %d ELIMINADO "
"SATISFACTORIAMENTE." % (idlaborable))
except Exception as msg: # pylint: disable=bare-exception
self.logger.error(
"partes_de_fabricacion_bolsas.py::"
"get_empleados_de_calendario -> Registro "
"laborable ID %d NO ELIMINADO.\n\n"
"Error: %s" % (laborable.id, msg))
continue
turnohorainicio = utils.DateTime2DateTimeDelta(turno.horainicio)
turnohorafin = utils.DateTime2DateTimeDelta(turno.horafin)
objetohorainicio = utils.DateTime2DateTimeDelta(
self.objeto.horainicio)
objetohorafin = utils.DateTime2DateTimeDelta(self.objeto.horafin)
if not turno.recuperacion:
ohi = objetohorainicio
ohf = objetohorafin
thi = turnohorainicio
thf = turnohorafin
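                # Normalize intervals that cross midnight: push wrapped
                # shift/parte ends (and early-morning starts) one day
                # forward so the containment check below compares real
                # time spans.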
if thi > thf:
thf += mx.DateTime.oneDay
if ohi > ohf:
ohf += mx.DateTime.oneDay
if seis_am > ohi >= medianoche:
ohi += mx.DateTime.oneDay
if seis_am >= ohf >= medianoche:
ohf += mx.DateTime.oneDay
if thi <= ohi <= thf and thi <= ohf <= thf:
for empleado in laborable.empleados:
res.append(empleado)
return res
def add_consumo(self, boton):
"""Agrega un consumo al parte actual."""
self.consumir_manual(boton)
def add_bigbag(self, boton):
"""
        Consumes a bigbag looked up by its traceability code.
"""
codigo = utils.dialogo_entrada(
titulo="BUSCAR BIGBAG",
texto="Introduzca el código de trazabilidad del bigbag\n"
"de fibra de cemento:",
padre=self.wids['ventana'])
if codigo:
codigo = codigo.replace(" ", "").replace("-", "").upper().strip()
if not codigo.startswith("C"):
try:
codigo = "C%d" % utils.parse_numero(codigo)
except TypeError:
utils.dialogo_info(
titulo="ERROR",
texto="El texto introducido «%s» no es un número."
% (codigo),
padre=self.wids['ventana'])
codigo = "erróneo"
try:
bigbag = pclases.Bigbag.selectBy(codigo=codigo)[0]
except IndexError:
utils.dialogo_info(
titulo="CÓDIGO NO ENCONTRADO",
texto="El código %s no se encontró." % codigo,
padre=self.wids['ventana'])
else:
albint = self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
bigbag.articulo.sync()
if bigbag.articulo.almacen != albint.almacenOrigen:
utils.dialogo_info(
titulo="BIGBAG NO ESTÁ EN ALMACÉN",
texto="El bigbag %s no se encuentra en el almacén"
" %s" % (codigo,
albint.almacenOrigen.nombre),
padre=self.wids['ventana'])
else:
# Para consumir lo sacamos del almacén.
bigbag.parteDeProduccion = self.objeto
bigbag.articulo.almacen = None
bigbag.articulo.syncUpdate()
# Y lo metemos en el albarán interno.
lineas_albaran = {}
for ldv in albint.lineasDeVenta:
pv = ldv.productoVenta
if pv not in lineas_albaran:
lineas_albaran[pv] = [ldv]
else:
lineas_albaran[pv].append(ldv)
pv_bb = bigbag.articulo.productoVenta
if pv_bb not in lineas_albaran:
linea_albaran = pclases.LineaDeVenta(
ticket=None,
pedidoVenta=None,
facturaVenta=None,
productoVenta=pv_bb,
albaranSalida=albint,
prefactura=None,
productoCompra=None,
fechahora=mx.DateTime.localtime(),
cantidad=0.0,
precio=pv_bb.precioDefecto,
descuento=0.0,
notas="",
descripcionComplementaria="Reembolsado")
lineas_albaran[pv_bb] = [linea_albaran]
pclases.Auditoria.nuevo(linea_albaran,
self.usuario, __file__)
bigbag.articulo.albaranSalida = albint
lineas_albaran[pv_bb][-1].cantidad += bigbag.pesobigbag
lineas_albaran[pv_bb][-1].syncUpdate()
self.rellenar_tabla_consumos()
self.rellenar_estadisticas()
def drop_consumo(self, boton):
"""
        Deletes the selected consumptions.
"""
model, paths = self.wids['tv_consumos'].get_selection(
).get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'CONSUMOS NO SELECCIONADOS',
'Debe seleccionar uno o varios consumos a eliminar del '
'parte.',
padre=self.wids['ventana'])
else:
if not utils.dialogo('¿Eliminar del parte?',
'BORRAR CONSUMOS DEL CONTROL DE ENVASADO',
padre=self.wids['ventana']):
return
for path in paths:
ide = model[path][-1]
if ide > 0: # Es consumo
consumo = pclases.Consumo.get(ide)
consumo.parteDeProduccion = None
try:
consumo.anular_consumo()
# consumo.destroy(ventana = __file__)
except Exception as msg:
utils.dialogo_info(
titulo='INCIDENCIA NO ELIMINADA',
texto='Ocurrió un error al intentar '
'eliminar el consumo.\n\n\n'
'Error: {}'.format(msg),
padre=self.wids['ventana'])
elif ide < 0: # Es bigbag
ide = -ide
bigbag = pclases.Bigbag.get(ide)
albint = self.objeto.get_albaran_interno() # DEBE existir
assert albint == bigbag.articulo.albaranSalida
# Devuelvo al almacén
bigbag.parteDeProduccion = None
bigbag.articulo.almacen = albint.almacenOrigen
bigbag.articulo.albaranSalida = None
bigbag.articulo.sync()
# Y saco del albarán
idldv = albint._buscar_ldv(
albint.agrupar_articulos(),
bigbag.articulo.productoVenta.codigo,
0.0) # La cantidad no me importa.
ldv = pclases.LineaDeVenta.get(idldv)
ldv.cantidad -= bigbag.pesobigbag
ldv.syncUpdate()
# self.actualizar_ventana()
self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
self.rellenar_tabla_consumos()
def cambiar_numbolsas(self, cell, path, newtext):
"""
        Checks that a number was typed and adjusts the pallet's bags per
        box, creating or deleting bags in each box until the typed number
        is reached.
"""
# TODO:
pass
def pasar_pale_a_B(self, cell, path):
"""
        If the clicked row was B, switches the whole pallet to A;
        otherwise it does the opposite and switches it all to B.
        """
        # TODO: This implies changing bags per box and more. It is not
        # just flipping an attribute on the object.
pass
def consumir_manual(self, boton):
"""
        Manually creates a consumption record and then unifies the
        consumptions.
        If any consumption ends up with quantity 0 (because a negative
        consumption subtracted from another one) it is deleted before
        leaving the routine.
"""
# Pedir producto(s) a consumir.
producto, texto_buscado = utils.pedir_producto_compra(
padre=self.wids['ventana'])
# Pedir cantidad.
if producto is not None:
unidad = ""
try:
producto_unidad = producto.unidad
if producto_unidad != "":
unidad = " en %s" % (producto_unidad)
except AttributeError as msg:
self.logger.error(
"%sEl producto tipo %s ID %d no tiene "
"atributo unidad. Excepción AttributeError: %s." % (
self.usuario and self.usuario.usuario + ": " or "",
type(producto),
producto is not None and producto.id or "NONE",
msg))
descripcion = producto.descripcion
cantidad = utils.dialogo_entrada(
titulo="CANTIDAD",
texto="Introduzca la cantidad a consumir de %s%s."
"\n<small><i>%s</i></small>" % (
descripcion, unidad, producto.observaciones),
padre=self.wids['ventana'])
if cantidad is not None:
try:
cantidad_a_consumir = utils._float(cantidad)
except (TypeError, ValueError):
utils.dialogo_info(
titulo="ERROR DE FORMATO",
texto='El texto introducido "%s" no es un número'
'.' % (cantidad),
padre=self.wids['ventana'])
else:
# Crear consumo.
producto.sync()
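                    # 'antes'/'despues' record the stock level before and
                    # after the consumption, for auditing.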
consumo = pclases.Consumo(
silo=None,
parteDeProduccion=self.objeto,
productoCompra=producto,
actualizado=True,
antes=producto.existencias,
despues=producto.existencias - cantidad_a_consumir,
cantidad=cantidad_a_consumir)
pclases.Auditoria.nuevo(consumo, self.usuario, __file__)
# Actualizar existencias
producto.existencias -= cantidad_a_consumir
producto.add_existencias(-cantidad_a_consumir)
producto.syncUpdate()
self.logger.warning(
"%sCONSUMO LÍNEA EMBOLSADO -> PARTE %d -> "
"Consumiendo manualmente %f %s de %s (ID %d). "
"Existencias: %f." % (
self.usuario
and self.usuario.usuario + ": " or "",
self.objeto.id,
cantidad_a_consumir,
producto.unidad,
producto.descripcion,
producto.id,
producto.existencias))
# Unificar consumos.
self.objeto.unificar_consumos()
# Eliminar consumos con cantidad cero.
for c in self.objeto.consumos:
if round(c.cantidad, 3) == 0:
                            # Things as small as needles are deducted in
                            # such tiny amounts that, after several
                            # inserts and deletions, a consumption may be
                            # left with quantity 0.0000...1, which must be
                            # deleted.
try:
c.destroy(ventana=__file__)
except Exception as msg:
self.logger.error(
"%sConsumo ID %d no se pudo eliminar."
" Excepción: %s" % (
self.usuario
and self.usuario.usuario + ": "
or "",
c.id,
msg))
self.rellenar_tabla_consumos()
# Buscar y crear (si no existe) el albarán interno de
# consumos.
self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
def etiquetar(self, boton):
"""
        Generates the labels for the selected pallets.
"""
treev = self.wids['tv_produccion']
model, paths = treev.get_selection().get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'SELECCIONE PALÉS',
'Debe seleccionar uno o más palés a etiquetar.',
padre=self.wids['ventana'])
else:
if not utils.dialogo(
'A continuación se generarán las etiquetas para {} palé{}.'
'\n\n¿Continuar?'.format(len(paths),
len(paths) != 1 and "s" or ""),
'¿GENERAR ETIQUETAS?',
padre=self.wids['ventana']):
return
ids = []
for path in paths:
puid = model[path][-1]
clase, ide = puid.split(":")
ids.append(ide)
pales = [pclases.Pale.get(ide) for ide in ids]
imprimir_etiquetas_pales(pales, self.wids['ventana'])
def imprimir_etiquetas_pales(pales, padre=None, mostrar_dialogo=True):
"""
    Shows a window to choose the type of labels to print.
    The neutral one (tipo = 1) is selected by default.
    If mostrar_dialogo is False, the saved global option is used. If no
    global option has been saved, the parameter is ignored and the
    dialog is shown.
"""
global MEMENTO_MORI
    # Since the regulation of July 1st 2013 the labels are always printed
    # in the new format. In case of a rollback, printing the old labels is
    # still possible (programmatically only, not from the GUI).
    MEMENTO_MORI['que_imprimir'] = 0  # No more box labels. Under the 2013
    # rule only pallet labels are printed.
if MEMENTO_MORI['que_imprimir'] is None: # Nunca ha elegido una opción:
mostrar_dialogo = True
else:
que_imprimir = MEMENTO_MORI['que_imprimir']
mostrar_dialogo = False
if mostrar_dialogo:
que_imprimir = utils.dialogo_radio(
titulo="SELECCIONAR TIPO IMPRESIÓN",
texto="Seleccione qué imprimir:",
ops=[(0, "Etiqueta de palé"),
(1, "Etiquetas de caja"),
(2, "Etiquetas de palé y cajas")],
padre=padre,
valor_por_defecto=2)
if que_imprimir is not None:
MEMENTO_MORI['que_imprimir'] = que_imprimir
if que_imprimir in (0, 2):
tipo = 3 # Opción inexistente en el diálogo pero reconocible
# por la función que va a generar las etiquetas.
# BACKTRACKING a etiqueta antigua hasta que arreglemos la
# etiquetadora de la línea de cemento.
# tipo = MEMENTO_MORI['tipo'] # <- 18/09/2013: Pasamos a la nueva.
# Ya no permito seleccionar otra.
if tipo is None:
tipo = utils.dialogo_radio(
titulo="SELECCIONAR ETIQUETA",
texto="Seleccione el tipo de etiqueta a generar:",
ops=[(0, "Mínima (solo código de palé, partida y "
"marcado CE)"),
(1, "Neutra (incluye datos de producto)"),
(2, "Completa (incluye el nombre de la empresa"
")")],
padre=padre,
valor_por_defecto=1)
if tipo is not None:
MEMENTO_MORI['tipo'] = tipo
else:
return
# EOBACKTRACK: Descomentar el rotate=True cuando volvamos a usar
# las etiquetas nuevas.
try:
ceb = pales[0].productoVenta.camposEspecificosBala
func_etiqueta = ceb.modeloEtiqueta.get_func()
filetiqpale = func_etiqueta(pales)
except (AttributeError, IndexError, ValueError): # Fallback a
# etiqueta por defecto.
filetiqpale = geninformes.generar_etiqueta_pale(pales, tipo)
for pale in pales:
pclases.Auditoria.modificado(
pale,
# self.usuario,
None,
__file__,
"Impresión de etiqueta para palé %s" % (
pale.codigo))
mandar_a_imprimir_con_ghostscript(filetiqpale, rotate=True)
if que_imprimir == 1 or que_imprimir == 2:
tipo = MEMENTO_MORI['tipo']
if tipo is None:
tipo = utils.dialogo_radio(
titulo="SELECCIONAR ETIQUETA",
texto="Seleccione el tipo de etiqueta a generar:",
ops=[(0, "Mínima (solo código de palé, partida y "
"marcado CE)"),
(1, "Neutra (incluye datos de producto)"),
(2, "Completa (incluye el nombre de la empresa"
")")],
padre=padre,
valor_por_defecto=1)
if tipo is not None:
MEMENTO_MORI['tipo'] = tipo
cajas = []
for p in pales:
cajas += p.cajas[:]
for caja in cajas:
pclases.Auditoria.modificado(
caja.articulo,
# self.usuario,
None,
__file__,
"Impresión de etiqueta para caja %s" % (
caja.articulo.get_info()))
filetiqcaja = geninformes.generar_etiqueta_caja(cajas, tipo)
mandar_a_imprimir_con_ghostscript(filetiqcaja)
if __name__ == "__main__":
    # TODO: Why doesn't it pick up user and password from the parameters?
    # It must be runnable as python formularios/partes...
p = PartesDeFabricacionBolsas()
|
gpl-2.0
| -6,043,880,975,479,553,000
| 46.973039
| 134
| 0.493912
| false
| 3.665928
| false
| false
| false
|
hackerspace-silesia/cebulany-manager
|
cebulany/resources/payment_summary.py
|
1
|
1554
|
from flask_restful import fields, marshal_with
from flask_restful.reqparse import RequestParser
from cebulany.auth import token_required
from cebulany.queries.payment_summary import PaymentSummaryQuery
from cebulany.resources.model import ModelListResource
resource_fields = {
'payments': fields.List(fields.Nested({
'cost': fields.Price(decimals=2),
'is_positive': fields.Boolean(),
'payment_type_id': fields.Integer(),
'budget_id': fields.Integer(),
})),
'balances': fields.Nested({
'curr_start_year': fields.Price(decimals=2),
'curr_end_year': fields.Price(decimals=2),
'prev_start_year': fields.Price(decimals=2),
'prev_end_year': fields.Price(decimals=2),
'diff_start_year': fields.Price(decimals=2),
'diff_end_year': fields.Price(decimals=2),
'diff_prev_start_year': fields.Price(decimals=2),
'diff_prev_end_year': fields.Price(decimals=2),
}),
'outstanding_cost': fields.Price(decimals=2),
}
query_summary_parser = RequestParser()
query_summary_parser.add_argument('year', type=int)
class PaymentSummaryResource(ModelListResource):
@token_required
@marshal_with(resource_fields)
def get(self):
args = query_summary_parser.parse_args()
year = args['year']
return {
'payments': PaymentSummaryQuery.get_payment_data(year),
'balances': PaymentSummaryQuery.get_balances(year),
'outstanding_cost': PaymentSummaryQuery.get_outstanding_cost(year),
}
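# Example request (assuming the resource is registered at the hypothetical
# route /payment_summary):
#   GET /payment_summary?year=2021
# -> per type/budget payment totals, start/end-of-year balances (current,
#    previous and their differences) and the outstanding cost for the year.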
|
mit
| 7,752,611,034,030,863,000
| 32.782609
| 79
| 0.666667
| false
| 3.52381
| false
| false
| false
|
vprnet/iframe
|
app/views.py
|
1
|
2454
|
from app import app
from flask import render_template, request
from config import BASE_URL
import csv
# Not being used anywhere
@app.route('/connect-choropleth')
def connect():
page_url = BASE_URL + request.path
return render_template('health-connect-choropleth.html', page_url=page_url)
@app.route('/sewage')
def sewage():
page_url = BASE_URL + request.path
return render_template('sewage.html', page_url=page_url)
@app.route('/doc-cloud')
def cloud():
return render_template('doc-cloud.html')
@app.route('/license-plates')
def license():
return render_template('license-plates.html')
@app.route('/broadband')
def broadband():
return render_template('broadband.html')
@app.route('/town-meeting/school-budget-revote')
def school_budget_revote():
return render_template('school-budget-revote.html')
@app.route('/town-meeting/school-budget')
def school_budget():
return render_template('school-budget.html')
@app.route('/town-meeting/town-budget')
def town_budget():
return render_template('town-budget.html')
@app.route('/town-meeting/public-bank')
def public_bank():
return render_template('public-bank.html')
@app.route('/town-meeting/results')
def results():
return render_template('results.html')
@app.route('/town-meeting/tar-sands')
def tar_sands():
return render_template('tar-sands.html')
@app.route('/code/tmd-index')
def tmd_index():
return render_template('tmdindex.html')
@app.route('/code/tmd-script')
def tmd_script():
return render_template('tmdscript.html')
@app.route('/code/tmd-structure')
def tmd_structure():
return render_template('tmdstructure.html')
@app.route('/winter')
def winter_length():
f = csv.reader(open('app/data/winters.csv', 'rU'))
winters = [l for l in f]
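    # Note: the 'rU' open mode is deprecated; on Python 3,
    # open(path, newline='') is the usual equivalent for csv.reader.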
temperatures = [[20, 30, 40, 50, 60, 70, 80, 90],
[20, 22, 24, 26, 28, 30, 32, 34],
[25, 26, 27, 28, 29, 30, 31, 32],
[30, 32, 34, 36, 38, 40, 42, 44],
[35, 36, 37, 38, 39, 40, 41, 42],
[40, 42, 44, 46, 48, 50, 52, 54],
[45, 46, 47, 48, 49, 50, 51, 52],
[50, 52, 54, 56, 58, 60, 62, 64],
[55, 56, 57, 58, 59, 60, 61, 62],
[60, 62, 64, 66, 68, 70, 72, 74],
[65, 66, 67, 68, 69, 70, 71, 72],
[70, 72, 74, 76, 78, 80, 82, 84],
[80, 82, 84, 86, 88, 90, 92, 94]]
return render_template('winter.html',
temperatures=temperatures,
winters=winters)
|
apache-2.0
| -876,918,957,947,200,600
| 23.54
| 79
| 0.628769
| false
| 2.917955
| false
| false
| false
|
foursquare/pants
|
src/python/pants/backend/jvm/tasks/analysis_extraction.py
|
1
|
6308
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import os
from collections import defaultdict
from pants.backend.jvm.subsystems.dependency_context import DependencyContext
from pants.backend.jvm.subsystems.zinc import Zinc
from pants.backend.jvm.tasks.nailgun_task import NailgunTask
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.goal.products import MultipleRootedProducts
from pants.util.memo import memoized_property
class AnalysisExtraction(NailgunTask):
"""A task that handles extracting product and dependency information from zinc analysis."""
# The output JSON created by this task is not localized, but is used infrequently enough
# that re-computing it from the zinc analysis (which _is_ cached) when necessary is fine.
create_target_dirs = True
@classmethod
def subsystem_dependencies(cls):
return super(AnalysisExtraction, cls).subsystem_dependencies() + (DependencyContext, Zinc.Factory)
@classmethod
def register_options(cls, register):
super(AnalysisExtraction, cls).register_options(register)
@classmethod
def prepare(cls, options, round_manager):
super(AnalysisExtraction, cls).prepare(options, round_manager)
round_manager.require_data('zinc_analysis')
round_manager.require_data('runtime_classpath')
@classmethod
def product_types(cls):
return ['classes_by_source', 'product_deps_by_src']
def _create_products_if_should_run(self):
"""If this task should run, initialize empty products that it will populate.
Returns true if the task should run.
"""
should_run = False
if self.context.products.is_required_data('classes_by_source'):
should_run = True
make_products = lambda: defaultdict(MultipleRootedProducts)
self.context.products.safe_create_data('classes_by_source', make_products)
if self.context.products.is_required_data('product_deps_by_src'):
should_run = True
self.context.products.safe_create_data('product_deps_by_src', dict)
return should_run
@memoized_property
def _zinc(self):
return Zinc.Factory.global_instance().create(self.context.products)
def _summary_json_file(self, vt):
return os.path.join(vt.results_dir, 'summary.json')
@memoized_property
def _analysis_by_runtime_entry(self):
zinc_analysis = self.context.products.get_data('zinc_analysis')
return {cp_entry: analysis_file for _, cp_entry, analysis_file in zinc_analysis.values()}
def execute(self):
# If none of our computed products are necessary, return immediately.
if not self._create_products_if_should_run():
return
zinc_analysis = self.context.products.get_data('zinc_analysis')
classpath_product = self.context.products.get_data('runtime_classpath')
classes_by_source = self.context.products.get_data('classes_by_source')
product_deps_by_src = self.context.products.get_data('product_deps_by_src')
fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
classpath_product)
targets = list(zinc_analysis.keys())
with self.invalidated(targets,
fingerprint_strategy=fingerprint_strategy,
invalidate_dependents=True) as invalidation_check:
# Extract and parse products for any relevant targets.
for vt in invalidation_check.all_vts:
summary_json_file = self._summary_json_file(vt)
cp_entry, _, analysis_file = zinc_analysis[vt.target]
if not vt.valid:
self._extract_analysis(vt.target, analysis_file, summary_json_file)
self._register_products(vt.target,
cp_entry,
summary_json_file,
classes_by_source,
product_deps_by_src)
def _extract_analysis(self, target, analysis_file, summary_json_file):
target_classpath = self._zinc.compile_classpath('runtime_classpath', target)
analysis_by_cp_entry = self._analysis_by_runtime_entry
upstream_analysis = list(self._upstream_analysis(target_classpath, analysis_by_cp_entry))
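    # The zinc extractor reads the analysis cache and writes a JSON summary
    # mapping each source to its classfiles ('products') and classfile
    # dependencies ('dependencies'); see _register_products below.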
args = [
'-summary-json', summary_json_file,
'-analysis-cache', analysis_file,
'-classpath', ':'.join(target_classpath),
'-analysis-map', ','.join('{}:{}'.format(k, v) for k, v in upstream_analysis),
]
args.extend(self._zinc.rebase_map_args)
result = self.runjava(classpath=self._zinc.extractor,
main=Zinc.ZINC_EXTRACT_MAIN,
args=args,
workunit_name=Zinc.ZINC_EXTRACTOR_TOOL_NAME,
workunit_labels=[WorkUnitLabel.MULTITOOL])
if result != 0:
raise TaskError('Failed to parse analysis for {}'.format(target.address.spec),
exit_code=result)
def _upstream_analysis(self, target_classpath, analysis_by_cp_entry):
for entry in target_classpath:
analysis_file = analysis_by_cp_entry.get(entry)
if analysis_file is not None:
yield entry, analysis_file
def _register_products(self,
target,
target_cp_entry,
summary_json_file,
classes_by_source,
product_deps_by_src):
summary_json = self._parse_summary_json(summary_json_file)
# Register a mapping between sources and classfiles (if requested).
if classes_by_source is not None:
buildroot = get_buildroot()
for abs_src, classes in summary_json['products'].items():
source = os.path.relpath(abs_src, buildroot)
classes_by_source[source].add_abs_paths(target_cp_entry, classes)
# Register classfile product dependencies (if requested).
if product_deps_by_src is not None:
product_deps_by_src[target] = summary_json['dependencies']
def _parse_summary_json(self, summary_json_file):
with open(summary_json_file) as f:
return json.load(f, encoding='utf-8')
|
apache-2.0
| -4,502,153,119,825,968,600
| 40.5
| 102
| 0.677394
| false
| 3.932668
| false
| false
| false
|
anarchih/SmallProject
|
project1-1/evaluator.py
|
1
|
5529
|
# import pandas as pd
import datetime as dt
# import matplotlib.pyplot as plt
from collections import Counter
import csv
class Cluster(object):
def __init__(self, capacity):
self.week = [0] * 60
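        # NOTE: capacity is accepted but unused here; Evaluater constructs
        # a Cluster but never reads it afterwards.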
class Evaluater(object):
def __init__(self, filename, k, dist, capacity, date_range):
self.data = self.read_tsv(filename)
self.k = k
self.dist = dist
self.capacity = capacity
self.date_range = dt.timedelta(date_range)
self.cluster = Cluster(capacity)
self.xmin = min(self.data['x'])
self.xmax = max(self.data['x'])
self.ymin = min(self.data['y'])
self.ymax = max(self.data['y'])
self.labels_ = [0 for i in range(len(self.data['x']))]
def read_tsv(self, filename):
data = {'x': [], 'y': [], 'date':[]}
f = open(filename, "r")
f.readline()
for row in csv.reader(f, delimiter='\t'):
data['x'].append(float(row[9]))
data['y'].append(float(row[10]))
data['date'].append(dt.datetime.strptime('2015/' + row[5], "%Y/%m/%d"))
return data
# data = pd.read_csv(filename, sep="\t")
# # rows = random.sample(list(data.index), 5000)
# # data = data.ix[rows]
# data = data.rename(columns = {'經度座標':'x'})
# data = data.rename(columns = {'緯度座標':'y'})
# dtime = pd.DataFrame([[dt.datetime.strptime('2015/' + i, "%Y/%m/%d")] for i in data['發病日期']], columns=['date'])
# data = data.join(dtime)
# del data['發病日期']
# data = data.sort(['date'])
# data = data.reset_index()
# return data
def evaluate(self, ind):
count = 0
for p1 in zip(self.data['x'], self.data['y']):
for j, p2 in enumerate(ind):
if self.distance(p1, p2) < self.dist:
count += 1
break
return count,
def eval(self, ind):
count = 0
latest_date = [dt.datetime(1990, 1, 1)] * self.k
sum_capacity = [0] * self.k
tmp = [[0] * self.date_range.days for i in range(self.k)]
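        # tmp[c] is a sliding window of daily counts for cluster c over
        # the last date_range days; its sum is the occupancy compared
        # against capacity.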
for i, p1 in enumerate(zip(self.data['x'], self.data['y'])):
c = self.find_data_belongs_to(p1, ind)
if c != 10000:
date_gap = self.data['date'][i] - latest_date[c]
latest_date[c] = self.data['date'][i]
if date_gap >= self.date_range:
sum_capacity[c] = 1
tmp[c] = [0] * self.date_range.days
tmp[c][0] = 1
count += 1
else:
t = [0] * date_gap.days + tmp[c][0:self.date_range.days - date_gap.days]
t[0] += 1
sum_c = sum(t)
if sum_c <= self.capacity:
tmp[c] = t
sum_capacity[c] = sum_c
count += 1
return count,
def find_data_belongs_to(self, p1, ind):
current_cluster = 10000
Min = 10000
for j, p2 in enumerate(ind):
dist = self.distance(p1, p2)
if dist < self.dist and dist < Min:
Min = dist
current_cluster = j
return current_cluster
def distance(self, p1, p2):
return abs(p1[0] - p2[0]) + abs(p1[1] - p2[1])
def calc_labels(self, ind):
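        # Mirrors eval(), but also stores the 1-based cluster assignment
        # of each point in self.labels_ (points outside every cluster
        # keep 0).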
count = 0
latest_date = [dt.datetime(1990, 1, 1)] * self.k
sum_capacity = [0] * self.k
tmp = [[0] * self.date_range.days for i in range(self.k)]
for i, p1 in enumerate(zip(self.data['x'], self.data['y'])):
c = self.find_data_belongs_to(p1, ind)
if c != 10000:
date_gap = self.data['date'][i] - latest_date[c]
latest_date[c] = self.data['date'][i]
if date_gap >= self.date_range:
sum_capacity[c] = 1
tmp[c] = [0] * self.date_range.days
tmp[c][0] = 1
count += 1
else:
t = [0] * date_gap.days + tmp[c][0:self.date_range.days - date_gap.days]
t[0] += 1
sum_c = sum(t)
if sum_c <= self.capacity:
tmp[c] = t
sum_capacity[c] = sum_c
count += 1
self.labels_[i] = c + 1
return count,
def draw_result(self):
self.draw_data()
# self.draw_range()
print(Counter(self.labels_))
# plt.show()
def draw_range(self):
pass
# plt.scatter(self.cluster_centers_[:, 0], self.cluster_centers_[:, 1], s=50)
def draw_data(self):
tmp = [20 if self.labels_[i] != 0 else 1 for i in range(len(self.labels_))]
# plt.scatter(self.data['x'], self.data['y'], s = tmp, c = self.labels_)
# plt.scatter(self.data['x'], self.data['y'], s=tmp, c=self.result)
def draw_raw_data(self):
pass
# plt.scatter(self.data['x'],self.data['y'],s=1)
# plt.show()
def save_result(self):
pass
        # data = pd.DataFrame({'id': self.data['傳染病報告單電腦編號'],  # 傳染病報告單電腦編號 = report record ID
        #                      'x': self.data['x'],
        #                      'y': self.data['y'],
        #                      'class':self.labels_})
        # data.to_csv("result.csv")
e = Evaluater("data.tsv", 5, 0.02, 200000, 3)
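# A usage sketch, not part of the original script: the trailing-comma returns
# above look like DEAP-style fitness tuples, so a candidate individual is a
# list of k (x, y) centers. The centers below are hypothetical.
#
# centers = [(e.xmin + 0.1 * i, e.ymin + 0.1 * i) for i in range(e.k)]
# covered, = e.calc_labels(centers)  # also fills e.labels_
# print(covered)
# e.draw_result()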
|
gpl-3.0
| 891,766,790,229,226,900
| 35.513333
| 121
| 0.466314
| false
| 3.410336
| true
| false
| false
|
google/digitalbuildings
|
tools/validators/instance_validator/validate/handler.py
|
1
|
8569
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Validation Helper."""
from __future__ import print_function
from datetime import datetime
import sys
from typing import Callable, Dict, List, Optional, Tuple
from validate import entity_instance
from validate import generate_universe
from validate import instance_parser
from validate import subscriber
from validate import telemetry_validator
from yamlformat.validator import presubmit_validate_types_lib as pvt
def Deserialize(
    yaml_files: List[str]
) -> Tuple[Dict[str, entity_instance.EntityInstance],
           instance_parser.ConfigMode]:
"""Parses a yaml configuration file and deserializes it.
Args:
yaml_files: list of building configuration files.
  Returns:
    A tuple of a map of entity name to EntityInstance and the parsed
    ConfigMode of the configuration.
"""
print('Validating syntax please wait ...')
parser = instance_parser.InstanceParser()
for yaml_file in yaml_files:
print('Opening file: {0}, please wait ...'.format(yaml_file))
parser.AddFile(yaml_file)
parser.Finalize()
default_entity_operation = instance_parser.EntityOperation.ADD
if parser.GetConfigMode() == instance_parser.ConfigMode.UPDATE:
default_entity_operation = instance_parser.EntityOperation.UPDATE
entities = {}
for entity_name, entity_yaml in parser.GetEntities().items():
try:
entities[entity_name] = entity_instance.EntityInstance.FromYaml(
entity_yaml, default_entity_operation)
except ValueError:
print('Invalid Entity ' + entity_name)
raise
return entities, parser.GetConfigMode()
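# A minimal usage sketch (the file name is hypothetical):
#   entities, config_mode = Deserialize(['building_config.yaml'])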
def _ValidateConfig(
    filenames: List[str],
    universe: pvt.ConfigUniverse) -> Dict[str, entity_instance.EntityInstance]:
"""Runs all config validation checks."""
print('\nLoading config files...\n')
entities, config_mode = Deserialize(filenames)
print('\nStarting config validation...\n')
helper = EntityHelper(universe)
return helper.Validate(entities, config_mode)
def _ValidateTelemetry(subscription: str, service_account: str,
entities: Dict[str, entity_instance.EntityInstance],
report_filename: str, timeout: int) -> None:
"""Runs all telemetry validation checks."""
helper = TelemetryHelper(subscription, service_account, report_filename)
helper.Validate(entities, report_filename, timeout)
def RunValidation(filenames: List[str],
modified_types_filepath: str = None,
subscription: str = None,
service_account: str = None,
report_filename: str = None,
timeout: int = 60) -> None:
"""Master runner for all validations."""
if bool(subscription) != bool(service_account):
print('Subscription and a service account file are '
'both needed for the telemetry validation!')
sys.exit(0)
print('\nStarting validator...\n')
print('\nStarting universe generation...\n')
universe = generate_universe.BuildUniverse(modified_types_filepath)
if not universe:
print('\nError generating universe')
sys.exit(0)
print('\nStarting config validation...\n')
entities = _ValidateConfig(filenames, universe)
if subscription:
print('\nStarting telemetry validation...\n')
_ValidateTelemetry(subscription, service_account, entities, report_filename,
timeout)
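# A minimal invocation sketch (all paths and the subscription are hypothetical):
#   RunValidation(['building_config.yaml'],
#                 subscription='projects/p/subscriptions/s',
#                 service_account='service_account.json',
#                 report_filename='report.txt')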
class TelemetryHelper(object):
"""A validation helper to encapsulate telemetry validation.
Attributes:
subscription: resource string referencing the subscription to check
service_account_file: path to file with service account information
report_filename: a report filename provided by the user
"""
def __init__(self, subscription, service_account_file, report_filename=None):
super().__init__()
self.report_filename = report_filename
self.subscription = subscription
self.service_account_file = service_account_file
def Validate(self, entities: Dict[str, entity_instance.EntityInstance],
report_filename: str, timeout: int) -> None:
"""Validates telemetry payload received from the subscription.
Args:
entities: EntityInstance dictionary keyed by entity name
report_filename: path to write results to
timeout: number of seconds to wait for telemetry
"""
print('Connecting to pubsub subscription: ', self.subscription)
sub = subscriber.Subscriber(self.subscription, self.service_account_file)
validator = telemetry_validator.TelemetryValidator(
entities, timeout,
self.BuildTelemetryValidationCallback(report_filename))
validator.StartTimer()
sub.Listen(validator.ValidateMessage)
def BuildTelemetryValidationCallback(
self,
report_filename: Optional[str] = None
) -> Callable[[telemetry_validator.TelemetryValidator], None]:
"""Returns a callback to be called when a telemetry message is received.
Args:
report_filename: path to write results to
"""
def TelemetryValidationCallback(
validator: telemetry_validator.TelemetryValidator) -> None:
"""Callback when the telemetry validator finishes.
This could be called due to a timeout or because telemetry messages were
received and validated for every expected entity.
Args:
validator: the telemetry validator that triggered the callback.
"""
print('Generating validation report ...')
current_time = datetime.now()
timestamp = current_time.strftime('%d-%b-%Y (%H:%M:%S)')
report = '\nReport Generated at: {0}\n'.format(timestamp)
if not validator.AllEntitiesValidated():
report += ('No telemetry message was received for the following '
'entities:')
report += '\n'
for entity_name in validator.GetUnvalidatedEntityNames():
report += ' {0}\n'.format(entity_name)
report += '\nTelemetry validation errors:\n'
for error in validator.GetErrors():
report += error.GetPrintableMessage()
report += '\nTelemetry validation warnings:\n'
for warnings in validator.GetWarnings():
report += warnings.GetPrintableMessage()
      if report_filename:
        # Use the parameter consistently; the with-block already closes the
        # file, so an explicit close() is redundant.
        with open(report_filename, 'w') as f:
          f.write(report)
else:
print('\n')
print(report)
print('Report Generated')
sys.exit(0)
return TelemetryValidationCallback
class EntityHelper(object):
"""A validation helper to coordinate the various steps of the validation.
Attributes:
universe: ConfigUniverse to validate against
"""
def __init__(self, universe: pvt.ConfigUniverse):
super().__init__()
self.universe = universe
def Validate(
self, entities: Dict[str, entity_instance.EntityInstance],
config_mode: instance_parser.ConfigMode
) -> Dict[str, entity_instance.EntityInstance]:
"""Validates entity instances that are already deserialized.
Args:
entities: a dict of entity instances
config_mode: processing mode of the configuration
Returns:
A dictionary containing valid entities by name
Raises:
SyntaxError: If no building is found in the config
"""
print('Validating entities ...')
building_found = False
valid_entities = {}
validator = entity_instance.CombinationValidator(self.universe, config_mode,
entities)
for entity_name, current_entity in entities.items():
if (current_entity.operation is not instance_parser.EntityOperation.DELETE
and current_entity.type_name.lower() == 'building'):
building_found = True
if not validator.Validate(current_entity):
print(entity_name, 'is not a valid instance')
continue
valid_entities[entity_name] = current_entity
if not building_found:
print('Config must contain a non-deleted entity with a building type')
raise SyntaxError('Building Config must contain an '
'entity with a building type')
print('All entities validated')
return valid_entities
|
apache-2.0
| 6,749,910,311,691,980,000
| 34.704167
| 80
| 0.690629
| false
| 4.49109
| true
| false
| false
|
salv-orlando/MyRepo
|
nova/api/openstack/common.py
|
1
|
13765
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import re
import urlparse
from lxml import etree
import webob
from xml.dom import minidom
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.compute import vm_states
from nova.compute import task_states
from nova import exception
from nova import flags
from nova import ipv6
from nova import log as logging
import nova.network
from nova import quota
LOG = logging.getLogger('nova.api.openstack.common')
FLAGS = flags.FLAGS
XML_NS_V11 = 'http://docs.openstack.org/compute/api/v1.1'
_STATE_MAP = {
vm_states.ACTIVE: {
'default': 'ACTIVE',
task_states.REBOOTING: 'REBOOT',
task_states.REBOOTING_HARD: 'HARD_REBOOT',
task_states.UPDATING_PASSWORD: 'PASSWORD',
task_states.RESIZE_VERIFY: 'VERIFY_RESIZE',
},
vm_states.BUILDING: {
'default': 'BUILD',
},
vm_states.REBUILDING: {
'default': 'REBUILD',
},
vm_states.STOPPED: {
'default': 'STOPPED',
},
vm_states.MIGRATING: {
'default': 'MIGRATING',
},
vm_states.RESIZING: {
'default': 'RESIZE',
},
vm_states.PAUSED: {
'default': 'PAUSED',
},
vm_states.SUSPENDED: {
'default': 'SUSPENDED',
},
vm_states.RESCUED: {
'default': 'RESCUE',
},
vm_states.ERROR: {
'default': 'ERROR',
},
vm_states.DELETED: {
'default': 'DELETED',
},
vm_states.SOFT_DELETE: {
'default': 'DELETED',
},
}
def status_from_state(vm_state, task_state='default'):
"""Given vm_state and task_state, return a status string."""
task_map = _STATE_MAP.get(vm_state, dict(default='UNKNOWN_STATE'))
status = task_map.get(task_state, task_map['default'])
LOG.debug("Generated %(status)s from vm_state=%(vm_state)s "
"task_state=%(task_state)s." % locals())
return status
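# For example, status_from_state(vm_states.ACTIVE, task_states.REBOOTING)
# returns 'REBOOT', and an unknown vm_state maps to 'UNKNOWN_STATE'.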
def vm_state_from_status(status):
"""Map the server status string to a vm state."""
for state, task_map in _STATE_MAP.iteritems():
status_string = task_map.get("default")
if status.lower() == status_string.lower():
return state
def get_pagination_params(request):
"""Return marker, limit tuple from request.
:param request: `wsgi.Request` possibly containing 'marker' and 'limit'
GET variables. 'marker' is the id of the last element
the client has seen, and 'limit' is the maximum number
of items to return. If 'limit' is not specified, 0, or
> max_limit, we default to max_limit. Negative values
for either marker or limit will cause
exc.HTTPBadRequest() exceptions to be raised.
"""
params = {}
if 'limit' in request.GET:
params['limit'] = _get_limit_param(request)
if 'marker' in request.GET:
params['marker'] = _get_marker_param(request)
return params
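# e.g. a request for /servers?marker=1234&limit=5 yields
# {'marker': '1234', 'limit': 5}.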
def _get_limit_param(request):
"""Extract integer limit from request or fail"""
try:
limit = int(request.GET['limit'])
except ValueError:
msg = _('limit param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _('limit param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _get_marker_param(request):
"""Extract marker id from request or fail"""
return request.GET['marker']
def limited(items, request, max_limit=FLAGS.osapi_max_limit):
"""
Return a slice of items according to requested offset and limit.
@param items: A sliceable entity
@param request: `wsgi.Request` possibly containing 'offset' and 'limit'
GET variables. 'offset' is where to start in the list,
and 'limit' is the maximum number of items to return. If
'limit' is not specified, 0, or > max_limit, we default
to max_limit. Negative values for either offset or limit
will cause exc.HTTPBadRequest() exceptions to be raised.
@kwarg max_limit: The maximum number of items to return from 'items'
"""
try:
offset = int(request.GET.get('offset', 0))
except ValueError:
msg = _('offset param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
limit = int(request.GET.get('limit', max_limit))
except ValueError:
msg = _('limit param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _('limit param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
if offset < 0:
msg = _('offset param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
limit = min(max_limit, limit or max_limit)
range_end = offset + limit
return items[offset:range_end]
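# e.g. with ?offset=10&limit=5, limited(items, request) returns
# items[10:15]; omitting both falls back to items[0:max_limit].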
def limited_by_marker(items, request, max_limit=FLAGS.osapi_max_limit):
"""Return a slice of items according to the requested marker and limit."""
params = get_pagination_params(request)
limit = params.get('limit', max_limit)
marker = params.get('marker')
limit = min(max_limit, limit)
start_index = 0
if marker:
start_index = -1
for i, item in enumerate(items):
if item['id'] == marker or item.get('uuid') == marker:
start_index = i + 1
break
if start_index < 0:
msg = _('marker [%s] not found') % marker
raise webob.exc.HTTPBadRequest(explanation=msg)
range_end = start_index + limit
return items[start_index:range_end]
def get_id_from_href(href):
"""Return the id or uuid portion of a url.
Given: 'http://www.foo.com/bar/123?q=4'
Returns: '123'
Given: 'http://www.foo.com/bar/abc123?q=4'
Returns: 'abc123'
"""
return urlparse.urlsplit("%s" % href).path.split('/')[-1]
def remove_version_from_href(href):
"""Removes the first api version from the href.
Given: 'http://www.nova.com/v1.1/123'
Returns: 'http://www.nova.com/123'
Given: 'http://www.nova.com/v1.1'
Returns: 'http://www.nova.com'
"""
parsed_url = urlparse.urlsplit(href)
new_path = re.sub(r'^/v[0-9]+\.[0-9]+(/|$)', r'\1', parsed_url.path,
count=1)
if new_path == parsed_url.path:
msg = _('href %s does not contain version') % href
LOG.debug(msg)
raise ValueError(msg)
parsed_url = list(parsed_url)
parsed_url[2] = new_path
return urlparse.urlunsplit(parsed_url)
def get_version_from_href(href):
"""Returns the api version in the href.
Returns the api version in the href.
If no version is found, 1.0 is returned
Given: 'http://www.nova.com/123'
Returns: '1.0'
Given: 'http://www.nova.com/v1.1'
Returns: '1.1'
"""
try:
#finds the first instance that matches /v#.#/
version = re.findall(r'[/][v][0-9]+\.[0-9]+[/]', href)
#if no version was found, try finding /v#.# at the end of the string
if not version:
version = re.findall(r'[/][v][0-9]+\.[0-9]+$', href)
version = re.findall(r'[0-9]+\.[0-9]', version[0])[0]
except IndexError:
version = '1.0'
return version
def check_img_metadata_quota_limit(context, metadata):
if metadata is None:
return
num_metadata = len(metadata)
quota_metadata = quota.allowed_metadata_items(context, num_metadata)
if quota_metadata < num_metadata:
expl = _("Image metadata limit exceeded")
raise webob.exc.HTTPRequestEntityTooLarge(explanation=expl,
headers={'Retry-After': 0})
def dict_to_query_str(params):
# TODO: we should just use urllib.urlencode instead of this
# But currently we don't work with urlencoded url's
param_str = ""
for key, val in params.iteritems():
param_str = param_str + '='.join([str(key), str(val)]) + '&'
return param_str.rstrip('&')
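# e.g. dict_to_query_str({'limit': 5, 'marker': 'abc'}) produces
# 'limit=5&marker=abc' (key order follows dict iteration order).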
def get_networks_for_instance(context, instance):
"""Returns a prepared nw_info list for passing into the view
builders
We end up with a data structure like:
{'public': {'ips': [{'addr': '10.0.0.1', 'version': 4},
{'addr': '2001::1', 'version': 6}],
'floating_ips': [{'addr': '172.16.0.1', 'version': 4},
{'addr': '172.16.2.1', 'version': 4}]},
...}
"""
def _emit_addr(ip, version):
return {'addr': ip, 'version': version}
networks = {}
fixed_ips = instance['fixed_ips']
ipv6_addrs_seen = {}
for fixed_ip in fixed_ips:
fixed_addr = fixed_ip['address']
network = fixed_ip['network']
vif = fixed_ip.get('virtual_interface')
if not network or not vif:
name = instance['name']
ip = fixed_ip['address']
LOG.warn(_("Instance %(name)s has stale IP "
"address: %(ip)s (no network or vif)") % locals())
continue
label = network.get('label', None)
if label is None:
continue
if label not in networks:
networks[label] = {'ips': [], 'floating_ips': []}
nw_dict = networks[label]
cidr_v6 = network.get('cidr_v6')
if FLAGS.use_ipv6 and cidr_v6:
ipv6_addr = ipv6.to_global(cidr_v6, vif['address'],
network['project_id'])
# Only add same IPv6 address once. It's possible we've
# seen it before if there was a previous fixed_ip with
# same network and vif as this one
if not ipv6_addrs_seen.get(ipv6_addr):
nw_dict['ips'].append(_emit_addr(ipv6_addr, 6))
ipv6_addrs_seen[ipv6_addr] = True
nw_dict['ips'].append(_emit_addr(fixed_addr, 4))
for floating_ip in fixed_ip.get('floating_ips', []):
float_addr = floating_ip['address']
nw_dict['floating_ips'].append(_emit_addr(float_addr, 4))
return networks
class MetadataXMLDeserializer(wsgi.XMLDeserializer):
def extract_metadata(self, metadata_node):
"""Marshal the metadata attribute of a parsed request"""
if metadata_node is None:
return {}
metadata = {}
for meta_node in self.find_children_named(metadata_node, "meta"):
key = meta_node.getAttribute("key")
metadata[key] = self.extract_text(meta_node)
return metadata
def _extract_metadata_container(self, datastring):
dom = minidom.parseString(datastring)
metadata_node = self.find_first_child_named(dom, "metadata")
metadata = self.extract_metadata(metadata_node)
return {'body': {'metadata': metadata}}
def create(self, datastring):
return self._extract_metadata_container(datastring)
def update_all(self, datastring):
return self._extract_metadata_container(datastring)
def update(self, datastring):
dom = minidom.parseString(datastring)
metadata_item = self.extract_metadata(dom)
return {'body': {'meta': metadata_item}}
class MetadataHeadersSerializer(wsgi.ResponseHeadersSerializer):
def delete(self, response, data):
response.status_int = 204
metadata_nsmap = {None: xmlutil.XMLNS_V11}
class MetaItemTemplate(xmlutil.TemplateBuilder):
def construct(self):
sel = xmlutil.Selector('meta', xmlutil.get_items, 0)
root = xmlutil.TemplateElement('meta', selector=sel)
root.set('key', 0)
root.text = 1
return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap)
class MetadataTemplateElement(xmlutil.TemplateElement):
def will_render(self, datum):
return True
class MetadataTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = MetadataTemplateElement('metadata', selector='metadata')
elem = xmlutil.SubTemplateElement(root, 'meta',
selector=xmlutil.get_items)
elem.set('key', 0)
elem.text = 1
return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap)
class MetadataXMLSerializer(xmlutil.XMLTemplateSerializer):
def index(self):
return MetadataTemplate()
def create(self):
return MetadataTemplate()
def update_all(self):
return MetadataTemplate()
def show(self):
return MetaItemTemplate()
def update(self):
return MetaItemTemplate()
def default(self):
return xmlutil.MasterTemplate(None, 1)
def check_snapshots_enabled(f):
@functools.wraps(f)
def inner(*args, **kwargs):
if not FLAGS.allow_instance_snapshots:
LOG.warn(_('Rejecting snapshot request, snapshots currently'
' disabled'))
msg = _("Instance snapshots are not permitted at this time.")
raise webob.exc.HTTPBadRequest(explanation=msg)
return f(*args, **kwargs)
return inner
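# Typical usage (sketch; the decorated action name is illustrative): applied
# to a snapshot-creating API action so it can be disabled via
# FLAGS.allow_instance_snapshots:
#
#   @common.check_snapshots_enabled
#   def _action_create_image(self, req, id, body):
#       ...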
|
apache-2.0
| -6,424,597,816,870,545,000
| 31.011628
| 78
| 0.609517
| false
| 3.825737
| false
| false
| false
|