repo_name
stringlengths 5
92
| path
stringlengths 4
221
| copies
stringclasses 19
values | size
stringlengths 4
6
| content
stringlengths 766
896k
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 32
997
| alpha_frac
float64 0.25
0.96
| autogenerated
bool 1
class | ratio
float64 1.5
13.6
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
iandees/all-the-places
|
locations/spiders/pizzaranch.py
|
1
|
3924
|
import scrapy
import re
from locations.items import GeojsonPointItem
import json
class CVSSpider(scrapy.Spider):
    # NOTE(review): class is named CVSSpider but scrapes pizzaranch.com --
    # almost certainly copy-paste residue from another spider. Left unchanged
    # because other code may reference the class by name.
    name = "pizzaranch"
    allowed_domains = ["pizzaranch.com"]
    download_delay = 0.5
    start_urls = (
        'https://pizzaranch.com/locations',
    )

    def parse_times(self, times):
        """Convert a display string like "11:00 AM - 10:00 PM" into 24-hour
        "HH:MM-HH:MM" form. "Open 24 hours" maps to "24/7"."""
        if times.strip() == 'Open 24 hours':
            return '24/7'
        hours_to = [x.strip() for x in times.split('-')]
        cleaned_times = []
        for hour in hours_to:
            if re.search('PM$', hour):
                hour = re.sub('PM', '', hour).strip()
                hour_min = hour.split(":")
                # Shift 1-11 PM to 13-23; "12 PM" stays 12.
                if int(hour_min[0]) < 12:
                    hour_min[0] = str(12 + int(hour_min[0]))
                cleaned_times.append(":".join(hour_min))
            if re.search('AM$', hour):
                hour = re.sub('AM', '', hour).strip()
                hour_min = hour.split(":")
                # Zero-pad single-digit AM hours ("9" -> "09").
                # NOTE(review): "12 AM" is left as "12", not normalized to
                # "00" -- confirm whether midnight should map to 00:00.
                if len(hour_min[0]) < 2:
                    hour_min[0] = hour_min[0].zfill(2)
                else:
                    hour_min[0] = str(int(hour_min[0]))
                cleaned_times.append(":".join(hour_min))
        return "-".join(cleaned_times)

    def parse_hours(self, lis):
        """Build an opening-hours string ("Mo 09:00-21:00; Tu ...") from the
        <tr> selectors of a location's hours table."""
        hours = []
        for li in lis:
            day = li.xpath('normalize-space(.//td[@class="c-location-hours-details-row-day"]/text())').extract_first()[:2]
            times = li.xpath('.//td[@class="c-location-hours-details-row-intervals"]/span/span/text()').extract()
            times = "".join(x for x in times)
            if times and day:
                parsed_time = self.parse_times(times)
                hours.append(day + ' ' + parsed_time)
        return "; ".join(hours)

    def parse_stores(self, response):
        """Parse a single store page into a GeojsonPointItem."""
        # The page embeds store coordinates/id as JSON in a map-config script tag.
        map_data = response.xpath('normalize-space(//script[@id="js-map-config-dir-map-nap-map"]/text())').extract_first()
        map_json = json.loads(map_data)
        properties = {
            'name': response.xpath('//span[@class="location-name-geo"]/text()').extract_first(),
            'addr_full': response.xpath('normalize-space(//span[@itemprop="streetAddress"]/span/text())').extract_first(),
            'phone': response.xpath('normalize-space(//div[@class="c-phone-number c-phone-main-number"]/span[@class="c-phone-number-span c-phone-main-number-span"]/text())').extract_first(),
            'city': response.xpath('normalize-space(//span[@itemprop="addressLocality"]/text())').extract_first(),
            'state': response.xpath('normalize-space(//abbr[@itemprop="addressRegion"]/text())').extract_first(),
            'postcode': response.xpath('normalize-space(//span[@itemprop="postalCode"]/text())').extract_first(),
            'ref': map_json['locs'][0]['id'],
            'website': response.url,
            'lat': float(map_json['locs'][0]['latitude']),
            'lon': float(map_json['locs'][0]['longitude']),
        }
        hours = self.parse_hours(response.xpath('//div[@class="hours-primary hidden-xs"]/div[@class="c-location-hours"]/div[@class="c-location-hours-details-wrapper js-location-hours"]/table/tbody/tr'))
        if hours:
            properties['opening_hours'] = hours
        yield GeojsonPointItem(**properties)

    def parse_state_stores(self, response):
        """Yield one request per store on a state listing page, following pagination."""
        stores = response.xpath('//h3[@class="title"]/a/@href').extract()
        for store in stores:
            yield scrapy.Request(response.urljoin(store), callback=self.parse_stores)
        next_page_url = response.xpath('//div[@class="pagination"]//li[@class="next"]/a/@href').extract_first()
        if next_page_url:
            yield scrapy.Request(next_page_url, callback=self.parse_state_stores)

    def parse(self, response):
        """Entry point: fan out to each state's store listing."""
        urls = response.xpath('//ol[@class="state-list"]/li/a/@href').extract()
        for path in urls:
            yield scrapy.Request(response.urljoin(path), callback=self.parse_state_stores)
|
mit
| -6,156,751,004,344,614,000
| 44.627907
| 202
| 0.568552
| false
| 3.619926
| false
| false
| false
|
thegricean/sinking-marbles
|
models/wonky_world/scripts/parseResults.py
|
1
|
4158
|
import csv
import itertools
import random
import ast
import sys
#usage
# python parseResults.py results.txt
fname = '../results/model_results/'+sys.argv[1]
file_names = [fname]
itemfile = open("items.txt")
items = [" ".join(l.rstrip().split()) for l in itemfile.readlines()]
itemfile.close()
print items
lines = []
results = []
wresults = []
files = [open(fn) for fn in file_names]
for f in files:
lines.extend([l.rstrip() for l in f.readlines()])
#print lines
def getReducedAlternatives(alts):
    """Reduce an alternatives spec string to a compact ordered tag string.

    Each marker substring found in `alts` contributes a numbered tag; the
    tags are concatenated in a fixed order (basic, lownum, extra, highnum,
    twowords, threewords), e.g. "some,all,none" -> "0_basic".
    """
    markers = (
        ("some,all,none", "0_basic"),
        ("one,two,three", "1_lownum"),
        ("many", "2_extra"),
        ("eleven", "3_highnum"),
        ("almostall", "4_twowords"),
        ("lessthanhalf", "5_threewords"),
    )
    return "".join(tag for needle, tag in markers if needle in alts)
headers = ["Item","QUD","NumState","Alternatives","SpeakerOptimality","PriorProbability-0","PriorProbability-1","PriorProbability-2","PriorProbability-3","PriorProbability-4","PriorProbability-5","PriorProbability-6","PriorProbability-7","PriorProbability-8","PriorProbability-9","PriorProbability-10","PriorProbability-11","PriorProbability-12","PriorProbability-13","PriorProbability-14","PriorProbability-15","PosteriorProbability","SmoothingBW"]
k = 0
mcnt = 0
condcnt = 0
priorcnt = 0
while k < len(lines):
if lines[k] == "alternatives":
if priorcnt < 89:
priorcnt = priorcnt+1
else:
priorcnt = 0
mcnt = mcnt + 1
k = k + 1
alts = getReducedAlternatives(lines[k])
k = k + 1
smoothing_bw = lines[k].split("_")[1]
k = k + 1
priors = lines[k].split(",")
k = k + 1
qud = lines[k].split(",")[1]
k = k + 1
spopt = lines[k].split(",")[1]
k = k + 1
pairs = lines[k].split(",,")
print pairs
print k
ssize = pairs[0].split(",")
prob = pairs[1].split(",")
for j in range(len(ssize)):
# print priorcnt
# print len(items)
results.append([items[priorcnt],qud, ssize[j], alts, spopt, priors[0], priors[1], priors[2], priors[3], priors[4], priors[5], priors[6], priors[7], priors[8], priors[9], priors[10], priors[11], priors[12], priors[13], priors[14], priors[15], prob[j],smoothing_bw])
k = k + 1
elif lines[k].startswith("speaker-opt"):
spopt = lines[k].split(",")[1]
k = k + 1
pairs = lines[k].split(",,")
print pairs
ssize = pairs[0].split(",")
prob = pairs[1].split(",")
for j in range(len(ssize)):
results.append([items[priorcnt],qud, ssize[j], alts, spopt, priors[0], priors[1], priors[2], priors[3], priors[4], priors[5], priors[6], priors[7], priors[8], priors[9], priors[10], priors[11], priors[12], priors[13], priors[14], priors[15], prob[j],smoothing_bw])
k = k + 1
elif lines[k].startswith("qud"):
qud = lines[k].split(",")[1]
k = k + 1
spopt = lines[k].split(",")[1]
k = k + 1
pairs = lines[k].split(",,")
print pairs
ssize = pairs[0].split(",")
prob = pairs[1].split(",")
for j in range(len(ssize)):
results.append([items[priorcnt],qud, ssize[j], alts, spopt, priors[0], priors[1], priors[2], priors[3], priors[4], priors[5], priors[6], priors[7], priors[8], priors[9], priors[10], priors[11], priors[12], priors[13], priors[14], priors[15], prob[j],smoothing_bw])
k = k + 1
else:
#print lines[k]
print "this shouldn't be happening"
#print results
for r in results:
inner_dict = dict(zip(headers,r))
wresults.append(inner_dict)
oname = '../results/data/parsed_marble_results.tsv'
#w = csv.DictWriter(open('../results-simulation/parsed/pragmatic-speaker-uniform.csv', 'wb'),fieldnames=headers,restval="NA",delimiter="\t")
#w = csv.DictWriter(open('../results-simulation/parsed/pragmatic-speaker-simple.csv', 'wb'),fieldnames=headers,restval="NA",delimiter="\t")
#w = csv.DictWriter(open('../results-simulation/parsed/pragmatic-speaker-partitive.csv', 'wb'),fieldnames=headers,restval="NA",delimiter="\t")
w = csv.DictWriter(open(oname, 'wb'),fieldnames=headers,restval="NA",delimiter="\t")
w.writeheader()
w.writerows(wresults)
|
mit
| -4,322,104,831,348,527,600
| 32.264
| 457
| 0.64911
| false
| 2.633312
| false
| false
| false
|
mdrasmus/argweaver
|
argweaver/emit.py
|
1
|
4373
|
#
# HMM emission related functions
#
from math import exp, log
#=============================================================================
def parsimony_ancestral_seq(tree, seqs, pos):
    """Calculates ancestral sequence for a local tree using parsimony.

    Two passes of unweighted parsimony: an upward (postorder) pass builds
    candidate-base sets, a downward (preorder) pass resolves each node to a
    single base. Returns {node_name: base}.
    """
    ancestral = {}
    sets = {}

    # Upward pass: leaves seed their observed base; internal nodes take the
    # intersection of their children's sets, or the union if it is empty.
    for node in tree.postorder():
        if node.is_leaf():
            sets[node] = set([seqs[node.name][pos]])
        else:
            lset = sets[node.children[0]]
            rset = sets[node.children[1]]
            shared = lset & rset
            sets[node] = shared if shared else (lset | rset)

    # Downward pass: resolve ambiguity using the parent's assigned base.
    for node in tree.preorder():
        candidates = sets[node]
        if len(candidates) == 1 or not node.parents:
            # Fixed A > C > G > T preference keeps the assignment deterministic.
            ancestral[node.name] = ("A" if "A" in candidates else
                                    "C" if "C" in candidates else
                                    "G" if "G" in candidates else
                                    "T")
        else:
            pchar = ancestral[node.parents[0].name]
            if pchar in candidates:
                ancestral[node.name] = pchar
            else:
                ancestral[node.name] = ("A" if "A" in candidates else
                                        "C" if "C" in candidates else
                                        "G" if "G" in candidates else
                                        "T")
    return ancestral
def calc_emission(tree, model, pos, new_name):
"""
Calculates emissions for all states at positions 'pos'
"""
mu = model.mu
seqs = model.seqs
mintime = model.time_steps[0]
emit = []
for node_name, timei in model.states[pos]:
node = tree[node_name]
time = model.times[timei]
local_site = parsimony_ancestral_seq(tree, seqs, pos)
# v = new chromosome
# x = current branch
# p = parent of current branch
if node.parents:
parent = node.parents[0]
parent_age = parent.age
if not parent.parents:
# unwrap top branch
c = parent.children
sib = (c[1] if node == c[0] else c[0])
v = seqs[new_name][pos]
x = local_site[node.name]
p = local_site[sib.name]
# modify (x,p) length to (x,p) + (sib,p)
parent_age = 2 * parent_age - sib.age
else:
v = seqs[new_name][pos]
x = local_site[node.name]
p = local_site[parent.name]
else:
parent = None
parent_age = None
# adjust time by unwrapping branch
time = 2 * time - node.age
v = seqs[new_name][pos]
x = local_site[node.name]
p = x
time = max(time, mintime)
if v == x == p:
# no mutation
emit.append(- mu * time)
elif v != p == x:
# mutation on v
emit.append(log(.3333 - .3333 * exp(-mu * time)))
elif v == p != x:
# mutation on x
t1 = max(parent_age - node.age, mintime)
t2 = max(time - node.age, mintime)
emit.append(log((1 - exp(-mu * t2)) / (1 - exp(-mu * t1))
* exp(-mu * (time + t2 - t1))))
elif v == x != p:
# mutation on (y,p)
t1 = max(parent_age - node.age, mintime)
t2 = max(parent_age - time, mintime)
emit.append(log((1 - exp(-mu * t2)) / (1 - exp(-mu * t1))
* exp(-mu * (time + t2 - t1))))
else:
# two mutations (v,x)
# mutation on x
if parent:
t1 = max(parent_age - node.age, mintime)
t2a = max(parent_age - time, mintime)
else:
t1 = max(model.times[-1] - node.age, mintime)
t2a = max(model.times[-1] - time, mintime)
t2b = max(time - node.age, mintime)
t2 = max(t2a, t2b)
t3 = time
emit.append(log((1 - exp(-mu * t2)) * (1 - exp(-mu * t3))
/ (1 - exp(-mu * t1))
* exp(-mu * (time + t2 + t3 - t1))))
return emit
|
mit
| -7,273,805,090,329,823,000
| 28.952055
| 78
| 0.430597
| false
| 3.789428
| false
| false
| false
|
ragnraok/MonoReader
|
monoweb/mono/api/objects.py
|
1
|
2368
|
"""
API object specification and correspond methods
"""
def fill_list_article_object(article_id, title, site, updated, cover_url, is_fav):
    """
    list article object:
    {
        article_id: article_id,
        title: title,
        site: site_title,
        updated: YYYY-MM-DD,
        cover_url: url, may be None
        is_fav: is_fav, boolean
    }
    """
    return {
        "article_id": article_id,
        "title": title,
        "site": site,
        "updated": updated.strftime("%Y-%m-%d"),
        "cover_url": cover_url,
        "is_fav": is_fav,
    }
def fill_article_object(article_id, title, site, updated, content, url, cover_url,
                        is_fav):
    """
    article object:
    {
        article_id: article_id,
        title: title,
        site: site_title,
        updated: YYYY-MM-DD,
        content: html content,
        url: origin article url,
        cover_url: url, may be null
        is_fav: is_fav, boolean
    }
    """
    return {
        "article_id": article_id,
        "title": title,
        "site": site,
        "updated": updated.strftime("%Y-%m-%d"),
        "content": content,
        "url": url,
        "cover_url": cover_url,
        "is_fav": is_fav,
    }
def fill_site_object(site_id, title, updated, url, category, is_read_daily, article_count,
                     is_un_classified):
    """
    site object:
    {
        site_id: site_id,
        title: title,
        updated: YYYY-MM-DD,
        category: category name, may be None,
        is_fav: boolean (taken from is_read_daily),
        article_count: article_count,
        url: url,
        is_un_classified: boolean
    }
    """
    # `category` may be a model object or None; expose only its name.
    category_name = getattr(category, 'name', None)
    return {
        "site_id": site_id,
        "title": title,
        "updated": updated.strftime("%Y-%m-%d"),
        "category": category_name,
        "is_fav": is_read_daily,
        "article_count": article_count,
        "url": url,
        "is_un_classified": is_un_classified,
    }
def fill_category_object(category_id, name, is_un_classified):
    """
    category object:
    {
        category_id: category_id,
        name: category_name,
        is_un_classified: boolean
    }
    """
    return {
        "category_id": category_id,
        "name": name,
        "is_un_classified": is_un_classified,
    }
def fill_change_date_object(timestamp):
    """
    change date object:
    {
        timestamp: timestamp
    }
    """
    return {"timestamp": timestamp}
|
mit
| -4,820,680,407,312,486,000
| 28.974684
| 90
| 0.549831
| false
| 3.72327
| false
| false
| false
|
Fuzion24/mitmproxy
|
libmproxy/console/flowview.py
|
1
|
22322
|
from __future__ import absolute_import
import os
import sys
import urwid
from netlib import odict
from . import common, grideditor, contentview, signals, searchable, tabs
from . import flowdetailview
from .. import utils, controller
from ..protocol.http import HTTPRequest, HTTPResponse, CONTENT_MISSING, decoded
class SearchError(Exception):
    """Raised when an in-view text search cannot be performed."""
def _mkhelp():
    """Assemble the help-screen body: key bindings plus the display-mode
    shortcut entries (rendered via common.highlight_key)."""
    keys = [
        ("A", "accept all intercepted flows"),
        ("a", "accept this intercepted flow"),
        ("b", "save request/response body"),
        ("Z", "copy as curl command"),
        ("d", "delete flow"),
        ("D", "duplicate flow"),
        ("e", "edit request/response"),
        ("f", "load full body data"),
        ("m", "change body display mode for this entity"),
        (None,
            common.highlight_key("automatic", "a") +
            [("text", ": automatic detection")]
        ),
        (None,
            common.highlight_key("hex", "e") +
            [("text", ": Hex")]
        ),
        (None,
            common.highlight_key("html", "h") +
            [("text", ": HTML")]
        ),
        (None,
            common.highlight_key("image", "i") +
            [("text", ": Image")]
        ),
        (None,
            common.highlight_key("javascript", "j") +
            [("text", ": JavaScript")]
        ),
        (None,
            common.highlight_key("json", "s") +
            [("text", ": JSON")]
        ),
        (None,
            common.highlight_key("urlencoded", "u") +
            [("text", ": URL-encoded data")]
        ),
        (None,
            common.highlight_key("raw", "r") +
            [("text", ": raw data")]
        ),
        (None,
            common.highlight_key("xml", "x") +
            [("text", ": XML")]
        ),
        ("M", "change default body display mode"),
        ("p", "previous flow"),
        ("P", "copy response(content/headers) to clipboard"),
        ("r", "replay request"),
        ("V", "revert changes to request"),
        ("v", "view body in external viewer"),
        ("w", "save all flows matching current limit"),
        ("W", "save this flow"),
        ("x", "delete body"),
        ("z", "encode/decode a request/response"),
        ("tab", "next tab"),
        ("h, l", "previous tab, next tab"),
        ("space", "next flow"),
        ("|", "run script on this flow"),
        ("/", "search (case sensitive)"),
        ("n", "repeat search forward"),
        ("N", "repeat search backwards"),
    ]
    helptext = []
    helptext.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
    return helptext

help_context = _mkhelp()

# Footer hint line displayed below the flow view.
footer = [
    ('heading_key', "?"), ":help ",
    ('heading_key', "q"), ":back ",
]
class FlowViewHeader(urwid.WidgetWrap):
    """Single-line header summarizing one flow; re-renders on flow_change."""

    def __init__(self, master, f):
        self.master, self.flow = master, f
        self._w = self._render(f)
        signals.flow_change.connect(self.sig_flow_change)

    def _render(self, flow):
        # Shared rendering used by both the constructor and change handler.
        return common.format_flow(
            flow,
            False,
            extended=True,
            padding=0,
            hostheader=self.master.showhost
        )

    def sig_flow_change(self, sender, flow):
        # Only redraw when the changed flow is the one on display.
        if flow == self.flow:
            self._w = self._render(flow)
# LRU cache used by FlowView.content_view to memoize rendered content views
# (keyed on viewmode, headers, content, limit) so redraws don't re-render bodies.
cache = utils.LRUCache(200)
# Tab indices within FlowView (third tab is the details view).
TAB_REQ = 0
TAB_RESP = 1
class FlowView(tabs.Tabs):
    """Tabbed detail view of a single flow: request, response, and details."""
    highlight_color = "focusfield"

    def __init__(self, master, state, flow, tab_offset):
        self.master, self.state, self.flow = master, state, flow
        tabs.Tabs.__init__(
            self,
            [
                (self.tab_request, self.view_request),
                (self.tab_response, self.view_response),
                (self.tab_details, self.view_details),
            ],
            tab_offset
        )
        self.show()
        self.last_displayed_body = None
        signals.flow_change.connect(self.sig_flow_change)

    def tab_request(self):
        if self.flow.intercepted and not self.flow.reply.acked and not self.flow.response:
            return "Request intercepted"
        else:
            return "Request"

    def tab_response(self):
        if self.flow.intercepted and not self.flow.reply.acked and self.flow.response:
            return "Response intercepted"
        else:
            return "Response"

    def tab_details(self):
        return "Detail"

    def view_request(self):
        return self.conn_text(self.flow.request)

    def view_response(self):
        return self.conn_text(self.flow.response)

    def view_details(self):
        return flowdetailview.flowdetails(self.state, self.flow)

    def sig_flow_change(self, sender, flow):
        if flow == self.flow:
            self.show()

    def content_view(self, viewmode, conn):
        """Return (description, widget list) for conn's body in viewmode."""
        if conn.content == CONTENT_MISSING:
            msg, body = "", [urwid.Text([("error", "[content missing]")])]
            return (msg, body)
        else:
            full = self.state.get_flow_setting(
                self.flow,
                (self.tab_offset, "fullcontents"),
                False
            )
            if full:
                limit = sys.maxsize
            else:
                limit = contentview.VIEW_CUTOFF
            # Rendering is cached module-wide; the key includes the headers
            # and content so stale entries are never served.
            description, text_objects = cache.get(
                contentview.get_content_view,
                viewmode,
                tuple(tuple(i) for i in conn.headers.lst),
                conn.content,
                limit,
                isinstance(conn, HTTPRequest)
            )
            return (description, text_objects)

    def viewmode_get(self):
        """Per-flow display-mode override, else the global default."""
        override = self.state.get_flow_setting(
            self.flow,
            (self.tab_offset, "prettyview")
        )
        return self.state.default_body_view if override is None else override

    def conn_text(self, conn):
        """Render headers + body of a request/response as a searchable widget."""
        if conn:
            txt = common.format_keyvals(
                [(h + ":", v) for (h, v) in conn.headers.lst],
                key = "header",
                val = "text"
            )
            viewmode = self.viewmode_get()
            msg, body = self.content_view(viewmode, conn)
            cols = [
                urwid.Text(
                    [
                        ("heading", msg),
                    ]
                )
            ]
            cols.append(
                urwid.Text(
                    [
                        " ",
                        ('heading', "["),
                        ('heading_key', "m"),
                        ('heading', (":%s]" % viewmode.name)),
                    ],
                    align="right"
                )
            )
            title = urwid.AttrWrap(urwid.Columns(cols), "heading")
            txt.append(title)
            txt.extend(body)
        else:
            txt = [
                urwid.Text(""),
                urwid.Text(
                    [
                        ("highlight", "No response. Press "),
                        ("key", "e"),
                        ("highlight", " and edit any aspect to add one."),
                    ]
                )
            ]
        return searchable.Searchable(self.state, txt)

    def set_method_raw(self, m):
        if m:
            self.flow.request.method = m
            signals.flow_change.send(self, flow = self.flow)

    def edit_method(self, m):
        if m == "e":
            # Free-form entry.
            signals.status_prompt.send(
                prompt = "Method",
                text = self.flow.request.method,
                callback = self.set_method_raw
            )
        else:
            for i in common.METHOD_OPTIONS:
                if i[1] == m:
                    self.flow.request.method = i[0].upper()
            signals.flow_change.send(self, flow = self.flow)

    def set_url(self, url):
        request = self.flow.request
        try:
            request.url = str(url)
        except ValueError:
            return "Invalid URL."
        signals.flow_change.send(self, flow = self.flow)

    def set_resp_code(self, code):
        response = self.flow.response
        try:
            response.code = int(code)
        except ValueError:
            return None
        import BaseHTTPServer
        # Keep the reason phrase consistent with the new status code.
        if int(code) in BaseHTTPServer.BaseHTTPRequestHandler.responses:
            response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[
                int(code)][0]
        signals.flow_change.send(self, flow = self.flow)

    def set_resp_msg(self, msg):
        response = self.flow.response
        response.msg = msg
        signals.flow_change.send(self, flow = self.flow)

    def set_headers(self, lst, conn):
        conn.headers = odict.ODictCaseless(lst)
        signals.flow_change.send(self, flow = self.flow)

    def set_query(self, lst, conn):
        conn.set_query(odict.ODict(lst))
        signals.flow_change.send(self, flow = self.flow)

    def set_path_components(self, lst, conn):
        conn.set_path_components(lst)
        signals.flow_change.send(self, flow = self.flow)

    def set_form(self, lst, conn):
        conn.set_form_urlencoded(odict.ODict(lst))
        signals.flow_change.send(self, flow = self.flow)

    def edit_form(self, conn):
        self.master.view_grideditor(
            grideditor.URLEncodedFormEditor(
                self.master,
                conn.get_form_urlencoded().lst,
                self.set_form,
                conn
            )
        )

    def edit_form_confirm(self, key, conn):
        if key == "y":
            self.edit_form(conn)

    def set_cookies(self, lst, conn):
        od = odict.ODict(lst)
        conn.set_cookies(od)
        signals.flow_change.send(self, flow = self.flow)

    def set_setcookies(self, data, conn):
        conn.set_cookies(data)
        signals.flow_change.send(self, flow = self.flow)

    def edit(self, part):
        """Dispatch a single-key edit command against the current tab's message."""
        if self.tab_offset == TAB_REQ:
            message = self.flow.request
        else:
            if not self.flow.response:
                # Editing a response that doesn't exist yet: synthesize one.
                self.flow.response = HTTPResponse(
                    self.flow.request.httpversion,
                    200, "OK", odict.ODictCaseless(), ""
                )
                self.flow.response.reply = controller.DummyReply()
            message = self.flow.response

        self.flow.backup()
        if message == self.flow.request and part == "c":
            self.master.view_grideditor(
                grideditor.CookieEditor(
                    self.master,
                    message.get_cookies().lst,
                    self.set_cookies,
                    message
                )
            )
        if message == self.flow.response and part == "c":
            self.master.view_grideditor(
                grideditor.SetCookieEditor(
                    self.master,
                    message.get_cookies(),
                    self.set_setcookies,
                    message
                )
            )
        if part == "r":
            with decoded(message):
                # Fix an issue caused by some editors when editing a
                # request/response body. Many editors make it hard to save a
                # file without a terminating newline on the last line. When
                # editing message bodies, this can cause problems. For now, I
                # just strip the newlines off the end of the body when we return
                # from an editor.
                c = self.master.spawn_editor(message.content or "")
                message.content = c.rstrip("\n")
        elif part == "f":
            if not message.get_form_urlencoded() and message.content:
                signals.status_prompt_onekey.send(
                    prompt = "Existing body is not a URL-encoded form. Clear and edit?",
                    keys = [
                        ("yes", "y"),
                        ("no", "n"),
                    ],
                    callback = self.edit_form_confirm,
                    args = (message,)
                )
            else:
                self.edit_form(message)
        elif part == "h":
            self.master.view_grideditor(
                grideditor.HeaderEditor(
                    self.master,
                    message.headers.lst,
                    self.set_headers,
                    message
                )
            )
        elif part == "p":
            p = message.get_path_components()
            self.master.view_grideditor(
                grideditor.PathEditor(
                    self.master,
                    p,
                    self.set_path_components,
                    message
                )
            )
        elif part == "q":
            self.master.view_grideditor(
                grideditor.QueryEditor(
                    self.master,
                    message.get_query().lst,
                    self.set_query, message
                )
            )
        elif part == "u":
            signals.status_prompt.send(
                prompt = "URL",
                text = message.url,
                callback = self.set_url
            )
        elif part == "m" and message == self.flow.request:
            # BUGFIX: this branch was previously `elif part == "m":` with no
            # request guard, which shadowed the response "message" branch
            # below and made it unreachable.
            signals.status_prompt_onekey.send(
                prompt = "Method",
                keys = common.METHOD_OPTIONS,
                callback = self.edit_method
            )
        elif part == "o":
            signals.status_prompt.send(
                prompt = "Code",
                text = str(message.code),
                callback = self.set_resp_code
            )
        elif part == "m":
            # Response reason phrase ("message").
            signals.status_prompt.send(
                prompt = "Message",
                text = message.msg,
                callback = self.set_resp_msg
            )
        signals.flow_change.send(self, flow = self.flow)

    def _view_nextprev_flow(self, np, flow):
        try:
            idx = self.state.view.index(flow)
        except (IndexError, ValueError):
            # BUGFIX: list.index raises ValueError when the flow is missing;
            # the original caught only IndexError, which list.index never
            # raises, so a missing flow crashed the view.
            return
        if np == "next":
            new_flow, new_idx = self.state.get_next(idx)
        else:
            new_flow, new_idx = self.state.get_prev(idx)
        if new_flow is None:
            signals.status_message.send(message="No more flows!")
        else:
            signals.pop_view_state.send(self)
            self.master.view_flow(new_flow, self.tab_offset)

    def view_next_flow(self, flow):
        return self._view_nextprev_flow("next", flow)

    def view_prev_flow(self, flow):
        return self._view_nextprev_flow("prev", flow)

    def change_this_display_mode(self, t):
        self.state.add_flow_setting(
            self.flow,
            (self.tab_offset, "prettyview"),
            contentview.get_by_shortcut(t)
        )
        signals.flow_change.send(self, flow = self.flow)

    def delete_body(self, t):
        if t == "m":
            val = CONTENT_MISSING
        else:
            val = None
        if self.tab_offset == TAB_REQ:
            self.flow.request.content = val
        else:
            self.flow.response.content = val
        signals.flow_change.send(self, flow = self.flow)

    def keypress(self, size, key):
        """Handle a key press; returns the key if unhandled, else None."""
        key = super(self.__class__, self).keypress(size, key)
        if key == " ":
            self.view_next_flow(self.flow)
            return
        key = common.shortcuts(key)
        # Resolve which message the current tab refers to (None on details tab).
        if self.tab_offset == TAB_REQ:
            conn = self.flow.request
        elif self.tab_offset == TAB_RESP:
            conn = self.flow.response
        else:
            conn = None
        if key in ("up", "down", "page up", "page down"):
            # Why doesn't this just work??
            self._w.keypress(size, key)
        elif key == "a":
            self.flow.accept_intercept(self.master)
            self.master.view_flow(self.flow)
        elif key == "A":
            self.master.accept_all()
            self.master.view_flow(self.flow)
        elif key == "d":
            # Move focus off the flow before killing/deleting it.
            if self.state.flow_count() == 1:
                self.master.view_flowlist()
            elif self.state.view.index(self.flow) == len(self.state.view) - 1:
                self.view_prev_flow(self.flow)
            else:
                self.view_next_flow(self.flow)
            f = self.flow
            f.kill(self.master)
            self.state.delete_flow(f)
        elif key == "D":
            f = self.master.duplicate_flow(self.flow)
            self.master.view_flow(f)
            signals.status_message.send(message="Duplicated.")
        elif key == "p":
            self.view_prev_flow(self.flow)
        elif key == "r":
            r = self.master.replay_request(self.flow)
            if r:
                signals.status_message.send(message=r)
            signals.flow_change.send(self, flow = self.flow)
        elif key == "V":
            if not self.flow.modified():
                signals.status_message.send(message="Flow not modified.")
                return
            self.state.revert(self.flow)
            signals.flow_change.send(self, flow = self.flow)
            signals.status_message.send(message="Reverted.")
        elif key == "W":
            signals.status_prompt_path.send(
                prompt = "Save this flow",
                callback = self.master.save_one_flow,
                args = (self.flow,)
            )
        elif key == "Z":
            common.copy_as_curl_command(self.flow)
        elif key == "|":
            signals.status_prompt_path.send(
                prompt = "Send flow to script",
                callback = self.master.run_script_once,
                args = (self.flow,)
            )
        # The keys below need a concrete request/response to operate on.
        if not conn and key in set(list("befgmxvz")):
            signals.status_message.send(
                message = "Tab to the request or response",
                expire = 1
            )
        elif conn:
            if key == "b":
                if self.tab_offset == TAB_REQ:
                    common.ask_save_body(
                        "q", self.master, self.state, self.flow
                    )
                else:
                    common.ask_save_body(
                        "s", self.master, self.state, self.flow
                    )
            elif key == "e":
                if self.tab_offset == TAB_REQ:
                    signals.status_prompt_onekey.send(
                        prompt = "Edit request",
                        keys = (
                            ("cookies", "c"),
                            ("query", "q"),
                            ("path", "p"),
                            ("url", "u"),
                            ("header", "h"),
                            ("form", "f"),
                            ("raw body", "r"),
                            ("method", "m"),
                        ),
                        callback = self.edit
                    )
                else:
                    signals.status_prompt_onekey.send(
                        prompt = "Edit response",
                        keys = (
                            ("cookies", "c"),
                            ("code", "o"),
                            ("message", "m"),
                            ("header", "h"),
                            ("raw body", "r"),
                        ),
                        callback = self.edit
                    )
                key = None
            elif key == "f":
                signals.status_message.send(message="Loading all body data...")
                self.state.add_flow_setting(
                    self.flow,
                    (self.tab_offset, "fullcontents"),
                    True
                )
                signals.flow_change.send(self, flow = self.flow)
                signals.status_message.send(message="")
            elif key == "P":
                if self.tab_offset == TAB_REQ:
                    scope = "q"
                else:
                    scope = "s"
                common.ask_copy_part(scope, self.flow, self.master, self.state)
            elif key == "m":
                p = list(contentview.view_prompts)
                p.insert(0, ("Clear", "C"))
                signals.status_prompt_onekey.send(
                    self,
                    prompt = "Display mode",
                    keys = p,
                    callback = self.change_this_display_mode
                )
                key = None
            elif key == "x":
                signals.status_prompt_onekey.send(
                    prompt = "Delete body",
                    keys = (
                        ("completely", "c"),
                        ("mark as missing", "m"),
                    ),
                    callback = self.delete_body
                )
                key = None
            elif key == "v":
                if conn.content:
                    t = conn.headers["content-type"] or [None]
                    t = t[0]
                    if "EDITOR" in os.environ or "PAGER" in os.environ:
                        self.master.spawn_external_viewer(conn.content, t)
                    else:
                        signals.status_message.send(
                            message = "Error! Set $EDITOR or $PAGER."
                        )
            elif key == "z":
                self.flow.backup()
                e = conn.headers.get_first("content-encoding", "identity")
                if e != "identity":
                    # Already encoded: decode in place.
                    if not conn.decode():
                        signals.status_message.send(
                            message = "Could not decode - invalid data?"
                        )
                else:
                    signals.status_prompt_onekey.send(
                        prompt = "Select encoding: ",
                        keys = (
                            ("gzip", "z"),
                            ("deflate", "d"),
                        ),
                        callback = self.encode_callback,
                        args = (conn,)
                    )
                signals.flow_change.send(self, flow = self.flow)
        return key

    def encode_callback(self, key, conn):
        encoding_map = {
            "z": "gzip",
            "d": "deflate",
        }
        conn.encode(encoding_map[key])
        signals.flow_change.send(self, flow = self.flow)
|
mit
| 8,710,828,440,068,676,000
| 33.131498
| 90
| 0.457307
| false
| 4.37429
| false
| false
| false
|
sternshus/arelle2.7
|
svr-2.7/arelle/ValidateVersReport.py
|
1
|
44336
|
u'''
Created on Nov 9, 2010
@author: Mark V Systems Limited
(c) Copyright 2010 Mark V Systems Limited, All rights reserved.
'''
from arelle import ModelVersObject, XbrlConst, ValidateXbrl, ModelDocument
from arelle.ModelValue import qname
# Maps versioning concept-attribute event names to the custom-attribute
# element(s) each event carries.
conceptAttributeEventAttributes = {
    u"conceptAttributeDelete": (u"fromCustomAttribute",),
    u"conceptAttributeAdd": (u"toCustomAttribute",),
    # BUGFIX: the original literal listed u"conceptAttributeChange" twice
    # with identical values; duplicate entry removed (a dict keeps only one).
    u"conceptAttributeChange": (u"fromCustomAttribute", u"toCustomAttribute"),
    u"attributeDefinitionChange": (u"fromCustomAttribute", u"toCustomAttribute"),
}
# Maps versioning schema-attribute event names to the XML Schema attribute
# each event refers to.
schemaAttributeEventAttributes = {
    u"conceptIDChange":                u"id",
    u"conceptTypeChange":              u"type",
    u"conceptSubstitutionGroupChange": u"substitutionGroup",
    u"conceptNillableChange":          u"nillable",
    u"conceptAbstractChange":          u"abstract",
    u"conceptBlockChange":             u"block",
    u"conceptDefaultChange":           u"default",
    u"conceptFixedChange":             u"fixed",
    u"conceptFinalChange":             u"final",
}
class ValidateVersReport():
    """Validator for an XBRL versioning report.

    Validates the report's from/to DTSes, then checks namespace renames,
    role changes, concept use/details change events, relationship set
    change events, and instance aspect change events against both DTSes.
    Errors are emitted through the report's error/info logging; nothing
    is returned.
    """
    def __init__(self, testModelXbrl):
        self.testModelXbrl = testModelXbrl # testcase or controlling validation object
    def close(self):
        """Release all references held by this validator."""
        self.__dict__.clear() # dereference everything
    def validate(self, modelVersReport):
        """Validate the versioning report *modelVersReport* in place.

        modelVersReport: the ModelXbrl whose modelDocument is the
        versioning report to validate.
        """
        self.modelVersReport = modelVersReport
        versReport = modelVersReport.modelDocument
        if not hasattr(versReport, u"xmlDocument"): # not parsed
            return
        # both referenced DTSes must be present, loaded, and individually valid
        for DTSname in (u"fromDTS", u"toDTS"):
            DTSmodelXbrl = getattr(versReport, DTSname)
            if DTSmodelXbrl is None or DTSmodelXbrl.modelDocument is None:
                self.modelVersReport.error(u"vere:invalidDTSIdentifier",
                    _(u"%(dts)s is missing or not loaded"),
                    modelObject=self, dts=DTSname)
            else:
                # validate DTS
                ValidateXbrl.ValidateXbrl(DTSmodelXbrl).validate(DTSmodelXbrl)
                if len(DTSmodelXbrl.errors) > 0:
                    self.modelVersReport.error(u"vere:invalidDTSIdentifier",
                        # fix: conversion was "%(dts)" (missing trailing "s"),
                        # which breaks %-formatting of the message
                        _(u"%(dts)s has errors: %(error)s"),
                        modelObject=DTSmodelXbrl.modelDocument, dts=DTSname, error=DTSmodelXbrl.errors)
        # validate linkbases
        ValidateXbrl.ValidateXbrl(self.modelVersReport).validate(modelVersReport)
        versReportElt = versReport.xmlRootElement
        # check actions
        for assignmentRef in versReportElt.iterdescendants(tag=u"{http://xbrl.org/2010/versioning-base}assignmentRef"):
            ref = assignmentRef.get(u"ref")
            if ref not in versReport.idObjects or \
               not isinstance(versReport.idObjects[ref], ModelVersObject.ModelAssignment):
                self.modelVersReport.error(u"vere:invalidAssignmentRef",
                    _(u"AssignmentRef %(assignmentRef)s does not reference an assignment"),
                    modelObject=assignmentRef, assignmentRef=ref)
        # check namespace renames
        for NSrename in versReport.namespaceRenameFrom.values():
            if NSrename.fromURI not in versReport.fromDTS.namespaceDocs:
                self.modelVersReport.error(u"vere:invalidNamespaceMapping",
                    _(u"NamespaceRename fromURI %(uri)s does not reference a schema in fromDTS"),
                    modelObject=self, uri=NSrename.fromURI)
            if NSrename.toURI not in versReport.toDTS.namespaceDocs:
                self.modelVersReport.error(u"vere:invalidNamespaceMapping",
                    _(u"NamespaceRename toURI %(uri)s does not reference a schema in toDTS"),
                    modelObject=self, uri=NSrename.toURI)
        # check role changes
        for roleChange in versReport.roleChanges.values():
            if roleChange.fromURI not in versReport.fromDTS.roleTypes:
                self.modelVersReport.error(u"vere:invalidRoleChange",
                    _(u"RoleChange fromURI %(uri)s does not reference a roleType in fromDTS"),
                    modelObject=self, uri=roleChange.fromURI)
            if roleChange.toURI not in versReport.toDTS.roleTypes:
                self.modelVersReport.error(u"vere:invalidRoleChange",
                    _(u"RoleChange toURI %(uri)s does not reference a roleType in toDTS"),
                    modelObject=self, uri=roleChange.toURI)
        # check reportRefs
        for reportRef in versReportElt.iterdescendants(tag=u"{http://xbrl.org/2010/versioning-base}reportRef"):
            # if existing it must be valid
            href = reportRef.get(u"{http://www.w3.org/1999/xlink}href")
            # TBD: href target validation not yet implemented
        if versReport.fromDTS and versReport.toDTS:
            # check concept changes of concept basic
            for conceptChange in versReport.conceptUseChanges:
                fromConceptQn = conceptChange.fromConceptQname
                toConceptQn = conceptChange.toConceptQname
                if (conceptChange.name != u"conceptAdd" and
                    (fromConceptQn is None or fromConceptQn not in versReport.fromDTS.qnameConcepts)):
                    self.modelVersReport.error(u"vercue:invalidConceptReference",
                        _(u"%(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.fromConceptQname)
                if (conceptChange.name != u"conceptDelete" and
                    (toConceptQn is None or toConceptQn not in versReport.toDTS.qnameConcepts)):
                    self.modelVersReport.error(u"vercue:invalidConceptReference",
                        _(u"%(event)s toConcept %(concept)s does not reference a concept in toDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
                # physical-attribute consistency: a physical add must not already
                # exist in fromDTS (after namespace mapping), and vice versa
                if (conceptChange.name == u"conceptAdd" and toConceptQn is not None and
                    conceptChange.isPhysical ^
                    (qname(versReport.namespaceRenameTo.get(toConceptQn.namespaceURI, toConceptQn.namespaceURI),
                           toConceptQn.localName) not in versReport.fromDTS.qnameConcepts)):
                    self.modelVersReport.error(u"vercue:inconsistentPhysicalAttribute",
                        _(u"%(event)s toConcept %(concept)s physical attribute conflicts with presence in fromDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
                # fix: this branch guarded on toConceptQn being non-None but then
                # dereferenced fromConceptQn, risking AttributeError; conceptDelete
                # concerns the from-side concept, so guard on fromConceptQn
                if (conceptChange.name == u"conceptDelete" and fromConceptQn is not None and
                    conceptChange.isPhysical ^
                    (qname(versReport.namespaceRenameFrom.get(fromConceptQn.namespaceURI, fromConceptQn.namespaceURI),
                           fromConceptQn.localName) in versReport.toDTS.qnameConcepts)):
                    self.modelVersReport.error(u"vercue:inconsistentPhysicalAttribute",
                        _(u"%(event)s toConcept %(concept)s physical attribute conflicts with presence in toDTS"),
                        modelObject=conceptChange, event=conceptChange.name, concept=conceptChange.toConceptQname)
            # check concept changes of concept extended
            equivalentAttributes = {}  # fromAttr<->toAttr pairs declared equivalent by attributeDefinitionChange
            for conceptChange in versReport.conceptDetailsChanges:
                fromConcept = conceptChange.fromConcept
                toConcept = conceptChange.toConcept
                fromResource = conceptChange.fromResource
                toResource = conceptChange.toResource
                # fromConcept checks
                if not conceptChange.name.endswith(u"Add"):
                    if fromConcept is None:  # fix: was "not fromConcept is not None" (equivalent but obfuscated)
                        self.modelVersReport.error(u"vercue:invalidConceptReference",
                            _(u"%(action)s %(event)s fromConcept %(concept)s does not reference a concept in fromDTS"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.fromConceptQname)
                    # tuple check (fix: the probe string was wrapped in _(), so a
                    # message translation could silently break the membership test)
                    elif u"Child" in conceptChange.name and \
                         not versReport.fromDTS.qnameConcepts[fromConcept.qname].isTuple:
                        self.modelVersReport.error(u"vercue:invalidConceptReference",
                            _(u"%(action)s %(event)s fromConcept %(concept)s must be defined as a tuple"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.fromConceptQname)
                    # resource check
                    elif u"Label" in conceptChange.name:
                        if fromResource is None:
                            self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                _(u"%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.fromResourceValue)
                        else:
                            # resource must be a standard (link:label) or generic label related to the concept
                            relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptLabel)
                            if relationship is not None:
                                if (relationship.qname != XbrlConst.qnLinkLabelArc or
                                    relationship.parentQname != XbrlConst.qnLinkLabelLink or
                                    fromResource.qname != XbrlConst.qnLinkLabel):
                                    self.modelVersReport.error(u"vercde:invalidConceptLabelIdentifier",
                                        _(u"%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                            else:
                                relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementLabel)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       fromResource.qname != XbrlConst.qnGenLabel:
                                        self.modelVersReport.error(u"vercde:invalidConceptLabelIdentifier",
                                            _(u"%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                                else:
                                    self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                        # fix: message had a leftover "{3}" str.format placeholder;
                                        # use the %(concept)s mapping key like the parallel branches
                                        _(u"%(action)s %(event)s fromResource %(resource)s does not have a label relationship to %(concept)s in fromDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                    elif u"Reference" in conceptChange.name:
                        if fromResource is None:
                            self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                _(u"%(action)s %(event)s fromResource %(resource)s does not reference a resource in fromDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.fromResourceValue)
                        else:
                            # resource must be a standard (link:reference) or generic reference related to the concept
                            relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.conceptReference)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkReferenceArc or \
                                   relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
                                   fromResource.qname != XbrlConst.qnLinkReference:
                                    self.modelVersReport.error(u"vercde:invalidConceptReferenceIdentifier",
                                        _(u"%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                            else:
                                relationship = fromConcept.relationshipToResource(fromResource, XbrlConst.elementReference)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       fromResource.qname != XbrlConst.qnGenReference:
                                        self.modelVersReport.error(u"vercde:invalidConceptReferenceIdentifier",
                                            _(u"%(action)s %(event)s fromResource %(resource)s for %(concept)s in fromDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                                else:
                                    self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                        _(u"%(action)s %(event)s fromResource %(resource)s does not have a reference relationship to %(concept)s in fromDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.fromResourceValue, concept=conceptChange.fromConceptQname)
                # toConcept checks
                if not conceptChange.name.endswith(u"Delete"):
                    if toConcept is None:  # fix: was "not toConcept is not None" (equivalent but obfuscated)
                        self.modelVersReport.error(u"vercue:invalidConceptReference",
                            _(u"%(action)s %(event)s toConcept %(concept)s does not reference a concept in toDTS"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.toConceptQname)
                    # tuple check
                    elif u"Child" in conceptChange.name and \
                         not versReport.toDTS.qnameConcepts[toConcept.qname].isTuple:
                        self.modelVersReport.error(u"vercue:invalidConceptReference",
                            _(u"%(action)s %(event)s toConcept %(concept)s must be defined as a tuple"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, concept=conceptChange.toConceptQname)
                    # resource check
                    elif u"Label" in conceptChange.name:
                        if toResource is None:
                            self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                _(u"%(action)s %(event)s toResource %(resource)s for %(concept)s does not reference a resource in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        elif toResource.qname not in (XbrlConst.qnLinkLabel, XbrlConst.qnGenLabel):
                            self.modelVersReport.error(u"vercde:invalidConceptLabelIdentifier",
                                _(u"%(action)s %(event)s toResource %(resource)s is not a label in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        else:
                            relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptLabel)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkLabelArc or \
                                   relationship.parentQname != XbrlConst.qnLinkLabelLink or \
                                   toResource.qname != XbrlConst.qnLinkLabel:
                                    self.modelVersReport.error(u"vercde:invalidConceptLabelIdentifier",
                                        _(u"%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                            else:
                                relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementLabel)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       toResource.qname != XbrlConst.qnGenLabel:
                                        self.modelVersReport.error(u"vercde:invalidConceptLabelIdentifier",
                                            _(u"%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                                else:
                                    self.modelVersReport.error(u"vercde:invalidConceptResourceIdentifier",
                                        _(u"%(action)s %(event)s toResource %(resource)s does not have a label relationship to %(concept)s in toDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                    elif u"Reference" in conceptChange.name:
                        if toResource is None:
                            self.modelVersReport.error(u"vercde:invalidResourceIdentifier",
                                _(u"%(action)s %(event)s toResource %(resource)s does not reference a resource in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue)
                        elif toResource.qname not in (XbrlConst.qnLinkReference, XbrlConst.qnGenReference):
                            self.modelVersReport.error(u"vercde:invalidConceptReferenceIdentifier",
                                _(u"%(action)s %(event)s toResource %(resource)s is not a reference in toDTS"),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                        else:
                            relationship = toConcept.relationshipToResource(toResource, XbrlConst.conceptReference)
                            if relationship is not None:
                                if relationship.qname != XbrlConst.qnLinkReferenceArc or \
                                   relationship.parentQname != XbrlConst.qnLinkReferenceLink or \
                                   toResource.qname != XbrlConst.qnLinkReference:
                                    self.modelVersReport.error(u"vercde:invalidConceptReferenceIdentifier",
                                        _(u"%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                            else:
                                relationship = toConcept.relationshipToResource(toResource, XbrlConst.elementReference)
                                if relationship is not None:
                                    if relationship.qname != XbrlConst.qnGenArc or \
                                       toResource.qname != XbrlConst.qnGenReference:
                                        self.modelVersReport.error(u"vercde:invalidConceptReferenceIdentifier",
                                            _(u"%(action)s %(event)s toResource %(resource)s for %(concept)s in toDTS does not have expected link, arc, or label elements"),
                                            modelObject=conceptChange, action=conceptChange.actionId,
                                            event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                                else:
                                    self.modelVersReport.error(u"vercde:invalidConceptResourceIdentifier",
                                        _(u"%(action)s %(event)s toResource %(resource)s does not have a reference relationship to %(concept)s in toDTS"),
                                        modelObject=conceptChange, action=conceptChange.actionId,
                                        event=conceptChange.name, resource=conceptChange.toResourceValue, concept=conceptChange.toConceptQname)
                # check concept correspondence
                if fromConcept is not None and toConcept is not None:
                    if (versReport.toDTSqname(fromConcept.qname) != toConcept.qname and
                        versReport.equivalentConcepts.get(fromConcept.qname) != toConcept.qname and
                        toConcept.qname not in versReport.relatedConcepts.get(fromConcept.qname,[])):
                        self.modelVersReport.error(u"vercde:invalidConceptCorrespondence",
                            _(u"%(action)s %(event)s fromConcept %(conceptFrom)s and toConcept %(conceptTo)s must be equivalent or related"),
                            modelObject=conceptChange, action=conceptChange.actionId,
                            event=conceptChange.name, conceptFrom=conceptChange.fromConceptQname, conceptTo=conceptChange.toConceptQname)
                # custom attribute events
                if conceptChange.name.startswith(u"conceptAttribute") or conceptChange.name == u"attributeDefinitionChange":
                    try:
                        for attr in conceptAttributeEventAttributes[conceptChange.name]:
                            customAttributeQname = conceptChange.customAttributeQname(attr)
                            if not customAttributeQname:
                                self.modelVersReport.info(u"arelle:invalidAttributeChange",
                                    # fix: placeholder was "$(attrName)s" (shell-style typo),
                                    # which the %-formatter would leave unexpanded
                                    _(u"%(action)s %(event)s %(attr)s %(attrName)s does not have a name"),
                                    modelObject=conceptChange, action=conceptChange.actionId,
                                    attr=attr, attrName=customAttributeQname)
                            elif customAttributeQname.namespaceURI in (None, XbrlConst.xbrli, XbrlConst.xsd):
                                self.modelVersReport.error(u"vercde:illegalCustomAttributeEvent",
                                    # fix: same "$(attrName)s" typo as above
                                    _(u"%(action)s %(event)s %(attr)s %(attrName)s has an invalid namespace"),
                                    modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
                                    attr=attr, attrName=customAttributeQname)
                    except KeyError:
                        self.modelVersReport.info(u"arelle:eventNotRecognized",
                            _(u"%(action)s %(event)s event is not recognized"),
                            modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name)
                    if conceptChange.name == u"attributeDefinitionChange":
                        # record the declared equivalence in both directions for the pass below
                        fromAttr = conceptChange.customAttributeQname(u"fromCustomAttribute")
                        toAttr = conceptChange.customAttributeQname(u"toCustomAttribute")
                        equivalentAttributes[fromAttr] = toAttr
                        equivalentAttributes[toAttr] = fromAttr
                # check item concept identifiers
                # fix: the tuple listed u"conceptPeriodTypeChange" twice
                if conceptChange.name in (u"conceptPeriodTypeChange",):
                    for concept in (fromConcept, toConcept):
                        if concept is not None and not concept.isItem:
                            self.modelVersReport.error(u"vercde:invalidItemConceptIdentifier",
                                _(u"%(action)s %(event)s concept %(concept)s does not reference an item concept."),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, concept=concept.qname)
                # check tuple concept identifiers
                if conceptChange.name in (u"tupleContentModelChange", ):
                    for concept in (fromConcept, toConcept):
                        # fix: the original tested isItem here, so tuple concepts were
                        # flagged and item concepts passed -- inverted relative to the
                        # error message; this event requires tuple concepts
                        if concept is not None and not concept.isTuple:
                            self.modelVersReport.error(u"vercde:invalidTupleConceptIdentifier",
                                _(u"%(action)s %(event)s concept %(concept)s does not reference a tuple concept."),
                                modelObject=conceptChange, action=conceptChange.actionId,
                                event=conceptChange.name, concept=concept.qname)
                if conceptChange.name in schemaAttributeEventAttributes:
                    attr = schemaAttributeEventAttributes[conceptChange.name]
                    if (fromConcept is not None and not fromConcept.get(attr) and
                        toConcept is not None and not toConcept.get(attr)):
                        self.modelVersReport.error(u"vercde:illegalSchemaAttributeChangeEvent",
                            _(u"%(action)s %(event)s neither concepts have a %(attribute)s attribute: %(fromConcept)s, %(toConcept)s."),
                            modelObject=conceptChange, action=conceptChange.actionId, attribute=attr,
                            event=conceptChange.name, fromConcept=fromConcept.qname, toConcept=toConcept.qname)
            # check concept changes for equivalent attributes
            for conceptChange in versReport.conceptDetailsChanges:
                if conceptChange.name == u"conceptAttributeChange":
                    fromAttr = conceptChange.customAttributeQname(u"fromCustomAttribute")
                    toAttr = conceptChange.customAttributeQname(u"toCustomAttribute")
                    # NOTE(review): assumes both attribute qnames are present here;
                    # a missing one would raise on .localName -- confirm upstream guards
                    if (equivalentAttributes.get(fromAttr) != toAttr and
                        (fromAttr.localName != toAttr.localName or
                         (fromAttr.namespaceURI != toAttr.namespaceURI and
                          versReport.namespaceRenameFrom.get(fromAttr.namespaceURI, fromAttr.namespaceURI) != toAttr.namespaceURI))):
                        self.modelVersReport.error(u"vercde:invalidAttributeCorrespondence",
                            _(u"%(action)s %(event)s has non-equivalent attributes %(fromQname)s and %(toQname)s"),
                            modelObject=conceptChange, action=conceptChange.actionId, event=conceptChange.name,
                            fromQname=fromAttr, toQname=toAttr)
            del equivalentAttributes # dereference
        # check relationship set changes
        for relSetChange in versReport.relationshipSetChanges:
            for relationshipSet, name in ((relSetChange.fromRelationshipSet, u"fromRelationshipSet"),
                                          (relSetChange.toRelationshipSet, u"toRelationshipSet")):
                if relationshipSet is not None:
                    dts = relationshipSet.dts
                    relationshipSetValid = True
                    if relationshipSet.link:
                        # link element must exist in the DTS and be an extended-type element
                        if (relationshipSet.link not in dts.qnameConcepts or
                            (dts.qnameConcepts[relationshipSet.link].type is not None and
                             not dts.qnameConcepts[relationshipSet.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType))):
                            self.modelVersReport.error(u"verrelse:invalidLinkElementReferenceEvent",
                                _(u"%(event)s %(relSet)s link %(link)s does not reference an element in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                link=relationshipSet.link)
                            relationshipSetValid = False
                    if relationshipSet.arc:
                        # arc element must exist in the DTS and be an arc-type element
                        if (relationshipSet.arc not in dts.qnameConcepts or
                            (dts.qnameConcepts[relationshipSet.arc].type is not None and
                             not dts.qnameConcepts[relationshipSet.arc].type.isDerivedFrom(XbrlConst.qnXlArcType))):
                            self.modelVersReport.error(u"verrelse:invalidArcElementReferenceEvent",
                                # fix: conversion was "%(arc)" (missing trailing "s")
                                _(u"%(event)s %(relSet)s arc %(arc)s does not reference an element in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arc=relationshipSet.arc)
                            relationshipSetValid = False
                    if relationshipSet.linkrole:
                        # linkrole must be declared (or standard) and actually in use
                        if not (XbrlConst.isStandardRole(relationshipSet.linkrole) or
                                relationshipSet.linkrole in relationshipSet.dts.roleTypes):
                            self.modelVersReport.error(u"verrelse:invalidLinkrole",
                                _(u"%(event)s %(relSet)s linkrole %(linkrole)s does not reference an linkrole in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                linkrole=relationshipSet.linkrole)
                            relationshipSetValid = False
                        elif not any(linkrole == relationshipSet.linkrole
                                     for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                            self.modelVersReport.error(u"verrelse:invalidLinkrole",
                                _(u"%(event)s %(relSet)s linkrole %(linkrole)s is not used in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                linkrole=relationshipSet.linkrole)
                            relationshipSetValid = False
                    if relationshipSet.arcrole:
                        # arcrole must be declared (or standard) and actually in use
                        if not (XbrlConst.isStandardArcrole(relationshipSet.arcrole) or
                                relationshipSet.arcrole in relationshipSet.dts.arcroleTypes):
                            self.modelVersReport.error(u"verrelse:invalidArcrole",
                                _(u"%(event)s %(relSet)s arcrole %(arcrole)s does not reference an arcrole in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arcrole=relationshipSet.arcrole)
                            relationshipSetValid = False
                        elif not any(arcrole == relationshipSet.arcrole
                                     for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                            self.modelVersReport.error(u"verrelse:invalidArcrole",
                                _(u"%(event)s %(relSet)s arcrole %(arcrole)s is not used in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                arcrole=relationshipSet.arcrole)
                            relationshipSetValid = False
                    for relationship in relationshipSet.relationships:
                        # fromConcept checks
                        if relationship.fromConcept is None:
                            self.modelVersReport.error(u"vercue:invalidConceptReference",
                                _(u"%(event)s %(relSet)s relationship fromConcept %(conceptFrom)s does not reference a concept in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                conceptFrom=relationship.fromName)
                            relationshipSetValid = False
                        if relationship.toName and relationship.toConcept is None:
                            self.modelVersReport.error(u"vercue:invalidConceptReference",
                                _(u"%(event)s %(relSet)s relationship toConcept %(conceptTo)s does not reference a concept in its DTS"),
                                modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                conceptTo=relationship.toName)
                            relationshipSetValid = False
                        if relationshipSetValid: # test that relations exist
                            if relationship.fromRelationship is None:
                                if relationship.toName:
                                    self.modelVersReport.error(u"verrelse:invalidRelationshipReference",
                                        _(u"%(event)s %(relSet)s no relationship found from fromConcept %(conceptFrom)s to toConcept %(conceptTo)s in its DTS"),
                                        modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                        conceptFrom=relationship.fromName, conceptTo=relationship.toName)
                                else:
                                    self.modelVersReport.error(u"verrelse:invalidRelationshipReference",
                                        _(u"%(event)s %(relSet)s no relationship found fromConcept %(conceptFrom)s in its DTS"),
                                        modelObject=relSetChange, event=relSetChange.name, relSet=name,
                                        conceptFrom=relationship.fromName)
        # check instance aspect changes
        for iaChange in versReport.instanceAspectChanges:
            for instAspects in (iaChange.fromAspects, iaChange.toAspects):
                if instAspects is not None and instAspects.aspects:
                    dimAspectElts = {}  # dimension concept -> first aspect element seen, for duplicate detection
                    for aspect in instAspects.aspects:
                        dts = aspect.modelAspects.dts
                        if (aspect.localName in (u"explicitDimension", u"typedDimension") and aspect.concept is None):
                            self.modelVersReport.error(u"vercue:invalidConceptReference",
                                _(u"%(event)s dimension %(dimension)s is not a concept in its DTS"),
                                modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                        elif aspect.localName == u"explicitDimension":
                            dimConcept = aspect.concept
                            if not dimConcept.isExplicitDimension:
                                self.modelVersReport.error(u"verdime:invalidExplicitDimensionIdentifier",
                                    _(u"%(event)s dimension %(dimension)s is not an explicit dimension in its DTS"),
                                    modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                            if dimConcept in dimAspectElts:
                                self.modelVersReport.error(u"verdime:duplicateExplicitDimensionAspect",
                                    _(u"%(event)s dimension %(dimension)s is duplicated in a single explicitDimension element"),
                                    modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
                            else:
                                dimAspectElts[dimConcept] = aspect
                        elif aspect.localName == u"typedDimension":
                            dimConcept = aspect.concept
                            if not dimConcept.isTypedDimension:
                                self.modelVersReport.error(u"verdime:invalidTypedDimensionIdentifier",
                                    _(u"%(event)s dimension %(dimension)s is not a typed dimension in its DTS"),
                                    modelObject=aspect, event=iaChange.name, dimension=aspect.conceptName)
                            if dimConcept in dimAspectElts:
                                self.modelVersReport.error(u"verdime:duplicateTypedDimensionAspect",
                                    # fix: message said "explicitDimension element" in the
                                    # typedDimension duplicate branch (copy-paste error)
                                    _(u"%(event)s dimension %(dimension)s is duplicated in a single typedDimension element"),
                                    modelObject=(aspect, dimAspectElts[dimConcept]), event=iaChange.name, dimension=aspect.conceptName)
                            else:
                                dimAspectElts[dimConcept] = aspect
                        if aspect.localName in (u"explicitDimension", u"concepts"):
                            for relatedConcept in aspect.relatedConcepts:
                                conceptMdlObj = relatedConcept.concept
                                if conceptMdlObj is None or not conceptMdlObj.isItem:
                                    self.modelVersReport.error(u"vercue:invalidConceptReference",
                                        _(u"%(event)s concept %(concept)s is not an item in its DTS"),
                                        modelObject=aspect, event=iaChange.name, concept=relatedConcept.conceptName)
                                if relatedConcept.arcrole is not None:
                                    if (not XbrlConst.isStandardArcrole(relatedConcept.arcrole) and
                                        relatedConcept.arcrole not in dts.arcroleTypes):
                                        self.modelVersReport.error(u"verdime:invalidURI",
                                            _(u"%(event)s arcrole %(arcrole)s is not defined in its DTS"),
                                            modelObject=aspect, event=iaChange.name, arcrole=relatedConcept.arcrole)
                                    elif not any(arcrole == relatedConcept.arcrole
                                                 for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                                        self.modelVersReport.error(u"verdime:invalidURI",
                                            _(u"%(event)s arcrole %(arcrole)s is not used in its DTS"),
                                            # fix: value was passed as linkrole=, leaving the
                                            # %(arcrole)s message placeholder unfilled
                                            modelObject=aspect, event=iaChange.name, arcrole=relatedConcept.arcrole)
                                if relatedConcept.linkrole is not None:
                                    if (relatedConcept.linkrole != u"http://www.xbrl.org/2003/role/link" and
                                        relatedConcept.linkrole not in dts.roleTypes):
                                        self.modelVersReport.error(u"verdime:invalidURI",
                                            _(u"%(event)s linkrole %(linkrole)s is not defined in its DTS"),
                                            modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
                                    elif not any(linkrole == relatedConcept.linkrole
                                                 for arcrole, linkrole, linkqname, arcqname in dts.baseSets.keys()):
                                        self.modelVersReport.error(u"verdime:invalidURI",
                                            _(u"%(event)s linkrole %(linkrole)s is not used in its DTS"),
                                            modelObject=aspect, event=iaChange.name, linkrole=relatedConcept.linkrole)
                                if (relatedConcept.arc is not None and
                                    (relatedConcept.arc not in dts.qnameConcepts or
                                     (dts.qnameConcepts[relatedConcept.arc].type is not None and
                                      not dts.qnameConcepts[relatedConcept.arc].type.isDerivedFrom(XbrlConst.qnXlArcType)))):
                                    self.modelVersReport.error(u"verdime:invalidArcElement",
                                        _(u"%(event)s arc %(arc)s is not defined as an arc in its DTS"),
                                        modelObject=aspect, event=iaChange.name, arc=relatedConcept.arc)
                                if (relatedConcept.link is not None and
                                    (relatedConcept.link not in dts.qnameConcepts or
                                     (dts.qnameConcepts[relatedConcept.link].type is not None and
                                      not dts.qnameConcepts[relatedConcept.link].type.isDerivedFrom(XbrlConst.qnXlExtendedType)))):
                                    self.modelVersReport.error(u"verdime:invalidLinkElement",
                                        _(u"%(event)s link %(link)s is not defined in its DTS"),
                                        modelObject=aspect, event=iaChange.name, link=relatedConcept.link)
        self.close()
|
apache-2.0
| 4,493,540,456,087,069,000
| 80.107407
| 177
| 0.541276
| false
| 5.044487
| false
| false
| false
|
weinbe58/QuSpin
|
tests/higher_spin_test.py
|
1
|
2498
|
import sys,os
qspin_path = os.path.join(os.getcwd(),"../")
sys.path.insert(0,qspin_path)
from quspin.operators import hamiltonian
from quspin.basis import spin_basis_1d
import numpy as np
from itertools import product
try:
from functools import reduce
except ImportError:
pass
# (real, complex) dtype pairs for precision sweeps; not used below -- TODO confirm before removing
dtypes = [(np.float32,np.complex64),(np.float64,np.complex128)]
# Dense spin operator matrices (I, Sx, Sy, Sz) in the Sz basis, hbar = 1,
# keyed first by spin magnitude then by operator label.
spin_ops={}
spins=['1/2','1','3/2','2']
# spin-1/2 (2x2)
spin_ops['1/2']={}
spin_ops['1/2']["I"]=np.array([[1,0],[0,1]]) + 0.0j
spin_ops['1/2']['x']=(1.0/2.0)*np.array([[0,1],[1,0]]) + 0.0j
spin_ops['1/2']['y']=(1.0j/2.0)*np.array([[0,-1],[1,0]]) + 0.0j
spin_ops['1/2']['z']=(1.0/2.0)*np.array([[1,0.0],[0.0,-1]]) + 0.0j
# spin-1 (3x3)
spin_ops['1']={}
spin_ops['1']['I']=np.array([[1,0,0],[0,1,0],[0,0,1]]) + 0.0j
spin_ops['1']['x']=(1.0/np.sqrt(2))*np.array([[0,1,0],[1,0,1],[0,1,0]]) + 0.0j
spin_ops['1']['y']=(1.0j/np.sqrt(2))*np.array([[0,-1,0],[1,0,-1],[0,1,0]]) +0.0j
spin_ops['1']['z']=np.array([[1,0,0],[0,0,0],[0,0,-1]]) + 0.0j
# spin-3/2 (4x4)
spin_ops['3/2']={}
spin_ops['3/2']['I']=np.array([[1,0,0,0],[0,1,0,0],[0,0,1,0],[0,0,0,1]])
spin_ops['3/2']['x']=(1.0/2.0)*np.array([[0,np.sqrt(3),0,0],[np.sqrt(3),0,2,0],[0,2,0,np.sqrt(3)],[0,0,np.sqrt(3),0]]) + 0j
spin_ops['3/2']['y']=(1.0j/2.0)*np.array([[0,-np.sqrt(3),0,0],[np.sqrt(3),0,-2,0],[0,2,0,-np.sqrt(3)],[0,0,np.sqrt(3),0]])
spin_ops['3/2']['z']=(1.0/2.0)*np.array([[3,0,0,0],[0,1,0,0],[0,0,-1,0],[0,0,0,-3]])+ 0.0j
# spin-2 (5x5)
spin_ops['2']={}
spin_ops['2']['I']=np.array([[1,0,0,0,0],[0,1,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0,0,0,0,1]])
spin_ops['2']['x']=(1.0/2.0)*np.array([[0,2.0,0,0,0],[2,0,np.sqrt(6),0,0],[0,np.sqrt(6),0,np.sqrt(6),0],[0,0,np.sqrt(6),0,2],[0,0,0,2,0]])
spin_ops['2']['y']=(1.0j/2.0)*np.array([[0,-2.0,0,0,0],[2,0,-np.sqrt(6),0,0],[0,np.sqrt(6),0,-np.sqrt(6),0],[0,0,np.sqrt(6),0,-2],[0,0,0,2,0]])
spin_ops['2']['z']=np.array([[2,0,0,0,0],[0,1,0,0,0],[0,0,0,0,0],[0,0,0,-1,0],[0,0,0,0,-2]])+ 0.0j
# Cross-check QuSpin's symbolic on-site operator strings against dense
# Kronecker products of the reference matrices, for every spin magnitude,
# chain length 1..L_max, and every operator-label combination.
L_max = 4
for S in spins:
    for L in range(1, L_max + 1):
        basis = spin_basis_1d(L, S=S, pauli=False)
        # QuSpin coupling list: strength 1.0 followed by the site indices 0..L-1
        couplings = [1.0] + list(range(L))
        for choice in product(spin_ops[S].items(), repeat=L):
            labels, matrices = zip(*choice)
            opstr = "".join(labels)
            expanded, _ = basis.expanded_form([[opstr, [couplings]]], [])
            quspin_op = hamiltonian(expanded, [], basis=basis,
                                    check_symm=False, check_herm=False)
            dense_op = reduce(np.kron, matrices)
            np.testing.assert_allclose(quspin_op.toarray(), dense_op, atol=1e-14,
                                       err_msg="failed test for S={} operator {}".format(S, opstr))
    print("spin-{} operators comparisons passed!".format(S))
|
bsd-3-clause
| -688,366,178,981,138,300
| 36.848485
| 143
| 0.552842
| false
| 1.914176
| false
| false
| false
|
MehnaazAsad/ECO_Globular_Clusters
|
src/data/mods_prelim_checks_2/Exp_fil2.py
|
1
|
6614
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 19 17:36:35 2017
@author: asadm2
"""
### DESCRIPTION
#This script carries out an exposure time check and an "at least 2 filters"
#check
import pandas as pd
import numpy as np
def _normalize_filter(filename):
    """Extract the 'instrument_detector_filter' token from an HLA filename.

    The raw value looks like '...;<a>_<b>_<c>_<inst>_<det>_<filt>_...'; the
    branches below patch up the many inconsistent naming schemes found in the
    catalog (acs-wfc vs acs_wfc, missing detector segments, etc.).

    Args:
        filename: one entry of the 'filename' column of the ECO catalog

    Returns:
        Normalized filter string, e.g. 'acs_wfc_f606w'.
    """
    str_split = filename.split(';')[1].split('_')
    filter_i = str_split[3] + '_' + str_split[4] + '_' + str_split[5]
    if 'ACS' in filter_i:  # acs_wfc spelled in upper case
        filter_i = filter_i.lower()
    elif any(tok in filter_i for tok in ('d634', 'm51', 'tile', 'c_v')):
        # These four targets share the same fix: drop the middle segment and
        # rewrite 'acs-wfc' as 'acs_wfc'.
        parts = filter_i.split('_')
        filter_i = parts[0] + '_' + parts[2]
        if 'acs-wfc' in filter_i:
            parts = filter_i.split('-')
            filter_i = parts[0] + '_' + parts[1]
    elif 'ngc' in filter_i:  # 'acs' missing the detector -> acs_wfc
        parts = filter_i.split('_')
        filter_i = parts[0] + '_wfc_' + parts[2]
    elif '131009' in filter_i:  # wfc3: these filters belong to the UVIS channel
        parts = filter_i.split('_')
        if parts[2] in ('f438w', 'f775w'):
            filter_i = parts[0] + '_uvis_' + parts[2]
    elif 'par' in filter_i:  # wfc3 parallels: route filter to UVIS or IR channel
        parts = filter_i.split('_')
        if parts[2] in ('f606w', 'f600lp'):
            filter_i = parts[0] + '_uvis_' + parts[2]
        elif parts[2] in ('f125w', 'f160w', 'f110w'):
            filter_i = parts[0] + '_ir_' + parts[2]
    elif 'w_wf' in filter_i or 'lp_wf' in filter_i:  # wfpc2: keep first two segments
        parts = filter_i.split('_')
        filter_i = parts[0] + '_' + parts[1]
    elif any(tok in filter_i for tok in ('n4496', 'n5194', 'u6614')):
        # wfpc2 targets with an extra name segment in the middle
        parts = filter_i.split('_')
        filter_i = parts[0] + '_' + parts[2]
    return filter_i


def expfil2(objname, revtxt):
    """
    Carry out an exposure-time check and an "at least 2 filters" check.

    Images listed in ``revtxt`` are matched against the ECO catalog; a filter
    "passes" when the summed exposure time of its images reaches the minimum
    required for that filter.  The object is good when at least two filters
    pass.

    Args:
        objname: ECOID of the galaxy
        revtxt: objname_rev.txt file that Obj_in_Img.py returns

    Returns:
        bool: True when the object passed both checks (also appended to
        goodObj.txt), False otherwise (appended to badObj.txt).
    """
    path_to_raw = '/fs1/masad/Research/Repositories/ECO_Globular_Clusters/'\
        'data/raw/'
    path_to_interim = '/fs1/masad/Research/Repositories/ECO_Globular_Clusters/'\
        'data/interim/'

    ECO = path_to_raw + 'Available_HST_Data_ECO.txt'
    ECO = pd.read_csv(ECO, delimiter=r'\s+', header=None,
                      names=['ECOID', 'HSTOBJ', 'RA', 'DEC', 'exptime',
                             'camera', 'filename'])
    ECO['exptime'] = pd.to_numeric(ECO['exptime'], errors='coerce')
    ECO['filename'] = ECO['filename'].astype('str')

    # Normalized instrument/detector/filter label for every catalog row.
    filters = np.asarray([_normalize_filter(name) for name in ECO['filename']])
    filters_unique = np.unique(filters)
    ECO.loc[:, 'filters'] = filters

    ### Exposure time check
    # Minimum required total exposure time per unique filter.  WARNING: the
    # pairing below relies on this list having exactly one entry per element
    # of the (sorted) ``filters_unique`` array — keep the two in sync.
    exptime_arr = [9399, 3671, 3331, 1319, 2055, 2236, 1758, 10337, 2045, 1237,
                   2290, 3853, 1928101311, 73024829, 275363, 1241, 31705,
                   26575, 6021, 3548, 3723, 2053, 2249, 3368, 5275, 4069,
                   171413, 31062, 11431, 5789, 8520, 10071, 6677, 24445, 12605,
                   10757, 50294]
    exp_fil_dict = dict(zip(filters_unique, exptime_arr))

    contents = pd.read_csv(revtxt, header=None, names=['filename'])
    contents.filename = 'http://hla.stsci.edu/cgi-bin/' + contents.filename\
        .astype(str)

    # Match and return all catalog rows belonging to this ECOID whose
    # filenames survived the previous (Obj_in_Img.py) check.
    ECO2 = ECO.loc[(ECO.filename.isin(contents.filename)) &
                   (ECO.ECOID == objname), :]

    ECOID_groups = ECO2.groupby('filters')
    # Filters whose summed exposure time reaches the required minimum.
    ECO_match3 = np.array([key for key in ECOID_groups.groups.keys()
                           if ECOID_groups.get_group(key).exptime.sum() >=
                           exp_fil_dict[key]])

    ### At least 2 filter check
    result = len(ECO_match3) >= 2
    out_name = 'goodObj.txt' if result else 'badObj.txt'
    with open(path_to_interim + out_name, 'a') as newfile:
        newfile.write(np.unique(ECO2.ECOID)[0] + '\n')
    return result
|
mit
| -8,252,784,929,073,507,000
| 37.453488
| 80
| 0.51648
| false
| 3.220058
| false
| false
| false
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/distutils/tests/test_fcompiler_intel.py
|
1
|
1161
|
from numpy.testing import *
import numpy.distutils.fcompiler
# Representative `ifort` banner strings paired with the version number that
# the fcompiler class is expected to extract from them.
# NOTE(review): adjacent literals concatenate with no space
# ("...applicationsrunning..."); the matcher only needs the "Version N.N"
# tail, so do not "fix" the spacing without checking version_match.
intel_32bit_version_strings = [
    ("Intel(R) Fortran Intel(R) 32-bit Compiler Professional for applications"\
     "running on Intel(R) 32, Version 11.1", '11.1'),
]

intel_64bit_version_strings = [
    ("Intel(R) Fortran IA-64 Compiler Professional for applications"\
     "running on IA-64, Version 11.0", '11.0'),
    ("Intel(R) Fortran Intel(R) 64 Compiler Professional for applications"\
     "running on Intel(R) 64, Version 11.1", '11.1')
]
class TestIntelFCompilerVersions(TestCase):
    """Version parsing for the 32-bit Intel Fortran compiler class."""

    def test_32bit_version(self):
        # Each known banner string must map to its expected version number.
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intel')
        for banner, expected in intel_32bit_version_strings:
            assert_(fc.version_match(banner) == expected)
class TestIntelEM64TFCompilerVersions(TestCase):
    """Version parsing for the 64-bit (EM64T/IA-64) Intel Fortran classes."""

    def test_64bit_version(self):
        # Each known banner string must map to its expected version number.
        fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intelem')
        for banner, expected in intel_64bit_version_strings:
            assert_(fc.version_match(banner) == expected)
# Allow running this module directly via the test runner that the
# `from numpy.testing import *` line above brings into scope.
if __name__ == '__main__':
    run_module_suite()
|
gpl-3.0
| 4,528,753,986,483,240,000
| 32.147059
| 79
| 0.641688
| false
| 3.56135
| true
| false
| false
|
docusign/docusign-python-client
|
docusign_esign/models/payment_details.py
|
1
|
17416
|
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PaymentDetails(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute name -> swagger type (drives to_dict serialization).
    swagger_types = {
        'allowed_payment_methods': 'list[str]',
        'charge_id': 'str',
        'currency_code': 'str',
        'currency_code_metadata': 'PropertyMetadata',
        'customer_id': 'str',
        'custom_metadata': 'str',
        'custom_metadata_required': 'bool',
        'gateway_account_id': 'str',
        'gateway_account_id_metadata': 'PropertyMetadata',
        'gateway_display_name': 'str',
        'gateway_name': 'str',
        'line_items': 'list[PaymentLineItem]',
        'payment_option': 'str',
        'payment_source_id': 'str',
        'signer_values': 'PaymentSignerValues',
        'status': 'str',
        'total': 'Money'
    }

    # Python attribute name -> JSON key in the DocuSign API payload.
    attribute_map = {
        'allowed_payment_methods': 'allowedPaymentMethods',
        'charge_id': 'chargeId',
        'currency_code': 'currencyCode',
        'currency_code_metadata': 'currencyCodeMetadata',
        'customer_id': 'customerId',
        'custom_metadata': 'customMetadata',
        'custom_metadata_required': 'customMetadataRequired',
        'gateway_account_id': 'gatewayAccountId',
        'gateway_account_id_metadata': 'gatewayAccountIdMetadata',
        'gateway_display_name': 'gatewayDisplayName',
        'gateway_name': 'gatewayName',
        'line_items': 'lineItems',
        'payment_option': 'paymentOption',
        'payment_source_id': 'paymentSourceId',
        'signer_values': 'signerValues',
        'status': 'status',
        'total': 'total'
    }

    def __init__(self, allowed_payment_methods=None, charge_id=None, currency_code=None, currency_code_metadata=None, customer_id=None, custom_metadata=None, custom_metadata_required=None, gateway_account_id=None, gateway_account_id_metadata=None, gateway_display_name=None, gateway_name=None, line_items=None, payment_option=None, payment_source_id=None, signer_values=None, status=None, total=None):  # noqa: E501
        """PaymentDetails - a model defined in Swagger"""  # noqa: E501
        # All backing fields default to None; only explicitly supplied
        # keyword arguments are assigned through the property setters below.
        self._allowed_payment_methods = None
        self._charge_id = None
        self._currency_code = None
        self._currency_code_metadata = None
        self._customer_id = None
        self._custom_metadata = None
        self._custom_metadata_required = None
        self._gateway_account_id = None
        self._gateway_account_id_metadata = None
        self._gateway_display_name = None
        self._gateway_name = None
        self._line_items = None
        self._payment_option = None
        self._payment_source_id = None
        self._signer_values = None
        self._status = None
        self._total = None
        self.discriminator = None

        if allowed_payment_methods is not None:
            self.allowed_payment_methods = allowed_payment_methods
        if charge_id is not None:
            self.charge_id = charge_id
        if currency_code is not None:
            self.currency_code = currency_code
        if currency_code_metadata is not None:
            self.currency_code_metadata = currency_code_metadata
        if customer_id is not None:
            self.customer_id = customer_id
        if custom_metadata is not None:
            self.custom_metadata = custom_metadata
        if custom_metadata_required is not None:
            self.custom_metadata_required = custom_metadata_required
        if gateway_account_id is not None:
            self.gateway_account_id = gateway_account_id
        if gateway_account_id_metadata is not None:
            self.gateway_account_id_metadata = gateway_account_id_metadata
        if gateway_display_name is not None:
            self.gateway_display_name = gateway_display_name
        if gateway_name is not None:
            self.gateway_name = gateway_name
        if line_items is not None:
            self.line_items = line_items
        if payment_option is not None:
            self.payment_option = payment_option
        if payment_source_id is not None:
            self.payment_source_id = payment_source_id
        if signer_values is not None:
            self.signer_values = signer_values
        if status is not None:
            self.status = status
        if total is not None:
            self.total = total

    @property
    def allowed_payment_methods(self):
        """Gets the allowed_payment_methods of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The allowed_payment_methods of this PaymentDetails.  # noqa: E501
        :rtype: list[str]
        """
        return self._allowed_payment_methods

    @allowed_payment_methods.setter
    def allowed_payment_methods(self, allowed_payment_methods):
        """Sets the allowed_payment_methods of this PaymentDetails.

          # noqa: E501

        :param allowed_payment_methods: The allowed_payment_methods of this PaymentDetails.  # noqa: E501
        :type: list[str]
        """
        self._allowed_payment_methods = allowed_payment_methods

    @property
    def charge_id(self):
        """Gets the charge_id of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The charge_id of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._charge_id

    @charge_id.setter
    def charge_id(self, charge_id):
        """Sets the charge_id of this PaymentDetails.

          # noqa: E501

        :param charge_id: The charge_id of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._charge_id = charge_id

    @property
    def currency_code(self):
        """Gets the currency_code of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The currency_code of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._currency_code

    @currency_code.setter
    def currency_code(self, currency_code):
        """Sets the currency_code of this PaymentDetails.

          # noqa: E501

        :param currency_code: The currency_code of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._currency_code = currency_code

    @property
    def currency_code_metadata(self):
        """Gets the currency_code_metadata of this PaymentDetails.  # noqa: E501

        :return: The currency_code_metadata of this PaymentDetails.  # noqa: E501
        :rtype: PropertyMetadata
        """
        return self._currency_code_metadata

    @currency_code_metadata.setter
    def currency_code_metadata(self, currency_code_metadata):
        """Sets the currency_code_metadata of this PaymentDetails.

        :param currency_code_metadata: The currency_code_metadata of this PaymentDetails.  # noqa: E501
        :type: PropertyMetadata
        """
        self._currency_code_metadata = currency_code_metadata

    @property
    def customer_id(self):
        """Gets the customer_id of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The customer_id of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._customer_id

    @customer_id.setter
    def customer_id(self, customer_id):
        """Sets the customer_id of this PaymentDetails.

          # noqa: E501

        :param customer_id: The customer_id of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._customer_id = customer_id

    @property
    def custom_metadata(self):
        """Gets the custom_metadata of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The custom_metadata of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._custom_metadata

    @custom_metadata.setter
    def custom_metadata(self, custom_metadata):
        """Sets the custom_metadata of this PaymentDetails.

          # noqa: E501

        :param custom_metadata: The custom_metadata of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._custom_metadata = custom_metadata

    @property
    def custom_metadata_required(self):
        """Gets the custom_metadata_required of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The custom_metadata_required of this PaymentDetails.  # noqa: E501
        :rtype: bool
        """
        return self._custom_metadata_required

    @custom_metadata_required.setter
    def custom_metadata_required(self, custom_metadata_required):
        """Sets the custom_metadata_required of this PaymentDetails.

          # noqa: E501

        :param custom_metadata_required: The custom_metadata_required of this PaymentDetails.  # noqa: E501
        :type: bool
        """
        self._custom_metadata_required = custom_metadata_required

    @property
    def gateway_account_id(self):
        """Gets the gateway_account_id of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The gateway_account_id of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._gateway_account_id

    @gateway_account_id.setter
    def gateway_account_id(self, gateway_account_id):
        """Sets the gateway_account_id of this PaymentDetails.

          # noqa: E501

        :param gateway_account_id: The gateway_account_id of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._gateway_account_id = gateway_account_id

    @property
    def gateway_account_id_metadata(self):
        """Gets the gateway_account_id_metadata of this PaymentDetails.  # noqa: E501

        :return: The gateway_account_id_metadata of this PaymentDetails.  # noqa: E501
        :rtype: PropertyMetadata
        """
        return self._gateway_account_id_metadata

    @gateway_account_id_metadata.setter
    def gateway_account_id_metadata(self, gateway_account_id_metadata):
        """Sets the gateway_account_id_metadata of this PaymentDetails.

        :param gateway_account_id_metadata: The gateway_account_id_metadata of this PaymentDetails.  # noqa: E501
        :type: PropertyMetadata
        """
        self._gateway_account_id_metadata = gateway_account_id_metadata

    @property
    def gateway_display_name(self):
        """Gets the gateway_display_name of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The gateway_display_name of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._gateway_display_name

    @gateway_display_name.setter
    def gateway_display_name(self, gateway_display_name):
        """Sets the gateway_display_name of this PaymentDetails.

          # noqa: E501

        :param gateway_display_name: The gateway_display_name of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._gateway_display_name = gateway_display_name

    @property
    def gateway_name(self):
        """Gets the gateway_name of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The gateway_name of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._gateway_name

    @gateway_name.setter
    def gateway_name(self, gateway_name):
        """Sets the gateway_name of this PaymentDetails.

          # noqa: E501

        :param gateway_name: The gateway_name of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._gateway_name = gateway_name

    @property
    def line_items(self):
        """Gets the line_items of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The line_items of this PaymentDetails.  # noqa: E501
        :rtype: list[PaymentLineItem]
        """
        return self._line_items

    @line_items.setter
    def line_items(self, line_items):
        """Sets the line_items of this PaymentDetails.

          # noqa: E501

        :param line_items: The line_items of this PaymentDetails.  # noqa: E501
        :type: list[PaymentLineItem]
        """
        self._line_items = line_items

    @property
    def payment_option(self):
        """Gets the payment_option of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The payment_option of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._payment_option

    @payment_option.setter
    def payment_option(self, payment_option):
        """Sets the payment_option of this PaymentDetails.

          # noqa: E501

        :param payment_option: The payment_option of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._payment_option = payment_option

    @property
    def payment_source_id(self):
        """Gets the payment_source_id of this PaymentDetails.  # noqa: E501

          # noqa: E501

        :return: The payment_source_id of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._payment_source_id

    @payment_source_id.setter
    def payment_source_id(self, payment_source_id):
        """Sets the payment_source_id of this PaymentDetails.

          # noqa: E501

        :param payment_source_id: The payment_source_id of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._payment_source_id = payment_source_id

    @property
    def signer_values(self):
        """Gets the signer_values of this PaymentDetails.  # noqa: E501

        :return: The signer_values of this PaymentDetails.  # noqa: E501
        :rtype: PaymentSignerValues
        """
        return self._signer_values

    @signer_values.setter
    def signer_values(self, signer_values):
        """Sets the signer_values of this PaymentDetails.

        :param signer_values: The signer_values of this PaymentDetails.  # noqa: E501
        :type: PaymentSignerValues
        """
        self._signer_values = signer_values

    @property
    def status(self):
        """Gets the status of this PaymentDetails.  # noqa: E501

        Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later.  # noqa: E501

        :return: The status of this PaymentDetails.  # noqa: E501
        :rtype: str
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this PaymentDetails.

        Indicates the envelope status. Valid values are: * sent - The envelope is sent to the recipients. * created - The envelope is saved as a draft and can be modified and sent later.  # noqa: E501

        :param status: The status of this PaymentDetails.  # noqa: E501
        :type: str
        """
        self._status = status

    @property
    def total(self):
        """Gets the total of this PaymentDetails.  # noqa: E501

        :return: The total of this PaymentDetails.  # noqa: E501
        :rtype: Money
        """
        return self._total

    @total.setter
    def total(self, total):
        """Sets the total of this PaymentDetails.

        :param total: The total of this PaymentDetails.  # noqa: E501
        :type: Money
        """
        self._total = total

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are models.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(PaymentDetails, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PaymentDetails):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
mit
| 576,533,233,708,825,000
| 30.267504
| 415
| 0.605018
| false
| 4.121155
| false
| false
| false
|
django-bmf/django-bmf
|
djangobmf/pagination.py
|
2
|
3352
|
#!/usr/bin/python
# ex:set fileencoding=utf-8:
from __future__ import unicode_literals
from django.core.paginator import Paginator
from django.core.paginator import InvalidPage
from django.template import Context
from django.template import loader
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from rest_framework.exceptions import NotFound
from rest_framework.pagination import BasePagination
from rest_framework.response import Response
from rest_framework.utils.urls import replace_query_param
from rest_framework.utils.urls import remove_query_param
class PaginationMixin(BasePagination):
    """DRF pagination that wraps results as {'paginator': {...}, 'items': [...]}.

    Pagination is disabled unless a subclass sets ``page_size`` to a
    positive integer (see ``ModulePagination`` below).
    """
    template = 'rest_framework/pagination/previous_and_next.html'
    invalid_page_message = _('Invalid page "{page_number}": {message}.')
    # None means "no pagination": the full queryset is returned as one page.
    page_size = None

    def paginate_queryset(self, queryset, request, view=None):
        """Return the list of objects for the requested page.

        Raises :class:`rest_framework.exceptions.NotFound` for an invalid
        ``?page=`` value.
        """
        self.request = request

        if not self.page_size:
            # Unpaginated: record the total count and hand back everything.
            self.page = None
            self.count = queryset.count()
            return list(queryset)

        paginator = Paginator(queryset, self.page_size)
        page_number = request.query_params.get('page', 1)

        self.count = paginator.count

        try:
            self.page = paginator.page(page_number)
        except InvalidPage as exc:
            msg = self.invalid_page_message.format(
                page_number=page_number,
                message=six.text_type(exc),
            )
            raise NotFound(msg)

        if paginator.num_pages > 1:
            # The browsable API should display pagination controls.
            self.display_page_controls = True

        return list(self.page)

    def get_paginated_response_data(self, data):
        """Wrap serialized ``data`` with page metadata (current/count/pages)."""
        if self.page:
            return {
                'paginator': {
                    'current': self.page.number,
                    'count': self.count,
                    'pages': self.page.paginator.num_pages,
                },
                'items': data,
            }
        else:
            # Unpaginated responses still report a one-page paginator block
            # so clients can rely on a uniform envelope.
            return {
                'paginator': {
                    'current': 1,
                    'count': self.count,
                    'pages': 1,
                },
                'items': data,
            }

    def get_paginated_response(self, data):
        """Return a DRF ``Response`` carrying the paginated envelope."""
        return Response(self.get_paginated_response_data(data))

    def get_next_link(self):
        """URL of the next page, or None on the last/unpaginated page."""
        if not self.page or not self.page.has_next():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.next_page_number()
        return replace_query_param(url, 'page', page_number)

    def get_previous_link(self):
        """URL of the previous page, or None on the first/unpaginated page."""
        if not self.page or not self.page.has_previous():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.previous_page_number()
        if page_number == 1:
            # Page 1 is the canonical URL without any ?page= parameter.
            return remove_query_param(url, 'page')
        return replace_query_param(url, 'page', page_number)

    def get_html_context(self):
        """Template context for the previous/next pagination controls."""
        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link(),
        }

    def to_html(self):
        """Render the pagination controls for the browsable API."""
        template = loader.get_template(self.template)
        # NOTE(review): passing a ``Context`` here targets older Django;
        # modern ``loader.get_template(...).render`` expects a plain dict —
        # confirm against the Django version this project pins.
        context = Context(self.get_html_context())
        return template.render(context)
class ModulePagination(PaginationMixin):
    # Default paginator for BMF module views: 100 items per page.
    page_size = 100
|
bsd-3-clause
| -7,896,100,331,072,318,000
| 31.230769
| 72
| 0.598449
| false
| 4.163975
| false
| false
| false
|
BoGoEngine/bogo-osx
|
main.py
|
1
|
1638
|
#!/usr/bin/env python
from Cocoa import *
from InputMethodKit import *
from itertools import takewhile
import bogo
class BogoController(IMKInputController):
    """Input-method controller bridging macOS IMKit key events to bogo."""

    def __init__(self):
        # Cocoa doesn't call this method at all
        self.reset()
        self.initialized = True

    def reset(self):
        # Forget both the text already shown to the client and the raw
        # key sequence fed to the bogo engine.
        self.composing_string = ""
        self.raw_string = ""

    def inputText_client_(self, string, client):
        # Lazy init: Cocoa instantiates this controller without running
        # __init__ (see the note in __init__), so bootstrap on first event.
        if not hasattr(self, 'initialized'):
            self.__init__()

        if string == ' ':
            # Space ends the current word; let the client handle the key (NO).
            self.reset()
            return NO

        self.raw_string += string
        result = bogo.process_sequence(self.raw_string)

        # Longest common prefix between what is already displayed and the
        # new engine output; only the differing tail needs to be rewritten.
        same_initial_chars = list(takewhile(lambda tupl: tupl[0] == tupl[1],
                                            zip(self.composing_string,
                                                result)))

        n_backspace = len(self.composing_string) - len(same_initial_chars)
        string_to_commit = result[len(same_initial_chars):]

        # Replace the stale tail in the client's buffer with the new one.
        start = self.client().length() - n_backspace
        length = len(string_to_commit)
        self.client().insertText_replacementRange_(
            string_to_commit,
            NSMakeRange(start, length))

        self.composing_string = result
        return YES
def main():
    """Register the BoGo input-method server with macOS and run the app loop."""
    pool = NSAutoreleasePool.alloc().init()
    connectionName = "Bogo_1_Connection"
    # NOTE(review): ``identifier`` looks unused (the value is re-fetched for
    # NSLog below); ``server`` must stay referenced so the IMKServer is not
    # released while the run loop is active — do not "clean up" these locals
    # without checking the ObjC bridge semantics.
    identifier = NSBundle.mainBundle().bundleIdentifier()
    NSLog(NSBundle.mainBundle().bundleIdentifier())
    server = IMKServer.alloc().initWithName_bundleIdentifier_(
        connectionName,
        "com.ngochin.inputmethod.BoGo")
    # NSBundle.loadNibNamed_owner_(
    #     "MainMenu",
    #     NSApplication.sharedApplication())
    NSLog("here")
    NSApplication.sharedApplication().run()
    pool.release()


if __name__ == "__main__":
    main()
|
gpl-3.0
| 5,871,501,967,262,425,000
| 21.75
| 70
| 0.663004
| false
| 3.230769
| false
| false
| false
|
ibaidev/gplib
|
gplib/covariance_functions/white_noise.py
|
1
|
4027
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Ibai Roman
#
# This file is part of GPlib.
#
# GPlib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GPlib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GPlib. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from .covariance_function import CovarianceFunction
from ..parameters import OptimizableParameter
from ..transformations import LogTransformation
class WhiteNoise(CovarianceFunction):
    """White-noise covariance function.

    k(x, x') = sigma^2 when x and x' are the same point, 0 otherwise; the
    output variance sigma^2 is the single (log-transformed) hyperparameter.
    """
    def __init__(self, data):
        # The initial log-variance ov2 is drawn at random: half the time
        # centered on log(std(Y)/10) (data-informed), half the time on 0,
        # then clipped to [ov2_min, ov2_max].
        scale = 2.0
        ov2_min = -10
        ov2_max = 10
        if np.random.rand() < 0.5:
            ov2 = np.random.normal(
                loc=np.log(np.std(data['Y'])) - np.log(10),
                scale=scale
            )
        else:
            ov2 = np.random.normal(
                loc=0,
                scale=scale
            )
        ov2 = np.clip(ov2, ov2_min, ov2_max)

        hyperparams = [
            OptimizableParameter(
                'output_variance', LogTransformation,
                default_value=np.exp(ov2),
                min_value=np.exp(ov2_min), max_value=np.exp(ov2_max)
            )
        ]

        super(WhiteNoise, self).__init__(hyperparams)

    def covariance(self, mat_a, mat_b=None, only_diagonal=False):
        """
        Measures the distance matrix between solutions of A and B, and
        applies the kernel function element-wise to the distance matrix.

        :param mat_a: List of solutions in lines and dimensions in columns.
        :type mat_a:
        :param mat_b: List of solutions in lines and dimensions in columns.
        :type mat_b:
        :param only_diagonal:
        :type only_diagonal:
        :return: Result matrix with kernel function applied element-wise.
        :rtype:
        """
        len_a = len(mat_a)
        if mat_b is not None:
            # Cross-covariance between distinct point sets is identically
            # zero for white noise.
            len_b = len(mat_b)
            return np.zeros((len_a, len_b))
        if only_diagonal:
            return np.square(self.get_param_value('output_variance')) * \
                np.ones((len_a, 1))
        # Full (auto-)covariance: sigma^2 on the diagonal, zero elsewhere.
        return np.square(self.get_param_value('output_variance')) * \
            np.eye(len_a)

    def dk_dx(self, mat_a, mat_b=None):
        """
        Measures gradient of the distance between solutions of A and B in X.

        :param mat_a: List of solutions in lines and dimensions in columns.
        :param mat_b: List of solutions in lines and dimensions in columns.
        :return: 3D array with the gradient in every dimension of X.
        """
        raise NotImplementedError("Not Implemented. This is an interface.")

    def dk_dtheta(self, mat_a, mat_b=None, trans=False):
        """
        Measures gradient of the distance between solutions of A and B in the
        hyper-parameter space.

        :param mat_a: List of solutions in lines and dimensions in columns.
        :type mat_a:
        :param mat_b: List of solutions in lines and dimensions in columns.
        :type mat_b:
        :param trans: Return results in the transformed space.
        :type trans:
        :return: 3D array with the gradient in every
            dimension the length-scale hyper-parameter space.
        :rtype:
        """
        # NOTE: the trailing commas are deliberate — gradients are returned
        # as a tuple with one entry per hyperparameter.
        len_a = len(mat_a)
        if mat_b is not None:
            len_b = len(mat_b)
            return np.zeros((len_a, len_b)),
        # d k / d sigma = 2*sigma on the diagonal (k = sigma^2 * I).
        dk_dov = np.eye(len_a) * \
            2.0 * self.get_param_value('output_variance')
        if trans:
            dk_dov = self.get_hyperparam('output_variance').grad_trans(dk_dov)
        return dk_dov,
|
gpl-3.0
| -4,278,698,062,277,995,000
| 31.216
| 78
| 0.593991
| false
| 3.809839
| false
| false
| false
|
elastacloud/libcloud
|
libcloud/compute/drivers/libvirt_driver.py
|
1
|
10168
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import os
import time
import platform
import subprocess
import mimetypes
from os.path import join as pjoin
from collections import defaultdict
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
from libcloud.compute.base import NodeDriver, Node
from libcloud.compute.base import NodeState
from libcloud.compute.types import Provider
from libcloud.utils.networking import is_public_subnet
try:
import libvirt
have_libvirt = True
except ImportError:
have_libvirt = False
class LibvirtNodeDriver(NodeDriver):
"""
Libvirt (http://libvirt.org/) node driver.
To enable debug mode, set LIBVIR_DEBUG environment variable.
"""
type = Provider.LIBVIRT
name = 'Libvirt'
website = 'http://libvirt.org/'

# Maps libvirt domain-state integers onto libcloud NodeState values.
NODE_STATE_MAP = {
    0: NodeState.TERMINATED,  # no state
    1: NodeState.RUNNING,  # domain is running
    2: NodeState.PENDING,  # domain is blocked on resource
    3: NodeState.TERMINATED,  # domain is paused by user
    4: NodeState.TERMINATED,  # domain is being shut down
    5: NodeState.TERMINATED,  # domain is shut off
    6: NodeState.UNKNOWN,  # domain is crashed
    7: NodeState.UNKNOWN,  # domain is suspended by guest power management
}
def __init__(self, uri):
    """
    :param uri: Hypervisor URI (e.g. vbox:///session, qemu:///system,
                etc.).
    :type uri: ``str``

    :raises RuntimeError: if the ``libvirt`` Python bindings are not
        installed (``have_libvirt`` is set by the import guard above).
    """
    if not have_libvirt:
        raise RuntimeError('Libvirt driver requires \'libvirt\' Python ' +
                           'package')

    self._uri = uri
    # Opens a read-write connection to the hypervisor immediately.
    self.connection = libvirt.open(uri)
def list_nodes(self):
    """Return a :class:`Node` for every domain known to the hypervisor."""
    all_domains = self.connection.listAllDomains()
    return self._to_nodes(domains=all_domains)
def reboot_node(self, node):
    """Reboot the domain backing ``node``; True on success."""
    target = self._get_domain_for_node(node=node)
    status = target.reboot(flags=0)
    return status == 0
def destroy_node(self, node):
    """Forcibly terminate the domain backing ``node``; True on success."""
    target = self._get_domain_for_node(node=node)
    status = target.destroy()
    return status == 0
def ex_start_node(self, node):
    """
    Start a stopped node.

    :param node: Node which should be used
    :type node: :class:`Node`

    :rtype: ``bool``
    """
    target = self._get_domain_for_node(node=node)
    status = target.create()
    return status == 0
def ex_shutdown_node(self, node):
    """
    Shutdown a running node.

    Note: Usually this will result in sending an ACPI event to the node.

    :param node: Node which should be used
    :type node: :class:`Node`

    :rtype: ``bool``
    """
    target = self._get_domain_for_node(node=node)
    status = target.shutdown()
    return status == 0
def ex_suspend_node(self, node):
    """
    Suspend a running node.

    :param node: Node which should be used
    :type node: :class:`Node`

    :rtype: ``bool``
    """
    target = self._get_domain_for_node(node=node)
    status = target.suspend()
    return status == 0
def ex_resume_node(self, node):
    """
    Resume a suspended node.

    :param node: Node which should be used
    :type node: :class:`Node`

    :rtype: ``bool``
    """
    target = self._get_domain_for_node(node=node)
    status = target.resume()
    return status == 0
def ex_take_node_screenshot(self, node, directory, screen=0):
    """
    Take a screenshot of a monitoring of a running instance.

    :param node: Node to take the screenshot of.
    :type node: :class:`libcloud.compute.base.Node`

    :param directory: Path where the screenshot will be saved.
    :type directory: ``str``

    :param screen: ID of the monitor to take the screenshot of.
    :type screen: ``int``

    :return: Full path where the screenshot has been saved.
    :rtype: ``str``

    :raises ValueError: If ``directory`` does not exist or is not a
                        directory.
    """
    if not os.path.exists(directory) or not os.path.isdir(directory):
        raise ValueError('Invalid value for directory argument')

    domain = self._get_domain_for_node(node=node)
    stream = self.connection.newStream()
    # Bug fix: pass the caller-supplied ``screen`` through instead of
    # always capturing monitor 0.
    mime_type = domain.screenshot(stream=stream, screen=screen)
    extensions = mimetypes.guess_all_extensions(type=mime_type)

    # Fall back to .png when the MIME type is unknown to the platform.
    if extensions:
        extension = extensions[0]
    else:
        extension = '.png'

    name = 'screenshot-%s%s' % (int(time.time()), extension)
    file_path = os.path.join(directory, name)

    with open(file_path, 'wb') as fp:
        def write(stream, buf, opaque):
            fp.write(buf)

        stream.recvAll(write, None)

    try:
        stream.finish()
    except Exception:
        # Finish is not supported by all backends
        pass

    return file_path
def ex_get_hypervisor_hostname(self):
    """
    Return a system hostname on which the hypervisor is running.
    """
    return self.connection.getHostname()
def ex_get_hypervisor_sysinfo(self):
    """
    Retrieve hypervisor system information.

    The returned dict has one key per sysinfo section (bios, system,
    processor, memory_device), each mapping to its parsed entries.

    :rtype: ``dict``
    """
    root = ET.XML(self.connection.getSysinfo())

    sections = ['bios', 'system', 'processor', 'memory_device']
    return dict(
        (section, self._get_entries(element=root.find(section)))
        for section in sections)
def _to_nodes(self, domains):
nodes = [self._to_node(domain=domain) for domain in domains]
return nodes
def _to_node(self, domain):
    """Build a libcloud ``Node`` from a single libvirt domain object."""
    # domain.info() returns a 5-tuple; the local names below record how
    # each position is used here.
    state, max_mem, memory, vcpu_count, used_cpu_time = domain.info()
    state = self.NODE_STATE_MAP.get(state, NodeState.UNKNOWN)

    public_ips, private_ips = [], []

    ip_addresses = self._get_ip_addresses_for_domain(domain)

    # Split addresses into public/private based on their subnet.
    for ip_address in ip_addresses:
        if is_public_subnet(ip_address):
            public_ips.append(ip_address)
        else:
            private_ips.append(ip_address)

    # NOTE(review): 'used_memory' divides by 1024, which assumes libvirt
    # reports memory in KiB -- confirm against the libvirt docs.
    extra = {'uuid': domain.UUIDString(), 'os_type': domain.OSType(),
             'types': self.connection.getType(),
             'used_memory': memory / 1024, 'vcpu_count': vcpu_count,
             'used_cpu_time': used_cpu_time}

    node = Node(id=domain.ID(), name=domain.name(), state=state,
                public_ips=public_ips, private_ips=private_ips,
                driver=self, extra=extra)
    node._uuid = domain.UUIDString()  # we want to use a custom UUID
    return node
def _get_ip_addresses_for_domain(self, domain):
    """
    Retrieve IP addresses for the provided domain.

    Note: This functionality is currently only supported on Linux and
    only works if this code is run on the same machine as the VMs run
    on.

    :return: IP addresses for the provided domain.
    :rtype: ``list``
    """
    result = []

    if platform.system() != 'Linux':
        # Only Linux is supported atm
        return result

    mac_addresses = self._get_mac_addresses_for_domain(domain=domain)

    # Query the host ARP cache and match the domain's MACs against it.
    cmd = ['arp', '-an']
    child = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    stdout, _ = child.communicate()
    # NOTE(review): on Python 3 ``stdout`` is ``bytes`` while
    # _parse_arp_table splits on a ``str`` newline -- confirm a decode
    # happens somewhere or that only Python 2 is targeted.
    arp_table = self._parse_arp_table(arp_output=stdout)

    for mac_address in mac_addresses:
        if mac_address in arp_table:
            ip_addresses = arp_table[mac_address]
            result.extend(ip_addresses)

    return result
def _get_mac_addresses_for_domain(self, domain):
"""
Parses network interface MAC addresses from the provided domain.
"""
xml = domain.XMLDesc()
etree = ET.XML(xml)
elems = etree.findall("devices/interface[@type='network']/mac")
result = []
for elem in elems:
mac_address = elem.get('address')
result.append(mac_address)
return result
def _get_domain_for_node(self, node):
"""
Return libvirt domain object for the provided node.
"""
domain = self.connection.lookupByUUIDString(node.uuid)
return domain
def _get_entries(self, element):
"""
Parse entries dictionary.
:rtype: ``dict``
"""
elements = element.findall('entry')
result = {}
for element in elements:
name = element.get('name')
value = element.text
result[name] = value
return result
def _parse_arp_table(self, arp_output):
"""
Parse arp command output and return a dictionary which maps mac address
to an IP address.
:return: Dictionary which maps mac address to IP address.
:rtype: ``dict``
"""
lines = arp_output.split('\n')
arp_table = defaultdict(list)
for line in lines:
match = re.match('.*?\((.*?)\) at (.*?)\s+', line)
if not match:
continue
groups = match.groups()
ip_address = groups[0]
mac_address = groups[1]
arp_table[mac_address].append(ip_address)
return arp_table
|
apache-2.0
| 3,972,654,172,272,321,500
| 29.443114
| 79
| 0.590382
| false
| 4.199917
| false
| false
| false
|
srkukarni/heron
|
heron/tools/cli/src/python/restart.py
|
1
|
2128
|
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' restart.py '''
from heron.common.src.python.utils.log import Log
import heron.tools.cli.src.python.args as args
import heron.tools.cli.src.python.cli_helper as cli_helper
import heron.tools.common.src.python.utils.config as config
def create_parser(subparsers):
    '''
    Register the 'restart' subcommand on ``subparsers`` and return its parser.

    :param subparsers:
    :return:
    '''
    restart_parser = subparsers.add_parser(
        'restart',
        help='Restart a topology',
        usage="%(prog)s [options] cluster/[role]/[env] <topology-name> [container-id]",
        add_help=True)

    # Standard argument groups shared by heron CLI subcommands.
    args.add_titles(restart_parser)
    args.add_cluster_role_env(restart_parser)
    args.add_topology(restart_parser)

    # Optional positional argument; defaults to -1 when not given.
    restart_parser.add_argument(
        'container-id',
        nargs='?',
        type=int,
        default=-1,
        help='Identifier of the container to be restarted')

    args.add_config(restart_parser)
    args.add_service_url(restart_parser)
    args.add_verbose(restart_parser)

    restart_parser.set_defaults(subcommand='restart')
    return restart_parser
# pylint: disable=unused-argument
def run(command, parser, cl_args, unknown_args):
    '''
    Execute the 'restart' subcommand.

    :param command:
    :param parser:
    :param cl_args:
    :param unknown_args:
    :return:
    '''
    Log.debug("Restart Args: %s", cl_args)
    container_id = cl_args['container-id']

    # Server mode forwards the request to the heron API server (dict-shaped
    # extra args); direct mode invokes the local executable (flag list).
    if cl_args['deploy_mode'] == config.SERVER_MODE:
        extra = {"container_id": str(container_id)}
        return cli_helper.run_server(command, cl_args, "restart topology",
                                     extra_args=extra)

    extra = ["--container_id", str(container_id)]
    return cli_helper.run_direct(command, cl_args, "restart topology",
                                 extra_args=extra)
|
apache-2.0
| -3,431,443,403,372,592,000
| 30.761194
| 98
| 0.705357
| false
| 3.505766
| false
| false
| false
|
Muon/s3o-tools
|
s3o-optimize.py
|
1
|
3528
|
#!/usr/bin/env python
from s3o import S3O
from optparse import OptionParser
from glob import glob
import vertex_cache
def recursively_optimize_pieces(piece):
    """Optimize ``piece`` and, recursively, every descendant piece."""
    optimize_piece(piece)
    for sub_piece in piece.children:
        recursively_optimize_pieces(sub_piece)
def chunks(l, n):
    """ Yield successive n-sized chunks from l.
    """
    return (tuple(l[start:start + n]) for start in range(0, len(l), n))
def optimize_piece(piece):
    """Deduplicate vertices, reorder triangles for the post-transform vertex
    cache, and re-pack the piece's vertex/index buffers in place."""
    # Map each distinct vertex to a fresh index; duplicate vertices collapse
    # onto the first index that referenced them.
    remap = {}
    new_indices = []
    for index in piece.indices:
        vertex = piece.vertices[index]
        if vertex not in remap:
            remap[vertex] = len(remap)
        new_indices.append(remap[vertex])

    # Rebuild the vertex list in first-use order.
    new_vertices = [(index, vertex) for vertex, index in remap.items()]
    new_vertices.sort()
    new_vertices = [vertex for index, vertex in new_vertices]

    if piece.primitive_type == "triangles" and len(new_indices) > 0:
        # Try a cache-optimized triangle order and keep it only if the
        # average-transform-to-vertex ratio (ACMR) actually improves.
        tris = list(chunks(new_indices, 3))
        acmr = vertex_cache.average_transform_to_vertex_ratio(tris)

        tmp = vertex_cache.get_cache_optimized_triangles(tris)
        acmr_new = vertex_cache.average_transform_to_vertex_ratio(tmp)
        if acmr_new < acmr:
            new_indices = []
            for tri in tmp:
                new_indices.extend(tri)

            # Re-number vertices in the order the optimized index list
            # first touches them.
            vertex_map = []
            remapped_indices = []
            for index in new_indices:
                try:
                    new_index = vertex_map.index(index)
                except ValueError:
                    new_index = len(vertex_map)
                    vertex_map.append(index)

                remapped_indices.append(new_index)

            new_vertices = [new_vertices[index] for index in vertex_map]
            new_indices = remapped_indices

    piece.indices = new_indices
    piece.vertices = new_vertices
def sizeof_fmt(num):
    """Render a byte count as a human-readable string (bytes/KB/MB/GB/TB)."""
    for unit in ('bytes', 'KB', 'MB', 'GB'):
        if abs(num) < 1024.0:
            return "%3.1f %s" % (num, unit)
        num /= 1024.0
    # Historical output format: no space before the 'TB' suffix.
    return "%3.1f%s" % (num, 'TB')
if __name__ == '__main__':
    # Command-line entry point: optimize one or more .s3o files in place.
    parser = OptionParser(usage="%prog [options] FILES", version="%prog 0.1",
                          description="Optimize a Spring S3O file by "
                                      "removing redundant data.")
    parser.add_option("-d", "--dry-run", action="store_true",
                      default=False, dest="is_dry",
                      help="show output summary without committing changes")
    parser.add_option("-q", "--quiet", action="store_true",
                      default=False, dest="silence_output",
                      help="silence detailed optimization output")

    options, args = parser.parse_args()
    if len(args) < 1:
        parser.error("insufficient arguments")
    dry = options.is_dry
    silence_output = options.silence_output

    # A single argument may be a glob pattern; expand it ourselves (helps
    # on shells that do not expand wildcards).
    if len(args) == 1:
        filenames = glob(args[0])
    else:
        filenames = args

    delta_total = 0
    for filename in filenames:
        # Open read/write so the optimized model can be written back in place.
        with open(filename, 'rb+') as input_file:
            data = input_file.read()
            model = S3O(data)
            recursively_optimize_pieces(model.root_piece)
            optimized_data = model.serialize()

            delta_size = len(optimized_data) - len(data)
            delta_total += delta_size
            if not silence_output:
                print("modified %s: "
                      "size change: %+d bytes" % (filename, delta_size))
            if not dry:
                # Truncate before writing: the optimized file is smaller.
                input_file.seek(0)
                input_file.truncate()
                input_file.write(optimized_data)
    print("total size difference: %s" % sizeof_fmt(delta_total))
|
mit
| -5,973,406,678,418,618,000
| 29.413793
| 77
| 0.571995
| false
| 3.793548
| false
| false
| false
|
pywinauto/pywinauto
|
examples/notepad_slow.py
|
1
|
9097
|
# GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run some automations to test things"""
from __future__ import unicode_literals
from __future__ import print_function
import os.path
import sys
import time
try:
from pywinauto import application
except ImportError:
pywinauto_path = os.path.abspath(__file__)
pywinauto_path = os.path.split(os.path.split(pywinauto_path)[0])[0]
sys.path.append(pywinauto_path)
from pywinauto import application
from pywinauto import tests
from pywinauto.findbestmatch import MatchError
from pywinauto.timings import Timings
# Slow down pywinauto's global timings: more forgiving on slow machines
# and applications, at the cost of longer waits.
print("Setting timings to slow settings, may be necessary for")
print("slow applications or slow machines.")
Timings.slow()

#application.set_timing(3, .5, 10, .5, .4, .2, .2, .1, .2, .5)
def run_notepad():
    """Run notepad and do some small stuff with it.

    Demonstrates starting an application, navigating menus/dialogs,
    running built-in UI tests, typing text (including Unicode), and
    saving a file. Purely side-effecting GUI automation; returns None.
    """
    start = time.time()

    app = application.Application()

    ## for distribution we don't want to connect to anybodies application
    ## because we may mess up something they are working on!
    #try:
    #    app.connect_(path = r"c:\windows\system32\notepad.exe")
    #except application.ProcessNotFoundError:
    #    app.start_(r"c:\windows\system32\notepad.exe")
    app.start(r"notepad.exe")

    app.Notepad.menu_select("File->PageSetup")

    # ----- Page Setup Dialog ----
    # Select the 4th combobox item
    app.PageSetupDlg.SizeComboBox.select(4)

    # Select the 'Letter' combobox item or the Letter
    try:
        app.PageSetupDlg.SizeComboBox.select("Letter")
    except ValueError:
        app.PageSetupDlg.SizeComboBox.select('Letter (8.5" x 11")')
    app.PageSetupDlg.SizeComboBox.select(2)

    # run some tests on the Dialog. List of available tests:
    #        "AllControls",
    #        "AsianHotkey",
    #        "ComboBoxDroppedHeight",
    #        "CompareToRefFont",
    #        "LeadTrailSpaces",
    #        "MiscValues",
    #        "Missalignment",
    #        "MissingExtraString",
    #        "Overlapping",
    #        "RepeatedHotkey",
    #        "Translation",
    #        "Truncation",
    bugs = app.PageSetupDlg.run_tests('RepeatedHotkey Truncation')

    # if there are any bugs they will be printed to the console
    # and the controls will be highlighted
    tests.print_bugs(bugs)

    # ----- Next Page Setup Dialog ----
    app.PageSetupDlg.Printer.click()

    # do some radio button clicks
    # Open the Connect to printer dialog so we can
    # try out checking/unchecking a checkbox
    app.PageSetupDlg.Network.click()

    # ----- Connect To Printer Dialog ----
    # Select a checkbox
    app.ConnectToPrinter.ExpandByDefault.check()
    app.ConnectToPrinter.ExpandByDefault.uncheck()

    # try doing the same by using click
    app.ConnectToPrinter.ExpandByDefault.click()
    app.ConnectToPrinter.ExpandByDefault.click()

    # close the dialog
    app.ConnectToPrinter.Cancel.close_click()

    # ----- 2nd Page Setup Dialog again ----
    app.PageSetupDlg.Properties.click()

    doc_props = app.window(name_re = ".*Properties$")
    doc_props.wait('exists', timeout=40)

    # ----- Document Properties Dialog ----
    # some tab control selections
    # Two ways of selecting tabs with indices...
    doc_props.TabCtrl.select(0)
    doc_props.TabCtrl.select(1)
    try:
        doc_props.TabCtrl.select(2)
    except IndexError:
        # not all users have 3 tabs in this dialog
        print('Skip 3rd tab selection...')

    # or with text...
    doc_props.TabCtrl.select("PaperQuality")
    try:
        doc_props.TabCtrl.select("JobRetention")
    except MatchError:
        # some people do not have the "Job Retention" tab
        print('Skip "Job Retention" tab...')

    # doc_props.TabCtrl.select("Layout")
    #
    # # do some radio button clicks
    # doc_props.RotatedLandscape.click()
    # doc_props.BackToFront.click()
    # doc_props.FlipOnShortEdge.click()
    #
    # doc_props.Portrait.click()
    # doc_props._None.click()
    # doc_props.FrontToBack.click()
    #
    # # open the Advanced options dialog in two steps
    # advbutton = doc_props.Advanced
    # advbutton.click()
    #
    # # close the 4 windows
    #
    # # ----- Advanced Options Dialog ----
    # app.window(name_re = ".* Advanced Options").Ok.click()

    # ----- Document Properties Dialog again ----
    doc_props.Cancel.close_click()

    # for some reason my current printer driver
    # window does not close cleanly :(
    if doc_props.Cancel.exists():
        doc_props.OK.close_click()

    # ----- 2nd Page Setup Dialog again ----
    app.PageSetupDlg.OK.close_click()

    # ----- Page Setup Dialog ----
    app.PageSetupDlg.Ok.close_click()

    # type some text - note that extended characters ARE allowed
    app.Notepad.Edit.set_edit_text("I am typing s\xe4me text to Notepad\r\n\r\n"
                                   "And then I am going to quit")

    app.Notepad.Edit.right_click()
    app.Popup.menu_item("Right To Left Reading Order").click()

    #app.PopupMenu.menu_select("Paste", app.Notepad.ctrl_())
    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Right To Left Reading Order", app.Notepad.ctrl_())
    #app.PopupMenu.menu_select("Show unicode control characters", app.Notepad.ctrl_())
    #time.sleep(1)
    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Right To Left Reading Order", app.Notepad.ctrl_())
    #time.sleep(1)

    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Insert Unicode control character -> IAFS", app.Notepad.ctrl_())
    #time.sleep(1)

    #app.Notepad.Edit.type_keys("{ESC}")

    # the following shows that Sendtext does not accept
    # accented characters - but does allow 'control' characters
    app.Notepad.Edit.type_keys("{END}{ENTER}SendText d\xf6\xe9s "
                               u"s\xfcpp\xf4rt \xe0cce\xf1ted characters!!!", with_spaces = True)

    # Try and save
    app.Notepad.menu_select("File->SaveAs")
    app.SaveAs.EncodingComboBox.select("UTF-8")
    app.SaveAs.FileNameEdit.set_edit_text("Example-utf8.txt")
    app.SaveAs.Save.close_click()

    # my machine has a weird problem - when connected to the network
    # the SaveAs Dialog appears - but doing anything with it can
    # cause a LONG delay - the easiest thing is to just wait
    # until the dialog is no longer active

    # - Dialog might just be gone - because click worked
    # - dialog might be waiting to disappear
    #   so can't wait for next dialog or for it to be disabled
    # - dialog might be waiting to display message box so can't wait
    #   for it to be gone or for the main dialog to be enabled.

    # while the dialog exists wait upto 30 seconds (and yes it can
    # take that long on my computer sometimes :-( )
    app.SaveAsDialog2.Cancel.wait_not('enabled')

    # If file exists - it asks you if you want to overwrite
    try:
        app.SaveAs.Yes.wait('exists').close_click()
    except MatchError:
        print('Skip overwriting...')

    # exit notepad
    app.Notepad.menu_select("File->Exit")

    #if not run_with_appdata:
    #    app.WriteAppData(os.path.join(scriptdir, "Notepad_fast.pkl"))

    print("That took %.3f to run"% (time.time() - start))
# Run the demo only when executed as a script.
if __name__ == "__main__":
    run_notepad()
|
bsd-3-clause
| 718,532,393,781,119,500
| 34.388
| 95
| 0.665714
| false
| 3.713061
| true
| false
| false
|
brguez/TEIBA
|
src/python/retrotransposition_eventTypes.chart.py
|
1
|
14275
|
#!/usr/bin/env python
#coding: utf-8
def header(string):
    """
    Display header

    Prints a timestamped, asterisk-delimited section header to stdout.
    """
    timeInfo = time.strftime("%Y-%m-%d %H:%M")
    print '\n', timeInfo, "****", string, "****"
def info(string):
    """
    Display basic information

    Prints a timestamped progress message to stdout.
    """
    timeInfo = time.strftime("%Y-%m-%d %H:%M")
    print timeInfo, string
####### CLASSES #######
class cohort():
    """
    Collection of per-donor VCF objects grouped by project code.

    Methods:
    - read_VCFs
    """
    def __init__(self):
        """
        """
        # Maps projectCode -> list of VCF objects (one per donor).
        self.VCFdict = {}

    def read_VCFs(self, inputPath):
        """
        Read a tab-separated index file (projectCode, sampleId, VCF path),
        parse each listed VCF and register it under its project code.
        """
        inputFile = open(inputPath, 'r')

        info("Read input VCFs ")

        # Per iteration, read a VCF, generate a VCF object and add it to the cohort
        for line in inputFile:
            line = line.rstrip('\n')
            line = line.split("\t")
            projectCode = line[0]
            sampleId = line[1]
            VCFfile = line[2]

            # Create VCF object
            VCFObj = formats.VCF()

            info("Reading " + VCFfile + "...")

            # Input VCF available
            if os.path.isfile(VCFfile):

                # Read VCF and add information to VCF object
                VCFObj.read_VCF(VCFfile)

                # Add projectCode and sampleId information to the genotype field in each MEI object
                for MEIObject in VCFObj.lineList:
                    MEIObject.format = MEIObject.format + ':SAMPLEID'
                    MEIObject.genotype = MEIObject.genotype + ':' + sampleId

                # Initialize the donor list for a given project if needed
                if projectCode not in self.VCFdict:
                    self.VCFdict[projectCode] = []

                # Add donor VCF to cohort
                self.VCFdict[projectCode].append(VCFObj)

            else:
                print "[ERROR] Input file does not exist"
####### FUNCTIONS #######
def autolabel(rects, ax, valuesList):
    """Annotate each horizontal bar in ``rects`` with the matching value
    from ``valuesList``, placed just past the right edge of the axes."""
    # Get x-axis height to calculate label position from.
    (x_left, x_right) = ax.get_xlim()
    x_length = x_right - x_left

    index = 0
    for rect in rects:
        value = valuesList[index]
        ax.text(1.04*x_length, rect.get_y(),
                '%d' % int(value),
                ha='center', va='bottom', fontsize=8)
        index += 1
#### MAIN ####

## Import modules ##
import argparse
import sys
import os.path
import formats
import time
from operator import itemgetter, attrgetter, methodcaller
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import matplotlib.patches as mpatches

## Get user's input ##
parser = argparse.ArgumentParser(description= """""")
parser.add_argument('inputPath', help='Tabular text file containing one row per sample with the following consecutive fields: projectCode sampleId vcf_path')
parser.add_argument('pseudoPath', help='[PROVISIONAL] Tabular text file containing one row per projectCode with the following consecutive fields: projectCode Nb.pseudogenes')
parser.add_argument('-o', '--outDir', default=os.getcwd(), dest='outDir', help='output directory. Default: current working directory.')

args = parser.parse_args()
inputPath = args.inputPath
pseudoPath = args.pseudoPath
outDir = args.outDir

scriptName = os.path.basename(sys.argv[0])

## Display configuration to standard output ##
print
print "***** ", scriptName, " configuration *****"
print "inputPath: ", inputPath
print "pseudoPath: ", pseudoPath
print "outDir: ", outDir
print
print "***** Executing ", scriptName, ".... *****"
print

## Start ##

############# BAR CHART ###############

### 1. Initialize cohort object
cohortObj = cohort()

### 2. Read VCF files, create VCF objects and organize them
cohortObj.read_VCFs(inputPath)

### 3. Make a dictionary containing per tumor type the total number of events for each retrotrotransposition insertion type:
# * L1-solo
# * L1-td
# * Alu
# * SVA
# * ERVK
# * processed-pseudogene
eventCountsDict = {}

for projectCode in cohortObj.VCFdict:

    ## Initialize category counts
    eventCountsDict[projectCode] = {}
    eventCountsDict[projectCode]["L1-solo"] = 0
    eventCountsDict[projectCode]["L1-transduction"] = 0
    eventCountsDict[projectCode]["Alu"] = 0
    eventCountsDict[projectCode]["SVA"] = 0
    eventCountsDict[projectCode]["ERVK"] = 0
    eventCountsDict[projectCode]["processed-pseudogene"] = 0

    ## Count total number of donors per tumor type and number of donor per category
    for VCFObj in cohortObj.VCFdict[projectCode]:

        for MEIObj in VCFObj.lineList:

            MEIClass = MEIObj.infoDict["CLASS"]
            MEIType = MEIObj.infoDict["TYPE"]

            ## a) L1-solo:
            if (MEIClass == "L1") and (MEIType == "TD0"):
                eventCountsDict[projectCode]["L1-solo"] += 1

            ## b) L1-transduction
            elif (MEIType == "TD1") or (MEIType == "TD2"):
                eventCountsDict[projectCode]["L1-transduction"] += 1

            ## c) Alu
            # Note: I added a provisional SCORE filtering
            # to ask for at least one breakpoint reconstructed
            elif (MEIClass == "Alu") and (int(MEIObj.infoDict["SCORE"]) > 2):
                eventCountsDict[projectCode]["Alu"] += 1

            ## d) SVA
            # Note: I added a provisional SCORE filtering
            # to ask for at least one breakpoint reconstructed
            elif (MEIClass == "SVA") and (int(MEIObj.infoDict["SCORE"]) > 2):
                eventCountsDict[projectCode]["SVA"] += 1

            ## e) ERVK
            # Note: I added a provisional SCORE filtering
            # to ask for at least one breakpoint reconstructed
            elif (MEIClass == "ERVK") and (int(MEIObj.infoDict["SCORE"]) > 2):
                eventCountsDict[projectCode]["ERVK"] += 1

            ## f) Processed pseudogene
            elif (MEIClass == "PSD"):
                eventCountsDict[projectCode]["processed-pseudogene"] += 1

            ## g) Unexpected value
            #else:
            #    print MEIObj.infoDict["CLASS"], "[ERROR] Unexpected MEI Class value"

print "eventCountsDict: ", eventCountsDict

### 4. Make dataframe with the number of events per type for each tumor type
# Project codes: columns
# Event type number of events:
#                 ProjectCode1 ProjectCode2 ProjectCode3....
# L1-solo              X1           Y1           Z1
# L1-transduction      X2           Y2           Z2
# ...
eventCountsDataframe = pd.DataFrame(eventCountsDict)

print "eventCountsDataframe: ", eventCountsDataframe

### PROVISIONAL -- ADD PSEUDOGENE INFORMATION
pseudoCounts = open(pseudoPath, 'r')

# Read file line by line
for line in pseudoCounts:
    line = line.rstrip('\r\n')

    ## Discard header
    if not line.startswith("#"):
        fieldsList = line.split("\t")
        projectCode = str(fieldsList[0])
        nbPseudogenes = fieldsList[1]

        # Add nb. pseudogenes to the counts dataframe
        eventCountsDataframe.set_value("processed-pseudogene", projectCode, nbPseudogenes)

print "eventCountsDataframePseudo: ", eventCountsDataframe

### 5. Make dataframe with the percentage of events per type for each tumor type
# Project codes: columns
# Categories % samples:
#                 ProjectCode1 ProjectCode2 ProjectCode3....
# L1-solo              X1%          Y1%          Z1%
# L1-transduction      X2%          Y2%          Z2%
nbEventsPerTumorTypeSerie = eventCountsDataframe.sum(axis=0)

print "nbEventsPerTumorTypeSerie:" , nbEventsPerTumorTypeSerie

eventTypes = eventCountsDataframe.index
projecCodes = eventCountsDataframe.columns

eventPercDataframe = pd.DataFrame(index=eventTypes, columns=projecCodes)

# Iterate over row index labels (activity categories)
for eventType in eventTypes:

    # Iterate over column index labels (project codes)
    for projectCode in projecCodes:

        eventCountProjectCode = eventCountsDataframe.loc[eventType, projectCode]
        nbEventsInTumortype = nbEventsPerTumorTypeSerie.loc[projectCode]

        # Compute the percentage
        eventPercTumorType = float(eventCountProjectCode)/float(nbEventsInTumortype) * 100

        # Add source element contribution to dataframe
        eventPercDataframe.set_value(eventType, projectCode, eventPercTumorType)

print "eventPercDataframe: ", eventPercDataframe

## Order dataframe columns (tumor types) in the same order as for the chart generated in "retrotranspositionRates.chart.py"
tumorTypeList = ['ESAD', 'HNSC', 'COAD', 'LUSC', 'STAD', 'UCEC', 'PRAD', 'MELA', 'BOCA', 'PACA', 'BRCA', 'LIRI', 'READ', 'CESC', 'OV', 'SARC', 'LIHC', 'GBM', 'THCA', 'BLCA', 'GACA', 'PAEN', 'KICH', 'BTCA', 'ORCA', 'SKCM', 'LINC', 'KIRP', 'LGG', 'LUAD', 'KIRC', 'DLBC', 'EOPC', 'LAML', 'RECA', 'CMDI', 'LICA', 'MALY', 'PBCA', 'CLLE']

# I need to reverse it to have the bars correctly placed...
tumorTypeList.reverse()

eventPercSortedDataframe = eventPercDataframe.reindex_axis(tumorTypeList, axis=1)

print "eventPercSortedDataframe: ", eventPercSortedDataframe

### 6. Make list per event type containing the percentage of events in each tumor category
# list 0 insertions [%ProjectCode1, %ProjectCode2, ... ]
# list 1-10 insertions [%ProjectCode1, %ProjectCode2, ... ]
# ...
AluList, ERVKList, L1SoloList, L1TDList, SVAList, PSDList = eventPercSortedDataframe.values.tolist()

### 7. Make ordered list containing the total number of insertions per tumor type
nbEventsPerTumorTypeSortedSerie = nbEventsPerTumorTypeSerie.reindex(tumorTypeList)

### 8. Make bar plot
# Note: I will not represent ERVK as we only have one insertion in the full PCAWG cohort...
ypos = np.arange(1, len(AluList) + 1)    # the y locations for the groups

height = 0.75      # the width of the bars: can also be len(x) sequence

fig = plt.figure(figsize=(7, 12))
# fig.suptitle('Number of samples', fontsize=12)
ax = fig.add_subplot(111)
ax.yaxis.set_label_position("right")
plt.ylabel('Total number of MEI', fontsize=10, labelpad=40)
plt.xlabel('% MEI', fontsize=10)

# Stacked horizontal bars: each event type is stacked on top of the
# cumulative sum of the previous ones (the ``left`` offsets).
p1 = ax.barh(ypos, L1SoloList, color='#aed3e3', alpha=0.90, edgecolor='#000000', height=height, align='center')

p2 = ax.barh(ypos, L1TDList, color='#ed1f24', alpha=0.90, edgecolor='#000000', height=height, align='center',
             left=[i for i in L1SoloList])

p3 = ax.barh(ypos, AluList, color='#59bd7d', alpha=0.90, edgecolor='#000000', height=height, align='center',
             left=[i+j for i,j in zip(L1SoloList, L1TDList)])

p4 = ax.barh(ypos, SVAList, color='#faa41a', alpha=0.90, edgecolor='#000000', height=height, align='center',
             left=[i+j+x for i,j,x in zip(L1SoloList, L1TDList, AluList)])

p5 = ax.barh(ypos, PSDList, color='#8B4513', alpha=0.80, edgecolor='#000000', height=height, align='center',
             left=[i+j+x+z for i,j,x,z in zip(L1SoloList, L1TDList, AluList, SVAList)])

# Add a horizontal grid to the plot, but make it very light in color
# so we can use it for reading data values but not be distracting
ax.xaxis.grid(True, linestyle='-', which='major', color='lightgrey',
              alpha=0.5)
ax.set_axisbelow(True)

## Customize axis
plot_xmargin = 20
plot_ymargin = 4

x0, x1, y0, y1 = plt.axis()
plt.axis((x0,
          x1 - plot_xmargin,
          y0,
          y1 - plot_ymargin))

## Customize ticks
plt.xticks(np.arange(0, 100.001, 10), fontsize=8)
plt.yticks(ypos, tumorTypeList, fontsize=8)

# Rotate them
locs, labels = plt.xticks()
plt.setp(labels, rotation=90)

## Add the number of samples per tumor type on the top of each bar
nbEventsPerTumorList = nbEventsPerTumorTypeSortedSerie.values.tolist()

print "nbEventsPerTumorTypeSortedSerie: ", nbEventsPerTumorTypeSortedSerie

autolabel(p5, ax, nbEventsPerTumorList)    ## autolabel function

## Make legend
circle1 = mpatches.Circle((0, 0), 5, color='#aed3e3', alpha=0.90)
circle2 = mpatches.Circle((0, 0), 5, color='#ed1f24', alpha=0.90)
circle3 = mpatches.Circle((0, 0), 5, color='#59bd7d', alpha=0.90)
circle4 = mpatches.Circle((0, 0), 5, color='#faa41a', alpha=0.90)
circle5 = mpatches.Circle((0, 0), 5, color='#8B4513', alpha=0.90)

l = plt.figlegend((circle1, circle2, circle3, circle4, circle5), ("L1-solo", "L1-transduction", "Alu", "SVA", "processed-pseudogene"), loc = 'upper center', ncol=5, labelspacing=0.75, fontsize=8, fancybox=True)

## Save figure
fileName = outDir + "/PCAWG_retrotransposition_events_tumorTypes.pdf"

plt.savefig(fileName)

############# PIE CHART ###############

## 1. Gather data
regionsList = []

## For project code
for projectCode in cohortObj.VCFdict:

    ## For donor
    for VCFObj in cohortObj.VCFdict[projectCode]:

        ## For MEI
        for MEIObj in VCFObj.lineList:

            # Collapse fine-grained annotations into broader buckets
            # (Other / UTR / ncRNA); anything else keeps its own label.
            if (MEIObj.infoDict['REGION']=="splicing") or (MEIObj.infoDict['REGION']=="upstream,downstream") or (MEIObj.infoDict['REGION']=="upstream") or (MEIObj.infoDict['REGION']=="downstream"):
                region = "Other"

            elif (MEIObj.infoDict['REGION']=="UTR5") or (MEIObj.infoDict['REGION']=="UTR3") or (MEIObj.infoDict['REGION']=="UTR5,UTR3") or (MEIObj.infoDict['REGION']=="UTR3,UTR5"):
                region = "UTR"

            elif (MEIObj.infoDict['REGION']=="ncRNA_exonic") or (MEIObj.infoDict['REGION']=="ncRNA_intronic") or (MEIObj.infoDict['REGION']=="ncRNA_splicing"):
                region = "ncRNA"

            else:
                region = MEIObj.infoDict['REGION']

            regionsList.append(region)

regionTuples = [(x, int(regionsList.count(x))) for x in set(regionsList)]

regionList = [list(t) for t in zip(*regionTuples)]

labels = regionList[0]
sizes = regionList[1]

## 2. Make pie chart
fig = plt.figure(figsize=(6,6))
fig.suptitle('Somatic MEI functional spectrum', fontsize=16)
colors = ['#008000', '#A67D3D', '#87CEFA', '#ff0000', '#FFD700', '#FFA500']
patches, texts, perc = plt.pie(sizes, colors=colors, startangle=90, autopct='%1.1f%%', pctdistance=1.2, labeldistance=1)
plt.legend(patches, labels, loc="best", fontsize=11)

##### Save figure
fileName = outDir + "/PCAWG_somatic_funcSpectrum_piechart.pdf"

plt.savefig(fileName)

## End ##
print
print "***** Finished! *****"
print
|
gpl-3.0
| 7,796,183,208,989,563,000
| 33.589806
| 332
| 0.633359
| false
| 3.264834
| false
| false
| false
|
davidwilson-85/easymap
|
graphic_output/Pillow-4.2.1/Tests/test_file_gimpgradient.py
|
1
|
2789
|
from helper import unittest, PillowTestCase
from PIL import GimpGradientFile
class TestImage(PillowTestCase):
    """Unit tests for the GIMP gradient interpolation helpers and for
    loading .ggr files through ImagePalette."""

    def test_linear_pos_le_middle(self):
        # Position at or below the midpoint maps linearly onto [0, 0.5].
        self.assertEqual(GimpGradientFile.linear(0.5, 0.25), 0.25)

    def test_linear_pos_le_small_middle(self):
        # A vanishingly small midpoint clamps the result to 0.0.
        self.assertEqual(GimpGradientFile.linear(1e-11, 1e-12), 0.0)

    def test_linear_pos_gt_middle(self):
        # Position above the midpoint maps linearly onto [0.5, 1].
        self.assertEqual(GimpGradientFile.linear(0.5, 0.75), 0.75)

    def test_linear_pos_gt_small_middle(self):
        # A midpoint essentially at 1 clamps the result to 1.0.
        self.assertEqual(GimpGradientFile.linear(1 - 1e-11, 1 - 1e-12), 1.0)

    def test_curved(self):
        self.assertEqual(GimpGradientFile.curved(0.5, 0.75), 0.75)

    def test_sine(self):
        self.assertEqual(GimpGradientFile.sine(0.5, 0.75),
                         0.8535533905932737)

    def test_sphere_increasing(self):
        self.assertAlmostEqual(GimpGradientFile.sphere_increasing(0.5, 0.75),
                               0.9682458365518543)

    def test_sphere_decreasing(self):
        self.assertEqual(GimpGradientFile.sphere_decreasing(0.5, 0.75),
                         0.3385621722338523)

    def test_load_via_imagepalette(self):
        from PIL import ImagePalette

        palette = ImagePalette.load("Tests/images/gimp_gradient.ggr")

        # load returns raw palette information
        self.assertEqual(len(palette[0]), 1024)
        self.assertEqual(palette[1], "RGBA")

    def test_load_1_3_via_imagepalette(self):
        from PIL import ImagePalette

        # GIMP 1.3 gradient files contain a name field
        palette = ImagePalette.load("Tests/images/gimp_gradient_with_name.ggr")

        # load returns raw palette information
        self.assertEqual(len(palette[0]), 1024)
        self.assertEqual(palette[1], "RGBA")
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
gpl-3.0
| -5,356,480,336,562,170,000
| 21.312
| 62
| 0.564001
| false
| 3.67942
| true
| false
| false
|
irl/gajim
|
src/common/contacts.py
|
1
|
32063
|
# -*- coding:utf-8 -*-
## src/common/contacts.py
##
## Copyright (C) 2006 Dimitur Kirov <dkirov AT gmail.com>
## Travis Shirk <travis AT pobox.com>
## Nikos Kouremenos <kourem AT gmail.com>
## Copyright (C) 2006-2014 Yann Leboulanger <asterix AT lagaule.org>
## Jean-Marie Traissard <jim AT lapin.org>
## Copyright (C) 2007 Lukas Petrovicky <lukas AT petrovicky.net>
## Tomasz Melcer <liori AT exroot.org>
## Julien Pivotto <roidelapluie AT gmail.com>
## Copyright (C) 2007-2008 Stephan Erb <steve-e AT h3c.de>
## Copyright (C) 2008 Brendan Taylor <whateley AT gmail.com>
## Jonathan Schleifer <js-gajim AT webkeks.org>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
from functools import cmp_to_key
try:
from common import caps_cache
from common.account import Account
import common.gajim
except ImportError as e:
if __name__ != "__main__":
raise ImportError(str(e))
class XMPPEntity(object):
    """
    Base representation of entities in XMPP: the identity triple shared
    by roster contacts and group chat occupants.
    """

    def __init__(self, jid, account, resource):
        self.account = account
        self.jid = jid
        self.resource = resource
class CommonContact(XMPPEntity):
    """
    State shared by roster contacts (Contact) and group chat occupants
    (GC_Contact): presence, display name, entity caps and chat states.
    """

    def __init__(self, jid, account, resource, show, status, name,
                 our_chatstate, chatstate, client_caps=None):
        XMPPEntity.__init__(self, jid, account, resource)
        self.show = show
        self.status = status
        self.name = name
        # Fall back to a null caps implementation when nothing is known yet.
        self.client_caps = client_caps or caps_cache.NullClientCaps()
        # XEP-0085 chat states (http://www.xmpp.org/extensions/xep-0085.html):
        # our_chatstate is what WE SEND to the contact, chatstate is what the
        # contact sends to us.
        self.our_chatstate = our_chatstate
        self.chatstate = chatstate

    def get_full_jid(self):
        # Subclasses define how the full JID is assembled.
        raise NotImplementedError

    def get_shown_name(self):
        # Subclasses define which display name to use.
        raise NotImplementedError

    def supports(self, requested_feature):
        """
        Tell whether this contact advertised support for the feature
        identified by the given namespace.
        """
        if self.show != 'offline':
            return caps_cache.client_supports(self.client_caps,
                requested_feature)
        # If every resource is offline the contact still carries the caps of
        # the last resource seen online, so do not report stale features.
        return False
class Contact(CommonContact):
    """
    Information concerning a roster contact.
    """

    def __init__(self, jid, account, name='', groups=None, show='', status='',
            sub='', ask='', resource='', priority=0, keyID='', client_caps=None,
            our_chatstate=None, chatstate=None, last_status_time=None, msg_id=None,
            last_activity_time=None):
        if not isinstance(jid, str):
            print('no str')
        # Never use a mutable default argument: a shared [] would leak
        # groups between Contact instances. None is the new default and is
        # treated exactly like the old empty list.
        if groups is None:
            groups = []
        CommonContact.__init__(self, jid, account, resource, show, status, name,
            our_chatstate, chatstate, client_caps=client_caps)
        self.contact_name = ''  # nick chosen by contact
        self.groups = [i for i in set(groups)]  # filter duplicate values
        self.sub = sub
        self.ask = ask
        self.priority = priority
        self.keyID = keyID
        self.msg_id = msg_id
        self.last_status_time = last_status_time
        self.last_activity_time = last_activity_time
        self.pep = {}

    def get_full_jid(self):
        """Return jid/resource, or the bare jid when no resource is known."""
        if self.resource:
            return self.jid + '/' + self.resource
        return self.jid

    def get_shown_name(self):
        """Return the best display name: roster name, contact nick or node."""
        if self.name:
            return self.name
        if self.contact_name:
            return self.contact_name
        return self.jid.split('@')[0]

    def get_shown_groups(self):
        """Return the roster groups under which this contact is displayed."""
        if self.is_observer():
            return [_('Observers')]
        elif self.is_groupchat():
            return [_('Groupchats')]
        elif self.is_transport():
            return [_('Transports')]
        elif not self.groups:
            return [_('General')]
        else:
            return self.groups

    def is_hidden_from_roster(self):
        """
        If contact should not be visible in roster
        """
        # XEP-0162: http://www.xmpp.org/extensions/xep-0162.html
        if self.is_transport():
            return False
        if self.sub in ('both', 'to'):
            return False
        if self.sub in ('none', 'from') and self.ask == 'subscribe':
            return False
        if self.sub in ('none', 'from') and (self.name or len(self.groups)):
            return False
        if _('Not in Roster') in self.groups:
            return False
        return True

    def is_observer(self):
        # XEP-0162: http://www.xmpp.org/extensions/xep-0162.html
        # An observer has one-way 'from' subscription and is otherwise hidden.
        return (self.sub == 'from' and not self.is_transport()
            and self.is_hidden_from_roster())

    def is_groupchat(self):
        """Tell whether this jid is a group chat we are connected to."""
        for account in common.gajim.gc_connected:
            if self.jid in common.gajim.gc_connected[account]:
                return True
        return False

    def is_transport(self):
        # if not '@' or '@' starts the jid then contact is transport
        return self.jid.find('@') <= 0
class GC_Contact(CommonContact):
    """
    Information concerning each groupchat contact
    """

    def __init__(self, room_jid, account, name='', show='', status='', role='',
                 affiliation='', jid='', resource='', our_chatstate=None,
                 chatstate=None):
        CommonContact.__init__(self, jid, account, resource, show, status,
            name, our_chatstate, chatstate)
        # Room-specific state on top of the common contact attributes.
        self.room_jid = room_jid
        self.role = role
        self.affiliation = affiliation

    def get_full_jid(self):
        # A group chat occupant's full JID is room@server/nick.
        return '%s/%s' % (self.room_jid, self.name)

    def get_shown_name(self):
        # The nick is the only display name a group chat occupant has.
        return self.name

    def as_contact(self):
        """
        Create a Contact instance from this GC_Contact instance
        """
        contact = Contact(jid=self.get_full_jid(), account=self.account,
            name=self.name, groups=[], show=self.show, status=self.status,
            sub='none', client_caps=self.client_caps)
        return contact
class LegacyContactsAPI:
    """
    This is a GOD class for accessing contact and groupchat information.
    The API has several flaws:
      * it mixes concerns because it deals with contacts, groupchats,
        groupchat contacts and metacontacts
      * some methods like get_contact() may return None. This leads to
        a lot of duplication all over Gajim because it is not sure
        if we receive a proper contact or just None.
    It is a long way to cleanup this API. Therefore just stick with it
    and use it as before. We will try to figure out a migration path.
    """

    def __init__(self):
        # Metacontact handling is delegated; unknown attribute lookups fall
        # through to the manager via __getattr__ below.
        self._metacontact_manager = MetacontactManager(self)
        # {account_name: Account} -- each Account owns a Contacts and a
        # GC_Contacts store.
        self._accounts = {}

    def change_account_name(self, old_name, new_name):
        # Rename an account while keeping its contacts and metacontact tags.
        self._accounts[new_name] = self._accounts[old_name]
        self._accounts[new_name].name = new_name
        del self._accounts[old_name]
        self._metacontact_manager.change_account_name(old_name, new_name)

    def add_account(self, account_name):
        # Each account gets fresh, empty contact stores.
        self._accounts[account_name] = Account(account_name, Contacts(),
            GC_Contacts())
        self._metacontact_manager.add_account(account_name)

    def get_accounts(self):
        return self._accounts.keys()

    def remove_account(self, account):
        del self._accounts[account]
        self._metacontact_manager.remove_account(account)

    def create_contact(self, jid, account, name='', groups=[], show='',
            status='', sub='', ask='', resource='', priority=0, keyID='',
            client_caps=None, our_chatstate=None, chatstate=None, last_status_time=None,
            last_activity_time=None):
        # Use Account object if available
        account = self._accounts.get(account, account)
        return Contact(jid=jid, account=account, name=name, groups=groups,
            show=show, status=status, sub=sub, ask=ask, resource=resource,
            priority=priority, keyID=keyID, client_caps=client_caps,
            our_chatstate=our_chatstate, chatstate=chatstate,
            last_status_time=last_status_time,
            last_activity_time=last_activity_time)

    def create_self_contact(self, jid, account, resource, show, status, priority,
            name='', keyID=''):
        # Build the contact that represents our own account in the roster.
        conn = common.gajim.connections[account]
        nick = name or common.gajim.nicks[account]
        account = self._accounts.get(account, account) # Use Account object if available
        self_contact = self.create_contact(jid=jid, account=account,
            name=nick, groups=['self_contact'], show=show, status=status,
            sub='both', ask='none', priority=priority, keyID=keyID,
            resource=resource)
        self_contact.pep = conn.pep
        return self_contact

    def create_not_in_roster_contact(self, jid, account, resource='', name='',
            keyID=''):
        # Use Account object if available
        account = self._accounts.get(account, account)
        return self.create_contact(jid=jid, account=account, resource=resource,
            name=name, groups=[_('Not in Roster')], show='not in roster',
            status='', sub='none', keyID=keyID)

    def copy_contact(self, contact):
        # Shallow copy: a fresh Contact with identical attribute values.
        return self.create_contact(contact.jid, contact.account,
            name=contact.name, groups=contact.groups, show=contact.show,
            status=contact.status, sub=contact.sub, ask=contact.ask,
            resource=contact.resource, priority=contact.priority,
            keyID=contact.keyID, client_caps=contact.client_caps,
            our_chatstate=contact.our_chatstate, chatstate=contact.chatstate,
            last_status_time=contact.last_status_time,
            last_activity_time=contact.last_activity_time)

    def add_contact(self, account, contact):
        # The account entry is created lazily on first use.
        if account not in self._accounts:
            self.add_account(account)
        return self._accounts[account].contacts.add_contact(contact)

    def remove_contact(self, account, contact):
        if account not in self._accounts:
            return
        return self._accounts[account].contacts.remove_contact(contact)

    def remove_jid(self, account, jid, remove_meta=True):
        # Drop every resource of jid, optionally untagging its metacontact.
        self._accounts[account].contacts.remove_jid(jid)
        if remove_meta:
            self._metacontact_manager.remove_metacontact(account, jid)

    def get_contacts(self, account, jid):
        return self._accounts[account].contacts.get_contacts(jid)

    def get_contact(self, account, jid, resource=None):
        return self._accounts[account].contacts.get_contact(jid, resource=resource)

    def iter_contacts(self, account):
        for contact in self._accounts[account].contacts.iter_contacts():
            yield contact

    def get_contact_from_full_jid(self, account, fjid):
        return self._accounts[account].contacts.get_contact_from_full_jid(fjid)

    def get_first_contact_from_jid(self, account, jid):
        return self._accounts[account].contacts.get_first_contact_from_jid(jid)

    def get_contacts_from_group(self, account, group):
        return self._accounts[account].contacts.get_contacts_from_group(group)

    def get_contacts_jid_list(self, account):
        return self._accounts[account].contacts.get_contacts_jid_list()

    def get_jid_list(self, account):
        return self._accounts[account].contacts.get_jid_list()

    def change_contact_jid(self, old_jid, new_jid, account):
        return self._accounts[account].change_contact_jid(old_jid, new_jid)

    def get_highest_prio_contact_from_contacts(self, contacts):
        # Return the contact whose resource has the highest priority, or
        # None for an empty list.
        if not contacts:
            return None
        prim_contact = contacts[0]
        for contact in contacts[1:]:
            if int(contact.priority) > int(prim_contact.priority):
                prim_contact = contact
        return prim_contact

    def get_contact_with_highest_priority(self, account, jid):
        contacts = self.get_contacts(account, jid)
        if not contacts and '/' in jid:
            # jid may be a fake jid, try it
            room, nick = jid.split('/', 1)
            contact = self.get_gc_contact(account, room, nick)
            return contact
        return self.get_highest_prio_contact_from_contacts(contacts)

    def get_nb_online_total_contacts(self, accounts=[], groups=[]):
        """
        Return the number of online contacts and the total number of contacts
        """
        if accounts == []:
            accounts = self.get_accounts()
        nbr_online = 0
        nbr_total = 0
        for account in accounts:
            our_jid = common.gajim.get_jid_from_account(account)
            for jid in self.get_jid_list(account):
                if jid == our_jid:
                    continue
                if common.gajim.jid_is_transport(jid) and not \
                _('Transports') in groups:
                    # do not count transports
                    continue
                if self.has_brother(account, jid, accounts) and not \
                self.is_big_brother(account, jid, accounts):
                    # count metacontacts only once
                    continue
                contact = self._accounts[account].contacts._contacts[jid][0]
                if _('Not in roster') in contact.groups:
                    continue
                in_groups = False
                if groups == []:
                    in_groups = True
                else:
                    for group in groups:
                        if group in contact.get_shown_groups():
                            in_groups = True
                            break
                if in_groups:
                    if contact.show not in ('offline', 'error'):
                        nbr_online += 1
                    nbr_total += 1
        return nbr_online, nbr_total

    def __getattr__(self, attr_name):
        # Only called if self has no attr_name
        # Transparently delegate metacontact methods to the manager.
        if hasattr(self._metacontact_manager, attr_name):
            return getattr(self._metacontact_manager, attr_name)
        else:
            raise AttributeError(attr_name)

    def create_gc_contact(self, room_jid, account, name='', show='', status='',
            role='', affiliation='', jid='', resource=''):
        account = self._accounts.get(account, account) # Use Account object if available
        return GC_Contact(room_jid, account, name, show, status, role, affiliation, jid,
            resource)

    def add_gc_contact(self, account, gc_contact):
        return self._accounts[account].gc_contacts.add_gc_contact(gc_contact)

    def remove_gc_contact(self, account, gc_contact):
        return self._accounts[account].gc_contacts.remove_gc_contact(gc_contact)

    def remove_room(self, account, room_jid):
        return self._accounts[account].gc_contacts.remove_room(room_jid)

    def get_gc_list(self, account):
        return self._accounts[account].gc_contacts.get_gc_list()

    def get_nick_list(self, account, room_jid):
        return self._accounts[account].gc_contacts.get_nick_list(room_jid)

    def get_gc_contact(self, account, room_jid, nick):
        return self._accounts[account].gc_contacts.get_gc_contact(room_jid, nick)

    def is_gc_contact(self, account, jid):
        return self._accounts[account].gc_contacts.is_gc_contact(jid)

    def get_nb_role_total_gc_contacts(self, account, room_jid, role):
        return self._accounts[account].gc_contacts.get_nb_role_total_gc_contacts(room_jid, role)
class Contacts():
    """
    This is a breakout of the contact related behavior of the old
    Contacts class (which is not called LegacyContactsAPI).

    It stores one Contact instance per known resource of a bare JID:
    {jid: [contact1, contact2, ...]}
    """

    def __init__(self):
        # list of contacts {jid1: [C1, C2]} -- one Contact per resource
        self._contacts = {}

    def add_contact(self, contact):
        """
        Register *contact*. An existing entry for the same resource is
        replaced, and a lone offline placeholder entry is dropped.
        """
        if contact.jid not in self._contacts:
            self._contacts[contact.jid] = [contact]
            return
        contacts = self._contacts[contact.jid]
        # We had only one contact and it was offline: it is merely a
        # placeholder for the last seen resource, remove it.
        if len(contacts) == 1 and contacts[0].show == 'offline':
            contacts.remove(contacts[0])
        # If the same JID with the same resource already exists, the new
        # contact wins. Remove from the local list directly instead of
        # calling remove_contact(): that method deletes
        # self._contacts[contact.jid] when the list becomes empty, which
        # orphaned the list we append to below and silently lost the new
        # contact (bug in the previous version).
        for c in contacts:
            if c.resource == contact.resource:
                contacts.remove(c)
                break
        contacts.append(contact)
        # Re-register in case the entry was emptied/removed above.
        self._contacts[contact.jid] = contacts

    def remove_contact(self, contact):
        """Remove *contact*; drop the jid entry once no resource is left."""
        if contact.jid not in self._contacts:
            return
        if contact in self._contacts[contact.jid]:
            self._contacts[contact.jid].remove(contact)
            if len(self._contacts[contact.jid]) == 0:
                del self._contacts[contact.jid]

    def remove_jid(self, jid):
        """
        Remove all contacts for a given jid
        """
        if jid in self._contacts:
            del self._contacts[jid]

    def get_contacts(self, jid):
        """
        Return the list of contact instances for this jid
        """
        return self._contacts.get(jid, [])

    def get_contact(self, jid, resource=None):
        ### WARNING ###
        # This function returns a *RANDOM* resource if resource = None!
        # Do *NOT* use if you need to get the contact to which you
        # send a message for example, as a bare JID in Jabber means
        # highest available resource, which this function ignores!
        """
        Return the contact instance for the given resource if it's given else
        the first contact if no resource is given, or None if the jid is
        unknown.
        """
        if jid in self._contacts:
            if not resource:
                return self._contacts[jid][0]
            for c in self._contacts[jid]:
                if c.resource == resource:
                    return c
            # Unknown resource: fall back to the first one.
            return self._contacts[jid][0]

    def iter_contacts(self):
        """Yield every known contact; iterates over copies so callers may
        remove entries while iterating."""
        for jid in list(self._contacts.keys()):
            for contact in self._contacts[jid][:]:
                yield contact

    def get_jid_list(self):
        return list(self._contacts.keys())

    def get_contacts_jid_list(self):
        """Return the jids that are plain contacts (not group chats)."""
        return [jid for jid, contact in self._contacts.items() if not
            contact[0].is_groupchat()]

    def get_contact_from_full_jid(self, fjid):
        """
        Get Contact object for specific resource of given jid
        """
        barejid, resource = common.gajim.get_room_and_nick_from_fjid(fjid)
        return self.get_contact(barejid, resource)

    def get_first_contact_from_jid(self, jid):
        if jid in self._contacts:
            return self._contacts[jid][0]

    def get_contacts_from_group(self, group):
        """
        Return all contacts in the given group
        """
        group_contacts = []
        for jid in self._contacts:
            contacts = self.get_contacts(jid)
            if group in contacts[0].groups:
                group_contacts += contacts
        return group_contacts

    def change_contact_jid(self, old_jid, new_jid):
        """Move every contact stored under *old_jid* to *new_jid*."""
        if old_jid not in self._contacts:
            return
        if old_jid == new_jid:
            # Guard: the generic path below would otherwise replace the
            # entry with [] first and wipe all contacts for this jid.
            return
        self._contacts[new_jid] = []
        for _contact in self._contacts[old_jid]:
            _contact.jid = new_jid
            self._contacts[new_jid].append(_contact)
        del self._contacts[old_jid]
class GC_Contacts():
    """
    Storage for group chat occupants, keyed by room JID and nick:
    {room_jid: {nick: GC_Contact}}
    """

    def __init__(self):
        # list of contacts that are in gc {room_jid: {nick: C}}
        self._rooms = {}

    def add_gc_contact(self, gc_contact):
        """Register *gc_contact* under its room and nick."""
        if gc_contact.room_jid not in self._rooms:
            self._rooms[gc_contact.room_jid] = {gc_contact.name: gc_contact}
        else:
            self._rooms[gc_contact.room_jid][gc_contact.name] = gc_contact

    def remove_gc_contact(self, gc_contact):
        """Forget *gc_contact*; drop the room once its last nick is gone."""
        if gc_contact.room_jid not in self._rooms:
            return
        if gc_contact.name not in self._rooms[gc_contact.room_jid]:
            return
        del self._rooms[gc_contact.room_jid][gc_contact.name]
        # It was the last nick in room ?
        if not len(self._rooms[gc_contact.room_jid]):
            del self._rooms[gc_contact.room_jid]

    def remove_room(self, room_jid):
        """Forget every occupant of *room_jid*."""
        if room_jid in self._rooms:
            del self._rooms[room_jid]

    def get_gc_list(self):
        """Return the JIDs of all known rooms."""
        return self._rooms.keys()

    def get_nick_list(self, room_jid):
        """Return the nicks present in *room_jid* ([] for unknown rooms)."""
        if room_jid not in self._rooms:
            return []
        return list(self._rooms[room_jid].keys())

    def get_gc_contact(self, room_jid, nick):
        """Return the contact for *nick* in *room_jid*, or None."""
        if nick not in self.get_nick_list(room_jid):
            return None
        return self._rooms[room_jid][nick]

    def is_gc_contact(self, jid):
        """
        >>> gc = GC_Contacts()
        >>> gc._rooms = {'gajim@conference.gajim.org' : {'test' : True}}
        >>> gc.is_gc_contact('gajim@conference.gajim.org/test')
        True
        >>> gc.is_gc_contact('test@jabbim.com')
        False
        """
        parts = jid.split('/')
        if len(parts) != 2:
            return False
        # 'is not None' instead of '!= None': identity is the correct way
        # to test for None (fixes the old 'gcc != None' idiom).
        return self.get_gc_contact(parts[0], parts[1]) is not None

    def get_nb_role_total_gc_contacts(self, room_jid, role):
        """
        Return the number of group chat contacts for the given role and the
        total number of group chat contacts
        """
        if room_jid not in self._rooms:
            return 0, 0
        nb_role = nb_total = 0
        for nick in self._rooms[room_jid]:
            if self._rooms[room_jid][nick].role == role:
                nb_role += 1
            nb_total += 1
        return nb_role, nb_total
class MetacontactManager():
    """
    Bookkeeping for metacontacts: groups of JIDs (possibly spanning several
    accounts) that are displayed as a single roster entry.

    Tags are stored as {account: {tag: [{'jid': jid, 'tag': tag,
    'order': order}, ...]}} where 'order' is optional (see add_metacontact).
    """

    def __init__(self, contacts):
        self._metacontacts_tags = {}
        # Back-reference to the LegacyContactsAPI, used for priority lookups.
        self._contacts = contacts

    def change_account_name(self, old_name, new_name):
        self._metacontacts_tags[new_name] = self._metacontacts_tags[old_name]
        del self._metacontacts_tags[old_name]

    def add_account(self, account):
        if account not in self._metacontacts_tags:
            self._metacontacts_tags[account] = {}

    def remove_account(self, account):
        del self._metacontacts_tags[account]

    def define_metacontacts(self, account, tags_list):
        # Replace the whole tag table for an account (e.g. from the server).
        self._metacontacts_tags[account] = tags_list

    def _get_new_metacontacts_tag(self, jid):
        # A fresh tag is simply the jid itself.
        if not jid in self._metacontacts_tags:
            return jid
        #FIXME: can this append ?
        assert False

    def iter_metacontacts_families(self, account):
        for tag in self._metacontacts_tags[account]:
            family = self._get_metacontacts_family_from_tag(account, tag)
            yield family

    def _get_metacontacts_tag(self, account, jid):
        """
        Return the tag of a jid
        """
        if not account in self._metacontacts_tags:
            return None
        for tag in self._metacontacts_tags[account]:
            for data in self._metacontacts_tags[account][tag]:
                if data['jid'] == jid:
                    return tag
        return None

    def add_metacontact(self, brother_account, brother_jid, account, jid, order=None):
        # Attach (account, jid) to the metacontact of (brother_account,
        # brother_jid), creating a new tag for the brother if needed.
        tag = self._get_metacontacts_tag(brother_account, brother_jid)
        if not tag:
            tag = self._get_new_metacontacts_tag(brother_jid)
            self._metacontacts_tags[brother_account][tag] = [{'jid': brother_jid,
                'tag': tag}]
            if brother_account != account:
                common.gajim.connections[brother_account].store_metacontacts(
                    self._metacontacts_tags[brother_account])
        # be sure jid has no other tag
        old_tag = self._get_metacontacts_tag(account, jid)
        while old_tag:
            self.remove_metacontact(account, jid)
            old_tag = self._get_metacontacts_tag(account, jid)
        if tag not in self._metacontacts_tags[account]:
            self._metacontacts_tags[account][tag] = [{'jid': jid, 'tag': tag}]
        else:
            if order:
                self._metacontacts_tags[account][tag].append({'jid': jid,
                    'tag': tag, 'order': order})
            else:
                self._metacontacts_tags[account][tag].append({'jid': jid,
                    'tag': tag})
        # Persist the change on the server.
        common.gajim.connections[account].store_metacontacts(
            self._metacontacts_tags[account])

    def remove_metacontact(self, account, jid):
        if not account in self._metacontacts_tags:
            return
        found = None
        for tag in self._metacontacts_tags[account]:
            for data in self._metacontacts_tags[account][tag]:
                if data['jid'] == jid:
                    found = data
                    break
            if found:
                self._metacontacts_tags[account][tag].remove(found)
                common.gajim.connections[account].store_metacontacts(
                    self._metacontacts_tags[account])
                break

    def has_brother(self, account, jid, accounts):
        # True when the jid's metacontact contains more than one member
        # among the given accounts.
        tag = self._get_metacontacts_tag(account, jid)
        if not tag:
            return False
        meta_jids = self._get_metacontacts_jids(tag, accounts)
        return len(meta_jids) > 1 or len(meta_jids[account]) > 1

    def is_big_brother(self, account, jid, accounts):
        # True when (account, jid) is the representative member shown in the
        # roster for its metacontact family.
        family = self.get_metacontacts_family(account, jid)
        if family:
            # NOTE(review): this filter tests the *outer* 'account' variable,
            # so it keeps either all or none of the family -- possibly meant
            # data['account'] in accounts; confirm against upstream.
            nearby_family = [data for data in family
                if account in accounts]
            bb_data = self._get_metacontacts_big_brother(nearby_family)
            if bb_data['jid'] == jid and bb_data['account'] == account:
                return True
        return False

    def _get_metacontacts_jids(self, tag, accounts):
        """
        Return all jid for the given tag in the form {acct: [jid1, jid2],.}
        """
        answers = {}
        for account in self._metacontacts_tags:
            if tag in self._metacontacts_tags[account]:
                if account not in accounts:
                    continue
                answers[account] = []
                for data in self._metacontacts_tags[account][tag]:
                    answers[account].append(data['jid'])
        return answers

    def get_metacontacts_family(self, account, jid):
        """
        Return the family of the given jid, including jid in the form:
        [{'account': acct, 'jid': jid, 'order': order}, ] 'order' is optional
        """
        tag = self._get_metacontacts_tag(account, jid)
        return self._get_metacontacts_family_from_tag(account, tag)

    def _get_metacontacts_family_from_tag(self, account, tag):
        if not tag:
            return []
        answers = []
        for account in self._metacontacts_tags:
            if tag in self._metacontacts_tags[account]:
                for data in self._metacontacts_tags[account][tag]:
                    data['account'] = account
                    answers.append(data)
        return answers

    def _compare_metacontacts(self, data1, data2):
        """
        Compare 2 metacontacts
        Data is {'jid': jid, 'account': account, 'order': order} order is
        optional
        """
        jid1 = data1['jid']
        jid2 = data2['jid']
        account1 = data1['account']
        account2 = data2['account']
        contact1 = self._contacts.get_contact_with_highest_priority(account1, jid1)
        contact2 = self._contacts.get_contact_with_highest_priority(account2, jid2)
        # Ascending "importance" order of presence shows; the index in this
        # list is used as the comparison key below.
        show_list = ['not in roster', 'error', 'offline', 'invisible', 'dnd',
            'xa', 'away', 'chat', 'online', 'requested', 'message']
        # contact can be null when a jid listed in the metacontact data
        # is not in our roster
        if not contact1:
            if contact2:
                return -1 # prefer the known contact
            else:
                show1 = 0
                priority1 = 0
        else:
            show1 = show_list.index(contact1.show)
            priority1 = contact1.priority
        if not contact2:
            if contact1:
                return 1 # prefer the known contact
            else:
                show2 = 0
                priority2 = 0
        else:
            show2 = show_list.index(contact2.show)
            priority2 = contact2.priority
        # If only one is offline, it's always second
        if show1 > 2 and show2 < 3:
            return 1
        if show2 > 2 and show1 < 3:
            return -1
        # An explicit user-defined 'order' wins over everything below.
        if 'order' in data1 and 'order' in data2:
            if data1['order'] > data2['order']:
                return 1
            if data1['order'] < data2['order']:
                return -1
        if 'order' in data1:
            return 1
        if 'order' in data2:
            return -1
        # Non-transport jids are preferred over transport ones.
        transport1 = common.gajim.get_transport_name_from_jid(jid1)
        transport2 = common.gajim.get_transport_name_from_jid(jid2)
        if transport2 and not transport1:
            return 1
        if transport1 and not transport2:
            return -1
        if show1 > show2:
            return 1
        if show2 > show1:
            return -1
        if priority1 > priority2:
            return 1
        if priority2 > priority1:
            return -1
        # Prefer contacts hosted on the same server as our own account.
        server1 = common.gajim.get_server_from_jid(jid1)
        server2 = common.gajim.get_server_from_jid(jid2)
        myserver1 = common.gajim.config.get_per('accounts', account1, 'hostname')
        myserver2 = common.gajim.config.get_per('accounts', account2, 'hostname')
        if server1 == myserver1:
            if server2 != myserver2:
                return 1
        elif server2 == myserver2:
            return -1
        # Fall back to lexicographic jid, then account, as a stable
        # tie-breaker.
        if jid1 > jid2:
            return 1
        if jid2 > jid1:
            return -1
        # If all is the same, compare accounts, they can't be the same
        if account1 > account2:
            return 1
        if account2 > account1:
            return -1
        return 0

    def get_nearby_family_and_big_brother(self, family, account):
        """
        Return the nearby family and its Big Brother
        Nearby family is the part of the family that is grouped with the
        metacontact. A metacontact may be over different accounts. If accounts
        are not merged then the given family is split account wise.
        (nearby_family, big_brother_jid, big_brother_account)
        """
        if common.gajim.config.get('mergeaccounts'):
            # group all together
            nearby_family = family
        else:
            # we want one nearby_family per account
            nearby_family = [data for data in family if account == data['account']]
        big_brother_data = self._get_metacontacts_big_brother(nearby_family)
        big_brother_jid = big_brother_data['jid']
        big_brother_account = big_brother_data['account']
        return (nearby_family, big_brother_jid, big_brother_account)

    def _get_metacontacts_big_brother(self, family):
        """
        Which of the family will be the big brother under wich all others will be
        ?
        """
        # Sort ascending and take the last (i.e. "largest") member.
        family.sort(key=cmp_to_key(self._compare_metacontacts))
        return family[-1]
if __name__ == "__main__":
import doctest
doctest.testmod()
|
gpl-3.0
| -6,566,727,521,522,041,000
| 36.282558
| 96
| 0.588747
| false
| 3.980509
| false
| false
| false
|
davelab6/telaro
|
src/dash2/words/telaro/fileparse.py
|
1
|
10711
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# calc.py
#
# A simple calculator with variables. This is from O'Reilly's
# "Lex and Yacc", p. 63.
#
# Class-based example contributed to PLY by David McNab
# -----------------------------------------------------------------------------
import sys
sys.path.insert(0,"../..")
#import readline
import ply.lex as lex
import ply.yacc as yacc
import os
from getopt import getopt, GetoptError
import sys
from string import strip
from markov import Markov, random_sentence
def make_sentence(sentence, word_filter=None, max_chars=78):
    """
    Consume the word list *sentence* and lay it out as wrapped text.

    sentence    -- list of word/punctuation tokens; it is emptied in place.
                   The NUL token '\\x00' marks a paragraph break.
    word_filter -- optional mapping of allowed (lower-cased) words; words
                   missing from it are dropped (punctuation always passes).
    max_chars   -- maximum line width before wrapping.

    Return the formatted text as a single string.
    """
    punctuation = ' ()=-+.,:;\t?!"\''
    res = ''
    to_print = ''
    while sentence:
        word = sentence.pop(0)
        # Drop filtered-out words; punctuation is always kept. 'x in d'
        # replaces the Python-2-only d.has_key(x) and works on both.
        if (word not in punctuation and
                word_filter and word.lower() not in word_filter):
            continue
        if word == '\x00':
            # Paragraph separator: flush the current line, add a blank line.
            res += to_print + "\n\n"
            to_print = ''
            continue
        if len(to_print) + len(word) > max_chars:
            # Line full: flush it and start a new one.
            res += to_print + "\n"
            to_print = ''
        if to_print:
            if word in punctuation:
                # Punctuation attaches without a leading space.
                to_print += word
            else:
                to_print += ' ' + word
        else:
            to_print = word
    if to_print:
        res += to_print + "\n"
    return res
class Parser:
    """
    Base class for a lexer/parser that has the rules defined as methods
    """
    tokens = ()
    precedence = ()

    def __init__(self, **kw):
        self.debug = kw.get('debug', 0)
        self.sentences = []
        # Markov chain built from the parsed word transitions.
        self.markov = Markov()
        # {divider_token: [words seen right after it]}
        self.clause_starter = {}
        # Words that were seen starting a paragraph.
        self.para_starter = []
        self.word_filter = kw.get('word_filter', None)
        self.letter_priority = kw.get('letter_priority', None)
        try:
            modname = os.path.split(os.path.splitext(__file__)[0])[1] + "_" + self.__class__.__name__
        except:
            modname = "parser"+"_"+self.__class__.__name__
        self.debugfile = modname + ".dbg"
        self.tabmodule = modname + "_" + "parsetab"
        #print self.debugfile, self.tabmodule
        # Build the lexer and parser
        # (ply picks the t_*/p_* rules up from this instance's methods).
        lex.lex(module=self, debug=self.debug)
        yacc.yacc(module=self,
            debug=self.debug,
            debugfile=self.debugfile,
            tabmodule=self.tabmodule)

    def run(self, txt=None, para_starter=False):
        # Parse *txt* (or stdin), build the Markov model, then return a
        # randomly generated, wrapped text. (Python 2 module: note the
        # print statements below.)
        if txt is None:
            s = sys.stdin.read()
        else:
            s = txt
        # Normalise whitespace: blank lines become NUL paragraph markers,
        # remaining newlines become spaces.
        s = s.replace('\n\n', '\x00')
        s = s.replace('\x00\x00', '\x00')
        s = s.replace('\n\n', '')
        s = s.replace('\n', ' ')
        # NOTE(review): this replace looks like a no-op (space -> space);
        # possibly meant to collapse double spaces -- verify upstream.
        s = s.replace(' ', ' ')
        yacc.parse(s)
        print self.sentences
        self.markov.printout()
        print
        print "clause starters"
        keys = self.clause_starter.keys()
        keys.sort()
        for k in keys:
            v = self.clause_starter[k]
            print "\t", repr(k), v
        print
        print "para starters", self.para_starter
        print
        word_filter = self.word_filter
        if self.letter_priority and word_filter:
            # certain words are given a higher priority (multiplier)
            # than others.
            states = self.markov.states
            for from_word, fp in states.items():
                for to_word in fp.keys():
                    if word_filter.has_key(to_word.lower()):
                        fp[to_word] *= self.letter_priority
            word_filter = None
        self.markov.prepare()
        if para_starter:
            para_starters = None
        else:
            para_starters = self.para_starter
        sentence = random_sentence(self.markov, 800,
            starters=self.clause_starter,
            para_starters=para_starters)
        return make_sentence(sentence, word_filter=word_filter)
class Text(Parser):
tokens = (
'NAME',#'NUMBER',
'FULLSTOP',
'NULL',
#'LBRACK',
#'QUOTE',
#'RBRACK',
'COLON','SEMICOLON',
'EXCLAMATION','QUESTIONMARK',
'NEWLINE',
'TAB',
'SLASH',
'COMMA',
)
# Tokens
t_NULL = r'\x00'
t_FULLSTOP = r'\.'
#t_SPACE = r'\ '
t_COLON = r':'
t_SEMICOLON = r';'
t_NEWLINE = r'\n'
t_EXCLAMATION = r'!'
#t_QUOTE = r'[\'"]'
#t_LBRACK = r'\('
#t_RBRACK = r'\)'
t_QUESTIONMARK = r'\?'
t_TAB = r'\t'
t_COMMA = r','
t_SLASH = r'/'
t_NAME = r'[a-zA-Z0-9_][\'`a-zA-Z0-9_]*'
def _t_FLOAT(self, t):
r'\d+[\.]\d*'
try:
t.value = float(t.value)
except ValueError:
print "Integer value too large", t.value
t.value = 0
print "parsed number %s" % repr(t.value)
return t
def _t_NUMBER(self, t):
r'\d+'
try:
t.value = float(t.value)
except ValueError:
print "Integer value too large", t.value
t.value = 0
print "parsed number %s" % repr(t.value)
return t
t_ignore = " "
def _t_newline(self, t):
r'\n+'
t.lexer.lineno += t.value.count("\n")
def t_error(self, t):
print "Illegal character '%s'" % repr(t.value[0])
t.lexer.skip(1)
# Parsing rules
#precedence = (
# ('left','PLUS','MINUS'),
# ('left','TIMES','DIVIDE'),
# ('left', 'EXP'),
# ('right','UMINUS'),
# )
def p_text_para(self, p):
'text : paragraph'
self.sentences.append(p[1])
self.para_starter.append(p[1][0])
p[0] = [p[1]]
def p_text_paras(self, p):
'text : text NULL paragraph'
self.markov.inc_state_link(p[1][-1][-1], p[2])
self.markov.inc_state_link(p[2], p[3][0])
self.sentences.append(p[3])
self.para_starter.append(p[3][0])
p[0] = p[1] + [p[3]]
#print "join", repr(p[-1][-1][-1]), repr(p[2]), repr(p[3][0])
def p_paragraph_assign(self, p):
'paragraph : sentences'
#self.sentences.append(p[1])
#self.markov.inc_state_link(p[1][-1], p[2])
p[0] = p[1] #+ [p[2]]
def p_sentences_ended(self, p):
"""sentence : sentence clausedivider
"""
#if p[2] != '\n' or p[1][-1] in ':;,. \n':
self.markov.inc_state_link(p[1][-1], p[2])
p[0] = p[1] + [p[2]]
#def p_paradivider_expr(self, p):
# """paradivider : FULLSTOP NEWLINE
# """
# self.markov.inc_state_link(p[1], p[2])
# p[0] = [p[1], p[2]]
    def p_sentenceending_prefixedtwice(self, p):
        """sentenceending : clausedivider clausedivider sentence
        """
        # Two consecutive dividers before a sentence: link divider->divider
        # and divider->first word, and record which tokens can start a
        # clause after the second divider.
        #if p[1] != '\n' or p[2][0] in ':;,. \n':
        self.markov.inc_state_link(p[1], p[2])
        self.markov.inc_state_link(p[2], p[3][0])
        # dict.has_key is Python 2 only.
        if not self.clause_starter.has_key(p[2]):
            self.clause_starter[p[2]] = []
        self.clause_starter[p[2]].append(p[3][0])
        # Sentence-final punctuation also marks a paragraph starter.
        if p[2] in '.?! \n':
            self.para_starter.append(p[3][0])
        p[0] = [p[1], p[2]] + p[3]
    def p_sentenceending_prefixed(self, p):
        """sentenceending : clausedivider sentence
        """
        # Single divider before a sentence: same bookkeeping as above.
        #if p[1] != '\n' or p[2][0] in ':;,. \n':
        self.markov.inc_state_link(p[1], p[2][0])
        if not self.clause_starter.has_key(p[1]):
            self.clause_starter[p[1]] = []
        self.clause_starter[p[1]].append(p[2][0])
        if p[1] in '.?! \n':
            self.para_starter.append(p[2][0])
        p[0] = [p[1]] + p[2]
    def p_sentences_divided(self, p):
        """sentence : sentence sentenceending
        """
        # Join a sentence with its ending, linking across the boundary.
        #if p[2][0] != '\n' or p[1][-1] in '\n.':
        self.markov.inc_state_link(p[1][-1], p[2][0])
        p[0] = p[1] + p[2]
    def p_sentences_single(self, p):
        """sentences : sentence
        """
        #print "single sentence", p[1]
        p[0] = p[1]
    def p_clausedivider_expr(self, p):
        """clausedivider : FULLSTOP
        | COLON
        | SEMICOLON
        | TAB
        | SLASH
        | COMMA
        | EXCLAMATION
        | QUESTIONMARK
        """
        # Any punctuation/divider token passes through unchanged.
        p[0] = p[1]
    def p_sentence_namesorlinks(self, p):
        """sentence : sentence NAME
        """
        # Append a word to the sentence, linking from the previous token.
        #print "sentence names", p[1], p[2]
        self.markov.inc_state_link(p[1][-1], p[2])
        p[0] = p[1] + [p[2]]
    # Commented-out hyperlink grammar kept for reference.
    #def p_hyperlink_expr1(self, p):
    #    """hyperlink : NAME COLON SLASH SLASH namedots
    #    """
    #    p[0] = p[1]+"://"+p[5]
    #    print "hyperlink", p[0]
    #def p_namedots_expr(self, p):
    #    """namedots : NAME FULLSTOP namedots
    #    """
    #    p[0] = p[1]+"."+p[3]
    #def p_namedots_name(self, p):
    #    """namedots : NAME
    #    """
    #    p[0] = p[1]
    def p_sentence_name(self, p):
        """sentence : NAME
        """
        # Base case: a sentence starts as a single word token.
        p[0] = [p[1]]
    #def p_nameorhyp_exp(self, p):
    #    """nameorhyp : NAME
    #    |            hyperlink"""
    #    p[0] = p[1]
    def p_error(self, p):
        # PLY error hook: report the offending token, or EOF.
        if p:
            print "Syntax error at '%s'" % repr(p.value)
        else:
            print "Syntax error at EOF"
def check_all_letters_in(letters, word):
    """Return True when every character of *word* appears in *letters*,
    matching either its lower- or upper-case form.

    An empty word trivially satisfies the check.
    """
    return all(
        ch.lower() in letters or ch.upper() in letters
        for ch in word)
if __name__ == '__main__':
    # Command-line driver (Python 2): build a letter set, optionally
    # filter the dictionary file down to words spellable from it, and run
    # the text generator.
    letter_priority = 0
    use_words = False
    words_file = "/usr/share/dict/words"
    # Default alphabet: lower-case a-z plus an apostrophe/backtick entry
    # (Python 2 map() returns a list here).
    letters = map(chr, range(65+32, 65+26+32)) + ["'`"]
    try:
        opts, args = getopt(sys.argv[1:], "l:d:hp:",
                            ["letters=", "dictionary=", "letter-priority=",
                             "help"])
    except GetoptError, message:
        # usage() is presumably defined elsewhere in this file -- verify.
        print "%s: %s" %(sys.argv[0], message)
        usage()
        exit(0)
    for optind, optarg in opts:
        if optind in ("--letter-priority", "-p"):
            letter_priority = int(optarg)
        elif optind in ("--dictionary", "-d"):
            use_words = True
            words_file = optarg
        elif optind in ("--letters", "-l"):
            # Replace the default alphabet with the given characters.
            letters = []
            for l in optarg:
                letters.append(l)
    words = None
    if use_words:
        # Word filter: keep only dictionary words spellable from letters;
        # stored as a dict used for membership tests.
        words = {}
        for w in open(words_file).readlines():
            w = w.strip()
            if not letters:
                words[w.lower()] = 1
            else:
                if check_all_letters_in(letters, w):
                    words[w.lower()] = 1
    calc = Text(word_filter=words, letter_priority=letter_priority)
    print calc.run()
|
gpl-3.0
| 5,159,091,096,856,290,000
| 28.105978
| 101
| 0.469051
| false
| 3.332607
| false
| false
| false
|
RoyGunhooYoon/mcb
|
mcb.py
|
1
|
3638
|
#!/usr/bin/python3
"""mcb ("multi-clipboard"): a tiny interactive clipboard manager.

Snapshots of the system clipboard are stored in a `shelve` database
('mcb') under user-chosen keywords and can be listed, reloaded, shown
or deleted from an interactive prompt.
"""
import shelve
import sys
import pyperclip

# Recognized commands; anything else prints a hint.
COMMANDS = ['help', 'list', 'load', 'save', 'quit', 'delete', 'show']

print('\nWelcome to mcb!\n')
print('Type help to see manual\n')
while True:
    args = input('>').split()
    if not args:
        # Fixed: an empty input line used to raise IndexError on args[0];
        # just show the prompt again.
        continue
    command = args[0]
    # Optional second token (keyword); None when absent.  The original
    # used bare try/except around args[1], which also swallowed unrelated
    # errors (e.g. from pyperclip).
    keyword = args[1] if len(args) > 1 else None
    # Fixed: the shelf is now closed every iteration (the original leaked
    # one open handle per loop pass, closing only on 'quit').
    with shelve.open('mcb') as mcb_shelve:
        if command not in COMMANDS:
            print("Unknown command, type help to see list of available commands.")
        elif command == 'quit':
            # SystemExit unwinds through the with-block, closing the shelf.
            print('Bye')
            sys.exit()
        elif command == 'help':
            with open('help.txt') as doc:
                print(doc.read())
        elif command == 'list':
            if len(mcb_shelve) > 0:
                for k in mcb_shelve:
                    print('Keyword: {} Overview: {}'.format(k, mcb_shelve[k][:30] + '...'))
            else:
                print("Could not find any keywords. Use save command to store clipboard into database.")
        elif command == 'save':
            if keyword is None:
                print("Please supply a keyword name to store clipboard content.")
            else:
                # Snapshot the clipboard once so the stored value matches
                # the confirmation message.
                content = pyperclip.paste()
                if keyword in mcb_shelve:
                    ask = input("Key already exist. Do you want to override it? (y/n)")
                    if ask == 'y':
                        mcb_shelve[keyword] = content
                        print("Keyword override success. New content: {}"
                              .format(content[:30] + '...'))
                    else:
                        print("Keyword override denied by user.")
                else:
                    mcb_shelve[keyword] = content
                    print("Clipboard successfully saved with keyword\nContent: {}"
                          .format(content[:30] + '...'))
        elif command == 'load':
            if keyword is None:
                print("Please supply keyword name to load stored clipboard.")
            elif keyword in mcb_shelve:
                pyperclip.copy(mcb_shelve[keyword])
                print("Content successfully copied to clipboard ctrl + v to paste.")
            else:
                print("Given keyword is not found. Type list to see available keywords.")
        elif command == 'delete':
            if keyword is None:
                print("Please supply keyword name that is to be deleted.")
            elif keyword in mcb_shelve:
                del mcb_shelve[keyword]
                print("Keyword: {} and its content has been removed"
                      .format(keyword))
            elif keyword == '*':
                ask = input("Are you sure you want to delete all keywords and its contents?(y/n)")
                if ask == 'y':
                    # Fixed: snapshot the keys first -- deleting while
                    # iterating the shelf mutates it mid-iteration.
                    for key in list(mcb_shelve):
                        del mcb_shelve[key]
                    print("Deleted all keywords in database.")
                else:
                    print("Request denied by user.")
            else:
                print("There are no matching keyword to delete.")
        elif command == 'show':
            if keyword is None:
                print("Please supply keyword name.")
            elif keyword in mcb_shelve:
                print(mcb_shelve[keyword])
            else:
                print("Given keyword is not found in database.")
|
bsd-3-clause
| 1,446,098,040,609,851,400
| 38.11828
| 104
| 0.474711
| false
| 4.780552
| false
| false
| false
|
R-daneel-olivaw/CPET
|
module1/probes/Pobe.py
|
1
|
7811
|
'''
Created on Feb 25, 2015
@author: Akshat
'''
import psutil
from module1.ds.procRecord import ProcRecord
from time import sleep
import csv
import os, platform, subprocess, re
from subprocess import check_output
import copy
class ProcessProbe:
    """Polls psutil for resource usage of a target process (and its
    children) and appends one aggregated row per poll to a CSV file.

    NOTE(review): PROCNAME/p_map/k_list/o_map below are mutable *class*
    attributes, so every ProcessProbe instance shares the same maps --
    confirm that a single probe per interpreter is intended.
    """
    PROCNAME = None  # target process name (also set per instance in __init__)
    p_map = {}       # parent pid -> [parent pid + child pids]
    k_list = [0]     # pids known to be dead ("kill list"); 0 is a sentinel
    o_map = {}       # pid -> cached psutil.Process handle
    def __init__(self, processName, pid=None, output_path=None, stepDelay=0.5):
        """
        :param processName: substring matched against a process's repr().
        :param pid: optional explicit pid; overrides the name lookup.
        :param output_path: path prefix for the output CSV file.
        :param stepDelay: seconds to sleep between polls.
        """
        if pid:
            self.pid = int(pid)
        else:
            self.pid = None
        self.PROCNAME = processName
        self.stepDelay = stepDelay
        self.output_path = output_path
    def isMatch(self, proc, name):
        # Loose substring match on the process repr (can match cmdline too).
        return name in repr(proc)
    def addToCSV(self, writer, mango):
        """Write one aggregated sample (a ProcRecord) as a CSV row."""
        # writer.appe(mango.getTime(), mango.getCpu(), mango.getMem())
        seq = mango.toSequence()
        writer.writerow(seq)
        return
    def getProcessNameForPid(self, pid):
        """Resolve a pid to its process name via psutil."""
        p = psutil.Process(int(pid))
        p_name = p.name()
        return p_name
    def getProcessForPid(self, pid):
        """Return a fresh (uncached) psutil.Process for pid."""
        p = psutil.Process(pid)
        return p
    def getPidForProcessName(self, procName):
        """Return the pid of the first matching process, or 0 when none.

        NOTE(review): the procName parameter is ignored -- the match uses
        self.PROCNAME; confirm this is intended.
        """
        for proc in psutil.process_iter():
            if self.isMatch(proc, self.PROCNAME):
                # print(proc)
                procId = proc.pid
                return procId
        return 0
    def getProbeTargetName(self):
        return self.PROCNAME
    def appendChildProcesses(self):
        """Refresh p_map with the target's current recursive children."""
        c_proc_id = copy.deepcopy(self.p_map)
        parent_id = self.getPidForProcessName(self.PROCNAME)
        # parent_id = 7832
        c_proc_id[parent_id] = [parent_id]
        # try:
        c_process = psutil.Process(parent_id)
        childs = c_process.children(recursive=True)
        for chp in childs:
            c_proc_id[parent_id].append(chp.pid)
        # De-duplicate pids collected across refreshes.
        c_proc_id[parent_id] = list(set(c_proc_id[parent_id]))
        '''
        except:
            print('process ', p, 'lost')
            continue
        '''
        self.p_map = c_proc_id
    def get_process(self, p):
        """Return a cached psutil.Process for pid p (caching keeps the
        cpu_percent delta meaningful across successive polls)."""
        if p not in self.o_map:
            pr = psutil.Process(p)
            self.o_map[p] = pr
        return self.o_map[p]
    def get_processor_speed(self):
        """Best-effort CPU clock in MHz; falls back to 100 when unknown."""
        if platform.system() == "Windows":
            pro_info = check_output("wmic cpu get name,CurrentClockSpeed,MaxClockSpeed", shell=True)
            pro_info = str(pro_info, "utf-8")
            # Skip the header lines; the first column is CurrentClockSpeed.
            pro_info = pro_info.splitlines()[2]
            pro_info = pro_info.split(sep=None, maxsplit=1)[0].strip()
            return int(pro_info)
        elif platform.system() == "Darwin":
            import os
            os.environ['PATH'] = os.environ['PATH'] + os.pathsep + '/usr/sbin'
            command = "sysctl -n machdep.cpu.brand_string"
            print('os not supported')
            print(subprocess.check_output(command).strip())
            return 100
        elif platform.system() == "Linux":
            cpu_mhz = check_output("lscpu | grep MHz", shell=True)
            cpu_mhz = str(cpu_mhz, 'utf-8')
            f_cpu_mhz = float(cpu_mhz.split(':')[1].strip())
            return f_cpu_mhz
        print('os not supported')
        return 100
    def probe_process(self, p, rec):
        """Sample pid p and fold the numbers into rec (a ProcRecord).

        Any failure (process gone, access denied, ...) adds p to k_list.
        NOTE(review): uses pre-2.0 psutil accessors (get_cpu_percent,
        get_memory_info, get_io_counters) -- confirm the pinned psutil
        version supports them.
        """
        try:
            proc = self.get_process(p)
            cpu = proc.get_cpu_percent(interval=0)
            cpu_speed = self.get_processor_speed()
            # Convert percent load into an absolute MHz figure, 2 decimals.
            cpu = float("{0:.2f}".format(cpu_speed * (cpu / 100)))
            # Resident set size in MiB.
            mem = proc.get_memory_info()[0] / float(2 ** 20)
            diskIo = proc.get_io_counters()
            disk_rc = diskIo[0]
            disk_wc = diskIo[1]
            disk_rb = diskIo[2]
            disk_wb = diskIo[3]
            netc = len(proc.connections())
            if not rec:
                rec = ProcRecord(cpu, mem, disk_rc, disk_wc, disk_rb, disk_wb, netc, 0)
            else:
                rec.addCpu(cpu)
                rec.addMem(mem)
                rec.addReadc(disk_rc)
                rec.addWritec(disk_wc)
                rec.addReadb(disk_rb)
                rec.addWriteb(disk_wb)
                rec.addConnectionCount(netc)
                rec.addChildCount(1)
            print(p, 'cpu = ', cpu)
            print(p, 'memory = ', mem)
            print(p, 'disk_read_count = ', diskIo[0])
            print(p, 'disk_write_count = ', diskIo[1])
            print(p, 'disk_read_bytes = ', diskIo[2])
            print(p, 'disk_write_bytes = ', diskIo[3])
            print(p, 'network counters = ', netc)
            print()
        except:
            # Bare except: any error at all is treated as "process lost".
            print("process lost..")
            self.k_list.append(p)
        return rec
    def startProbe(self):
        """Main polling loop: refresh child pids, sample every live pid,
        append one aggregated CSV row per poll, until every tracked
        parent process is gone."""
        parent_id = None
        if self.pid:
            self.PROCNAME = self.getProcessNameForPid(self.pid)
            parent_id = self.pid
        else :
            parent_id = self.getPidForProcessName(self.PROCNAME)
        print('STARTING PROBE FOR ', self.PROCNAME)
        # parent_id = 7832
        self.p_map[parent_id] = [parent_id]
        # self.procId.append(self.getPidForProcessName(self.PROCNAME))
        print(self.p_map)
        # print(proc)
        # print(proc.cpu_times())
        try:
            fileCsv = None
            while True:
                self.appendChildProcesses()
                # Drop parents that have exited; pid 0 is the sentinel
                # returned by getPidForProcessName when nothing matched.
                buffer = {}
                for parent in self.p_map:
                    if psutil.pid_exists(parent) and parent != 0:
                        buffer[parent] = self.p_map[parent]
                self.p_map = buffer
                if not self.p_map:
                    break
                for parent in self.p_map:
                    # Re-opened in append mode each poll; closed in finally.
                    if self.pid:
                        fileCsv = open(self.output_path + self.PROCNAME + str(self.pid) + '.csv', 'a')
                    else:
                        fileCsv = open(self.output_path + self.PROCNAME + '.csv', 'a')
                    writer = csv.writer(fileCsv, delimiter=',', quoting=csv.QUOTE_NONE, lineterminator='\n')
                    if(parent not in self.k_list):
                        if psutil.pid_exists(parent):
                            p_childs = self.p_map[parent]
                            rec = None
                            for p in p_childs:
                                if(p not in self.k_list):
                                    rec = self.probe_process(p, rec)
                            self.addToCSV(writer, rec)
                        else:
                            print('parent lost')
                            self.k_list.append(parent)
                            continue
                sleep(self.stepDelay)
        finally:
            if fileCsv:
                fileCsv.close()
            print("Terminating...")
|
lgpl-3.0
| -7,484,567,214,915,481,000
| 31.238298
| 118
| 0.428882
| false
| 4.291758
| false
| false
| false
|
joaormatos/anaconda
|
mmfparser/player/extensions/kcclock.py
|
1
|
26527
|
# Copyright (c) Mathias Kaerlev 2012.
# This file is part of Anaconda.
# Anaconda is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Anaconda is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Anaconda. If not, see <http://www.gnu.org/licenses/>.
"""
kcclock.mfx
Date and Time object - ClickTeam (http://www.clickteam.com)
Used to display date and time in various formats. Can act as a
stopwatch or countdown device.
Ported to Python by Mathias Kaerlev
"""
from mmfparser.player.extensions.common import UserExtension, HiddenObject
from mmfparser.player.event.actions.common import Action
from mmfparser.player.event.conditions.common import Condition
from mmfparser.player.event.expressions.common import Expression
# Actions
class Action0(Action):
"""
Set hundredths of seconds
Parameters:
0: Set hundredths of seconds (EXPRESSION, ExpressionParameter)
"""
def execute(self, instance):
value = self.evaluate_index(0)
newTime = int(instance.objectPlayer.currentTime) + value / 100.0
instance.objectPlayer.currentTime = newTime
class SetTimeAction(Action):
    """Base class for actions that overwrite one field of the clock time.

    Subclasses override action() and assign the relevant tm_* attribute
    on the value they receive.

    Fixed: get_struct_time() returns a time.struct_time (time.gmtime /
    time.localtime), which is immutable, so assigning tm_* on it raised
    AttributeError.  The struct is now copied into a plain mutable holder
    before action() runs and converted back to a 9-item tuple afterwards,
    which both time.mktime and calendar.timegm accept in
    set_struct_time().
    """
    # struct_time fields in positional order.
    _FIELDS = ('tm_year', 'tm_mon', 'tm_mday', 'tm_hour', 'tm_min',
               'tm_sec', 'tm_wday', 'tm_yday', 'tm_isdst')

    class _MutableTime(object):
        # Simple attribute bag standing in for an immutable struct_time.
        pass

    def execute(self, instance):
        new = self.evaluate_index(0)
        struct = instance.objectPlayer.get_struct_time()
        value = self._MutableTime()
        for name in self._FIELDS:
            setattr(value, name, getattr(struct, name))
        self.action(value, new)
        instance.objectPlayer.set_struct_time(tuple(
            getattr(value, name) for name in self._FIELDS))

    def action(self, value, new):
        # Overridden by subclasses; default is a no-op.
        pass
class Action1(SetTimeAction):
"""
Set seconds
Parameters:
0: Set seconds (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_sec = new
class Action2(SetTimeAction):
"""
Set minutes
Parameters:
0: Set minutes (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_min = new
class Action3(SetTimeAction):
    """
    Set hours

    Parameters:
    0: Set hours (EXPRESSION, ExpressionParameter)
    """
    def action(self, value, new):
        # Fixed: struct_time's field is tm_hour -- the original wrote to a
        # nonexistent tm_hours attribute (consistent with Expression3,
        # which reads tm_hour), so the new hour value was dropped.
        value.tm_hour = new
class Action4(SetTimeAction):
"""
Set day of week
Parameters:
0: Set day of week (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_wday = new
class Action5(SetTimeAction):
"""
Set day of month
Parameters:
0: Set day of month (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_mday = new
class Action6(SetTimeAction):
"""
Set month
Parameters:
0: Set month (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_mon = new
class Action7(SetTimeAction):
"""
Set year
Parameters:
0: Set year (EXPRESSION, ExpressionParameter)
"""
def action(self, value, new):
value.tm_year = new
class Action8(Action):
"""
Stop watch->Reset stop watch to 00:00:00
"""
def execute(self, instance):
instance.objectPlayer.counting = None
instance.objectPlayer.currentTime = 0
class Action9(Action):
"""
Stop watch->Start stop watch
"""
def execute(self, instance):
instance.objectPlayer.counting = 1
class Action10(Action):
"""
Stop watch->Pause stop watch
"""
def execute(self, instance):
instance.objectPlayer.counting = None
class Action11(Action):
"""
Visibility->Make object reappear
"""
def execute(self, instance):
instance.visible = True
class Action12(Action):
"""
Visibility->Make object invisible
"""
def execute(self, instance):
instance.visible = False
class Action13(Action):
"""
Position->Select position...
Parameters:
0: Select position... (POSITION, Position)
"""
def execute(self, instance):
destX, destY, _ = self.get_positions(
self.get_parameter(0))[0]
instance.set_position(destX, destY, True)
class Action14(Action):
"""
Count down->Set count down
Parameters:
0: Set count down (TIME, Time)
"""
def execute(self, instance):
instance.objectPlayer.currentTime = self.get_time(self.get_parameter(0))
class Action15(Action):
"""
Count down->Start count down
"""
def execute(self, instance):
instance.objectPlayer.counting = -1
class Action16(Action):
"""
Count down->Pause count down
"""
def execute(self, instance):
instance.objectPlayer.counting = None
class Action17(Action):
"""
Position->Set Horizontal Position
Parameters:
0: Set Horizontal Position (EXPRESSION, ExpressionParameter)
"""
def execute(self, instance):
x = self.evaluate_index(0)
instance.set_position(x, instance.y, True)
class Action18(Action):
"""
Position->Set Vertical Position
Parameters:
0: Set Vertical Position (EXPRESSION, ExpressionParameter)
"""
def execute(self, instance):
y = self.evaluate_index(0)
instance.set_position(instance.x, y, True)
class Action19(Action):
"""
Size->Set Horizontal Size
Parameters:
0: Set Horizontal Size (EXPRESSION, ExpressionParameter)
"""
def execute(self, instance):
width = self.evaluate_index(0)
instance.objectPlayer.resize(width = width)
class Action20(Action):
"""
Size->Set Vertical Size
Parameters:
0: Set Vertical Size (EXPRESSION, ExpressionParameter)
"""
def execute(self, instance):
height = self.evaluate_index(0)
instance.objectPlayer.resize(height = height)
# Conditions
class Condition0(Condition):
"""
Compare to chrono
Parameters:
0: Compare to chrono (CMPTIME, CompareTime)
"""
def created(self):
parameter = self.get_parameter(0)
self.compareValue = parameter.comparison
self.seconds = parameter.timer / 1000.0
def check(self, instance):
return self.compare(instance.objectPlayer.currentTime, self.seconds)
class Condition1(Condition):
"""
New clock second ?
"""
def created(self):
self.add_handlers(second_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition2(Condition):
"""
New clock minute ?
"""
def created(self):
self.add_handlers(minute_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition3(Condition):
"""
New clock hour ?
"""
def created(self):
self.add_handlers(hour_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition4(Condition):
"""
New clock day ?
"""
def created(self):
self.add_handlers(day_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition5(Condition):
"""
New clock month ?
"""
def created(self):
self.add_handlers(month_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition6(Condition):
"""
New clock year ?
"""
def created(self):
self.add_handlers(year_changed = self.changed)
def changed(self):
self.generate()
def check(self, instance):
return self.isTriggered
class Condition7(Condition):
"""
Compare to count down
Parameters:
0: Compare to count down (CMPTIME, CompareTime)
"""
def created(self):
parameter = self.get_parameter(0)
self.compareValue = parameter.comparison
self.seconds = parameter.timer / 1000.0
def check(self, instance):
return self.compare(instance.objectPlayer.currentTime, self.seconds)
class Condition8(Condition):
"""
Is visible ?
"""
def check(self, instance):
return instance.visible
# Expressions
class Expression0(Expression):
"""
Retrieve hundredths of seconds
Return type: Int
"""
def get(self, instance):
val = instance.objectPlayer.currentTime
return int((val - int(val)) * 100)
class Expression1(Expression):
    """
    Retrieve seconds
    Return type: Int
    """
    def get(self, instance):
        # Fixed: time.struct_time exposes tm_sec -- the original read the
        # nonexistent tm_second attribute, raising AttributeError at
        # runtime (Expression2/3 correctly use tm_min/tm_hour).
        return instance.objectPlayer.get_struct_time().tm_sec
class Expression2(Expression):
"""
Retrieve minutes
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_min
class Expression3(Expression):
"""
Retrieve hours
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_hour
class Expression4(Expression):
"""
Retrieve day of week
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_wday
class Expression5(Expression):
"""
Retrieve day of month
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_mday
class Expression6(Expression):
"""
Retrieve month
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_mon
class Expression7(Expression):
"""
Retrieve year
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_struct_time().tm_year
class Expression8(Expression):
"""
Retrieve Stop watch time
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.currentTime
class Expression9(Expression):
"""
Retrieve analog clock data->X coordinate of clock centre
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_center(True)[0]
class Expression10(Expression):
"""
Retrieve analog clock data->Y coordinate of clock centre
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.get_center(True)[1]
def get_hand(instance, angle, is_hour = False):
    # Window-space coordinates of a clock hand's end point at `angle`
    # (radians); the y axis is flipped because window y grows downward.
    # The hour hand is drawn shorter (radius / 1.5), matching draw().
    # NOTE(review): Expression11/12 (hour hand) call this without
    # is_hour=True, so the reported hour-hand end uses the full radius
    # while draw() uses the shortened one -- verify intended.
    radius = instance.objectPlayer.get_radius()
    if is_hour:
        radius /= 1.5
    mid_x, mid_y = instance.objectPlayer.get_center(True)
    x_value = math.cos(angle)
    y_value = -math.sin(angle)
    return (mid_x + x_value * radius, mid_y + y_value * radius)
class Expression11(Expression):
"""
Retrieve analog clock data->X coordinate of hour hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_hour_angle(hour + minute / 60.0)
return get_hand(instance, angle)[0]
class Expression12(Expression):
"""
Retrieve analog clock data->Y coordinate of hour hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_hour_angle(hour + minute / 60.0)
return get_hand(instance, angle)[1]
class Expression13(Expression):
"""
Retrieve analog clock data->X coordinate of minute hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_second_minute_angle(minute + second / 60.0)
return get_hand(instance, angle)[0]
class Expression14(Expression):
"""
Retrieve analog clock data->Y coordinate of minute hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_second_minute_angle(minute + second / 60.0)
return get_hand(instance, angle)[1]
class Expression15(Expression):
"""
Retrieve analog clock data->X coordinate of second hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_second_minute_angle(second)
return get_hand(instance, angle)[0]
class Expression16(Expression):
"""
Retrieve analog clock data->Y coordinate of second hand's end
Return type: Int
"""
def get(self, instance):
second, minute, hour = instance.objectPlayer.get_time()
angle = get_second_minute_angle(second)
return get_hand(instance, angle)[1]
class Expression17(Expression):
"""
Retrieve Count down time
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.currentTime
class Expression18(Expression):
"""
X Position of Clock
Return type: Int
"""
def get(self, instance):
return instance.x
class Expression19(Expression):
"""
Y Position of Clock
Return type: Int
"""
def get(self, instance):
return instance.y
class Expression20(Expression):
"""
X Size of Clock
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.width
class Expression21(Expression):
"""
Y Size of Clock
Return type: Int
"""
def get(self, instance):
return instance.objectPlayer.height
from mmfparser.data.font import LogFont
ANALOG_CLOCK = 0
DIGITAL_CLOCK = 1
INVISIBLE = 2
CALENDAR = 3
CLOCK = 0
STOPWATCH = 1
COUNTDOWN = 2
SHORTDATE = 0
LONGDATE = 1
FIXEDDATE = 2
import datetime
import calendar
import time
import math
from pyglet.gl import (glTranslatef, glPushMatrix, glPopMatrix, glBegin,
glEnd, glVertex2f, glColor3ub, GL_LINES, glLineWidth, glEnable,
glDisable, GL_LINE_SMOOTH, GL_POINT_SMOOTH, GL_LINE_LOOP)
from pyglet.graphics import vertex_list
from mmfparser.player.common import make_ellipse_vertices
def get_pointer_angle(value):
    """Convert a fraction of a full turn (0.0-1.0) into radians."""
    degrees = 360.0 * value
    return math.radians(degrees)

def get_mark_angle(i):
    """Radian angle of face mark number ``i`` (marks are 1-based)."""
    fraction = (i + 1) / 12.0
    return get_pointer_angle(fraction)

def get_hour_angle(i):
    """Clockwise hour-hand angle for hour ``i``, 12 o'clock at the top."""
    fraction = i / 12.0 - 0.25
    return -get_pointer_angle(fraction)

def get_second_minute_angle(i):
    """Clockwise minute/second-hand angle for ``i`` out of 60."""
    fraction = i / 60.0 - 0.25
    return -get_pointer_angle(fraction)
roman_characters = [ "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX",
"X", "XI", "XII"]
def display_pointer(angle, color, radius):
    # Draw one clock hand as a line from the origin out to `radius` along
    # `angle` (radians, standard math orientation).  Assumes the GL
    # modelview matrix has already been translated to the clock centre.
    x_value = math.cos(angle)
    y_value = math.sin(angle)
    glBegin(GL_LINES)
    glColor3ub(*color)
    glVertex2f(0.0, 0.0)
    glVertex2f(x_value * radius, y_value * radius)
    glEnd()
class DefaultObject(HiddenObject):
clockLabels = None
border = None
formatString = None
label = None
text = None
counting = None
currentTime = 0
oldTime = None
def created(self, data):
self.width = data.readShort(True)
self.height = data.readShort(True)
data.skipBytes(4 * 16)
displayType = self.displayType = data.readShort(True)
self.clockMode = data.readShort(True)
self.drawBorder = data.readShort() != 0
self.drawLines = data.readShort() != 0
analogClockMarkerType = data.readShort(True)
font = LogFont(data)
if font.height in (8, -8) and font.faceName.lower() == 'system':
font.height = -13
font.weight = 700
color = self.color = data.readColor()
data.skipBytes(40)
self.displaySeconds = data.readShort(True) != 0
self.secondsColor = data.readColor()
self.displayMinutes = data.readShort(True) != 0
self.minutesColor = data.readColor()
self.displayHours = data.readShort(True) != 0
self.hoursColor = data.readColor()
digitalClockType = data.readShort(True)
calenderType = data.readShort(True)
calenderFormat = data.readShort(True)
data.skipBytes(40)
if self.clockMode == COUNTDOWN:
countHours = data.readShort(True)
countMinutes = data.readShort(True)
countSeconds = data.readShort(True)
self.currentTime = (countSeconds + countMinutes * 60 +
countHours * 60 * 60)
elif self.clockMode == CLOCK:
self.currentTime = time.time()
minWidth = data.readShort(True)
minHeight = data.readShort(True)
if displayType == ANALOG_CLOCK:
if analogClockMarkerType != 2:
self.clockLabels = []
for i in xrange(1, 13):
if analogClockMarkerType == 0:
text = str(i)
else:
text = roman_characters[i-1]
label = self.create_label(font, text, color)
label.width = label.content_width
label.height = label.content_height
label.y = label.content_height / 2
label.x = -label.content_width / 2
self.clockLabels.append(label)
self.make_border()
elif displayType == DIGITAL_CLOCK:
if digitalClockType == 0:
formatString = '%(hour)s:%(minute)s'
elif digitalClockType == 1:
formatString = '%(hour)s:%(minute)s:%(second)s'
elif digitalClockType == 2:
formatString = '%(full_hour)s:%(minute)s'
elif digitalClockType == 3:
formatString = '%(full_hour)s:%(minute)s:%(second)s'
self.formatString = formatString
elif displayType == CALENDAR:
if calenderType == SHORTDATE:
formatString = '%d-%m-%Y'
elif calenderType == LONGDATE:
formatString = '%d. %B %Y'
else:
if calenderFormat == 0:
formatString = '%d/%m/%y'
elif calenderFormat == 1:
formatString = '%d %B %Y'
elif calenderFormat == 2:
formatString = '%d %B, %Y'
elif calenderFormat == 3:
formatString = '%B %d, %Y'
elif calenderFormat == 4:
formatString = '%d-%b-%y'
elif calenderFormat == 5:
formatString = '%B, %y'
elif calenderFormat == 6:
formatString = '%b-%Y'
self.formatString = formatString
if displayType in (DIGITAL_CLOCK, CALENDAR):
label = self.label = self.create_label(font, '', color,
multiline = True)
label.height = self.height
label.width = self.width
label.content_valign = 'center'
label.set_style('align', 'center')
label.x = label.y = 0
glEnable(GL_LINE_SMOOTH)
glEnable(GL_POINT_SMOOTH)
self.updateEnabled = True
def update(self):
if (self.counting is not None or self.clockMode == CLOCK or
self.displayType == CALENDAR):
self.currentTime += self.player.sinceLast * (self.counting or 1)
self.currentTime = max(0, self.currentTime)
val = self.get_struct_time()
old_val = self.oldTime
if old_val is not None:
if val.tm_sec != old_val.tm_sec:
self.fire_handler('second_changed')
if val.tm_hour != old_val.tm_hour:
self.fire_handler('hour_changed')
if val.tm_yday != old_val.tm_yday:
self.fire_handler('day_changed')
if val.tm_mon != old_val.tm_mon:
self.fire_handler('month_changed')
if val.tm_year != old_val.tm_year:
self.fire_handler('year_changed')
self.oldTime = val
def make_border(self):
if self.drawBorder:
if self.border is not None:
self.border.delete()
radius = min(self.width, self.height) / 2.0 - 20
vertices = []
for item in make_ellipse_vertices(radius * 2, radius * 2):
vertices += item
self.border = vertex_list(len(vertices) / 2,
('v2f', vertices),
('c3B', self.color * (len(vertices) / 2)))
def get_center(self, not_gl = False):
mid_x = self.width / 2.0
mid_y = self.height / 2.0
if not_gl:
return (self.parent.x + mid_x, self.parent.y + mid_y)
else:
return (self.x + mid_x, self.y - mid_y)
def get_radius(self):
return min(self.width, self.height) / 2.0 - 20
def draw(self):
mid_x, mid_y = self.get_center()
glLineWidth(2)
if self.displayType == ANALOG_CLOCK:
radius = min(self.width, self.height) / 2.0 - 10
radius_end = radius - 10
glPushMatrix()
glTranslatef(mid_x, mid_y, 0)
if self.clockLabels is not None or self.drawLines:
for i in xrange(0, 12):
glPushMatrix()
angle = get_mark_angle(i + 1)
x_value = math.cos(angle)
y_value = math.sin(angle)
if self.drawLines:
glBegin(GL_LINES)
glColor3ub(*self.color)
glVertex2f(x_value * radius_end, y_value * radius_end)
glVertex2f(x_value * (radius - 20),
y_value * (radius - 20))
glEnd()
if self.clockLabels is not None:
x = x_value * radius
y = y_value * radius
glTranslatef(x, y, 0)
self.clockLabels[-i].draw()
glPopMatrix()
# second pointer
second, minute, hour = self.get_time()
if self.displaySeconds:
display_pointer(get_second_minute_angle(
second),
self.secondsColor, radius_end)
if self.displayHours:
display_pointer(get_hour_angle(hour + minute / 60.0),
self.hoursColor, radius_end / 1.5)
if self.displayMinutes:
display_pointer(get_second_minute_angle(
minute + second / 60.0), self.minutesColor,
radius_end)
glPopMatrix()
if self.border is not None:
glPushMatrix()
glTranslatef(self.x + 20, self.y - 20, 0.0)
self.border.draw(GL_LINE_LOOP)
glPopMatrix()
elif self.displayType in (DIGITAL_CLOCK, CALENDAR):
text = self.get_text()
if text != self.text:
self.label.text = text
self.text = text
glPushMatrix()
glTranslatef(self.x, self.y, 0)
self.label.draw()
if self.displayType == DIGITAL_CLOCK and self.drawBorder:
glBegin(GL_LINE_LOOP)
glColor3ub(*self.color)
glVertex2f(0, 0)
glVertex2f(self.width, 0)
glVertex2f(self.width, -self.height)
glVertex2f(0, -self.height)
glEnd()
glPopMatrix()
def get_text(self):
if self.displayType == CALENDAR:
return time.strftime(self.formatString,
self.get_struct_time())
else:
second, minute, full_hour = self.get_time(False)
hour = full_hour % 12
return self.formatString % {
'second' : '%02d' % second,
'minute' : '%02d' % minute,
'full_hour' : '%02d' % full_hour,
'hour' : '%02d' % hour
}
def set_struct_time(self, value):
if self.displayType != CALENDAR and self.clockMode != CLOCK:
self.currentTime = calender.timegm(value)
else:
self.currentTime = time.mktime(value)
def get_struct_time(self):
if self.displayType != CALENDAR and self.clockMode != CLOCK:
return time.gmtime(self.currentTime)
else:
return time.localtime(self.currentTime)
def get_time(self, micro_precision = True):
val = self.get_struct_time()
second = val.tm_sec + self.currentTime - int(self.currentTime)
return (second, val.tm_min, val.tm_hour)
def resize(self, width = None, height = None):
self.width = width or self.width
self.height = height or self.height
self.make_border()
class kcclock(UserExtension):
objectPlayer = DefaultObject
actions = {
0 : Action0,
1 : Action1,
2 : Action2,
3 : Action3,
4 : Action4,
5 : Action5,
6 : Action6,
7 : Action7,
8 : Action8,
9 : Action9,
10 : Action10,
11 : Action11,
12 : Action12,
13 : Action13,
14 : Action14,
15 : Action15,
16 : Action16,
17 : Action17,
18 : Action18,
19 : Action19,
20 : Action20,
}
conditions = {
0 : Condition0,
1 : Condition1,
2 : Condition2,
3 : Condition3,
4 : Condition4,
5 : Condition5,
6 : Condition6,
7 : Condition7,
8 : Condition8,
}
expressions = {
0 : Expression0,
1 : Expression1,
2 : Expression2,
3 : Expression3,
4 : Expression4,
5 : Expression5,
6 : Expression6,
7 : Expression7,
8 : Expression8,
9 : Expression9,
10 : Expression10,
11 : Expression11,
12 : Expression12,
13 : Expression13,
14 : Expression14,
15 : Expression15,
16 : Expression16,
17 : Expression17,
18 : Expression18,
19 : Expression19,
20 : Expression20,
21 : Expression21,
}
extension = kcclock()
def get_extension():
return extension
|
gpl-3.0
| 847,755,223,363,182,600
| 25.876393
| 80
| 0.580541
| false
| 3.927598
| false
| false
| false
|
aarontuor/cpp
|
safekit/graph_training_utils.py
|
1
|
5196
|
"""
Utilities for training the parameters of tensorflow computational graphs.
"""
import tensorflow as tf
import sys
import math
OPTIMIZERS = {'grad': tf.train.GradientDescentOptimizer, 'adam': tf.train.AdamOptimizer}
class EarlyStop:
    """Callable early-stopping criterion driven by a bad-count limit.

    Calling an instance returns True while training should continue and
    False once the data stream is exhausted (mat is None), the loss
    diverges (nan/inf), or the loss has failed to improve for `badlimit`
    consecutive steps.
    """

    def __init__(self, badlimit=20):
        """
        :param badlimit: Number of consecutive non-improving steps
            tolerated before stopping.
        """
        self.badlimit = badlimit
        self.badcount = 0
        self.current_loss = sys.float_info.max

    def __call__(self, mat, loss):
        """Decide whether training should continue.

        On the first call compare against sys.float_info.max.

        :param mat: Current batch of features (None once data runs out).
        :param loss: Loss value for the current training step.
        :return: True to keep training, False to stop.
        """
        keep_going = True
        if mat is None:
            sys.stderr.write('Done Training. End of data stream.')
            keep_going = False
        elif math.isnan(loss) or math.isinf(loss):
            sys.stderr.write('Exiting due divergence: %s\n\n' % loss)
            keep_going = False
        elif loss > self.current_loss:
            self.badcount += 1
            if self.badcount >= self.badlimit:
                sys.stderr.write('Exiting. Exceeded max bad count.')
                keep_going = False
        else:
            # Improvement (or tie): reset the streak of bad steps.
            self.badcount = 0
        # Track the latest loss regardless of the decision, matching the
        # original behavior.
        self.current_loss = loss
        return keep_going
class ModelRunner:
    """
    A class for gradient descent training tensorflow models.
    """
    def __init__(self, loss, ph_dict, learnrate=0.01, opt='adam', debug=False):
        """
        :param loss: The objective function for optimization strategy.
        :param ph_dict: A dictionary of names (str) to tensorflow placeholders.
        :param learnrate: The step size for gradient descent.
        :param opt: A tensorflow op implementing the gradient descent optimization strategy.
        :param debug: Whether or not to print debugging info.
        """
        self.loss = loss
        self.ph_dict = ph_dict
        self.debug = debug
        # Look up the optimizer class by name ('grad' or 'adam') and build
        # the parameter-update op.
        self.train_op = OPTIMIZERS[opt](learnrate).minimize(loss)
        # NOTE(review): tf.initialize_all_variables() is the pre-TF-1.0 API
        # (superseded by tf.global_variables_initializer()); kept as-is since
        # this module clearly targets that TF generation — confirm before
        # upgrading TensorFlow.
        self.init = tf.initialize_all_variables()
        # Each ModelRunner owns its own session, initialized immediately.
        self.sess = tf.Session()
        self.sess.run(self.init)
    def train_step(self, datadict):
        """
        Performs a training step of gradient descent with given optimization strategy.
        :param datadict: A dictionary of names (str) matching names in ph_dict to numpy matrices for this mini-batch.
        """
        self.sess.run(self.train_op, feed_dict=get_feed_dict(datadict, self.ph_dict, debug=self.debug))
    def eval(self, datadict, eval_tensors):
        """
        Evaluates tensors without effecting parameters of model.
        :param datadict: A dictionary of names (str) matching names in ph_dict to numpy matrices for this mini-batch.
        :param eval_tensors: Tensors from computational graph to evaluate as numpy matrices.
        :return: A list of evaluated tensors as numpy matrices.
        """
        # train=0 switches dropout/batch-norm placeholders to inference mode
        # (see get_feed_dict); no train_op is run, so parameters stay fixed.
        return self.sess.run(eval_tensors, feed_dict=get_feed_dict(datadict, self.ph_dict, train=0, debug=self.debug))
def get_feed_dict(datadict, ph_dict, train=1, debug=False):
    """
    Function for pairing placeholders of a tensorflow computational graph with numpy arrays.
    :param datadict: A dictionary with keys matching keys in ph_dict, and values are numpy arrays.
    :param ph_dict: A dictionary where the keys match keys in datadict and values are placeholder tensors.
    :param train: {1,0}. Different values get fed to placeholders for dropout probability, and batch norm statistics
                  depending on if model is training or evaluating.
    :param debug: (boolean) Whether or not to print dimensions of contents of placeholderdict, and datadict.
    :return: A feed dictionary with keys of placeholder tensors and values of numpy matrices.
    """
    fd = {ph_dict[key]: datadict[key] for key in ph_dict}
    # Graph collections registered elsewhere: each 'dropout_prob' entry is
    # indexable as (placeholder, keep_probability) — see prob[0]/prob[1]
    # below; 'bn_deciders' holds placeholders fed the train/eval flag.
    dropouts = tf.get_collection('dropout_prob')
    bn_deciders = tf.get_collection('bn_deciders')
    if dropouts:
        for prob in dropouts:
            if train == 1:
                # Training: feed the configured keep-probability.
                fd[prob[0]] = prob[1]
            else:
                # Evaluation: disable dropout by keeping every unit.
                fd[prob[0]] = 1.0
    if bn_deciders:
        # Batch-norm deciders expect the flag wrapped in a list.
        fd.update({decider: [train] for decider in bn_deciders})
    if debug:
        # Print placeholder vs. data shapes/dtypes side by side for each key.
        for desc in ph_dict:
            print('%s\n\tph: %s\t%s\tdt: %s\t%s' % (desc,
                                                    ph_dict[desc].get_shape().as_list(),
                                                    ph_dict[desc].dtype,
                                                    datadict[desc].shape,
                                                    datadict[desc].dtype))
        print(fd.keys())
    return fd
|
mit
| 1,544,231,019,683,145,700
| 39.913386
| 118
| 0.613934
| false
| 4.210697
| false
| false
| false
|
ThomasMcVay/MediaApp
|
AppCoreX/__init__.py
|
1
|
1275
|
#===============================================================================
# @Author: Madison Aster
# @ModuleDescription:
# @License:
# MediaApp Library - Python Package framework for developing robust Media
# Applications with Qt Library
# Copyright (C) 2013 Madison Aster
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation;
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See LICENSE in the root directory of this library for copy of
# GNU Lesser General Public License and other license details.
#===============================================================================
from .Core import *
|
lgpl-2.1
| 5,790,089,494,029,108,000
| 47.115385
| 83
| 0.614902
| false
| 4.442509
| false
| false
| false
|
xiaxia47/Python-learning
|
spiders/downloadpic.py
|
1
|
1376
|
import os
from urllib.request import urlretrieve
from urllib.request import urlopen
from bs4 import BeautifulSoup
def getAbsoluteUrl(baseUrl, source):
    """Resolve *source* (an href/src value) to an absolute URL under *baseUrl*.

    Normalizes away a leading "http://www." or "www." prefix, treats other
    strings as paths relative to *baseUrl*, and returns None for URLs that
    do not belong to *baseUrl*'s site (external links are skipped).

    :param baseUrl: site root, e.g. "http://pythonscraping.com"
    :param source: raw URL or path scraped from the page
    :return: absolute URL string, or None if the URL is off-site
    """
    if source.startswith("http://www."):
        # Strip "http://www." and re-prefix the bare scheme.
        url = "http://" + source[11:]
    elif source.startswith("http://"):
        url = source
    elif source.startswith("www."):
        # BUG FIX: the original assigned source[4:] and then immediately
        # overwrote it with "http://" + source, keeping the "www." prefix —
        # which made the `baseUrl not in url` check below reject every
        # www-prefixed same-site URL. Strip the prefix instead.
        url = "http://" + source[4:]
    else:
        # Relative path: anchor it to the site root.
        url = baseUrl + "/" + source
    if baseUrl not in url:
        # Off-site URL — caller skips it.
        return None
    return url
def getDownloadPath(baseUrl, absoluteUrl, downloadDirectory):
    """Map *absoluteUrl* to a local file path beneath *downloadDirectory*.

    The site's path structure is mirrored locally: the "www." marker and the
    *baseUrl* prefix are stripped from the URL and the remainder is appended
    to *downloadDirectory*. Any missing parent directories are created.

    :return: the local path the file should be written to
    """
    relative = absoluteUrl.replace("www.", "").replace(baseUrl, "")
    target = downloadDirectory + relative
    parent = os.path.dirname(target)
    if not os.path.exists(parent):
        os.makedirs(parent)
    return target
# Where downloaded assets are written; the site's URL structure is mirrored
# beneath this directory (see getDownloadPath).
downloadDirectory = 'D:/Python Learning/download/'
baseUrl = "http://pythonscraping.com"
# Fetch the landing page and parse it.
html = urlopen("http://www.pythonscraping.com")
bsObj = BeautifulSoup(html,"html.parser")
#imageLocation = bsObj.find("a", {"id": "logo"}).find("img")["src"]
#urlretrieve(imageLocation, "logo.jpg")
# Every tag carrying a src attribute (images, scripts, ...) is a download
# candidate.
downloadList = bsObj.findAll(src=True)
for download in downloadList:
    fileUrl = getAbsoluteUrl(baseUrl, download["src"])
    if fileUrl is not None:
        # Currently only prints the resolved URL; the actual download
        # (urlretrieve) is left commented out.
        print(fileUrl)
        #urlretrieve(fileUrl, getDownloadPath(baseUrl, fileUrl, downloadDirectory))
|
gpl-3.0
| 7,759,273,243,587,410,000
| 31.560976
| 75
| 0.65625
| false
| 3.708895
| false
| false
| false
|
rshk/python-pcapng
|
pcapng/flags.py
|
1
|
5513
|
"""
Module to wrap an integer in bitwise flag/field accessors.
"""
from collections import OrderedDict
from collections.abc import Iterable
from pcapng._compat import namedtuple
class FlagBase(object):
    """\
    Common machinery for flag types used inside a Flags object.

    Handles all of the masking/shifting so that subclasses only need to
    translate between Python values and raw bits.
    """

    __slots__ = [
        "owner",
        "offset",
        "size",
        "extra",
        "mask",
    ]

    def __init__(self, owner, offset, size, extra=None):
        # Validate the requested bit range against the owner's word size.
        if size < 1:
            raise TypeError("Flag must be at least 1 bit wide")
        if size > owner._nbits:
            raise TypeError("Flag must fit into owner size")
        self.owner = owner
        self.offset = offset
        self.size = size
        self.extra = extra
        width_mask = (1 << size) - 1
        # Mask selecting exactly this field's bits inside the owner's value.
        self.mask = width_mask << offset

    def get_bits(self):
        """Return this field's raw bits, right-aligned."""
        return (self.owner._value & self.mask) >> self.offset

    def set_bits(self, val):
        """Store *val* (truncated to the field width) into the owner."""
        clipped = val & ((1 << self.size) - 1)
        cleared = self.owner._value & ~self.mask
        self.owner._value = cleared | (clipped << self.offset)
class FlagBool(FlagBase):
    """A single on/off flag occupying exactly one bit."""

    def __init__(self, owner, offset, size, extra=None):
        if size != 1:
            raise TypeError(
                "{cls} can only be 1 bit in size".format(cls=self.__class__.__name__)
            )
        super(FlagBool, self).__init__(owner, offset, size)

    def get(self):
        """Return the flag as a bool."""
        return self.get_bits() != 0

    def set(self, val):
        """Set the flag from any truthy/falsy value."""
        self.set_bits(1 if val else 0)
class FlagUInt(FlagBase):
    """\
    Unsigned integer of a fixed bit width packed into a larger bitfield.
    """

    def get(self):
        """Return the field's current unsigned-integer value."""
        return self.get_bits()

    def set(self, val):
        """Store *val* (truncated to the field width) into the bitfield."""
        self.set_bits(val)
class FlagEnum(FlagBase):
    """\
    A closed set of named values packed into part of a larger bitfield.

    The iterable passed as *extra* supplies the value for each bit pattern,
    in order.
    """

    def __init__(self, owner, offset, size, extra=None):
        if not isinstance(extra, Iterable):
            raise TypeError(
                "{cls} needs an iterable of values".format(cls=self.__class__.__name__)
            )
        values = list(extra)
        # The field must be wide enough to address every supplied value.
        if len(values) > 2 ** size:
            raise TypeError(
                "{cls} iterable has too many values (got {got}, "
                "{size} bits only address {max})".format(
                    cls=self.__class__.__name__,
                    got=len(values),
                    size=size,
                    max=2 ** size,
                )
            )
        super(FlagEnum, self).__init__(owner, offset, size, values)

    def get(self):
        """Return the named value for the current bits, or a placeholder
        string when the bits address past the end of the value list."""
        idx = self.get_bits()
        if idx < len(self.extra):
            return self.extra[idx]
        return "[invalid value]"

    def set(self, val):
        """Set by named value, or by raw integer for out-of-list patterns."""
        if val in self.extra:
            self.set_bits(self.extra.index(val))
            return
        if isinstance(val, int):
            self.set_bits(val)
            return
        raise TypeError(
            "Invalid value {val} for {cls}".format(
                val=val, cls=self.__class__.__name__
            )
        )
# Class representing a single flag schema for FlagWord.
# 'nbits' defaults to 1, and 'extra' defaults to None.
# Fields: name (attribute exposed on the FlagWord), ftype (a FlagBase
# subclass), nbits (field width in bits), extra (per-type payload, e.g. the
# value list consumed by FlagEnum).
FlagField = namedtuple(
    "FlagField", ("name", "ftype", "nbits", "extra"), defaults=(1, None)
)
class FlagWord(object):
    """\
    Class to wrap an integer in bitwise flag/field accessors.

    Fields declared in the schema become attributes of the instance:
    reading one decodes the relevant bits via the field's get(), assigning
    one encodes via set(). The raw integer is recoverable with int(self).
    """

    # __slots__ is load-bearing here: because instances have no __dict__,
    # object.__setattr__ succeeds only for these three names, which lets
    # __setattr__ below distinguish internal state from schema fields.
    __slots__ = [
        "_nbits",
        "_value",
        "_schema",
    ]

    def __init__(self, schema, nbits=32, initial=0):
        """
        :param schema:
            A list of FlagField objects representing the values to be packed
            into this object, in order from LSB to MSB of the underlying int
        :param nbits:
            An integer representing the total number of bits used for flags
        :param initial:
            The initial integer value of the flags field
        """
        self._nbits = nbits
        self._value = initial
        # Ordered so iteration (e.g. in __repr__) follows schema order.
        self._schema = OrderedDict()
        tot_bits = sum([item.nbits for item in schema])
        if tot_bits > nbits:
            raise TypeError(
                "Too many fields for {nbits}-bit field "
                "(schema defines {tot} bits)".format(nbits=nbits, tot=tot_bits)
            )
        # Assign each field its bit offset, packing from the LSB upward.
        bitn = 0
        for item in schema:
            if not isinstance(item, FlagField):
                raise TypeError("Schema must be composed of FlagField objects")
            if not issubclass(item.ftype, FlagBase):
                raise TypeError("Expected FlagBase, got {}".format(item.ftype))
            self._schema[item.name] = item.ftype(self, bitn, item.nbits, item.extra)
            bitn += item.nbits

    def __int__(self):
        # Expose the raw packed integer.
        return self._value

    def __repr__(self):
        rv = "<{0} (value={1})".format(self.__class__.__name__, self._value)
        for k, v in self._schema.items():
            rv += " {0}={1}".format(k, v.get())
        return rv + ">"

    def __getattr__(self, name):
        # Only called when normal lookup fails (i.e. not a __slots__ name),
        # so this resolves schema field reads.
        try:
            v = self._schema[name]
        except KeyError:
            raise AttributeError(name)
        return v.get()

    def __setattr__(self, name, val):
        # First try a real attribute (__slots__ names); only when that fails
        # fall through to treating the assignment as a schema field write.
        try:
            return object.__setattr__(self, name, val)
        except AttributeError:
            pass
        try:
            v = self._schema[name]
        except KeyError:
            raise AttributeError(name)
        return v.set(val)
|
apache-2.0
| 8,224,031,001,209,343,000
| 27.127551
| 87
| 0.534736
| false
| 4.170197
| false
| false
| false
|
roaet/quark
|
quark/db/migration/alembic/versions/79b768afed65_rename_tenant_id_indexes.py
|
1
|
6050
|
"""rename tenant id indexes
Revision ID: 79b768afed65
Revises: 271cce54e15b
Create Date: 2015-05-20 21:39:19.348638
"""
# revision identifiers, used by Alembic.
revision = '79b768afed65'
down_revision = '271cce54e15b'
from alembic import op
import sqlalchemy as sa
from neutron.api.v2 import attributes as attr
_INSPECTOR = None
def get_inspector():
    """Return the cached SQLAlchemy Inspector, building it on first use."""
    global _INSPECTOR
    if not _INSPECTOR:
        bind = op.get_bind()
        _INSPECTOR = sa.engine.reflection.Inspector.from_engine(bind)
    return _INSPECTOR
def get_tables():
    """Return the names of every table that may carry a tenant_id column
    (quark tables, core neutron tables, and lbaas tables)."""
    return [
        'quark_tags',
        'quark_routes',
        'quark_dns_nameservers',
        'quark_security_group_rules',
        'quark_security_groups',
        'quark_ports',
        'quark_mac_addresses',
        'quark_ip_policy',
        'quark_subnets',
        'quark_networks',
        'quark_async_transactions',
        'quotas',
        'address_scopes',
        'floatingips',
        'meteringlabels',
        'networkrbacs',
        'networks',
        'ports',
        'qos_policies',
        'qospolicyrbacs',
        'reservations',
        'routers',
        'securitygrouprules',
        'securitygroups',
        'subnetpools',
        'subnets',
        'trunks',
        'auto_allocated_topologies',
        'default_security_group',
        'ha_router_networks',
        'quotausages',
        'vips',
        'members',
        'pools',
        'healthmonitors',
        'lbaas_members',
        'lbaas_healthmonitors',
        'lbaas_loadbalancers',
        'lbaas_pools',
        'lbaas_l7rules',
        'lbaas_l7policies',
        'lbaas_listeners',
    ]
def get_columns(table):
    """Return the reflected column descriptions for *table*."""
    return get_inspector().get_columns(table)
def get_data():
    """Collect (table, column) pairs for every table with a tenant_id column.

    Tables that cannot be reflected (missing from this deployment) are
    silently skipped.
    """
    pairs = []
    for table in get_tables():
        try:
            columns = get_columns(table)
        except sa.exc.NoSuchTableError:
            # Not every listed table exists in every deployment.
            continue
        pairs.extend(
            (table, column) for column in columns if column['name'] == 'tenant_id'
        )
    return pairs
def alter_column(table, column):
    """Rename *table*'s tenant_id column to project_id.

    Preserves the reflected nullability and pins the column type to the
    standard tenant-id string length.
    """
    coltype = sa.String(attr.TENANT_ID_MAX_LEN)
    op.alter_column(
        table_name=table,
        column_name='tenant_id',
        new_column_name='project_id',
        type_=coltype,
        existing_nullable=column['nullable']
    )
def recreate_index(index, table_name):
    """Drop *index* and recreate it on project_id under a 'project'-based name.

    Helper not referenced by upgrade()/downgrade() in this revision.
    """
    renamed = index['name'].replace('tenant', 'project')
    op.drop_index(op.f(index['name']), table_name)
    op.create_index(renamed, table_name, ['project_id'])
def upgrade():
    # Rename every tenant_id column found in the schema to project_id.
    data = get_data()
    for table, column in data:
        alter_column(table, column)
    # Drop the old tenant_id-named indexes before recreating them under
    # project_id-based names below.
    op.drop_index(op.f('ix_quark_networks_tenant_id'),
                  table_name='quark_networks')
    op.drop_index(op.f('ix_quark_networks_tenant_id_name'),
                  table_name='quark_networks')
    op.drop_index(op.f('ix_quark_subnets_tenant_id'),
                  table_name='quark_subnets')
    op.drop_index(op.f('ix_quark_subnets_network_id_tenant_id'),
                  table_name='quark_subnets')
    op.drop_index(op.f('ix_quark_ports_tenant_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quark_ports_network_id_tenant_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quark_ports_name_tenant_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quotas_tenant_id'),
                  table_name='quotas')
    # Recreate the equivalent indexes on the renamed project_id column(s).
    op.create_index(op.f('ix_quark_networks_project_id'),
                    'quark_networks',
                    ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_quark_networks_project_id_name'),
                    'quark_networks',
                    ['project_id', 'name'],
                    unique=False)
    op.create_index(op.f('ix_quark_subnets_project_id'),
                    'quark_subnets',
                    ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_quark_subnets_network_id_project_id'),
                    'quark_subnets',
                    ['network_id', 'project_id'],
                    unique=False)
    op.create_index(op.f('ix_quark_ports_project_id'),
                    'quark_ports',
                    ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_quark_ports_network_id_project_id'),
                    'quark_ports',
                    ['network_id', 'project_id'],
                    unique=False)
    op.create_index(op.f('ix_quark_ports_name_project_id'),
                    'quark_ports',
                    ['name', 'project_id'],
                    unique=False)
    op.create_index(op.f('ix_quotas_project_id'),
                    'quotas',
                    ['project_id'],
                    unique=False)
def downgrade():
    """Drop the project_id-named indexes created by upgrade().

    NOTE(review): this does not recreate the original tenant_id indexes nor
    rename the project_id columns back to tenant_id — confirm whether a full
    revert is required for this revision.
    """
    op.drop_index(op.f('ix_quark_networks_project_id'),
                  table_name='quark_networks')
    op.drop_index(op.f('ix_quark_networks_project_id_name'),
                  table_name='quark_networks')
    op.drop_index(op.f('ix_quark_subnets_project_id'),
                  table_name='quark_subnets')
    op.drop_index(op.f('ix_quark_subnets_network_id_project_id'),
                  table_name='quark_subnets')
    # BUG FIX: was table_name='ports'; the ix_quark_ports_* indexes are
    # created on 'quark_ports' (see upgrade()), so dropping against 'ports'
    # targets the wrong table.
    op.drop_index(op.f('ix_quark_ports_project_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quark_ports_network_id_project_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quark_ports_name_project_id'),
                  table_name='quark_ports')
    op.drop_index(op.f('ix_quotas_project_id'),
                  table_name='quotas')
|
apache-2.0
| -3,824,265,833,923,501,600
| 27.403756
| 69
| 0.546612
| false
| 3.519488
| false
| false
| false
|
pykiki/PyKI
|
tests/test_check_key_cert.py
|
1
|
2012
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from OpenSSL import crypto, SSL
from os import path
'''
PyKI - PKI openssl for managing TLS certificates
Copyright (C) 2016 MAIBACH ALAIN
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Contact: alain.maibach@gmail.com / 34 rue appienne 13480 - FRANCE.
'''
# Certificate/key pair under test — hard-coded to a local dev PKI tree;
# adjust for your environment.
certPath = "/Users/albookpro/Downloads/pyTLSpki/building/pki/CERTS/clients/newcsr/newcsr.crt"
keyPath = "/Users/albookpro/Downloads/pyTLSpki/building/pki/CERTS/clients/newcsr/newcsr.key"
def check_cer_vs_key(cert, key, keypass=False):
    """Verify that a PEM private key matches a PEM certificate.

    Prints the outcome to stdout; exits the process with status 1 when
    either file is missing. *keypass* is the key passphrase (bytes), or
    False for an unencrypted key.
    """
    if not path.exists(cert):
        print("Error, unable to find " + cert + "\n")
        exit(1)
    if not path.exists(key):
        print("Error, unable to find " + key + "\n")
        exit(1)
    key_pem = open(key).read()
    if keypass:
        keyObj = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem, keypass)
    else:
        keyObj = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
    certObj = crypto.load_certificate(crypto.FILETYPE_PEM, open(cert).read())
    # Pair the key and certificate in a throwaway context and let OpenSSL
    # check their consistency.
    ctx = SSL.Context(SSL.TLSv1_METHOD)
    ctx.use_privatekey(keyObj)
    ctx.use_certificate(certObj)
    try:
        ctx.check_privatekey()
    except SSL.Error:
        print("Incorrect key.\n")
    else:
        print("Key matches certificate.\n")
# interactive mode
#check_cer_vs_key(certPath, keyPath)
# Run the check against the passphrase-protected key (passphrase b'azerty').
check_cer_vs_key(certPath, keyPath, b'azerty')
|
gpl-3.0
| -7,313,299,838,045,761,000
| 32.533333
| 93
| 0.687873
| false
| 3.505226
| false
| false
| false
|
cherbib/fofix
|
src/GuitarScene.py
|
1
|
323592
|
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyöstil? #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Spikehead777 #
# 2008 Glorandwarf #
# 2008 ShiekOdaSandz #
# 2008 QQStarS #
# 2008 .liquid. #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Scene import Scene, SuppressScene
from Song import Note, TextEvent, PictureEvent, loadSong, Bars, VocalPhrase
from Menu import Menu
from Language import _
import Player
from Player import STAR, KILL, CANCEL, KEY1A
import Dialogs
import Audio
import Stage
import Settings
import Song
from Scorekeeper import ScoreCard
from Shader import shaders
import random
import os
import Log
import locale
from OpenGL.GL import *
class GuitarScene(Scene):
def __init__(self, engine, libraryName, songName):
Scene.__init__(self, engine)
if self.engine.world.sceneName == "GuitarScene": #MFH - dual / triple loading cycle fix
Log.warn("Extra GuitarScene was instantiated, but detected and shut down. Cause unknown.")
raise SuppressScene #stump
else:
self.engine.createdGuitarScene = True
self.engine.world.sceneName = "GuitarScene"
self.playerList = self.players
self.partyMode = False
self.battle = False #QQstarS:new2 Bettle
self.battleGH = False #Qstick
self.coOp = False
self.coOpRB = False #akedrou
self.coOpGH = False
self.coOpType = False
self.practiceMode = False
self.bossBattle = False
self.ready = False
Log.debug("GuitarScene init...")
self.coOpPlayerMeter = 0
#MFH - testing new traceback logging:
#raise TypeError
#myfingershurt: new loading place for "loading" screen for song preparation:
#blazingamer new loading phrases
self.sinfo = Song.loadSongInfo(self.engine, songName, library = libraryName)
phrase = self.sinfo.loadingPhrase
if phrase == "":
phrase = random.choice(self.engine.theme.loadingPhrase)
if phrase == "None":
i = random.randint(0,4)
if i == 0:
phrase = _("Let's get this show on the Road")
elif i == 1:
phrase = _("Impress the Crowd")
elif i == 2:
phrase = _("Don't forget to strum!")
elif i == 3:
phrase = _("Rock the house!")
else:
phrase = _("Jurgen is watching")
splash = Dialogs.showLoadingSplashScreen(self.engine, phrase + " \n " + _("Initializing..."))
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Initializing..."))
self.countdownSeconds = 3 #MFH - don't change this initialization value unless you alter the other related variables to match
self.countdown = 100 #MFH - arbitrary value to prevent song from starting right away
self.countdownOK = False
#MFH - retrieve game parameters:
self.gamePlayers = len(self.engine.world.players)
self.gameMode1p = self.engine.world.gameMode
self.gameMode2p = self.engine.world.multiMode
self.lostFocusPause = self.engine.config.get("game", "lost_focus_pause")
if self.sinfo.bossBattle == "True" and self.gameMode1p == 2 and self.gamePlayers == 1:
self.bossBattle = True
self.engine.world.multiMode = 6
self.gameMode2p = 6
self.gamePlayers = 2
if self.gameMode1p == 2:
self.careerMode = True
else:
self.careerMode = False
#MFH - check for party mode
if self.gameMode2p == 2:
self.partyMode = True
self.gamePlayers = 1
self.partySwitch = 0
self.partyTime = self.engine.config.get("game", "party_time")
self.partyPlayer = 0
elif self.gamePlayers > 1:
#MFH - check for battle mode
if self.gameMode2p == 1:
self.battle = True
self.battleGH = False
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
elif self.gameMode2p == 3:
self.battle = False
self.battleGH = False
self.coOp = True
self.coOpRB = False
self.coOpGH = False
self.coOpType = True
elif self.gameMode2p == 4:
self.battle = False
self.battleGH = False
self.coOp = False
self.coOpRB = True
self.coOpGH = False
self.coOpType = True
elif self.gameMode2p == 5:
self.battle = False
self.battleGH = False
self.coOp = False
self.coOpRB = False
self.coOpGH = True
self.coOpType = True
elif self.gameMode2p == 6:
self.battle = False
self.battleGH = True
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
else:
self.battle = False
self.coOp = False
self.coOpRB = False
self.coOpGH = False
self.coOpType = False
self.splayers = self.gamePlayers #Spikehead777
#myfingershurt: drums :)
self.instruments = [] # akedrou - this combines Guitars, Drums, and Vocalists
self.keysList = []
self.soloKeysList = []
self.soloShifts = []
self.playingVocals = False
self.numberOfGuitars = len(self.playerList)
self.numOfPlayers = len(self.playerList)
self.numOfSingers = 0
self.firstGuitar = None
self.neckrender = []
gNum = 0
for j,player in enumerate(self.playerList):
guitar = True
if player.part.id == Song.VOCAL_PART:
from Vocalist import Vocalist
inst = Vocalist(self.engine, player, False, j)
if self.coOpRB:
inst.coOpRB = True
self.instruments.append(inst)
self.playingVocals = True
self.numOfSingers += 1
self.numberOfGuitars -= 1
guitar = False
elif player.part.id == Song.DRUM_PART:
#myfingershurt: drums :)
from Drum import Drum
inst = Drum(self.engine,player,False,j)
self.instruments.append(inst)
else:
from Guitar import Guitar
bass = False
if player.part.id == Song.BASS_PART:
bass = True
inst = Guitar(self.engine,player,False,j, bass = bass)
self.instruments.append(inst)
if player.part.id == Song.LEAD_PART or player.part.id == Song.GUITAR_PART: #both these selections should get guitar solos
self.instruments[j].canGuitarSolo = True
if player.practiceMode:
self.practiceMode = True
if guitar:
player.guitarNum = gNum
gNum += 1
if self.firstGuitar is None:
self.firstGuitar = j
self.neckrender.append(self.instruments[j].neck)
if self.instruments[j].isDrum:
self.keysList.append(player.drums)
self.soloKeysList.append(player.drumSolo)
self.soloShifts.append(None)
self.instruments[j].keys = player.drums
self.instruments[j].actions = player.drums
else:
self.keysList.append(player.keys)
self.soloKeysList.append(player.soloKeys)
self.soloShifts.append(player.soloShift)
self.instruments[j].keys = player.keys
self.instruments[j].actions = player.actions
else:
self.neckrender.append(None)
self.keysList.append([])
self.soloKeysList.append([])
self.soloShifts.append([])
self.guitars = self.instruments #for compatibility - I'll try to fix this...
#Log.debug("GuitarScene keysList: " + str(self.keysList))
Log.debug("GuitarScene keysList: %s" % str(self.keysList))
#for number formatting with commas for Rock Band:
locale.setlocale(locale.LC_ALL, '') #more compatible
self.visibility = 1.0
self.libraryName = libraryName
self.songName = songName
self.done = False
#try:
# self.sfxChannel = self.engine.audio.getChannel(5)
#except Exception, e:
# Log.warn("GuitarScene.py: Unable to procure sound effect track: %s" % e)
# self.sfxChannel = None
self.lastMultTime = [None for i in self.playerList]
self.cheatCodes = [
#([117, 112, 116, 111, 109, 121, 116, 101, 109, 112, 111], self.toggleAutoPlay), #Jurgen is enabled in the menu -- Spikehead777
([102, 97, 115, 116, 102, 111, 114, 119, 97, 114, 100], self.goToResults)
]
self.enteredCode = []
self.song = None
#self.finishedProcessingSong = False
#Spikehead777
#self.jurg = self.engine.config.get("game", "jurgtype")
#MFH
#self.jurgenLogic = self.engine.config.get("game", "jurglogic") #logic 0 = original, logic 1 = MFH-1
self.numOfPlayers = len(self.playerList)
self.jurgenLogic = [0 for i in self.playerList]
for i in range(len(self.playerList)):
self.jurgenLogic[i] = self.engine.config.get("game", "jurg_logic_p%d" % i)
self.aiSkill = [0 for i in self.playerList]
self.aiHitPercentage = [0 for i in self.playerList]
self.aiPlayNote = [True for i in self.playerList]
self.jurgBattleWhammyTime = [0 for i in self.playerList]
self.jurgBattleUseTime = [0 for i in self.playerList]
self.aiUseSP = [0 for i in self.playerList]
self.battleItemsHolding = [0 for i in self.playerList]
self.battleTarget = [0 for i in self.playerList]
for i, player in enumerate(self.playerList):
self.battleTarget[i] = i-1
if self.battleTarget[i] == -1:
self.battleTarget[i] = self.numOfPlayers - 1
self.aiSkill[i] = self.engine.config.get("game", "jurg_skill_p%d" % i)
if self.aiSkill[i] == 0:
self.aiHitPercentage[i] = 70 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 1000
self.jurgBattleUseTime[i] = 5000
elif self.aiSkill[i] == 1:
self.aiHitPercentage[i] = 80 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 750
self.jurgBattleUseTime[i] = 2000
elif self.aiSkill[i] == 2:
self.aiHitPercentage[i] = 85 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 750
self.jurgBattleUseTime[i] = 2000
elif self.aiSkill[i] == 3:
self.aiHitPercentage[i] = 90 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 500
self.jurgBattleUseTime[i] = 1000
elif self.aiSkill[i] == 4:
self.aiHitPercentage[i] = 95 + (5*player.getDifficultyInt())
self.jurgBattleWhammyTime[i] = 250
self.jurgBattleUseTime[i] = 1000 #this will be replaced by algorithm
elif self.aiSkill[i] == 5:
self.aiHitPercentage[i] = 100
self.jurgBattleWhammyTime[i] = 200
self.jurgBattleUseTime[i] = 1000 #this will be replaced by algorithm
if self.aiHitPercentage[i] > 100:
self.aiHitPercentage[i] = 100
#self.jurgenText = self.engine.config.get("game", "jurgtext")
self.jurgenText = self.engine.theme.jurgTextPos
if float(self.jurgenText[2]) < 0.00035:
self.jurgenText[2] = 0.00035
if float(self.jurgenText[0]) < 0:
self.jurgenText[0] = 0
if float(self.jurgenText[1]) < 0:
self.jurgenText[1] = 0
self.battleJurgMissTime = [0 for i in self.playerList]
self.whammySavesSP = self.engine.config.get("game", "whammy_saves_starpower") #MFH
self.failingEnabled = self.engine.config.get("coffee", "failingEnabled")
self.timeLeft = None
self.processedFirstNoteYet = False
#MFH - MUST be in front of loadSettings call!
#self.autoPlay = self.engine.config.get("game", "jurgmode")
#if self.autoPlay == 0:
# self.autoPlay = True
#else:
# self.autoPlay = False
self.playerAssist = [0 for i in self.playerList]
for i, player in enumerate(self.playerList):
if self.instruments[i].isDrum:
if player.autoKick:
self.playerAssist[i] = 3
elif not self.instruments[i].isVocal:
self.playerAssist[i] = player.assistMode
if self.playerAssist[i] == 2 and player.getDifficultyInt() > 1:
self.playerAssist[i] = 0
elif self.playerAssist[i] == 1 and player.getDifficultyInt() > 2:
self.playerAssist[i] = 0
for assistMode in self.playerAssist:
if assistMode > 0:
self.assisting = True
break
else:
self.assisting = False
self.autoPlay = False
self.jurgPlayer = [False for i in self.playerList]
self.jurg = [False for i in self.playerList]
self.customBot = [None for i in self.playerList]
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
continue
if self.engine.config.get("game", "jurg_p%d" % i) == True:
self.jurg[i] = True
self.autoPlay = True
self.lastPickPos = [None for i in self.playerList]
self.lastSongPos = 0.0
self.keyBurstTimeout = [None for i in self.playerList]
self.keyBurstPeriod = 30
self.camera.target = (0.0, 0.0, 4.0)
self.camera.origin = (0.0, 3.0, -3.0)
self.camera.target = (0.0, 1.0, 8.0)
self.camera.origin = (0.0, 2.0, -3.4)
self.targetX = self.engine.theme.povTargetX
self.targetY = self.engine.theme.povTargetY
self.targetZ = self.engine.theme.povTargetZ
self.originX = self.engine.theme.povOriginX
self.originY = self.engine.theme.povOriginY
self.originZ = self.engine.theme.povOriginZ
self.customPOV = False
self.ending = False
povList = [str(self.targetX), str(self.targetY), str(self.targetZ), str(self.originX), str(self.originY), str(self.originZ)]
if "None" not in povList:
self.customPOV = True
Log.debug("All theme POV set. Using custom camera POV.")
self.pause = False
self.failed = False
self.finalFailed = False
self.failEnd = False
self.crowdsCheering = False #akedrou
self.inUnison = [False for i in self.playerList]
self.haveUnison = [False for i in self.playerList]
self.firstUnison = False
self.firstUnisonDone = False
self.unisonActive = False
self.unisonNum = 0
self.unisonEarn = [False for i in self.playerList]
self.starPowersActive = 0
self.playersInGreen = 0
self.crowdFaderVolume = 0.0
self.coOpStarPower = 0
self.coOpStarPowerTimer = 0
self.coOpStarPowerActive = [0 for i in self.playerList]
self.battleSuddenDeath = False
self.failTimer = 0
self.rockTimer = 0 #myfingershurt
self.youRock = False #myfingershurt
self.rockCountdown = 100
self.soloReviewDispDelay = 300
self.baseScore = 50
self.baseSustainScore = .1
self.rockFinished = False
self.spTimes = [[] for i in self.playerList]
self.midiSP = False
self.oBarScale = 0.0 #volshebnyi - overdrive bar scale factor
#self.bossBattle = False
###Capo###
self.firstClap = True
###endCapo###
self.multi = [1 for i in self.playerList]
self.x1 = [0 for i in self.playerList]
self.y1 = [0 for i in self.playerList]
self.x2 = [0 for i in self.playerList]
self.y2 = [0 for i in self.playerList]
self.x3 = [0 for i in self.playerList]
self.y3 = [0 for i in self.playerList]
if self.coOpType:
self.x1.append(0)
self.y1.append(0)
self.x2.append(0)
self.y2.append(0)
self.x3.append(0)
self.y3.append(0)
#MFH - precalculation variable definition
#Get theme
themename = self.engine.data.themeLabel
self.theme = self.engine.data.theme
self.rmtype = self.theme
if self.engine.theme.hopoIndicatorX != None:
self.hopoIndicatorX = self.engine.theme.hopoIndicatorX
else:
self.hopoIndicatorX = .950
if self.engine.theme.hopoIndicatorY != None:
self.hopoIndicatorY = self.engine.theme.hopoIndicatorY
else:
self.hopoIndicatorY = .710
self.hopoIndicatorActiveColor = self.engine.theme.hopoIndicatorActiveColor
self.hopoIndicatorInactiveColor = self.engine.theme.hopoIndicatorInactiveColor
if self.coOpGH:
for instrument in self.instruments:
instrument.starPowerDecreaseDivisor /= self.numOfPlayers
self.rockMax = 30000.0
self.rockMedThreshold = self.rockMax/3.0 #MFH
self.rockHiThreshold = self.rockMax/3.0*2 #MFH
self.rock = [self.rockMax/2 for i in self.playerList]
self.arrowRotation = [.5 for i in self.playerList]
self.starNotesMissed = [False for i in self.playerList] #MFH
self.notesMissed = [False for i in self.playerList]
self.lessMissed = [False for i in self.playerList]
self.notesHit = [False for i in self.playerList]
self.lessHit = False
self.minBase = 400
self.pluBase = 15
self.minGain = 2
self.pluGain = 7
self.battleMax = 300 #QQstarS:new2 the max adding when battle
self.minusRock = [self.minBase for i in self.playerList]
self.plusRock = [self.pluBase for i in self.playerList]
self.coOpMulti = 1
self.coOpFailDone = [False for i in self.playerList]
if self.coOpRB: #akedrou
self.coOpPlayerMeter = len(self.rock)
self.rock.append(self.rockMax/2)
self.minusRock.append(0.0)
self.plusRock.append(0.0)
self.timesFailed = [0 for i in self.playerList]
if self.coOp or self.coOpGH:
self.coOpPlayerMeter = len(self.rock)-1 #make sure it's the last one
#Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Stage..."))
stage = os.path.join("themes",themename,"stage.ini")
self.stage = Stage.Stage(self, self.engine.resource.fileName(stage))
#Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Settings..."))
self.loadSettings()
self.tsBotNames = [_("KiD"), _("Stump"), _("AkedRobot"), _("Q"), _("MFH"), _("Jurgen")]
#MFH pre-translate text strings:
self.powerUpName = self.engine.theme.power_up_name
if self.battleGH:
self.tsBattleIcons = [None] * 9
self.tsBattleIcons[1] = _("Death Drain")
self.tsBattleIcons[2] = _("Difficulty Up")
self.tsBattleIcons[3] = _("Broken String")
self.tsBattleIcons[4] = _("Whammy")
self.tsBattleIcons[5] = _("Powerup Steal")
self.tsBattleIcons[6] = _("Switch Controls")
self.tsBattleIcons[7] = _("Double Notes")
self.tsBattleIcons[8] = _("Amp Overload")
self.tsNoteStreak = _("%d Note Streak")
self.tsPhraseStreak = _("%d Phrase Streak")
self.tsStarPowerReady = _("%s Ready") % self.powerUpName
self.tsCoOpStarPower = _("Activate %s!") % self.powerUpName
self.tsYouFailedBattle = _("You Failed")
self.tsJurgenIsHere = _("%s is here")
self.tsJurgenWasHere = _("%s was here")
self.tsPercentComplete = _("% Complete")
self.tsHopoIndicator = _("HOPO")
self.tsCompleted = _("COMPLETED")
self.tsPercentOn = _(" % ON ")
self.tsBassGroove = _("BASS GROOVE")
self.tsBassGrooveLabel = _("Bass Groove:")
self.tsHandicapLabel = _("Handicap")
self.tsAvgLabel = _("Avg")
self.tsAccVeryLate = _("Very Late")
self.tsAccLate = _("Late")
self.tsAccSlightlyLate = _("Slightly Late")
self.tsAccExcellentLate = _("-Excellent!")
self.tsAccPerfect = _("Perfect!!")
self.tsAccExcellentEarly= _("+Excellent!")
self.tsAccSlightlyEarly = _("Slightly Early")
self.tsAccEarly = _("Early")
self.tsAccVeryEarly = _("Very Early")
self.msLabel = _("ms")
self.tsSolo = _("Solo!")
self.tsPerfectSolo = _("Perfect Solo!")
self.tsAwesomeSolo = _("Awesome Solo!")
self.tsGreatSolo = _("Great Solo!")
self.tsGoodSolo = _("Good Solo!")
self.tsSolidSolo = _("Solid Solo!")
self.tsOkaySolo = _("Okay Solo")
self.tsMessySolo = _("Messy Solo")
self.tsPtsLabel = _("pts")
self.tsGetReady = _("Get Ready to Rock")
self.tsAsMadeFamousBy = _("as made famous by")
self.tsBy = _("by ")
self.tsFrettedBy = _(" fretted by ")
for player in self.playerList:
player.currentTheme = self.theme
#MFH - precalculate full and player viewports
self.engine.view.setViewport(1,0)
self.wFull, self.hFull = self.engine.view.geometry[2:4]
#Log.debug("GuitarScene wFull = %d, hFull = %d" % (self.wFull, self.hFull) )
self.wPlayer = []
self.hPlayer = []
self.hOffset = []
self.hFontOffset = []
self.stage.wFull = self.wFull #MFH - needed for new stage background handling
self.stage.hFull = self.hFull
#self.fontScreenBottom = 0.75 #from our current viewport's constant 3:4 aspect ratio (which is always stretched to fill the video resolution)
self.fontScreenBottom = self.engine.data.fontScreenBottom
self.oBarScaleCoef = (0.6 + 0.4 * self.numberOfGuitars) * 1.256 * self.hFull / self.wFull #volshebnyi - depends on resolution and number of players
for i, player in enumerate(self.playerList):
if not self.instruments[i].isVocal:
self.engine.view.setViewportHalf(self.numberOfGuitars,player.guitarNum)
w = self.engine.view.geometryAllHalf[self.numberOfGuitars-1,player.guitarNum,2]
h = self.engine.view.geometryAllHalf[self.numberOfGuitars-1,player.guitarNum,3]
else:
w = self.wFull
h = self.hFull
self.wPlayer.append( w )
self.hPlayer.append( h )
self.hOffset.append( h )
self.hFontOffset.append( h )
if not self.instruments[i].isVocal:
self.wPlayer[i] = self.wPlayer[i]*self.numberOfGuitars #QQstarS: set the width to right one
if self.numberOfGuitars>1:
self.hPlayer[i] = self.hPlayer[i]*self.numberOfGuitars/1.5 #QQstarS: Set the hight to right one
self.hOffset[i] = self.hPlayer[i]*.4*(self.numberOfGuitars-1)
else:
self.hPlayer[i] = self.hPlayer[i]*self.numberOfGuitars #QQstarS: Set the hight to right one
self.hOffset[i] = 0
self.hFontOffset[i] = -self.hOffset[i]/self.hPlayer[i]*0.752 #QQstarS: font Hight Offset when there are 2 players
self.engine.view.setViewport(1,0)
# Drum miss-sound configuration: "T_sound" selects which set of miss
# sounds plays when a drum note is missed ("Faaa Drum sound").
self.drumMisses = self.engine.config.get("game", "T_sound")
if not self.engine.data.bassDrumSoundFound:
    self.bassKickSoundEnabled = False
# BUG FIX: the original lines read `self.drumMisses == 0` — a comparison
# used as a statement (a no-op, pylint W0104) instead of an assignment.
# As written, a missing tom/cymbal sample never actually disabled the
# drum miss sounds. Each guard now assigns 0 as intended.
if not self.engine.data.T1DrumSoundFound:
    self.drumMisses = 0
if not self.engine.data.T2DrumSoundFound:
    self.drumMisses = 0
if not self.engine.data.T3DrumSoundFound:
    self.drumMisses = 0
if not self.engine.data.CDrumSoundFound:
    self.drumMisses = 0
#MFH - constant definitions, ini value retrievals
self.pitchBendLowestFactor = .90 #stump: perhaps read this from song.ini and fall back on a specific value?
self.lineByLineLyricMaxLineWidth = 0.5
self.lineByLineStartSlopMs = 750
self.digitalKillswitchStarpowerChunkSize = 0.05 * self.engine.audioSpeedFactor
self.digitalKillswitchActiveStarpowerChunkSize = self.digitalKillswitchStarpowerChunkSize / 3.0
# evilynux: was 0.10, now much closer to actual GH3
self.analogKillswitchStarpowerChunkSize = 0.15 * self.engine.audioSpeedFactor
self.analogKillswitchActiveStarpowerChunkSize = self.analogKillswitchStarpowerChunkSize / 3.0
self.rbOverdriveBarGlowFadeInChunk = .07 #this amount added to visibility every run() cycle when fading in - original .2
self.rbOverdriveBarGlowFadeOutChunk = .03 #this amount subtracted from visibility every run() cycle when fading out - original .07
self.crowdCheerFadeInChunk = .02 #added to crowdVolume every run() when fading in
self.crowdCheerFadeOutChunk = .03 #subtracted from crowdVolume every run() on fade out.
self.maxDisplayTextScale = 0.0024 #orig 0.0024
self.displayTextScaleStep2 = 0.00008 #orig 0.00008
self.displayTextScaleStep1 = 0.0001 #orig 0.0001
self.textTimeToDisplay = 100
self.songInfoDisplayScale = self.engine.theme.songInfoDisplayScale
self.songInfoDisplayX = self.engine.theme.songInfoDisplayX #Worldrave - This controls the X position of song info display during countdown
self.songInfoDisplayY = self.engine.theme.songInfoDisplayY #Worldrave - This controls the Y position of song info display during countdown
self.lyricMode = self.engine.config.get("game", "lyric_mode")
self.scriptLyricPos = self.engine.config.get("game", "script_lyric_pos")
self.starClaps = self.engine.config.get("game", "star_claps")
self.rb_sp_neck_glow = self.engine.config.get("game", "rb_sp_neck_glow")
self.accuracy = [0 for i in self.playerList]
self.resumeCountdownEnabled = self.engine.config.get("game", "resume_countdown")
self.resumeCountdown = 0
self.resumeCountdownSeconds = 0
self.pausePos = 0
self.dispAccuracy = [False for i in self.playerList]
self.showAccuracy = self.engine.config.get("game", "accuracy_mode")
self.hitAccuracyPos = self.engine.config.get("game", "accuracy_pos")
self.showUnusedTextEvents = self.engine.config.get("game", "show_unused_text_events")
self.bassKickSoundEnabled = self.engine.config.get("game", "bass_kick_sound")
self.gameTimeMode = self.engine.config.get("game", "game_time")
self.midiLyricsEnabled = self.engine.config.get("game", "rb_midi_lyrics")
self.midiSectionsEnabled = self.engine.config.get("game", "rb_midi_sections") #MFH
if self.numOfPlayers > 1 and self.midiLyricsEnabled == 1:
self.midiLyricsEnabled = 0
if self.numOfPlayers > 1 and self.midiSectionsEnabled == 1:
self.midiSectionsEnabled = 0
self.hopoDebugDisp = self.engine.config.get("game","hopo_debug_disp")
if self.hopoDebugDisp == 1:
for instrument in self.instruments:
if not instrument.isDrum and not instrument.isVocal:
instrument.debugMode = True
self.numDecimalPlaces = self.engine.config.get("game","decimal_places")
self.roundDecimalForDisplay = lambda n: ('%%.%df' % self.numDecimalPlaces) % float(n) #stump
self.starScoring = self.engine.config.get("game", "star_scoring")#MFH
self.ignoreOpenStrums = self.engine.config.get("game", "ignore_open_strums") #MFH
self.muteSustainReleases = self.engine.config.get("game", "sustain_muting") #MFH
self.hopoIndicatorEnabled = self.engine.config.get("game", "hopo_indicator") #MFH
self.fontShadowing = self.engine.config.get("game", "in_game_font_shadowing") #MFH
self.muteLastSecond = self.engine.config.get("audio", "mute_last_second") #MFH
self.mutedLastSecondYet = False
self.muteDrumFill = self.engine.config.get("game", "mute_drum_fill") #MFH
self.starScoreUpdates = self.engine.config.get("performance", "star_score_updates") #MFH
self.currentlyAnimating = True
self.missPausesAnim = self.engine.config.get("game", "miss_pauses_anim") #MFH
self.displayAllGreyStars = self.engine.theme.displayAllGreyStars
self.starpowerMode = self.engine.config.get("game", "starpower_mode") #MFH
self.useMidiSoloMarkers = False
self.logMarkerNotes = self.engine.config.get("game", "log_marker_notes")
self.logStarpowerMisses = self.engine.config.get("game", "log_starpower_misses")
self.soloFrameMode = self.engine.config.get("game", "solo_frame")
self.whammyEffect = self.engine.config.get("audio", "whammy_effect")
if self.whammyEffect == 1 and not Audio.pitchBendSupported: #pitchbend
Dialogs.showMessage(self.engine, "Pitchbend module not found! Forcing Killswitch effect.")
self.whammyEffect = 0
shaders.var["whammy"] = self.whammyEffect
self.bigRockEndings = self.engine.config.get("game", "big_rock_endings")
self.showFreestyleActive = self.engine.config.get("debug", "show_freestyle_active")
#stump: continuous star fillup
self.starFillupCenterX = self.engine.theme.starFillupCenterX
self.starFillupCenterY = self.engine.theme.starFillupCenterY
self.starFillupInRadius = self.engine.theme.starFillupInRadius
self.starFillupOutRadius = self.engine.theme.starFillupOutRadius
self.starFillupColor = self.engine.theme.colorToHex(self.engine.theme.starFillupColor)
self.starContinuousAvailable = self.engine.config.get("performance", "star_continuous_fillup") and \
None not in (self.starFillupCenterX, self.starFillupCenterY, self.starFillupInRadius, self.starFillupOutRadius, self.starFillupColor)
self.showBpm = self.engine.config.get("debug", "show_bpm") #MFH
self.logLyricEvents = self.engine.config.get("log", "log_lyric_events")
#self.logTempoEvents = self.engine.config.get("log", "log_tempo_events")
self.vbpmLogicType = self.engine.config.get("debug", "use_new_vbpm_beta")
#MFH - switch to midi lyric mode option
self.midiLyricMode = self.engine.config.get("game", "midi_lyric_mode")
#self.midiLyricMode = 0
self.currentSimpleMidiLyricLine = ""
self.noMoreMidiLineLyrics = False
self.screenCenterX = self.engine.video.screen.get_rect().centerx
self.screenCenterY = self.engine.video.screen.get_rect().centery
#racer: practice beat claps:
self.beatClaps = self.engine.config.get("game", "beat_claps")
self.killDebugEnabled = self.engine.config.get("game", "kill_debug")
#myfingershurt: for checking if killswitch key is analog for whammy
self.whammyVolAdjStep = 0.1
self.analogKillMode = [self.engine.input.getAnalogKill(i) for i in range(self.numOfPlayers)]
self.isKillAnalog = [False for i in self.playerList]
self.isSPAnalog = [False for i in self.playerList]
self.isSlideAnalog = [False for i in self.playerList]
self.whichJoyKill = [0 for i in self.playerList]
self.whichAxisKill = [0 for i in self.playerList]
self.whichJoyStar = [0 for i in self.playerList]
self.whichAxisStar = [0 for i in self.playerList]
self.whichJoySlide = [0 for i in self.playerList]
self.whichAxisSlide = [0 for i in self.playerList]
self.whammyVol = [0.0 for i in self.playerList]
self.starAxisVal = [0.0 for i in self.playerList]
self.starDelay = [0.0 for i in self.playerList]
self.starActive = [False for i in self.playerList]
self.slideValue = [-1 for i in self.playerList]
self.targetWhammyVol = [0.0 for i in self.playerList]
self.defaultWhammyVol = [self.analogKillMode[i]-1.0 for i in range(self.numOfPlayers)] #makes xbox defaults 1.0, PS2 defaults 0.0
for i in range(self.numOfPlayers):
if self.analogKillMode[i] == 3: #XBOX inverted mode
self.defaultWhammyVol[i] = -1.0
self.actualWhammyVol = [self.defaultWhammyVol[i] for i in range(self.numOfPlayers)]
self.lastWhammyVol = [self.defaultWhammyVol[i] for i in range(self.numOfPlayers)]
KillKeyCode = [0 for i in self.playerList]
StarKeyCode = [0 for i in self.playerList]
SlideKeyCode = [0 for i in self.playerList]
self.lastTapText = "tapp: -"
#myfingershurt: auto drum starpower activation option
#self.autoDrumStarpowerActivate = self.engine.config.get("game", "auto_drum_sp")
self.autoDrumStarpowerActivate = self.engine.config.get("game", "drum_sp_mode")
self.analogSlideMode = [self.engine.input.getAnalogSlide(i) for i in range(self.numOfPlayers)]
self.analogSPMode = [self.engine.input.getAnalogSP(i) for i in range(self.numOfPlayers)]
self.analogSPThresh = [self.engine.input.getAnalogSPThresh(i) for i in range(self.numOfPlayers)]
self.analogSPSense = [self.engine.input.getAnalogSPSense(i) for i in range(self.numOfPlayers)]
self.numDrumFills = 0 #MFH - count drum fills to see whether or not we should use auto SP
#MFH - TODO - rewrite in an expandable fashion; requires creation of some new Player object constants that will link to the appropriate player's control based on which player the object is set to
for i, player in enumerate(self.playerList):
if self.analogKillMode[i] > 0:
KillKeyCode[i] = self.controls.getReverseMapping(player.keyList[KILL])
self.isKillAnalog[i], self.whichJoyKill[i], self.whichAxisKill[i] = self.engine.input.getWhammyAxis(KillKeyCode[i])
if self.isKillAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])
except IndexError:
self.isKillAnalog[i] = False
if self.analogSPMode[i] > 0:
StarKeyCode[i] = self.controls.getReverseMapping(player.keyList[STAR])
self.isSPAnalog[i], self.whichJoyStar[i], self.whichAxisStar[i] = self.engine.input.getWhammyAxis(StarKeyCode[i])
if self.isSPAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoyStar[i]].get_axis(self.whichAxisStar[i])
except IndexError:
self.isSPAnalog[i] = False
if player.controlType == 4:
SlideKeyCode[i] = self.controls.getReverseMapping(player.keyList[KEY1A])
self.isSlideAnalog[i], self.whichJoySlide[i], self.whichAxisSlide[i] = self.engine.input.getWhammyAxis(SlideKeyCode[i])
if self.isSlideAnalog[i]:
try:
testJoy = self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])
except IndexError:
self.isSlideAnalog[i] = False
self.inGameStats = self.engine.config.get("performance","in_game_stats")
self.inGameStars = self.engine.config.get("game","in_game_stars")
self.partialStars = self.engine.config.get("game","partial_stars")
self.guitarSoloAccuracyDisplayMode = self.engine.config.get("game", "gsolo_accuracy_disp")
self.guitarSoloAccuracyDisplayPos = self.engine.config.get("game", "gsolo_acc_pos")
#need a new flag for each player, showing whether or not they've missed a note during a solo section.
#this way we have a backup detection of Perfect Solo in case a note got left out, picks up the other side of the solo slop
self.guitarSoloBroken = [False for i in self.playerList]
self.deadPlayerList = [] #akedrou - keep the order failed.
self.numDeadPlayers = 0
coOpInstruments = []
self.scoring = []
#self.stars = [0,0]
for instrument in self.instruments:
if instrument.isDrum:
this = Song.DRUM_PART
coOpInstruments.append(this)
elif instrument.isBassGuitar:
this = Song.BASS_PART
coOpInstruments.append(this)
elif instrument.isVocal:
this = Song.VOCAL_PART
coOpInstruments.append(this)
else:
this = Song.GUITAR_PART
coOpInstruments.append(this) #while different guitars exist, they don't affect scoring.
self.scoring.append(ScoreCard([this]))
if self.coOpType:
self.coOpScoreCard = ScoreCard(coOpInstruments, coOpType = True)
else:
self.coOpScoreCard = None
self.partialStar = [0 for i in self.playerList]
self.starRatio = [0.0 for i in self.playerList]
self.dispSoloReview = [False for i in self.playerList]
self.soloReviewText = [[] for i in self.playerList]
self.soloReviewCountdown = [0 for i in self.playerList]
self.guitarSoloAccuracy = [0.0 for i in self.playerList]
self.guitarSoloActive = [False for i in self.playerList]
self.currentGuitarSolo = [0 for i in self.playerList]
#guitar solo display initializations
if self.theme == 2:
self.solo_soloFont = self.engine.data.scoreFont
else:
self.solo_soloFont = self.engine.data.font
self.guitarSoloShown = [False for i in self.playerList]
self.currentGuitarSoloLastHitNotes = [1 for i in self.playerList]
self.solo_xOffset = [0.0 for i in self.playerList]
self.solo_yOffset = [0.0 for i in self.playerList]
self.solo_boxXOffset = [0.0 for i in self.playerList]
self.solo_boxYOffset = [0.0 for i in self.playerList]
self.solo_Tw = [0.0 for i in self.playerList]
self.solo_Th = [0.0 for i in self.playerList]
self.solo_soloText = ["solo" for i in self.playerList]
self.soloAcc_Rect = [None for i in self.playerList]
self.solo_txtSize = 0.00250
for i, playa in enumerate(self.playerList):
if self.guitarSoloAccuracyDisplayPos == 0: #right
if self.guitarSoloAccuracyDisplayMode == 1: #percentage only
self.solo_xOffset[i] = 0.890
else:
self.solo_xOffset[i] = 0.950
else:
self.solo_xOffset[i] = 0.150
self.solo_yOffset[i] = 0.320 #last change -.040
#self.totalNotes = [0,0]
#self.totalSingleNotes = [0,0]
self.currentGuitarSoloTotalNotes = [0 for i in self.playerList]
#self.currentGuitarSoloHitNotes = [0,0]
self.guitarSolos = [ [] for i in self.playerList]
guitarSoloStartTime = 0
isGuitarSoloNow = False
guitarSoloNoteCount = 0
lastSoloNoteTime = 0
self.drumStart = False
soloSlop = 100.0
unisonCheck = []
if self.careerMode:
self.failingEnabled = True
self.tut = self.engine.config.get("game", "tut")
#MFH - no Jurgen in Career mode or tutorial mode or practice mode:
if self.careerMode or self.tut or self.playerList[0].practiceMode:
self.autoPlay = False
#force jurgen player 2 (and only player 2) for boss battles
if self.bossBattle:
self.autoPlay = True
self.jurg = [False for i in self.playerList]
self.jurg[1] = True
self.rockFailUp = True #akedrou - fading mech
self.rockFailViz = 0.0
self.failViz = [0.0 for i in self.playerList]
self.phrases = self.engine.config.get("coffee", "game_phrases")#blazingamer
self.starfx = self.engine.config.get("game", "starfx")#blazingamer
smallMult = self.engine.config.get("game","small_rb_mult")
self.rbmfx = False
if smallMult == 2 or (smallMult == 1 and self.engine.theme.smallMult):
self.rbmfx = True
self.boardY = 2
self.rbOverdriveBarGlowVisibility = 0
self.rbOverdriveBarGlowFadeOut = False
self.counting = self.engine.config.get("video", "counting")
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Song..."))
#MFH - this is where song loading originally took place, and the loading screen was spawned.
self.engine.resource.load(self, "song", lambda: loadSong(self.engine, songName, library = libraryName, part = [player.part for player in self.playerList], practiceMode = self.playerList[0].practiceMode, practiceSpeed = self.playerList[0].practiceSpeed), synch = True, onLoad = self.songLoaded)
# glorandwarf: show the loading splash screen and load the song synchronously
#Dialogs.hideLoadingSplashScreen(self.engine, splash)
#splash = None
#splash = Dialogs.showLoadingSplashScreen(self.engine, phrase)
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Preparing Note Phrases..."))
if self.playerList[0].practiceMode or self.song.info.tutorial or self.tut:
self.failingEnabled = False
self.playerList[0].hopoFreq = self.song.info.hopofreq
bassGrooveEnableSet = self.engine.config.get("game", "bass_groove_enable")
if bassGrooveEnableSet == 1 and self.theme == 2:
self.bassGrooveEnabled = True
elif bassGrooveEnableSet == 2 and self.song.midiStyle == Song.MIDI_TYPE_RB:
self.bassGrooveEnabled = True
elif bassGrooveEnableSet == 3:
self.bassGrooveEnabled = True
else:
self.bassGrooveEnabled = False
for i, drum in enumerate(self.instruments):
if not drum.isDrum:
continue
if drum.drumFlip:
for d in range(len(Song.difficulties)):
self.song.tracks[i][d].flipDrums()
for scoreCard in self.scoring:
scoreCard.bassGrooveEnabled = self.bassGrooveEnabled
#MFH - single audio track song detection
self.isSingleAudioTrack = self.song.isSingleAudioTrack
#myfingershurt: also want to go through song and search for guitar solo parts, and count notes in them in each diff.
#MFH - now, handle MIDI starpower / overdrive / other special marker notes:
#MFH - first, count the markers for each instrument. If a particular instrument does not have at least two starpower phrases
# marked, ignore them and force auto-generation of SP paths.
for i in range(self.numOfPlayers): #MFH - count number of drum fills
if self.instruments[i].isDrum: #MFH - count number of drum fill markers
self.numDrumFills = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and (event.number == Song.freestyleMarkingNote) ) ])
Log.debug("Drum part found, scanning for drum fills.... %d freestyle markings found (the last one may be a Big Rock Ending)." % self.numDrumFills)
#MFH - handle early hit window automatic type determination, and how it compares to the forced handicap if not auto
self.effectiveEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
self.forceEarlyHitWindowSetting = self.engine.config.get("handicap", "early_hit_window")
if self.song.info.early_hit_window_size:
Log.debug("song.ini setting found speficying early_hit_window_size - %s" % self.song.info.early_hit_window_size)
if self.song.info.early_hit_window_size.lower() == "none":
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_NONE
elif self.song.info.early_hit_window_size.lower() == "half":
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_HALF
#elif self.song.info.early_hit_window_size.lower() == "full":
else: #all other unrecognized cases, default to "full"
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_FULL
else:
Log.debug("No song.ini setting found speficying early_hit_window_size - using automatic detection...")
if self.song.midiStyle == Song.MIDI_TYPE_RB:
Log.debug("Basic RB1/RB2 type MIDI found - early hitwindow of NONE is set as handicap base.")
self.automaticEarlyHitWindow = Song.EARLY_HIT_WINDOW_NONE
if self.forceEarlyHitWindowSetting > 0: #MFH - if user is specifying a specific early hitwindow, then calculate handicap...
self.effectiveEarlyHitWindow = self.forceEarlyHitWindowSetting
tempHandicap = 1.00
if self.automaticEarlyHitWindow > self.effectiveEarlyHitWindow: #MFH - positive handicap
tempHandicap += ( (self.automaticEarlyHitWindow - self.effectiveEarlyHitWindow) * 0.05 )
elif self.automaticEarlyHitWindow < self.effectiveEarlyHitWindow: #MFH - negative handicap
tempHandicap -= ( (self.effectiveEarlyHitWindow - self.automaticEarlyHitWindow) * 0.05 )
for scoreCard in self.scoring:
scoreCard.earlyHitWindowSizeHandicap = tempHandicap
if self.coOpType:
self.coOpScoreCard.earlyHitWindowSizeHandicap = tempHandicap
#Log.debug("User-forced early hit window setting %d, effective handicap determined: %f" % (self.forceEarlyHitWindowSetting,tempHandicap) ) #MFH - not used atm
else:
#Log.debug("Automatic early hit window mode - automatically-detected setting used: %d" % self.automaticEarlyHitWindow) #MFH - not used atm
self.effectiveEarlyHitWindow = self.automaticEarlyHitWindow
tempEarlyHitWindowSizeFactor = 0.5
if self.effectiveEarlyHitWindow == 1: #none
tempEarlyHitWindowSizeFactor = 0.10 #really, none = about 10%
elif self.effectiveEarlyHitWindow == 2: #half
tempEarlyHitWindowSizeFactor = 0.5
else: #any other value will be full
tempEarlyHitWindowSizeFactor = 1.0
#MFH - TODO - single, global BPM here instead of in instrument objects:
#self.tempoBpm = Song.DEFAULT_BPM
#self.actualBpm = 0.0
#self.targetPeriod = 60000.0 / self.targetBpm
self.disableVBPM = self.engine.config.get("game", "disable_vbpm")
self.currentBpm = Song.DEFAULT_BPM
self.currentPeriod = 60000.0 / self.currentBpm
self.targetBpm = self.currentBpm
self.lastBpmChange = -1.0
self.baseBeat = 0.0
#for guit in self.guitars: #MFH - tell guitar / drum objects which VBPM logic to use
# guit.vbpmLogicType = self.vbpmLogicType
for instrument in self.instruments: #MFH - force update of early hit window
instrument.earlyHitWindowSizeFactor = tempEarlyHitWindowSizeFactor
instrument.actualBpm = 0.0
instrument.currentBpm = Song.DEFAULT_BPM
instrument.setBPM(instrument.currentBpm)
#if self.starpowerMode == 2: #auto-MIDI mode only
self.markSolos = self.engine.config.get("game", "mark_solo_sections")
if self.markSolos == 2:
if self.engine.theme.markSolos == 2:
if self.theme == 2:
self.markSolos = 1
else:
self.markSolos = 0
else:
self.markSolos = self.engine.theme.markSolos
if self.song.hasStarpowerPaths:
for i,guitar in enumerate(self.instruments):
if guitar.isVocal:
continue
#MFH - first, count the SP marker notes!
numOfSpMarkerNotes = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and not event.endMarker and (event.number == Song.overDriveMarkingNote or (event.number == Song.starPowerMarkingNote and self.song.midiStyle == Song.MIDI_TYPE_GH) ) ) ])
#also want to count RB solo sections in this track, if the MIDI type is RB. Then we'll know to activate MIDI guitar solo markers or not
# for this instrument
if self.song.midiStyle == Song.MIDI_TYPE_RB:
numMidiSoloMarkerNotes = len([1 for time, event in self.song.midiEventTrack[i].getAllEvents() if (isinstance(event, Song.MarkerNote) and not event.endMarker and event.number == Song.starPowerMarkingNote ) ])
if numMidiSoloMarkerNotes > 0 and self.markSolos > 0: #if at least 1 solo marked in this fashion, tell that guitar to ignore text solo events
self.useMidiSoloMarkers = True
guitar.useMidiSoloMarkers = True
if self.neckrender[self.playerList[i].guitarNum] is not None:
self.neckrender[self.playerList[i].guitarNum].useMidiSoloMarkers = True
if numOfSpMarkerNotes > 1:
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
markStarpower = False
if event.number == Song.overDriveMarkingNote:
markStarpower = True
if event.number == Song.starPowerMarkingNote:
if self.song.midiStyle == Song.MIDI_TYPE_GH:
markStarpower = True
#else: #RB solo marking!
if markStarpower and self.starpowerMode == 2: #auto-MIDI mode only:
tempStarpowerNoteList = self.song.track[i].getEvents(time, time+event.length)
self.spTimes[i].append((time,time+event.length))
lastSpNoteTime = 0
for spTime, spEvent in tempStarpowerNoteList:
if isinstance(spEvent, Note):
if spTime > lastSpNoteTime:
lastSpNoteTime = spTime
spEvent.star = True
#now, go back and mark all of the last chord as finalStar
# BUT only if not drums! If drums, mark only ONE of the last notes!
#lastChordTime = spTime
oneLastSpNoteMarked = False
for spTime, spEvent in tempStarpowerNoteList:
if isinstance(spEvent, Note):
if spTime == lastSpNoteTime:
if (guitar.isDrum and not oneLastSpNoteMarked) or (not guitar.isDrum):
spEvent.finalStar = True
oneLastSpNoteMarked = True
if self.logMarkerNotes == 1:
Log.debug("GuitarScene: P%d overdrive / starpower phrase marked between %f and %f" % ( i+1, time, time+event.length ) )
if lastSpNoteTime == 0:
Log.warn("This starpower phrase doesn't appear to have any finalStar notes marked... probably will not reward starpower!")
self.midiSP = True
unisonCheck.extend(self.spTimes[i])
elif self.starpowerMode == 2: #this particular instrument only has one starpower path marked! Force auto-generation of SP paths.
Log.warn("Instrument %s only has one starpower path marked! ...falling back on auto-generated paths for this instrument." % self.playerList[i].part.text)
guitar.starNotesSet = False #fallback on auto generation.
elif self.starpowerMode == 2:
if self.numberOfGuitars > 0:
Log.warn("This song does not appear to have any starpower or overdrive paths marked, falling back on auto-generated paths.")
for instrument in self.instruments:
if instrument.isVocal:
continue
instrument.starNotesSet = False #fallback on auto generation.
if self.useMidiSoloMarkers or self.song.midiStyle == Song.MIDI_TYPE_RB or self.markSolos == 3: #assume RB Midi-types with no solos don't want any, dammit!
self.markSolos = 0
for i, player in enumerate(self.playerList):
if player.guitarNum is not None:
self.instruments[i].markSolos = self.markSolos
if self.neckrender[player.guitarNum] is not None:
self.neckrender[player.guitarNum].markSolos = self.markSolos
self.lastDrumNoteTime = 0.0
self.lastNoteTimes = [0.0 for i in self.playerList]
#self.lastDrumNoteEvent = None
self.drumScoringEnabled = True
#akedrou - moved this to the part where it loads notes...
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
self.song.track[i].removeTempoEvents()
self.song.track[i].markPhrases()
holdingTap = False
holdingTapLength = 0
holdingTapNotes = 0
phraseId = 0
for time, event in self.song.track[i].getAllEvents():
if isinstance(event, VocalPhrase):
if event.tapPhrase:
if not holdingTap:
holdingTap = True
self.instruments[i].tapPartStart.append(phraseId)
holdingTapLength += 1
holdingTapNotes += len(event)
else:
if holdingTap:
self.instruments[i].tapPartLength.append(holdingTapLength)
self.instruments[i].tapNoteTotals.append(holdingTapNotes)
self.instruments[i].tapNoteHits.append(0)
holdingTap = False
holdingTapLength = 0
holdingTapNotes = 0
phraseId += 1
else:
self.instruments[i].totalPhrases = phraseId
if holdingTap:
self.instruments[i].tapPartLength.append(holdingTapLength)
self.instruments[i].tapNoteTotals.append(holdingTapNotes)
self.instruments[i].tapNoteHits.append(0)
else:
#myfingershurt: preventing ever-thickening BPM lines after restarts
self.song.track[i].markBars()
#MFH - should only be done the first time.
if self.hopoStyle > 0 or self.song.info.hopo == "on":
if not self.instruments[i].isDrum and not self.instruments[i].isVocal:
if self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO system
self.song.track[i].markHopoGH2(self.song.info.eighthNoteHopo, self.hopoAfterChord, self.song.info.hopofreq)
elif self.hopoStyle == 1: #RF-Mod style HOPO system
self.song.track[i].markHopoRF(self.song.info.eighthNoteHopo, self.song.info.hopofreq)
#self.song.track[i].removeTempoEvents() #MFH - perform a little event cleanup on these tracks
if self.battleGH and not self.instruments[i].isVocal:
if self.instruments[i].difficulty != 0:
self.song.difficulty[i] = Song.difficulties[self.instruments[i].difficulty-1]
self.song.track[i].markBars()
if self.hopoStyle > 0 or self.song.info.hopo == "on":
if not self.instruments[i].isDrum:
if self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO system
self.song.track[i].markHopoGH2(self.song.info.eighthNoteHopo, self.hopoAfterChord, self.song.info.hopofreq)
elif self.hopoStyle == 1: #RF-Mod style HOPO system
self.song.track[i].markHopoRF(self.song.info.eighthNoteHopo, self.song.info.hopofreq)
#self.song.track[i].removeTempoEvents() #MFH - perform a little event cleanup on these tracks
self.song.difficulty[i] = Song.difficulties[self.instruments[i].difficulty]
#myfingershurt: removing buggy disable stats option
lastTime = 0
for i in range(self.numOfPlayers):
for time, event in self.song.track[i].getAllEvents():
if not isinstance(event, Note) and not isinstance(event, VocalPhrase):
continue
if time + event.length > lastTime:
lastTime = time + event.length
self.lastEvent = lastTime + 1000
self.lastEvent = round(self.lastEvent / 1000) * 1000
#self.notesCum = 0
self.noteLastTime = 0
totalBreNotes = 0
#count / init solos and notes
for i,instrument in enumerate(self.instruments):
#MFH - go through, locate, and mark the last drum note. When this is encountered, drum scoring should be turned off.
lastDrumNoteTime = 0.0
lastDrumNoteEvent = None
for time, event in self.song.track[i].getAllEvents():
if isinstance(event, Note) or isinstance(event, VocalPhrase):
if time >= lastDrumNoteTime:
lastDrumNoteTime = time
lastDrumNoteEvent = event
if instrument.isDrum:
self.lastDrumNoteTime = lastDrumNoteTime
Log.debug("Last drum note located at time = " + str(self.lastDrumNoteTime) )
#self.lastDrumNoteEvent = lastDrumNoteEvent
self.scoring[i].totalStreakNotes = len([1 for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, Note)])
elif instrument.isVocal:
self.scoring[i].totalStreakNotes = len([1 for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, VocalPhrase)])
else:
self.scoring[i].totalStreakNotes = len(set(time for time, event in self.song.track[i].getEvents(self.playerList[i].startPos,self.lastEvent) if isinstance(event, Note)))
#self.song.track[i].allEvents[self.song.track[i].maxIndex][0]
#self.scoring[i].totalStreakNotes = len(set(time for time, event in self.song.track[i].getAllEvents() if isinstance(event, Note)))
self.scoring[i].lastNoteEvent = lastDrumNoteEvent
self.scoring[i].lastNoteTime = lastDrumNoteTime
self.lastNoteTimes[i] = lastDrumNoteTime
if lastDrumNoteEvent:
if isinstance(lastDrumNoteEvent, Note):
Log.debug("Last note (number %d) found for player %d at time %f" % (lastDrumNoteEvent.number, i, lastDrumNoteTime) )
elif isinstance(lastDrumNoteEvent, VocalPhrase):
Log.debug("Last vocal phrase found for player %d at time %f" % (i, lastDrumNoteTime) )
else:
Log.debug("Last note event not found and is None!")
#- #volshebnyi - don't count notes in BRE zones if BRE active
#- if guitar.freestyleEnabled:
#- self.playerList[i].freestyleSkippedNotes = 0
#- for time, event in self.song.midiEventTrack[i].getAllEvents():
#- if isinstance(event, Song.MarkerNote) and not event.endMarker:
#- if (event.number == Song.freestyleMarkingNote):
#- if guitar.isDrum:
#- guitar.drumFillsTotal += 1
#- else:
#- for freestyleTime, event1 in self.song.track[i].getEvents(time, time + event.length):
#- if isinstance(event1, Note):
#- self.playerList[i].freestyleSkippedNotes += 1
#-
#- self.playerList[i].totalStreakNotes -= self.playerList[i].freestyleSkippedNotes
if instrument.isVocal:
self.scoring[i].totalNotes = self.scoring[i].totalStreakNotes - len(instrument.tapNoteTotals)
self.scoring[i].totalPercNotes = sum(instrument.tapNoteTotals)
self.scoring[i].baseScore = (instrument.vocalBaseScore * self.scoring[i].totalNotes) + (self.scoring[i].totalPercNotes * instrument.baseScore)
else:
self.scoring[i].totalNotes = len([1 for Ntime, event in self.song.track[i].getAllEvents() if isinstance(event, Note)])
if self.song.midiEventTrack[i] is not None: # filters out vocals
#MFH - determine which marker is BRE, and count streak notes behind it to remove from the scorecard
if self.song.hasFreestyleMarkings:
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
if (event.number == Song.freestyleMarkingNote):
thisIsABre = False
#if guitar.isDrum and self.song.breMarkerTime: #MFH - must ensure this song HAS a BRE!
# if time > self.song.breMarkerTime:
# thisIsABre = True
#else: #MFH - guitar or bass; no BRE text event marker required
if not instrument.isDrum:
thisIsABre = True
if thisIsABre: #MFH - only deal with guitar/bass BRE notes here. Drum notes will be handled in realtime as they are encountered under a fill or BRE.
breStart = time
breEnd = time + event.length
#if guitar.isDrum: #MFH - count drum notes individually
# numBreStreakNotes = len([1 for time, event in self.song.track[i].getEvents(breStart, breEnd) if isinstance(event, Note)])
#else: #MFH - count guitar / bass notes with grouped chords
numBreStreakNotes = len(set(time for time, event in self.song.track[i].getEvents(breStart, breEnd) if isinstance(event, Note)))
self.scoring[i].totalStreakNotes -= numBreStreakNotes #MFH - remove BRE notes correctly from streak count.
Log.debug("Removed %d streak notes from player %d" % (numBreStreakNotes, i) )
totalBreNotes += numBreStreakNotes
if instrument.useMidiSoloMarkers: #mark using the new MIDI solo marking system
for time, event in self.song.midiEventTrack[i].getAllEvents():
if isinstance(event, Song.MarkerNote) and not event.endMarker:
if (event.number == Song.starPowerMarkingNote) and (self.song.midiStyle == Song.MIDI_TYPE_RB): #solo marker note.
startTime = time
endTime = time + event.length
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(startTime, endTime) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
Log.debug("P" + str(i+1) + " MIDI " + self.playerList[i].part.text + " Solo found from: " + str(startTime) + " to: " + str(endTime) + ", containing " + str(guitarSoloNoteCount) + " notes." )
elif instrument.markSolos == 1: #mark using the old text-based system
#Ntime now should contain the last note time - this can be used for guitar solo finishing
#MFH - use new self.song.eventTracks[Song.TK_GUITAR_SOLOS] -- retrieve a gsolo on / off combo, then use it to count notes
# just like before, detect if end reached with an open solo - and add a GSOLO OFF event just before the end of the song.
for time, event in self.song.eventTracks[Song.TK_GUITAR_SOLOS].getAllEvents():
if event.text.find("GSOLO") >= 0:
if event.text.find("ON") >= 0:
isGuitarSoloNow = True
guitarSoloStartTime = time
else:
isGuitarSoloNow = False
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, time) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
Log.debug("GuitarScene: Guitar Solo found: " + str(guitarSoloStartTime) + "-" + str(time) + " = " + str(guitarSoloNoteCount) )
if isGuitarSoloNow: #open solo until end - needs end event!
isGuitarSoloNow = False
#guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, time) if isinstance(Gevent, Note)])
#MFH - must find the real "last note" time, requires another iteration...
for lnTime, lnEvent in self.song.track[i].getAllEvents():
if isinstance(lnEvent, Note):
if lnTime > Ntime:
Ntime = lnTime
#Ntime = Ntime + soloSlop
guitarSoloNoteCount = len([1 for Gtime, Gevent in self.song.track[i].getEvents(guitarSoloStartTime, Ntime) if isinstance(Gevent, Note)])
self.guitarSolos[i].append(guitarSoloNoteCount - 1)
newEvent = TextEvent("GSOLO OFF", 100.0)
#self.song.eventTracks[Song.TK_GUITAR_SOLOS].addEvent(time - soloSlop,newEvent) #adding the missing GSOLO OFF event
self.song.eventTracks[Song.TK_GUITAR_SOLOS].addEvent(Ntime, newEvent) #adding the missing GSOLO OFF event
Log.debug("GuitarScene: Guitar Solo until end of song found - (guitarSoloStartTime - Ntime = guitarSoloNoteCount): " + str(guitarSoloStartTime) + "-" + str(Ntime) + " = " + str(guitarSoloNoteCount) )
self.unisonConfirm = [] #akedrou
self.unisonPlayers = []
self.unisonIndex = 0
if self.coOpRB:
for spNoted in unisonCheck:
if unisonCheck.count(spNoted) > 1:
if not spNoted in self.unisonConfirm:
self.unisonConfirm.append(spNoted)
if len(self.unisonConfirm) > 0:
self.unisonPlayers = [[] for i in self.unisonConfirm]
for i in range(len(self.unisonConfirm)):
for j in range(len(self.spTimes)):
if self.unisonConfirm[i] in self.spTimes[j]:
self.unisonPlayers[i].append(j)
Log.debug("Unisons confirmed: " + str(self.unisonConfirm))
Log.debug("Unisons between: " + str(self.unisonPlayers))
#MFH - handle gathering / sizing / grouping line-by-line lyric display here, during initialization:
self.midiLyricLineEvents = [] #MFH - this is a list of sublists of tuples.
# The tuples will contain (time, event)
# The sublists will contain:
# references to Lyric text events that will be treated as lines
# such that the game can still use song position to determine each text event's color
self.midiLyricLines = [] #MFH - this is a list of text strings
# it will contain a list of the concactenated midi lines for a simpler lyric display mode
self.nextMidiLyricLine = ""
self.lyricHeight = 0
if self.midiLyricsEnabled > 0 and (self.midiLyricMode == 1 or self.midiLyricMode == 2) and not self.playingVocals: #line-by-line lyrics mode is selected and enabled:
lyricFont = self.engine.data.font
if self.theme == 2:
txtSize = 0.00170
else:
txtSize = 0.00175
self.lyricHeight = lyricFont.getStringSize("A", scale = txtSize)[1]
#MFH - now we need an appropriate array to store and organize the lyric events into "lines"
# -- the first attempt at coding this will probably butcher the measures and timing horribly, but at least
# those of us with older systems can read the lyrics without them jumping all over the place.
tempLyricLine = ""
tempLyricLineEvents = []
firstTime = None
for time, event in self.song.eventTracks[Song.TK_LYRICS].getAllEvents():
if not firstTime:
firstTime = time
lastLyricLineContents = tempLyricLine
tempLyricLine = tempLyricLine + " " + event.text
if lyricFont.getStringSize(tempLyricLine, scale = txtSize)[0] > self.lineByLineLyricMaxLineWidth:
self.midiLyricLineEvents.append(tempLyricLineEvents)
self.midiLyricLines.append( (firstTime, lastLyricLineContents) )
firstTime = None
tempLyricLine = event.text
tempLyricLineEvents = []
tempLyricLineEvents.append( (time, event) )
else: #after last line is accumulated
if len(self.midiLyricLines) > 0:
self.midiLyricLineEvents.append(tempLyricLineEvents)
self.midiLyricLines.append( (firstTime, tempLyricLine) )
#MFH - test unpacking / decoding the lyrical lines:
for midiLyricSubList in self.midiLyricLineEvents:
if self.logLyricEvents == 1:
Log.debug("...New MIDI lyric line:")
for lyricTuple in midiLyricSubList:
time, event = lyricTuple
if self.logLyricEvents == 1:
Log.debug("MIDI Line-by-line lyric unpack test - time, event = " + str(time) + ", " + event.text )
for lineStartTime, midiLyricSimpleLineText in self.midiLyricLines:
if self.logLyricEvents == 1:
Log.debug("MIDI Line-by-line simple lyric line starting at time: " + str(lineStartTime) + ", " + midiLyricSimpleLineText)
self.numMidiLyricLines = len(self.midiLyricLines)
#self.initializeStarScoringThresholds() #MFH
self.coOpTotalStreakNotes = 0
self.coOpTotalNotes = 0
coOpTotalStreakNotes = 0
coOpTotalNotes = 0
if self.coOpScoreCard:
self.coOpScoreCard.lastNoteTime = max(self.lastNoteTimes)
Log.debug("Last note for co-op mode found at %.2f" % self.coOpScoreCard.lastNoteTime)
for i, scoreCard in enumerate(self.scoring): #accumulate base scoring values for co-op
if self.coOpScoreCard:
self.coOpScoreCard.totalStreakNotes += scoreCard.totalStreakNotes
self.coOpScoreCard.totalNotes += scoreCard.totalNotes
self.coOpPlayerIndex = len(range(self.numOfPlayers))
if self.coOpScoreCard:
self.coOpScoreCard.totalStreakNotes -= totalBreNotes
#glorandwarf: need to store the song's beats per second (bps) for later
self.songBPS = self.song.bpm / 60.0
Dialogs.changeLoadingSplashScreenText(self.engine, splash, phrase + " \n " + _("Loading Graphics..."))
# evilynux - Load stage background(s)
if self.stage.mode == 3:
if Stage.videoAvailable:
songVideo = None
if self.song.info.video is not None:
songVideo = self.song.info.video
songVideoStartTime = self.song.info.video_start_time
songVideoEndTime = self.song.info.video_end_time
if songVideoEndTime == -1:
songVideoEndTime = None
self.stage.loadVideo(self.libraryName, self.songName,
songVideo = songVideo,
songVideoStartTime = songVideoStartTime,
songVideoEndTime = songVideoEndTime)
else:
Log.warn("Video playback is not supported. GStreamer or its python bindings can't be found")
self.engine.config.set("game", "stage_mode", 1)
self.stage.mode = 1
self.stage.load(self.libraryName, self.songName, self.playerList[0].practiceMode)
#MFH - this determination logic should happen once, globally -- not repeatedly.
self.showScriptLyrics = False
if not self.playingVocals:
if self.song.hasMidiLyrics and self.lyricMode == 3: #racer: new option for double lyrics
self.showScriptLyrics = False
elif not self.song.hasMidiLyrics and self.lyricMode == 3: #racer
self.showScriptLyrics = True
elif self.song.info.tutorial:
self.showScriptLyrics = True
elif self.lyricMode == 1 and self.song.info.lyrics: #lyrics: song.ini
self.showScriptLyrics = True
elif self.lyricMode == 2: #lyrics: Auto
self.showScriptLyrics = True
self.ready = True
#lyric sheet!
if not self.playingVocals:
if self.song.hasMidiLyrics and self.midiLyricsEnabled > 0:
if self.midiLyricMode == 0:
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet.png")):
self.lyricSheet = None
else:
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet2.png")):
if not self.engine.loadImgDrawing(self, "lyricSheet", os.path.join("themes",themename,"lyricsheet.png")):
self.lyricSheet = None
else:
self.lyricSheet = None
else:
self.lyricSheet = None
if self.lyricSheet:
imgwidth = self.lyricSheet.width1()
self.lyricSheetScaleFactor = 640.000/imgwidth
#brescorebackground.png
if self.engine.loadImgDrawing(self, "breScoreBackground", os.path.join("themes",themename,"brescorebackground.png")):
breScoreBackgroundImgwidth = self.breScoreBackground.width1()
self.breScoreBackgroundWFactor = 640.000/breScoreBackgroundImgwidth
else:
Log.debug("BRE score background image loading problem!")
self.breScoreBackground = None
self.breScoreBackgroundWFactor = None
#brescoreframe.png
if self.engine.loadImgDrawing(self, "breScoreFrame", os.path.join("themes",themename,"brescoreframe.png")):
breScoreFrameImgwidth = self.breScoreFrame.width1()
self.breScoreFrameWFactor = 640.000/breScoreFrameImgwidth
else:
#MFH - fallback on using soloframe.png if no brescoreframe.png is found
if self.engine.loadImgDrawing(self, "breScoreFrame", os.path.join("themes",themename,"soloframe.png")):
breScoreFrameImgwidth = self.breScoreFrame.width1()
self.breScoreFrameWFactor = 640.000/breScoreFrameImgwidth
else:
self.breScoreFrame = None
self.breScoreFrameWFactor = None
if self.engine.loadImgDrawing(self, "soloFrame", os.path.join("themes",themename,"soloframe.png")):
soloImgwidth = self.soloFrame.width1()
self.soloFrameWFactor = 640.000/soloImgwidth
#soloImgheight = self.soloFrame.height1()
#soloHeightYFactor = (640.000*self.hFull)/self.wFull
#self.soloFrameHFactor = soloHeightYFactor/soloImgheight
else:
self.soloFrame = None
self.soloFrameWFactor = None
#self.soloFrameHFactor = None
self.partImage = True
self.part = [None for i in self.playerList]
self.partLoad = None
if self.counting or self.coOpType:
for i in range(self.numOfPlayers):
if not self.partImage:
break
if self.instruments[i].isDrum:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"drum.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("drum.png")):
self.counting = False
self.partImage = False
elif self.instruments[i].isBassGuitar:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"bass.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("bass.png")):
self.counting = False
self.partImage = False
elif self.instruments[i].isVocal:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"mic.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("mic.png")):
self.counting = False
self.partImage = False
else:
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("themes",themename,"guitar.png")):
if not self.engine.loadImgDrawing(self, "partLoad", os.path.join("guitar.png")):
self.counting = False
self.partImage = False
if self.partLoad:
self.part[i] = self.partLoad
self.partLoad = None
if self.soloFrameMode == 0:
self.soloFrame = None
#self.soloFrameHFactor = None
self.soloFrameWFactor = None
#Pause Screen
self.engine.loadImgDrawing(self, "pauseScreen", os.path.join("themes",themename,"pause.png"))
if not self.engine.loadImgDrawing(self, "failScreen", os.path.join("themes",themename,"fail.png")):
self.engine.loadImgDrawing(self, "failScreen", os.path.join("themes",themename,"pause.png"))
#failMessage
self.engine.loadImgDrawing(self, "failMsg", os.path.join("themes",themename,"youfailed.png"))
#myfingershurt: youRockMessage
self.engine.loadImgDrawing(self, "rockMsg", os.path.join("themes",themename,"yourock.png"))
self.counterY = -0.1
self.coOpPhrase = 0
self.scaleText = [0.0 for i in self.playerList]
self.displayText = [None for i in self.playerList]
self.displayTextScale = [0.0 for i in self.playerList]
#self.streakFlag = None #QQstarS:Set the flag,to show which one has reach the 50 note
self.textTimer = [0.0 for i in self.playerList]
#self.textChanged = False
self.textY = [.3 for i in self.playerList]
self.scaleText2 = [0.0 for i in self.playerList]
self.goingUP = [False for i in self.playerList]
if self.battleGH:
self.battleJustUsed = [0 for i in self.playerList]
self.battleText = [None for i in self.playerList]
self.battleTextTimer = [0.0 for i in self.playerList]
self.lastStreak = [0 for i in self.playerList]
if self.coOpType:
self.coOpPhrase = len(self.scaleText)
self.scaleText.append(0.0)
self.displayText.append(None)
self.displayTextScale.append(0.0)
self.textTimer.append(0.0)
self.textY.append(.3)
self.scaleText2.append(0.0)
self.goingUP.append(False)
self.lastStreak.append(0)
self.killswitchEngaged = [None for i in self.playerList]
#MFH - retrieve theme.ini pause background & text positions
self.pause_bkg = [float(i) for i in self.engine.theme.pause_bkg_pos]
self.pause_text_x = self.engine.theme.pause_text_xPos
self.pause_text_y = self.engine.theme.pause_text_yPos
if self.pause_text_x == None:
self.pause_text_x = .3
if self.pause_text_y == None:
self.pause_text_y = .31
#MFH - new theme.ini color options:
self.pause_text_color = self.engine.theme.hexToColor(self.engine.theme.pause_text_colorVar)
self.pause_selected_color = self.engine.theme.hexToColor(self.engine.theme.pause_selected_colorVar)
self.fail_text_color = self.engine.theme.hexToColor(self.engine.theme.fail_text_colorVar)
self.fail_selected_color = self.engine.theme.hexToColor(self.engine.theme.fail_selected_colorVar)
self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.fail_completed_colorVar)
settingsMenu = Settings.GameSettingsMenu(self.engine, self.pause_text_color, self.pause_selected_color, players = self.playerList)
careerSettingsMenu = Settings.GameCareerSettingsMenu(self.engine, self.pause_text_color, self.pause_selected_color, players = self.playerList)
settingsMenu.fadeScreen = False
careerSettingsMenu.fadeScreen = False
# evilynux - More themeable options
self.rockmeter_score_color = self.engine.theme.rockmeter_score_colorVar
#self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.song_name_selected_colorVar) # text same color as selected song
#self.fail_completed_color = self.engine.theme.hexToColor(self.engine.theme.fail_text_colorVar) #No, now same as fail_text color.
self.ingame_stats_color = self.engine.theme.ingame_stats_colorVar
if self.pause_text_color == None:
self.pause_text_color = (1,1,1)
if self.pause_selected_color == None:
self.pause_selected_color = (1,0.75,0)
if self.fail_text_color == None:
self.fail_text_color = (1,1,1)
if self.fail_selected_color == None:
self.fail_selected_color = (1,0.75,0)
if self.fail_completed_color == None:
self.fail_completed_color = self.fail_text_color
Log.debug("Pause text / selected colors: " + str(self.pause_text_color) + " / " + str(self.pause_selected_color))
#racer: theme.ini fail positions
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
self.fail_bkg = [float(i) for i in self.engine.theme.fail_bkg_pos]
self.fail_text_x = self.engine.theme.fail_text_xPos
self.fail_text_y = self.engine.theme.fail_text_yPos
self.failSongPos=(self.engine.theme.fail_songname_xPos,self.engine.theme.fail_songname_yPos)
if self.fail_text_x == None:
self.fail_text_x = .5-size[0]/2.0
if self.fail_text_y == None:
self.fail_text_y = .47
if self.theme == 1: #GH3-like theme
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong), #Worldrave adjusted proper spacing.
(_(" RESTART"), self.restartSong),
#(_(" GIVE UP"), self.changeSong), *Worldrave-commented out just to match GH3. Since this is a GH3 specific instruction.
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" OPTIONS"), careerSettingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color, append_submenu_char = False)
else:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
# (_(" GIVE UP"), self.changeSong),
(_(" END SONG"), self.endSong),
(_(" OPTIONS"), settingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color, append_submenu_char = False)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_("RETRY SONG"), self.restartAfterFail),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Failed menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_("RETRY SONG"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
#FirstTime = True
#self.restartSong(FirstTime)
elif self.theme == 0: #GH2-like theme
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" Resume"), self.resumeSong),
(_(" Start Over"), self.restartSong),
(_(" Change Song"), self.changeSong),
(_(" Practice"), self.practiceSong), #evilynux
(_(" Settings"), careerSettingsMenu),
(_(" Quit to Main Menu"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
else:
self.menu = Menu(self.engine, [
(_(" Resume"), self.resumeSong),
(_(" Start Over"), self.restartSong),
(_(" Change Song"), self.changeSong),
(_(" End Song"), self.endSong),
(_(" Settings"), settingsMenu),
(_(" Quit to Main Menu"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_(" Try Again?"), self.restartAfterFail),
(_(" Give Up?"), self.changeAfterFail),
(_(" Practice?"), self.practiceSong), #evilynux
(_("Quit to Main"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Fail menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_(" Try Again?"), self.restartAfterFail),
(_(" Give Up?"), self.changeAfterFail),
(_("Quit to Main"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
#FirstTime = True
#self.restartSong(FirstTime)
elif self.theme == 2: #RB-like theme
size = self.engine.data.pauseFont.getStringSize("Quit to Main Menu")
if self.careerMode:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
(_(" CHANGE SONG"), self.changeSong),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" SETTINGS"), careerSettingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerpause" for Career Pause menu in below line.
], name = "careerpause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
else:
self.menu = Menu(self.engine, [
(_(" RESUME"), self.resumeSong),
(_(" RESTART"), self.restartSong),
(_(" CHANGE SONG"), self.changeSong),
(_(" END SONG"), self.endSong),
(_(" SETTINGS"), settingsMenu),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "pause" for Pause menu in below line.
], name = "pause", fadeScreen = False, onClose = self.resumeGame, font = self.engine.data.pauseFont, pos = (self.pause_text_x, self.pause_text_y), textColor = self.pause_text_color, selectedColor = self.pause_selected_color)
size = self.engine.data.pauseFont.getStringSize("Quit to Main")
if self.careerMode:
self.failMenu = Menu(self.engine, [
(_(" RETRY"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" PRACTICE"), self.practiceSong), #evilynux
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "careerfail" for Career Fail menu in below line.
], name = "careerfail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
else:
self.failMenu = Menu(self.engine, [
(_(" RETRY"), self.restartAfterFail),
(_(" NEW SONG"), self.changeAfterFail),
(_(" QUIT"), self.quit), #Worldrave - added graphic menu support "fail" for Fail menu in below line.
], name = "fail", fadeScreen = False, onCancel = self.changeAfterFail, font = self.engine.data.pauseFont, pos = (self.fail_text_x, self.fail_text_y), textColor = self.fail_text_color, selectedColor = self.fail_selected_color)
self.restartSong(firstTime = True)
# hide the splash screen
Dialogs.hideLoadingSplashScreen(self.engine, splash)
splash = None
self.engine.createdGuitarScene = False
    #MFH - end of GuitarScene client initialization routine
def pauseGame(self):
if self.song and self.song.readyToGo:
self.song.pause()
self.pausePos = self.getSongPosition()
self.pause = True
for instrument in self.instruments:
instrument.paused = True
if instrument.isVocal:
instrument.stopMic()
else:
instrument.neck.paused = True
def failGame(self):
self.engine.view.pushLayer(self.failMenu)
if self.song and self.song.readyToGo and self.pause: #akedrou - don't let the pause menu overlap the fail menu.
self.engine.view.popLayer(self.menu)
self.pause = False
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.stopMic()
else:
instrument.neck.paused = False
self.failEnd = True
def resumeGame(self):
self.loadSettings()
self.setCamera()
if self.resumeCountdownEnabled and not self.failed and not self.countdown:
self.resumeCountdownSeconds = 3
self.resumeCountdown = float(self.resumeCountdownSeconds) * self.songBPS
self.pause = False
else:
if self.song and self.song.readyToGo:
if not self.failed: #akedrou - don't resume the song if you have already failed.
self.song.unpause()
self.pause = False
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.startMic()
else:
instrument.neck.paused = False
def resumeSong(self):
self.engine.view.popLayer(self.menu)
self.resumeGame()
def lostFocus(self): #akedrou - catch to pause on lostFocus
if self.song and self.song.readyToGo:
if not self.failed and not self.pause and self.lostFocusPause == True:
self.engine.view.pushLayer(self.menu)
self.pauseGame()
def setCamera(self):
#x=0 middle
#x=1 rotate left
#x=-1 rotate right
#y=3 middle
#y=4 rotate back
#y=2 rotate front
#z=-3
if self.rmtype == 3:
self.camera.target = (0.0, 1.4, 1.8)
self.camera.origin = (0.0, 2.8, -3.6)
elif self.customPOV:
self.camera.target = (self.targetX, self.targetY, self.targetZ)
self.camera.origin = (self.originX, self.originY*self.boardY, self.originZ)
else:
if self.pov == 1: #GH3
self.camera.target = (0.0, 0.6, 4.4)
self.camera.origin = (0.0, 3.5*self.boardY, -3.8)
elif self.pov == 2: #RB
self.camera.target = (0.0, 0.0, 3.7)
self.camera.origin = (0.0, 2.9*self.boardY, -2.9)
elif self.pov == 3: #GH2
self.camera.target = (0.0, 1.6, 2.0)
self.camera.origin = (0.0, 2.6*self.boardY, -3.6)
elif self.pov == 4: #Rock Rev
self.camera.target = (0.0, -6.0, 2.6666666666)
self.camera.origin = (0.0, 6.0, 2.6666666665)
elif self.pov == 5: #Theme
if self.rmtype == 0:
self.camera.target = (0.0, 1.6, 2.0)
self.camera.origin = (0.0, 2.6*self.boardY, -3.6)
elif self.rmtype == 1:
self.camera.target = (0.0, 0.6, 4.4) #Worldrave - Perfected the proper GH3 POV
self.camera.origin = (0.0, 3.5*self.boardY, -3.8)
elif self.rmtype == 2:
self.camera.target = (0.0, 0.0, 3.7)
self.camera.origin = (0.0, 2.9*self.boardY, -2.9)
else: # FoF
self.camera.target = (0.0, 0.0, 4.0)
self.camera.origin = (0.0, 3.0*self.boardY, -3.0)
  def freeResources(self):
    """Release UI surfaces, song data and per-scene objects.

    Called when leaving / restarting the scene so large resources are
    dropped before the next scene is created.
    """
    self.engine.view.setViewport(1,0)
    # Drop references to HUD / menu surfaces so they can be collected.
    self.counter = None
    self.failScreen = None
    self.failMsg = None
    self.menu = None
    self.mult = None
    self.pauseScreen = None
    self.rockTop = None
    self.rockMsg = None
    # Close any open microphone streams before tearing the song down.
    for instrument in self.instruments:
      if instrument.isVocal:
        instrument.stopMic()
    #MFH - Ensure all event tracks are destroyed before removing Song object!
    if self.song:
      self.song.tracks = None
      self.song.eventTracks = None
      self.song.midiEventTracks = None
      # Undo any leftover whammy pitch bend before discarding the song.
      if self.whammyEffect == 1:
        self.song.resetInstrumentPitch(-1)
    self.song = None
    #MFH - additional cleanup!
    self.lyricSheet = None
    self.starWhite = None
    self.starGrey = None
    self.starPerfect = None
    self.starGrey1 = None
    self.starGrey2 = None
    self.starGrey3 = None
    self.starGrey4 = None
    self.starGrey5 = None
    self.starGrey6 = None
    self.starGrey7 = None
    self.part = [None for i in self.playerList]
    # Break scorecard -> note-event references (they point into song data).
    for scoreCard in self.scoring:
      scoreCard.lastNoteEvent = None
    if self.coOpType:
      self.coOpScoreCard.lastNoteEvent = None
    # Stage mode 3 runs a video layer; pop it if video support is available.
    if self.stage.mode == 3 and Stage.videoAvailable:
      self.engine.view.popLayer(self.stage.vidPlayer)
def getHandicap(self):
hopoFreq = self.engine.config.get("coffee", "hopo_frequency")
try:
songHopo = int(self.song.info.hopofreq)
except Exception, e:
songHopo = 1
for i, scoreCard in enumerate(self.scoring):
if self.instruments[i].isVocal:
if self.engine.audioSpeedFactor != 1 or scoreCard.earlyHitWindowSizeHandicap != 1.0: #scalable handicaps
if (scoreCard.handicap>>1)&1 != 1:
scoreCard.handicap += 0x2
if self.coOpType:
if (self.coOpScoreCard.handicap>>1)&1 != 1:
self.coOpScoreCard.handicap += 0x2
if not self.failingEnabled:
if (scoreCard.handicap>>2)&1 != 1:
scoreCard.handicap += 0x4
if self.coOpType:
if (self.coOpScoreCard.handicap>>2)&1 != 1:
self.coOpScoreCard.handicap += 0x4
continue
if self.gh2sloppy == 1 and not self.instruments[i].isDrum: # or self.rb2sloppy == 1:
if (scoreCard.handicap)&1 != 1:
scoreCard.handicap += 1
if self.coOpType:
if self.coOpScoreCard.handicap&1 != 1:
self.coOpScoreCard.handicap += 1
if self.engine.audioSpeedFactor != 1 or scoreCard.earlyHitWindowSizeHandicap != 1.0: #scalable handicaps
if (scoreCard.handicap>>1)&1 != 1:
scoreCard.handicap += 0x2
if self.coOpType:
if (self.coOpScoreCard.handicap>>1)&1 != 1:
self.coOpScoreCard.handicap += 0x2
if not self.failingEnabled:
if (scoreCard.handicap>>2)&1 != 1:
scoreCard.handicap += 0x4
if self.coOpType:
if (self.coOpScoreCard.handicap>>2)&1 != 1:
self.coOpScoreCard.handicap += 0x4
if self.instruments[i].twoChordApply:
if (scoreCard.handicap>>3)&1 != 1:
scoreCard.handicap += 0x8
if self.coOpType:
if (self.coOpScoreCard.handicap>>3)&1 != 1:
self.coOpScoreCard.handicap += 0x8
if self.instruments[i].hitw == 0.70:
if (scoreCard.handicap>>4)&1 != 1:
scoreCard.handicap += 0x10
if self.coOpType:
if (self.coOpScoreCard.handicap>>4)&1 != 1:
self.coOpScoreCard.handicap += 0x10
elif self.instruments[i].hitw == 1.0:
if (scoreCard.handicap>>5)&1 != 1:
scoreCard.handicap += 0x20
if self.coOpType:
if (self.coOpScoreCard.handicap>>5)&1 != 1:
self.coOpScoreCard.handicap += 0x20
elif self.instruments[i].hitw == 1.9:
if (scoreCard.handicap>>6)&1 != 1:
scoreCard.handicap += 0x40
if self.coOpType:
if (self.coOpScoreCard.handicap>>6)&1 != 1:
self.coOpScoreCard.handicap += 0x40
elif self.instruments[i].hitw == 2.3:
if (scoreCard.handicap>>7)&1 != 1:
scoreCard.handicap += 0x80
if self.coOpType:
if (self.coOpScoreCard.handicap>>7)&1 != 1:
self.coOpScoreCard.handicap += 0x80
if self.hopoStyle == 0 and not self.instruments[i].isDrum: #no taps
if (scoreCard.handicap>>8)&1 != 1:
scoreCard.handicap += 0x100
if self.coOpType:
if (self.coOpScoreCard.handicap>>8)&1 != 1:
self.coOpScoreCard.handicap += 0x100
elif hopoFreq == 0 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>9)&1 != 1:
scoreCard.handicap += 0x200
if self.coOpType:
if (self.coOpScoreCard.handicap>>9)&1 != 1:
self.coOpScoreCard.handicap += 0x200
elif hopoFreq == 1 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>10)&1 != 1:
scoreCard.handicap += 0x400
if self.coOpType:
if (self.coOpScoreCard.handicap>>10)&1 != 1:
self.coOpScoreCard.handicap += 0x400
elif hopoFreq == 3 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>11)&1 != 1:
scoreCard.handicap += 0x800
if self.coOpType:
if (self.coOpScoreCard.handicap>>11)&1 != 1:
self.coOpScoreCard.handicap += 0x800
elif hopoFreq == 4 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>12)&1 != 1:
scoreCard.handicap += 0x1000
if self.coOpType:
if (self.coOpScoreCard.handicap>>12)&1 != 1:
self.coOpScoreCard.handicap += 0x1000
elif hopoFreq == 5 and songHopo != 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>13)&1 != 1:
scoreCard.handicap += 0x2000
if self.coOpType:
if (self.coOpScoreCard.handicap>>13)&1 != 1:
self.coOpScoreCard.handicap += 0x2000
elif self.allTaps == 1 and not self.instruments[i].isDrum:
if (scoreCard.handicap>>14)&1 != 1:
scoreCard.handicap += 0x4000
if self.coOpType:
if (self.coOpScoreCard.handicap>>14)&1 != 1:
self.coOpScoreCard.handicap += 0x4000
if self.whammySavesSP and not self.instruments[i].isDrum:
if (scoreCard.handicap>>15)&1 != 1:
scoreCard.handicap += 0x8000
if self.coOpType:
if (self.coOpScoreCard.handicap>>15)&1 != 1:
self.coOpScoreCard.handicap += 0x8000
if self.autoPlay and self.jurg[i]:
if (scoreCard.handicap>>16)&1 != 1:
scoreCard.handicap += 0x10000
if self.coOpType:
if (self.coOpScoreCard.handicap>>16)&1 != 1:
self.coOpScoreCard.handicap += 0x10000
if self.playerAssist[i] == 1:
if (scoreCard.handicap>>17)&1 != 1:
scoreCard.handicap += 0x20000
if self.coOpType:
if (self.coOpScoreCard.handicap>>17)&1 != 1:
self.coOpScoreCard.handicap += 0x20000
if self.playerAssist[i] == 2:
if (scoreCard.handicap>>18)&1 != 1:
scoreCard.handicap += 0x40000
if self.coOpType:
if (self.coOpScoreCard.handicap>>18)&1 != 1:
self.coOpScoreCard.handicap += 0x40000
if self.playerAssist[i] == 3:
if (scoreCard.handicap>>19)&1 != 1:
scoreCard.handicap += 0x80000
if self.coOpType:
if (self.coOpScoreCard.handicap>>19)&1 != 1:
self.coOpScoreCard.handicap += 0x80000
scoreCard.updateHandicapValue()
if self.coOpType:
self.coOpScoreCard.updateHandicapValue()
  def loadSettings(self):
    """Re-read gameplay-affecting config values and apply them to the scene.

    Refreshes volumes, Jurgen (auto-play) settings, HOPO configuration,
    point of view, per-player instrument flags and key lists, and — if a
    song is already loaded — re-applies handicap and volume state to it.
    """
    self.stage.updateDelays()
    # Audio volumes.
    self.activeVolume = self.engine.config.get("audio", "guitarvol")
    self.screwUpVolume = self.engine.config.get("audio", "screwupvol")
    self.killVolume = self.engine.config.get("audio", "kill_volume")
    #self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
    self.crowdVolume = self.engine.config.get("audio", "crowd_volume") #akedrou
    self.crowdsEnabled = self.engine.config.get("audio", "enable_crowd_tracks")
    #self.engine.data.sfxVolume = self.sfxVolume #MFH - keep Data updated
    self.engine.data.crowdVolume = self.crowdVolume
    #MFH - now update volume of all screwup sounds and other SFX:
    self.engine.data.SetAllScrewUpSoundFxObjectVolumes(self.screwUpVolume)
    #self.engine.data.SetAllSoundFxObjectVolumes(self.sfxVolume)
    #Re-apply Jurgen Settings -- Spikehead777
    self.autoPlay = False
    self.jurg = [False for i in self.playerList]
    self.jurgenLogic = [0 for i in self.playerList]
    self.aiSkill = [0 for i in self.playerList]
    for i, player in enumerate(self.playerList):
      jurgen = self.engine.config.get("game", "jurg_p%d" % i)
      if jurgen == True:
        self.jurg[i] = True
        self.autoPlay = True
      self.aiSkill[i] = self.engine.config.get("game", "jurg_skill_p%d" % i)
      if player.part.id == Song.VOCAL_PART:
        self.instruments[i].jurgenEnabled = jurgen
        self.instruments[i].jurgenSkill = self.aiSkill[i]
      self.jurgenLogic[i] = self.engine.config.get("game", "jurg_logic_p%d" % i)
    #MFH - no Jurgen in Career mode.
    if self.careerMode:
      self.autoPlay = False
    # Boss battles force Jurgen on for player 2 (the boss).
    if self.bossBattle:
      self.autoPlay = True
      self.jurg = [False for i in self.playerList]
      self.jurg[1] = True
    self.hopoStyle = self.engine.config.get("game", "hopo_system")
    self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
    self.allTaps = 0
    self.autoKickBass = [0 for i in self.playerList]
    # GH2 sloppy mode implies HOPO style 4.
    if self.gh2sloppy == 1:
      self.hopoStyle = 4
    self.hopoAfterChord = self.engine.config.get("game", "hopo_after_chord")
    self.pov = self.engine.config.get("fretboard", "point_of_view")
    #CoffeeMod
    #self.controls = self.engine.input.controls
    self.activeGameControls = self.engine.input.activeGameControls
    # Per-player instrument flags (vocal parts are skipped entirely).
    for i,player in enumerate(self.playerList):
      if player.part.id == Song.VOCAL_PART:
        continue
      self.instruments[i].leftyMode = False
      self.instruments[i].twoChordMax = False
      self.instruments[i].drumFlip = False
      if player.lefty > 0:
        self.instruments[i].leftyMode = True
      if player.drumflip > 0:
        self.instruments[i].drumFlip = True
      if player.twoChordMax > 0:
        self.instruments[i].twoChordMax = True
    # Build the per-player key list; vocals get an empty list and skip the
    # two-chord control check below.
    self.keysList = []
    for i, player in enumerate(self.playerList):
      if self.instruments[i].isDrum:
        self.keysList.append(player.drums)
      elif self.instruments[i].isVocal:
        self.keysList.append([])
        continue
      else:
        self.keysList.append(player.keys)
      if not self.instruments[i].twoChordMax:
        if self.controls.twoChord[self.activeGameControls[i]] > 0:
          self.instruments[i].twoChordMax = True
    if self.song and self.song.readyToGo:
      self.getHandicap() #akedrou - to be sure scoring objects are created.
      #myfingershurt: ensure that after a pause or restart, the a/v sync delay is refreshed:
      self.song.refreshAudioDelay()
      #myfingershurt: ensuring the miss volume gets refreshed:
      self.song.refreshVolumes()
      self.song.setAllTrackVolumes(1)
      if self.crowdsCheering == True:
        self.song.setCrowdVolume(1)
      else:
        self.song.setCrowdVolume(0.0)
def songLoaded(self, song):
for i, player in enumerate(self.playerList):
if self.instruments[i].isVocal:
song.difficulty[i] = Song.difficulties[Song.EXP_DIF] #for track-finding purposes! Don't change this, ok?
continue
song.difficulty[i] = player.difficulty
if self.bossBattle == True:
song.difficulty[1] = song.difficulty[0]
self.song.readyToGo = False
def endSong(self):
self.engine.view.popLayer(self.menu)
validScoreFound = False
for scoreCard in self.scoring: #MFH - what if 2p (human) battles 1p (Jurgen / CPU)? He needs a valid score too!
if scoreCard.score > 0:
validScoreFound = True
break
if self.coOpType:
if self.coOpScoreCard.score > 0:
validScoreFound = True
if validScoreFound:
#if self.player.score > 0:
self.goToResults()
else:
self.changeSong()
def quit(self):
if self.song:
self.song.stop()
self.resetVariablesToDefaults()
self.done = True
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
self.engine.world.finishGame()
# evilynux - Switch to Practice
def practiceSong(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
self.engine.world.gameMode = 1
self.engine.world.createScene("SongChoosingScene")
def changeSong(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.menu)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
# self.session.world.deleteScene(self)
self.engine.world.createScene("SongChoosingScene")
def changeAfterFail(self):
if self.song:
self.song.stop()
self.song = None
self.resetVariablesToDefaults()
# evilynux - Reset speed
self.engine.setSpeedFactor(1.0)
self.engine.view.setViewport(1,0)
self.engine.view.popLayer(self.failMenu)
self.freeResources()
# self.session.world.deleteScene(self)
self.engine.world.createScene("SongChoosingScene")
def initBeatAndSpClaps(self):
###Capo###
if self.song:
self.beatTime = []
if (self.starClaps or self.beatClaps):
for time, event in self.song.track[0].getAllEvents():
if isinstance(event, Bars):
if (event.barType == 1 or event.barType == 2):
self.beatTime.append(time)
###endCapo###
  def resetVariablesToDefaults(self):
    """Reset all per-song runtime state ahead of a (re)start.

    Clears countdowns, HUD text, score cards, star power, solo and unison
    tracking, battle state, lyric display progress, tempo state and
    per-instrument flags so a restarted song behaves like a fresh one.
    Statement order matters here; several later resets depend on lists
    rebuilt earlier in the method.
    """
    if self.song:
      self.song.readyToGo = False
    #self.countdown = 4.0 * self.songBPS
    self.countdownSeconds = 3 #MFH - This needs to be reset for song restarts, too!
    self.countdown = float(self.countdownSeconds) * self.songBPS
    # Per-player scaling text / HUD message state.
    self.scaleText = [0.0 for i in self.playerList]
    self.displayText = [None for i in self.playerList]
    self.displayTextScale = [0.0 for i in self.playerList]
    self.textTimer = [0.0 for i in self.playerList]
    self.textY = [.3 for i in self.playerList]
    self.scaleText2 = [0.0 for i in self.playerList]
    self.goingUP = [False for i in self.playerList]
    self.lastStreak = [0 for i in self.playerList]
    if self.coOpType:
      # Co-op gets one extra slot at the end of each HUD list.
      self.coOpPhrase = len(self.scaleText)
      self.scaleText.append(0.0)
      self.displayText.append(None)
      self.displayTextScale.append(0.0)
      self.textTimer.append(0.0)
      self.textY.append(.3)
      self.scaleText2.append(0.0)
      self.goingUP.append(False)
      self.lastStreak.append(0)
    self.midiLyricLineIndex = 0
    self.drumStart = False #faaa's drum sound mod restart
    self.dispAccuracy = [False for i in self.playerList]
    for instrument in self.instruments:
      instrument.spEnabled = True
      instrument.bigRockEndingMarkerSeen = False
    #self.partialStar = [0 for i in self.playerList]
    #self.starRatio = [0.0 for i in self.playerList]
    for scoreCard in self.scoring:
      scoreCard.reset()
    self.crowdsCheering = False #akedrou
    if self.coOpType:
      self.coOpScoreCard.reset()
      self.coOpStarPower = 0
      self.coOpStarPowerTimer = 0
      self.coOpStarPowerActive = [0 for i in self.playerList]
    self.mutedLastSecondYet = False
    # Guitar solo / unison tracking.
    self.dispSoloReview = [False for i in self.playerList]
    self.soloReviewCountdown = [0 for i in self.playerList]
    self.guitarSoloAccuracy = [0.0 for i in self.playerList]
    self.guitarSoloActive = [False for i in self.playerList]
    self.currentGuitarSolo = [0 for i in self.playerList]
    self.guitarSoloBroken = [False for i in self.playerList]
    self.inUnison = [False for i in self.playerList]
    self.haveUnison = [False for i in self.playerList]
    self.firstUnison = False
    self.firstUnisonDone = False
    self.unisonNum = 0
    self.unisonIndex = 0
    self.unisonActive = False
    self.unisonEarn = [False for i in self.playerList]
    self.resumeCountdown = 0
    self.resumeCountdownSeconds = 0
    self.pausePos = 0
    self.failTimer = 0 #myfingershurt
    self.rockTimer = 0 #myfingershurt
    self.youRock = False #myfingershurt
    self.rockFinished = False #myfingershurt
    # Rock meter: in GH battle sudden death the meter carries over.
    if self.battleGH:
      if not self.battleSuddenDeath:
        self.rock = [self.rockMax/2 for i in self.playerList]
    else:
      self.rock = [self.rockMax/2 for i in self.playerList]
    self.minusRock = [0.0 for i in self.playerList]
    self.plusRock = [0.0 for i in self.playerList]
    self.coOpMulti = 1
    self.deadPlayerList = []
    self.numDeadPlayers = 0
    self.coOpFailDone = [False for i in self.playerList]
    self.rockFailUp = True
    self.rockFailViz = 0.0
    self.failViz = [0.0 for i in self.playerList]
    if self.coOpRB:
      self.rock.append(self.rockMax/2)
      self.minusRock.append(0.0)
      self.plusRock.append(0.0)
      self.timesFailed = [0 for i in self.playerList]
    if self.battleGH:
      self.battleJustUsed = [0 for i in self.playerList]
    for instrument in self.instruments:
      if self.battleGH:
        if not self.battleSuddenDeath:
          instrument.battleObjects = [0] * 3
          instrument.battleSuddenDeath = False
        instrument.battleStatus = [False] * 9
        instrument.battleBeingUsed = [0] * 2
        #self.guitars[i].battleDiffUp = False
        #self.guitars[i].battleLefty = False
        #self.guitars[i].battleWhammy = False
        #self.guitars[i].battleAmp = False
      instrument.starPower = 0
      instrument.coOpFailed = False
      #volshebnyi - BRE variables reset
      instrument.freestyleStart = 0
      instrument.freestyleFirstHit = 0
      instrument.freestyleLength = 0
      instrument.freestyleBonusFret = 0
      if instrument.isDrum:
        instrument.drumFillsCount = 0
        instrument.drumFillsHits = 0
      instrument.freestyleLastFretHitTime = [0 for i in range(5)]
      if instrument.isVocal:
        instrument.doneLastPhrase = False
        instrument.phraseIndex = 0
        instrument.currentTapPhrase = -1
        instrument.phraseInTune = 0
        instrument.phraseNoteTime = 0
        instrument.phraseTaps = 0
        instrument.phraseTapsHit = 0
    #volshebnyi - shaders reset
    shaders.reset()
    if shaders.turnon:
      for i, player in enumerate(self.playerList):
        shaders.var["fret"][i]=[-10.0]*5
        shaders.var["fretpos"][i]=[-10.0]*5
        shaders.var["color"][i]=(.0,)*4
        shaders.var["scoreMult"][i]=1
        shaders.var["multChangePos"][i]=-10.0
    self.failed = False
    self.battleSuddenDeath = False
    self.finalFailed = False
    self.failEnd = False
    self.drumScoringEnabled = True #MFH
    self.initBeatAndSpClaps()
    #MFH - init vars for the next time & lyric line to display
    self.midiLyricLineIndex = 0
    self.nextMidiLyricStartTime = 0
    if ( self.numMidiLyricLines > 0 ):
      self.nextMidiLyricStartTime, self.nextMidiLyricLine = self.midiLyricLines[self.midiLyricLineIndex]
    #MFH - initialize word-by-word 2-line MIDI lyric display / highlighting system:
    self.activeMidiLyricLine_GreyWords = ""
    self.activeMidiLyricLine_GreenWords = ""
    self.activeMidiLyricLine_WhiteWords = ""
    self.activeMidiLyricLineIndex = 0
    self.activeMidiLyricWordSubIndex = 0
    self.numWordsInCurrentMidiLyricLine = 0
    self.currentSimpleMidiLyricLine = ""
    self.nextLyricWordTime = 0
    self.nextLyricEvent = None
    self.nextLyricIsOnNewLine = False
    #MFH - reset global tempo variables
    self.currentBpm = Song.DEFAULT_BPM
    self.currentPeriod = 60000.0 / self.currentBpm
    self.targetBpm = self.currentBpm
    self.lastBpmChange = -1.0
    self.baseBeat = 0.0
    if self.midiLyricMode == 2 and not self.playingVocals:
      if self.numMidiLyricLines > self.activeMidiLyricLineIndex:
        self.numWordsInCurrentMidiLyricLine = 0
        for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
          self.numWordsInCurrentMidiLyricLine += 1
        if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
          self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
        else:
          self.noMoreMidiLineLyrics = True #t'aint no lyrics t'start wit!
        #self.activeMidiLyricWordSubIndex += 1
        for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
          self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
        if self.numMidiLyricLines > self.activeMidiLyricLineIndex+2: #is there a second line of lyrics?
          tempTime, self.currentSimpleMidiLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex+1]
    for player in self.playerList:
      player.reset()
    self.stage.reset()
    self.enteredCode = []
    self.jurgPlayer = [False for i in self.playerList] #Jurgen hasn't played the restarted song =P
    for instrument in self.instruments:
      instrument.scoreMultiplier = 1
      if instrument.isVocal:
        instrument.phraseIndex = 0
        instrument.currentTapPhrase = -1
        instrument.tapNoteHits = [0 for i in instrument.tapNoteTotals]
        instrument.currentPhraseTime = 0
        instrument.currentPhraseLength = 0
        instrument.activePhrase = None
        continue
      # Non-vocal instruments: clear HOPO and solo state.
      instrument.twoChord = 0
      instrument.hopoActive = 0
      instrument.wasLastNoteHopod = False
      instrument.sameNoteHopoString = False
      instrument.hopoLast = -1
      instrument.guitarSolo = False
      instrument.neck.guitarSolo = False
      instrument.currentGuitarSoloHitNotes = 0
    if self.partyMode == True:
      self.instruments[0].keys = self.playerList[0].keys
      self.instruments[0].actions = self.playerList[0].actions
      self.keysList = self.playerList[0].keys
    if self.battle == True:
      for i in range(self.numOfPlayers):
        self.instruments[i].actions = self.playerList[i].actions
    self.engine.collectGarbage()
    self.boardY = 2
    self.setCamera()
    if self.song:
      self.song.readyToGo = True
def restartSong(self, firstTime = False): #QQstarS: Fix this function
self.resetVariablesToDefaults()
self.engine.data.startSound.play()
self.engine.view.popLayer(self.menu)
if not self.song:
return
# glorandwarf: the countdown is now the number of beats to run
# before the song begins
self.partySwitch = 0
for instrument in self.instruments:
if instrument.isVocal:
instrument.stopMic()
else:
instrument.endPick(0) #akedrou: this is the position of the song, not a player number!
self.song.stop()
self.initBeatAndSpClaps()
if self.stage.mode == 3:
self.stage.restartVideo()
def restartAfterFail(self): #QQstarS: Fix this function
self.resetVariablesToDefaults()
self.engine.data.startSound.play()
self.engine.view.popLayer(self.failMenu)
if not self.song:
return
self.partySwitch = 0
for i,instrument in enumerate(self.instruments):
if instrument.isVocal:
instrument.stopMic()
else:
instrument.endPick(0)
self.song.stop()
#MFH - unnecessary re-marking of HOPOs
#for i, guitar in enumerate(self.guitars):
# #myfingershurt: next line commented to prevent everthickening BPM lines
# if self.hopoStyle > 0 or self.song.info.hopo == "on":
# if self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO system
# self.song.track[i].markHopoGH2(self.song.info.eighthNoteHopo, self.hopoAfterChord, self.song.info.hopofreq)
# elif self.hopoStyle == 1: #RF-Mod style HOPO system
# self.song.track[i].markHopoRF(self.song.info.eighthNoteHopo, self.song.info.hopofreq)
def startSolo(self, playerNum): #MFH - more modular and general handling of solos
i = playerNum
#Guitar Solo Start
self.currentGuitarSoloTotalNotes[i] = self.guitarSolos[i][self.currentGuitarSolo[i]]
self.guitarSoloBroken[i] = False
self.instruments[i].guitarSolo = True
if not self.instruments[i].isVocal:
self.instruments[i].neck.guitarSolo = True
#self.displayText[i] = _("Guitar Solo!")
instrumentSoloString = "%s %s" % (self.playerList[i].part.text, self.tsSolo)
if self.phrases > 1:
self.newScalingText(self.playerList[i].number, instrumentSoloString )
#self.sfxChannel.setVolume(self.sfxVolume)
self.engine.data.crowdSound.play()
def endSolo(self, playerNum): #MFH - more modular and general handling of solos
i = playerNum
#Guitar Solo End
self.instruments[i].guitarSolo = False
if not self.instruments[i].isVocal:
self.instruments[i].neck.guitarSolo = False
#self.sfxChannel.setVolume(self.sfxVolume) #liquid
self.guitarSoloAccuracy[i] = (float(self.instruments[i].currentGuitarSoloHitNotes) / float(self.currentGuitarSoloTotalNotes[i]) ) * 100.0
if not self.guitarSoloBroken[i]: #backup perfect solo detection
if self.instruments[i].currentGuitarSoloHitNotes > 0: #MFH - need to make sure someone didn't just not play a guitar solo at all - and still wind up with 100%
self.guitarSoloAccuracy[i] = 100.0
if self.guitarSoloAccuracy[i] > 100.0:
self.guitarSoloAccuracy[i] = 100.0
if self.guitarSoloBroken[i] and self.guitarSoloAccuracy[i] == 100.0: #streak was broken, not perfect solo, force 99%
self.guitarSoloAccuracy[i] = 99.0
if self.guitarSoloAccuracy[i] == 100.0: #fablaculp: soloDescs changed
soloDesc = self.tsPerfectSolo
soloScoreMult = 100
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 95.0:
soloDesc = self.tsAwesomeSolo
soloScoreMult = 50
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 90.0:
soloDesc = self.tsGreatSolo
soloScoreMult = 30
self.engine.data.crowdSound.play() #liquid
elif self.guitarSoloAccuracy[i] >= 80.0:
soloDesc = self.tsGoodSolo
soloScoreMult = 20
elif self.guitarSoloAccuracy[i] >= 70.0:
soloDesc = self.tsSolidSolo
soloScoreMult = 10
elif self.guitarSoloAccuracy[i] >= 60.0:
soloDesc = self.tsOkaySolo
soloScoreMult = 5
else: #0% - 59.9%
soloDesc = self.tsMessySolo
soloScoreMult = 0
self.engine.data.failSound.play() #liquid
soloBonusScore = soloScoreMult * self.instruments[i].currentGuitarSoloHitNotes
self.scoring[i].score += soloBonusScore
if self.coOpType:
self.coOpScoreCard.score += soloBonusScore
trimmedSoloNoteAcc = self.roundDecimalForDisplay(self.guitarSoloAccuracy[i])
#self.soloReviewText[i] = [soloDesc,str(trimmedSoloNoteAcc) + "% = " + str(soloBonusScore) + _(" pts")]
#ptsText = _("pts")
self.soloReviewText[i] = [soloDesc,
"%(soloNoteAcc)s%% = %(soloBonus)d %(pts)s" % \
{'soloNoteAcc': str(trimmedSoloNoteAcc), 'soloBonus': soloBonusScore, 'pts': self.tsPtsLabel} ]
self.dispSoloReview[i] = True
self.soloReviewCountdown[i] = 0
#reset for next solo
self.instruments[i].currentGuitarSoloHitNotes = 0
self.currentGuitarSolo[i] += 1
  def updateGuitarSolo(self, playerNum):
    """Refresh the on-screen solo accuracy read-out for one player.

    While a solo is active, recomputes and re-lays-out the accuracy text
    whenever the hit-note count changes; once the solo is over, clears the
    shown flag exactly once.
    """
    i = playerNum
    #if self.guitars[i].canGuitarSolo:
    if self.instruments[i].guitarSolo:
      #update guitar solo for player i
      #if we hit more notes in the solo than were counted, update the solo count (for the slop)
      if self.instruments[i].currentGuitarSoloHitNotes > self.currentGuitarSoloTotalNotes[i]:
        self.currentGuitarSoloTotalNotes[i] = self.instruments[i].currentGuitarSoloHitNotes
      # Only rebuild the text when the hit count actually changed.
      if self.instruments[i].currentGuitarSoloHitNotes != self.currentGuitarSoloLastHitNotes[i]: #changed!
        self.currentGuitarSoloLastHitNotes[i] = self.instruments[i].currentGuitarSoloHitNotes #update.
        if self.guitarSoloAccuracyDisplayMode > 0: #if not off:
          tempSoloAccuracy = (float(self.instruments[i].currentGuitarSoloHitNotes)/float(self.currentGuitarSoloTotalNotes[i]) * 100.0)
          trimmedIntSoloNoteAcc = self.roundDecimalForDisplay(tempSoloAccuracy)
          if self.guitarSoloAccuracyDisplayMode == 1: #percentage only
            #soloText = str(trimmedIntSoloNoteAcc) + "%"
            self.solo_soloText[i] = "%s%%" % str(trimmedIntSoloNoteAcc)
          elif self.guitarSoloAccuracyDisplayMode == 2: #detailed
            #soloText = str(self.guitars[i].currentGuitarSoloHitNotes) + "/" + str(self.currentGuitarSoloTotalNotes[i]) + ": " + str(trimmedIntSoloNoteAcc) + "%"
            self.solo_soloText[i] = "%(hitSoloNotes)d/ %(totalSoloNotes)d: %(soloAcc)s%%" % \
              {'hitSoloNotes': self.instruments[i].currentGuitarSoloHitNotes, 'totalSoloNotes': self.currentGuitarSoloTotalNotes[i], 'soloAcc': str(trimmedIntSoloNoteAcc)}
          # Zeroes are swapped for capital O's in the display font.
          self.solo_soloText[i] = self.solo_soloText[i].replace("0","O")
          #if self.fontMode==0: #0 = oGL Hack, 1=LaminaScreen, 2=LaminaFrames
          self.solo_Tw[i], self.solo_Th[i] = self.solo_soloFont.getStringSize(self.solo_soloText[i],self.solo_txtSize)
          self.solo_boxXOffset[i] = self.solo_xOffset[i]
          # Layout depends on the configured display position.
          if self.guitarSoloAccuracyDisplayPos == 0: #right
            self.solo_xOffset[i] -= self.solo_Tw[i]
            self.solo_boxXOffset[i] -= self.solo_Tw[i]/2
            #soloFont.render(soloText, (xOffset - Tw, yOffset),(1, 0, 0),txtSize) #right-justified
          elif self.guitarSoloAccuracyDisplayPos == 1: #centered
            self.solo_xOffset[i] = 0.5 - self.solo_Tw[i]/2
            self.solo_boxXOffset[i] = 0.5
            #soloFont.render(soloText, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #centered
          elif self.guitarSoloAccuracyDisplayPos == 3: #racer: rock band
            if self.hitAccuracyPos == 0: #Center - need to move solo text above this!
              self.solo_yOffset[i] = 0.100 #above Jurgen Is Here
            elif self.jurgPlayer[i] and self.autoPlay:
              self.solo_yOffset[i] = 0.140 #above Jurgen Is Here
            else: #no jurgens here:
              self.solo_yOffset[i] = 0.175 #was 0.210, occluded notes
            self.solo_xOffset[i] = 0.5 - self.solo_Tw[i]/2
            self.solo_boxXOffset[i] = 0.5
            #soloFont.render(soloText, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #rock band
          else: #left
            self.solo_boxXOffset[i] += self.solo_Tw[i]/2
      self.guitarSoloShown[i] = True
    else: #not currently a guitar solo - clear Lamina solo accuracy surface (but only once!)
      if self.guitarSoloShown[i]:
        self.guitarSoloShown[i] = False
        self.currentGuitarSoloLastHitNotes[i] = 1
#MFH - single, global BPM here instead of in instrument objects:
#self.tempoBpm = Song.DEFAULT_BPM
#self.actualBpm = 0.0
#self.currentBpm = Song.DEFAULT_BPM
#self.currentPeriod = 60000.0 / self.currentBpm
#self.targetBpm = self.currentBpm
#self.targetPeriod = 60000.0 / self.targetBpm
#self.lastBpmChange = -1.0
#self.baseBeat = 0.0
#self.disableVBPM = self.engine.config.get("game", "disable_vbpm")
  def handleTempo(self, song, pos):
    """Advance the variable-BPM state toward the next tempo change.

    Consumes the next tempo event from the song's tempo track (if due) and
    then eases ``currentBpm`` toward ``targetBpm`` in 3% steps, propagating
    the result to every instrument.
    """
    if not song:
      return
    if self.lastBpmChange > 0 and self.disableVBPM == True: #MFH - only handle tempo once if the VBPM feature is off.
      return
    #tempo = song.tempoEventTrack.getCurrentTempo(pos)
    #if tempo != self.targetBpm: #MFH - get latest tempo target
    #  self.targetBpm = tempo
    tempEventHolder = song.tempoEventTrack.getNextTempoChange(pos)
    if tempEventHolder:
      time, event = tempEventHolder
      #if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
      # NOTE(review): the duplicated `self.lastBpmChange < 0` terms make this
      # condition logically equivalent to
      # (time < pos or pos - time < self.currentPeriod or self.lastBpmChange < 0)
      # and time > self.lastBpmChange  -- left as-is to preserve behavior.
      if ( (time < pos or self.lastBpmChange < 0) or (pos - time < self.currentPeriod or self.lastBpmChange < 0) ) and time > self.lastBpmChange:
        self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
        #self.targetBpm = song.tempoEventTrack.getCurrentTempo(pos)
        self.targetBpm = event.bpm
        song.tempoEventTrack.currentIndex += 1 #MFH = manually increase current event
        self.lastBpmChange = time
    #adjust tempo gradually to meet new target:
    if self.targetBpm != self.currentBpm:
      diff = self.targetBpm - self.currentBpm
      tempDiff = round( (diff * .03), 4) #MFH - better to calculate this once and reuse the variable instead of recalculating every use
      if tempDiff != 0:
        self.currentBpm = self.currentBpm + tempDiff
      else:
        # Step rounded to zero: snap directly to the target.
        self.currentBpm = self.targetBpm
      #recalculate all variables dependant on the tempo, apply to instrument objects - only if currentBpm has changed:
      self.currentPeriod = 60000.0 / self.currentBpm
      for instrument in self.instruments:
        instrument.setBPM(self.currentBpm)
        instrument.lastBpmChange = self.lastBpmChange
        instrument.baseBeat = self.baseBeat
def handleWhammy(self, playerNum):
i = playerNum
if self.resumeCountdown > 0: #MFH - conditions to completely ignore whammy
return
try: #since analog axis might be set but joystick not present = crash
#MFH - adding another nest of logic filtration; don't even want to run these checks unless there are playedNotes present!
if self.battleGH:
if self.isKillAnalog[i]:
if self.analogKillMode[i] == 2: #XBOX mode: (1.0 at rest, -1.0 fully depressed)
self.whammyVol[i] = 1.0 - (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
elif self.analogKillMode[i] == 3: #XBOX Inverted mode: (-1.0 at rest, 1.0 fully depressed)
self.whammyVol[i] = (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
else: #PS2 mode: (0.0 at rest, fluctuates between 1.0 and -1.0 when pressed)
self.whammyVol[i] = (round(10*(abs(self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i]))))/10.0)
if self.whammyVol[i] > 0.0 and self.whammyVol[i] < 0.1:
self.whammyVol[i] = 0.1
#MFH - simple whammy tail determination:
if self.whammyVol[i] > 0.1:
self.instruments[i].battleWhammyDown = True
else:
if self.instruments[i].battleWhammyDown:
self.instruments[i].battleWhammyDown = False
if self.instruments[i].battleStatus[4]:
self.instruments[i].battleWhammyNow -= 1
if self.instruments[i].battleWhammyNow == 0:
self.instruments[i].battleStatus[4] = False
for k, nowUsed in enumerate(self.instruments[i].battleBeingUsed):
if self.instruments[i].battleBeingUsed[k] == 4:
self.instruments[i].battleBeingUsed[k] = 0
else:
self.battleTarget[i] += 1
if self.battleTarget[i] == self.numOfPlayers:
self.battleTarget[i] = 0
if self.battleTarget[i] == i:
self.battleTarget[i] += 1
else:
if self.killswitchEngaged[i] == True: #QQstarS:new Fix the killswitch
self.killswitchEngaged[i] = True
if self.instruments[i].battleStatus[4]:
self.instruments[i].battleWhammyDown = True
else:
if self.instruments[i].battleStatus[4] and self.instruments[i].battleWhammyDown:
self.instruments[i].battleWhammyNow -= 1
self.instruments[i].battleWhammyDown = False
if self.instruments[i].battleWhammyNow == 0:
self.instruments[i].battleStatus[4] = False
for k, nowUsed in enumerate(self.instruments[i].battleBeingUsed):
if self.instruments[i].battleBeingUsed[k] == 4:
self.instruments[i].battleBeingUsed[k] = 0
if self.instruments[i].playedNotes:
#Player i kill / whammy check:
if self.isKillAnalog[i]:
if self.CheckForValidKillswitchNote(i): #if a note has length and is being held enough to get score
#rounding to integers, setting volumes 0-10 and only when changed from last time:
#want a whammy reading of 0.0 to = full volume, as that's what it reads at idle
if self.analogKillMode[i] == 2: #XBOX mode: (1.0 at rest, -1.0 fully depressed)
self.whammyVol[i] = 1.0 - (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
elif self.analogKillMode[i] == 3: #XBOX Inverted mode: (-1.0 at rest, 1.0 fully depressed)
self.whammyVol[i] = (round(10* ((self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i])+1.0) / 2.0 ))/10.0)
else: #PS2 mode: (0.0 at rest, fluctuates between 1.0 and -1.0 when pressed)
self.whammyVol[i] = (round(10*(abs(self.engine.input.joysticks[self.whichJoyKill[i]].get_axis(self.whichAxisKill[i]))))/10.0)
if self.whammyVol[i] > 0.0 and self.whammyVol[i] < 0.1:
self.whammyVol[i] = 0.1
#MFH - simple whammy tail determination:
if self.whammyVol[i] > 0.1:
self.killswitchEngaged[i] = True
else:
self.killswitchEngaged[i] = False
if self.whammyVol[i] != self.lastWhammyVol[i] and self.whammyVol[i] > 0.1:
if self.instruments[i].killPoints:
self.instruments[i].starPower += self.analogKillswitchStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
elif (self.instruments[i].starPowerActive and self.whammySavesSP):
self.instruments[i].starPower += self.analogKillswitchActiveStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
self.lastWhammyVol[i] = self.whammyVol[i]
#here, scale whammyVol to match kill volume setting:
self.targetWhammyVol[i] = self.whammyVol[i] * (self.activeVolume - self.killVolume)
if self.actualWhammyVol[i] < self.targetWhammyVol[i]:
self.actualWhammyVol[i] += self.whammyVolAdjStep
whammyVolSet = self.activeVolume - self.actualWhammyVol[i]
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(whammyVolSet, self.players[i].part)
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*(1.0-self.whammyVol[i])), self.players[i].part)
elif self.actualWhammyVol[i] > self.targetWhammyVol[i]:
self.actualWhammyVol[i] -= self.whammyVolAdjStep
whammyVolSet = 1.0 - self.actualWhammyVol[i]
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(whammyVolSet, self.players[i].part)
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*(1.0-self.whammyVol[i])), self.players[i].part)
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.actualWhammyVol[i] = self.defaultWhammyVol[i]
else: #digital killswitch:
if self.CheckForValidKillswitchNote(i): #if a note has length and is being held enough to get score
if self.killswitchEngaged[i] == True: #QQstarS:new Fix the killswitch
if self.instruments[i].isKillswitchPossible() == True:
self.killswitchEngaged[i] = True
if self.whammyEffect == 0: #killswitch
self.song.setInstrumentVolume(self.killVolume, self.players[i].part) #MFH
elif self.whammyEffect == 1: #pitchbend
self.song.setInstrumentPitch(self.pitchBendLowestFactor+((1.0-self.pitchBendLowestFactor)*self.whammyVol[i]), self.players[i].part)
if self.instruments[i].killPoints:
self.instruments[i].starPower += self.digitalKillswitchStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
elif (self.instruments[i].starPowerActive and self.whammySavesSP and not self.instruments[i].isVocal):
self.instruments[i].starPower += self.digitalKillswitchActiveStarpowerChunkSize
if self.instruments[i].starPower > 100:
self.instruments[i].starPower = 100
else:
self.killswitchEngaged[i] = None
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.killswitchEngaged[i] = False
elif self.scoring[i].streak > 0:
self.song.setInstrumentVolume(1.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.killswitchEngaged[i] = False
else:
self.killswitchEngaged[i] = False
except Exception, e:
self.whammyVol[i] = self.defaultWhammyVol[i]
def handleAnalogSP(self, playerNum, ticks):
i = playerNum
if self.resumeCountdown > 0:
return
if self.isSPAnalog[i]:
self.starAxisVal[i] = abs(self.engine.input.joysticks[self.whichJoyStar[i]].get_axis(self.whichAxisStar[i]))
if self.starAxisVal[i] > (self.analogSPThresh[i]/100.0):
if self.starDelay[i] == 0 and not self.starActive[i]:
self.starDelay[i] = (10-self.analogSPSense[i])*25
else:
self.starDelay[i] -= ticks
if self.starDelay[i] <= 0 and not self.starActive[i]:
self.activateSP(i)
self.starActive[i] = True
else:
self.starActive[i] = False
self.starDelay[i] = 0
def handleAnalogSlider(self, playerNum): #akedrou
i = playerNum
if self.resumeCountdown > 0:
return
if self.isSlideAnalog[i]:
oldSlide = self.slideValue[i]
if self.analogSlideMode[i] == 1: #Inverted mode
slideVal = -(self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])+1.0)/2.0
else: #Default
slideVal = (self.engine.input.joysticks[self.whichJoySlide[i]].get_axis(self.whichAxisSlide[i])+1.0)/2.0
if slideVal > 0.9 or slideVal < 0.01:
self.slideValue[i] = 4
elif slideVal > 0.77:
self.slideValue[i] = 4
self.markSlide(i)
elif slideVal > 0.68:
self.slideValue[i] = 3
elif slideVal > 0.60:
self.slideValue[i] = 3
self.markSlide(i)
elif slideVal > 0.54:
self.slideValue[i] = 2
elif slideVal > 0.43:
self.slideValue[i] = -1
#mark that sliding is not happening.
elif slideVal > 0.34:
self.slideValue[i] = 2
self.markSlide(i)
elif slideVal > 0.28:
self.slideValue[i] = 1
elif slideVal > 0.16:
self.slideValue[i] = 1
self.markSlide(i)
else:
self.slideValue[i] = 0
if self.slideValue[i] != oldSlide:
for n, k in enumerate(self.keysList[i]):
if n == self.slideValue[i] and not self.controls.getState(k):
self.controls.toggle(k, True)
self.keyPressed3(None, 0, k) #mfh
elif self.controls.getState(k):
self.controls.toggle(k, False)
self.keyReleased3(k)
if self.slideValue[i] > -1:
self.handlePick(i)
  def markSlide(self, playerNum):
    """Placeholder hook invoked when the analog slider sits between fret zones.

    Called from handleAnalogSlider for the "in between" axis positions;
    currently a no-op.
    """
    pass #akedrou - this will eventually handle the switch that you are, in fact, sliding up the analog fret bar.
  def handlePhrases(self, playerNum, playerStreak):
    """Post and animate streak pop-up text for one player.

    Posts "N Phrase Streak" messages for vocal parts (every 5 phrases past
    4) or "N Note Streak" messages for instruments (at 50, then at each
    multiple of 100) via newScalingText, then advances the grow/bounce/
    scroll-off animation of whatever text is currently displayed.  Also
    expires battle pop-up text in GH battle mode.

    playerNum    -- index into the per-player state arrays
    playerStreak -- that player's current note (or phrase) streak
    """
    if self.phrases > 0:
      i = playerNum
      vocalPart = False
      if not (self.coOpType and i == self.coOpPhrase):
        if self.instruments[i].isVocal:
          vocalPart = True
      if (self.coOpType and i == self.coOpPhrase) or not self.coOpType:
        # textChanged: the streak grew since the last call.
        if self.lastStreak[i] < playerStreak:
          textChanged = True
        else:
          textChanged = False
        # NOTE(review): lastStreak is updated *before* the milestone checks
        # below, so the `self.lastStreak[i] % N > streakModulo` clauses
        # compare playerStreak with itself and can never be True -- confirm
        # whether this assignment was meant to come after the checks.
        self.lastStreak[i] = playerStreak
        if vocalPart:
          streakModulo = playerStreak % 5
          if ( (streakModulo == 0) or (self.lastStreak[i] % 5 > streakModulo) ) and playerStreak > 4 and textChanged:
            self.newScalingText(i, self.tsPhraseStreak % (playerStreak - streakModulo) )
        elif (playerStreak == 50 or (self.lastStreak[i] < 50 and playerStreak > 50) ) and textChanged:
          #self.displayText[i] = _("50 Note Streak!!!") #kk69: more GH3-like
          #self.newScalingText(i, _("50 Note Streak!!!") )
          self.newScalingText(i, self.tsNoteStreak % 50)
          #self.streakFlag = "%d" % (i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
        #MFH - I think a simple integer modulo would be more efficient here:
        else:
          streakModulo = playerStreak % 100
          if ( (streakModulo == 0) or (self.lastStreak[i] % 100 > streakModulo) ) and playerStreak > 50 and textChanged:
            #self.displayText[i] = _("%d Note Streak!!!") % playerStreak #kk69: more GH3-like
            #self.newScalingText(i, _("%d Note Streak!!!") % playerStreak )
            #self.newScalingText(i, _("%d Note Streak!!!") % (playerStreak - streakModulo) )
            self.newScalingText(i, self.tsNoteStreak % (playerStreak - streakModulo) )
            #self.streakFlag = "%d" % (i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
      # --- pop-up text animation ---
      # Once fully grown the text "bounces": scaleText2 oscillates around 0.
      if self.scaleText[i] >= self.maxDisplayTextScale:
        self.displayTextScale[i] = self.scaleText[i] + self.scaleText2[i]
        if self.scaleText2[i] <= -0.0005:
          self.goingUP[i] = True
        elif self.scaleText2[i] >= 0.0005:
          self.goingUP[i] = False
        if self.goingUP[i]:
          self.scaleText2[i] += self.displayTextScaleStep2
        else:
          self.scaleText2[i] -= self.displayTextScaleStep2
      else:
        self.displayTextScale[i] = self.scaleText[i]
      # Text still growing toward full size.
      if not self.displayText[i] == None and not self.scaleText[i] >= self.maxDisplayTextScale:
        self.scaleText[i] += self.displayTextScaleStep1
      if self.scaleText[i] > self.maxDisplayTextScale:
        self.scaleText[i] = self.maxDisplayTextScale
      if not self.displayText[i] == None:
        self.textTimer[i] += 1
      # GH battle pop-ups expire on their own frame counter.
      if self.battleGH:
        if not self.battleText[i] == None:
          self.battleTextTimer[i] += 1
        if self.battleTextTimer[i] > 500:
          self.battleText[i] = None
          self.battleTextTimer[i] = 0
      # After its display time the text scrolls down; once off-screen,
      # reset all animation state for the next message.
      if self.textTimer[i] > self.textTimeToDisplay:
        self.textY[i] -= 0.02
      if self.textY[i] < 0:
        self.scaleText[i] = 0
        self.textTimer[i] = 0
        self.displayText[i] = None
        #textChanged = False
        self.textY[i] = .3
        self.scaleText2[i] = 0.0
        self.goingUP[i] = False
def newScalingText(self, playerNum, text):
i = playerNum
self.scaleText[i] = 0
self.textTimer[i] = 0
self.textY[i] = .3
self.scaleText2[i] = 0.0
self.goingUP[i] = False
self.displayText[i] = text
  def handlePick(self, playerNum, hopo = False, pullOff = False):
    """Process one strum/hit event for a player.

    During a big rock ending (BRE) or an active drum fill the pick is
    scored as a freestyle hit; otherwise it is dispatched to the
    configured note-hit handler (doPick / doPick3RF / doPick3GH2).

    playerNum -- index into the per-player state arrays
    hopo      -- the pick came from a hammer-on
    pullOff   -- the pick came from a pull-off (used by the GH2-style handler)
    """
    i = playerNum
    num = playerNum
    guitar = self.instruments[num]
    if self.resumeCountdown > 0: #MFH - conditions to completely ignore picks
      return
    #MFH - only actually pick if the player has not failed already!
    if self.rock[i] > 0 and guitar.battleStatus[4] == False:
      # Volshebnyi - new BRE and drum fills scoring
      if guitar.freestyleActive or (guitar.isDrum and guitar.drumFillsActive):
        if guitar.freestyleActive: #MFH - only for BREs, not drum fills. Will depend on BRE sound option when implemented.
          self.song.setInstrumentVolume(1.0, self.players[i].part) #MFH - ensure that every freestyle pick, the volume for that track is set to 1.0
        pos = self.getSongPosition()
        score = 0
        numFreestyleHits = guitar.freestylePick(self.song, pos, self.controls)
        if numFreestyleHits>0 or guitar.isDrum:
          # A new freestyle window starts when the last one has elapsed:
          # reset the period, base score, and per-fret hit timestamps.
          if guitar.freestyleFirstHit + guitar.freestyleLength < pos :
            guitar.freestyleFirstHit = pos
            guitar.freestylePeriod = 1500
            guitar.freestyleBaseScore = 150
            score = 600 * numFreestyleHits
            if guitar.isDrum:
              guitar.drumFillsHits = 0
            guitar.freestyleLastHit = pos - guitar.freestylePeriod
            for fret in range (0,5):
              guitar.freestyleLastFretHitTime[fret] = pos - guitar.freestylePeriod
          if guitar.isDrum:
            guitar.drumFillsHits += 1
          #if guitar.freestyleSP: #MFH - this logic should be in the run() function, not conditional here...
          #  self.activateSP(num)
          #  guitar.freestyleSP = False
          # Each held fret scores proportionally to how long since it was
          # last hit (faster re-hits score less, capped at the base score).
          for fret in range (5):
            if self.controls.getState(guitar.keys[fret]) or (self.playerList[i].controlType == 0 and self.controls.getState(guitar.keys[fret+5])):
              hitspeed = min((pos - guitar.freestyleLastFretHitTime[fret]) / guitar.freestylePeriod, 1.0)
              score += guitar.freestyleBaseScore * hitspeed
          if numFreestyleHits > 0: #MFH - to prevent a float division!
            score = int ( score / numFreestyleHits )
          for fret in range (5):
            if self.controls.getState(guitar.keys[fret]) or (self.playerList[i].controlType == 0 and self.controls.getState(guitar.keys[fret+5])):
              guitar.freestyleLastFretHitTime[fret] = pos
          #MFH - Add all BRE score to a temporary score accumulator with a separate display box
          # and only reward if all notes after the BRE are hit without breaking streak!
          if guitar.freestyleActive: #MFH - only want to add the score if this is a BRE - drum fills get no scoring...
            if self.coOpType:
              self.scoring[num].endingScore += score
              self.scoring[num].endingStreakBroken = False
              self.scoring[num].freestyleWasJustActive = True
              self.coOpScoreCard.endingScore += score
              self.coOpScoreCard.endingStreakBroken = False
              self.coOpScoreCard.freestyleWasJustActive = True
            else:
              #self.playerList[num].addScore( score )
              self.scoring[num].endingScore += score
              #also, when this happens, want to set a flag indicating that all of the remaining notes in the song must be hit without
              # breaking streak, or this score will not be kept!
              self.scoring[num].endingStreakBroken = False
              self.scoring[num].freestyleWasJustActive = True
        #MFH - also must ensure notes that pass during this time are marked as skipped without resetting the streak
        #missedNotes = self.guitars[num].getMissedNotesMFH(self.song, pos, catchup = True)
        missedNotes = guitar.getMissedNotesMFH(self.song, pos + guitar.earlyMargin, catchup = True) #MFh - check slightly ahead here.
        for tym, theNote in missedNotes: #MFH - also want to mark these notes as Played so they don't count against the note total!
          #theNote.played = True
          theNote.skipped = True
          if guitar.isDrum:
            if self.coOpType:
              self.coOpScoreCard.totalStreakNotes -= 1
            else:
              self.scoring[num].totalStreakNotes -= 1
      else:
        # Normal (non-freestyle) pick: route to the configured handler.
        if guitar.isDrum:
          self.doPick(i)
        else:
          if self.hopoStyle == 1: #1 = rf-mod
            self.doPick3RF(i, hopo)
          elif self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO
            self.doPick3GH2(i, hopo, pullOff)
          else: #2 = no HOPOs
            self.doPick(i)
  def handleJurgen(self, pos):
    """Drive the auto-player ("Jurgen") and the player-assist modes.

    pos -- current song position.

    For each instrument that is fully auto-played (self.jurg[i]) or
    partially assisted (self.playerAssist[i] != 0): optionally manages
    GH-battle item usage for the AI, then frets and strums the upcoming
    notes using one of four selectable strategies (self.jurgenLogic[i]
    values 0-3, described at each branch below).
    """
    chordFudge = 1 #MFH - was 10 - #myfingershurt - needed to detect chords
    if self.firstGuitar is not None:
      chordFudge = self.song.track[self.firstGuitar].chordFudge
    if self.autoPlay or self.assisting:
      for i,instrument in enumerate(self.instruments):
        #Allow Jurgen per player...Spikehead777
        if self.jurg[i] == True: #if it is this player
          self.jurgPlayer[i] = True
        else: #and if not
          if self.playerAssist[i] == 0: #and no assist
            continue
        if instrument.isVocal:
          continue
        guitar = instrument
        if self.battleGH:
          # Score how much the AI "wants" to use its held battle item.
          self.aiUseSP[i] = 0
          if self.aiSkill[i] == 4 or self.aiSkill[i] == 5:
            self.aiUseSP[i] += 25 * self.battleItemsHolding[i] #Number of Items in Holding
            if self.instruments[self.battleTarget[i]].isStarPhrase:
              self.aiUseSP[i] += 100 #always use when target is in starphrase
            self.aiUseSP[i] += max((100 - (300*self.rock[self.battleTarget[i]])/self.rockMax), 0) #use when they're almost dead
            self.aiUseSP[i] += max((100 - (500*self.rock[i])/self.rockMax), 0) #use when they're almost dead
          else:
            self.aiUseSP[i] = 100
        if self.battleGH: #PRELIM LOGIC until algorithm goes in
          if guitar.battleObjects[0] != 0:
            if self.aiUseSP[i] > 50 and pos > guitar.battleGetTime + self.jurgBattleUseTime[i]:
              self.activateSP(i)
          if guitar.battleStatus[4]:
            if guitar.battleWhammyNow == 0:
              guitar.battleStatus[4] = False
              for k, nowUsed in enumerate(guitar.battleBeingUsed):
                if guitar.battleBeingUsed[k] == 4:
                  guitar.battleBeingUsed[k] = 0
            if guitar.battleWhammyNow != 0:
              if pos - guitar.battleStartTimes[4] > self.jurgBattleWhammyTime[i]:
                guitar.battleStartTimes[4] = pos
                guitar.battleWhammyNow -= 1
        if self.jurgenLogic[i] == 0: #original FoF / RF-Mod style Jurgen Logic (cannot handle fast notes / can only handle 1 strum per notewindow)
          notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
          notes = [note.number for time, note in notes]
          changed = False
          held = 0
          for n, k in enumerate(self.keysList[i]):
            if n > 4: break
            if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (self.playerAssist[i] == 3 and k == guitar.keys[0]):
              if n in notes and not self.controls.getState(k):
                changed = True
                self.controls.toggle(k, True)
                self.keyPressed3(None, 0, k) #mfh
              elif not n in notes and self.controls.getState(k):
                changed = True
                self.controls.toggle(k, False)
                self.keyReleased3(k) #mfh
              if self.controls.getState(k):
                held += 1
          #if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
          if changed and held and not guitar.isDrum: #dont need the extra pick for drums
            #myfingershurt:
            self.handlePick(i)
        elif self.jurgenLogic[i] == 1: #Jurgen logic style MFH-Early -- will separate notes out by time index, with chord slop detection, and strum every note
          #MFH - Jurgen needs some logic that can handle notes that may be coming too fast to retrieve one set at a time
          notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
          #now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
          if notes:
            jurgStrumTime = notes[0][0]
            # Notes within chordFudge of the first note form one chord.
            jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
            if self.battleJurgMissTime[i] != jurgStrumTime:
              self.battleJurgMissTime[i] = jurgStrumTime
              # Roll once per new strum time whether the AI hits it;
              # certain battle penalties lower the effective hit chance.
              if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
                if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
                  self.aiPlayNote[i] = False
                else:
                  self.aiPlayNote[i] = True
              else:
                if random.randint(0,100) > self.aiHitPercentage[i]:
                  self.aiPlayNote[i] = False
                else:
                  self.aiPlayNote[i] = True
          else:
            jurgStrumNotes = []
          changed = False
          held = 0
          if self.aiPlayNote[i]:
            for n, k in enumerate(self.keysList[i]):
              if n > 4: break
              if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
                if n in jurgStrumNotes and not self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, True)
                  self.keyPressed(None, 0, k) #mfh
                elif not n in jurgStrumNotes and self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, False)
                  self.keyReleased(k) #mfh
                if self.controls.getState(k):
                  held += 1
          #if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
          if changed and held and not guitar.isDrum: #dont need the extra pick for drums
            #myfingershurt:
            self.handlePick(i)
        elif self.jurgenLogic[i] == 2: #Jurgen logic style MFH-OnTime1 -- Have Jurgen attempt to strum on time instead of as early as possible
          #This method simply shrinks the note retrieval window to only notes that are on time and late. No early notes are even considered.
          #MFH - Jurgen needs some logic that can handle notes that may be coming too fast to retrieve one set at a time
          notes = guitar.getRequiredNotesForJurgenOnTime(self.song, pos) #mfh - needed updatin'
          #now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
          if notes:
            jurgStrumTime = notes[0][0]
            jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
            if self.battleJurgMissTime[i] != jurgStrumTime:
              self.battleJurgMissTime[i] = jurgStrumTime
              if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
                if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
                  self.aiPlayNote[i] = False
                else:
                  self.aiPlayNote[i] = True
              else:
                if random.randint(0,100) > self.aiHitPercentage[i]:
                  self.aiPlayNote[i] = False
                else:
                  self.aiPlayNote[i] = True
          else:
            jurgStrumNotes = []
            self.aiPlayNote[i] = True
          changed = False
          held = 0
          if self.aiPlayNote[i]:
            for n, k in enumerate(self.keysList[i]):
              if n > 4: break
              if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
                if n in jurgStrumNotes and not self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, True)
                  self.keyPressed(None, 0, k) #mfh
                elif not n in jurgStrumNotes and self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, False)
                  self.keyReleased(k) #mfh
                if self.controls.getState(k):
                  held += 1
          #if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
          if changed and held and not guitar.isDrum: #dont need the extra pick for drums
            #myfingershurt:
            self.handlePick(i)
        elif self.jurgenLogic[i] == 3: #Jurgen logic style MFH-OnTime2 -- Have Jurgen attempt to strum on time instead of as early as possible
          #This method retrieves all notes in the window and only attempts to play them as they pass the current position, like a real player
          notes = guitar.getRequiredNotesMFH(self.song, pos) #mfh - needed updatin'
          #now, want to isolate the first note or set of notes to strum - then do it, and then release the controls
          if notes:
            jurgStrumTime = notes[0][0]
            jurgStrumNotes = [note.number for time, note in notes if abs(time-jurgStrumTime) <= chordFudge]
          else:
            jurgStrumTime = 0
            jurgStrumNotes = []
          changed = False
          held = 0
          if self.battleJurgMissTime[i] != jurgStrumTime:
            self.battleJurgMissTime[i] = jurgStrumTime
            if guitar.battleStatus[2] or guitar.battleStatus[6] or guitar.battleStatus[7] or guitar.battleStatus[8]:
              if random.randint(0,100) > self.aiHitPercentage[i] - ((5-self.aiSkill[i])*15):
                self.aiPlayNote[i] = False
              else:
                self.aiPlayNote[i] = True
            else:
              if random.randint(1,100) > self.aiHitPercentage[i]:
                self.aiPlayNote[i] = False
              else:
                self.aiPlayNote[i] = True
          #MFH - check if jurgStrumTime is close enough to the current position (or behind it) before actually playing the notes:
          if (not notes or jurgStrumTime <= (pos + 30)) and self.aiPlayNote[i]:
            for n, k in enumerate(self.keysList[i]):
              if n > 4: break
              if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
                if n in jurgStrumNotes and not self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, True)
                  self.keyPressed(None, 0, k) #mfh
                elif not n in jurgStrumNotes and self.controls.getState(k):
                  changed = True
                  self.controls.toggle(k, False)
                  self.keyReleased(k) #mfh
                if self.controls.getState(k):
                  held += 1
          #if changed and held and not self.playerList[i].part.text == "Drums": #dont need the extra pick for drums
          if changed and held and not guitar.isDrum: #dont need the extra pick for drums
            #myfingershurt:
            self.handlePick(i)
        #MFH - release all frets - who cares about held notes, I want a test player (actually if no keyReleased call, will hold notes fine)
          for n, k in enumerate(self.keysList[i]):
            if (self.autoPlay and self.jurg[i]) or (k == guitar.keys[4] and self.playerAssist[i] == 2) or ((k == guitar.keys[4] or k == guitar.keys[3]) and self.playerAssist[i] == 1) or (guitar.isDrum and self.playerAssist[i] == 3 and k == guitar.keys[0]):
              if self.controls.getState(k):
                self.controls.toggle(k, False)
def rockmeterDecrease(self, playerNum, vScore = 0):
i = playerNum
if self.instruments[i].isVocal:
rockMinusAmount = 500 * (3 - vScore)
self.rock[i] -= rockMinusAmount
if (not self.coOpRB) and (self.rock[i]/self.rockMax <= 0.667) and ((self.rock[i]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
return
rockMinusAmount = 0 #akedrou - simplify the various incarnations of minusRock.
if self.instruments[i].isDrum:
self.drumStart = True
if not self.drumScoringEnabled: #MFH - ignore when drum scoring is disabled
return
if self.starNotesMissed[i] or self.instruments[i].isStarPhrase:
self.instruments[i].isStarPhrase = True
self.instruments[i].spEnabled = False
#self.instruments[i].spNote = False
if not self.failingEnabled or self.practiceMode:
return
if self.battle and self.numOfPlayers > 1: #battle mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.multi[i]
#self.rock[i] -= self.minusRock[i]/self.multi[i]
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.multi[i]
if self.lessMissed[i]: #QQstarS:Set [i] to [i]
self.minusRock[i] += self.minGain/5.0/self.multi[i]
#self.rock[i] -= self.minusRock[i]/5.0/self.multi[i]
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.multi[i]
elif (self.coOp or self.coOpGH) and self.numOfPlayers > 1: #co-op mode
if self.notesMissed[i]:
self.minusRock[self.coOpPlayerMeter] += self.minGain/self.multi[i]
rockMinusAmount = self.minusRock[self.coOpPlayerMeter]/self.multi[i]
self.rock[self.coOpPlayerMeter] -= rockMinusAmount
if self.plusRock[self.coOpPlayerMeter] > self.pluBase:
self.plusRock[self.coOpPlayerMeter] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[self.coOpPlayerMeter] <= self.pluBase:
self.plusRock[self.coOpPlayerMeter] = self.pluBase/self.multi[i]
if self.lessMissed[i]:
self.minusRock[self.coOpPlayerMeter] += self.minGain/5.0/self.multi[i]
rockMinusAmount = self.minusRock[0]/5.0/self.multi[i]
self.rock[self.coOpPlayerMeter] -= rockMinusAmount
if self.plusRock[self.coOpPlayerMeter] > self.pluBase:
self.plusRock[self.coOpPlayerMeter] -= self.pluGain/2.5/self.multi[i]
if (self.rock[self.coOpPlayerMeter]/self.rockMax <= 0.667) and ((self.rock[self.coOpPlayerMeter]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
elif self.coOpRB and self.numOfPlayers > 1: #RB co-op mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.coOpMulti
if self.numDeadPlayers > 0:
self.minusRock[self.coOpPlayerMeter] += self.minGain/self.coOpMulti
rockMinusAmount = self.minusRock[self.coOpPlayerMeter]/self.coOpMulti
self.rock[self.coOpPlayerMeter] -= rockMinusAmount/self.numOfPlayers
self.rock[i] -= self.minusRock[i]/self.coOpMulti
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.coOpMulti
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.coOpMulti
if self.lessMissed[i]:
self.minusRock[i] += self.minGain/5.0/self.coOpMulti
if self.numDeadPlayers > 0:
self.minusRock[self.coOpPlayerMeter] += self.minGain/5.0/self.coOpMulti
rockMinusAmount = self.minusRock[i]/5.0/self.coOpMulti
self.rock[self.coOpPlayerMeter] -= rockMinusAmount/(self.numOfPlayers - self.numDeadPlayers)
self.rock[i] -= self.minusRock[i]/5.0/self.coOpMulti
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.coOpMulti
else: #normal mode
if self.notesMissed[i]:
self.minusRock[i] += self.minGain/self.multi[i]
rockMinusAmount = self.minusRock[i]/self.multi[i]
self.rock[i] -= rockMinusAmount
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain*2.0/self.multi[i]
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase/self.multi[i]
if self.lessMissed[i]:
self.minusRock[i] += self.minGain/5.0/self.multi[i]
rockMinusAmount = self.minusRock[i]/5.0/self.multi[i]
self.rock[i] -= rockMinusAmount
if self.plusRock[i] > self.pluBase:
self.plusRock[i] -= self.pluGain/2.5/self.multi[i]
if (self.rock[i]/self.rockMax <= 0.667) and ((self.rock[i]+rockMinusAmount)/self.rockMax > 0.667): #akedrou
self.playersInGreen -= 1
if self.minusRock[i] <= self.minBase:
self.minusRock[i] = self.minBase
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase
def rockmeterIncrease(self, playerNum, vScore = 0):
i = playerNum
if self.instruments[i].isVocal:
rockPlusAmt = 500 + (500 * (vScore-2))
self.rock[i] += rockPlusAmt
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if not self.coOpRB:
if (self.rock[i]/self.rockMax > 0.667) and ((self.rock[i]-rockPlusAmt)/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
return
if self.instruments[i].isDrum:
self.drumStart = True
if not self.failingEnabled or self.practiceMode:
return
if not self.notesHit[i]: return
if self.battle and self.numOfPlayers > 1: #battle mode
if self.notesHit[i]:
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.multi[i]
if self.plusRock[i] > self.battleMax:
self.plusRock[i] = self.battleMax
self.rock[i] += self.plusRock[i]*self.multi[i]
self.rock[self.battleTarget[i]] -= self.plusRock[i]*self.multi[i]
if self.rock[self.battleTarget[i]] < 0:
self.rock[self.battleTarget[i]] = 0
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.multi[i]
#MFH TODO maintain separate rock status for each player
elif (self.coOp or self.coOpGH) and self.numOfPlayers > 1:
if self.rock[self.coOpPlayerMeter] < self.rockMax:
self.plusRock[self.coOpPlayerMeter] += self.pluGain*self.multi[i]
self.rock[self.coOpPlayerMeter] += self.plusRock[self.coOpPlayerMeter]*self.multi[i]
if self.rock[self.coOpPlayerMeter] >= self.rockMax:
self.rock[self.coOpPlayerMeter] = self.rockMax
if self.minusRock[self.coOpPlayerMeter] > self.minBase:
self.minusRock[self.coOpPlayerMeter] -= self.minGain/2.0*self.multi[i]
if (self.rock[self.coOpPlayerMeter]/self.rockMax > 0.667) and ((self.rock[self.coOpPlayerMeter]-(self.plusRock[self.coOpPlayerMeter]*self.multi[i]))/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
elif self.coOpRB and self.numOfPlayers > 1:
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.coOpMulti
self.rock[i] += (self.plusRock[i]*self.coOpMulti)
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.coOpMulti
else: #normal mode
if self.rock[i] < self.rockMax:
self.plusRock[i] += self.pluGain*self.multi[i]
self.rock[i] += self.plusRock[i]*self.multi[i]
if self.rock[i] >= self.rockMax:
self.rock[i] = self.rockMax
if self.minusRock[i] > self.minBase:
self.minusRock[i] -= self.minGain/2.0*self.multi[i]
#Log.debug(str((self.rock[i]-(self.plusRock[i]*self.multi[i]))/self.rockMax) % "AND" % str(self.rock[i]/self.rockMax))
if (self.rock[i]/self.rockMax > 0.667) and ((self.rock[i]-(self.plusRock[i]*self.multi[i]))/self.rockMax <= 0.667):
self.playersInGreen += 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
if self.minusRock[i] <= self.minBase:
self.minusRock[i] = self.minBase
if self.plusRock[i] <= self.pluBase:
self.plusRock[i] = self.pluBase
def rockmeterDrain(self, playerNum):
if self.battleGH:
self.rock[playerNum] -= 70.0
else:
self.rock[playerNum] -= 15.0
self.minusRock[playerNum] += self.minGain/10/self.coOpMulti
def run(self, ticks): #QQstarS: Fix this funcion
if self.song and self.song.readyToGo and not self.pause and not self.failed:
Scene.run(self, ticks)
if not self.resumeCountdown and not self.pause:
pos = self.getSongPosition()
self.song.update(ticks)
# update stage
else:
pos = self.pausePos
if self.vbpmLogicType == 1:
self.handleTempo(self.song, pos) #MFH - new global tempo / BPM handling logic
if self.bossBattle and self.rock[1] < 0:
if self.careerMode and not self.song.info.completed:
if self.song.info.count:
count = int(self.song.info.count)
else:
count = 0
count += 1
Log.debug("Song completed")
self.song.info.completed = True
self.song.info.count = "%d" % count
self.song.info.save()
#MFH - new failing detection logic
if self.failingEnabled:
#if self.numOfPlayers > 1:
if self.numOfPlayers > 1 and self.coOpType:
if self.rock[self.coOpPlayerMeter] <= 0:
self.failed = True
else:
if self.coOpRB:
for i, player in enumerate(self.playerList):
if self.rock[i] <= 0 and not self.coOpFailDone[i]:
self.instruments[i].coOpFailed = True
self.instruments[i].starPower = 0.0
self.engine.data.coOpFailSound.play()
self.deadPlayerList.append(i)
self.numDeadPlayers += 1
self.timesFailed[i] += 1
self.crowdsCheering = False
self.song.setInstrumentVolume(0.0, self.players[i].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.players[i].part)
self.coOpFailDone[i] = True
elif self.numOfPlayers > 1 and self.battleGH:
for i, player in enumerate(self.playerList):
if self.rock[i] <= 0:
self.failed = True
else:
somebodyStillAlive = False
for i, player in enumerate(self.playerList):
if self.rock[i] > 0:
somebodyStillAlive = True
if not somebodyStillAlive: #only if everybody has failed
self.failed = True
if pos > self.lastDrumNoteTime: #MFH - disable drum scoring so that the drummer can get down with his bad self at the end of the song without penalty.
self.drumScoringEnabled = False # ...is that what drummers do?
for i,instrument in enumerate(self.instruments):
if instrument.isVocal:
instrument.requiredNote = instrument.getRequiredNote(pos, self.song)
instrument.run(ticks, pos)
scoreBack = instrument.getScoreChange()
if scoreBack is not None:
points, scoreThresh, taps = scoreBack
self.scoring[i].score += points * instrument.scoreMultiplier * self.multi[i]
self.scoring[i].percNotesHit += taps
scoreThresh = 5-scoreThresh
if scoreThresh > 3:
self.rockmeterIncrease(i, scoreThresh)
self.scoring[i].notesHit += 1
self.scoring[i].streak += 1
elif scoreThresh == 3:
self.scoring[i].streak = 0
elif scoreThresh < 3:
self.rockmeterDecrease(i, scoreThresh)
self.scoring[i].streak = 0
self.scoring[i].updateAvMult()
self.scoring[i].getStarScores()
if instrument.starPowerGained:
if instrument.starPower >= 50 and not instrument.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if self.phrases > 1:
if instrument.starPower >= 50 and not instrument.starPowerActive:
self.newScalingText(i, self.tsStarPowerReady)
instrument.starPowerGained = False
if instrument.starPowerActivate:
self.activateSP(i)
instrument.starPowerActivate = False
continue
self.stage.run(pos, instrument.currentPeriod)
playerNum = i
guitar = instrument
if guitar.battleObjects[0] != 0:
self.battleItemsHolding[i] = 1
else:
self.battleItemsHolding[i] = 0
if guitar.battleObjects[1] != 0:
self.battleItemsHolding[i] = 2
if guitar.battleObjects[2] != 0:
self.battleItemsHolding[i] = 3
if self.battleGH:
if guitar.battleBeingUsed[0] == 0 and guitar.battleBeingUsed[1] != 0:
guitar.battleBeingUsed[0] = guitar.battleBeingUsed[1]
guitar.battleBeingUsed[1] = 0
#Log.debug("Battle Being Used: %s" % str(guitar.battleBeingUsed))
time = self.getSongPosition()
if guitar.battleStatus[1]:
if time - guitar.battleDrainStart > guitar.battleDrainLength:
Log.debug("Drain for Player %d disabled" % i)
guitar.battleStatus[1] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 1:
guitar.battleBeingUsed[k] = 0
else:
self.rockmeterDrain(i)
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 5:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[6]:
if time - guitar.battleStartTimes[6] > guitar.battleLeftyLength:
Log.debug("Lefty Mode for Player %d disabled" % i)
guitar.battleStatus[6] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 6:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[8]:
if time - guitar.battleStartTimes[8] > guitar.battleAmpLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[8] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 8:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[7]:
if time - guitar.battleStartTimes[7] > guitar.battleDoubleLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[7] = False
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 7:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[3]:
if guitar.battleBreakNow <= 0:
guitar.battleStatus[3] = False
guitar.battleBreakString = 0
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 3:
guitar.battleBeingUsed[k] = 0
if guitar.battleStatus[2]:
if time - guitar.battleStartTimes[2] > guitar.battleDiffUpLength:
Log.debug("Diff Up Mode for Player %d disabled" % i)
guitar.battleStatus[2] = False
self.song.difficulty[i] = Song.difficulties[guitar.battleDiffUpValue]
guitar.difficulty = guitar.battleDiffUpValue
for k, nowUsed in enumerate(guitar.battleBeingUsed):
if guitar.battleBeingUsed[k] == 2:
guitar.battleBeingUsed[k] = 0
if guitar.isDrum and guitar.freestyleSP: #MFH - this drum fill starpower activation logic should always be checked.
self.activateSP(i)
guitar.freestyleSP = False
#MFH - check for any unplayed notes and for an unbroken streak since the BRE, then award bonus scores
#akedrou - does not work for co-op.
if self.coOpType:
scoreCard = self.coOpScoreCard
if scoreCard.freestyleWasJustActive and not scoreCard.endingAwarded:
if scoreCard.lastNoteTime < pos and not scoreCard.endingStreakBroken:
Log.debug("Big Rock Ending bonus awarded for co-op players! %d points." % scoreCard.endingScore)
if scoreCard.endingScore > 0:
scoreCard.addEndingScore()
self.engine.data.starActivateSound.play()
scoreCard.endingAwarded = True
else:
scoreCard = self.scoring[playerNum]
if scoreCard.freestyleWasJustActive and not scoreCard.endingAwarded:
if scoreCard.lastNoteEvent and not scoreCard.endingStreakBroken:
if scoreCard.lastNoteEvent.played or scoreCard.lastNoteEvent.hopod:
Log.debug("Big Rock Ending bonus awarded for player %d: %d points" % (playerNum, scoreCard.endingScore) )
if scoreCard.endingScore > 0:
scoreCard.addEndingScore()
self.engine.data.starActivateSound.play()
scoreCard.endingAwarded = True
if guitar.starPowerGained == True:
if self.unisonActive and self.inUnison[i]:
self.unisonEarn[i] = True
if self.coOpGH:
self.coOpStarPower += (25 * self.numOfPlayers) #lets 2 SP phrases give SP
if self.coOpStarPower > (100 * self.numOfPlayers):
self.coOpStarPower = (100 * self.numOfPlayers)
if self.coOpStarPower >= (50 * self.numOfPlayers) and not guitar.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if guitar.isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(playerNum)
else:
#myfingershurt: auto drum starpower activation option:
if guitar.isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(playerNum)
if guitar.starPower >= 50 and not guitar.starPowerActive:
self.engine.data.starReadySound.play()
else:
self.engine.data.starSound.play()
if self.phrases > 1:
if self.coOpGH:
if guitar.starPowerGained and self.coOpStarPower >= (50 * self.numOfPlayers) and not guitar.starPowerActive:
self.newScalingText(self.coOpPhrase, self.tsStarPowerReady )
elif self.battleGH:
if guitar.battleObjectGained and guitar.battleObjects[0] != 0:
self.battleText[i] = self.tsBattleIcons[guitar.battleObjects[0]]
guitar.battleObjectGained = False
else:
if guitar.starPower >= 50 and not guitar.starPowerActive: #QQstarS:Set [0] to [i]
self.newScalingText(playerNum, self.tsStarPowerReady )
self.hopFretboard(i, 0.04) #stump
guitar.starPowerGained = False #QQstarS:Set [0] to [i]
# update board
#for i,guitar in enumerate(self.guitars):
if self.coOpGH:
for k, theGuitar in enumerate(self.instruments):
theGuitar.starPower = self.coOpStarPower/self.numOfPlayers
if not guitar.run(ticks, pos, self.controls):
# done playing the current notes
self.endPick(i)
if guitar.drumFillsActive:
if self.muteDrumFill > 0 and not self.jurg[i]:
self.song.setInstrumentVolume(0.0, self.playerList[i].part)
#MFH - ensure this missed notes check doesn't fail you during a freestyle section
if guitar.freestyleActive or guitar.drumFillsActive:
missedNotes = guitar.getMissedNotesMFH(self.song, pos + guitar.lateMargin*2, catchup = True) #MFH - get all notes in the freestyle section.
for tym, theNote in missedNotes: #MFH - also want to mark these notes as Played so they don't count against the note total!
#theNote.played = True
theNote.skipped = True
if guitar.isDrum:
if self.coOpType:
self.coOpScoreCard.totalStreakNotes -= 1
self.scoring[playerNum].totalStreakNotes -= 1
else:
missedNotes = guitar.getMissedNotesMFH(self.song, pos)
if guitar.paused:
missedNotes = []
if missedNotes:
if guitar.isDrum:
self.drumStart = True
self.lessMissed[i] = True #QQstarS:Set [0] to [i]
for tym, theNote in missedNotes: #MFH
self.scoring[playerNum].notesMissed += 1
if self.coOpType:
self.coOpScoreCard.notesMissed += 1
if theNote.star or theNote.finalStar:
if self.logStarpowerMisses == 1:
Log.debug("SP Miss: run(), note: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
self.starNotesMissed[i] = True
if self.unisonActive:
self.inUnison[i] = False
if (self.scoring[i].streak != 0 or not self.processedFirstNoteYet) and not guitar.playedNotes and len(missedNotes) > 0:
if not self.processedFirstNoteYet:
self.stage.triggerMiss(pos)
self.notesMissed[i] = True
self.processedFirstNoteYet = True
self.currentlyAnimating = False
guitar.setMultiplier(1)
guitar.hopoLast = -1
self.song.setInstrumentVolume(0.0, self.playerList[playerNum].part)
if self.whammyEffect == 1: #pitchbend
self.song.resetInstrumentPitch(self.playerList[playerNum].part)
self.guitarSoloBroken[i] = True
if self.coOpType:
self.coOpScoreCard.streak = 0
self.coOpScoreCard.endingStreakBroken = True
self.scoring[playerNum].streak = 0
self.scoring[playerNum].endingStreakBroken = True #MFH
if self.hopoDebugDisp == 1:
missedNoteNums = [noat.number for time, noat in missedNotes]
#Log.debug("Miss: run(), found missed note(s)... %s" % str(missedNoteNums) + ", Time left=" + str(self.timeLeft))
Log.debug("Miss: run(), found missed note(s)... %(missedNotes)s, Song time=%(songTime)s" % \
{'missedNotes': str(missedNoteNums), 'songTime': str(self.timeLeft)})
guitar.hopoActive = 0
guitar.wasLastNoteHopod = False
guitar.sameNoteHopoString = False
guitar.hopoProblemNoteNum = -1
#self.problemNotesP1 = []
#self.problemNotesP2 = []
#notes = self.guitars[i].getRequiredNotesMFH(self.song, pos) #MFH - wtf was this doing here? I must have left it by accident o.o
#if not self.pause and not self.failed:
#myfingershurt: Capo's starpower claps on a user setting:
#if self.starClaps and self.song and len(self.beatTime) > 0 or (self.beatClaps and self.song and len(self.beatTime) > 0):
if (self.starClaps or self.beatClaps) and len(self.beatTime) > 0:
###Capo###
#Play a sound on each beat on starpower
clap = False
if self.playerList[0].practiceMode and self.beatClaps:
clap = True
else:
for i,player in enumerate(self.playerList):
if self.instruments[i].starPowerActive == True:
clap = True
break
#pos = self.getSongPosition()
if pos >= (self.beatTime[0] - 100):
self.beatTime.pop(0)
if clap == True:
if self.firstClap == False:
#self.sfxChannel.setVolume(self.sfxVolume)
#self.sfxChannel.play(self.engine.data.clapSound)
self.engine.data.clapSound.play()
else:
self.firstClap = False
else:
self.firstClap = True
###endCapo###
#MFH - new refugees from the render() function:
if self.theme == 2:
if self.rbOverdriveBarGlowFadeOut == False:
self.rbOverdriveBarGlowVisibility = self.rbOverdriveBarGlowVisibility + self.rbOverdriveBarGlowFadeInChunk
elif self.rbOverdriveBarGlowFadeOut == True:
self.rbOverdriveBarGlowVisibility = self.rbOverdriveBarGlowVisibility - self.rbOverdriveBarGlowFadeOutChunk
if self.rbOverdriveBarGlowVisibility >= 1 and self.rbOverdriveBarGlowFadeOut == False:
self.rbOverdriveBarGlowFadeOut = True
elif self.rbOverdriveBarGlowVisibility <= 0 and self.rbOverdriveBarGlowFadeOut == True:
self.rbOverdriveBarGlowFadeOut = False
for playerNum in range(self.numOfPlayers):
self.handlePhrases(playerNum, self.scoring[playerNum].streak) #MFH - streak #1 for player #1...
self.handleAnalogSP(playerNum, ticks)
self.handleWhammy(playerNum)
if self.playerList[playerNum].controlType == 4:
self.handleAnalogSlider(playerNum)
self.updateGuitarSolo(playerNum)
if self.coOpType:
self.handlePhrases(self.coOpPhrase, self.coOpScoreCard.streak)
self.handleJurgen(pos)
#stage rotation
#MFH - logic to prevent advancing rotation frames if you have screwed up, until you resume a streak
if (self.currentlyAnimating and self.missPausesAnim == 1) or self.missPausesAnim == 0:
self.stage.rotate()
self.starPowersActive = 0
self.coOpStarPower = 0
#MFH - new logic to update the starpower pre-multiplier
#akedrou - broken up to support RB Co-op properly.
for i in range(self.numOfPlayers):
if self.instruments[i].starPowerActive:
self.multi[i] = 2
self.starPowersActive += 1
else:
self.multi[i] = 1
sp = self.instruments[i].starPower
if self.coOpGH:
self.coOpStarPower += sp
if self.coOpRB:
if self.unisonIndex < len(self.unisonConfirm) and not self.unisonActive: #akedrou - unison bonuses
while self.unisonConfirm[self.unisonIndex][0] < pos:
self.unisonIndex += 1
if len(self.unisonConfirm) == self.unisonIndex:
break
if len(self.unisonConfirm) > self.unisonIndex:
if self.unisonConfirm[self.unisonIndex][0] - pos < self.song.period * 2:
self.unisonActive = True
self.firstUnison = True
self.unisonNum = len(self.unisonPlayers[self.unisonIndex])
if self.starPowersActive > 0:
self.coOpMulti = 2 * self.starPowersActive
else:
self.coOpMulti = 1
#MFH - rewritten rockmeter / starpower miss logic, and Faaa's drum sounds:
#the old logic was ridiculously complicated
# For each existing player
if self.coOpRB:
oldCoOpRock = self.rock[self.coOpPlayerMeter]
coOpRock = 0.0
for i in range(self.numOfPlayers):
if (self.coOpRB and not guitar.coOpFailed) or not self.coOpRB:
if self.notesMissed[i] or self.lessMissed[i]: #(detects missed note or overstrum)
if self.instruments[i].isDrum:
if self.drumMisses == 0: #mode: always
self.rockmeterDecrease(i)
#elif self.drumMisses == 1 and self.countdownSeconds < 1: #mode: song start
elif self.drumMisses == 1 and self.countdown < 1: #mode: song start
self.rockmeterDecrease(i)
elif self.drumMisses == 2 and self.drumStart: #mode: song start
self.rockmeterDecrease(i)
else: #not drums
self.rockmeterDecrease(i)
if self.notesHit[i]:
self.rockmeterIncrease(i)
if self.coOpRB:
coOpRock += self.rock[i]
else:
if not self.instruments[i].coOpRestart:
self.rockmeterDrain(self.coOpPlayerMeter)
else:
oldCoOpRock = 0.0
coOpRock += self.rock[i]
self.notesMissed[i] = False
self.starNotesMissed[i] = False
self.notesHit[i] = False
self.lessMissed[i] = False
if self.unisonActive:
if self.firstUnison and i in self.unisonPlayers[self.unisonIndex]:
self.inUnison[i] = True
self.haveUnison[i] = True
#battle failing
if self.battle and self.numOfPlayers>1:
if self.rock[i] <= 0:
#self.displayText[i] = "You Failed!!!!"
#self.newScalingText(i, _("You Failed!!!!") )
self.newScalingText(i, self.tsYouFailedBattle )
#self.streakFlag = str(i) #QQstarS:Set [0] to [i] #if player0 streak50, set the flag to 1.
guitar.actions = [0,0,0]
if self.coOpRB: #RB co-op meter is just an average until someone dies.
if self.numDeadPlayers == 0:
self.rock[self.coOpPlayerMeter] = coOpRock/self.numOfPlayers
if (self.rock[self.coOpPlayerMeter]/self.rockMax > 0.667) and (oldCoOpRock/self.rockMax <= 0.667):
self.playersInGreen = 1
if self.engine.data.cheerSoundFound > 0: #haven't decided whether or not to cut crowdSound with crowdsEnabled = 0, but would have to do it at solos too...
self.engine.data.crowdSound.play()
if (self.rock[self.coOpPlayerMeter]/self.rockMax <= 0.667) and (oldCoOpRock/self.rockMax > 0.667):
self.playersInGreen = 0
if self.unisonActive: #akedrou unison bonuses
if self.firstUnison:
self.firstUnison = False
self.firstUnisonDone = True
if pos - self.unisonConfirm[self.unisonIndex][1] > 0 and self.firstUnisonDone:
for i in range(len(self.inUnison)):
if self.inUnison[i] != self.haveUnison[i]:
break
else:
if self.engine.data.cheerSoundFound > 0:
self.engine.data.crowdSound.play()
for i,guitar in enumerate(self.instruments):
if self.inUnison[i]:
guitar.starPower += 25
if guitar.starPower > 100:
guitar.starPower = 100
self.firstUnisonDone = False
if pos - self.unisonConfirm[self.unisonIndex][1] > self.song.period * 2:
self.unisonIndex+=1
self.unisonActive = False
self.unisonEarn = [False for i in self.playerList]
self.haveUnison = [False for i in self.playerList]
self.inUnison = [False for i in self.playerList]
#akedrou Song/Crowd logic
if self.numDeadPlayers == 0:
if self.crowdsEnabled == 3 and self.crowdsCheering == False and not self.countdown: #prevents cheer-cut-cheer
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
elif self.crowdsEnabled == 0 and self.crowdsCheering == True: #setting change
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
elif self.crowdsEnabled == 1:
if self.starPowersActive > 0:
if self.crowdsCheering == False:
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
else:
if self.crowdsCheering == True:
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
elif self.crowdsEnabled == 2:
if self.starPowersActive > 0 or self.playersInGreen > 0:
if self.crowdsCheering == False:
#self.song.setCrowdVolume(self.crowdVolume)
self.crowdsCheering = True
else:
if self.crowdsCheering == True:
#self.song.setCrowdVolume(0.0)
self.crowdsCheering = False
#Crowd fade-in/out
if self.crowdsCheering == True and self.crowdFaderVolume < self.crowdVolume:
self.crowdFaderVolume += self.crowdCheerFadeInChunk
if self.crowdFaderVolume > self.crowdVolume:
self.crowdFaderVolume = self.crowdVolume
self.song.setCrowdVolume(self.crowdFaderVolume)
if self.crowdsCheering == False and self.crowdFaderVolume > 0.0:
self.crowdFaderVolume -= self.crowdCheerFadeOutChunk
if self.crowdFaderVolume < 0.0:
self.crowdFaderVolume = 0.0
self.song.setCrowdVolume(self.crowdFaderVolume)
if self.countdown > 0 and self.countdownOK: #MFH won't start song playing if you failed or pause
self.countdown = max(self.countdown - ticks / self.song.period, 0)
self.countdownSeconds = self.countdown / self.songBPS + 1
if not self.countdown: #MFH - when countdown reaches zero, will only be executed once
#RF-mod should we collect garbage when we start?
self.engine.collectGarbage()
self.getHandicap()
self.song.setAllTrackVolumes(1)
self.song.setCrowdVolume(0.0)
self.song.clearPause()
self.crowdsCheering = False #catches crowdsEnabled != 3, pause before countdown, set to 3
self.starPowersActive = 0
self.playersInGreen = 0
for instrument in self.instruments:
if instrument.isVocal:
instrument.mic.start()
if self.playerList[0].practiceMode and self.engine.audioSpeedFactor == 1:
self.playerList[0].startPos -= self.song.period*4
if self.playerList[0].startPos < 0.0:
self.playerList[0].startPos = 0.0
self.song.play(start = self.playerList[0].startPos)
else:
self.song.play()
if self.resumeCountdown > 0: #unpause delay
self.resumeCountdown = max(self.resumeCountdown - ticks / self.song.period, 0)
self.resumeCountdownSeconds = self.resumeCountdown / self.songBPS + 1
if not self.resumeCountdown:
self.song.unpause()
self.pause = False
missedNotes = []
for instrument in self.instruments:
instrument.paused = False
if instrument.isVocal:
instrument.startMic()
if self.timeLeft == "0:01" and not self.mutedLastSecondYet and self.muteLastSecond == 1:
self.song.setAllTrackVolumes(0.0)
self.mutedLastSecondYet = True
#myfingershurt: this detects the end of the song and displays "you rock"
if self.countdown <= 0 and not self.song.isPlaying() and not self.done:
#must render fail message in render function, set and check flag here
self.youRock = True
#myfingershurt: This ends the song after 100 ticks of displaying "you rock" - if the use hasn't paused the game.
if self.rockFinished and not self.pause:
if self.battleGH:
self.restartSong()
else:
self.goToResults()
return
#MFH
if self.midiLyricMode == 1 and self.numMidiLyricLines > 0 and (not self.noMoreMidiLineLyrics) and not self.playingVocals: #line-by-line lyrics mode:
if pos >= (self.nextMidiLyricStartTime-self.lineByLineStartSlopMs):
self.currentSimpleMidiLyricLine = self.nextMidiLyricLine
if ( self.numMidiLyricLines > self.midiLyricLineIndex+1 ):
self.midiLyricLineIndex += 1
self.nextMidiLyricStartTime, self.nextMidiLyricLine = self.midiLyricLines[self.midiLyricLineIndex]
else:
self.noMoreMidiLineLyrics = True
elif self.midiLyricMode == 2 and self.numMidiLyricLines > 0 and (not self.noMoreMidiLineLyrics) and not self.playingVocals: #MFH - handle 2-line lyric mode with current-word highlighting advancement
#MFH - first, prepare / handle the active / top line (which will have highlighted words / syllables):
if pos >= self.nextLyricWordTime: #time to switch to this word
if self.nextLyricIsOnNewLine:
self.activeMidiLyricLineIndex += 1
self.activeMidiLyricWordSubIndex = 0
self.nextLyricIsOnNewLine = False
self.activeMidiLyricLine_GreyWords = ""
self.activeMidiLyricLine_GreenWords = "%s " % self.nextLyricEvent.text
self.numWordsInCurrentMidiLyricLine = 0
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]: #populate the first active line
self.numWordsInCurrentMidiLyricLine += 1
if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
self.activeMidiLyricWordSubIndex += 1
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
self.activeMidiLyricLine_WhiteWords = ""
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]:
if nextLyricTime > pos:
self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
else: #next lyric is on the same line
if self.activeMidiLyricWordSubIndex > 0: #set previous word as grey
lastLyricTime, lastLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex-1]
self.activeMidiLyricLine_GreyWords = "%s%s " % (self.activeMidiLyricLine_GreyWords, lastLyricEvent.text)
self.activeMidiLyricLine_GreenWords = "%s " % self.nextLyricEvent.text
if self.numWordsInCurrentMidiLyricLine > self.activeMidiLyricWordSubIndex+1: #there is another word in this line
self.activeMidiLyricWordSubIndex += 1
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex][self.activeMidiLyricWordSubIndex]
self.activeMidiLyricLine_WhiteWords = ""
for nextLyricTime, nextLyricEvent in self.midiLyricLineEvents[self.activeMidiLyricLineIndex]:
if nextLyricTime > pos:
self.activeMidiLyricLine_WhiteWords = "%s %s" % (self.activeMidiLyricLine_WhiteWords, nextLyricEvent.text)
else: #no more words in this line
if self.numMidiLyricLines > self.activeMidiLyricLineIndex+1: #there is another line
self.nextLyricIsOnNewLine = True
self.nextLyricWordTime, self.nextLyricEvent = self.midiLyricLineEvents[self.activeMidiLyricLineIndex+1][0]
self.activeMidiLyricLine_WhiteWords = ""
else: #no more lines
self.noMoreMidiLineLyrics = True
self.activeMidiLyricLine_WhiteWords = ""
self.currentSimpleMidiLyricLine = ""
#Log.notice("No more MIDI lyric lines to handle!")
#MFH - then, prepare / handle the next / bottom line (which will just be a simple line with all white text):
if self.numMidiLyricLines > self.activeMidiLyricLineIndex+1:
tempTime, self.currentSimpleMidiLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex+1]
else:
self.currentSimpleMidiLyricLine = ""
def endPick(self, num):
score = self.getExtraScoreForCurrentlyPlayedNotes(num)
if not self.instruments[num].endPick(self.song.getPosition()):
#if self.hopoDebugDisp == 1:
# Log.debug("MFH: An early sustain release was detected, and it was deemed too early, and muting was attempted.")
if self.muteSustainReleases > 0:
self.song.setInstrumentVolume(0.0, self.players[num].part)
#elif self.hopoDebugDisp == 1:
# Log.debug("MFH: An early sustain release was detected, and it was not deemed too early, so muting was not attempted.")
if score != 0:
scoreTemp = score*self.multi[num]
if self.coOpType:
if not self.coOpGH:
self.coOpScoreCard.score += (scoreTemp*self.scoring[num].getScoreMultiplier())
else: #shared mult
self.coOpScoreCard.addScore(scoreTemp)
else:
self.scoring[num].addScore(scoreTemp)
def render3D(self):
if self.stage.mode == 3 and Stage.videoAvailable:
if self.countdown <= 0:
if self.pause == True or self.failed == True:
self.stage.vidPlayer.paused = True
else:
self.stage.vidPlayer.paused = False
else:
self.stage.vidPlayer.paused = True
self.stage.render(self.visibility)
def renderVocals(self):
for i, vocalist in enumerate(self.instruments):
if vocalist.isVocal:
vocalist.render(self.visibility, self.song, self.getSongPosition(), self.numOfPlayers)
def renderGuitar(self):
for i, guitar in enumerate(self.instruments):
if guitar.isVocal:
continue
self.engine.view.setViewport(self.numberOfGuitars,self.playerList[i].guitarNum)
if self.theme not in (0, 1, 2) or (not self.pause and not self.failed):
glPushMatrix()
if guitar.fretboardHop > 0.0:
glTranslatef(0.0, guitar.fretboardHop, 0.0) #stump: fretboard hop
guitar.fretboardHop -= 0.005
if guitar.fretboardHop < 0.0:
guitar.fretboardHop = 0.0
self.neckrender[i].render(self.visibility, self.song, self.getSongPosition())
guitar.render(self.visibility, self.song, self.getSongPosition(), self.controls, self.killswitchEngaged[i]) #QQstarS: new
glPopMatrix()
if self.coOp or self.coOpGH:
guitar.rockLevel = self.rock[self.coOpPlayerMeter] / self.rockMax
if self.rock[self.coOpPlayerMeter]< self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
elif self.coOpRB:
guitar.rockLevel = self.rock[i] / self.rockMax
if self.rock[i]< self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
elif self.numDeadPlayers > 0 and self.rock[self.coOpPlayerMeter]< self.rockMax/6.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
else:
guitar.rockLevel = self.rock[i] / self.rockMax
if self.rock[i]< self.rockMax/3.0 and self.failingEnabled:
self.neckrender[i].isFailing = True
else:
self.neckrender[i].isFailing = False
self.engine.view.setViewport(1,0)
def getSongPosition(self):
if self.song and self.song.readyToGo:
if not self.done:
self.lastSongPos = self.song.getPosition()
return self.lastSongPos - self.countdown * self.song.period
else:
# Nice speeding up animation at the end of the song
return self.lastSongPos + 4.0 * (1 - self.visibility) * self.song.period
return 0.0
def screwUp(self, num, controls):
if self.screwUpVolume > 0.0:
#self.sfxChannel.setVolume(self.screwUpVolume)
#if `self.playerList[num].part` == "Bass Guitar":
if self.instruments[num].isBassGuitar:
#self.sfxChannel.play(self.engine.data.screwUpSoundBass)
self.engine.data.screwUpSoundBass.play()
elif self.instruments[num].isDrum:
if self.drumMisses > 0: #MFH's cleaned-up - Faaa Drum sound
self.instruments[num].playDrumSounds(controls)
#- if self.instruments[num].lastFretWasT1:
#- self.engine.data.T1DrumSound.play()
#- elif self.instruments[num].lastFretWasT2:
#- self.engine.data.T2DrumSound.play()
#- elif self.instruments[num].lastFretWasT3:
#- self.engine.data.T3DrumSound.play()
#- elif self.instruments[num].lastFretWasC:
#- self.engine.data.CDrumSound.play()
else:
self.engine.data.screwUpSoundDrums.play() #plays random drum sounds
else: #guitar
self.engine.data.screwUpSound.play()
  def doPick(self, num):
    """Handle a strum/pick from player `num` (classic hit-window logic).

    Ends any held sustain, then attempts to hit the required notes at the
    current song position.  On a hit: restores track volume, awards score and
    streak (with co-op routing), updates multipliers/stars, and triggers stage
    effects.  On a miss: mutes the track, breaks the streak, resets the
    multiplier and plays the screw-up sound — unless a drum fill was just
    active, in which case the penalty is waived.
    """
    if not self.song:
      return
    pos = self.getSongPosition()
    if self.instruments[num].playedNotes:
      # If all the played notes are tappable, there are no required notes and
      # the last note was played recently enough, ignore this pick
      if self.instruments[num].areNotesTappable(self.instruments[num].playedNotes) and \
         not self.instruments[num].getRequiredNotes(self.song, pos) and \
         pos - self.lastPickPos[num] <= self.song.period / 2:
        return
      self.endPick(num)
    self.lastPickPos[num] = pos
    # Co-op modes share one scorecard; otherwise use this player's own.
    if self.coOpType:
      scoreCard = self.coOpScoreCard
    else:
      scoreCard = self.scoring[num]
    self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
    #volshebnyi - disable failing if BRE is active
    if self.instruments[num].startPick(self.song, pos, self.controls):
      # --- Successful pick ---
      if self.instruments[num].isDrum:
        self.drumStart = True
      self.song.setInstrumentVolume(1.0, self.playerList[num].part)
      self.currentlyAnimating = True
      self.notesHit[num] = True #QQstarS:Set [0] to [i]
      tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
      if self.coOpType and not self.coOpGH:
        # RB co-op: shared score but each player's own multiplier.
        scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
      else:
        self.scoring[num].addScore(tempScoreValue)
      scoreCard.notesHit += 1
      #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
      if scoreCard.notesHit > scoreCard.totalStreakNotes:
        scoreCard.totalStreakNotes = scoreCard.notesHit
      scoreCard.streak += 1
      if self.coOpType:
        # Also track the individual player's stats alongside the shared card.
        self.scoring[num].notesHit += 1
        #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
        if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
          self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
        self.scoring[num].streak += 1
      scoreCard.updateAvMult()
      star = scoreCard.stars
      a = scoreCard.getStarScores()
      # Ding when a new star is earned (theme/setting dependent).
      if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
        self.engine.data.starDingSound.play()
      self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
      # Refresh the displayed multiplier every 10-note streak milestone.
      if self.coOpGH:
        if scoreCard.streak%10 == 0:
          self.lastMultTime[num] = pos
          self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
      elif not self.battleGH:
        if self.scoring[num].streak % 10 == 0:
          self.lastMultTime[num] = pos
          self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
      #myfingershurt
      if self.showAccuracy:
        self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
        self.dispAccuracy[num] = True
      # Mark a star phrase if the first played note is a star note.
      isFirst = True
      noteList = self.instruments[num].playedNotes
      for tym, noat in noteList:
        if noat.star and isFirst:
          self.instruments[num].isStarPhrase = True
        isFirst = False
    else:
      # --- Missed pick / overstrum ---
      ApplyPenalty = True
      if self.instruments[num].isDrum:
        if self.instruments[num].drumFillWasJustActive:
          # No penalty right after a drum fill; treat as a freestyle hit instead.
          ApplyPenalty = False
          self.instruments[num].freestylePick(self.song, pos, self.controls) #MFH - to allow late drum fill SP activation
          self.instruments[num].drumFillWasJustActive = False
      if ApplyPenalty:
        self.song.setInstrumentVolume(0.0, self.playerList[num].part)
        if self.whammyEffect == 1: #pitchbend
          self.song.resetInstrumentPitch(self.playerList[num].part)
        scoreCard.streak = 0
        if self.coOpType:
          self.scoring[num].streak = 0
          self.scoring[num].endingStreakBroken = True
        self.instruments[num].setMultiplier(1)
        self.currentlyAnimating = False
        self.stage.triggerMiss(pos)
        self.guitarSoloBroken[num] = True
        scoreCard.endingStreakBroken = True #MFH
        self.notesMissed[num] = True #QQstarS:Set [0] to [i]
        # Record a missed star note if the first matching note was one.
        isFirst = True
        noteList = self.instruments[num].matchingNotes
        for tym, noat in noteList:
          if (noat.star or noat.finalStar) and isFirst:
            self.starNotesMissed[num] = True
          isFirst = False
        self.screwUp(num, self.controls) #MFH - call screw-up sound handling function
        #myfingershurt: ensure accuracy display off when miss
        self.dispAccuracy[num] = False
    #myfingershurt: bass drum sound play
    if self.instruments[num].isDrum and self.bassKickSoundEnabled:
      self.instruments[num].playDrumSounds(self.controls, playBassDrumOnly = True)
      #if self.guitars[num].lastFretWasBassDrum:
      #  #self.sfxChannel.setVolume(self.screwUpVolume)
      #  self.engine.data.bassDrumSound.play()
  def doPick2(self, num, hopo = False):
    """Handle a pick (or HOPO trigger, when `hopo` is True) for player `num`,
    using the newer hit logic with HOPO state tracking.

    First flushes notes already missed before this pick (breaking streak and
    HOPO state); a HOPO attempt aborts there.  A 'hopo fudge' window then
    ignores redundant picks on the fret of the last hammer-on.  Finally the
    pick is attempted: hits award score/streak/multiplier with co-op routing,
    misses mute the track, reset HOPO/multiplier state and play the screw-up
    sound.
    """
    if not self.song:
      return
    pos = self.getSongPosition()
    #clear out any missed notes before this pick since they are already missed by virtue of the pick
    missedNotes = self.instruments[num].getMissedNotes(self.song, pos, catchup = True)
    # Co-op modes share one scorecard; otherwise use this player's own.
    if self.coOpType:
      scoreCard = self.coOpScoreCard
    else:
      scoreCard = self.scoring[num]
    if len(missedNotes) > 0:
      # Notes were already missed: break the streak and reset HOPO state.
      self.processedFirstNoteYet = True
      scoreCard.streak = 0
      if self.coOpType:
        self.scoring[num].streak = 0
        self.scoring[num].endingStreakBroken = True
      self.instruments[num].setMultiplier(1)
      self.instruments[num].hopoActive = 0
      self.instruments[num].wasLastNoteHopod = False
      self.instruments[num].hopoLast = -1
      self.guitarSoloBroken[num] = True
      scoreCard.endingStreakBroken = True #MFH
      self.notesMissed[num] = True #QQstarS:Set [0] to [i]
      for tym, theNote in missedNotes: #MFH
        if theNote.star or theNote.finalStar:
          self.starNotesMissed[num] = True
      # A HOPO attempt after missed notes is simply discarded.
      if hopo == True:
        return
    #hopo fudge
    hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
    activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
    # Single fret held matching the last hammer-on fret (either key row):
    # ignore the pick while still inside the HOPO timing window.
    if len(activeList) == 1 and (self.instruments[num].keys[self.instruments[num].hopoLast] == activeList[0] or self.instruments[num].keys[self.instruments[num].hopoLast+5] == activeList[0]):
      if self.instruments[num].wasLastNoteHopod and hopoFudge > 0 and hopoFudge < self.instruments[num].lateMargin:
        return
    self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
    if self.instruments[num].startPick2(self.song, pos, self.controls, hopo):
      # --- Successful pick ---
      self.song.setInstrumentVolume(1.0, self.playerList[num].part)
      if self.instruments[num].playedNotes:
        scoreCard.streak += 1
        self.currentlyAnimating = True
        if self.coOpType:
          # Also track the individual player's stats alongside the shared card.
          self.scoring[num].streak += 1
          self.scoring[num].notesHit += 1
          #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
          if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
            self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
        self.notesHit[num] = True #QQstarS:Set [0] to [i]
        scoreCard.notesHit += 1 # glorandwarf: was len(self.guitars[num].playedNotes)
        #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
        if scoreCard.notesHit > scoreCard.totalStreakNotes:
          scoreCard.totalStreakNotes = scoreCard.notesHit
        tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
        if self.coOpType and not self.coOpGH:
          # RB co-op: shared score but each player's own multiplier.
          scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
        else:
          scoreCard.addScore(tempScoreValue)
        scoreCard.updateAvMult()
        star = scoreCard.stars
        a = scoreCard.getStarScores()
        # Ding when a new star is earned (theme/setting dependent).
        if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
          self.engine.data.starDingSound.play()
        #self.updateStars(num)
        #self.playerList[num].stars, self.partialStar[num], self.starRatio[num] = self.getStarScores(num)
        self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
        # Refresh the displayed multiplier every 10-note streak milestone.
        if self.coOpGH:
          if scoreCard.streak%10 == 0:
            self.lastMultTime[num] = pos
            self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
        elif not self.battleGH:
          if self.scoring[num].streak % 10 == 0:
            self.lastMultTime[num] = pos
            self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
        # Mark a star phrase if the first played note is a star note.
        isFirst = True
        noteList = self.instruments[num].playedNotes
        for tym, noat in noteList:
          if noat.star and isFirst:
            self.instruments[num].isStarPhrase = True
          isFirst = False
    else:
      # --- Missed pick / overstrum ---
      self.instruments[num].hopoActive = 0
      self.instruments[num].wasLastNoteHopod = False
      self.currentlyAnimating = False
      self.instruments[num].hopoLast = -1
      self.song.setInstrumentVolume(0.0, self.playerList[num].part)
      if self.whammyEffect == 1: #pitchbend
        self.song.resetInstrumentPitch(self.playerList[num].part)
      scoreCard.streak = 0
      if self.coOpType:
        self.scoring[num].streak = 0
        self.scoring[num].endingStreakBroken = True
      self.instruments[num].setMultiplier(1)
      self.stage.triggerMiss(pos)
      self.guitarSoloBroken[num] = True
      scoreCard.endingStreakBroken = True #MFH
      self.notesMissed[num] = True #QQstarS:Set [0] to [i]
      # Record a missed star note if the first matching note was one.
      isFirst = True
      noteList = self.instruments[num].matchingNotes
      for tym, noat in noteList:
        if (noat.star or noat.finalStar) and isFirst:
          self.starNotesMissed[num] = True
        isFirst = False
      self.screwUp(num, self.controls)
#-----------------------
  def doPick3RF(self, num, hopo = False):
    """Process one pick (strum) for player *num* under the RF-mod HO/PO logic.

    First flags any notes that have already scrolled past the hit window as
    missed ("catch-up" misses), then tries to hit the currently required
    notes through Instrument.startPick3().  A successful pick updates
    streak, score, multiplier and the accuracy display; a failed pick resets
    the streak, mutes the instrument track and triggers miss feedback.

    hopo -- True when this call came from a hammer-on/pull-off event; the
            catch-up miss handling still runs, but the routine returns
            before attempting the pick itself.
    """
    if not self.song:
      return
    pos = self.getSongPosition()
    #clear out any past the window missed notes before this pick since they are already missed by virtue of the pick
    missedNotes = self.instruments[num].getMissedNotes(self.song, pos, catchup = True)
    #co-op modes score onto the shared card, otherwise onto this player's card
    if self.coOpType:
      scoreCard = self.coOpScoreCard
    else:
      scoreCard = self.scoring[num]
    if len(missedNotes) > 0:
      self.processedFirstNoteYet = True
      scoreCard.streak = 0
      if self.coOpType:
        self.scoring[num].streak = 0
        self.scoring[num].endingStreakBroken = True
      self.instruments[num].setMultiplier(1)
      #a catch-up miss also cancels any pending hammer-on/pull-off state
      self.instruments[num].hopoActive = 0
      self.instruments[num].wasLastNoteHopod = False
      self.instruments[num].hopoLast = -1
      self.guitarSoloBroken[num] = True
      scoreCard.endingStreakBroken = True #MFH
      self.notesMissed[num] = True #qqstars
      for tym, theNote in missedNotes: #MFH
        if theNote.star or theNote.finalStar:
          self.starNotesMissed[num] = True
    if hopo == True:
      return
    #hopo fudge
    hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
    activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
    #swallow the strum when only the last HO/PO fret (or its +5 solo variant) is
    #held and we are still inside the late margin of that HO/PO
    if len(activeList) == 1 and (self.instruments[num].keys[self.instruments[num].hopoLast] == activeList[0] or self.instruments[num].keys[self.instruments[num].hopoLast+5] == activeList[0]):
      if self.instruments[num].wasLastNoteHopod and hopoFudge > 0 and hopoFudge < self.instruments[num].lateMargin:
        return
    self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
    if self.instruments[num].startPick3(self.song, pos, self.controls, hopo):
      self.processedFirstNoteYet = True
      self.song.setInstrumentVolume(1.0, self.playerList[num].part)
      #Any previous notes missed, but new ones hit, reset streak counter
      if len(self.instruments[num].missedNotes) != 0:
        scoreCard.streak = 0
        if self.coOpType:
          self.scoring[num].streak = 0
          self.scoring[num].endingStreakBroken = True
        self.guitarSoloBroken[num] = True
        scoreCard.endingStreakBroken = True #MFH
        self.notesMissed[num] = True #qqstars
        for chord in self.instruments[num].missedNotes:
          for tym, theNote in chord: #MFH
            if not theNote.played and (theNote.star or theNote.finalStar):
              self.starNotesMissed[num] = True
      #only the first played note decides whether this begins a star phrase
      isFirst = True
      noteList = self.instruments[num].playedNotes
      for tym, noat in noteList:
        if noat.star and isFirst:
          self.instruments[num].isStarPhrase = True
        isFirst = False
      scoreCard.streak += 1
      self.notesHit[num] = True #qqstars
      self.currentlyAnimating = True
      scoreCard.notesHit += 1 # glorandwarf: was len(self.guitars[num].playedNotes)
      #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
      if scoreCard.notesHit > scoreCard.totalStreakNotes:
        scoreCard.totalStreakNotes = scoreCard.notesHit
      tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
      if self.coOpType:
        self.scoring[num].streak += 1
        self.scoring[num].notesHit += 1
        #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
        if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
          self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
        if self.coOpGH:
          scoreCard.addScore(tempScoreValue)
        else:
          scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
      else:
        scoreCard.addScore(tempScoreValue)
      scoreCard.updateAvMult()
      star = scoreCard.stars
      a = scoreCard.getStarScores()
      #play the star "ding" only when the star count just increased and the option allows it
      if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
        self.engine.data.starDingSound.play()
      self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
      #multiplier steps up every 10 notes of streak
      if self.coOpGH:
        if scoreCard.streak%10 == 0:
          self.lastMultTime[num] = pos
          self.instruments[num].setMultiplier(scoreCard.getScoreMultiplier())
      else:
        if self.scoring[num].streak % 10 == 0:
          self.lastMultTime[num] = pos
          self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
      #myfingershurt
      if self.showAccuracy:
        self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
        self.dispAccuracy[num] = True
    else:
      #failed pick: mute the part, break the streak and fire miss feedback
      self.currentlyAnimating = False
      self.instruments[num].hopoActive = 0
      self.instruments[num].wasLastNoteHopod = False
      self.instruments[num].hopoLast = 0
      self.song.setInstrumentVolume(0.0, self.playerList[num].part)
      if self.whammyEffect == 1: #pitchbend
        self.song.resetInstrumentPitch(self.playerList[num].part)
      scoreCard.streak = 0
      if self.coOpType:
        self.scoring[num].streak = 0
        self.scoring[num].endingStreakBroken = True
      self.guitarSoloBroken[num] = True
      scoreCard.endingStreakBroken = True #MFH
      self.instruments[num].setMultiplier(1)
      self.stage.triggerMiss(pos)
      self.notesMissed[num] = True #qqstars
      isFirst = True
      noteList = self.instruments[num].matchingNotes
      for tym, noat in noteList:
        if (noat.star or noat.finalStar) and isFirst:
          self.starNotesMissed[num] = True
        isFirst = False
      self.screwUp(num, self.controls)
      #myfingershurt: ensure accuracy display off when miss
      self.dispAccuracy[num] = False
#-----------------------
  def doPick3GH2(self, num, hopo = False, pullOff = False): #MFH - so DoPick knows when a pull-off was performed
    """Process one pick (strum) for player *num* under the GH2-style HO/PO logic.

    Applies catch-up misses for notes already past the hit window, evaluates
    the GH2 hammer-on/pull-off "fudge" rules (including same-note HO/PO
    problem-note tracking), then attempts the pick via
    Instrument.startPick3().  On a hit, streak/score/multiplier/accuracy are
    updated; on a failed pick the overstrum penalty may be waived depending
    on the active hopoStyle before streak and multiplier are reset.  Finally
    the bass-drum sound is serviced for drum players.

    hopo    -- True when this call was generated by a hammer-on/pull-off
               event; catch-up misses are still applied but the pick itself
               is skipped.
    pullOff -- True when the triggering event was specifically a pull-off;
               a bad pull-off never incurs the overstrum penalty.
    """
    if not self.song:
      return
    pos = self.getSongPosition()
    chordFudge = 1 #MFH - was 10 #myfingershurt - needed to detect chords
    if self.coOpType:
      scoreCard = self.coOpScoreCard
    else:
      scoreCard = self.scoring[num]
    missedNotes = self.instruments[num].getMissedNotesMFH(self.song, pos, catchup = True)
    if len(missedNotes) > 0:
      #notes scrolled past unplayed: break the streak and clear HO/PO state
      self.processedFirstNoteYet = True
      scoreCard.streak = 0
      if self.coOpType:
        self.scoring[num].streak = 0
        self.scoring[num].endingStreakBroken = True
      self.guitarSoloBroken[num] = True
      scoreCard.endingStreakBroken = True #MFH
      self.instruments[num].setMultiplier(1)
      self.instruments[num].hopoActive = 0
      self.instruments[num].sameNoteHopoString = False
      self.instruments[num].hopoProblemNoteNum = -1
      #self.problemNotesP1 = []
      #self.problemNotesP2 = []
      self.instruments[num].wasLastNoteHopod = False
      self.instruments[num].hopoLast = -1
      self.notesMissed[num] = True #QQstarS:Set [0] to [i]
      for tym, theNote in missedNotes: #MFH
        if theNote.star or theNote.finalStar:
          if self.logStarpowerMisses == 1:
            Log.debug("SP Miss: doPick3GH2(), foundMissedCatchupNote: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
          self.starNotesMissed[num] = True
          if self.unisonActive:
            self.inUnison[num] = False
      if self.hopoDebugDisp == 1:
        missedNoteNums = [noat.number for time, noat in missedNotes]
        #Log.debug("Miss: dopick3gh2(), found missed note(s).... %s" % str(missedNoteNums) + ", Time left=" + str(self.timeLeft))
        Log.debug("Miss: dopick3gh2(), found missed note(s)... %(missedNotes)s, Song time=%(songTime)s" % \
          {'missedNotes': str(missedNoteNums), 'songTime': str(self.timeLeft)})
    if hopo == True:
      return
    #hopo fudge
    hopoFudge = abs(abs(self.instruments[num].hopoActive) - pos)
    activeList = [k for k in self.keysList[num] if self.controls.getState(k)]
    #myfingershurt
    #Perhaps, if I were to just treat all tappable = 3's as problem notes, and just accept a potential overstrum, that would cover all the bases...
    # maybe, instead of checking against a known list of chord notes that might be associated, just track whether or not
    # the original problem note (tappable = 3) is still held. If it is still held, whether or not it matches the notes, it means
    # it can still be involved in the problematic pattern - so continue to monitor for an acceptable overstrum.
    #On areas where it's just a tappable = 3 note with no other notes in the hitwindow, it will be marked as a problem and then
    # if strummed, that would be considered the acceptable overstrum and it would behave the same. MUCH simpler logic!
    activeKeyList = []
    #myfingershurt: the following checks should be performed every time so GH2 Strict pull-offs can be detected properly.
    LastHopoFretStillHeld = False
    HigherFretsHeld = False
    problemNoteStillHeld = False
    #scan every fret key; n and n-5 cover the normal and solo variants of a fret
    for n, k in enumerate(self.keysList[num]):
      if self.controls.getState(k):
        activeKeyList.append(k)
        if self.instruments[num].hopoLast == n or self.instruments[num].hopoLast == n - 5:
          LastHopoFretStillHeld = True
        elif (n > self.instruments[num].hopoLast and n < 5) or (n - 5 > self.instruments[num].hopoLast and n > 4):
          HigherFretsHeld = True
        if self.instruments[num].hopoProblemNoteNum == n or self.instruments[num].hopoProblemNoteNum == n - 5:
          problemNoteStillHeld = True
    #ImpendingProblem = False
    if not hopo and self.instruments[num].wasLastNoteHopod and not self.instruments[num].LastStrumWasChord and not self.instruments[num].sameNoteHopoString:
      #if not hopo and self.instruments[num].wasLastNoteHopod:
      if LastHopoFretStillHeld == True and HigherFretsHeld == False:
        #redundant strum right after a HO/PO on the same fret: ignore it
        if self.instruments[num].wasLastNoteHopod and hopoFudge >= 0 and hopoFudge < self.instruments[num].lateMargin:
          if self.instruments[num].hopoActive < 0:
            self.instruments[num].wasLastNoteHopod = False
            #if self.hopoDebugDisp == 1:
            #  Log.debug("HOPO Strum ignored: Standard HOPO strum (hopoActive < 0). Time left=" + str(self.timeLeft))
            return
          elif self.instruments[num].hopoActive > 0: #make sure it's hopoActive!
            self.instruments[num].wasLastNoteHopod = False
            #if self.hopoDebugDisp == 1:
            #  Log.debug("HOPO Strum ignored: Standard HOPO strum (hopoActive not < 0). Time left=" + str(self.timeLeft))
            return
    #MFH - here, just check to see if we can release the expectation for an acceptable overstrum:
    if self.instruments[num].sameNoteHopoString and not problemNoteStillHeld:
      self.instruments[num].sameNoteHopoString = False
      self.instruments[num].hopoProblemNoteNum = -1
    self.killswitchEngaged[num] = False #always reset killswitch status when picking / tapping
    if self.instruments[num].startPick3(self.song, pos, self.controls, hopo):
      self.processedFirstNoteYet = True
      self.song.setInstrumentVolume(1.0, self.playerList[num].part)
      #Any previous notes missed, but new ones hit, reset streak counter
      if len(self.instruments[num].missedNotes) > 0:
        if self.hopoDebugDisp == 1 and not self.instruments[num].isDrum:
          #Log.debug("Skipped note(s) detected in startpick3: " + str(self.instruments[num].missedNoteNums))
          problemNoteMatchingList = [(int(tym), noat.number, noat.played) for tym, noat in self.instruments[num].matchingNotes]
          #Log.debug("Skipped note(s) detected in startpick3: " + str(self.instruments[num].missedNoteNums) + ", problemMatchingNotes: " + str(problemNoteMatchingList) + ", activeKeys= " + str(activeKeyList) + ", Time left=" + str(self.timeLeft))
          Log.debug("Skipped note(s) detected in startpick3: %(missedNotes)s, notesToMatch: %(matchNotes)s, activeFrets: %(activeFrets)s, Song time=%(songTime)s" % \
            {'missedNotes': str(self.instruments[num].missedNoteNums), 'matchNotes': str(problemNoteMatchingList), 'activeFrets': str(activeKeyList), 'songTime': str(self.timeLeft)})
        scoreCard.streak = 0
        if self.coOpType:
          self.scoring[num].streak = 0
          self.scoring[num].endingStreakBroken = True
        self.guitarSoloBroken[num] = True
        scoreCard.endingStreakBroken = True #MFH
        self.notesMissed[num] = True #QQstarS:Set [0] to [i]
        for chord in self.instruments[num].missedNotes:
          for tym, theNote in chord: #MFH
            if not theNote.played and (theNote.star or theNote.finalStar):
              if self.logStarpowerMisses == 1:
                Log.debug("SP Miss: doPick3GH2(), afterStartPick3Ok-foundMissedCatchupNote: %d, gameTime: %s" % (theNote.number, self.timeLeft) )
              self.starNotesMissed[num] = True
              if self.unisonActive:
                self.inUnison[num] = False
      #only the first played note decides whether this begins a star phrase
      isFirst = True
      noteList = self.instruments[num].playedNotes
      for tym, noat in noteList:
        if noat.star and isFirst:
          self.instruments[num].isStarPhrase = True
        isFirst = False
      scoreCard.streak += 1
      self.notesHit[num] = True #QQstarS:Set [0] to [i]
      self.currentlyAnimating = True
      scoreCard.notesHit += 1 # glorandwarf: was len(self.guitars[num].playedNotes)
      #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
      if scoreCard.notesHit > scoreCard.totalStreakNotes:
        scoreCard.totalStreakNotes = scoreCard.notesHit
      tempScoreValue = len(self.instruments[num].playedNotes) * self.baseScore * self.multi[num]
      if self.coOpType:
        self.scoring[num].streak += 1 #needed in co-op GH for RF HO/PO
        self.scoring[num].notesHit += 1
        #MFH - tell ScoreCard to update its totalStreak counter if we've just passed 100% for some reason:
        if self.scoring[num].notesHit > self.scoring[num].totalStreakNotes:
          self.scoring[num].totalStreakNotes = self.scoring[num].notesHit
        if self.coOpGH:
          scoreCard.addScore(tempScoreValue)
        else:
          scoreCard.score += (tempScoreValue*self.scoring[num].getScoreMultiplier())
      else:
        scoreCard.addScore(tempScoreValue)
      scoreCard.updateAvMult()
      star = scoreCard.stars
      a = scoreCard.getStarScores()
      #play the star "ding" only when the star count just increased and the option allows it
      if a > star and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
        self.engine.data.starDingSound.play()
      self.stage.triggerPick(pos, [n[1].number for n in self.instruments[num].playedNotes])
      #multiplier steps up every 10 notes of streak
      if self.scoring[num].streak % 10 == 0:
        self.lastMultTime[num] = self.getSongPosition()
        self.instruments[num].setMultiplier(self.scoring[num].getScoreMultiplier())
      if self.showAccuracy:
        self.accuracy[num] = self.instruments[num].playedNotes[0][0] - pos
        self.dispAccuracy[num] = True
    else:
      #failed pick: decide whether the overstrum penalty applies for this hopoStyle
      ApplyPenalty = True
      if self.hopoDebugDisp == 1:
        sameNoteHopoFlagWas = self.instruments[num].sameNoteHopoString #MFH - need to store this for debug info
        lastStrumWasChordWas = self.instruments[num].LastStrumWasChord #MFH - for debug info
        #problemNotesForP1Were = self.problemNotesP1
      if pullOff: #always ignore bad pull-offs
        ApplyPenalty = False
      if (self.hopoStyle == 2 and hopo == True): #GH2 Strict
        if (self.instruments[num].LastStrumWasChord or (self.instruments[num].wasLastNoteHopod and LastHopoFretStillHeld)):
          ApplyPenalty = False
      if (self.hopoStyle == 4 and hopo == True): #GH2 Sloppy
        ApplyPenalty = False
      if (self.hopoStyle == 3 and hopo == True): #GH2
        ApplyPenalty = False
        if not (self.instruments[num].LastStrumWasChord or (self.instruments[num].wasLastNoteHopod and LastHopoFretStillHeld)):
          self.instruments[num].hopoActive = 0
          self.instruments[num].wasLastNoteHopod = False
          self.instruments[num].LastStrumWasChord = False
          self.instruments[num].sameNoteHopoString = False
          self.instruments[num].hopoProblemNoteNum = -1
          self.instruments[num].hopoLast = -1
      if self.instruments[num].sameNoteHopoString:
        #if LastHopoFretStillHeld and not HigherFretsHeld:
        if LastHopoFretStillHeld:
          #acceptable overstrum for the same-note HO/PO problem pattern
          ApplyPenalty = False
          self.instruments[num].playedNotes = self.instruments[num].lastPlayedNotes #restore played notes status
          self.instruments[num].sameNoteHopoString = False
          self.instruments[num].hopoProblemNoteNum = -1
        elif HigherFretsHeld:
          self.instruments[num].sameNoteHopoString = False
          self.instruments[num].hopoProblemNoteNum = -1
      if ApplyPenalty == True:
        self.currentlyAnimating = False
        self.instruments[num].hopoActive = 0
        self.instruments[num].wasLastNoteHopod = False
        self.instruments[num].sameNoteHopoString = False
        self.instruments[num].hopoProblemNoteNum = -1
        self.instruments[num].hopoLast = -1
        self.song.setInstrumentVolume(0.0, self.playerList[num].part)
        if self.whammyEffect == 1: #pitchbend
          self.song.resetInstrumentPitch(self.playerList[num].part)
        scoreCard.streak = 0
        if self.coOpType:
          self.scoring[num].streak = 0
          self.scoring[num].endingStreakBroken = True
        self.guitarSoloBroken[num] = True
        scoreCard.endingStreakBroken = True #MFH
        self.instruments[num].setMultiplier(1)
        self.stage.triggerMiss(pos)
        if self.hopoDebugDisp == 1 and not self.instruments[num].isDrum:
          problemNoteMatchingList = [(int(tym), noat.number, noat.played) for tym, noat in self.instruments[num].matchingNotes]
          #Log.debug("Miss: dopick3gh2(), fail-startpick3()...HigherFretsHeld: " + str(HigherFretsHeld) + ", LastHopoFretHeld: " + str(LastHopoFretStillHeld) + ", lastStrumWasChord: " + str(lastStrumWasChordWas) + ", sameNoteHopoStringFlag: " + str(sameNoteHopoFlagWas) + ", problemNoteMatchingList: " + str(problemNoteMatchingList) + ", activeKeys= " + str(activeKeyList) + ", Time left=" + str(self.timeLeft))
          Log.debug("Miss: dopick3gh2(), fail-startpick3()...HigherFretsHeld: %(higherFrets)s, LastHopoFretHeld: %(lastHopoFret)s, lastStrumWasChord: %(lastStrumChord)s, sameNoteHopoStringFlag: %(sameNoteHopoFlag)s, notesToMatch: %(matchNotes)s, activeFrets: %(activeFrets)s, Song time=%(songTime)s" % \
            {'higherFrets': str(HigherFretsHeld), 'lastHopoFret': str(LastHopoFretStillHeld), 'lastStrumChord': str(lastStrumWasChordWas), 'sameNoteHopoFlag': str(sameNoteHopoFlagWas), 'matchNotes': str(problemNoteMatchingList), 'activeFrets': str(activeKeyList), 'songTime': str(self.timeLeft)})
        self.notesMissed[num] = True #QQstarS:Set [0] to [i]
        isFirst = True
        noteList = self.instruments[num].matchingNotes
        for tym, noat in noteList:
          if (noat.star or noat.finalStar) and isFirst:
            if self.logStarpowerMisses == 1:
              Log.debug("SP Miss: doPick3GH2(), afterStartPick3Fail, matchingNote: %d, gameTime: %s" % (noat.number, self.timeLeft) )
            self.starNotesMissed[num] = True
            if self.unisonActive:
              self.inUnison[num] = False
          isFirst = False
        self.screwUp(num, self.controls)
        self.dispAccuracy[num] = False
    #myfingershurt: bass drum sound play
    if self.instruments[num].isDrum and self.bassKickSoundEnabled:
      self.instruments[num].playDrumSounds(self.controls, playBassDrumOnly = True)
      #if self.guitars[num].lastFretWasBassDrum:
      #  #self.sfxChannel.setVolume(self.screwUpVolume)
      #  self.engine.data.bassDrumSound.play()
#stump: hop a fretboard
def hopFretboard(self, num, height):
if self.instruments[num].fretboardHop < height:
self.instruments[num].fretboardHop = height
def activateSP(self, num): #QQstarS: Fix this function, add a element "num"
if self.battleGH: #from akedrou: this will die horribly if you allow vocal players in. Just sayin'. ... sorry?
time = self.getSongPosition()
if time - self.battleJustUsed[num] > 1500: #must wait 1.5sec before next object use
if self.instruments[num].battleObjects[0] != 0:
self.engine.data.battleUsedSound.play()
self.instruments[self.battleTarget[num]].battleStatus[self.instruments[num].battleObjects[0]] = True
#start object use on other player
self.instruments[self.battleTarget[num]].battleStartTimes[self.instruments[num].battleObjects[0]] = time
if self.instruments[num].battleObjects[0] == 1:
self.instruments[self.battleTarget[num]].battleDrainStart = time
elif self.instruments[num].battleObjects[0] == 3:
#Log.debug("String Cut")
self.instruments[self.battleTarget[num]].battleBreakNow = self.instruments[self.battleTarget[num]].battleBreakLimit
self.instruments[self.battleTarget[num]].battleBreakString = random.randint(0,4)
self.endPick(self.battleTarget[num])
elif self.instruments[num].battleObjects[0] == 4:
#Log.debug("Wammy")
self.instruments[self.battleTarget[num]].battleWhammyNow = self.instruments[self.battleTarget[num]].battleWhammyLimit
self.endPick(self.battleTarget[num])
elif self.instruments[num].battleObjects[0] == 5:
#Log.debug("Take Object")
if self.instruments[self.battleTarget[num]].battleObjects[0] != 0:
self.instruments[num].battleObjects[0] = self.instruments[self.battleTarget[num]].battleObjects[0]
self.instruments[self.battleTarget[num]].battleObjects[0] = self.instruments[self.battleTarget[num]].battleObjects[1]
self.instruments[self.battleTarget[num]].battleObjects[1] = self.instruments[self.battleTarget[num]].battleObjects[2]
self.instruments[self.battleTarget[num]].battleObjects[2] = 0
self.instruments[self.battleTarget[num]].battleStatus[5] = False
self.battleText[num] = None
self.battleTextTimer[num] = 0
self.instruments[num].battleObjectGained = self.instruments[num].battleObjects[0]
self.battleJustUsed[num] = time
return
#tells us which objects are currently running
if self.instruments[self.battleTarget[num]].battleBeingUsed[1] != 0:
self.instruments[self.battleTarget[num]].battleStatus[self.instruments[self.battleTarget[num]].battleBeingUsed[1]] = False
if self.instruments[self.battleTarget[num]].battleBeingUsed[0] != 0:
if self.instruments[self.battleTarget[num]].battleBeingUsed[0] != self.instruments[num].battleObjects[0]:
self.instruments[self.battleTarget[num]].battleBeingUsed[1] = self.instruments[self.battleTarget[num]].battleBeingUsed[0]
self.instruments[self.battleTarget[num]].battleBeingUsed[0] = self.instruments[num].battleObjects[0]
#bring up other objects in players queue
self.instruments[num].battleObjects[0] = self.instruments[num].battleObjects[1]
self.instruments[num].battleObjects[1] = self.instruments[num].battleObjects[2]
self.instruments[num].battleObjects[2] = 0
self.battleText[num] = None
self.battleTextTimer[num] = 0
self.battleJustUsed[num] = time
#Log.debug("Battle Object used, Objects left %s" % str(self.instruments[num].battleObjects))
elif self.coOpGH: #akedrou also says don't let vocal players in GH Co-Op.
if self.coOpStarPower >= (50 * self.numOfPlayers) and self.instruments[num].starPowerActive == False:
time = self.getSongPosition()
Log.debug("Star Power Activated at: " + str(time))
self.coOpStarPowerActive[num] = time
if time - min(self.coOpStarPowerActive) < 300.0 and not self.instruments[i].starPowerActive:
self.engine.data.starActivateSound.play()
for i in range(self.numOfPlayers):
self.hopFretboard(i, 0.07) #stump
self.instruments[i].starPowerActive = True
self.instruments[i].neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.instruments[i].neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
else:
if time - self.coOpStarPowerTimer > 1000.0:
for i in range(self.numOfPlayers):
Log.debug(str(time - self.coOpStarPowerActive[i]))
if time - self.coOpStarPowerActive[i] < 300.0:
continue
if self.instruments[i].isDrum and self.autoDrumStarpowerActivate == 0 and self.numDrumFills < 2:
self.activateSP(i)
break
if self.phrases > 1:
self.newScalingText(i, self.tsCoOpStarPower )
self.coOpStarPowerTimer = time
else:
guitar = self.instruments[num]
if guitar.starPower >= 50: #QQstarS:Set [0] to [i]
#self.sfxChannel.setVolume(self.sfxVolume)
#if self.engine.data.cheerSoundFound:
#self.engine.data.crowdSound.play()
if self.coOpRB:
while len(self.deadPlayerList) > 0:
i = self.deadPlayerList.pop(0) #keeps order intact (with >2 players)
if self.instruments[i].coOpFailed and self.timesFailed[i]<3:
self.instruments[i].coOpRescue(self.getSongPosition())
self.rock[i] = self.rockMax * 0.667
guitar.starPower -= 50
self.engine.data.rescueSound.play()
self.coOpFailDone[i] = False
self.numDeadPlayers -= 1
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
break
else:
if not guitar.starPowerActive:
self.engine.data.starActivateSound.play()
guitar.starPowerActive = True #QQstarS:Set [0] to [i]
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
else:
if not guitar.starPowerActive:
self.engine.data.starActivateSound.play()
guitar.starPowerActive = True #QQstarS:Set [0] to [i]
if not guitar.isVocal:
self.hopFretboard(num, 0.07) #stump
guitar.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
guitar.neck.ocount = 0 #MFH - this triggers the oFlash strings & timer
  def goToResults(self):
    """Stop the song and transition to the results screen.

    Practice-mode scores are zeroed first.  If no player scored anything
    (and, in co-op, the shared card is also empty) the results screen is
    skipped and the song is simply changed.  Otherwise each ScoreCard gets
    one final average-multiplier / star refresh, co-op modes append the
    combined card to the score list with a numeric co-op type tag, and the
    GameResultsScene is created.
    """
    self.ending = True
    if self.song:
      self.song.stop()
      self.done = True
      noScore = False
      for i, player in enumerate(self.playerList):
        player.twoChord = self.instruments[i].twoChord
        if self.playerList[0].practiceMode:
          self.scoring[i].score = 0
        if self.scoring[i].score > 0:
          noScore = False
          break
      else:
        #for/else: no player broke out with a positive score
        if not (self.coOpType and self.coOpScoreCard.score > 0):
          noScore = True
      #Reset Score if Jurgen played -- Spikehead777 - handled by GameResults now. You can watch your score evaporate!
      # if self.jurgPlayer[i]:
      # self.playerList[i].score = 0
      # if self.coOpType and True in self.jurgPlayer:
      # self.coOpScore = 0
      # if not self.engine.audioSpeedFactor == 1: #MFH - only allow score uploads and unlocking when songs are played at full speed.
      # noScore = True
      # self.changeSong()
      #if self.playerList[0].score == 0:
      #if self.numOfPlayers == 1:
      #noScore = True
      #self.changeSong()
      #if self.numOfPlayers == 2:
      # if self.coOpType:
      # if self.coOpScore == 0:
      # noScore = True
      # self.changeSong()
      # if self.playerList[0].score == 0 and self.playerList[1].score == 0:
      # noScore = True
      # self.changeSong()
      if not noScore:
        #MFH/akedrou - force one stat update before gameresults just in case:
        self.getHandicap()
        for scoreCard in self.scoring:
          scoreCard.updateAvMult()
          scoreCard.getStarScores()
        if self.coOpType:
          #self.updateStars(self.coOpPlayerIndex, forceUpdate = True)
          self.coOpScoreCard.updateAvMult()
          self.coOpScoreCard.getStarScores()
        #akedrou - begin the implementation of the ScoreCard
        if self.coOpType:
          scoreList = self.scoring
          scoreList.append(self.coOpScoreCard)
          #numeric tag telling GameResultsScene which co-op flavor was played
          if self.coOp:
            coOpType = 1
          elif self.coOpRB:
            coOpType = 2
          elif self.coOpGH:
            coOpType = 3
          else:
            coOpType = 1
        else:
          scoreList = self.scoring
          coOpType = 0
        self.engine.view.setViewport(1,0)
        #self.session.world.deleteScene(self)
        self.freeResources()
        self.engine.world.createScene("GameResultsScene", libraryName = self.libraryName, songName = self.songName, scores = scoreList, coOpType = coOpType, careerMode = self.careerMode)
      else:
        self.changeSong()
  def keyPressed(self, key, unicode, control = None):
    """Handle a key/button press when HO/POs are disabled.

    Drum lanes are latched and picked immediately.  If hopoStyle > 0 the
    event is delegated to keyPressed3().  Otherwise the control is resolved
    to a player and routed to strum picking (doPick), tap checking, the
    pause menu, cheat-code entry, starpower activation and the digital
    killswitch.  Consumed paths return True; the final fall-through
    implicitly returns None.
    """
    #RF style HOPO playing
    #myfingershurt: drums :)
    for i in range(self.numOfPlayers):
      if self.instruments[i].isDrum and control in (self.instruments[i].keys):
        #latch a short visual "held" countdown for the struck drum lane
        if control in Player.bassdrums:
          self.instruments[i].drumsHeldDown[0] = 100
        elif control in Player.drum1s:
          self.instruments[i].drumsHeldDown[1] = 100
        elif control in Player.drum2s:
          self.instruments[i].drumsHeldDown[2] = 100
        elif control in Player.drum3s:
          self.instruments[i].drumsHeldDown[3] = 100
        elif control in Player.drum5s:
          self.instruments[i].drumsHeldDown[4] = 100
        self.handlePick(i)
        return True
    if self.hopoStyle > 0: #HOPOs enabled
      res = self.keyPressed3(key, unicode, control)
      return res
    actual = False
    if not control:
      #no control supplied -> this is a genuine hardware press; resolve it
      actual = True
      control = self.controls.keyPressed(key)
    num = self.getPlayerNum(control)
    if num is None:
      return True
    if self.instruments[num].isDrum and control in self.instruments[num].keys:
      if actual:
        if control in Player.bassdrums:
          self.instruments[num].drumsHeldDown[0] = 100
          self.instruments[num].playedSound[0] = False
        elif control in Player.drum1s:
          self.instruments[num].drumsHeldDown[1] = 100
          self.instruments[num].playedSound[1] = False
        elif control in Player.drum2s:
          self.instruments[num].drumsHeldDown[2] = 100
          self.instruments[num].playedSound[2] = False
        elif control in Player.drum3s:
          self.instruments[num].drumsHeldDown[3] = 100
          self.instruments[num].playedSound[3] = False
        elif control in Player.drum5s:
          self.instruments[num].drumsHeldDown[4] = 100
          self.instruments[num].playedSound[4] = False
    if self.battleGH:
      if self.instruments[num].battleStatus[3]:
        #string-break battle attack: mashing the broken string's key repairs it
        if control == self.instruments[num].keys[self.instruments[num].battleBreakString]:
          self.instruments[num].battleBreakNow -= 1
          self.controls.toggle(control, False)
    if control in (self.instruments[num].actions):
      for k in self.keysList[num]:
        if self.controls.getState(k):
          self.keyBurstTimeout[num] = None
          break
      else:
        #for/else: strummed with no fret held at all -> consume the event
        #self.keyBurstTimeout[num] = self.engine.timer.time + self.keyBurstPeriod
        return True
    if control in (self.instruments[num].actions) and self.song:
      self.doPick(num)
    elif control in self.keysList[num] and self.song:
      # Check whether we can tap the currently required notes
      pos = self.getSongPosition()
      notes = self.instruments[num].getRequiredNotes(self.song, pos)
      if ((self.scoring[num].streak > 0 and self.instruments[num].areNotesTappable(notes)) or \
         (self.instruments[num].guitarSolo and control in self.soloKeysList[num])) and \
         self.instruments[num].controlsMatchNotes(self.controls, notes):
        self.doPick(num)
    elif control in Player.starts:
      if self.ending == True:
        return True
      self.pauseGame()
      self.engine.view.pushLayer(self.menu)
      return True
    elif key >= ord('a') and key <= ord('z'):
      # cheat codes
      n = len(self.enteredCode)
      for code, func in self.cheatCodes:
        if n < len(code):
          if key == code[n]:
            self.enteredCode.append(key)
            if self.enteredCode == code:
              self.enteredCode = []
              self.player.cheating = True
              func()
            break
          else:
            self.enteredCode = []
    #myfingershurt: Adding starpower and killswitch for "no HOPOs" mode
    for i, player in enumerate(self.playerList):
      if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
        self.activateSP(i)
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = True
  def keyPressed2(self, key, unicode, control = None):
    """Handle a key/button press for the doPick2-based HO/PO path.

    When called without *control* the key is resolved through the controls
    map (a real hardware press); when *control* is supplied the call came
    from a synthesized hammer-on/pull-off event.  Determines which player
    strummed or fretted, decides HO/PO eligibility, then strums via
    doPick2().  Also services battle string-break repair, the pause menu,
    cheat codes, starpower activation and the digital killswitch.
    """
    hopo = False
    if not control:
      control = self.controls.keyPressed(key)
    else:
      hopo = True
    if self.battleGH:
      #string-break battle attack: mashing the broken string's key repairs it
      if self.instruments[0].battleStatus[3]:
        if control == self.instruments[0].keys[self.instruments[0].battleBreakString]:
          self.instruments[0].battleBreakNow -=1
          self.controls.toggle(control, False)
      if self.instruments[1].battleStatus[3]:
        if control == self.instruments[1].keys[self.instruments[1].battleBreakString]:
          self.instruments[1].battleBreakNow -=1
          self.controls.toggle(control, False)
      if len(self.instruments) > 2:
        if self.instruments[2].battleStatus[3]:
          if control == self.instruments[2].keys[self.instruments[2].battleBreakString]:
            self.instruments[2].battleBreakNow -= 1
            self.controls.toggle(control, False)
    #if True: #akedrou - Probably not the best place for ontological discussions. Let's just assume True is always True.
    pressed = -1
    #a strum action always takes precedence and cancels HO/PO mode
    for i in range(self.numOfPlayers):
      if control in (self.instruments[i].actions):
        hopo = False
        pressed = i
    numpressed = [len([1 for k in guitar.keys if self.controls.getState(k)]) for guitar in self.instruments]
    activeList = [k for k in self.keysList[pressed] if self.controls.getState(k)]
    for i in range(self.numOfPlayers):
      if control in (self.instruments[i].keys) and self.song and numpressed[i] >= 1:
        if self.instruments[i].wasLastNoteHopod and self.instruments[i].hopoActive >= 0:
          hopo = True
          pressed = i
    if pressed >= 0:
      for k in self.keysList[pressed]:
        if self.controls.getState(k):
          self.keyBurstTimeout[pressed] = None
          break
      else:
        #for/else: no fret held -> arm the burst timeout instead of strumming
        self.keyBurstTimeout[pressed] = self.engine.timer.time + self.keyBurstPeriod
        return True
    if pressed >= 0 and self.song:
      self.doPick2(pressed, hopo)
    if control in Player.starts:
      if self.ending == True:
        return True
      self.pauseGame()
      self.engine.view.pushLayer(self.menu)
      return True
    elif key >= ord('a') and key <= ord('z'):
      # cheat codes
      n = len(self.enteredCode)
      for code, func in self.cheatCodes:
        if n < len(code):
          if key == code[n]:
            self.enteredCode.append(key)
            if self.enteredCode == code:
              self.enteredCode = []
              for player in self.playerList:
                player.cheating = True
              func()
            break
          else:
            self.enteredCode = []
    for i, player in enumerate(self.playerList):
      if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
        self.activateSP(i)
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = True
def keyPressed3(self, key, unicode, control = None, pullOff = False): #MFH - gonna pass whether this was called from a pull-off or not
    """Key-press handler used when GH2-style HOPO logic is in effect.

    May be re-entered from keyReleased3() with an explicit *control* and
    pullOff=True to simulate a pull-off; in that case the press is treated
    as a HOPO candidate.  When *control* is None it is resolved from *key*
    and the press counts as an actual strum/hit.  Also handles drum-pad
    flashes, battle-mode break keys, pause/start, cheat-code entry,
    starpower activation and the digital killswitch.
    """
    hopo = False
    actual = False  # True only for a real key press (not a simulated pull-off)
    if not control:
      actual = True
      control = self.controls.keyPressed(key)
    else:
      hopo = True
    num = self.getPlayerNum(control)
    # NOTE(review): getPlayerNum() returns -1 (never None) for unmatched
    # controls, so the "is not None" guard below always passes and
    # self.instruments[num] may index the last instrument -- confirm intent.
    if self.battleGH and num is not None:
      if self.instruments[num].battleStatus[3]:
        if control == self.instruments[num].keys[self.instruments[num].battleBreakString]:
          self.instruments[num].battleBreakNow -=1
          self.controls.toggle(control, False)
    pressed = -1
    for i in range(self.numOfPlayers):
      # Drum pads: start the 100-frame hold-down flash and mark the pad
      # sound as not yet played.
      # NOTE(review): writes to self.instruments[num] while looping over i;
      # these coincide only when the control belongs to player i -- verify.
      if self.instruments[i].isDrum and control in self.instruments[i].keys and actual:
        if control in Player.bassdrums:
          self.instruments[num].drumsHeldDown[0] = 100
          self.instruments[num].playedSound[0] = False
        elif control in Player.drum1s:
          self.instruments[num].drumsHeldDown[1] = 100
          self.instruments[num].playedSound[1] = False
        elif control in Player.drum2s:
          self.instruments[num].drumsHeldDown[2] = 100
          self.instruments[num].playedSound[2] = False
        elif control in Player.drum3s:
          self.instruments[num].drumsHeldDown[3] = 100
          self.instruments[num].playedSound[3] = False
        elif control in Player.drum5s:
          self.instruments[num].drumsHeldDown[4] = 100
          self.instruments[num].playedSound[4] = False
      if control in (self.instruments[i].actions):
        hopo = False
        pressed = i
    # Per-player count of currently held fret keys.
    numpressed = [len([1 for k in guitar.keys if self.controls.getState(k)]) for guitar in self.instruments]
    # NOTE(review): if no action matched, pressed is still -1 and
    # self.keysList[pressed] reads the last player's key list -- confirm.
    activeList = [k for k in self.keysList[pressed] if self.controls.getState(k)]
    if self.ignoreOpenStrums and len(activeList) < 1: #MFH - filter out strums without frets
      pressed = -1
    for i in range(self.numOfPlayers): #akedrou- probably loopable...
      if control in self.instruments[i].keys and numpressed[i] >= 1:
        if self.instruments[i].hopoActive > 0 or (self.instruments[i].wasLastNoteHopod and self.instruments[i].hopoActive == 0):
          if not pullOff and (self.hopoStyle == 2 or self.hopoStyle == 3): #GH2 or GH2 Strict, don't allow lower-fret tapping while holding a higher fret
            activeKeyList = []
            LastHopoFretStillHeld = False
            HigherFretsHeld = False
            for p, k in enumerate(self.keysList[i]):
              if self.controls.getState(k):
                activeKeyList.append(k)
                if self.instruments[i].hopoLast == p or self.instruments[i].hopoLast-5 == p:
                  LastHopoFretStillHeld = True
                elif (p > self.instruments[i].hopoLast and p < 5) or (p > self.instruments[i].hopoLast and p > 4):
                  HigherFretsHeld = True
            if not(LastHopoFretStillHeld and not HigherFretsHeld): #tapping a lower note should do nothing.
              hopo = True
              pressed = i
          else: #GH2 Sloppy or RF-Mod
            hopo = True
            pressed = i
          break
    #MFH - this is where the marked little block above used to be - possibly causing false "late pick" detections from HOPOs...
    if pressed >= 0:
      #myfingershurt:
      self.handlePick(pressed, hopo = hopo, pullOff = pullOff)
      #if self.hopoStyle == 1: #1 = rf-mod
      # self.doPick3RF(pressed, hopo)
      #elif self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4: #GH2 style HOPO
      # self.doPick3GH2(pressed, hopo, pullOff)
      #else: #2 = no HOPOs
      # self.doPick(pressed)
    if control in Player.starts:
      if self.ending == True:
        return True
      self.pauseGame()
      self.engine.view.pushLayer(self.menu)
      return True
    elif key >= ord('a') and key <= ord('z'):
      # cheat codes
      n = len(self.enteredCode)
      for code, func in self.cheatCodes:
        if n < len(code):
          if key == code[n]:
            self.enteredCode.append(key)
            if self.enteredCode == code:
              self.enteredCode = []
              for player in self.playerList:
                player.cheating = True
              func()
            break
      else:
        self.enteredCode = []
    for i, player in enumerate(self.playerList):
      if (control == player.keyList[STAR] and not self.isSPAnalog[i]) or control == player.keyList[CANCEL]:
        self.activateSP(i)
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = True
def CheckForValidKillswitchNote(self, num):
    """Report whether player *num* is holding a sustain that the
    killswitch may act on.

    True only when a song is loaded, the player currently has played
    notes, and the current pick has been held for more than half of a
    quarter of the song period.
    """
    if not self.song:
      return False
    inst = self.instruments[num]
    if not inst.playedNotes:
      return False
    heldFor = inst.getPickLength(self.getSongPosition())
    # A sustain qualifies once it exceeds half of a quarter-period.
    return heldFor > 0.5 * (self.song.period / 4)
def getExtraScoreForCurrentlyPlayedNotes(self, num):
    """Return the sustain bonus earned by player *num*'s currently held
    notes (0 for drums, vocals, or when no song is loaded).

    The bonus is baseSustainScore * hold length * held-note count, awarded
    only once the hold exceeds 1.1 quarter-periods.  When per-frame star
    updates are enabled, also refreshes the score card and plays the star
    ding if the bonus crosses a star threshold.
    """
    inst = self.instruments[num]
    if not self.song or inst.isDrum or inst.isVocal:
      return 0
    scoreCard = self.coOpScoreCard if self.coOpType else self.scoring[num]
    heldNotes = len(inst.playedNotes)
    sustainLen = inst.getPickLength(self.getSongPosition())
    # Too short a hold earns nothing.
    if sustainLen <= 1.1 * self.song.period / 4:
      return 0
    bonus = self.baseSustainScore * sustainLen * heldNotes
    if self.starScoreUpdates == 1:
      scoreCard.updateAvMult()
      starsBefore = scoreCard.stars
      starsAfter = scoreCard.getStarScores(tempExtraScore = bonus)
      # Ding when the pending bonus pushes the score over a star threshold
      # (in-game star display must be enabled for the current theme).
      if starsAfter > starsBefore and self.engine.data.starDingSoundFound and ((self.inGameStars == 1 and self.theme == 2) or self.inGameStars == 2):
        self.engine.data.starDingSound.play()
    return int(bonus) #original FoF sustain scoring
def keyReleased(self, key):
    """Top-level key-release handler.

    Drum releases are ignored.  When any HOPO style is enabled
    (hopoStyle > 0) the work is delegated to keyReleased3(); otherwise a
    fret release may either tap the currently required notes (RF style)
    or end a sustained pick once it has lasted half a period.  In all
    cases the digital killswitch is disengaged for the matching player.
    """
    #RF style HOPO playing
    control = self.controls.keyReleased(key)
    num = self.getPlayerNum(control)
    # NOTE(review): getPlayerNum() returns -1 (never None) when no player
    # matches, so this guard looks ineffective -- confirm intended contract.
    if num is None:
      return
    if self.instruments[num].isDrum:
      return True
    #myfingershurt:
    if self.hopoStyle > 0: #hopos enabled
      res = self.keyReleased3(key)
      return res
    if control in self.keysList[num] and self.song:
      # Check whether we can tap the currently required notes
      pos = self.getSongPosition()
      notes = self.instruments[num].getRequiredNotes(self.song, pos)
      if ((self.scoring[num].streak > 0 and self.instruments[num].areNotesTappable(notes)) or \
         (self.instruments[num].guitarSolo and control in self.soloKeysList[num])) and \
         self.instruments[num].controlsMatchNotes(self.controls, notes):
        self.doPick(num)
      # Otherwise we end the pick if the notes have been playing long enough
      elif self.lastPickPos[num] is not None and pos - self.lastPickPos[num] > self.song.period / 2:
        self.endPick(num)
    #Digital killswitch disengage:
    for i, player in enumerate(self.playerList):
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = False
def keyReleased2(self, key):
    """Alternate key-release handler (RF-mod style; caller not visible in
    this chunk -- presumably selected by hopoStyle, confirm).

    Ends sustained picks broken by the release, disengages the digital
    killswitch, and -- if other frets remain held during a HOPO window --
    re-enters keyPressed2() with the remaining fret to chain a pull-off.
    """
    control = self.controls.keyReleased(key)
    for i, keys in enumerate(self.keysList):
      if control in keys and self.song:
        for time, note in self.instruments[i].playedNotes:
          # End the pick unless a HOPO chain is still in progress for
          # this note.
          if not self.instruments[i].wasLastNoteHopod or (self.instruments[i].hopoActive < 0 and (control == self.keysList[i][note.number] or control == self.keysList[i][note.number+5])):
            self.endPick(i)
    #Digital killswitch disengage:
    for i, player in enumerate(self.playerList):
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = False
    for i in range(self.numOfPlayers):
      # Frets still held after this release (candidate pull-off targets).
      activeList = [k for k in self.keysList[i] if self.controls.getState(k) and k != control]
      if len(activeList) != 0 and self.instruments[i].wasLastNoteHopod and activeList[0] != self.keysList[i][self.instruments[i].hopoLast] and activeList[0] != self.keysList[i][self.instruments[i].hopoLast+5] and control in self.keysList[i]:
        self.keyPressed2(None, 0, activeList[0])
def keyReleased3(self, key):
    """Key-release handler for the GH2-style HOPO modes (called from
    keyReleased() when hopoStyle > 0).

    Ends a sustained pick when the released fret belongs to a currently
    hit note, disengages the digital killswitch, and -- if other frets
    remain held during an active HOPO -- re-enters keyPressed3() with
    pullOff=True to register the pull-off.  None entries in keysList are
    skipped (players without fret keys, e.g. vocals -- confirm).
    """
    control = self.controls.keyReleased(key)
    #myfingershurt: this is where the lower-fret-release causes a held note to break:
    for i, keys in enumerate(self.keysList):
      if keys is None:
        continue
      if control in keys and self.song: #myfingershurt: if the released control was a fret:
        for time, note in self.instruments[i].playedNotes:
          #if self.instruments[i].hopoActive == 0 or (self.instruments[i].hopoActive < 0 and control == self.keysList[i][note.number]):
          #if not self.instruments[i].wasLastNoteHopod or (self.instruments[i].hopoActive < 0 and control == self.keysList[i][note.number]):
          #myfingershurt: only end the pick if no notes are being held.
          if (self.instruments[i].hit[note.number] == True and (control == self.keysList[i][note.number] or control == self.keysList[i][note.number+5])):
            #if control == self.keysList[i][note.number]:
            #if self.hopoDebugDisp == 1:
            #  Log.debug("MFH: An early sustain release was just detected.")
            self.endPick(i)
    #Digital killswitch disengage:
    for i, player in enumerate(self.playerList):
      if control == player.keyList[KILL] and not self.isKillAnalog[i]: #MFH - only use this logic if digital killswitch
        self.killswitchEngaged[i] = False
    for i in range(self.numOfPlayers):
      if self.keysList[i] is None:
        continue
      # Frets still held after this release (candidate pull-off targets).
      activeList = [k for k in self.keysList[i] if self.controls.getState(k) and k != control]
      #myfingershurt: removing check for hopolast for GH2 system after-chord HOPOs
      #myfingershurt: also added self.hopoAfterChord conditional to ensure this logic doesn't apply without HOPOs after chord
      if self.hopoAfterChord and (self.hopoStyle == 2 or self.hopoStyle == 3 or self.hopoStyle == 4): #for GH2 systems: so user can release lower fret from chord to "tap" held HOPO
        #if len(activeList) != 0 and guitar.wasLastNoteHopod and control in self.keysList[i]:
        if len(activeList) != 0 and self.instruments[i].hopoActive > 0 and control in self.keysList[i]:
          self.keyPressed3(None, 0, activeList[0], pullOff = True)
      else:
        #if len(activeList) != 0 and guitar.wasLastNoteHopod and activeList[0] != self.keysList[i][guitar.hopoLast] and control in self.keysList[i]:
        if len(activeList) != 0 and self.instruments[i].hopoActive > 0 and activeList[0] != self.keysList[i][self.instruments[i].hopoLast] and activeList[0] != self.keysList[i][self.instruments[i].hopoLast+5] and control in self.keysList[i]:
          self.keyPressed3(None, 0, activeList[0], pullOff = True)
def getPlayerNum(self, control):
    """Map a control event to the index of the player that owns it.

    Returns the index of the first player whose keyList contains
    *control*, or -1 when *control* is falsy or claimed by no player.
    NOTE(review): some callers test the result with ``is None`` (e.g.
    keyReleased), which can never match the -1 sentinel -- confirm the
    intended contract.
    """
    if not control:
      return -1
    for idx, plyr in enumerate(self.playerList):
      if control in plyr.keyList:
        return idx
    return -1
def render(self, visibility, topMost): #QQstarS: Fix this function for mostly. And there are lots of change in this, I just show the main ones
#MFH render function reorganization notes:
#Want to render all background / single-viewport graphics first
#if self.song:
#myfingershurt: Alarian's auto-stage scaling update
w = self.wFull
h = self.hFull
wBak = w
hBak = h
if self.fontShadowing:
font = self.engine.data.shadowfont
else:
font = self.engine.data.font
lyricFont = self.engine.data.font
bigFont = self.engine.data.bigFont
sphraseFont = self.engine.data.streakFont2
scoreFont = self.engine.data.scoreFont
streakFont = self.engine.data.streakFont
if self.song and self.song.readyToGo:
pos = self.getSongPosition()
if self.boardY <= 1:
self.setCamera()
if self.countdown > 0:
self.countdownOK = True
self.boardY = 1
elif self.boardY > 1:
self.boardY -= 0.01
self.setCamera()
#self.setCamera()
#self.engine.theme.setBaseColor()
Scene.render(self, visibility, topMost) #MFH - I believe this eventually calls the renderGuitar function, which also involves two viewports... may not be easy to move this one...
self.visibility = v = 1.0 - ((1 - visibility) ** 2)
self.engine.view.setOrthogonalProjection(normalize = True)
self.renderVocals()
#MFH: render the note sheet just on top of the background:
if self.lyricSheet != None and not self.playingVocals:
self.engine.drawImage(self.lyricSheet, scale = (self.lyricSheetScaleFactor,-self.lyricSheetScaleFactor), coord = (w/2, h*0.935))
#the timing line on this lyric sheet image is approx. 1/4 over from the left
#MFH - also render the scrolling lyrics & sections before changing viewports:
for instrument in self.instruments:
if instrument.isVocal == True:
minInst = instrument.currentPeriod * 2
maxInst = instrument.currentPeriod * 7
slopPer = instrument.currentPeriod
break
else:
if len(self.instruments) > 0:
minInst = (self.instruments[0].currentPeriod * self.instruments[0].beatsPerBoard) / 2
maxInst = (self.instruments[0].currentPeriod * self.instruments[0].beatsPerBoard) * 1.5
slopPer = self.instruments[0].currentPeriod
else: #This should never trigger...
minInst = 1000
maxInst = 3000
slopPer = 2000
minPos = pos - minInst
maxPos = pos + maxInst
eventWindow = (maxPos - minPos)
#lyricSlop = ( self.instruments[0].currentPeriod / (maxPos - minPos) ) / 4
lyricSlop = ( slopPer / ((maxPos - minPos)/2) ) / 2
if not self.pause and not self.failed and not self.ending:
if self.countdown <= 0: #MFH - only attempt to handle sections / lyrics / text events if the countdown is complete!
#handle the sections track
if self.midiSectionsEnabled > 0:
for time, event in self.song.eventTracks[Song.TK_SECTIONS].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
text = event.text
yOffset = 0.00005 #last change -.00035
txtSize = 0.00150
lyricFont.render(text, (xOffset, yOffset),(1, 0, 0),txtSize)
#handle the lyrics track
if self.midiLyricsEnabled > 0 and not self.playingVocals:
if self.midiLyricMode == 0: #scrolling lyrics mode:
for time, event in self.song.eventTracks[Song.TK_LYRICS].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
yOffset = 0.0696 #last change +0.0000
txtSize = 0.00160
text = event.text
if text.find("+") >= 0: #shift the pitch adjustment markers down one line
text = text.replace("+","~")
txtSize = 0.00145 #last change +.0000
yOffset -= 0.0115 #last change -.0005
lyricFont.render(text, (xOffset, yOffset),(1, 0, 0),txtSize)
#MFH - TODO - handle line-by-line lyric display and coloring here:
elif self.midiLyricMode == 1: #line-by-line lyrics mode:
if self.theme == 2:
txtSize = 0.00170
else:
#gh3 or other standard mod
txtSize = 0.00175
yOffset = 0.0696
xOffset = 0.5 - (lyricFont.getStringSize(self.currentSimpleMidiLyricLine, scale = txtSize)[0] / 2.0)
glColor3f(1, 1, 1)
lyricFont.render(self.currentSimpleMidiLyricLine, (xOffset, yOffset),(1, 0, 0),txtSize)
elif self.midiLyricMode == 2 and (self.numMidiLyricLines > self.activeMidiLyricLineIndex): #line-by-line lyrics mode:
if self.theme == 2:
txtSize = 0.00170
else:
#gh3 or other standard mod
txtSize = 0.00175
yOffset = 0.0696
#xOffset = 0.5 - (lyricFont.getStringSize(self.currentSimpleMidiLyricLine, scale = txtSize)[0] / 2.0)
tempTime, tempLyricLine = self.midiLyricLines[self.activeMidiLyricLineIndex]
xOffset = 0.5 - (lyricFont.getStringSize(tempLyricLine, scale = txtSize)[0] / 2.0)
glColor3f(0.75, 0.75, 0.75)
lyricFont.render(self.activeMidiLyricLine_GreyWords, (xOffset, yOffset),(1, 0, 0),txtSize)
xOffset += lyricFont.getStringSize(self.activeMidiLyricLine_GreyWords, scale = txtSize)[0]
glColor3f(0, 1, 0)
lyricFont.render(self.activeMidiLyricLine_GreenWords, (xOffset, yOffset),(1, 0, 0),txtSize)
xOffset += lyricFont.getStringSize(self.activeMidiLyricLine_GreenWords, scale = txtSize)[0]
glColor3f(1, 1, 1)
lyricFont.render(self.activeMidiLyricLine_WhiteWords, (xOffset, yOffset),(1, 0, 0),txtSize)
yOffset += self.lyricHeight
xOffset = 0.25
glColor3f(1, 1, 1)
lyricFont.render(self.currentSimpleMidiLyricLine, (xOffset, yOffset),(1, 0, 0),txtSize)
#finally, handle the unused text events track
if self.showUnusedTextEvents:
for time, event in self.song.eventTracks[Song.TK_UNUSED_TEXT].getEvents(minPos, maxPos):
if self.theme == 2:
#xOffset = 0.5
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
#xOffset = 0.5
yOffset = 0.69
txtSize = 0.00175
#is event happening now?
#this version will turn events green right as they hit the line and then grey shortly afterwards
#instead of an equal margin on both sides.
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
glColor3f(0.5, 0.5, 0.5) #I'm hoping this is some sort of grey.
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
glColor3f(0, 1, 0.6) #green-blue
else: #future, and all other text
glColor3f(1, 1, 1) #cracker white
xOffset += 0.250
yOffset = 0.0190 #last change -.0020
txtSize = 0.00124
lyricFont.render(event.text, (xOffset, yOffset),(1, 0, 0),txtSize)
try:
now = self.getSongPosition()
countdownPos = self.lastEvent - now
for i,player in enumerate(self.playerList): #QQstarS: This part has big fix. I add the code into it,So he can shown corect
p = player.guitarNum
if p is not None:
self.engine.view.setViewportHalf(self.numberOfGuitars,p)
else:
self.engine.view.setViewportHalf(1,0)
streakFlag = 0 #set the flag to 0
#if not self.coOpGH or self.rmtype == 2:
#self.engine.view.setViewportHalf(self.numOfPlayers,i)
if self.coOpGH and self.rmtype != 2:
self.engine.view.setViewport(1,0)
self.engine.theme.setBaseColor()
if i is not None:
if self.song:
if self.youRock == True:
if self.rockTimer == 1:
#self.sfxChannel.setVolume(self.sfxVolume)
self.engine.data.rockSound.play()
if self.rockTimer < self.rockCountdown:
self.rockTimer += 1
self.engine.drawImage(self.rockMsg, scale = (0.5, -0.5), coord = (w/2,h/2))
if self.rockTimer >= self.rockCountdown:
self.rockFinished = True
if self.failed:
if self.failTimer == 0:
self.song.pause()
if self.failTimer == 1:
#self.sfxChannel.setVolume(self.sfxVolume)
self.engine.data.failSound.play()
if self.failTimer < 100:
self.failTimer += 1
self.engine.drawImage(self.failMsg, scale = (0.5, -0.5), coord = (w/2,h/2))
else:
self.finalFailed = True
if self.pause:
self.engine.view.setViewport(1,0)
if self.engine.graphicMenuShown == False:
self.engine.drawImage(self.pauseScreen, scale = (self.pause_bkg[2], -self.pause_bkg[3]), coord = (w*self.pause_bkg[0],h*self.pause_bkg[1]), stretched = 3)
if self.finalFailed and self.song:
self.engine.view.setViewport(1,0)
if self.engine.graphicMenuShown == False:
self.engine.drawImage(self.failScreen, scale = (self.fail_bkg[2], -self.fail_bkg[3]), coord = (w*self.fail_bkg[0],h*self.fail_bkg[1]), stretched = 3)
# evilynux - Closer to actual GH3
font = self.engine.data.pauseFont
text = Song.removeSongOrderPrefixFromName(self.song.info.name).upper()
scale = font.scaleText(text, maxwidth = 0.398, scale = 0.0038)
size = font.getStringSize(text, scale = scale)
font.render(text, (.5-size[0]/2.0,.37-size[1]), scale = scale)
#now = self.getSongPosition()
diff = str(self.playerList[0].difficulty)
# compute initial position
pctComplete = min(100, int(now/self.lastEvent*100))
curxpos = font.getStringSize(_("COMPLETED")+" ", scale = 0.0015)[0]
curxpos += font.getStringSize(str(pctComplete), scale = 0.003)[0]
curxpos += font.getStringSize( _(" % ON "), scale = 0.0015)[0]
curxpos += font.getStringSize(diff, scale = 0.003)[0]
curxpos = .5-curxpos/2.0
c1,c2,c3 = self.fail_completed_color
glColor3f(c1,c2,c3)
# now render
text = _("COMPLETED") + " "
size = font.getStringSize(text, scale = 0.0015)
# evilynux - Again, for this very font, the "real" height value is 75% of returned value
font.render(text, (curxpos, .37+(font.getStringSize(text, scale = 0.003)[1]-size[1])*.75), scale = 0.0015)
text = str(pctComplete)
curxpos += size[0]
size = font.getStringSize(text, scale = 0.003)
font.render(text, (curxpos, .37), scale = 0.003)
text = _(" % ON ")
curxpos += size[0]
size = font.getStringSize(text, scale = 0.0015)
font.render(text, (curxpos, .37+(font.getStringSize(text, scale = 0.003)[1]-size[1])*.75), scale = 0.0015)
text = diff
curxpos += size[0]
font.render(text, (curxpos, .37), scale = 0.003)
if not self.failEnd:
self.failGame()
if self.hopoIndicatorEnabled and not self.instruments[i].isDrum and not self.pause and not self.failed: #MFH - HOPO indicator (grey = strums required, white = strums not required)
text = _("HOPO")
if self.instruments[i].hopoActive > 0:
glColor3f(1.0, 1.0, 1.0) #white
else:
glColor3f(0.4, 0.4, 0.4) #grey
w, h = font.getStringSize(text,0.00150)
font.render(text, (.950 - w / 2, .710),(1, 0, 0),0.00150) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #cracker white
#MFH - new location for star system support - outside theme-specific logic:
#if (self.coOp and i == 0) or not self.coOp: #MFH only render for player 0 if co-op mode
if (self.coOp and i == self.coOpPlayerMeter) or ((self.coOpRB or self.coOpGH) and i == 0) or not self.coOpType: #MFH only render for player 1 if co-op mode
if self.coOpType:
stars=self.coOpScoreCard.stars
partialStars=self.coOpScoreCard.partialStars
self.engine.view.setViewport(1,0)
ratio=self.coOpScoreCard.starRatio
else:
stars=self.scoring[i].stars
partialStars=self.scoring[i].partialStars
ratio=self.scoring[i].starRatio
w = wBak
h = hBak
vocaloffset = 0
if self.numOfSingers > 0 and self.numOfPlayers > 1:
vocaloffset = .05
if self.song and self.song.readyToGo:
if not self.coOpRB and not self.coOpGH:
if self.playerList[i].guitarNum is not None:
self.engine.view.setViewportHalf(self.numberOfGuitars,self.playerList[i].guitarNum)
else:
self.engine.view.setViewportHalf(1,0)
#MFH: Realtime hit accuracy display:
#if ((self.inGameStats == 2 or (self.inGameStats == 1 and self.theme == 2)) and (not self.pause and not self.failed)) and ( (not self.pause and not self.failed) or self.hopoDebugDisp == 1 ):
if ((self.inGameStats == 2 or (self.inGameStats == 1 and self.theme == 2) or self.hopoDebugDisp == 1 ) and (not self.pause and not self.failed) and not (self.coOpType and not i==0 and not self.coOp) and not self.battleGH):
#will not show on pause screen, unless HOPO debug is on (for debugging)
if self.coOpRB or self.coOpGH:
sNotesHit = self.coOpScoreCard.notesHit
sTotalNotes = self.coOpScoreCard.totalStreakNotes
sHitAcc = self.coOpScoreCard.hitAccuracy
sAvMult = self.coOpScoreCard.avMult
sEfHand = self.coOpScoreCard.handicapValue
else:
sNotesHit = self.scoring[i].notesHit
sTotalNotes = self.scoring[i].totalStreakNotes
sHitAcc = self.scoring[i].hitAccuracy
sAvMult = self.scoring[i].avMult
sEfHand = self.scoring[i].handicapValue
trimmedTotalNoteAcc = self.roundDecimalForDisplay(sHitAcc)
#text = str(self.playerList[i].notesHit) + "/" + str(self.playerList[i].totalStreakNotes) + ": " + str(trimmedTotalNoteAcc) + "%"
text = "%(notesHit)s/%(totalNotes)s: %(hitAcc)s%%" % \
{'notesHit': str(sNotesHit), 'totalNotes': str(sTotalNotes), 'hitAcc': str(trimmedTotalNoteAcc)}
c1,c2,c3 = self.ingame_stats_color
glColor3f(c1, c2, c3) #wht
w, h = font.getStringSize(text,0.00160)
if self.theme == 2:
if self.numDecimalPlaces < 2:
accDispX = 0.755
else:
accDispX = 0.740 #last change -0.015
accDispYac = 0.147
accDispYam = 0.170
else:
accDispX = 0.890 #last change -0.010
accDispYac = 0.140
accDispYam = 0.164
if self.battleGH:
if i == 0:
accDispX = 0.890
else:
accDispX = 0.110
font.render(text, (accDispX - w/2, accDispYac),(1, 0, 0),0.00140) #top-centered by streak under score
trimmedAvMult = self.roundDecimalForDisplay(sAvMult)
#text = _("Avg: ") + str(trimmedAvMult) + "x"
#avgLabel = _("Avg")
text = "%(avLab)s: %(avMult)sx" % \
{'avLab': self.tsAvgLabel, 'avMult': str(trimmedAvMult)}
glColor3f(c1, c2, c3)
w, h = font.getStringSize(text,0.00160)
font.render(text, (accDispX - w/2, accDispYam),(1, 0, 0),0.00140) #top-centered by streak under score
if sEfHand != 100.0:
text = "%s: %.1f%%" % (self.tsHandicapLabel, sEfHand)
w, h = font.getStringSize(text, .00160)
font.render(text, (.98 - w, .246), (1, 0, 0),0.00140)
if self.coOpRB or self.coOpGH:
if not self.instruments[i].isVocal:
self.engine.view.setViewportHalf(self.numberOfGuitars,self.playerList[i].guitarNum)
if not self.instruments[i].isVocal:
if self.dispSoloReview[i] and not self.pause and not self.failed:
if self.soloReviewCountdown[i] < self.soloReviewDispDelay:
self.soloReviewCountdown[i] += 1
if not (self.instruments[i].freestyleActive or self.scoring[i].freestyleWasJustActive):
#glColor3f(0, 0.85, 1) #grn-blu
glColor3f(1, 1, 1) #cracker white
text1 = self.soloReviewText[i][0]
text2 = self.soloReviewText[i][1]
xOffset = 0.950
if self.hitAccuracyPos == 0: #Center - need to move solo review above this!
yOffset = 0.080
elif self.jurgPlayer[i]: # and self.autoPlay: #akedrou - jurgPlayer checks if jurg was ever in town. This would block his notice if he came and left.
yOffset = 0.115 #above Jurgen Is Here
else: #no jurgens here:
yOffset = 0.155 #was 0.180, occluded notes
txtSize = 0.00185
Tw, Th = self.solo_soloFont.getStringSize(text1,txtSize)
Tw2, Th2 = self.solo_soloFont.getStringSize(text2,txtSize)
#MFH - scale and display self.soloFrame behind / around the text
lineSpacing = self.solo_soloFont.getLineSpacing(txtSize)
if self.soloFrame:
frameWidth = (max(Tw,Tw2))*1.15
#frameHeight = (Th+Th2)*1.10
frameHeight = lineSpacing*2.05
boxXOffset = 0.5
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + lineSpacing) / self.fontScreenBottom) )
tempWScale = frameWidth*self.soloFrameWFactor
tempHScale = -(frameHeight)*self.soloFrameWFactor
self.engine.drawImage(self.soloFrame, scale = (tempWScale,tempHScale), coord = (self.wPlayer[i]*boxXOffset,boxYOffset))
self.solo_soloFont.render(text1, (0.5 - Tw/2, yOffset),(1, 0, 0),txtSize) #centered
self.solo_soloFont.render(text2, (0.5 - Tw2/2, yOffset+lineSpacing),(1, 0, 0),txtSize) #centered
else:
self.dispSoloReview[i] = False
if self.hopoDebugDisp == 1 and not self.pause and not self.failed and not self.instruments[i].isDrum:
#MFH: PlayedNote HOPO tappable marking
if self.instruments[i].playedNotes:
if len(self.instruments[i].playedNotes) > 1:
self.lastTapText = "tapp: %d, %d" % (self.instruments[i].playedNotes[0][1].tappable, self.instruments[i].playedNotes[1][1].tappable)
else:
self.lastTapText = "tapp: %d" % (self.instruments[i].playedNotes[0][1].tappable)
#self.lastTapText = "tapp: " + str(self.instruments[i].playedNotes[0][1].tappable)
#if len(self.instruments[i].playedNotes) > 1:
# self.lastTapText += ", " + str(self.instruments[i].playedNotes[1][1].tappable)
w, h = font.getStringSize(self.lastTapText,0.00170)
font.render(self.lastTapText, (.750 - w / 2, .440),(1, 0, 0),0.00170) #off to the right slightly above fretboard
#MFH: HOPO active debug
#text = "HOact: "
if self.instruments[i].hopoActive > 0:
glColor3f(1, 1, 0) #yel
#text += "+"
hoActDisp = "+"
elif self.instruments[i].hopoActive < 0:
glColor3f(0, 1, 1) #blu-grn
#text += "-"
hoActDisp = "-"
else:
glColor3f(0.5, 0.5, 0.5) #gry
#text += "0"
hoActDisp = "0"
text = "HOact: %s" % hoActDisp
w, h = font.getStringSize(text,0.00175)
font.render(text, (.750 - w / 2, .410),(1, 0, 0),0.00170) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey
#MFH: HOPO intention determination flag debug
if self.instruments[i].sameNoteHopoString:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
#text = "HOflag: " + str(self.instruments[i].sameNoteHopoString)
text = "HOflag: %s" % str(self.instruments[i].sameNoteHopoString)
w, h = font.getStringSize(text,0.00175)
font.render(text, (.750 - w / 2, .385),(1, 0, 0),0.00170) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey
##MFH: HOPO intention determination flag problematic note list debug
##glColor3f(1, 1, 1) #whitey
#text = "pNotes: " + str(self.problemNotesP1)
#w, h = font.getStringSize(text,0.00175)
#font.render(text, (.750 - w / 2, .355),(1, 0, 0),0.00170) #off to the right slightly above fretboard
##glColor3f(1, 1, 1) #whitey
#MFH: guitarSoloNoteCount list debug
text = str(self.guitarSolos[i])
glColor3f(0.9, 0.9, 0.9) #offwhite
w, h = font.getStringSize(text,0.00110)
font.render(text, (.900 - w / 2, .540),(1, 0, 0),0.00110) #off to the right slightly above fretboard
if self.killDebugEnabled and not self.pause and not self.failed:
killXpos = 0.760 #last change: +0.010
killYpos = 0.365 #last change: -0.010
killTsize = 0.00160 #last change: -0.00010
#if self.playerList[i].part.text != "Drums":
if not self.instruments[i].isDrum:
if self.isKillAnalog[i]:
if self.analogKillMode[i] == 2: #xbox mode:
if self.actualWhammyVol[i] < 1.0:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
else: #ps2 mode:
if self.actualWhammyVol[i] > 0.0:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
text = str(self.roundDecimalForDisplay(self.actualWhammyVol[i]))
w, h = font.getStringSize(text,killTsize)
font.render(text, (killXpos - w / 2, killYpos),(1, 0, 0),killTsize) #off to the right slightly above fretboard
else:
if self.killswitchEngaged[i]:
glColor3f(1, 1, 0) #yel
else:
glColor3f(0.5, 0.5, 0.5) #gry
text = str(self.killswitchEngaged[i])
w, h = font.getStringSize(text,killTsize)
font.render(text, (killXpos - w / 2, killYpos),(1, 0, 0),killTsize) #off to the right slightly above fretboard
glColor3f(1, 1, 1) #whitey reset (cracka cracka)
#MFH - freestyle active status debug display
if self.showFreestyleActive == 1 and not self.pause and not self.failed: #MFH - shows when freestyle is active
if self.instruments[i].isDrum: #also show the active status of drum fills
text = "BRE: %s, Fill: %s" % ( str(self.instruments[i].freestyleActive), str(self.instruments[i].drumFillsActive) )
else:
text = "BRE: %s" % str(self.instruments[i].freestyleActive)
freeX = .685
freeY = .510
freeTsize = 0.00150
font.render(text, (freeX, freeY),(1, 0, 0),freeTsize)
#MFH - TODO - show current tempo / BPM and neckspeed if enabled for debugging
if self.showBpm == 1 and i == 0:
if self.vbpmLogicType == 0: #MFH - VBPM (old)
currentBPM = self.instruments[i].currentBpm
targetBPM = self.instruments[i].targetBpm
else:
currentBPM = self.currentBpm
targetBPM = self.targetBpm
text = "BPM/Target:%.2f/%.2f, NS:%.2f" % (currentBPM, targetBPM, instrument.neckSpeed)
bpmX = .35
bpmY = .330
bpmTsize = 0.00120
font.render(text, (bpmX, bpmY),(1, 0, 0),bpmTsize)
#myfingershurt: lyrical display conditional logic:
# show the comments (lyrics)
if not self.instruments[i].isVocal:
#myfingershurt: first display the accuracy readout:
if self.dispAccuracy[i] and not self.pause and not self.failed:
trimmedAccuracy = self.roundDecimalForDisplay(self.accuracy[i])
if self.showAccuracy == 1: #numeric mode
#MFH string concatenation -> modulo formatting
#text = str(trimmedAccuracy) + " ms"
text = "%s %s" % (str(trimmedAccuracy), self.msLabel)
elif self.showAccuracy >= 2: #friendly / descriptive mode
#MFH Precalculated these hit accuracy thresholds instead of every frame
if (self.accuracy[i] >= self.instruments[i].accThresholdWorstLate) and (self.accuracy[i] < self.instruments[i].accThresholdVeryLate):
text = self.tsAccVeryLate
glColor3f(1, 0, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdVeryLate) and (self.accuracy[i] < self.instruments[i].accThresholdLate):
text = self.tsAccLate
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdLate) and (self.accuracy[i] < self.instruments[i].accThresholdSlightlyLate):
text = self.tsAccSlightlyLate
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdSlightlyLate) and (self.accuracy[i] < self.instruments[i].accThresholdExcellentLate):
text = self.tsAccExcellentLate
glColor3f(0, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdExcellentLate) and (self.accuracy[i] < self.instruments[i].accThresholdPerfect):
#give the "perfect" reading some slack, -1.0 to 1.0
text = self.tsAccPerfect
glColor3f(0, 1, 1) #changed color
elif (self.accuracy[i] >= self.instruments[i].accThresholdPerfect) and (self.accuracy[i] < self.instruments[i].accThresholdExcellentEarly):
text = self.tsAccExcellentEarly
glColor3f(0, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdExcellentEarly) and (self.accuracy[i] < self.instruments[i].accThresholdSlightlyEarly):
text = self.tsAccSlightlyEarly
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdSlightlyEarly) and (self.accuracy[i] < self.instruments[i].accThresholdEarly):
text = self.tsAccEarly
glColor3f(1, 1, 0)
elif (self.accuracy[i] >= self.instruments[i].accThresholdEarly) and (self.accuracy[i] < self.instruments[i].accThresholdVeryEarly):
text = self.tsAccVeryEarly
glColor3f(1, 0, 0)
else:
#bug catch - show the problematic number:
#text = str(trimmedAccuracy) + _(" ms")
text = "%(acc)s %(ms)s" % \
{'acc': str(trimmedAccuracy), 'ms': self.msLabel}
glColor3f(1, 0, 0)
w, h = font.getStringSize(text,0.00175)
posX = 0.98 - (w / 2)
if self.theme == 2:
posY = 0.284
else:
if self.coOpGH:
posY = 0.25
else:
posY = 0.296
if self.hitAccuracyPos == 0: #Center
posX = .500
posY = .305 + h
if self.showAccuracy == 3: #for displaying numerical below descriptive
posY = .305
#if self.pov != 1: #not GH POV
# posY = y + 4 * h -- MFH: this line causes a bad hang.
elif self.hitAccuracyPos == 2:#Left-bottom
posX = .193
posY = .700 #(.193-size[0]/2, 0.667-size[1]/2+self.hFontOffset[i]))
elif self.hitAccuracyPos == 3: #Center-bottom
posX = .500
posY = .710
font.render(text, (posX - w / 2, posY - h / 2),(1, 0, 0),0.00170)
if self.showAccuracy == 3: #for displaying numerical below descriptive
#text = str(self.accuracy)
#text = str(trimmedAccuracy) + " ms"
#msText = _("ms")
text = "%(acc)s %(ms)s" % \
{'acc': str(trimmedAccuracy), 'ms': self.msLabel}
w, h = font.getStringSize(text,0.00140)
font.render(text, (posX - w / 2, posY - h / 2 + .030),(1, 0, 0),0.00140)
glColor3f(1, 1, 1)
#handle the guitar solo track
#if (self.readTextAndLyricEvents == 2 or (self.readTextAndLyricEvents == 1 and self.theme == 2)) and (not self.pause and not self.failed and not self.ending):
if (not self.pause and not self.failed and not self.ending):
#MFH - only use the TK_GUITAR_SOLOS track if at least one player has no MIDI solos marked:
if self.instruments[i].useMidiSoloMarkers: #mark using the new MIDI solo marking system
for time, event in self.song.midiEventTrack[i].getEvents(minPos, maxPos):
if isinstance(event, Song.MarkerNote):
if (event.number == Song.starPowerMarkingNote) and (self.song.midiStyle == Song.MIDI_TYPE_RB): #solo marker note.
soloChangeNow = False
xOffset = (time - pos) / eventWindow
if xOffset < lyricSlop / 16.0: #present
soloChangeNow = True
if soloChangeNow:
if event.endMarker: #solo ending
if self.instruments[i].guitarSolo and not event.happened:
self.endSolo(i)
event.happened = True
else: #solo beginning
if not self.instruments[i].guitarSolo and not event.happened:
self.startSolo(i)
event.happened = True
elif self.markSolos == 1: #fall back on old guitar solo marking system
for time, event in self.song.eventTracks[Song.TK_GUITAR_SOLOS].getEvents(minPos, maxPos):
#is event happening now?
xOffset = (time - pos) / eventWindow
EventHappeningNow = False
if xOffset < (0.0 - lyricSlop * 2.0): #past
EventHappeningNow = False
elif xOffset < lyricSlop / 16.0: #present
EventHappeningNow = True
if EventHappeningNow: #process the guitar solo event
if event.text.find("ON") >= 0:
if self.instruments[i].canGuitarSolo:
if not self.instruments[i].guitarSolo:
self.startSolo(i)
else:
#if self.instruments[i].canGuitarSolo:
if self.instruments[i].guitarSolo:
self.endSolo(i)
#MFH - render guitar solo in progress - stats
#try:
#if self.instruments[i].canGuitarSolo:
if self.instruments[i].guitarSolo:
#MFH - scale and display self.soloFrame behind / around the solo accuracy text display
#if self.fontMode==0: #0 = oGL Hack, 1=LaminaScreen, 2=LaminaFrames
if self.soloFrame:
frameWidth = self.solo_Tw[i]*1.15
frameHeight = self.solo_Th[i]*1.07
self.solo_boxYOffset[i] = self.hPlayer[i]-(self.hPlayer[i]* ((self.solo_yOffset[i] + self.solo_Th[i]/2.0 ) / self.fontScreenBottom) )
tempWScale = frameWidth*self.soloFrameWFactor
tempHScale = -(frameHeight)*self.soloFrameWFactor
self.engine.drawImage(self.soloFrame, scale = (tempWScale,tempHScale), coord = (self.wPlayer[i]*self.solo_boxXOffset[i],self.solo_boxYOffset[i]))
self.solo_soloFont.render(self.solo_soloText[i], (self.solo_xOffset[i], self.solo_yOffset[i]),(1, 0, 0),self.solo_txtSize)
#self.solo_soloFont.render("test", (0.5,0.0) ) #appears to render text from given position, down / right...
#self.solo_soloFont.render("test", (0.5,0.5) ) #this test confirms that the Y scale is in units relative to the X pixel width - 1280x960 yes but 1280x1024 NO
#this test locates the constant that the font rendering routine always considers the "bottom" of the screen
#self.solo_soloFont.render("test", (0.5,0.75-self.solo_Th[i]), scale=self.solo_txtSize ) #ah-ha! 4:3 AR viewport = 0.75 max!
#self.engine.view.setViewport(1,0)
#except Exception, e:
# Log.warn("Unable to render guitar solo accuracy text: %s" % e)
if self.coOpType: #1 BRE in co-op
scoreCard = self.coOpScoreCard
if i == 0:
self.engine.view.setViewportHalf(1,0)
oneTime = True
else:
oneTime = False
else:
scoreCard = self.scoring[i]
oneTime = True
#MFH - show BRE temp score frame
if (self.instruments[i].freestyleActive or (scoreCard.freestyleWasJustActive and not scoreCard.endingStreakBroken and not scoreCard.endingAwarded)) and oneTime == True:
#to render BEFORE the bonus is awarded.
text = "End Bonus"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = (1.0-((yOffset + tH/2.0 ) / self.fontScreenBottom))*self.hFull
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58), color = (.8, .8, .8, 1))
text = "%s" % scoreCard.endingScore
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
elif scoreCard.freestyleWasJustActive and not scoreCard.endingStreakBroken and scoreCard.endingAwarded and oneTime == True:
#MFH - TODO - ending bonus was awarded - scale up obtained score & box to signify rockage
text = "Success!"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58))
text = "%s" % scoreCard.endingScore
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
elif scoreCard.freestyleWasJustActive and scoreCard.endingStreakBroken and oneTime == True:
#akedrou - ending bonus was not awarded - scale up to signify failure
text = "Failed!"
yOffset = 0.110
xOffset = 0.500
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize/2.0)
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize/2.0)
if self.coOpType and self.partImage:
freeX = .05*(self.numOfPlayers-1)
freeI = .05*self.numOfPlayers
for j in xrange(self.numOfPlayers):
if self.scoring[j].endingStreakBroken:
partcolor = (.4, .4, .4, 1)
else:
partcolor = (.8, .8, .8, 1)
self.engine.drawImage(self.part[j], scale = (.15,-.15), coord = (self.wFull*(.5-freeX+freeI*j),self.hFull*.58), color = partcolor)
text = "%s" % 0
if self.theme == 2:
text = text.replace("0","O")
tW, tH = self.solo_soloFont.getStringSize(text, scale = self.solo_txtSize)
yOffset = 0.175
xOffset = 0.500
if self.breScoreBackground:
#frameWidth = tW*3.0
frameHeight = tH*4.0
frameWidth = frameHeight
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreBackgroundWFactor
tempHScale = -(frameHeight)*self.breScoreBackgroundWFactor
self.engine.drawImage(self.breScoreBackground, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
if self.breScoreFrame:
frameWidth = tW*1.15
frameHeight = tH*1.07
if self.coOpType:
boxYOffset = self.hFull*(1.0-(yOffset + tH/2.0 ) / self.fontScreenBottom)
boxXOffset = xOffset*self.wFull
else:
boxYOffset = self.hPlayer[i]-(self.hPlayer[i]* ((yOffset + tH/2.0 ) / self.fontScreenBottom) )
boxXOffset = self.wPlayer[i]*xOffset
tempWScale = frameWidth*self.breScoreFrameWFactor
tempHScale = -(frameHeight)*self.breScoreFrameWFactor
self.engine.drawImage(self.breScoreFrame, scale = (tempWScale,tempHScale), coord = (boxXOffset,boxYOffset))
self.solo_soloFont.render(text, (xOffset - tW/2.0, yOffset),(1, 0, 0),self.solo_txtSize)
self.engine.view.setViewportHalf(1,0)
# evilynux - Display framerate
if self.engine.show_fps: #probably only need to once through.
c1,c2,c3 = self.ingame_stats_color
glColor3f(c1, c2, c3)
text = _("FPS: %.2f" % self.engine.fpsEstimate)
w, h = font.getStringSize(text, scale = 0.00140)
font.render(text, (.85, .055 - h/2), (1,0,0), 0.00140)
pos = self.getSongPosition()
if self.showScriptLyrics and not self.pause and not self.failed:
#for time, event in self.song.track[i].getEvents(pos - self.song.period * 2, pos + self.song.period * 4):
for time, event in self.song.eventTracks[Song.TK_SCRIPT].getEvents(pos - self.song.period * 2, pos + self.song.period * 4): #MFH - script track
if isinstance(event, PictureEvent):
if pos < time or pos > time + event.length:
continue
try:
picture = event.picture
except:
self.engine.loadImgDrawing(event, "picture", os.path.join(self.libraryName, self.songName, event.fileName))
picture = event.picture
w = self.wFull
h = self.hFull
if self.theme == 2:
yOffset = 0.715
else:
#gh3 or other standard mod
yOffset = 0.69
fadePeriod = 500.0
f = (1.0 - min(1.0, abs(pos - time) / fadePeriod) * min(1.0, abs(pos - time - event.length) / fadePeriod)) ** 2
self.engine.drawImage(picture, scale = (1, -1), coord = (w / 2, (f * -2 + 1) * h/2+yOffset))
elif isinstance(event, TextEvent):
if pos >= time and pos <= time + event.length and not self.ending: #myfingershurt: to not display events after ending!
xOffset = 0.5
if self.scriptLyricPos == 0:
if self.theme == 2:
yOffset = 0.715
txtSize = 0.00170
else:
#gh3 or other standard mod
yOffset = 0.69
txtSize = 0.00175
else: #display in lyric bar position
yOffset = 0.0696 #last change +0.0000
txtSize = 0.00160
#MFH TODO - pre-retrieve and translate all current tutorial script.txt events, if applicable.
if self.song.info.tutorial:
text = _(event.text)
w, h = lyricFont.getStringSize(text,txtSize)
lyricFont.render(text, (xOffset - w / 2, yOffset),(1, 0, 0),txtSize)
#elif event.text.find("TXT:") < 0 and event.text.find("LYR:") < 0 and event.text.find("SEC:") < 0 and event.text.find("GSOLO") < 0: #filter out MIDI text events, only show from script here.
else:
text = event.text
w, h = lyricFont.getStringSize(text,txtSize)
lyricFont.render(text, (xOffset - w / 2, yOffset),(1, 0, 0),txtSize)
#-------------after "if showlyrics"
#self.engine.view.setViewport(1,0)
#scrolling lyrics & sections: moved to before player viewport split
#Show Jurgen played Spikehead777
self.engine.view.setViewport(1,0)
gN = 0
for i in range(self.numOfPlayers):
if self.instruments[i].isVocal:
continue
if self.jurgPlayer[i] == True:
if self.jurg[i]:
if self.customBot[i]:
text = self.tsJurgenIsHere % self.customBot[i]
else:
text = self.tsJurgenIsHere % self.tsBotNames[self.aiSkill[i]]
else:
if self.customBot[i]:
text = self.tsJurgenWasHere % self.customBot[i]
else:
text = self.tsJurgenWasHere % self.tsBotNames[self.aiSkill[i]]
#jurgScale = .001/self.numOfPlayers
jurgScale = float(self.jurgenText[2])
w, h = bigFont.getStringSize(text, scale = jurgScale)
self.engine.theme.setBaseColor()
if jurgScale > .2 or jurgScale < .0001:
jurgScale = .001
jurgX = float(self.jurgenText[0])
if jurgX < 0:
jurgX = 0
jurgX = (jurgX+gN)/self.numberOfGuitars
if jurgX > ((gN+1)/self.numberOfGuitars) - w:
jurgX = ((gN+1)/self.numberOfGuitars) - w
jurgY = float(self.jurgenText[1])
if jurgY > .75 - h:
jurgY = .75 - h
if not self.failed:
bigFont.render(text, (jurgX, jurgY), scale = jurgScale)#MFH - y was 0.4 - more positioning weirdness.
gN += 1
#End Jurgen Code
#MFH - Get Ready to Rock & countdown, song info during countdown, and song time left display on top of everything else
if (not self.pause and not self.failed and not self.ending):
if self.coOpType: #render co-op phrases (full screen width) above the rest.
if self.displayText[self.coOpPhrase] != None:
glColor3f(.8,.75,.01)
size = sphraseFont.getStringSize(self.displayText[self.coOpPhrase], scale = self.displayTextScale[self.coOpPhrase])
sphraseFont.render(self.displayText[self.coOpPhrase], (.5-size[0]/2,self.textY[self.coOpPhrase]-size[1]), scale = self.displayTextScale[self.coOpPhrase])
# show countdown
# glorandwarf: fixed the countdown timer
if self.countdownSeconds > 1:
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = self.tsGetReady
w, h = font.getStringSize(text)
font.render(text, (.5 - w / 2, .3))
if self.countdownSeconds < 6:
if self.counting:
for i,player in enumerate(self.playerList):
if not self.instruments[i].isVocal:
w = self.wPlayer[i]
h = self.hPlayer[i]
partImgwidth = self.part[i].width1()
partwFactor = 250.000/partImgwidth
partX = ((i*2)+1) / (self.numOfPlayers*2.0)
self.engine.drawImage(self.part[i], scale = (partwFactor*0.25,partwFactor*-0.25), coord = (w*partX,h*.4), color = (1,1,1, 3.0 - abs(4.0 - self.countdownSeconds)))
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = player.name
w, h = font.getStringSize(text)
font.render(text, (partX - w*.5, .5))
else:
w = self.wFull
h = self.hFull
partImgWidth = self.part[i].width1()
partwFactor = 250.000/partImgWidth
self.engine.drawImage(self.part[i], scale = (partwFactor*0.25, partwFactor*-0.25), coord = (w*.5,h*.75), color = (1,1,1, 3.0 - abs(4.0 - self.countdownSeconds)))
self.engine.theme.setBaseColor(min(1.0, 3.0 - abs(4.0 - self.countdownSeconds)))
text = player.name
w, h = font.getStringSize(text)
font.render(text, (.5 - w*.5, .25))
else:
scale = 0.002 + 0.0005 * (self.countdownSeconds % 1) ** 3
text = "%d" % (self.countdownSeconds)
w, h = bigFont.getStringSize(text, scale = scale)
self.engine.theme.setBaseColor()
bigFont.render(text, (.5 - w / 2, .45 - h / 2), scale = scale)
if self.resumeCountdownSeconds > 1:
scale = 0.002 + 0.0005 * (self.resumeCountdownSeconds % 1) ** 3
text = "%d" % (self.resumeCountdownSeconds)
w, h = bigFont.getStringSize(text, scale = scale)
self.engine.theme.setBaseColor()
bigFont.render(text, (.5 - w / 2, .45 - h / 2), scale = scale)
w, h = font.getStringSize(" ")
y = .05 - h / 2 - (1.0 - v) * .2
songFont = self.engine.data.songFont
# show song name
if self.countdown and self.song:
cover = ""
if self.song.info.findTag("cover") == True: #kk69: misc changes to make it more GH/RB
cover = "%s \n " % self.tsAsMadeFamousBy #kk69: no more ugly colon! ^_^
else:
if self.theme == 2:
cover = "" #kk69: for RB
else:
cover = self.tsBy #kk69: for GH
self.engine.theme.setBaseColor(min(1.0, 4.0 - abs(4.0 - self.countdown)))
comma = ""
extra = ""
if self.song.info.year: #add comma between year and artist
comma = ", "
if self.song.info.frets:
extra = "%s \n %s%s" % (extra, self.tsFrettedBy, self.song.info.frets)
if self.song.info.version:
extra = "%s \n v%s" % (extra, self.song.info.version)
if self.theme != 1: #shift this stuff down so it don't look so bad over top the lyricsheet:
Dialogs.wrapText(songFont, (self.songInfoDisplayX, self.songInfoDisplayX - h / 2), "%s \n %s%s%s%s%s" % (Song.removeSongOrderPrefixFromName(self.song.info.name), cover, self.song.info.artist, comma, self.song.info.year, extra), rightMargin = .6, scale = self.songInfoDisplayScale)#kk69: incorporates song.ttf
else:
Dialogs.wrapText(songFont, (self.songInfoDisplayX, self.songInfoDisplayY - h / 2), "%s \n %s%s%s%s%s" % (Song.removeSongOrderPrefixFromName(self.song.info.name), cover, self.song.info.artist, comma, self.song.info.year, extra), rightMargin = .6, scale = self.songInfoDisplayScale)
else:
#mfh: this is where the song countdown display is generated:
if pos < 0:
pos = 0
if countdownPos < 0:
countdownPos = 0
self.engine.theme.setBaseColor()
#Party mode
if self.partyMode == True:
timeleft = (now - self.partySwitch) / 1000
if timeleft > self.partyTime:
self.partySwitch = now
if self.partyPlayer == 0:
self.instruments[0].keys = PLAYER2KEYS
self.instruments[0].actions = PLAYER2ACTIONS
self.keysList = [PLAYER2KEYS]
self.partyPlayer = 1
else:
self.instruments[0].keys = PLAYER1KEYS
self.instruments[0].actions = PLAYER1ACTIONS
self.keysList = [PLAYER1KEYS]
self.partyPlayer = 0
t = "%d" % (self.partyTime - timeleft + 1)
if self.partyTime - timeleft < 5:
glColor3f(1, 0, 0)
w, h = font.getStringSize(t)#QQstarS:party
font.render(t, (.5 - w / 2, 0.4)) #QQstarS:party
elif self.partySwitch != 0 and timeleft < 1:
t = "Switch"
glColor3f(0, 1, 0)
w, h = font.getStringSize(t)#QQstarS:party
font.render(t, (.5 - w / 2, 0.4))#QQstarS:party
else:#QQstarS:party
w, h = font.getStringSize(t)
font.render(t, (.5 - w / 2, y + h))
finally:
self.engine.view.resetProjection()
|
gpl-2.0
| 3,266,319,482,678,174,000
| 46.720395
| 411
| 0.613254
| false
| 3.407236
| false
| false
| false
|
michaelhidalgo/7WCSQ
|
Tools/SQLMap/sqlmap/lib/techniques/brute/use.py
|
1
|
10676
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2017 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import time
from lib.core.common import clearConsoleLine
from lib.core.common import dataToStdout
from lib.core.common import filterListValue
from lib.core.common import getFileItems
from lib.core.common import Backend
from lib.core.common import getPageWordSet
from lib.core.common import hashDBWrite
from lib.core.common import randomInt
from lib.core.common import randomStr
from lib.core.common import readInput
from lib.core.common import safeStringFormat
from lib.core.common import safeSQLIdentificatorNaming
from lib.core.common import unsafeSQLIdentificatorNaming
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.enums import HASHDB_KEYS
from lib.core.enums import PAYLOAD
from lib.core.exception import SqlmapDataException
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.settings import BRUTE_COLUMN_EXISTS_TEMPLATE
from lib.core.settings import BRUTE_TABLE_EXISTS_TEMPLATE
from lib.core.settings import METADB_SUFFIX
from lib.core.threads import getCurrentThreadData
from lib.core.threads import runThreads
from lib.request import inject
def _addPageTextWords():
    """
    Return a list of candidate identifier words scraped from the original
    web page (used to extend the table/column brute-force check lists).

    Words are lower-cased, must be longer than two characters, must not
    start with a digit, and are de-duplicated preserving first occurrence.
    """
    wordsList = []

    infoMsg = "adding words used on web page to the check list"
    logger.info(infoMsg)

    pageWords = getPageWordSet(kb.originalPage)

    # Track already-added words in a set for O(1) membership tests
    # (previously `word not in wordsList` scanned the list, O(n) per word)
    seen = set()

    for word in pageWords:
        word = word.lower()

        if len(word) > 2 and not word[0].isdigit() and word not in seen:
            seen.add(word)
            wordsList.append(word)

    return wordsList
def tableExists(tableFile, regex=None):
    """
    Brute-force common table existence check.

    Reads candidate table names from *tableFile* (extended with words
    scraped from the original page), then probes each candidate with a
    boolean-based injection of BRUTE_TABLE_EXISTS_TEMPLATE using
    conf.threads worker threads.

    :param tableFile: path of the file holding candidate table names
    :param regex: optional regular expression used to filter the candidates
    :return: updated kb.data.cachedTables, or None if the user aborts the
             (not recommended) time/stacked-based check
    """

    # Time-based and stacked-queries techniques are slow/unreliable for this
    # kind of mass probing, so ask for confirmation once; the answer is
    # cached in kb.tableExistsChoice for subsequent calls
    if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
        warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
        warnMsg += "for common table existence check"
        logger.warn(warnMsg)

        message = "are you sure you want to continue? [y/N] "
        test = readInput(message, default="N")
        kb.tableExistsChoice = test[0] in ("y", "Y")

        if not kb.tableExistsChoice:
            return None

    # Sanity probe: a random table name must NOT appear to exist; if it
    # does, the injection can't distinguish true/false results reliably
    result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr())))

    # Oracle and DB2 store identifiers upper-cased by default
    if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
        conf.db = conf.db.upper()

    if result:
        errMsg = "can't use table existence check because of detected invalid results "
        errMsg += "(most likely caused by inability of the used injection "
        errMsg += "to distinguish erroneous results)"
        raise SqlmapDataException(errMsg)

    tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True)

    infoMsg = "checking table existence using items from '%s'" % tableFile
    logger.info(infoMsg)

    # Extend the candidate list with words found on the original page and
    # optionally narrow it down with the user-supplied regular expression
    tables.extend(_addPageTextWords())
    tables = filterListValue(tables, regex)

    # Shared state consumed by the worker threads below
    threadData = getCurrentThreadData()
    threadData.shared.count = 0
    threadData.shared.limit = len(tables)
    threadData.shared.value = []
    threadData.shared.unique = set()

    def tableExistsThread():
        # Worker: pops the next candidate under kb.locks.count, probes it,
        # and records case-insensitively unique hits in shared.value
        threadData = getCurrentThreadData()

        while kb.threadContinue:
            kb.locks.count.acquire()
            if threadData.shared.count < threadData.shared.limit:
                table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True)
                threadData.shared.count += 1
                kb.locks.count.release()
            else:
                # No candidates left for this thread
                kb.locks.count.release()
                break

            # Qualify with the database name except for DBMSes without a
            # proper schema/database qualifier
            if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
                fullTableName = "%s.%s" % (conf.db, table)
            else:
                fullTableName = table

            result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName)))

            # Serialize console output and shared-list updates
            kb.locks.io.acquire()

            if result and table.lower() not in threadData.shared.unique:
                threadData.shared.value.append(table)
                threadData.shared.unique.add(table.lower())

                if conf.verbose in (1, 2) and not hasattr(conf, "api"):
                    clearConsoleLine(True)
                    infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table))
                    dataToStdout(infoMsg, True)

            if conf.verbose in (1, 2):
                status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
                dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

            kb.locks.io.release()

    try:
        runThreads(conf.threads, tableExistsThread, threadChoice=True)
    except KeyboardInterrupt:
        # Partial results gathered so far are still reported below
        warnMsg = "user aborted during table existence "
        warnMsg += "check. sqlmap will display partial output"
        logger.warn(warnMsg)

    clearConsoleLine(True)
    dataToStdout("\n")

    if not threadData.shared.value:
        warnMsg = "no table(s) found"
        logger.warn(warnMsg)
    else:
        # Cache the found tables under the current database
        for item in threadData.shared.value:
            if conf.db not in kb.data.cachedTables:
                kb.data.cachedTables[conf.db] = [item]
            else:
                kb.data.cachedTables[conf.db].append(item)

    # Persist the (db, table) pairs so later runs can reuse them
    for _ in ((conf.db, item) for item in threadData.shared.value):
        if _ not in kb.brute.tables:
            kb.brute.tables.append(_)

    hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True)

    return kb.data.cachedTables
def columnExists(columnFile, regex=None):
    """
    Brute-force common column existence check against conf.tbl.

    Reads candidate column names from *columnFile* (extended with words
    scraped from the original page), probes each with a boolean-based
    injection of BRUTE_COLUMN_EXISTS_TEMPLATE across conf.threads worker
    threads, and finally classifies each found column as numeric or
    non-numeric.

    :param columnFile: path of the file holding candidate column names
    :param regex: optional regular expression used to filter the candidates
    :return: updated kb.data.cachedColumns, or None if the user aborts the
             (not recommended) time/stacked-based check
    :raises SqlmapMissingMandatoryOptionException: if no table was provided
    """

    # Time-based and stacked-queries techniques are slow/unreliable for this
    # kind of mass probing, so ask for confirmation once; the answer is
    # cached in kb.columnExistsChoice for subsequent calls
    if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct:
        warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED])
        warnMsg += "for common column existence check"
        logger.warn(warnMsg)

        message = "are you sure you want to continue? [y/N] "
        test = readInput(message, default="N")
        kb.columnExistsChoice = test[0] in ("y", "Y")

        if not kb.columnExistsChoice:
            return None

    # A target table is mandatory for column brute-forcing
    if not conf.tbl:
        errMsg = "missing table parameter"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    # Oracle and DB2 store identifiers upper-cased by default
    if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2):
        conf.db = conf.db.upper()

    # Sanity probe: a random column in a random table must NOT appear to
    # exist; if it does, results can't be trusted
    result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr())))

    if result:
        errMsg = "can't use column existence check because of detected invalid results "
        errMsg += "(most likely caused by inability of the used injection "
        errMsg += "to distinguish erroneous results)"
        raise SqlmapDataException(errMsg)

    infoMsg = "checking column existence using items from '%s'" % columnFile
    logger.info(infoMsg)

    # Extend the candidate list with words found on the original page and
    # optionally narrow it down with the user-supplied regular expression
    columns = getFileItems(columnFile, unique=True)
    columns.extend(_addPageTextWords())
    columns = filterListValue(columns, regex)

    table = safeSQLIdentificatorNaming(conf.tbl, True)

    # Qualify with the database name except for DBMSes without a proper
    # schema/database qualifier
    if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
        table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table)

    kb.threadContinue = True
    kb.bruteMode = True

    # Shared state consumed by the worker threads below
    threadData = getCurrentThreadData()
    threadData.shared.count = 0
    threadData.shared.limit = len(columns)
    threadData.shared.value = []

    def columnExistsThread():
        # Worker: pops the next candidate under kb.locks.count, probes it,
        # and appends hits to shared.value
        threadData = getCurrentThreadData()

        while kb.threadContinue:
            kb.locks.count.acquire()
            if threadData.shared.count < threadData.shared.limit:
                column = safeSQLIdentificatorNaming(columns[threadData.shared.count])
                threadData.shared.count += 1
                kb.locks.count.release()
            else:
                # No candidates left for this thread
                kb.locks.count.release()
                break

            result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table)))

            # Serialize console output and shared-list updates
            kb.locks.io.acquire()

            if result:
                threadData.shared.value.append(column)

                if conf.verbose in (1, 2) and not hasattr(conf, "api"):
                    clearConsoleLine(True)
                    infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column))
                    dataToStdout(infoMsg, True)

            if conf.verbose in (1, 2):
                status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit))
                dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True)

            kb.locks.io.release()

    try:
        runThreads(conf.threads, columnExistsThread, threadChoice=True)
    except KeyboardInterrupt:
        # Partial results gathered so far are still reported below
        warnMsg = "user aborted during column existence "
        warnMsg += "check. sqlmap will display partial output"
        logger.warn(warnMsg)

    clearConsoleLine(True)
    dataToStdout("\n")

    if not threadData.shared.value:
        warnMsg = "no column(s) found"
        logger.warn(warnMsg)
    else:
        columns = {}

        # Classify each found column as numeric vs non-numeric with one
        # additional boolean probe per column
        for column in threadData.shared.value:
            if Backend.getIdentifiedDbms() in (DBMS.MYSQL,):
                # MySQL: numeric if no value contains a non-digit character
                result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column)))
            else:
                # Other DBMSes: numeric if ROUND() can be applied to values
                result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column)))

            if result:
                columns[column] = "numeric"
            else:
                columns[column] = "non-numeric"

        kb.data.cachedColumns[conf.db] = {conf.tbl: columns}

        # Persist the (db, table, column, type) tuples for later reuse
        for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()):
            if _ not in kb.brute.columns:
                kb.brute.columns.append(_)

        hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True)

    return kb.data.cachedColumns
|
apache-2.0
| -1,944,711,308,653,261,800
| 38.25
| 167
| 0.65474
| false
| 3.736787
| false
| false
| false
|
theCatWisel/ThreatExchange
|
pytx/pytx/threat_descriptor.py
|
1
|
1324
|
from .common import Common
from .vocabulary import ThreatDescriptor as td
from .vocabulary import ThreatExchange as t
class ThreatDescriptor(Common):
    """A ThreatExchange Threat Descriptor object.

    Only declares the endpoint URLs and the field vocabulary; all request
    machinery is inherited from Common.
    """

    # Endpoint URLs used by the inherited request machinery.
    _URL = t.URL + t.VERSION + t.THREAT_DESCRIPTORS
    _DETAILS = t.URL + t.VERSION
    _RELATED = t.URL + t.VERSION

    # Every field the API exposes for a threat descriptor.
    _fields = [
        td.ADDED_ON,
        td.ATTACK_TYPE,
        td.CONFIDENCE,
        td.DESCRIPTION,
        td.EXPIRED_ON,
        td.ID,
        td.INDICATOR,
        td.LAST_UPDATED,
        td.METADATA,
        td.OWNER,
        td.PRECISION,
        td.PRIVACY_MEMBERS,
        td.PRIVACY_TYPE,
        td.RAW_INDICATOR,
        td.REVIEW_STATUS,
        td.SEVERITY,
        td.SHARE_LEVEL,
        td.SOURCE_URI,
        td.STATUS,
        td.TAGS,
        td.THREAT_TYPE,
        td.TYPE,
    ]

    # The default field set is the full field list (independent copy, as
    # before, so mutating one list cannot affect the other).
    _default_fields = _fields[:]

    # No fields are treated as unique identifiers for this object type.
    _unique = []
|
bsd-3-clause
| -7,972,141,957,186,459,000
| 20.015873
| 51
| 0.522659
| false
| 3.293532
| false
| false
| false
|
cayetanobv/QGIS-Plugin-MBTiles2img
|
mbtiles2img.py
|
1
|
9872
|
"""
/***************************************************************************
MBTiles2img
A QGIS plugin
This plugin takes an mbtiles file and split it apart into a folder hierarchy
of individual image tile files.
-------------------
begin : 2014-12-09
copyright : (C) 2014 by Cayetano Benavent
email : cayetanobv@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os.path
from PyQt5.QtCore import QSettings, QTranslator, qVersion, QCoreApplication
from PyQt5.QtWidgets import QAction, QFileDialog, QMessageBox
from PyQt5.QtGui import QIcon
from qgis.core import Qgis
from MBTiles2img import resources_rc
from MBTiles2img.mbtilesextractor import MBTilesExtractor
from MBTiles2img.mbtiles2img_dialog import MBTiles2imgDialog
class MBTiles2img:
    """QGIS plugin: split an MBTiles file apart into a folder hierarchy of
    individual image tile files.

    Wires a toolbar action and a dialog (MBTiles2imgDialog) into the QGIS
    GUI and delegates the actual extraction to MBTilesExtractor.
    """
    def __init__(self, iface):
        """Constructor.

        :param iface: An interface instance that will be passed to this class
            which provides the hook by which you can manipulate the QGIS
            application at run time.
        :type iface: QgsInterface
        """
        # Save reference to the QGIS interface
        self.iface = iface
        # Initialize plugin directory
        self.plugin_dir = os.path.dirname(__file__)
        # Initialize locale: first two chars of e.g. 'en_US' -> 'en'
        locale = QSettings().value('locale/userLocale')[0:2]
        locale_path = os.path.join(
            self.plugin_dir,
            'i18n',
            'MBTiles2img_{}.qm'.format(locale))
        if os.path.exists(locale_path):
            self.translator = QTranslator()
            self.translator.load(locale_path)
            # NOTE(review): lexicographic version comparison kept from the
            # stock Plugin Builder template; fine for current Qt versions.
            if qVersion() > '4.3.3':
                QCoreApplication.installTranslator(self.translator)
        # Create the dialog (after translation) and keep reference
        self.dlg = MBTiles2imgDialog(parent=self.iface.mainWindow())
        # Declare instance attributes
        self.actions = []
        self.menu = self.tr(u'&MBTiles images extract')
        # TODO: We are going to let the user set this up in a future iteration
        self.toolbar = self.iface.addToolBar(u'MBTiles2img')
        self.toolbar.setObjectName(u'MBTiles2img')
    # noinspection PyMethodMayBeStatic
    def tr(self, message):
        """Get the translation for a string using Qt translation API.

        Implemented here because this class does not inherit QObject.

        :param message: String for translation.
        :type message: str, QString
        :returns: Translated version of message.
        :rtype: QString
        """
        # noinspection PyTypeChecker,PyArgumentList,PyCallByClass
        return QCoreApplication.translate('MBTiles2img', message)
    def add_action(
        self,
        icon_path,
        text,
        callback,
        enabled_flag=True,
        add_to_menu=True,
        add_to_toolbar=True,
        status_tip=None,
        whats_this=None,
        parent=None):
        """Create a QAction, wire it to *callback* and register it.

        :param icon_path: Path to the icon for this action. Can be a resource
            path (e.g. ':/plugins/foo/bar.png') or a normal file system path.
        :param text: Text that should be shown in menu items for this action.
        :param callback: Function to be called when the action is triggered.
        :param enabled_flag: Whether the action should start out enabled.
        :param add_to_menu: Also add the action to the plugin menu.
        :param add_to_toolbar: Also add the action to the plugin toolbar.
        :param status_tip: Optional text shown in the status bar on hover.
        :param whats_this: Optional "What's this?" text shown on hover.
        :param parent: Parent widget for the new action. Defaults to None.
        :returns: The action that was created. Note that the action is also
            added to self.actions list.
        :rtype: QAction
        """
        icon = QIcon(icon_path)
        action = QAction(icon, text, parent)
        action.triggered.connect(callback)
        action.setEnabled(enabled_flag)
        if status_tip is not None:
            action.setStatusTip(status_tip)
        if whats_this is not None:
            action.setWhatsThis(whats_this)
        if add_to_toolbar:
            self.toolbar.addAction(action)
        if add_to_menu:
            self.iface.addPluginToMenu(
                self.menu,
                action)
        self.actions.append(action)
        return action
    def initGui(self):
        """Create the menu entries and toolbar icons inside the QGIS GUI."""
        icon_path = ':/plugins/MBTiles2img/icon.png'
        self.add_action(
            icon_path,
            text=self.tr(u'MBTiles images extract'),
            callback=self.run,
            parent=self.iface.mainWindow())
        # Wire the dialog buttons to their handlers.
        self.dlg.loadFileButton.clicked.connect(self.loadMBTilesFile)
        self.dlg.selectDestFolderButton.clicked.connect(self.setDestFolder)
        self.dlg.runExtractionButton.clicked.connect(self.runTileExtraction)
        self.dlg.helpButton.clicked.connect(self.getHelp)
    def unload(self):
        """Removes the plugin menu item and icon from QGIS GUI."""
        for action in self.actions:
            self.iface.removePluginMenu(
                self.tr(u'&MBTiles images extract'),
                action)
            self.iface.removeToolBarIcon(action)
    def run(self):
        # Show the (non-modal) plugin dialog.
        self.dlg.show()
    def runTileExtraction(self):
        """Run the tile extraction with the paths chosen in the dialog,
        resetting the dialog labels afterwards on success.
        """
        input_file = self.dlg.getPathMBTiles()
        dest_folder = self.dlg.getPathDestFolder()
        res = self.tileExtractor(input_file, dest_folder)
        # tileExtractor returns 1 on success, None on failure.
        if res == 1:
            self.dlg.clearLabelPathMBTiles()
            self.dlg.clearLabelPathDestFolder()
            self.dlg.setLabelPathDestFolder("Destination folder...")
            self.dlg.setLabelPathMBTiles("MBTiles to extract...")
    def loadMBTilesFile(self):
        """Ask the user for the MBTiles file to extract and display the
        chosen path in the dialog.
        """
        self.dlg.progressBar.setValue(0)
        # Open file dialog to load MBTiles file
        start_dir = '/home'
        fl_types = "MBTiles files (*.mbtiles)"
        file_path, _ = QFileDialog.getOpenFileName(self.iface.mainWindow(),
                            'Open MBTiles file',
                            start_dir, fl_types)
        if file_path:
            self.dlg.setLabelPathMBTiles(file_path)
        else:
            self.dlg.setLabelPathMBTiles("MBTiles to extract...")
    def setDestFolder(self):
        """Ask the user for the destination folder for exported images and
        display the chosen path in the dialog.
        """
        self.dlg.progressBar.setValue(0)
        # Open file dialog to select folder
        start_dir = '/home'
        folder_path = QFileDialog.getExistingDirectory(self.iface.mainWindow(),
                            'Select destination folder to save exported images',
                            start_dir)
        if folder_path:
            self.dlg.setLabelPathDestFolder(folder_path)
        else:
            self.dlg.setLabelPathDestFolder("Destination folder...")
    def tileExtractor(self, input_file, dest_folder):
        """Extract all tiles from *input_file* into *dest_folder*.

        Delegates to the MBTilesExtractor library; reports progress through
        the dialog progress bar and the result (or error) through the QGIS
        message bar.

        :returns: 1 on success, None on failure.
        """
        try:
            ex_mbt = MBTilesExtractor(input_file, dirname=dest_folder, overwrite=True)
            self.dlg.progressBar.setValue(10)
            ex_mbt.extractTiles()
            msg_type= "Info"
            level = Qgis.Info
            progress_value = 100
            # Output folder is assumed to mirror the input file name up to the
            # first dot -- TODO confirm this matches MBTilesExtractor's output.
            outfolder = os.path.join(dest_folder,os.path.basename(input_file).split('.')[0])
            result = 'Tile extraction done! Output folder: {}'.format(outfolder)
            self.iface.messageBar().pushMessage(msg_type, result, level=level, duration=10)
            self.dlg.progressBar.setValue(progress_value)
            return 1
        except Exception as err:
            # Surface any extraction failure in the message bar; reset progress.
            result = 'Error: {0}'.format(err)
            self.iface.messageBar().pushMessage("Error", result, level=Qgis.Critical, duration=10)
            self.dlg.progressBar.setValue(0)
    def getHelp(self):
        """Show a short usage summary in a message box."""
        QMessageBox.information(self.iface.mainWindow(),"Help",
                        """
                        1) Select MBTiles to extract.
                        2) Select destination folder to
                        save exported images.
                        3) Push button "Run tile extraction".
                        Developed by Cayetano Benavent 2014-2018.
                        """)
|
gpl-2.0
| 6,449,369,633,904,403,000
| 33.15917
| 105
| 0.571212
| false
| 4.432869
| false
| false
| false
|
chjdev/euler
|
python/problem28.py
|
1
|
1182
|
# Number spiral diagonals
#
# Problem 28
#
# Starting with the number 1 and moving to the right in a clockwise direction a 5 by 5 spiral is formed as follows:
#
# 21 22 23 24 25
# 20 7 8 9 10
# 19 6 1 2 11
# 18 5 4 3 12
# 17 16 15 14 13
#
# It can be verified that the sum of the numbers on the diagonals is 101.
#
# What is the sum of the numbers on the diagonals in a 1001 by 1001 spiral formed in the same way?
# 43 44 45 46 47 48 49
# 42 21 22 23 24 25 26
# 41 20 7 8 9 10 27
# 40 19 6 1 2 11 28
# 39 18 5 4 3 12 29
# 38 17 16 15 14 13 30
# 37 36 35 34 33 32 31
# 3x3 -> 1
# 5x5 -> +2 -> 3
# 7x7 -> +2 -> 5
# 1 (2) 3 (4) 5 (6) 7 (8) 9
# (10) (11) (12) 13 (14) (15) (16) 17 (18) (19) (20) 21 (22) (23) (24) 25
# (26) (27) (28) (29) (30) 31 and so on
# in english: iterate 4 times using steps according to the sequence above
import itertools
def spiral_numbers(max_dim=None):
    """Yield the numbers on the diagonals of a clockwise number spiral.

    Yields 1 first (the centre), then for each concentric ring the four
    corner values of that ring; the corners of the ring with dimension
    ``d`` (3, 5, 7, ...) are ``d - 1`` apart, hence the step sequence
    2, 4, 6, ...

    :param max_dim: stop after the ring of this dimension has been
        yielded; ``None`` yields rings forever.  Note: the first ring
        (dim 3) is always produced before the limit is checked, so
        ``max_dim`` values below 3 still yield 5 numbers.
    """
    dim = 1
    num = 1
    yield num
    for offset in itertools.count(2, 2):  # corner spacing: 2, 4, 6, ...
        dim += 2
        for _ in range(4):  # four corners per ring
            num += offset
            yield num
        if max_dim is not None and dim >= max_dim:
            break
# Print the diagonal sum of a 1001x1001 spiral (Project Euler problem 28).
print(sum(spiral_numbers(1001)))
|
bsd-2-clause
| 7,553,869,055,711,668,000
| 22.176471
| 115
| 0.584602
| false
| 2.686364
| false
| false
| false
|
liushuaikobe/yixin
|
lib/yixin.py
|
1
|
10990
|
# -*- coding: utf-8 -*-
import hashlib
import time
import simplejson
from xml.etree import ElementTree as etree
from xml.etree.ElementTree import Element, SubElement, ElementTree
import utils
import log
import constant
import messagebuilder
class YiXin(object):
    '''
    Main entry point of the library.

    Verifies YiXin server callback signatures, parses incoming XML
    messages and dispatches them to user-registered callbacks, wraps
    reply building, and manages the cached API access token and the
    custom menu.
    '''
    def __init__(self, token, appId, appSecret):
        # Credentials used for signature checking and token retrieval.
        self.token = token
        self.appId = appId
        self.appSecret = appSecret
        # Access-token cache: value, lifetime in seconds, fetch timestamp.
        self.accessToken = None
        self.accessTokenExpiresIn = None
        self.accessTokenGetTimeStamp = None
        self.reply = Reply()
        # Message builders are created lazily and reused across requests.
        self.textMsgBuilder = None
        self.picMsgBuilder = None
        self.locationMsgBuilder = None
        self.eventMsgBuilder = None
        # User-registered callbacks; each is invoked as callback(msgType, msg).
        # NOTE(review): 'Unsbscribe' is a typo, but the name is part of the
        # public setter API so the spelling is kept.
        self.onTextMsgReceivedCallback = None
        self.onPicMsgReceivedCallback = None
        self.onLocationMsgReceivedCallback = None
        self.onEventMsgReceivedCallback = None
        self.onButtonClickCallback = None
        self.onUserSubscribeCallback = None
        self.onUserUnsbscribeCallback = None
    def checkSignature(self, signature, timestamp, nonce, echostr):
        '''
        Verify a server callback signature.

        Returns *echostr* when the SHA1 hex digest of the sorted,
        concatenated (token, timestamp, nonce) matches *signature*,
        otherwise None.  All four arguments must be strings.
        '''
        if not utils.checkType(type(''), signature, timestamp, nonce, echostr):
            log.log(log.ERROR, 'Your args for signature checking must be ' + str(type('')))
            return None
        tmpLst = [self.token, timestamp, nonce]
        tmpLst.sort()
        tmpStr = ''.join(tuple(tmpLst))
        # NOTE(review): hashing a str works on Python 2; Python 3 would need
        # tmpStr.encode() first.
        tmpStr = hashlib.sha1(tmpStr).hexdigest()
        if tmpStr == signature:
            log.log(log.INFO, 'Signature checking successfully.')
            return echostr
        else:
            log.log(log.ERROR, 'Signature checking failed.')
            return None
    # ---- Message handling -------------------------------------------------
    def handleMessage(self, rawMsg, callback=None):
        '''
        Handle a raw XML message posted from the YiXin Server.

        Builds a typed message object, fires the matching registered
        callback(s), then the optional per-call *callback*, and returns
        the parsed message (None for unrecognised message types).
        '''
        msgType = self.getMsgType(rawMsg)
        msg = None
        # we received a text message
        if msgType == constant.TEXT_TYPE:
            if not self.textMsgBuilder:
                self.textMsgBuilder = messagebuilder.TextMsgBuilder(rawMsg)
            else:
                self.textMsgBuilder.setXmlStr(rawMsg)
            msg = self.textMsgBuilder.build()
            if callable(self.onTextMsgReceivedCallback):
                self.onTextMsgReceivedCallback(msgType, msg)
        # we received an image message
        elif msgType == constant.PIC_TYPE:
            if not self.picMsgBuilder:
                self.picMsgBuilder = messagebuilder.PicMsgBuilder(rawMsg)
            else:
                self.picMsgBuilder.setXmlStr(rawMsg)
            msg = self.picMsgBuilder.build()
            if callable(self.onPicMsgReceivedCallback):
                self.onPicMsgReceivedCallback(msgType, msg)
        # we received a location message
        elif msgType == constant.LOCATION_TYPE:
            if not self.locationMsgBuilder:
                self.locationMsgBuilder = messagebuilder.LocationMsgBuilder(rawMsg)
            else:
                self.locationMsgBuilder.setXmlStr(rawMsg)
            msg = self.locationMsgBuilder.build()
            if callable(self.onLocationMsgReceivedCallback):
                self.onLocationMsgReceivedCallback(msgType, msg)
        # we received an event push
        elif msgType == constant.EVENT_TYPE:
            if not self.eventMsgBuilder:
                self.eventMsgBuilder = messagebuilder.EventMsgBuilder(rawMsg)
            else:
                self.eventMsgBuilder.setXmlStr(rawMsg)
            msg = self.eventMsgBuilder.build()
            if callable(self.onEventMsgReceivedCallback):
                self.onEventMsgReceivedCallback(msgType, msg)
            # dispatch the specific event to its dedicated callback
            event = msg.getEvent().lower()
            # new subscribe
            if event == constant.SUBSCRIBE_EVENT:
                if callable(self.onUserSubscribeCallback):
                    self.onUserSubscribeCallback(msgType, msg)
            # new unsubscribe
            elif event == constant.UNSUBSCRIBE_EVENT:
                if callable(self.onUserUnsbscribeCallback):
                    self.onUserUnsbscribeCallback(msgType, msg)
            # button clicked
            elif event == constant.CLICK_EVETN:
                if callable(self.onButtonClickCallback):
                    self.onButtonClickCallback(msgType, msg)
        # The optional per-call callback always runs last.
        if callable(callback):
            callback(msgType, msg)
        return msg
    # ---- Reply helpers ----------------------------------------------------
    def replyText(self, toUser, fromUser, content=''):
        '''
        Wrapper for replying with a text message.
        '''
        return self.reply.replyText(toUser, fromUser, content)
    def replyMusic(self, toUser, fromUser, title, description, musicUrl, HQMusicUrl):
        '''
        Wrapper for replying with a music message.
        '''
        return self.reply.replyMusic(toUser, fromUser, title, description, musicUrl, HQMusicUrl)
    def replyNews(self, toUser, fromUser, articleCount, articles):
        '''
        Wrapper for replying with a news message.
        '''
        return self.reply.replyNews(toUser, fromUser, articleCount, articles)
    def getMsgType(self, rawMsg):
        # Read the message type node from the raw XML payload.
        root = etree.fromstring(rawMsg)
        return root.find(constant.MSG_TYPE_NODE_NAME).text
    # ---- Callback registration --------------------------------------------
    def setOnTextMsgReceivedCallback(self, callback):
        assert callable(callback)
        self.onTextMsgReceivedCallback = callback
    def setOnPicMsgReceivedCallback(self, callback):
        assert callable(callback)
        self.onPicMsgReceivedCallback = callback
    def setOnLocationMsgReceivedCallback(self, callback):
        assert callable(callback)
        self.onLocationMsgReceivedCallback = callback
    def setOnEventMsgReceivedCallback(self, callback):
        assert callable(callback)
        self.onEventMsgReceivedCallback = callback
    def setOnButtonClickCallback(self, callback):
        assert callable(callback)
        self.onButtonClickCallback = callback
    def setOnUserSubscribeCallback(self, callback):
        assert callable(callback)
        self.onUserSubscribeCallback = callback
    def setOnUserUnsbscribeCallback(self, callback):
        assert callable(callback)
        self.onUserUnsbscribeCallback = callback
    # ---- Access token -----------------------------------------------------
    def getAccessToken(self):
        '''
        Return a cached access token, fetching a new one only when the
        cached token is missing or has expired.
        '''
        if self.accessToken and self.accessTokenExpiresIn and self.accessTokenGetTimeStamp: # We have got the access token.
            if time.time() - self.accessTokenGetTimeStamp < self.accessTokenExpiresIn: # The access token is valid until now.
                log.log(log.DEBUG, self.accessToken + ' old')
                return self.accessToken
        url = constant.GET_TOKEN_URL
        params = {
            'grant_type' : 'client_credential',
            'appid' : self.appId,
            'secret' : self.appSecret
        }
        result = simplejson.loads(utils.doGet(url, params))
        self.accessToken = result['access_token']
        self.accessTokenExpiresIn = float(result['expires_in'])
        self.accessTokenGetTimeStamp = time.time()
        log.log(log.DEBUG, self.accessToken + ' new')
        return self.accessToken
    # ---- Menu management --------------------------------------------------
    def addMenu(self, buttonGroup):
        '''
        Upload a ButtonGroup as the account's custom menu.
        '''
        log.log(log.DEBUG, simplejson.dumps(buttonGroup.meta))
        utils.doPostWithoutParamsEncoding(''.join((constant.ADD_MENU_URL, self.getAccessToken())), \
            simplejson.dumps(buttonGroup.meta))
    def deleteMenu(self):
        '''
        Delete the menu.
        '''
        log.log(log.DEBUG, 'Delete menu.')
        params = {
            'access_token' : self.getAccessToken()
        }
        result = utils.doGet(constant.DELETE_MENU_URL, params)
        log.log(log.DEBUG, result)
    def queryCurrentMenu(self):
        '''
        Get the current structure of the menu.  Not implemented yet.
        '''
        pass
class Reply(object):
    '''
    Builds the XML reply payloads sent back to the YiXin server.
    '''
    def __init__(self):
        pass
    def replyText(self, toUser, fromUser, content=''):
        '''Render a plain-text reply message.'''
        fields = (toUser, fromUser, self.getCurrentTime(), content)
        return self.render(constant.REPLY_TEXT_TEMPLATE, fields)
    def replyMusic(self, toUser, fromUser, title, description, musicUrl, HQMusicUrl):
        '''Render a music reply message.'''
        fields = (toUser, fromUser, self.getCurrentTime(), title,
                  description, musicUrl, HQMusicUrl)
        return self.render(constant.REPLY_MUSIC_TEMPLATE, fields)
    def replyNews(self, toUser, fromUser, articleCount, articles):
        '''Render a news reply message from a list of Article objects.'''
        root = Element(Article.ROOT_TAG_NAME)
        for article in articles:
            item = SubElement(root, Article.ITEM_TAG_NAME)
            for tag in article.meta:
                node = SubElement(item, tag)
                node.text = str(article.meta[tag])
        fields = (toUser, fromUser, self.getCurrentTime(),
                  str(articleCount), etree.tostring(root))
        return self.render(constant.REPLY_NEWS_TEMPLATE, fields)
    def getCurrentTime(self):
        '''Return the current unix time as a string of whole seconds.'''
        return str(int(time.time()))
    def render(self, template, args):
        '''Interpolate *args* into a printf-style message template.'''
        return template % tuple(args)
class Article(object):
    '''
    One entry of a News-type reply sent to the user.

    NOTICE: objects of this class are only used when replying to the user;
    they are never built from a received message.
    '''
    ROOT_TAG_NAME = 'Articles'
    ITEM_TAG_NAME = 'item'
    def __init__(self):
        # Every field starts out empty; the setters below fill them in.
        self.meta = dict.fromkeys(('Title', 'Description', 'PicUrl', 'Url'), '')
    def setTitle(self, title):
        self.meta['Title'] = title
    def setDescription(self, description):
        self.meta['Description'] = description
    def setPicUrl(self, picUrl):
        self.meta['PicUrl'] = picUrl
    def setUrl(self, url):
        self.meta['Url'] = url
class Button(object):
    '''
    Base class for menu buttons; carries the shared "name" field.
    '''
    CLICK_TYPE = 'click'
    def __init__(self):
        self.meta = dict(name='')
    def setName(self, name):
        self.meta['name'] = name
class CommonClickButton(Button):
    '''
    A leaf click-type button: a name plus its type and dispatch key.
    '''
    def __init__(self):
        Button.__init__(self)
        self.meta['type'] = Button.CLICK_TYPE
        self.meta['key'] = ''
    def setKey(self, key):
        self.meta['key'] = key
class TopLevelButton(Button):
    '''
    A first-level menu button that groups sub-buttons.
    '''
    def __init__(self):
        Button.__init__(self)
        self.meta['sub_button'] = []
    def addSubButton(self, commonButton):
        '''Append a CommonClickButton's metadata to this button's children.'''
        self.meta['sub_button'].append(commonButton.meta)
class ButtonGroup(object):
    '''
    The complete menu: an ordered collection of top-level buttons.
    '''
    def __init__(self):
        self.meta = {'button': []}
    def addButton(self, button):
        '''Append a Button's metadata to the group.'''
        self.meta['button'].append(button.meta)
|
mit
| -6,823,000,264,583,889,000
| 30.489971
| 136
| 0.602821
| false
| 3.250518
| false
| false
| false
|
atupal/ccrawler
|
request/baseRequestHandler.py
|
1
|
5371
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
baseRequestHandler.py
~~~~~~~~~~~~~~~~~~~~~
Base request handler
"""
import gevent
from gevent import monkey
monkey.patch_all()
import requests
import logging
import os
import socket
import time
import json
from requests import ConnectionError
from random import choice
from response import Response
import proxy
import cookie
class BaseRequestHandler(object):
    """Fetches URLs described by crawler task dicts using `requests`.

    Optionally routes traffic through a rotating pool of HTTP proxies.
    The proxy list is cached on disk as JSON so a fresh process can reuse
    previously fetched proxies.
    """
    def __init__(self, use_proxy=False, proxy_module=proxy, cookie_module=cookie):
        """
        :param use_proxy: route requests through the proxy pool when True.
        :param proxy_module: module providing get_proxy_list().
        :param cookie_module: module handling cookies (stored, not yet used).
        """
        self.use_proxy = use_proxy
        # Bug fix: honour the injected proxy_module instead of always using
        # the module-level `proxy` import (the parameter was ignored before;
        # the default keeps the old behavior).
        self.proxy_module = proxy_module
        self.cookie_module = cookie_module
        self._proxy_pool = []
        self._proxy_pool_size = 0
        self.proxy_list_cache_file = '/tmp/ccrawler_proxy_list.cache'
        # Warm the in-memory pool from the on-disk cache when present.
        if os.path.exists(self.proxy_list_cache_file):
            with open(self.proxy_list_cache_file) as f:
                self._proxy_pool = json.load(f)
            self._proxy_pool_size = len(self._proxy_pool)
        self._redis_proxy_pool_connetion = None
        self._proxy_lock = gevent.lock.Semaphore()
        self._cookie_pool = {}
    def handle(self, task, **kwargs):
        """Entry point: fetch task['url'] and attach the response to *task*."""
        return self.request(task, **kwargs)
    def request(self, task, **kwargs):
        """Perform the HTTP request described by *task*.

        Retries with fresh proxies on proxy/timeout/connection errors (up
        to 10 attempts), stores the response dict under task['response'],
        increments task['url_depth'], and returns the task.

        :raises ValueError: for an unsupported HTTP method.
        :raises Exception: when the final response status is not 200.
        """
        url = task.get('url')
        if not url:
            logging.error('invalid url: emptry url!')
            return task
        _kwargs = {
            'params': {},  # dict or bytes
            'data': {},  # dict, bytes or file object
            'headers': {'user-agent': 'googleBot'},
            'cookies': {},  # dict or cookiejar object
            'files': {},
            'auth': None,
            'timeout': 5,
            'allow_redirects': True,
            'proxies': {},
            'verify': False,
            'stream': False,
            'cert': None,
        }
        _kwargs.update(kwargs)
        # Initialise so the post-loop check below can never hit an
        # unbound local.
        proxy = None
        if self.use_proxy or task.get('proxy'):
            proxy = task.get('proxy') or self._pop_proxy()
            _kwargs['proxies'].update(proxy)
        # Bug fix: the original called `.uppper()` (typo), which raised
        # AttributeError whenever a task set an explicit method and made
        # the POST branch unreachable.
        method_name = (task.get('method') or 'GET').upper()
        if method_name == 'GET':
            method = 'get'
        elif method_name == 'POST':
            method = 'post'
        else:
            # Bug fix: raising a bare string is invalid; raise a real
            # exception instead.
            raise ValueError('Invalid or unsupported method!')
        proxy_retry_cnt = 0
        while 1:
            try:
                resp = requests.request(method, url, **_kwargs)
                break
            except (requests.exceptions.ProxyError,
                    requests.exceptions.Timeout,
                    ConnectionError,
                    socket.timeout) as e:
                proxy_retry_cnt += 1
                if self.use_proxy:
                    # Swap in a fresh proxy before retrying.
                    proxy = self._pop_proxy()
                    _kwargs['proxies'].update(proxy)
                if proxy_retry_cnt >= 10:
                    raise e
        if self.use_proxy and proxy:
            # The proxy worked -- return it to the pool for reuse.
            self._add_proxy(proxy)
        response = {
            'content': resp.content,
            'origin_url': task['url'],
            'url': resp.url,
            'cookies': dict(resp.cookies),
            'status_code': resp.status_code,
            'headers': dict(resp.headers),
        }
        task['response'] = response
        if resp.status_code != 200:
            raise Exception('not 200 http response')
        if 'url_depth' in task:
            task['url_depth'] += 1
        else:
            task['url_depth'] = 1
        return task
    def _pop_proxy(self):
        """Take one proxy from the pool, refilling it (at most 3 fetch
        attempts) when empty.  Greenlet-safe via the proxy lock."""
        fetch_cnt = 0
        with self._proxy_lock:
            while self._proxy_pool_size <= 0:
                self._fetch_new_proxy_list()
                fetch_cnt += 1
                if fetch_cnt == 3:
                    raise Exception('Can not fetch proxy list!')
            proxy = self._proxy_pool.pop(0)
            self._proxy_pool_size -= 1
            return proxy
    def _get_fastest_proxy(self):
        # Placeholder: proxy latency ranking is not implemented yet.
        pass
    def _add_proxy(self, proxy):
        """Return a proxy to the pool."""
        self._proxy_pool.append(proxy)
        self._proxy_pool_size += 1
    def _fetch_new_proxy_list(self):
        """Fetch a fresh proxy list and persist it to the disk cache
        (best effort -- cache write failures are ignored)."""
        proxy_list = self.proxy_module.get_proxy_list()
        try:
            with open(self.proxy_list_cache_file, 'w') as f:
                json.dump(proxy_list, f, indent=2)
        except IOError:
            pass
        self._proxy_pool += proxy_list
        self._proxy_pool_size += len(proxy_list)
    def _check_proxy_pool_health(self):
        # Placeholder: health checking is not implemented yet; the flag
        # toggle is kept for future cooperation with _fetch_new_proxy_list.
        self._proxy_checking = True
        self._proxy_checking = False
    @property
    def proxy_pool_size(self):
        """Number of proxies currently available in the pool."""
        return self._proxy_pool_size
def test():
    # Smoke test: fan out 100 proxied fetches of baidu.com via gevent and
    # print how long the whole batch takes.
    # NOTE(review): Python 2 only (print statement, xrange).
    requestHandler = BaseRequestHandler(use_proxy=True)
    jobs = []
    st = time.time()
    for i in xrange(100):
        jobs.append( gevent.spawn( requestHandler.handle, {'url': 'http://baidu.com'} ) )
    for job in jobs:
        try:
            print job.get()
        except:
            pass
    gevent.joinall(jobs)
    print time.time() - st
if __name__ == '__main__':
    test()
|
mit
| 4,501,589,908,298,685,400
| 29.174157
| 89
| 0.508658
| false
| 4.14749
| false
| false
| false
|
dshulyak/solar
|
solar/solar/core/handlers/ansible_template.py
|
1
|
1892
|
# -*- coding: utf-8 -*-
from fabric import api as fabric_api
from fabric.state import env
import os
from solar.core.log import log
from solar.core.handlers.base import TempFileHandler
from solar import errors
# Run fabric in warn-only mode so a failing local command returns a result
# object (checked via out.failed below) instead of calling sys.exit(1).
env.warn_only = True
class AnsibleTemplate(TempFileHandler):
    """Runs resource actions by rendering an ansible inventory plus
    playbook into the handler's temp directory and shelling out to
    ansible-playbook."""
    def action(self, resource, action_name):
        """Render the inventory/playbook for *resource* and execute it.

        Raises errors.SolarError when ansible-playbook exits non-zero.
        """
        inventory_path = self._create_inventory(resource)
        playbook_path = self._create_playbook(resource, action_name)
        log.debug('inventory_file: %s', inventory_path)
        log.debug('playbook_file: %s', playbook_path)
        cmd = ' '.join(['ansible-playbook', '--module-path', '/vagrant/library', '-i', inventory_path, playbook_path])
        log.debug('EXECUTING: %s', cmd)
        # Host key checking is disabled because target hosts are transient.
        with fabric_api.shell_env(ANSIBLE_HOST_KEY_CHECKING='False'):
            result = fabric_api.local(cmd, capture=True)
            if result.failed:
                raise errors.SolarError(result)
    def _create_inventory(self, r):
        """Write the rendered inventory for *r* and return its path."""
        path = os.path.join(self.dirs[r.name], 'inventory')
        with open(path, 'w') as handle:
            handle.write(self._render_inventory(r))
        return path
    def _render_inventory(self, r):
        """Build a one-line ansible inventory entry for resource *r*,
        carrying every resource argument as a host variable."""
        template = '{0} ansible_ssh_host={1} ansible_connection=ssh ansible_ssh_user={2} ansible_ssh_private_key_file={3} {4}'
        host = r.args['ip'].value
        user = r.args['ssh_user'].value
        ssh_key = r.args['ssh_key'].value
        extra = ' '.join('{0}="{1}"'.format(name, r.args[name].value) for name in r.args)
        rendered = template.format(host, host, user, ssh_key, extra)
        log.debug(rendered)
        return rendered
    def _create_playbook(self, resource, action):
        """Compile the action file for *resource* into a playbook."""
        return self._compile_action_file(resource, action)
|
apache-2.0
| -6,704,512,065,824,343,000
| 38.416667
| 127
| 0.641121
| false
| 3.549719
| false
| false
| false
|
psykzz/flask-admin
|
flask_admin/contrib/sqla/view.py
|
1
|
29504
|
import logging
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.expression import desc
from sqlalchemy import Column, Boolean, func, or_
from sqlalchemy.exc import IntegrityError
from flask import flash
from flask.ext.admin._compat import string_types
from flask.ext.admin.babel import gettext, ngettext, lazy_gettext
from flask.ext.admin.model import BaseModelView
from flask.ext.admin.actions import action
from flask.ext.admin._backwards import ObsoleteAttr
from flask.ext.admin.contrib.sqla import form, filters, tools
from .typefmt import DEFAULT_FORMATTERS
from .tools import is_inherited_primary_key, get_column_for_current_model, get_query_for_ids
from .ajax import create_ajax_loader
# Set up logger
log = logging.getLogger("flask-admin.sqla")
class ModelView(BaseModelView):
"""
SQLAlchemy model view
Usage sample::
admin = Admin()
admin.add_view(ModelView(User, db.session))
"""
column_auto_select_related = ObsoleteAttr('column_auto_select_related',
'auto_select_related',
True)
"""
Enable automatic detection of displayed foreign keys in this view
and perform automatic joined loading for related models to improve
query performance.
Please note that detection is not recursive: if `__unicode__` method
of related model uses another model to generate string representation, it
will still make separate database call.
"""
column_select_related_list = ObsoleteAttr('column_select_related',
'list_select_related',
None)
"""
List of parameters for SQLAlchemy `subqueryload`. Overrides `column_auto_select_related`
property.
For example::
class PostAdmin(ModelView):
column_select_related_list = ('user', 'city')
You can also use properties::
class PostAdmin(ModelView):
column_select_related_list = (Post.user, Post.city)
Please refer to the `subqueryload` on list of possible values.
"""
column_display_all_relations = ObsoleteAttr('column_display_all_relations',
'list_display_all_relations',
False)
"""
Controls if list view should display all relations, not only many-to-one.
"""
column_searchable_list = ObsoleteAttr('column_searchable_list',
'searchable_columns',
None)
"""
Collection of the searchable columns. Only text-based columns
are searchable (`String`, `Unicode`, `Text`, `UnicodeText`).
Example::
class MyModelView(ModelView):
column_searchable_list = ('name', 'email')
You can also pass columns::
class MyModelView(ModelView):
column_searchable_list = (User.name, User.email)
The following search rules apply:
- If you enter *ZZZ* in the UI search field, it will generate *ILIKE '%ZZZ%'*
statement against searchable columns.
- If you enter multiple words, each word will be searched separately, but
only rows that contain all words will be displayed. For example, searching
for 'abc def' will find all rows that contain 'abc' and 'def' in one or
more columns.
- If you prefix your search term with ^, it will find all rows
that start with ^. So, if you entered *^ZZZ*, *ILIKE 'ZZZ%'* will be used.
- If you prefix your search term with =, it will perform an exact match.
For example, if you entered *=ZZZ*, the statement *ILIKE 'ZZZ'* will be used.
"""
column_filters = None
"""
Collection of the column filters.
Can contain either field names or instances of :class:`flask.ext.admin.contrib.sqla.filters.BaseFilter` classes.
For example::
class MyModelView(BaseModelView):
column_filters = ('user', 'email')
or::
class MyModelView(BaseModelView):
column_filters = (BooleanEqualFilter(User.name, 'Name'))
"""
model_form_converter = form.AdminModelConverter
"""
Model form conversion class. Use this to implement custom field conversion logic.
For example::
class MyModelConverter(AdminModelConverter):
pass
class MyAdminView(ModelView):
model_form_converter = MyModelConverter
"""
inline_model_form_converter = form.InlineModelConverter
"""
Inline model conversion class. If you need some kind of post-processing for inline
forms, you can customize behavior by doing something like this::
class MyInlineModelConverter(AdminModelConverter):
def post_process(self, form_class, info):
form_class.value = wtf.TextField('value')
return form_class
class MyAdminView(ModelView):
inline_model_form_converter = MyInlineModelConverter
"""
filter_converter = filters.FilterConverter()
"""
Field to filter converter.
Override this attribute to use non-default converter.
"""
fast_mass_delete = False
"""
If set to `False` and user deletes more than one model using built in action,
all models will be read from the database and then deleted one by one
giving SQLAlchemy a chance to manually cleanup any dependencies (many-to-many
relationships, etc).
If set to `True`, will run a `DELETE` statement which is somewhat faster,
but may leave corrupted data if you forget to configure `DELETE
CASCADE` for your model.
"""
inline_models = None
"""
Inline related-model editing for models with parent-child relations.
Accepts enumerable with one of the following possible values:
1. Child model class::
class MyModelView(ModelView):
inline_models = (Post,)
2. Child model class and additional options::
class MyModelView(ModelView):
inline_models = [(Post, dict(form_columns=['title']))]
3. Django-like ``InlineFormAdmin`` class instance::
class MyInlineModelForm(InlineFormAdmin):
form_columns = ('title', 'date')
class MyModelView(ModelView):
inline_models = (MyInlineModelForm(MyInlineModel),)
You can customize the generated field name by:
1. Using the `form_name` property as a key to the options dictionary:
class MyModelView(ModelView):
inline_models = ((Post, dict(form_label='Hello')))
2. Using forward relation name and `column_labels` property:
class Model1(Base):
pass
class Model2(Base):
# ...
model1 = relation(Model1, backref='models')
class MyModel1View(Base):
inline_models = (Model2,)
column_labels = {'models': 'Hello'}
"""
column_type_formatters = DEFAULT_FORMATTERS
form_choices = None
"""
Map choices to form fields
Example::
class MyModelView(BaseModelView):
form_choices = {'my_form_field': [
('db_value', 'display_value'),
]
"""
form_optional_types = (Boolean,)
"""
List of field types that should be optional if column is not nullable.
Example::
class MyModelView(BaseModelView):
form_optional_types = (Boolean, Unicode)
"""
def __init__(self, model, session,
             name=None, category=None, endpoint=None, url=None):
    """
    Constructor.

    :param model:
        Model class
    :param session:
        SQLAlchemy session
    :param name:
        View name. If not set, defaults to the model name
    :param category:
        Category name
    :param endpoint:
        Endpoint name. If not set, defaults to the model name
    :param url:
        Base URL. If not set, defaults to '/admin/' + endpoint
    """
    self.session = session

    # Caches populated later by init_search() / scaffold_filters().
    self._search_fields = None
    self._search_joins = dict()

    self._filter_joins = dict()

    if self.form_choices is None:
        self.form_choices = {}

    super(ModelView, self).__init__(model, name, category, endpoint, url)

    # Primary key
    self._primary_key = self.scaffold_pk()

    if self._primary_key is None:
        raise Exception('Model %s does not have primary key.' % self.model.__name__)

    # Configuration: explicit column_select_related_list wins over the
    # automatically scaffolded eager-join list.
    if not self.column_select_related_list:
        self._auto_joins = self.scaffold_auto_joins()
    else:
        self._auto_joins = self.column_select_related_list
# Internal API
def _get_model_iterator(self, model=None):
    """
    Return property iterator for the model.

    :param model:
        Model class; defaults to ``self.model`` when omitted.
    """
    if model is None:
        model = self.model

    return model._sa_class_manager.mapper.iterate_properties

# Scaffolding
def scaffold_pk(self):
    """
    Return the primary key name from a model.

    PK can be a single value or a tuple if multiple PKs exist.
    """
    return tools.get_primary_key(self.model)
def get_pk_value(self, model):
    """
    Return the PK value from a model object.

    ``self._primary_key`` is either one attribute name or a tuple of
    names (composite key); a tuple of values is returned in that case.
    """
    pk = self._primary_key

    try:
        # Single-column key: plain attribute access.
        return getattr(model, pk)
    except TypeError:
        # ``pk`` is a tuple of attribute names -> composite key.
        return tuple(getattr(model, field) for field in pk)
def scaffold_list_columns(self):
    """
    Return a list of columns from the model.

    Relationship properties are included only for many-to-one relations
    (or all relations if ``column_display_all_relations`` is set); plain
    columns are included unless they are foreign keys or a hidden
    primary key.
    """
    columns = []

    for p in self._get_model_iterator():
        # Verify type
        if hasattr(p, 'direction'):
            # Relationship property
            if self.column_display_all_relations or p.direction.name == 'MANYTOONE':
                columns.append(p.key)
        elif hasattr(p, 'columns'):
            # NOTE(review): this local appears unused — candidate for removal.
            column_inherited_primary_key = False

            if len(p.columns) != 1:
                # Multi-column properties are only tolerated for an
                # inherited primary key, where the column belonging to
                # the current model is picked out.
                if is_inherited_primary_key(p):
                    column = get_column_for_current_model(p)
                else:
                    raise TypeError('Can not convert multiple-column properties (%s.%s)' % (self.model, p.key))
            else:
                # Grab column
                column = p.columns[0]

            # An inherited primary key has a foreign key as well
            if column.foreign_keys and not is_inherited_primary_key(p):
                continue

            if not self.column_display_pk and column.primary_key:
                continue

            columns.append(p.key)

    return columns
def scaffold_sortable_columns(self):
    """
    Return a dictionary of sortable columns.

    Keys are property names, values are the underlying column objects.
    Relationship properties, multi-column properties, foreign keys and
    (unless ``column_display_pk`` is set) primary keys are excluded.
    """
    sortable = dict()

    for prop in self._get_model_iterator():
        if not hasattr(prop, 'columns'):
            continue

        # Multi-column properties are not supported
        if len(prop.columns) > 1:
            continue

        col = prop.columns[0]

        # Foreign keys are never sortable by default.
        if col.foreign_keys:
            continue

        # Primary key is sortable only when explicitly displayed.
        if col.primary_key and not self.column_display_pk:
            continue

        sortable[prop.key] = col

    return sortable
def _get_columns_for_field(self, field):
    """
    Resolve *field* to the list of columns backing it.

    :param field:
        Either a string attribute name looked up on ``self.model`` or a
        model attribute object.
    :raises Exception:
        If the name cannot be resolved or the attribute carries no columns.
    """
    if isinstance(field, string_types):
        attr = getattr(self.model, field, None)

        # Bugfix: check the *resolved attribute*, not the name — ``field``
        # is a non-None string here, so the old ``if field is None`` test
        # could never fire and a missing field fell through silently.
        if attr is None:
            raise Exception('Field %s was not found.' % field)
    else:
        attr = field

    if (not attr or
            not hasattr(attr, 'property') or
            not hasattr(attr.property, 'columns') or
            not attr.property.columns):
        raise Exception('Invalid field %s: does not contain any columns.' % field)

    return attr.property.columns

def _need_join(self, table):
    """Return True if *table* is not part of the model's own mapping and
    therefore requires an explicit join before it can be queried."""
    return table not in self.model._sa_class_manager.mapper.tables
def init_search(self):
    """
    Initialize search. Returns `True` if search is supported for this
    view.

    For SQLAlchemy, this will initialize internal fields: list of
    column objects used for filtering, etc.
    """
    if self.column_searchable_list:
        self._search_fields = []
        self._search_joins = dict()

        for p in self.column_searchable_list:
            for column in self._get_columns_for_field(p):
                column_type = type(column.type).__name__

                # ILIKE-based search only makes sense on text columns.
                if not self.is_text_column_type(column_type):
                    raise Exception('Can only search on text columns. ' +
                                    'Failed to setup search for "%s"' % p)

                self._search_fields.append(column)

                # If it belongs to different table - add a join
                if self._need_join(column.table):
                    self._search_joins[column.table.name] = column.table

    return bool(self.column_searchable_list)
def is_text_column_type(self, name):
    """
    Verify if the provided column type is text-based.

    :returns:
        ``True`` for ``String``, ``Unicode``, ``Text``, ``UnicodeText``;
        ``False`` for other non-empty names; ``None`` for a falsy name
        (preserving the original implicit return).
    """
    if not name:
        return None

    return name.lower() in ('string', 'unicode', 'text', 'unicodetext')
def scaffold_filters(self, name):
    """
    Return list of enabled filters for *name*.

    :param name:
        Either a (possibly dotted) column / relation name or a model
        attribute object.
    :returns:
        List of filter instances, or ``None`` if the column type has no
        filter converter.
    """
    join_tables = []

    if isinstance(name, string_types):
        # Walk dotted paths ("relation.column"), collecting every table
        # that must be joined along the way.
        model = self.model

        for attribute in name.split('.'):
            value = getattr(model, attribute)

            if (hasattr(value, 'property') and
                    hasattr(value.property, 'direction')):
                model = value.property.mapper.class_
                table = model.__table__

                if self._need_join(table):
                    join_tables.append(table)

            attr = value
    else:
        attr = name

    if attr is None:
        raise Exception('Failed to find field for filter: %s' % name)

    # Figure out filters for related column
    if hasattr(attr, 'property') and hasattr(attr.property, 'direction'):
        # Renamed from ``filters`` to avoid shadowing the module-level
        # ``filters`` import used elsewhere in this class.
        related_filters = []

        for p in self._get_model_iterator(attr.property.mapper.class_):
            if hasattr(p, 'columns'):
                # TODO: Check for multiple columns
                column = p.columns[0]

                if column.foreign_keys or column.primary_key:
                    continue

                visible_name = '%s / %s' % (self.get_column_name(attr.prop.table.name),
                                            self.get_column_name(p.key))

                type_name = type(column.type).__name__
                flt = self.filter_converter.convert(type_name,
                                                    column,
                                                    visible_name)

                if flt:
                    table = column.table

                    if join_tables:
                        self._filter_joins[table.name] = join_tables
                    elif self._need_join(table):
                        # Bugfix: pass the Table object (not its name) to
                        # _need_join — membership is tested against Table
                        # objects — and store Table objects so get_list()
                        # can read ``table.name`` when applying joins.
                        # This matches the handling further below.
                        self._filter_joins[table.name] = [table]

                    related_filters.extend(flt)

        return related_filters
    else:
        columns = self._get_columns_for_field(attr)

        if len(columns) > 1:
            raise Exception('Can not filter on more than one column for %s' % name)

        column = columns[0]

        if self._need_join(column.table) and name not in self.column_labels:
            visible_name = '%s / %s' % (
                self.get_column_name(column.table.name),
                self.get_column_name(column.name)
            )
        else:
            if not isinstance(name, string_types):
                visible_name = self.get_column_name(name.property.key)
            else:
                visible_name = self.get_column_name(name)

        type_name = type(column.type).__name__

        if join_tables:
            self._filter_joins[column.table.name] = join_tables

        flt = self.filter_converter.convert(
            type_name,
            column,
            visible_name,
            options=self.column_choices.get(name),
        )

        if flt and not join_tables and self._need_join(column.table):
            self._filter_joins[column.table.name] = [column.table]

        return flt
def is_valid_filter(self, filter):
    """
    Verify that the provided filter object is derived from the
    SQLAlchemy-compatible filter class.

    :param filter:
        Filter object to verify.
    """
    return isinstance(filter, filters.BaseSQLAFilter)

def scaffold_form(self):
    """
    Create form from the model.
    """
    converter = self.model_form_converter(self.session, self)
    form_class = form.get_form(self.model, converter,
                               base_class=self.form_base_class,
                               only=self.form_columns,
                               exclude=self.form_excluded_columns,
                               field_args=self.form_args,
                               extra_fields=self.form_extra_fields)

    # Inline child-model forms are contributed after the base form exists.
    if self.inline_models:
        form_class = self.scaffold_inline_form_models(form_class)

    return form_class

def scaffold_inline_form_models(self, form_class):
    """
    Contribute inline models to the form.

    :param form_class:
        Form class
    """
    inline_converter = self.inline_model_form_converter(self.session,
                                                        self,
                                                        self.model_form_converter)

    for m in self.inline_models:
        form_class = inline_converter.contribute(self.model, form_class, m)

    return form_class
def scaffold_auto_joins(self):
    """
    Return a list of relationship attributes to eagerly join, derived
    from the displayed columns.

    Only many-to-one / many-to-many relations that point away from this
    model and actually appear in the list view are included.
    """
    if not self.column_auto_select_related:
        return []

    # Collect candidate relation keys first.
    relation_keys = set()

    for prop in self._get_model_iterator():
        if not hasattr(prop, 'direction'):
            continue

        # Check if it is pointing to same model
        if prop.mapper.class_ == self.model:
            continue

        if prop.direction.name in ('MANYTOONE', 'MANYTOMANY'):
            relation_keys.add(prop.key)

    # Only join relations that are displayed in the list view.
    return [getattr(self.model, attr)
            for attr, _label in self._list_columns
            if attr in relation_keys]
# AJAX foreignkey support
def _create_ajax_loader(self, name, options):
    # NOTE(review): ``name`` is passed twice — presumably once as the
    # loader name and once as the model field name; confirm against
    # create_ajax_loader's signature.
    return create_ajax_loader(self.model, self.session, name, name, options)

# Database-related API
def get_query(self):
    """
    Return a query for the model type.

    If you override this method, don't forget to override `get_count_query` as well.
    """
    return self.session.query(self.model)

def get_count_query(self):
    """
    Return the count query for the model type.
    """
    return self.session.query(func.count('*')).select_from(self.model)
def _order_by(self, query, joins, sort_field, sort_desc):
    """
    Apply order_by to the query.

    :param query:
        Query
    :param joins:
        Set of already-joined table names (mutated in place)
    :param sort_field:
        Sort field: a column-name string, InstrumentedAttribute or Column
    :param sort_desc:
        Ascending or descending
    :returns:
        Tuple ``(query, joins)``.
    """
    # TODO: Preprocessing for joins
    # Try to handle it as a string
    if isinstance(sort_field, string_types):
        # Create automatic join against a table if column name
        # contains dot.
        if '.' in sort_field:
            parts = sort_field.split('.', 1)

            if parts[0] not in joins:
                query = query.join(parts[0])
                joins.add(parts[0])
    elif isinstance(sort_field, InstrumentedAttribute):
        # SQLAlchemy 0.8+ uses 'parent' as a name
        mapper = getattr(sort_field, 'parent', None)
        if mapper is None:
            # SQLAlchemy 0.7.x uses parententity
            mapper = getattr(sort_field, 'parententity', None)

        if mapper is not None:
            table = mapper.tables[0]

            if self._need_join(table) and table.name not in joins:
                query = query.outerjoin(table)
                joins.add(table.name)
    elif isinstance(sort_field, Column):
        # Raw Column objects need no extra join handling here.
        pass
    else:
        raise TypeError('Wrong argument type')

    if sort_field is not None:
        if sort_desc:
            query = query.order_by(desc(sort_field))
        else:
            query = query.order_by(sort_field)

    return query, joins
def _get_default_order(self):
    """
    Resolve the configured default ordering to a ``(field, direction)``
    pair, turning a plain column-name string into the model attribute.
    Returns ``None`` when no default order is configured.
    """
    order = super(ModelView, self)._get_default_order()

    if order is None:
        return None

    field, direction = order

    # A string name is resolved to the mapped attribute.
    if isinstance(field, string_types):
        field = getattr(self.model, field)

    return field, direction
def get_list(self, page, sort_column, sort_desc, search, filters, execute=True):
    """
    Return models from the database.

    :param page:
        Page number (0-based); ``None`` disables the offset
    :param sort_column:
        Sort column name
    :param sort_desc:
        Descending or ascending sort
    :param search:
        Search query
    :param execute:
        Execute query immediately? Default is `True`
    :param filters:
        List of filter tuples
    :returns:
        Tuple ``(total row count, query or result list)``.
    """
    # Will contain names of joined tables to avoid duplicate joins
    joins = set()

    query = self.get_query()
    count_query = self.get_count_query()

    # Apply search criteria
    if self._search_supported and search:
        # Apply search-related joins
        if self._search_joins:
            for jn in self._search_joins.values():
                query = query.join(jn)
                count_query = count_query.join(jn)

            joins = set(self._search_joins.keys())

        # Apply terms: each whitespace-separated term must match at least
        # one searchable column (terms are AND-ed, columns OR-ed).
        terms = search.split(' ')

        for term in terms:
            if not term:
                continue

            stmt = tools.parse_like_term(term)
            filter_stmt = [c.ilike(stmt) for c in self._search_fields]
            query = query.filter(or_(*filter_stmt))
            count_query = count_query.filter(or_(*filter_stmt))

    # Apply filters
    if filters and self._filters:
        for idx, value in filters:
            flt = self._filters[idx]

            # Figure out joins
            tbl = flt.column.table.name
            join_tables = self._filter_joins.get(tbl, [])

            for table in join_tables:
                if table.name not in joins:
                    query = query.join(table)
                    count_query = count_query.join(table)
                    joins.add(table.name)

            # Apply filter
            query = flt.apply(query, value)
            count_query = flt.apply(count_query, value)

    # Calculate number of rows before sorting/pagination is applied.
    count = count_query.scalar()

    # Auto join
    for j in self._auto_joins:
        query = query.options(joinedload(j))

    # Sorting
    if sort_column is not None:
        if sort_column in self._sortable_columns:
            sort_field = self._sortable_columns[sort_column]
            query, joins = self._order_by(query, joins, sort_field, sort_desc)
    else:
        order = self._get_default_order()

        if order:
            query, joins = self._order_by(query, joins, order[0], order[1])

    # Pagination
    if page is not None:
        query = query.offset(page * self.page_size)

    query = query.limit(self.page_size)

    # Execute if needed
    if execute:
        query = query.all()

    return count, query
def get_one(self, id):
    """
    Return a single model by its id.

    :param id:
        Model id
    """
    return self.session.query(self.model).get(id)

# Error handler
def handle_view_exception(self, exc):
    # Turn DB integrity violations into a friendly flash message instead
    # of a 500 page.
    if isinstance(exc, IntegrityError):
        # NOTE(review): ``exc.message`` only exists on Python 2 era
        # exceptions — confirm against the supported Python versions.
        flash(gettext('Integrity error. %(message)s', message=exc.message), 'error')
        return True

    # NOTE(review): super() is anchored at BaseModelView, which skips any
    # handle_view_exception defined between this class and BaseModelView —
    # verify this is intentional.
    return super(BaseModelView, self).handle_view_exception(exc)
# Model handlers
def create_model(self, form):
    """
    Create model from form.

    :param form:
        Form instance
    :returns:
        ``True`` on success, ``False`` if the commit failed.
    """
    try:
        model = self.model()
        form.populate_obj(model)
        self.session.add(model)
        # Pre-commit hook (validation / mutation) runs before commit.
        self._on_model_change(form, model, True)
        self.session.commit()
    except Exception as ex:
        if not self.handle_view_exception(ex):
            raise

        flash(gettext('Failed to create model. %(error)s', error=str(ex)), 'error')
        log.exception('Failed to create model')
        self.session.rollback()
        return False
    else:
        # Post-commit hook only fires on success.
        self.after_model_change(form, model, True)
        return True
def update_model(self, form, model):
    """
    Update model from form.

    :param form:
        Form instance
    :param model:
        Model instance
    :returns:
        ``True`` on success, ``False`` if the commit failed.
    """
    try:
        form.populate_obj(model)
        self._on_model_change(form, model, False)
        self.session.commit()
    except Exception as ex:
        if not self.handle_view_exception(ex):
            raise

        # Roll back and report, but keep the request alive.
        flash(gettext('Failed to update model. %(error)s', error=str(ex)), 'error')
        log.exception('Failed to update model')
        self.session.rollback()
        return False

    # Commit succeeded — run the post-change hook.
    self.after_model_change(form, model, False)
    return True
def delete_model(self, model):
    """
    Delete model.

    :param model:
        Model to delete
    :returns:
        ``True`` on success, ``False`` otherwise.
    """
    try:
        # Pre-delete hook may veto the deletion by raising.
        self.on_model_delete(model)
        # Flush pending changes first so SQLAlchemy can resolve
        # dependencies before the DELETE is issued.
        self.session.flush()
        self.session.delete(model)
        self.session.commit()
    except Exception as ex:
        if not self.handle_view_exception(ex):
            raise

        flash(gettext('Failed to delete model. %(error)s', error=str(ex)), 'error')
        log.exception('Failed to delete model')
        self.session.rollback()
        return False

    return True
# Default model actions
def is_action_allowed(self, name):
    """Disallow the bulk-delete action when ``can_delete`` is off."""
    # Check delete action permission
    if name == 'delete' and not self.can_delete:
        return False

    return super(ModelView, self).is_action_allowed(name)

@action('delete',
        lazy_gettext('Delete'),
        lazy_gettext('Are you sure you want to delete selected models?'))
def action_delete(self, ids):
    """Bulk-delete the models with the given primary keys.

    With ``fast_mass_delete`` a single DELETE statement is issued;
    otherwise rows are loaded and deleted one by one so SQLAlchemy can
    clean up relationships (see the ``fast_mass_delete`` docstring).
    """
    try:
        query = get_query_for_ids(self.get_query(), self.model, ids)

        if self.fast_mass_delete:
            count = query.delete(synchronize_session=False)
        else:
            count = 0

            for m in query.all():
                self.session.delete(m)
                count += 1

        self.session.commit()

        flash(ngettext('Model was successfully deleted.',
                       '%(count)s models were successfully deleted.',
                       count,
                       count=count))
    except Exception as ex:
        if not self.handle_view_exception(ex):
            raise

        flash(gettext('Failed to delete models. %(error)s', error=str(ex)), 'error')
|
bsd-3-clause
| 28,792,571,841,860,910
| 31.529217
| 120
| 0.532436
| false
| 4.72442
| false
| false
| false
|
twitter/pants
|
src/python/pants/backend/native/subsystems/native_build_settings.py
|
1
|
1424
|
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants.build_graph.mirrored_target_option_mixin import MirroredTargetOptionMixin
from pants.subsystem.subsystem import Subsystem
class NativeBuildSettings(Subsystem, MirroredTargetOptionMixin):
    """Settings which affect both the compile and link phases."""
    options_scope = 'native-build-settings'

    # Maps option name -> accessor reading the target-level override.
    mirrored_target_option_actions = {
        'strict_deps': lambda tgt: tgt.strict_deps,
    }

    @classmethod
    def register_options(cls, register):
        super(NativeBuildSettings, cls).register_options(register)
        # TODO: rename this so it's clear it is not the same option as JVM strict deps!
        register('--strict-deps', type=bool, default=True, fingerprint=True, advanced=True,
                 help="Whether to include only dependencies directly declared in the BUILD file "
                      "for C and C++ targets by default. If this is False, all transitive dependencies "
                      "are used when compiling and linking native code. C and C++ targets may override "
                      "this behavior with the strict_deps keyword argument as well.")

    def get_strict_deps_value_for_target(self, target):
        # The target-level strict_deps value wins over the subsystem default.
        return self.get_scalar_mirrored_target_option('strict_deps', target)
|
apache-2.0
| 2,196,205,430,313,482,000
| 44.935484
| 100
| 0.728933
| false
| 4.151603
| false
| false
| false
|
602p/mps-punctuality
|
app/oauth.py
|
1
|
2807
|
from flask import Flask, redirect, url_for, session, request, jsonify, flash
from flask_oauthlib.client import OAuth
from flask_login import login_user
import json
from . import util
from . import app, db
from . import models
oauth = OAuth(app)

# Google OAuth2 remote application; credentials come from app config.
google = oauth.remote_app(
    'google',
    consumer_key=app.config.get('GOOGLE_ID'),
    consumer_secret=app.config.get('GOOGLE_SECRET'),
    request_token_params={
        'scope': 'email'
    },
    base_url='https://www.googleapis.com/oauth2/v1/',
    request_token_url=None,
    access_token_method='POST',
    access_token_url='https://accounts.google.com/o/oauth2/token',
    authorize_url='https://accounts.google.com/o/oauth2/auth',
)

@app.route('/oauth_login')
def oauth_login():
    # Kick off the OAuth dance; Google redirects back to /login/authorized.
    return google.authorize(callback=url_for('oauth_authorized', _external=True))

@app.route('/login/authorized')
def oauth_authorized():
    """OAuth callback: log the user in, or create a disabled account."""
    resp = google.authorized_response()
    if resp is None:  # OAuth authorization failed
        flash("OAuth login failed: %s -> %s" % (request.args['error_reason'], request.args['error_description']))
        return redirect(url_for("home"))
    session['google_token'] = (resp['access_token'], '')  # Stick it in the session (if we potentially decide to use
                                                          # more of Google's API features later, e.g. mailing or
                                                          # whatever we'll need this for the OAuth scope in the
                                                          # API calls
    me = google.get('userinfo').data  # Snarf out the user's free data
    user = models.User.query.filter_by(username=me["email"], auth_provider="OAUTH").first()  # Is there a user with this
                                                                                             # email using OAuth already?
    if user:  # If so...
        return util.try_login_user(user)  # Proceed to try to log them in
    else:  # Otherwise
        user = models.User(  # Create a (disabled) account for them for the admin to enable later
            marss_id=-1,  # Cant find this w/o some kind of DB dump, if even applicable
            username=me["email"],  # Google's return gaurenteed to have email, this is the username for OAuth accounts
            name=me["name"],  # Google's return sometimes has name, otherwise empty string
            email=me["email"],  # Store it here too
            auth_provider="OAUTH",  # Use OAUTH provider, duh!
            enabled=False  # And leave them disabled
        )  # Default permission='view'
        db.session.add(user)
        db.session.commit()
        flash("Please wait for an Administrator to enable your account")
        return redirect(url_for("login_user_page"))

@google.tokengetter
def get_google_oauth_token():
    # flask-oauthlib hook: supply the stored token for signed API calls.
    return session.get('google_token')
|
gpl-3.0
| 1,188,039,750,926,177,800
| 45.8
| 119
| 0.615604
| false
| 4.068116
| false
| false
| false
|
tomchristie/django-rest-framework
|
rest_framework/compat.py
|
4
|
4387
|
"""
The `compat` module provides support for backwards compatibility with older
versions of Django/Python, and compatibility wrappers around optional packages.
"""
from django.conf import settings
from django.views.generic import View
def unicode_http_header(value):
    """Coerce an HTTP header value to a text string.

    Raw header values arrive as ISO-8859-1 encoded bytes (per HTTP/1.1);
    text input is passed through unchanged.
    """
    return value.decode('iso-8859-1') if isinstance(value, bytes) else value
def distinct(queryset, base):
    """Return *queryset* with duplicate rows removed.

    Oracle cannot apply ``DISTINCT`` uniformly, so on that backend the
    distinct primary keys are collected first and *base* is filtered by
    them instead.
    """
    engine = settings.DATABASES[queryset.db]["ENGINE"]

    if engine == "django.db.backends.oracle":
        # distinct analogue for Oracle users
        pks = set(queryset.values_list('pk', flat=True))
        return base.filter(pk__in=pks)

    return queryset.distinct()
# django.contrib.postgres requires psycopg2
try:
    from django.contrib.postgres import fields as postgres_fields
except ImportError:
    postgres_fields = None


# coreapi is required for CoreAPI schema generation
try:
    import coreapi
except ImportError:
    coreapi = None

# uritemplate is required for OpenAPI and CoreAPI schema generation
try:
    import uritemplate
except ImportError:
    uritemplate = None


# coreschema is optional
try:
    import coreschema
except ImportError:
    coreschema = None


# pyyaml is optional
try:
    import yaml
except ImportError:
    yaml = None

# requests is optional
try:
    import requests
except ImportError:
    requests = None


# PATCH method is not implemented by Django
if 'patch' not in View.http_method_names:
    View.http_method_names = View.http_method_names + ['patch']


# Markdown is optional (version 3.0+ required)
try:
    import markdown

    HEADERID_EXT_PATH = 'markdown.extensions.toc'
    LEVEL_PARAM = 'baselevel'

    def apply_markdown(text):
        """
        Simple wrapper around :func:`markdown.markdown` to set the base level
        of '#' style headers to <h2>.
        """
        extensions = [HEADERID_EXT_PATH]
        extension_configs = {
            HEADERID_EXT_PATH: {
                LEVEL_PARAM: '2'
            }
        }
        md = markdown.Markdown(
            extensions=extensions, extension_configs=extension_configs
        )
        md_filter_add_syntax_highlight(md)
        return md.convert(text)
except ImportError:
    apply_markdown = None
    markdown = None


try:
    import pygments
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import TextLexer, get_lexer_by_name

    def pygments_highlight(text, lang, style):
        lexer = get_lexer_by_name(lang, stripall=False)
        formatter = HtmlFormatter(nowrap=True, style=style)
        return pygments.highlight(text, lexer, formatter)

    def pygments_css(style):
        formatter = HtmlFormatter(style=style)
        return formatter.get_style_defs('.highlight')
except ImportError:
    pygments = None

    # Graceful no-op fallbacks when Pygments is unavailable.
    def pygments_highlight(text, lang, style):
        return text

    def pygments_css(style):
        return None

if markdown is not None and pygments is not None:
    # starting from this blogpost and modified to support current markdown extensions API
    # https://zerokspot.com/weblog/2008/06/18/syntax-highlighting-in-markdown-with-pygments/
    import re

    from markdown.preprocessors import Preprocessor

    class CodeBlockPreprocessor(Preprocessor):
        pattern = re.compile(
            r'^\s*``` *([^\n]+)\n(.+?)^\s*```', re.M | re.S)

        formatter = HtmlFormatter()

        def run(self, lines):
            def repl(m):
                try:
                    lexer = get_lexer_by_name(m.group(1))
                except (ValueError, NameError):
                    lexer = TextLexer()
                code = m.group(2).replace('\t', ' ')
                code = pygments.highlight(code, lexer, self.formatter)
                code = code.replace('\n\n', '\n \n').replace('\n', '<br />').replace('\\@', '@')
                return '\n\n%s\n\n' % code
            ret = self.pattern.sub(repl, "\n".join(lines))
            return ret.split("\n")

    def md_filter_add_syntax_highlight(md):
        md.preprocessors.register(CodeBlockPreprocessor(), 'highlight', 40)
        return True
else:
    def md_filter_add_syntax_highlight(md):
        return False


# `separators` argument to `json.dumps()` differs between 2.x and 3.x
# See: https://bugs.python.org/issue22767
SHORT_SEPARATORS = (',', ':')
LONG_SEPARATORS = (', ', ': ')
INDENT_SEPARATORS = (',', ': ')
|
bsd-2-clause
| -7,791,166,050,184,486,000
| 26.591195
| 101
| 0.644404
| false
| 4.010055
| false
| false
| false
|
mwalli/spark-cloudant
|
sql-cloudant/examples/python/CloudantDFOption.py
|
2
|
2848
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyspark.sql import SparkSession

spark = SparkSession\
    .builder\
    .appName("Cloudant Spark SQL Example in Python using dataframes with options")\
    .getOrCreate()

# Connection constants: replace with real Cloudant account credentials.
cloudant_host = "ACCOUNT.cloudant.com"
cloudant_username = "USERNAME"
cloudant_password = "PASSWORD"

# ***1. Loading dataframe from Cloudant db
df = spark.read.format("org.apache.bahir.cloudant") \
    .option("cloudant.host", cloudant_host) \
    .option("cloudant.username", cloudant_username) \
    .option("cloudant.password", cloudant_password) \
    .load("n_airportcodemapping")
df.cache()  # persisting in memory
df.printSchema()
df.filter(df._id >= 'CAA').select("_id", 'airportName').show()

# ***2.Saving dataframe to Cloudant db
df.filter(df._id >= 'CAA').select("_id", 'airportName') \
    .write.format("org.apache.bahir.cloudant") \
    .option("cloudant.host", cloudant_host) \
    .option("cloudant.username", cloudant_username) \
    .option("cloudant.password", cloudant_password) \
    .option("bulkSize", "100") \
    .option("createDBOnSave", "true") \
    .save("airportcodemapping_df")

df = spark.read.format("org.apache.bahir.cloudant") \
    .option("cloudant.host", cloudant_host) \
    .option("cloudant.username", cloudant_username) \
    .option("cloudant.password", cloudant_password) \
    .load("n_flight")
df.printSchema()
total = df.filter(df.flightSegmentId > 'AA9') \
    .select("flightSegmentId", "scheduledDepartureTime") \
    .orderBy(df.flightSegmentId).count()
print("Total", total, "flights from table")

# ***3. Loading dataframe from Cloudant search index
df = spark.read.format("org.apache.bahir.cloudant") \
    .option("cloudant.host", cloudant_host) \
    .option("cloudant.username", cloudant_username) \
    .option("cloudant.password", cloudant_password) \
    .option("index", "_design/view/_search/n_flights").load("n_flight")
df.printSchema()
total = df.filter(df.flightSegmentId > 'AA9') \
    .select("flightSegmentId", "scheduledDepartureTime") \
    .orderBy(df.flightSegmentId).count()
print("Total", total, "flights from index")
|
apache-2.0
| 5,622,174,105,918,317,000
| 39.112676
| 83
| 0.716994
| false
| 3.456311
| false
| false
| false
|
marble/Toolchain_RenderDocumentation
|
36-Get-ready-for-publishing/run_01-Treat-pdf-folder.py
|
1
|
4291
|
#!/usr/bin/env python
# coding: utf-8

from __future__ import print_function
from __future__ import absolute_import

import os
import tct
import sys

# Toolchain boilerplate: read the params/facts/milestones JSON files that
# the runner hands to every tool script.
params = tct.readjson(sys.argv[1])
binabspath = sys.argv[2]
facts = tct.readjson(params['factsfile'])
milestones = tct.readjson(params['milestonesfile'])
reason = ''
resultfile = params['resultfile']
result = tct.readjson(resultfile)
loglist = result['loglist'] = result.get('loglist', [])
toolname = params['toolname']
toolname_pure = params['toolname_pure']
toolchain_name = facts['toolchain_name']
workdir = params['workdir']
exitcode = CONTINUE = 0


# ==================================================
# Make a copy of milestones for later inspection?
# --------------------------------------------------

if 0 or milestones.get('debug_always_make_milestones_snapshot'):
    tct.make_snapshot_of_milestones(params['milestonesfile'], sys.argv[1])


# ==================================================
# Get and check required milestone(s)
# --------------------------------------------------

def milestones_get(name, default=None):
    """Fetch a milestone value and record the lookup in the log."""
    result = milestones.get(name, default)
    loglist.append((name, result))
    return result

def facts_get(name, default=None):
    """Fetch a fact value and record the lookup in the log."""
    result = facts.get(name, default)
    loglist.append((name, result))
    return result

def params_get(name, default=None):
    """Fetch a parameter value and record the lookup in the log."""
    result = params.get(name, default)
    loglist.append((name, result))
    return result


# ==================================================
# define
# --------------------------------------------------

pdf_dest_folder_htaccess = ''
pdf_url_relpath = ''
xeq_name_cnt = 0


# ==================================================
# Check params
# --------------------------------------------------

if exitcode == CONTINUE:
    loglist.append('CHECK PARAMS')

    # required milestones
    requirements = ['configset']

    # just test
    for requirement in requirements:
        v = milestones_get(requirement)
        if not v:
            loglist.append("'%s' not found" % requirement)
            exitcode = 22
            reason = 'Bad PARAMS or nothing to do'

if exitcode == CONTINUE:
    configset = milestones_get('configset')

    # fetch
    webroot_abspath = tct.deepget(facts, 'tctconfig', configset, 'webroot_abspath')
    loglist.append(('webroot_abspath', webroot_abspath))
    if not webroot_abspath:
        exitcode = 22
        reason = 'Bad PARAMS or nothing to do'

if exitcode == CONTINUE:
    loglist.append('PARAMS are ok')
else:
    loglist.append('Bad PARAMS or nothing to do')


# ==================================================
# work
# --------------------------------------------------

if exitcode == CONTINUE:
    pdf_dest_file = milestones_get('pdf_dest_file')
    pdf_dest_folder = milestones_get('pdf_dest_folder')
    publish_dir_pdf_planned = milestones_get('publish_dir_pdf_planned')
    if not (pdf_dest_file and pdf_dest_folder and publish_dir_pdf_planned):
        CONTINUE = -2
        reason = 'Nothing to do'
        loglist.append(reason)

if exitcode == CONTINUE:
    # Write an .htaccess that redirects any request into the pdf folder
    # to the published PDF file's web-relative URL.
    temp = os.path.join(publish_dir_pdf_planned, os.path.split(pdf_dest_file)[1])
    pdf_url_relpath = temp[len(webroot_abspath):]
    loglist.append(('pdf_url_relpath', pdf_url_relpath))
    htaccess_contents = (
        "RewriteEngine On\n"
        "RewriteCond %{REQUEST_FILENAME} !-f\n"
        "RewriteRule ^(.*)$ " + pdf_url_relpath + " [L,R=301]\n")
    pdf_dest_folder_htaccess = os.path.join(pdf_dest_folder, '.htaccess')
    with open(pdf_dest_folder_htaccess, 'w') as f2:
        f2.write(htaccess_contents)


# ==================================================
# Set MILESTONE
# --------------------------------------------------

# NOTE(review): both appends are guarded by pdf_url_relpath; the first was
# presumably meant to check pdf_dest_folder_htaccess — confirm.
if pdf_url_relpath:
    result['MILESTONES'].append({'pdf_dest_folder_htaccess': pdf_dest_folder_htaccess})

if pdf_url_relpath:
    result['MILESTONES'].append({'pdf_url_relpath': pdf_url_relpath})


# ==================================================
# save result
# --------------------------------------------------

tct.save_the_result(result, resultfile, params, facts, milestones, exitcode, CONTINUE, reason)


# ==================================================
# Return with proper exitcode
# --------------------------------------------------

sys.exit(exitcode)
|
mit
| 8,712,881,131,675,113,000
| 27.798658
| 94
| 0.543696
| false
| 3.760736
| false
| false
| false
|
keon/algorithms
|
algorithms/matrix/crout_matrix_decomposition.py
|
1
|
1286
|
"""
Crout matrix decomposition is used to find two matrices that, when multiplied
give our input matrix, so L * U = A.
L stands for lower and L has non-zero elements only on diagonal and below.
U stands for upper and U has non-zero elements only on diagonal and above.
This can for example be used to solve systems of linear equations.
The last if is used if to avoid dividing by zero.
Example:
We input the A matrix:
[[1,2,3],
[3,4,5],
[6,7,8]]
We get:
L = [1.0, 0.0, 0.0]
[3.0, -2.0, 0.0]
[6.0, -5.0, 0.0]
U = [1.0, 2.0, 3.0]
[0.0, 1.0, 2.0]
[0.0, 0.0, 1.0]
We can check that L * U = A.
I think the complexity should be O(n^3).
"""
def crout_matrix_decomposition(A):
    """Decompose square matrix ``A`` into ``L`` (lower) and ``U`` (unit
    upper-triangular) such that ``L * U == A`` (Crout's scheme).

    Returns the pair ``(L, U)`` as nested lists of floats.  When a pivot
    truncates to zero, a tiny value (``0.1**40``) is substituted to avoid
    division by zero.  Runs in O(n^3).
    """
    n = len(A)
    lower = [[0.0] * n for _ in range(n)]
    upper = [[0.0] * n for _ in range(n)]

    for col in range(n):
        # U carries a unit diagonal in Crout's scheme.
        upper[col][col] = 1.0

        # Fill column `col` of L (diagonal and below).
        for row in range(col, n):
            acc = float(A[row][col])
            for k in range(col):
                acc -= lower[row][k] * upper[k][col]
            lower[row][col] = float(acc)

        # Fill row `col` of U to the right of the diagonal.
        for row in range(col + 1, n):
            acc = float(A[col][row])
            for k in range(col):
                acc -= float(lower[col][k] * upper[k][row])
            if int(lower[col][col]) == 0:
                # Substitute a tiny pivot to dodge division by zero.
                lower[col][col] = float(0.1 ** 40)
            upper[col][row] = float(acc / lower[col][col])

    return (lower, upper)
|
mit
| -7,678,984,974,903,344,000
| 26.361702
| 77
| 0.517107
| false
| 2.59798
| false
| false
| false
|
nine/webcam
|
annotate.py
|
1
|
1085
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
from datetime import datetime
from wand.image import Image
from wand.drawing import Drawing
from wand.color import Color

def readStdinBinary():
    """Read the raw image bytes from stdin."""
    return sys.stdin.buffer.read()

def main():
    """Overlay caption text and the EXIF timestamp onto a webcam JPEG.

    Reads a JPEG from stdin and writes the annotated JPEG to stdout.
    """
    img_binary = readStdinBinary()
    with Drawing() as draw:
        with Image(blob=img_binary) as img:
            # Top-left caption: location name, white on translucent black.
            draw.font_size = 32
            draw.fill_color = Color('#ffffff')
            draw.text_under_color = Color('#00000080')
            draw.gravity = "north_west"
            draw.text(0, 0, u' Landhaus Nindl – Hollersbach im Pinzgau ')
            # Bottom-right caption: site URL.
            draw.gravity = "south_east"
            draw.text(0, 0, u' webcam.hollersbach.eu ')
            # exif timestamp example:
            # 2014:08:29 17:49:00
            img_time = datetime.strptime(img.metadata['exif:DateTimeOriginal'], '%Y:%m:%d %H:%M:%S').strftime('%d.%m.%Y %H:%M')
            # Timestamp line just below the location caption.
            draw.gravity = "north_west"
            draw.font_size = 26
            draw.text(0, 38, ' ' + img_time + ' ')
            draw(img)
            jpeg_bin = img.make_blob('jpeg')
    sys.stdout.buffer.write(jpeg_bin)

if __name__ == '__main__':
    main()

#eof
|
gpl-3.0
| 8,877,753,994,074,388,000
| 24.186047
| 121
| 0.617729
| false
| 3
| false
| false
| false
|
punchagan/zulip
|
zerver/lib/markdown/__init__.py
|
1
|
105624
|
# Zulip's main Markdown implementation. See docs/subsystems/markdown.md for
# detailed documentation on our Markdown syntax.
import datetime
import functools
import html
import logging
import re
import time
import urllib
import urllib.parse
from collections import defaultdict, deque
from dataclasses import dataclass
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Match,
Optional,
Pattern,
Set,
Tuple,
TypeVar,
Union,
)
from urllib.parse import urlencode, urlsplit
from xml.etree import ElementTree as etree
from xml.etree.ElementTree import Element, SubElement
import ahocorasick
import dateutil.parser
import dateutil.tz
import markdown
import markdown.blockprocessors
import markdown.inlinepatterns
import markdown.postprocessors
import markdown.treeprocessors
import markdown.util
import requests
from django.conf import settings
from django.db.models import Q
from markdown.blockparser import BlockParser
from markdown.extensions import codehilite, nl2br, sane_lists, tables
from tlds import tld_set
from typing_extensions import TypedDict
from zerver.lib import mention as mention
from zerver.lib.cache import NotFoundInCache, cache_with_key
from zerver.lib.camo import get_camo_url
from zerver.lib.emoji import (
codepoint_to_name,
emoticon_regex,
name_to_codepoint,
translate_emoticons,
)
from zerver.lib.exceptions import MarkdownRenderingException
from zerver.lib.markdown import fenced_code
from zerver.lib.markdown.fenced_code import FENCE_RE
from zerver.lib.mention import extract_user_group, possible_mentions, possible_user_group_mentions
from zerver.lib.subdomains import is_static_or_current_realm_url
from zerver.lib.tex import render_tex
from zerver.lib.thumbnail import user_uploads_or_external
from zerver.lib.timeout import TimeoutExpired, timeout
from zerver.lib.timezone import common_timezones
from zerver.lib.types import LinkifierDict
from zerver.lib.url_encoding import encode_stream, hash_util_encode
from zerver.lib.url_preview import preview as link_preview
from zerver.models import (
MAX_MESSAGE_LENGTH,
Message,
Realm,
UserGroup,
UserGroupMembership,
UserProfile,
get_active_streams,
linkifiers_for_realm,
)
ReturnT = TypeVar("ReturnT")


def one_time(method: Callable[[], ReturnT]) -> Callable[[], ReturnT]:
    """
    Use this decorator with extreme caution.
    The function you wrap should have no dependency
    on any arguments (no args, no kwargs) nor should
    it depend on any global state.

    The wrapped callable is invoked at most once; every later call
    returns the cached result.
    """
    # Bug fix: use a dedicated sentinel rather than None, so a method
    # that legitimately returns None is still only invoked once instead
    # of being re-run on every call.
    unset = object()
    val: Any = unset

    def cache_wrapper() -> ReturnT:
        nonlocal val
        if val is unset:
            val = method()
        return val

    return cache_wrapper
class FullNameInfo(TypedDict):
    """Bundle of a user's id, email, and full name."""

    id: int
    email: str
    full_name: str
class LinkInfo(TypedDict):
    """Where and how to attach an inline preview for a link.

    parent: element the preview is appended to.
    title: link title, when the link text differs from its URL.
    index: insertion position within parent, or None to append.
    remove: element to delete once the preview is added, or None.
    """

    parent: Element
    title: Optional[str]
    index: Optional[int]
    remove: Optional[Element]
# Free-form per-render data; e.g. db_data["realm_uri"] is read by
# rewrite_local_links_to_relative() below.
DbData = Dict[str, Any]

# Format version of the Markdown rendering; stored along with rendered
# messages so that we can efficiently determine what needs to be re-rendered
version = 1

_T = TypeVar("_T")
# An Element, a string, or None.
ElementStringNone = Union[Element, Optional[str]]

# Matches :emoji_name: syntax; the whole token is captured as "syntax".
EMOJI_REGEX = r"(?P<syntax>:[\w\-\+]+:)"
def verbose_compile(pattern: str) -> Pattern[str]:
    """Compile ``pattern`` wrapped in lazy ``^(.*?)``/``(.*?)$`` groups.

    DOTALL, UNICODE, and VERBOSE flags are applied, so ``pattern`` may
    contain insignificant whitespace and comments.
    """
    wrapped = "^(.*?)" + pattern + "(.*?)$"
    flags = re.DOTALL | re.UNICODE | re.VERBOSE
    return re.compile(wrapped, flags)
def normal_compile(pattern: str) -> Pattern[str]:
    """Compile ``pattern`` with a lazy prefix group and a greedy suffix
    group; unlike verbose_compile, VERBOSE is not applied."""
    wrapped = f"^(.*?){pattern}(.*)$"
    return re.compile(wrapped, re.DOTALL | re.UNICODE)
STREAM_LINK_REGEX = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
\#\*\* # and after hash sign followed by double asterisks
(?P<stream_name>[^\*]+) # stream name can contain anything
\*\* # ends by double asterisks
"""
@one_time
def get_compiled_stream_link_regex() -> Pattern[str]:
    """Compile STREAM_LINK_REGEX exactly once.

    verbose_compile is deliberately not used here: its ``^(.*?)`` /
    ``(.*?)$`` wrapping adds matching overhead that the newer
    InlineProcessor API no longer requires.
    """
    flags = re.DOTALL | re.UNICODE | re.VERBOSE
    return re.compile(STREAM_LINK_REGEX, flags)
STREAM_TOPIC_LINK_REGEX = r"""
(?<![^\s'"\(,:<]) # Start after whitespace or specified chars
\#\*\* # and after hash sign followed by double asterisks
(?P<stream_name>[^\*>]+) # stream name can contain anything except >
> # > acts as separator
(?P<topic_name>[^\*]+) # topic name can contain anything
\*\* # ends by double asterisks
"""
@one_time
def get_compiled_stream_topic_link_regex() -> Pattern[str]:
    """Compile STREAM_TOPIC_LINK_REGEX exactly once.

    As with get_compiled_stream_link_regex, the ``^(.*?)`` / ``(.*?)$``
    wrapping done by verbose_compile is skipped because the
    InlineProcessor API does not need it and it slows matching down.
    """
    flags = re.DOTALL | re.UNICODE | re.VERBOSE
    return re.compile(STREAM_TOPIC_LINK_REGEX, flags)
# Lazily-built cache for get_web_link_regex(); reset by
# clear_state_for_testing().
LINK_REGEX: Optional[Pattern[str]] = None


def get_web_link_regex() -> Pattern[str]:
    """Return the (cached) regex used to detect URLs in message text."""
    # We create this one time, but not at startup. So the
    # first message rendered in any process will have some
    # extra costs. It's roughly 75ms to run this code, so
    # caching the value in LINK_REGEX is super important here.
    global LINK_REGEX
    if LINK_REGEX is not None:
        return LINK_REGEX

    tlds = "|".join(list_of_tlds())

    # A link starts at a word boundary, and ends at space, punctuation, or end-of-input.
    #
    # We detect a URL either by the `https?://` or by building around the TLD.

    # In lieu of having a recursive regex (which python doesn't support) to match
    # arbitrary numbers of nested matching parenthesis, we manually build a regexp that
    # can match up to six
    # The inner_paren_contents chunk matches the innermore non-parenthesis-holding text,
    # and the paren_group matches text with, optionally, a matching set of parens
    inner_paren_contents = r"[^\s()\"]*"
    paren_group = r"""
                    [^\s()\"]*?            # Containing characters that won't end the URL
                    (?: \( %s \)           # and more characters in matched parens
                        [^\s()\"]*?        # followed by more characters
                    )*                     # zero-or-more sets of paired parens
                   """
    # Substitute paren_group into itself six times, innermost last.
    nested_paren_chunk = paren_group
    for i in range(6):
        nested_paren_chunk = nested_paren_chunk % (paren_group,)
    nested_paren_chunk = nested_paren_chunk % (inner_paren_contents,)

    file_links = r"| (?:file://(/[^/ ]*)+/?)" if settings.ENABLE_FILE_LINKS else r""
    REGEX = fr"""
        (?<![^\s'"\(,:<])    # Start after whitespace or specified chars
                             # (Double-negative lookbehind to allow start-of-string)
        (?P<url>             # Main group
            (?:(?:           # Domain part
                https?://[\w.:@-]+?   # If it has a protocol, anything goes.
               |(?:                   # Or, if not, be more strict to avoid false-positives
                    (?:[\w-]+\.)+     # One or more domain components, separated by dots
                    (?:{tlds})        # TLDs
                )
            )
            (?:/             # A path, beginning with /
                {nested_paren_chunk}           # zero-to-6 sets of paired parens
            )?)              # Path is optional
            | (?:[\w.-]+\@[\w.-]+\.[\w]+) # Email is separate, since it can't have a path
            {file_links}               # File path start with file:///, enable by setting ENABLE_FILE_LINKS=True
            | (?:bitcoin:[13][a-km-zA-HJ-NP-Z1-9]{{25,34}})  # Bitcoin address pattern, see https://mokagio.github.io/tech-journal/2014/11/21/regex-bitcoin.html
        )
        (?=                            # URL must be followed by (not included in group)
            [!:;\?\),\.\'\"\>]*         # Optional punctuation characters
            (?:\Z|\s)                  # followed by whitespace or end of string
        )
        """
    LINK_REGEX = verbose_compile(REGEX)
    return LINK_REGEX
def clear_state_for_testing() -> None:
    """Reset the cached LINK_REGEX.

    In production the link regex never changes, but the test suite
    exercises both values of ENABLE_FILE_LINKS and therefore needs a
    way to force it to be rebuilt.
    """
    global LINK_REGEX
    LINK_REGEX = None
# Module-level logger; note this is the *root* logger (getLogger() with no name).
markdown_logger = logging.getLogger()
def rewrite_local_links_to_relative(db_data: Optional[DbData], link: str) -> str:
    """If the link points to a local destination (e.g. #narrow/...),
    generate a relative link that will open it in the current window.
    """
    if not db_data:
        return link
    prefix = db_data["realm_uri"] + "/"
    if link.startswith(prefix):
        suffix = link[len(prefix):]
        # Only strip the prefix when the remainder resolves back to the
        # same URL; urljoin normalizes the candidate for us.
        if urllib.parse.urljoin(prefix, suffix) == link:
            return suffix
    return link
def url_embed_preview_enabled(
    message: Optional[Message] = None, realm: Optional[Realm] = None, no_previews: bool = False
) -> bool:
    """Whether URL embed previews should be rendered for this message/realm."""
    if not settings.INLINE_URL_EMBED_PREVIEW or no_previews:
        return False

    if realm is None and message is not None:
        realm = message.get_realm()

    if realm is None:
        # realm can be None for odd use cases
        # like generating documentation or running
        # test code
        return True

    return realm.inline_url_embed_preview
def image_preview_enabled(
    message: Optional[Message] = None, realm: Optional[Realm] = None, no_previews: bool = False
) -> bool:
    """Whether inline image previews should be rendered for this message/realm."""
    if not settings.INLINE_IMAGE_PREVIEW or no_previews:
        return False

    if realm is None and message is not None:
        realm = message.get_realm()

    if realm is None:
        # realm can be None for odd use cases
        # like generating documentation or running
        # test code
        return True

    return realm.inline_image_preview
def list_of_tlds() -> List[str]:
    """TLDs usable for URL detection, longest first, minus a few
    extensions that commonly appear as file suffixes."""
    # Skip a few overly-common false-positives from file extensions
    excluded = {"java", "md", "mov", "py", "zip"}
    return sorted(tld_set - excluded, key=len, reverse=True)
def walk_tree(
    root: Element, processor: Callable[[Element], Optional[_T]], stop_after_first: bool = False
) -> List[_T]:
    """Breadth-first walk over root's descendants, collecting the
    non-None results of processor(child).  With stop_after_first, return
    as soon as the first result is found."""
    collected: List[_T] = []
    pending = deque([root])
    while pending:
        node = pending.popleft()
        for child in node:
            # Element truthiness: only elements that themselves have
            # children are queued, but every child is processed.
            if child:
                pending.append(child)
            outcome = processor(child)
            if outcome is None:
                continue
            collected.append(outcome)
            if stop_after_first:
                return collected
    return collected
@dataclass
class ElementFamily:
    """Ancestry context for an element matched during a tree walk."""

    grandparent: Optional[Element]  # None when the parent is the walk's root
    parent: Element
    child: Element  # the element the processor matched
    in_blockquote: bool  # True if any ancestor is a <blockquote>


T = TypeVar("T")
class ResultWithFamily(Generic[T]):
    """A processor result paired with the element family it came from."""

    family: ElementFamily
    result: T

    def __init__(self, family: ElementFamily, result: T):
        self.family = family
        self.result = result
class ElementPair:
    """A tree element bundled with a link to its parent pair, forming an
    upward-linked chain used to inspect ancestors during traversal."""

    parent: Optional["ElementPair"]
    value: Element

    def __init__(self, parent: Optional["ElementPair"], value: Element):
        self.parent = parent
        self.value = value
def walk_tree_with_family(
    root: Element,
    processor: Callable[[Element], Optional[_T]],
) -> List[ResultWithFamily[_T]]:
    """Like walk_tree, but each non-None processor result is returned
    together with the matched element's parent/grandparent context."""
    results = []
    queue = deque([ElementPair(parent=None, value=root)])
    while queue:
        currElementPair = queue.popleft()
        for child in currElementPair.value:
            # Element truthiness: only elements with children are queued,
            # but every child is processed.
            if child:
                queue.append(ElementPair(parent=currElementPair, value=child))
            result = processor(child)
            if result is not None:
                if currElementPair.parent is not None:
                    grandparent_element = currElementPair.parent
                    grandparent: Optional[Element] = grandparent_element.value
                else:
                    grandparent = None
                family = ElementFamily(
                    grandparent=grandparent,
                    parent=currElementPair.value,
                    child=child,
                    in_blockquote=has_blockquote_ancestor(currElementPair),
                )

                results.append(
                    ResultWithFamily(
                        family=family,
                        result=result,
                    )
                )

    return results
def has_blockquote_ancestor(element_pair: Optional[ElementPair]) -> bool:
    """True if element_pair's element, or any ancestor in its chain,
    is a <blockquote>."""
    node = element_pair
    while node is not None:
        if node.value.tag == "blockquote":
            return True
        node = node.parent
    return False
@cache_with_key(lambda tweet_id: tweet_id, cache_name="database", with_statsd_key="tweet_data")
def fetch_tweet_data(tweet_id: str) -> Optional[Dict[str, Any]]:
    """Fetch a tweet's data from the Twitter API (mocked in tests), cached by id.

    Returns the tweet as a dict, or None for permanent failures (which
    are then cached); transient errors (timeouts, rate limits, capacity
    issues) are re-raised so that the failure is not cached.
    """
    if settings.TEST_SUITE:
        from . import testing_mocks

        res = testing_mocks.twitter(tweet_id)
    else:
        creds = {
            "consumer_key": settings.TWITTER_CONSUMER_KEY,
            "consumer_secret": settings.TWITTER_CONSUMER_SECRET,
            "access_token_key": settings.TWITTER_ACCESS_TOKEN_KEY,
            "access_token_secret": settings.TWITTER_ACCESS_TOKEN_SECRET,
        }
        if not all(creds.values()):
            return None

        # We lazily import twitter here because its import process is
        # surprisingly slow, and doing so has a significant impact on
        # the startup performance of `manage.py` commands.
        import twitter

        api = twitter.Api(tweet_mode="extended", **creds)

        try:
            # Sometimes Twitter hangs on responses. Timing out here
            # will cause the Tweet to go through as-is with no inline
            # preview, rather than having the message be rejected
            # entirely. This timeout needs to be less than our overall
            # formatting timeout.
            tweet = timeout(3, lambda: api.GetStatus(tweet_id))
            res = tweet.AsDict()
        except TimeoutExpired:
            # We'd like to try again later and not cache the bad result,
            # so we need to re-raise the exception (just as though
            # we were being rate-limited)
            raise
        except twitter.TwitterError as e:
            t = e.args[0]
            if len(t) == 1 and ("code" in t[0]):
                # https://developer.twitter.com/en/docs/basics/response-codes
                code = t[0]["code"]
                if code in [34, 144, 421, 422]:
                    # All these "correspond with HTTP 404," and mean
                    # that the message doesn't exist; return None so
                    # that we will cache the error.
                    return None
                elif code in [63, 179]:
                    # 63 is that the account is suspended, 179 is that
                    # it is now locked; cache the None.
                    return None
                elif code in [88, 130, 131]:
                    # Code 88 means that we were rate-limited, 130
                    # means Twitter is having capacity issues, and 131
                    # is other 400-equivalent; in these cases, raise
                    # the error so we don't cache None and will try
                    # again later.
                    raise
            # It's not clear what to do in cases of other errors,
            # but for now it seems reasonable to log at error
            # level (so that we get notified), but then cache the
            # failure to proceed with our usual work
            markdown_logger.exception("Unknown error fetching tweet data", stack_info=True)
            return None
    return res
# Crude tag matchers used by fetch_open_graph_image() to slice the <head>
# out of fetched HTML without a full parse.  Each is matched against the
# chunk of text that followed a "<" in the raw document.
HEAD_START_RE = re.compile("^head[ >]")
HEAD_END_RE = re.compile("^/head[ >]")
META_START_RE = re.compile("^meta[ >]")
META_END_RE = re.compile("^/meta[ >]")
def fetch_open_graph_image(url: str) -> Optional[Dict[str, Any]]:
    """Fetch `url` and scrape Open Graph tags from its <head>.

    Returns {"image": ..., "title": ..., "desc": ...} (title/desc may be
    None), or None when the page cannot be fetched/parsed or has no
    og:image tag.
    """
    in_head = False
    # HTML will auto close meta tags, when we start the next tag add
    # a closing tag if it has not been closed yet.
    last_closed = True
    head = []
    # TODO: What if response content is huge? Should we get headers first?
    try:
        content = requests.get(url, timeout=1).text
    except requests.RequestException:
        return None
    # Extract the head and meta tags
    # All meta tags are self closing, have no children or are closed
    # automatically.
    for part in content.split("<"):
        if not in_head and HEAD_START_RE.match(part):
            # Started the head node output it to have a document root
            in_head = True
            head.append("<head>")
        elif in_head and HEAD_END_RE.match(part):
            # Found the end of the head close any remaining tag then stop
            # processing
            in_head = False
            if not last_closed:
                last_closed = True
                head.append("</meta>")
            head.append("</head>")
            break
        elif in_head and META_START_RE.match(part):
            # Found a meta node copy it
            if not last_closed:
                head.append("</meta>")
                last_closed = True
            head.append("<")
            head.append(part)
            if "/>" not in part:
                last_closed = False
        elif in_head and META_END_RE.match(part):
            # End of a meta node just copy it to close the tag
            head.append("<")
            head.append(part)
            last_closed = True
    try:
        doc = etree.fromstring("".join(head))
    except etree.ParseError:
        return None
    og_image = doc.find('meta[@property="og:image"]')
    og_title = doc.find('meta[@property="og:title"]')
    og_desc = doc.find('meta[@property="og:description"]')
    title = None
    desc = None
    # og:image is mandatory for a preview; title/description are optional.
    if og_image is not None:
        image = og_image.get("content")
    else:
        return None
    if og_title is not None:
        title = og_title.get("content")
    if og_desc is not None:
        desc = og_desc.get("content")
    return {"image": image, "title": title, "desc": desc}
def get_tweet_id(url: str) -> Optional[str]:
    """Extract the numeric tweet id from a twitter.com URL, or None."""
    parsed = urllib.parse.urlparse(url)
    is_twitter = parsed.netloc == "twitter.com" or parsed.netloc.endswith(".twitter.com")
    if not is_twitter:
        return None
    # In old-style twitter.com/#!/wdaher/status/1231241234-style URLs,
    # the interesting part lives in the fragment rather than the path.
    candidate = parsed.path
    if parsed.path == "/" and len(parsed.fragment) > 5:
        candidate = parsed.fragment

    match = re.match(
        r"^!?/.*?/status(es)?/(?P<tweetid>\d{10,30})(/photo/[0-9])?/?$", candidate
    )
    return match.group("tweetid") if match else None
class InlineImageProcessor(markdown.treeprocessors.Treeprocessor):
    """
    Rewrite inline img tags to serve external content via Camo.

    This rewrites all images, except ones that are served from the current
    realm or global STATIC_URL. This is to ensure that each realm only loads
    images that are hosted on that realm or by the global installation,
    avoiding information leakage to external domains or between realms. We need
    to disable proxying of images hosted on the same realm, because otherwise
    we will break images in /user_uploads/, which require authorization to
    view.
    """

    def run(self, root: Element) -> None:
        # Get all URLs from the blob
        found_imgs = walk_tree(root, lambda e: e if e.tag == "img" else None)
        for img in found_imgs:
            url = img.get("src")
            assert url is not None
            if is_static_or_current_realm_url(url, self.md.zulip_realm):
                # Don't rewrite images on our own site (e.g. emoji, user uploads).
                continue
            # External image: route it through the Camo proxy instead.
            img.set("src", get_camo_url(url))
class BacktickInlineProcessor(markdown.inlinepatterns.BacktickInlineProcessor):
    """ Return a `<code>` element containing the matching text. """

    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        # Let upstream's implementation do its job as it is, we'll
        # just replace the text to not strip the group because it
        # makes it impossible to put leading/trailing whitespace in
        # an inline code block.
        el, start, end = ret = super().handleMatch(m, data)
        if el is not None and m.group(3):
            # upstream's code here is: m.group(3).strip() rather than m.group(3).
            # We keep the group verbatim (escaped, and atomic so it is not
            # re-processed by other inline patterns).
            el.text = markdown.util.AtomicString(markdown.util.code_escape(m.group(3)))
        return ret
class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
TWITTER_MAX_IMAGE_HEIGHT = 400
TWITTER_MAX_TO_PREVIEW = 3
INLINE_PREVIEW_LIMIT_PER_MESSAGE = 5
    def __init__(self, md: markdown.Markdown) -> None:
        # No extra state of our own; just initialize the base Treeprocessor.
        markdown.treeprocessors.Treeprocessor.__init__(self, md)
    def add_a(
        self,
        root: Element,
        url: str,
        link: str,
        title: Optional[str] = None,
        desc: Optional[str] = None,
        class_attr: str = "message_inline_image",
        data_id: Optional[str] = None,
        insertion_index: Optional[int] = None,
        already_thumbnailed: bool = False,
    ) -> None:
        """Append (or insert at insertion_index) an inline image preview
        <div> under root.

        url is the image source; link is the href the preview points at.
        class_attr selects the preview flavor; data_id is emitted as a
        data-id attribute (used to carry embedded-video HTML).
        """
        desc = desc if desc is not None else ""

        # Update message.has_image attribute.
        if "message_inline_image" in class_attr and self.md.zulip_message:
            self.md.zulip_message.has_image = True

        if insertion_index is not None:
            div = Element("div")
            root.insert(insertion_index, div)
        else:
            div = SubElement(root, "div")

        div.set("class", class_attr)
        a = SubElement(div, "a")
        a.set("href", link)
        if title is not None:
            a.set("title", title)
        if data_id is not None:
            a.set("data-id", data_id)
        img = SubElement(a, "img")
        if (
            settings.THUMBNAIL_IMAGES
            and (not already_thumbnailed)
            and user_uploads_or_external(url)
        ):
            # See docs/thumbnailing.md for some high-level documentation.
            #
            # We strip leading '/' from relative URLs here to ensure
            # consistency in what gets passed to /thumbnail
            url = url.lstrip("/")
            img.set("src", "/thumbnail?" + urlencode({"url": url, "size": "thumbnail"}))
            img.set("data-src-fullsize", "/thumbnail?" + urlencode({"url": url, "size": "full"}))
        else:
            img.set("src", url)

        if class_attr == "message_inline_ref":
            summary_div = SubElement(div, "div")
            title_div = SubElement(summary_div, "div")
            title_div.set("class", "message_inline_image_title")
            title_div.text = title
            # NOTE(review): `desc` is normalized above but never written into
            # desc_div, and the element tag is "desc" rather than "div" --
            # confirm whether this is intentional.
            desc_div = SubElement(summary_div, "desc")
            desc_div.set("class", "message_inline_image_desc")
    def add_oembed_data(self, root: Element, link: str, extracted_data: Dict[str, Any]) -> bool:
        """Render an oEmbed "photo" or "video" resource as an inline
        preview under root; return True if a preview was added."""
        oembed_resource_type = extracted_data.get("type", "")
        title = extracted_data.get("title")

        if oembed_resource_type == "photo":
            image = extracted_data.get("image")
            if image:
                self.add_a(root, image, link, title=title)
                return True

        elif oembed_resource_type == "video":
            html = extracted_data["html"]
            image = extracted_data["image"]
            title = extracted_data.get("title")
            description = extracted_data.get("description")
            # The embeddable player HTML rides along in the data-id attribute.
            self.add_a(
                root,
                image,
                link,
                title,
                description,
                "embed-video message_inline_image",
                html,
                already_thumbnailed=True,
            )
            return True

        return False
    def add_embed(self, root: Element, link: str, extracted_data: Dict[str, Any]) -> None:
        """Append a generic link-preview card (image, title, description)
        for `link`, preferring oEmbed data when it is available."""
        oembed = extracted_data.get("oembed", False)
        if oembed and self.add_oembed_data(root, link, extracted_data):
            return

        img_link = extracted_data.get("image")
        if not img_link:
            # Don't add an embed if an image is not found
            return

        container = SubElement(root, "div")
        container.set("class", "message_embed")

        parsed_img_link = urllib.parse.urlparse(img_link)
        # Append domain where relative img_link url is given
        if not parsed_img_link.netloc:
            parsed_url = urllib.parse.urlparse(link)
            domain = "{url.scheme}://{url.netloc}/".format(url=parsed_url)
            img_link = urllib.parse.urljoin(domain, img_link)
        img = SubElement(container, "a")
        img.set("style", "background-image: url(" + img_link + ")")
        img.set("href", link)
        img.set("class", "message_embed_image")

        data_container = SubElement(container, "div")
        data_container.set("class", "data-container")

        title = extracted_data.get("title")
        if title:
            title_elm = SubElement(data_container, "div")
            title_elm.set("class", "message_embed_title")
            a = SubElement(title_elm, "a")
            a.set("href", link)
            a.set("title", title)
            a.text = title
        description = extracted_data.get("description")
        if description:
            description_elm = SubElement(data_container, "div")
            description_elm.set("class", "message_embed_description")
            description_elm.text = description
def get_actual_image_url(self, url: str) -> str:
# Add specific per-site cases to convert image-preview URLs to image URLs.
# See https://github.com/zulip/zulip/issues/4658 for more information
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc == "github.com" or parsed_url.netloc.endswith(".github.com"):
# https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png ->
# https://raw.githubusercontent.com/zulip/zulip/master/static/images/logo/zulip-icon-128x128.png
split_path = parsed_url.path.split("/")
if len(split_path) > 3 and split_path[3] == "blob":
return urllib.parse.urljoin(
"https://raw.githubusercontent.com", "/".join(split_path[0:3] + split_path[4:])
)
return url
def is_image(self, url: str) -> bool:
if not self.md.image_preview_enabled:
return False
parsed_url = urllib.parse.urlparse(url)
# remove HTML URLs which end with image extensions that can not be shorted
if parsed_url.netloc == "pasteboard.co":
return False
# List from https://support.google.com/chromeos/bin/answer.py?hl=en&answer=183093
for ext in [".bmp", ".gif", ".jpe", "jpeg", ".jpg", ".png", ".webp"]:
if parsed_url.path.lower().endswith(ext):
return True
return False
def corrected_image_source(self, url: str) -> Optional[str]:
# This function adjusts any URLs from linx.li and
# wikipedia.org to point to the actual image URL. It's
# structurally very similar to dropbox_image, and possibly
# should be rewritten to use open graph, but has some value.
parsed_url = urllib.parse.urlparse(url)
if parsed_url.netloc.lower().endswith(".wikipedia.org"):
# Redirecting from "/wiki/File:" to "/wiki/Special:FilePath/File:"
# A possible alternative, that avoids the redirect after hitting "Special:"
# is using the first characters of md5($filename) to generate the URL
domain = parsed_url.scheme + "://" + parsed_url.netloc
correct_url = domain + parsed_url.path[:6] + "Special:FilePath" + parsed_url.path[5:]
return correct_url
if parsed_url.netloc == "linx.li":
return "https://linx.li/s" + parsed_url.path
return None
    def dropbox_image(self, url: str) -> Optional[Dict[str, Any]]:
        """For a previewable Dropbox share link, return a dict with keys
        "image", "is_image", and possibly "title"/"desc"; return None for
        non-Dropbox or non-previewable links."""
        # TODO: The returned Dict could possibly be a TypedDict in future.
        parsed_url = urllib.parse.urlparse(url)
        if parsed_url.netloc == "dropbox.com" or parsed_url.netloc.endswith(".dropbox.com"):
            is_album = parsed_url.path.startswith("/sc/") or parsed_url.path.startswith("/photos/")
            # Only allow preview Dropbox shared links
            if not (
                parsed_url.path.startswith("/s/") or parsed_url.path.startswith("/sh/") or is_album
            ):
                return None

            # Try to retrieve open graph protocol info for a preview
            # This might be redundant right now for shared links for images.
            # However, we might want to make use of title and description
            # in the future. If the actual image is too big, we might also
            # want to use the open graph image.
            image_info = fetch_open_graph_image(url)

            is_image = is_album or self.is_image(url)

            # If it is from an album or not an actual image file,
            # just use open graph image.
            if is_album or not is_image:
                # Failed to follow link to find an image preview so
                # use placeholder image and guess filename
                if image_info is None:
                    return None

                image_info["is_image"] = is_image
                return image_info

            # Otherwise, try to retrieve the actual image.
            # This is because open graph image from Dropbox may have padding
            # and gifs do not work.
            # TODO: What if image is huge? Should we get headers first?
            if image_info is None:
                image_info = {}
            image_info["is_image"] = True
            parsed_url_list = list(parsed_url)
            parsed_url_list[4] = "dl=1"  # Replaces query
            image_info["image"] = urllib.parse.urlunparse(parsed_url_list)

            return image_info
        return None
    def youtube_id(self, url: str) -> Optional[str]:
        """Return the YouTube video id embedded in `url`, or None."""
        if not self.md.image_preview_enabled:
            return None
        # YouTube video id extraction regular expression from https://pastebin.com/KyKAFv1s
        # Slightly modified to support URLs of the forms
        #   - youtu.be/<id>
        #   - youtube.com/playlist?v=<id>&list=<list-id>
        #   - youtube.com/watch_videos?video_ids=<id1>,<id2>,<id3>
        # If it matches, match.group(2) is the video id.
        schema_re = r"(?:https?://)"
        host_re = r"(?:youtu\.be/|(?:\w+\.)?youtube(?:-nocookie)?\.com/)"
        param_re = (
            r"(?:(?:(?:v|embed)/)|"
            + r"(?:(?:(?:watch|playlist)(?:_popup|_videos)?(?:\.php)?)?(?:\?|#!?)(?:.+&)?v(?:ideo_ids)?=))"
        )
        id_re = r"([0-9A-Za-z_-]+)"
        youtube_re = r"^({schema_re}?{host_re}{param_re}?)?{id_re}(?(1).+)?$"
        youtube_re = youtube_re.format(
            schema_re=schema_re, host_re=host_re, id_re=id_re, param_re=param_re
        )
        match = re.match(youtube_re, url)
        # URLs of the form youtube.com/playlist?list=<list-id> are incorrectly matched
        if match is None or match.group(2) == "playlist":
            return None

        return match.group(2)
def youtube_title(self, extracted_data: Dict[str, Any]) -> Optional[str]:
title = extracted_data.get("title")
if title is not None:
return f"YouTube - {title}"
return None
def youtube_image(self, url: str) -> Optional[str]:
yt_id = self.youtube_id(url)
if yt_id is not None:
return f"https://i.ytimg.com/vi/{yt_id}/default.jpg"
return None
    def vimeo_id(self, url: str) -> Optional[str]:
        """Return the numeric Vimeo video id embedded in `url`, or None."""
        if not self.md.image_preview_enabled:
            return None
        # (http|https)?:\/\/(www\.)?vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/([^\/]*)\/videos\/|)(\d+)(?:|\/\?)
        # If it matches, match.group('id') is the video id.
        # NOTE(review): the pattern below has no named group; the id is
        # taken from group(5) -- confirm the comment above is stale.
        vimeo_re = (
            r"^((http|https)?:\/\/(www\.)?vimeo.com\/"
            + r"(?:channels\/(?:\w+\/)?|groups\/"
            + r"([^\/]*)\/videos\/|)(\d+)(?:|\/\?))$"
        )
        match = re.match(vimeo_re, url)
        if match is None:
            return None
        return match.group(5)
def vimeo_title(self, extracted_data: Dict[str, Any]) -> Optional[str]:
title = extracted_data.get("title")
if title is not None:
return f"Vimeo - {title}"
return None
    def twitter_text(
        self,
        text: str,
        urls: List[Dict[str, str]],
        user_mentions: List[Dict[str, Any]],
        media: List[Dict[str, Any]],
    ) -> Element:
        """
        Use data from the Twitter API to turn links, mentions and media into A
        tags. Also convert Unicode emojis to images.

        This works by using the URLs, user_mentions and media data from
        the twitter API and searching for Unicode emojis in the text using
        `unicode_emoji_regex`.

        The first step is finding the locations of the URLs, mentions, media and
        emoji in the text. For each match we build a dictionary with type, the start
        location, end location, the URL to link to, and the text(codepoint and title
        in case of emojis) to be used in the link(image in case of emojis).

        Next we sort the matches by start location. And for each we add the
        text from the end of the last link to the start of the current link to
        the output. The text needs to be added to the text attribute of the first
        node (the P tag) or the tail the last link created.

        Finally we add any remaining text to the last node.
        """
        to_process: List[Dict[str, Any]] = []
        # Build dicts for URLs
        for url_data in urls:
            short_url = url_data["url"]
            full_url = url_data["expanded_url"]
            for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
                to_process.append(
                    {
                        "type": "url",
                        "start": match.start(),
                        "end": match.end(),
                        "url": short_url,
                        "text": full_url,
                    }
                )
        # Build dicts for mentions
        for user_mention in user_mentions:
            screen_name = user_mention["screen_name"]
            mention_string = "@" + screen_name
            for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE):
                to_process.append(
                    {
                        "type": "mention",
                        "start": match.start(),
                        "end": match.end(),
                        "url": "https://twitter.com/" + urllib.parse.quote(screen_name),
                        "text": mention_string,
                    }
                )
        # Build dicts for media
        for media_item in media:
            short_url = media_item["url"]
            expanded_url = media_item["expanded_url"]
            for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
                to_process.append(
                    {
                        "type": "media",
                        "start": match.start(),
                        "end": match.end(),
                        "url": short_url,
                        "text": expanded_url,
                    }
                )
        # Build dicts for emojis
        for match in re.finditer(unicode_emoji_regex, text, re.IGNORECASE):
            orig_syntax = match.group("syntax")
            codepoint = unicode_emoji_to_codepoint(orig_syntax)
            if codepoint in codepoint_to_name:
                display_string = ":" + codepoint_to_name[codepoint] + ":"
                to_process.append(
                    {
                        "type": "emoji",
                        "start": match.start(),
                        "end": match.end(),
                        "codepoint": codepoint,
                        "title": display_string,
                    }
                )

        # Sort by start position so the text can be walked left to right.
        to_process.sort(key=lambda x: x["start"])
        p = current_node = Element("p")

        def set_text(text: str) -> None:
            """
            Helper to set the text or the tail of the current_node
            """
            if current_node == p:
                current_node.text = text
            else:
                current_node.tail = text

        db_data = self.md.zulip_db_data
        current_index = 0
        for item in to_process:
            # The text we want to link starts in already linked text skip it
            if item["start"] < current_index:
                continue
            # Add text from the end of last link to the start of the current
            # link
            set_text(text[current_index : item["start"]])
            current_index = item["end"]
            if item["type"] != "emoji":
                elem = url_to_a(db_data, item["url"], item["text"])
                assert isinstance(elem, Element)
            else:
                elem = make_emoji(item["codepoint"], item["title"])
            current_node = elem
            p.append(elem)

        # Add any unused text
        set_text(text[current_index:])
        return p
    def twitter_link(self, url: str) -> Optional[Element]:
        """Render a tweet URL as a rich inline preview <div>, or None when
        the URL is not a tweet or its data could not be fetched."""
        tweet_id = get_tweet_id(url)

        if tweet_id is None:
            return None

        try:
            res = fetch_tweet_data(tweet_id)
            if res is None:
                return None
            user: Dict[str, Any] = res["user"]
            tweet = Element("div")
            tweet.set("class", "twitter-tweet")
            img_a = SubElement(tweet, "a")
            img_a.set("href", url)
            profile_img = SubElement(img_a, "img")
            profile_img.set("class", "twitter-avatar")
            # For some reason, for, e.g. tweet 285072525413724161,
            # python-twitter does not give us a
            # profile_image_url_https, but instead puts that URL in
            # profile_image_url. So use _https if available, but fall
            # back gracefully.
            image_url = user.get("profile_image_url_https", user["profile_image_url"])
            profile_img.set("src", image_url)

            text = html.unescape(res["full_text"])
            urls = res.get("urls", [])
            user_mentions = res.get("user_mentions", [])
            media: List[Dict[str, Any]] = res.get("media", [])
            p = self.twitter_text(text, urls, user_mentions, media)
            tweet.append(p)

            span = SubElement(tweet, "span")
            span.text = "- {} (@{})".format(user["name"], user["screen_name"])

            # Add image previews
            for media_item in media:
                # Only photos have a preview image
                if media_item["type"] != "photo":
                    continue

                # Find the image size that is smaller than
                # TWITTER_MAX_IMAGE_HEIGHT px tall or the smallest
                size_name_tuples = list(media_item["sizes"].items())
                size_name_tuples.sort(reverse=True, key=lambda x: x[1]["h"])
                for size_name, size in size_name_tuples:
                    if size["h"] < self.TWITTER_MAX_IMAGE_HEIGHT:
                        break

                media_url = "{}:{}".format(media_item["media_url_https"], size_name)
                img_div = SubElement(tweet, "div")
                img_div.set("class", "twitter-image")
                img_a = SubElement(img_div, "a")
                img_a.set("href", media_item["url"])
                img = SubElement(img_a, "img")
                img.set("src", media_url)

            return tweet
        except Exception:
            # We put this in its own try-except because it requires external
            # connectivity. If Twitter flakes out, we don't want to not-render
            # the entire message; we just want to not show the Twitter preview.
            markdown_logger.warning("Error building Twitter link", exc_info=True)
            return None
def get_url_data(self, e: Element) -> Optional[Tuple[str, Optional[str]]]:
if e.tag == "a":
url = e.get("href")
assert url is not None
return (url, e.text)
return None
    def get_inlining_information(
        self,
        root: Element,
        found_url: ResultWithFamily[Tuple[str, Optional[str]]],
    ) -> LinkInfo:
        """Decide where an inline preview for *found_url* should be placed.

        Returns a LinkInfo dict with:
          parent: element the preview should be attached to
          title:  link text to use as a title, when it differs from the URL
          index:  explicit insertion position in parent, or None for append
          remove: element to delete once the preview is added, or None
        """
        grandparent = found_url.family.grandparent
        parent = found_url.family.parent
        ahref_element = found_url.family.child
        (url, text) = found_url.result
        # url != text usually implies a named link, which we opt not to remove
        url_eq_text = text is None or url == text
        title = None if url_eq_text else text
        info: LinkInfo = {
            "parent": root,
            "title": title,
            "index": None,
            "remove": None,
        }
        if parent.tag == "li":
            info["parent"] = parent
            # Only remove a bare, unnamed link that is the sole content.
            if not parent.text and not ahref_element.tail and url_eq_text:
                info["remove"] = ahref_element
        elif parent.tag == "p":
            assert grandparent is not None
            parent_index = None
            for index, uncle in enumerate(grandparent):
                if uncle is parent:
                    parent_index = index
                    break
            # Append to end of list of grandparent's children as normal
            info["parent"] = grandparent
            # Remove the whole paragraph if it contained nothing but this link.
            if (
                len(parent) == 1
                and (not parent.text or parent.text == "\n")
                and not ahref_element.tail
                and url_eq_text
            ):
                info["remove"] = parent
            if parent_index is not None:
                info["index"] = self.find_proper_insertion_index(grandparent, parent, parent_index)
        return info
def handle_image_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
) -> None:
info = self.get_inlining_information(root, found_url)
(url, text) = found_url.result
actual_url = self.get_actual_image_url(url)
self.add_a(
info["parent"], actual_url, url, title=info["title"], insertion_index=info["index"]
)
if info["remove"] is not None:
info["parent"].remove(info["remove"])
def handle_tweet_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
twitter_data: Element,
) -> None:
info = self.get_inlining_information(root, found_url)
if info["index"] is not None:
div = Element("div")
root.insert(info["index"], div)
else:
div = SubElement(root, "div")
div.set("class", "inline-preview-twitter")
div.insert(0, twitter_data)
def handle_youtube_url_inlining(
self,
root: Element,
found_url: ResultWithFamily[Tuple[str, Optional[str]]],
yt_image: str,
) -> None:
info = self.get_inlining_information(root, found_url)
(url, text) = found_url.result
yt_id = self.youtube_id(url)
self.add_a(
info["parent"],
yt_image,
url,
None,
None,
"youtube-video message_inline_image",
yt_id,
insertion_index=info["index"],
already_thumbnailed=True,
)
    def find_proper_insertion_index(
        self, grandparent: Element, parent: Element, parent_index_in_grandparent: int
    ) -> int:
        """Return the index in grandparent where a new preview div belongs.

        Walks past any existing preview divs that came from links in the
        same paragraph, so multiple previews keep the order of their links.
        """
        # If there are several inline images from same paragraph, ensure that
        # they are in correct (and not opposite) order by inserting after last
        # inline image from paragraph 'parent'
        parent_links = [ele.attrib["href"] for ele in parent.iter(tag="a")]
        insertion_index = parent_index_in_grandparent
        while True:
            insertion_index += 1
            if insertion_index >= len(grandparent):
                return insertion_index
            uncle = grandparent[insertion_index]
            inline_image_classes = [
                "message_inline_image",
                "message_inline_ref",
                "inline-preview-twitter",
            ]
            # Stop at the first sibling that is not one of our preview divs.
            if (
                uncle.tag != "div"
                or "class" not in uncle.keys()
                or uncle.attrib["class"] not in inline_image_classes
            ):
                return insertion_index
            # NOTE(review): assumes every preview div contains an <a>; this
            # holds for divs produced by add_a/handle_tweet_inlining — verify.
            uncle_link = list(uncle.iter(tag="a"))[0].attrib["href"]
            if uncle_link not in parent_links:
                return insertion_index
def is_absolute_url(self, url: str) -> bool:
return bool(urllib.parse.urlparse(url).netloc)
    def run(self, root: Element) -> None:
        """Scan the rendered tree for links and add inline previews.

        Handles relative image links, Dropbox images, generic image URLs,
        tweets, YouTube thumbnails, and (when enabled) generic URL embeds;
        also records link/attachment metadata on the message being rendered.
        """
        # Get all URLs from the blob
        found_urls = walk_tree_with_family(root, self.get_url_data)
        unique_urls = {found_url.result[0] for found_url in found_urls}
        # Collect unique URLs which are not quoted as we don't do
        # inline previews for links inside blockquotes.
        unique_previewable_urls = {
            found_url.result[0] for found_url in found_urls if not found_url.family.in_blockquote
        }
        # Set has_link and similar flags whenever a message is processed by Markdown
        if self.md.zulip_message:
            self.md.zulip_message.has_link = len(found_urls) > 0
            self.md.zulip_message.has_image = False  # This is updated in self.add_a
            self.md.zulip_message.potential_attachment_path_ids = []
            for url in unique_urls:
                # Due to rewrite_local_links_to_relative, we need to
                # handle both relative URLs beginning with
                # `/user_uploads` and beginning with `user_uploads`.
                # This urllib construction converts the latter into
                # the former.
                parsed_url = urllib.parse.urlsplit(urllib.parse.urljoin("/", url))
                host = parsed_url.netloc
                if host != "" and host != self.md.zulip_realm.host:
                    continue
                if not parsed_url.path.startswith("/user_uploads/"):
                    continue
                path_id = parsed_url.path[len("/user_uploads/") :]
                self.md.zulip_message.potential_attachment_path_ids.append(path_id)
        if len(found_urls) == 0:
            return
        if len(unique_previewable_urls) > self.INLINE_PREVIEW_LIMIT_PER_MESSAGE:
            return
        processed_urls: Set[str] = set()
        rendered_tweet_count = 0
        for found_url in found_urls:
            (url, text) = found_url.result
            # Preview each previewable URL at most once per message.
            if url in unique_previewable_urls and url not in processed_urls:
                processed_urls.add(url)
            else:
                continue
            if not self.is_absolute_url(url):
                if self.is_image(url):
                    self.handle_image_inlining(root, found_url)
                # We don't have a strong use case for doing URL preview for relative links.
                continue
            dropbox_image = self.dropbox_image(url)
            if dropbox_image is not None:
                class_attr = "message_inline_ref"
                is_image = dropbox_image["is_image"]
                if is_image:
                    class_attr = "message_inline_image"
                    # Not making use of title and description of images
                self.add_a(
                    root,
                    dropbox_image["image"],
                    url,
                    title=dropbox_image.get("title"),
                    desc=dropbox_image.get("desc", ""),
                    class_attr=class_attr,
                    already_thumbnailed=True,
                )
                continue
            if self.is_image(url):
                image_source = self.corrected_image_source(url)
                if image_source is not None:
                    found_url = ResultWithFamily(
                        family=found_url.family,
                        result=(image_source, image_source),
                    )
                self.handle_image_inlining(root, found_url)
                continue
            if get_tweet_id(url) is not None:
                if rendered_tweet_count >= self.TWITTER_MAX_TO_PREVIEW:
                    # Only render at most TWITTER_MAX_TO_PREVIEW tweets per message.
                    continue
                twitter_data = self.twitter_link(url)
                if twitter_data is None:
                    # This link is not actually a tweet known to twitter
                    continue
                rendered_tweet_count += 1
                self.handle_tweet_inlining(root, found_url, twitter_data)
                continue
            youtube = self.youtube_image(url)
            if youtube is not None:
                self.handle_youtube_url_inlining(root, found_url, youtube)
                # NOTE: We don't `continue` here, to allow replacing the URL with
                # the title, if INLINE_URL_EMBED_PREVIEW feature is enabled.
                # The entire preview would ideally be shown only if the feature
                # is enabled, but URL previews are a beta feature and YouTube
                # previews are pretty stable.
            db_data = self.md.zulip_db_data
            if db_data and db_data["sent_by_bot"]:
                continue
            if not self.md.url_embed_preview_enabled:
                continue
            try:
                extracted_data = link_preview.link_embed_data_from_cache(url)
            except NotFoundInCache:
                # Queue the URL for asynchronous preview fetching.
                self.md.zulip_message.links_for_preview.add(url)
                continue
            if extracted_data:
                if youtube is not None:
                    title = self.youtube_title(extracted_data)
                    if title is not None:
                        if url == text:
                            found_url.family.child.text = title
                        else:
                            found_url.family.child.text = text
                    continue
                self.add_embed(root, url, extracted_data)
                if self.vimeo_id(url):
                    title = self.vimeo_title(extracted_data)
                    if title:
                        if url == text:
                            found_url.family.child.text = title
                        else:
                            found_url.family.child.text = text
class Timestamp(markdown.inlinepatterns.Pattern):
    """Turn a matched timestamp string into an HTML5 <time> element."""

    def _parse_time(self, raw: str) -> Optional[datetime.datetime]:
        # First try a human-readable datetime, then a Unix epoch value.
        try:
            return dateutil.parser.parse(raw, tzinfos=common_timezones)
        except ValueError:
            try:
                return datetime.datetime.fromtimestamp(float(raw))
            except ValueError:
                return None

    def handleMatch(self, match: Match[str]) -> Optional[Element]:
        raw = match.group("time")
        parsed = self._parse_time(raw)
        if not parsed:
            err = Element("span")
            err.set("class", "timestamp-error")
            err.text = markdown.util.AtomicString(f"Invalid time format: {raw}")
            return err
        # Normalize to UTC; naive datetimes are treated as already-UTC.
        if parsed.tzinfo:
            parsed = parsed.astimezone(datetime.timezone.utc)
        else:
            parsed = parsed.replace(tzinfo=datetime.timezone.utc)
        # Use HTML5 <time> element for valid timestamps.
        node = Element("time")
        node.set("datetime", parsed.isoformat().replace("+00:00", "Z"))
        # Keep the user's original text so simple clients translating
        # HTML to text will at least display something.
        node.text = markdown.util.AtomicString(raw)
        return node
# All of our emojis(non ZWJ sequences) belong to one of these Unicode blocks:
# \U0001f100-\U0001f1ff - Enclosed Alphanumeric Supplement
# \U0001f200-\U0001f2ff - Enclosed Ideographic Supplement
# \U0001f300-\U0001f5ff - Miscellaneous Symbols and Pictographs
# \U0001f600-\U0001f64f - Emoticons (Emoji)
# \U0001f680-\U0001f6ff - Transport and Map Symbols
# \U0001f900-\U0001f9ff - Supplemental Symbols and Pictographs
# \u2000-\u206f - General Punctuation
# \u2300-\u23ff - Miscellaneous Technical
# \u2400-\u243f - Control Pictures
# \u2440-\u245f - Optical Character Recognition
# \u2460-\u24ff - Enclosed Alphanumerics
# \u2500-\u257f - Box Drawing
# \u2580-\u259f - Block Elements
# \u25a0-\u25ff - Geometric Shapes
# \u2600-\u26ff - Miscellaneous Symbols
# \u2700-\u27bf - Dingbats
# \u2900-\u297f - Supplemental Arrows-B
# \u2b00-\u2bff - Miscellaneous Symbols and Arrows
# \u3000-\u303f - CJK Symbols and Punctuation
# \u3200-\u32ff - Enclosed CJK Letters and Months
# Matches a single character from any of the Unicode blocks enumerated in
# the comment above, captured as the named group "syntax" (consumed by
# UnicodeEmoji.handleMatch).
unicode_emoji_regex = (
    "(?P<syntax>["
    "\U0001F100-\U0001F64F"
    "\U0001F680-\U0001F6FF"
    "\U0001F900-\U0001F9FF"
    "\u2000-\u206F"
    "\u2300-\u27BF"
    "\u2900-\u297F"
    "\u2B00-\u2BFF"
    "\u3000-\u303F"
    "\u3200-\u32FF"
    "])"
)
# The equivalent JS regex is \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f]|\ud83d[\ude80-\udeff]|
# \ud83e[\udd00-\uddff]|[\u2000-\u206f]|[\u2300-\u27bf]|[\u2b00-\u2bff]|[\u3000-\u303f]|
# [\u3200-\u32ff]. See below comments for explanation. The JS regex is used by marked.js for
# frontend Unicode emoji processing.
# The JS regex \ud83c[\udd00-\udfff]|\ud83d[\udc00-\ude4f] represents U0001f100-\U0001f64f
# The JS regex \ud83d[\ude80-\udeff] represents \U0001f680-\U0001f6ff
# The JS regex \ud83e[\udd00-\uddff] represents \U0001f900-\U0001f9ff
# The JS regex [\u2000-\u206f] represents \u2000-\u206f
# The JS regex [\u2300-\u27bf] represents \u2300-\u27bf
# Similarly other JS regexes can be mapped to the respective Unicode blocks.
# For more information, please refer to the following article:
# http://crocodillon.com/blog/parsing-emoji-unicode-in-javascript
def make_emoji(codepoint: str, display_string: str) -> Element:
    """Build the <span> element used to render a Unicode emoji."""
    # The title is the emoji's name (colons stripped, underscores as spaces).
    title = display_string[1:-1].replace("_", " ")
    node = Element("span")
    for attr, value in (
        ("class", f"emoji emoji-{codepoint}"),
        ("title", title),
        ("role", "img"),
        ("aria-label", title),
    ):
        node.set(attr, value)
    node.text = markdown.util.AtomicString(display_string)
    return node
def make_realm_emoji(src: str, display_string: str) -> Element:
    """Build the <img> element used to render a realm (custom) emoji."""
    attributes = {
        "src": src,
        "class": "emoji",
        "alt": display_string,
        # Title is the emoji name without colons, underscores as spaces.
        "title": display_string[1:-1].replace("_", " "),
    }
    img = Element("img")
    for key, value in attributes.items():
        img.set(key, value)
    return img
def unicode_emoji_to_codepoint(unicode_emoji: str) -> str:
    """Return the lowercase hex codepoint for a single-character emoji.

    The result is zero-padded to at least 4 digits (e.g. "00a9" for
    U+00A9, "1f604" for U+1F604).  The original comment claimed padding
    happened "if the length is less than zero", which was a typo for
    "less than four"; the manual pad loop is also replaced with an
    idiomatic format spec.
    """
    return f"{ord(unicode_emoji):04x}"
class EmoticonTranslation(markdown.inlinepatterns.Pattern):
    """Translate emoticons like `:)` into emoji like `:smile:`."""

    def handleMatch(self, match: Match[str]) -> Optional[Element]:
        # Only translate when the sender has the setting enabled.
        db_data = self.md.zulip_db_data
        if db_data is None or not db_data["translate_emoticons"]:
            return None
        translated = translate_emoticons(match.group("emoticon"))
        emoji_name = translated[1:-1]
        return make_emoji(name_to_codepoint[emoji_name], translated)
class UnicodeEmoji(markdown.inlinepatterns.Pattern):
    def handleMatch(self, match: Match[str]) -> Optional[Element]:
        # Render a literal Unicode emoji character as our emoji span, when
        # we have a name for its codepoint; otherwise leave it untouched.
        orig_syntax = match.group("syntax")
        codepoint = unicode_emoji_to_codepoint(orig_syntax)
        if codepoint not in codepoint_to_name:
            return None
        display_string = f":{codepoint_to_name[codepoint]}:"
        return make_emoji(codepoint, display_string)
class Emoji(markdown.inlinepatterns.Pattern):
    def handleMatch(self, match: Match[str]) -> Optional[Union[str, Element]]:
        orig_syntax = match.group("syntax")
        name = orig_syntax[1:-1]
        db_data = self.md.zulip_db_data
        active_realm_emoji: Dict[str, Dict[str, str]] = (
            db_data["active_realm_emoji"] if db_data is not None else {}
        )
        # Realm (custom) emoji take precedence over built-in Unicode emoji.
        if self.md.zulip_message and name in active_realm_emoji:
            return make_realm_emoji(active_realm_emoji[name]["source_url"], orig_syntax)
        if name == "zulip":
            # The Zulip emoji is always available.
            return make_realm_emoji(
                "/static/generated/emoji/images/emoji/unicode/zulip.png", orig_syntax
            )
        if name in name_to_codepoint:
            return make_emoji(name_to_codepoint[name], orig_syntax)
        # Unknown emoji name: leave the text as-is.
        return orig_syntax
def content_has_emoji_syntax(content: str) -> bool:
    """Return whether *content* contains emoji syntax matched by EMOJI_REGEX."""
    return bool(re.search(EMOJI_REGEX, content))
class Tex(markdown.inlinepatterns.Pattern):
    def handleMatch(self, match: Match[str]) -> Element:
        body = match.group("body")
        rendered = render_tex(body, is_inline=True)
        if rendered is None:
            # Rendering failed; show the raw TeX with an error style.
            span = Element("span")
            span.set("class", "tex-error")
            span.text = markdown.util.AtomicString("$$" + body + "$$")
            return span
        # Stash the rendered HTML so Markdown doesn't re-process it.
        return self.md.htmlStash.store(rendered)
def sanitize_url(url: str) -> Optional[str]:
    """
    Sanitize a URL against XSS attacks.
    See the docstring on markdown.inlinepatterns.LinkPattern.sanitize_url.
    """
    try:
        scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(
            url.replace(" ", "%20")
        )
    except ValueError:
        # The URL is so malformed it could not even be parsed.
        return ""

    if scheme == "" and netloc == "":
        if "@" in path:
            # No scheme/netloc and an '@' in the path looks like an email
            # address; treat it as mailto:.
            scheme = "mailto"
        elif path.startswith("/"):
            # Allow domain-relative links as-is.
            return urllib.parse.urlunparse(("", "", path, params, query, fragment))
        elif path == "" and params == "" and query == "" and fragment != "":
            # Allow fragment-only links as-is.
            return urllib.parse.urlunparse(("", "", "", "", "", fragment))

    # Zulip modification: If scheme is not specified, assume http://.
    # We re-enter sanitize_url because netloc etc. need to be re-parsed.
    if not scheme:
        return sanitize_url("http://" + url)

    if netloc == "" and scheme not in ("mailto", "news", "file", "bitcoin"):
        # Every other scheme requires a network location; fail fast.
        return None

    # Upstream code will accept a URL like javascript://foo because it
    # appears to have a netloc, and plenty of other schemes launch
    # external programs.  To be safe, we whitelist the scheme.
    if scheme not in ("http", "https", "ftp", "mailto", "file", "bitcoin"):
        return None

    # Upstream scans path/params/query for colons to catch aliases of
    # javascript: with no apparent scheme.  We already converted an empty
    # scheme to http:// above, so we skip that check, which would also
    # forbid many legitimate URLs.
    return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
def url_to_a(
    db_data: Optional[DbData], url: str, text: Optional[str] = None
) -> Union[Element, str]:
    """Build an <a> element for *url*, or return the URL as plain text if
    it is rejected by sanitize_url."""
    href = sanitize_url(url)
    if href is None:
        # Rejected by sanitize_url; render it as plain text.
        return url
    anchor = Element("a")
    anchor.set("href", rewrite_local_links_to_relative(db_data, href))
    anchor.text = text if text is not None else markdown.util.AtomicString(url)
    return anchor
class CompiledPattern(markdown.inlinepatterns.Pattern):
    def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
        # Unlike the superclass's small __init__, we accept an
        # already-compiled regex from the caller and skip compilation.
        self.md = md
        self.compiled_re = compiled_re
class AutoLink(CompiledPattern):
    def handleMatch(self, match: Match[str]) -> ElementStringNone:
        # Turn a bare URL into a clickable link.
        return url_to_a(self.md.zulip_db_data, match.group("url"))
class OListProcessor(sane_lists.SaneOListProcessor):
    def __init__(self, parser: BlockParser) -> None:
        # HACK: Temporarily set the tab length to 2 while the superclass
        # builds its regexes, so that ordered lists work off 2-space
        # indentation, then restore the standard 4-space tabs.
        parser.md.tab_length = 2
        super().__init__(parser)
        parser.md.tab_length = 4
class UListProcessor(sane_lists.SaneUListProcessor):
    """Unordered lists, but with 2-space indent."""

    def __init__(self, parser: BlockParser) -> None:
        # HACK: Temporarily set the tab length to 2 during initialization
        # (same trick as OListProcessor), then restore 4-space tabs.
        parser.md.tab_length = 2
        super().__init__(parser)
        parser.md.tab_length = 4
class ListIndentProcessor(markdown.blockprocessors.ListIndentProcessor):
    """Process unordered list blocks.

    Based on markdown.blockprocessors.ListIndentProcessor, but with 2-space indent.
    """

    def __init__(self, parser: BlockParser) -> None:
        # HACK: Set the tab length to 2 just for the initialization of
        # this class, so that bulleted lists (and only bulleted lists)
        # work off 2-space indentation.
        parser.md.tab_length = 2
        super().__init__(parser)
        parser.md.tab_length = 4
class HashHeaderProcessor(markdown.blockprocessors.HashHeaderProcessor):
    """Process hash headers.

    Based on markdown.blockprocessors.HashHeaderProcessor, but requires space for heading.
    """

    # Original regex for hashheader is
    # RE = re.compile(r'(?:^|\n)(?P<level>#{1,6})(?P<header>(?:\\.|[^\\])*?)#*(?:\n|$)')
    # Ours adds a required \s after the #s, so text like "#foo" is not a heading.
    RE = re.compile(r"(?:^|\n)(?P<level>#{1,6})\s(?P<header>(?:\\.|[^\\])*?)#*(?:\n|$)")
class BlockQuoteProcessor(markdown.blockprocessors.BlockQuoteProcessor):
    """Process block quotes.

    Based on markdown.blockprocessors.BlockQuoteProcessor, but with 2-space indent.
    """

    # Original regex for blockquote is RE = re.compile(r'(^|\n)[ ]{0,3}>[ ]?(.*)')
    RE = re.compile(r"(^|\n)(?!(?:[ ]{0,3}>\s*(?:$|\n))*(?:$|\n))" r"[ ]{0,3}>[ ]?(.*)")
    # Matches user @-mentions; used to silence mentions inside quotes.
    mention_re = re.compile(mention.find_mentions)

    # run() is very slightly forked from the base class; see notes below.
    def run(self, parent: Element, blocks: List[str]) -> None:
        block = blocks.pop(0)
        m = self.RE.search(block)
        if m:
            before = block[: m.start()]  # Lines before blockquote
            # Pass lines before blockquote in recursively for parsing first.
            self.parser.parseBlocks(parent, [before])
            # Remove ``> `` from beginning of each line.
            block = "\n".join([self.clean(line) for line in block[m.start() :].split("\n")])
        # Zulip modification: The next line is patched to match
        # CommonMark rather than original Markdown. In original
        # Markdown, blockquotes with a blank line between them were
        # merged, which makes it impossible to break a blockquote with
        # a blank line intentionally.
        #
        # This is a new blockquote. Create a new parent element.
        quote = etree.SubElement(parent, "blockquote")
        # Recursively parse block with blockquote as parent.
        # change parser state so blockquotes embedded in lists use p tags
        self.parser.state.set("blockquote")
        self.parser.parseChunk(quote, block)
        self.parser.state.reset()

    def clean(self, line: str) -> str:
        # Silence all the mentions inside blockquotes
        line = re.sub(self.mention_re, lambda m: "@_{}".format(m.group("match")), line)
        # And then run the upstream processor's code for removing the '>'
        return super().clean(line)
@dataclass
class Fence:
    # The fence marker string (e.g. "```") that opened this block.
    fence_str: str
    # True when the fenced block is code (i.e. not a quote block).
    is_code: bool
class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
    """Allows list blocks that come directly after another block
    to be rendered as a list.

    Detects paragraphs that have a matching list item that comes
    directly after a line of text, and inserts a newline between
    to satisfy Markdown"""

    # Matches a list-item line; group(1) is the marker ("*", "+", "-", or "N.").
    LI_RE = re.compile(r"^[ ]*([*+-]|\d\.)[ ]+(.*)", re.MULTILINE)

    def run(self, lines: List[str]) -> List[str]:
        """Insert a newline between a paragraph and ulist if missing."""
        inserts = 0
        in_code_fence: bool = False
        open_fences: List[Fence] = []
        copy = lines[:]
        for i in range(len(lines) - 1):
            # Ignore anything that is inside a fenced code block but not quoted.
            # We ignore all lines where some parent is a non quote code block.
            m = FENCE_RE.match(lines[i])
            if m:
                fence_str = m.group("fence")
                is_code = not m.group("lang") in ("quote", "quoted")
                has_open_fences = not len(open_fences) == 0
                matches_last_fence = (
                    fence_str == open_fences[-1].fence_str if has_open_fences else False
                )
                # A bare fence matching the innermost open fence closes it.
                closes_last_fence = not m.group("lang") and matches_last_fence
                if closes_last_fence:
                    open_fences.pop()
                else:
                    open_fences.append(Fence(fence_str, is_code))
                in_code_fence = any(fence.is_code for fence in open_fences)
            # If we're not in a fenced block and we detect an upcoming list
            # hanging off any block (including a list of another type), add
            # a newline.
            li1 = self.LI_RE.match(lines[i])
            li2 = self.LI_RE.match(lines[i + 1])
            if not in_code_fence and lines[i]:
                # Marker length 1 means a bullet (*, +, -); >= 2 means an
                # ordered marker like "1.", so the second clause detects a
                # change of list type between adjacent lines.
                if (li2 and not li1) or (
                    li1 and li2 and (len(li1.group(1)) == 1) != (len(li2.group(1)) == 1)
                ):
                    copy.insert(i + inserts + 1, "")
                    inserts += 1
        return copy
# Name for the outer capture group we use to separate whitespace and
# other delimiters from the actual content. This value won't be an
# option in user-entered capture groups.
OUTER_CAPTURE_GROUP = "linkifier_actual_match"


def prepare_linkifier_pattern(source: str) -> str:
    """Augment a linkifier so it only matches after start-of-string,
    whitespace, or opening delimiters, won't match if there are word
    characters directly after, and saves what was matched as
    OUTER_CAPTURE_GROUP."""
    prefix = r"""(?<![^\s'"\(,:<])"""
    suffix = r"(?!\w)"
    return f"{prefix}(?P<{OUTER_CAPTURE_GROUP}>{source}){suffix}"
# Given a regular expression pattern, linkifies groups that match it
# using the provided format string to construct the URL.
class LinkifierPattern(markdown.inlinepatterns.Pattern):
    """Applies a given linkifier to the input."""

    def __init__(
        self,
        source_pattern: str,
        format_string: str,
        markdown_instance: Optional[markdown.Markdown] = None,
    ) -> None:
        # Wrap the admin-supplied pattern with word-boundary conditions
        # (see prepare_linkifier_pattern).
        self.pattern = prepare_linkifier_pattern(source_pattern)
        self.format_string = format_string
        markdown.inlinepatterns.Pattern.__init__(self, self.pattern, markdown_instance)

    def handleMatch(self, m: Match[str]) -> Union[Element, str]:
        db_data = self.md.zulip_db_data
        # Interpolate the pattern's named groups into the URL format string;
        # the link text is the exact text that matched.
        return url_to_a(
            db_data,
            self.format_string % m.groupdict(),
            markdown.util.AtomicString(m.group(OUTER_CAPTURE_GROUP)),
        )
class UserMentionPattern(markdown.inlinepatterns.InlineProcessor):
    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        """Render @**name**, @**name|id**, @**|id**, and silent (@_) mention
        syntaxes as user-mention spans, recording mentioned user IDs."""
        match = m.group("match")
        silent = m.group("silent") == "_"
        db_data = self.md.zulip_db_data
        if self.md.zulip_message and db_data is not None:
            if match.startswith("**") and match.endswith("**"):
                name = match[2:-2]
            else:
                return None, None, None
            wildcard = mention.user_mention_matches_wildcard(name)
            # For @**|id** and @**name|id** mention syntaxes.
            id_syntax_match = re.match(r"(?P<full_name>.+)?\|(?P<user_id>\d+)$", name)
            if id_syntax_match:
                full_name = id_syntax_match.group("full_name")
                id = int(id_syntax_match.group("user_id"))
                user = db_data["mention_data"].get_user_by_id(id)
                # For @**name|id**, we need to specifically check that
                # name matches the full_name of user in mention_data.
                # This enforces our decision that
                # @**user_1_name|id_for_user_2** should be invalid syntax.
                if full_name:
                    if user and user["full_name"] != full_name:
                        return None, None, None
            else:
                # For @**name** syntax.
                user = db_data["mention_data"].get_user_by_name(name)
            if wildcard:
                self.md.zulip_message.mentions_wildcard = True
                user_id = "*"
            elif user:
                # Silent mentions don't notify, so don't record the user ID.
                if not silent:
                    self.md.zulip_message.mentions_user_ids.add(user["id"])
                name = user["full_name"]
                user_id = str(user["id"])
            else:
                # Don't highlight @mentions that don't refer to a valid user
                return None, None, None
            el = Element("span")
            el.set("data-user-id", user_id)
            text = f"{name}"
            if silent:
                el.set("class", "user-mention silent")
            else:
                # Only non-silent mentions get the leading "@" in the text.
                el.set("class", "user-mention")
                text = f"@{text}"
            el.text = markdown.util.AtomicString(text)
            return el, m.start(), m.end()
        return None, None, None
class UserGroupMentionPattern(markdown.inlinepatterns.InlineProcessor):
    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        """Render matched user-group mentions as styled spans."""
        db_data = self.md.zulip_db_data
        if not self.md.zulip_message or db_data is None:
            return None, None, None
        name = extract_user_group(m.group(1))
        user_group = db_data["mention_data"].get_user_group(name)
        if not user_group:
            # Don't highlight @-mentions that don't refer to a valid user
            # group.
            return None, None, None
        self.md.zulip_message.mentions_user_group_ids.add(user_group.id)
        el = Element("span")
        el.set("class", "user-group-mention")
        el.set("data-user-group-id", str(user_group.id))
        el.text = markdown.util.AtomicString(f"@{user_group.name}")
        return el, m.start(), m.end()
class StreamPattern(markdown.inlinepatterns.InlineProcessor):
    def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
        # Like the superclass's small __init__, except that the caller
        # supplies an already-compiled regex, so no compilation happens here.
        self.md = md
        self.compiled_re = compiled_re

    def find_stream_by_name(self, name: str) -> Optional[Dict[str, Any]]:
        """Look up a stream dict by name in the prefetched per-message data."""
        db_data = self.md.zulip_db_data
        if db_data is None:
            return None
        return db_data["stream_names"].get(name)

    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        name = m.group("stream_name")
        if not self.md.zulip_message:
            return None, None, None
        stream = self.find_stream_by_name(name)
        if stream is None:
            return None, None, None
        el = Element("a")
        el.set("class", "stream")
        el.set("data-stream-id", str(stream["id"]))
        # TODO: We should quite possibly not be specifying the
        # href here and instead having the browser auto-add the
        # href when it processes a message with one of these, to
        # provide more clarity to API clients.
        # Also do the same for StreamTopicPattern.
        el.set("href", "/#narrow/stream/{}".format(encode_stream(stream["id"], name)))
        el.text = markdown.util.AtomicString(f"#{name}")
        return el, m.start(), m.end()
class StreamTopicPattern(markdown.inlinepatterns.InlineProcessor):
    def __init__(self, compiled_re: Pattern[str], md: markdown.Markdown) -> None:
        # This is similar to the superclass's small __init__ function,
        # but we skip the compilation step and let the caller give us
        # a compiled regex.
        self.compiled_re = compiled_re
        self.md = md

    def find_stream_by_name(self, name: str) -> Optional[Dict[str, Any]]:
        # Look up a stream dict by name in the prefetched per-message data;
        # None when rendering without message context.
        db_data = self.md.zulip_db_data
        if db_data is None:
            return None
        stream = db_data["stream_names"].get(name)
        return stream

    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        """Render a linked reference to a specific topic within a stream."""
        stream_name = m.group("stream_name")
        topic_name = m.group("topic_name")
        if self.md.zulip_message:
            stream = self.find_stream_by_name(stream_name)
            if stream is None or topic_name is None:
                return None, None, None
            el = Element("a")
            el.set("class", "stream-topic")
            el.set("data-stream-id", str(stream["id"]))
            # See the TODO on StreamPattern about specifying href here.
            stream_url = encode_stream(stream["id"], stream_name)
            topic_url = hash_util_encode(topic_name)
            link = f"/#narrow/stream/{stream_url}/topic/{topic_url}"
            el.set("href", link)
            text = f"#{stream_name} > {topic_name}"
            el.text = markdown.util.AtomicString(text)
            return el, m.start(), m.end()
        return None, None, None
def possible_linked_stream_names(content: str) -> Set[str]:
    """Return every stream name referenced via stream or stream>topic link syntax."""
    names: Set[str] = set(re.findall(STREAM_LINK_REGEX, content, re.VERBOSE))
    names.update(
        m.group("stream_name")
        for m in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE)
    )
    return names
class AlertWordNotificationProcessor(markdown.preprocessors.Preprocessor):
    """Scan message content for users' alert words.

    No rendering changes are made; we only record which users have an
    alert word appearing (as a standalone word) in the message.
    """

    # Characters allowed immediately before an alert word for it to count
    # as a standalone word rather than part of a larger token.
    allowed_before_punctuation = {" ", "\n", "(", '"', ".", ",", "'", ";", "[", "*", "`", ">"}
    # Characters allowed immediately after an alert word.  Bug fix: this
    # previously contained the two-character string '",', which could never
    # equal the single character tested below; it was evidently a typo for
    # '"' (the comma is listed separately already).
    allowed_after_punctuation = {
        " ",
        "\n",
        ")",
        '"',
        "?",
        ":",
        ".",
        ",",
        "'",
        ";",
        "]",
        "!",
        "*",
        "`",
    }

    def check_valid_start_position(self, content: str, index: int) -> bool:
        # *index* is the position of the character just before the match;
        # index <= 0 means the match starts at the beginning of content.
        if index <= 0 or content[index] in self.allowed_before_punctuation:
            return True
        return False

    def check_valid_end_position(self, content: str, index: int) -> bool:
        # *index* is the position of the character just after the match;
        # index >= len(content) means the match ends the content.
        if index >= len(content) or content[index] in self.allowed_after_punctuation:
            return True
        return False

    def run(self, lines: List[str]) -> List[str]:
        db_data = self.md.zulip_db_data
        if self.md.zulip_message and db_data is not None:
            # We check for alert words here, the set of which are
            # dependent on which users may see this message.
            #
            # Our caller passes in the list of possible_words. We
            # don't do any special rendering; we just append the alert words
            # we find to the set self.md.zulip_message.alert_words.
            realm_alert_words_automaton = db_data["realm_alert_words_automaton"]
            if realm_alert_words_automaton is not None:
                content = "\n".join(lines).lower()
                # The automaton yields (end_index, payload) pairs, where
                # end_index points at the last character of the match, so
                # end_index - len(word) is the character before the match
                # and end_index + 1 the character after it.
                for end_index, (original_value, user_ids) in realm_alert_words_automaton.iter(
                    content
                ):
                    if self.check_valid_start_position(
                        content, end_index - len(original_value)
                    ) and self.check_valid_end_position(content, end_index + 1):
                        self.md.zulip_message.user_ids_with_alert_words.update(user_ids)
        return lines
class LinkInlineProcessor(markdown.inlinepatterns.LinkInlineProcessor):
    def zulip_specific_link_changes(self, el: Element) -> Union[None, Element]:
        # Post-process an <a> element produced by the upstream link parser;
        # returns None to reject the link entirely.
        href = el.get("href")
        assert href is not None

        # Sanitize URL or don't parse link. See linkify_tests in markdown_test_cases for banned syntax.
        href = sanitize_url(self.unescape(href.strip()))
        if href is None:
            return None  # no-op; the link is not processed.

        # Rewrite local links to be relative
        db_data = self.md.zulip_db_data
        href = rewrite_local_links_to_relative(db_data, href)

        # Make changes to <a> tag attributes
        el.set("href", href)

        # Show link href if title is empty
        if not el.text or not el.text.strip():
            el.text = href

        # Prevent linkifiers from running on the content of a Markdown link, breaking up the link.
        # This is a monkey-patch, but it might be worth sending a version of this change upstream.
        el.text = markdown.util.AtomicString(el.text)

        return el

    def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
        self, m: Match[str], data: str
    ) -> Union[Tuple[None, None, None], Tuple[Element, int, int]]:
        # Delegate parsing to upstream, then apply Zulip's adjustments.
        ret = super().handleMatch(m, data)
        if ret[0] is not None:
            el: Optional[Element]
            el, match_start, index = ret
            el = self.zulip_specific_link_changes(el)
            if el is not None:
                return el, match_start, index
        return None, None, None
def get_sub_registry(r: markdown.util.Registry, keys: List[str]) -> markdown.util.Registry:
    """Return a new Registry containing only the named items from *r*,
    keeping each item's original priority.

    Registry is a new class added by Python-Markdown to replace OrderedDict.
    Since Registry doesn't support .keys(), it is easier to make a new
    object instead of removing keys from the existing object.
    """
    sub_registry = markdown.util.Registry()
    for key in keys:
        sub_registry.register(r[key], key, r.get_index_for_name(key))
    return sub_registry
# These are used as keys ("linkifiers_keys") to md_engines and the respective
# linkifier caches.
# NOTE(review): negative sentinel values; presumably chosen to avoid
# colliding with other (non-negative) keys — verify against md_engines usage.
DEFAULT_MARKDOWN_KEY = -1
ZEPHYR_MIRROR_MARKDOWN_KEY = -2
class Markdown(markdown.Markdown):
    # Zulip-specific state read during rendering (e.g. by tree processors).
    # NOTE(review): these are not assigned in __init__; they appear to be
    # set by callers before rendering each message — verify.
    zulip_message: Optional[Message]
    zulip_realm: Optional[Realm]
    zulip_db_data: Optional[DbData]
    image_preview_enabled: bool
    url_embed_preview_enabled: bool
    def __init__(
        self,
        linkifiers: List[LinkifierDict],
        linkifiers_key: int,
        email_gateway: bool,
    ) -> None:
        """Configure the engine with the realm's linkifiers and Zulip's
        standard extension set (nl2br, tables, codehilite)."""
        self.linkifiers = linkifiers
        self.linkifiers_key = linkifiers_key
        self.email_gateway = email_gateway
        super().__init__(
            extensions=[
                nl2br.makeExtension(),
                tables.makeExtension(),
                codehilite.makeExtension(
                    linenums=False,
                    guess_lang=False,
                ),
            ],
        )
        self.set_output_format("html")
def build_parser(self) -> markdown.Markdown:
# Build the parser using selected default features from Python-Markdown.
# The complete list of all available processors can be found in the
# super().build_parser() function.
#
# Note: for any Python-Markdown updates, manually check if we want any
# of the new features added upstream or not; they wouldn't get
# included by default.
self.preprocessors = self.build_preprocessors()
self.parser = self.build_block_parser()
self.inlinePatterns = self.build_inlinepatterns()
self.treeprocessors = self.build_treeprocessors()
self.postprocessors = self.build_postprocessors()
self.handle_zephyr_mirror()
return self
def build_preprocessors(self) -> markdown.util.Registry:
# We disable the following preprocessors from upstream:
#
# html_block - insecure
# reference - references don't make sense in a chat context.
preprocessors = markdown.util.Registry()
preprocessors.register(MarkdownListPreprocessor(self), "hanging_lists", 35)
preprocessors.register(
markdown.preprocessors.NormalizeWhitespace(self), "normalize_whitespace", 30
)
preprocessors.register(fenced_code.FencedBlockPreprocessor(self), "fenced_code_block", 25)
preprocessors.register(
AlertWordNotificationProcessor(self), "custom_text_notifications", 20
)
return preprocessors
def build_block_parser(self) -> BlockParser:
# We disable the following blockparsers from upstream:
#
# indent - replaced by ours
# setextheader - disabled; we only support hashheaders for headings
# olist - replaced by ours
# ulist - replaced by ours
# quote - replaced by ours
parser = BlockParser(self)
parser.blockprocessors.register(
markdown.blockprocessors.EmptyBlockProcessor(parser), "empty", 95
)
parser.blockprocessors.register(ListIndentProcessor(parser), "indent", 90)
if not self.email_gateway:
parser.blockprocessors.register(
markdown.blockprocessors.CodeBlockProcessor(parser), "code", 85
)
parser.blockprocessors.register(HashHeaderProcessor(parser), "hashheader", 80)
# We get priority 75 from 'table' extension
parser.blockprocessors.register(markdown.blockprocessors.HRProcessor(parser), "hr", 70)
parser.blockprocessors.register(OListProcessor(parser), "olist", 65)
parser.blockprocessors.register(UListProcessor(parser), "ulist", 60)
parser.blockprocessors.register(BlockQuoteProcessor(parser), "quote", 55)
parser.blockprocessors.register(
markdown.blockprocessors.ParagraphProcessor(parser), "paragraph", 50
)
return parser
def build_inlinepatterns(self) -> markdown.util.Registry:
# We disable the following upstream inline patterns:
#
# backtick - replaced by ours
# escape - probably will re-add at some point.
# link - replaced by ours
# image_link - replaced by ours
# autolink - replaced by ours
# automail - replaced by ours
# linebreak - we use nl2br and consider that good enough
# html - insecure
# reference - references not useful
# image_reference - references not useful
# short_reference - references not useful
# ---------------------------------------------------
# strong_em - for these three patterns,
# strong2 - we have our own versions where
# emphasis2 - we disable _ for bold and emphasis
# Declare regexes for clean single line calls to .register().
NOT_STRONG_RE = markdown.inlinepatterns.NOT_STRONG_RE
# Custom strikethrough syntax: ~~foo~~
DEL_RE = r"(?<!~)(\~\~)([^~\n]+?)(\~\~)(?!~)"
# Custom bold syntax: **foo** but not __foo__
# str inside ** must start and end with a word character
# it need for things like "const char *x = (char *)y"
EMPHASIS_RE = r"(\*)(?!\s+)([^\*^\n]+)(?<!\s)\*"
ENTITY_RE = markdown.inlinepatterns.ENTITY_RE
STRONG_EM_RE = r"(\*\*\*)(?!\s+)([^\*^\n]+)(?<!\s)\*\*\*"
# Add inline patterns. We use a custom numbering of the
# rules, that preserves the order from upstream but leaves
# space for us to add our own.
reg = markdown.util.Registry()
reg.register(BacktickInlineProcessor(markdown.inlinepatterns.BACKTICK_RE), "backtick", 105)
reg.register(
markdown.inlinepatterns.DoubleTagPattern(STRONG_EM_RE, "strong,em"), "strong_em", 100
)
reg.register(UserMentionPattern(mention.find_mentions, self), "usermention", 95)
reg.register(
Tex(r"\B(?<!\$)\$\$(?P<body>[^\n_$](\\\$|[^$\n])*)\$\$(?!\$)\B", self), "tex", 90
)
reg.register(StreamTopicPattern(get_compiled_stream_topic_link_regex(), self), "topic", 87)
reg.register(StreamPattern(get_compiled_stream_link_regex(), self), "stream", 85)
reg.register(Timestamp(r"<time:(?P<time>[^>]*?)>"), "timestamp", 75)
reg.register(
UserGroupMentionPattern(mention.user_group_mentions, self), "usergroupmention", 65
)
reg.register(LinkInlineProcessor(markdown.inlinepatterns.LINK_RE, self), "link", 60)
reg.register(AutoLink(get_web_link_regex(), self), "autolink", 55)
# Reserve priority 45-54 for linkifiers
reg = self.register_linkifiers(reg)
reg.register(markdown.inlinepatterns.HtmlInlineProcessor(ENTITY_RE, self), "entity", 40)
reg.register(
markdown.inlinepatterns.SimpleTagPattern(r"(\*\*)([^\n]+?)\2", "strong"), "strong", 35
)
reg.register(markdown.inlinepatterns.SimpleTagPattern(EMPHASIS_RE, "em"), "emphasis", 30)
reg.register(markdown.inlinepatterns.SimpleTagPattern(DEL_RE, "del"), "del", 25)
reg.register(
markdown.inlinepatterns.SimpleTextInlineProcessor(NOT_STRONG_RE), "not_strong", 20
)
reg.register(Emoji(EMOJI_REGEX, self), "emoji", 15)
reg.register(EmoticonTranslation(emoticon_regex, self), "translate_emoticons", 10)
# We get priority 5 from 'nl2br' extension
reg.register(UnicodeEmoji(unicode_emoji_regex), "unicodeemoji", 0)
return reg
def register_linkifiers(self, inlinePatterns: markdown.util.Registry) -> markdown.util.Registry:
for linkifier in self.linkifiers:
pattern = linkifier["pattern"]
inlinePatterns.register(
LinkifierPattern(pattern, linkifier["url_format"], self),
f"linkifiers/{pattern}",
45,
)
return inlinePatterns
def build_treeprocessors(self) -> markdown.util.Registry:
# Here we build all the processors from upstream, plus a few of our own.
treeprocessors = markdown.util.Registry()
# We get priority 30 from 'hilite' extension
treeprocessors.register(markdown.treeprocessors.InlineProcessor(self), "inline", 25)
treeprocessors.register(markdown.treeprocessors.PrettifyTreeprocessor(self), "prettify", 20)
treeprocessors.register(
InlineInterestingLinkProcessor(self), "inline_interesting_links", 15
)
if settings.CAMO_URI:
treeprocessors.register(InlineImageProcessor(self), "rewrite_images_proxy", 10)
return treeprocessors
def build_postprocessors(self) -> markdown.util.Registry:
# These are the default Python-Markdown processors, unmodified.
postprocessors = markdown.util.Registry()
postprocessors.register(markdown.postprocessors.RawHtmlPostprocessor(self), "raw_html", 20)
postprocessors.register(
markdown.postprocessors.AndSubstitutePostprocessor(), "amp_substitute", 15
)
postprocessors.register(markdown.postprocessors.UnescapePostprocessor(), "unescape", 10)
return postprocessors
def handle_zephyr_mirror(self) -> None:
if self.linkifiers_key == ZEPHYR_MIRROR_MARKDOWN_KEY:
# Disable almost all inline patterns for zephyr mirror
# users' traffic that is mirrored. Note that
# inline_interesting_links is a treeprocessor and thus is
# not removed
self.inlinePatterns = get_sub_registry(self.inlinePatterns, ["autolink"])
self.treeprocessors = get_sub_registry(
self.treeprocessors, ["inline_interesting_links", "rewrite_images_proxy"]
)
# insert new 'inline' processor because we have changed self.inlinePatterns
# but InlineProcessor copies md as self.md in __init__.
self.treeprocessors.register(
markdown.treeprocessors.InlineProcessor(self), "inline", 25
)
self.preprocessors = get_sub_registry(self.preprocessors, ["custom_text_notifications"])
self.parser.blockprocessors = get_sub_registry(
self.parser.blockprocessors, ["paragraph"]
)
# Cache of engines, one per (linkifiers_key, email_gateway) pair, built
# lazily by make_md_engine / maybe_update_markdown_engines.
md_engines: Dict[Tuple[int, bool], Markdown] = {}
# Last-seen linkifier configuration per linkifiers_key, used by
# maybe_update_markdown_engines to detect changes.
linkifier_data: Dict[int, List[LinkifierDict]] = {}
def make_md_engine(linkifiers_key: int, email_gateway: bool) -> None:
    """(Re)build and cache the Markdown engine for this key pair.

    Any previously cached engine for the same key is discarded first;
    the linkifier configuration is taken from linkifier_data.
    """
    engine_key = (linkifiers_key, email_gateway)
    md_engines.pop(engine_key, None)
    md_engines[engine_key] = Markdown(
        linkifiers=linkifier_data[linkifiers_key],
        linkifiers_key=linkifiers_key,
        email_gateway=email_gateway,
    )
# Split the topic name into multiple sections so that we can easily use
# our common single link matching regex on it.  Note that every delimiter
# in this character class is exactly one character wide.
basic_link_splitter = re.compile(r"[ !;\?\),\'\"]")
# Security note: We don't do any HTML escaping in this
# function on the URLs; they are expected to be HTML-escaped when
# rendered by clients (just as links rendered into message bodies
# are validated and escaped inside `url_to_a`).
def topic_links(linkifiers_key: int, topic_name: str) -> List[Dict[str, str]]:
    """Return the links (linkifier matches and raw URLs) found in a topic name.

    Each entry is a dict with "url" and "text" keys, ordered by position of
    occurrence in the topic.  See the security note above: URLs are not
    HTML-escaped here; clients escape when rendering.
    """
    matches: List[Dict[str, Union[str, int]]] = []
    linkifiers = linkifiers_for_realm(linkifiers_key)
    for linkifier in linkifiers:
        raw_pattern = linkifier["pattern"]
        url_format_string = linkifier["url_format"]
        pattern = prepare_linkifier_pattern(raw_pattern)
        for m in re.finditer(pattern, topic_name):
            match_details = m.groupdict()
            match_text = match_details["linkifier_actual_match"]
            # We format the linkifier's url string using the matched text.
            # Also, we include the matched text in the response, so that our clients
            # don't have to implement any logic of their own to get back the text.
            matches += [
                dict(
                    url=url_format_string % match_details,
                    text=match_text,
                    # BUGFIX: use the match's own start position.  The previous
                    # topic_name.find(match_text) returned the FIRST occurrence
                    # for repeated matches, breaking the order-preserving sort.
                    index=m.start("linkifier_actual_match"),
                )
            ]

    # Also make raw URLs navigable.  We track our running offset into
    # topic_name so repeated URLs get their true positions (each delimiter
    # matched by basic_link_splitter is exactly one character wide).
    pos = 0
    for sub_string in basic_link_splitter.split(topic_name):
        link_match = re.match(get_web_link_regex(), sub_string)
        if link_match:
            actual_match_url = link_match.group("url")
            result = urlsplit(actual_match_url)
            if not result.scheme:
                if not result.netloc:
                    i = (result.path + "/").index("/")
                    result = result._replace(netloc=result.path[:i], path=result.path[i:])
                url = result._replace(scheme="https").geturl()
            else:
                url = actual_match_url
            matches.append(
                dict(url=url, text=actual_match_url, index=pos + link_match.start("url"))
            )
        pos += len(sub_string) + 1

    # In order to preserve the order in which the links occur, we sort the matched text
    # based on its starting index in the topic.  We pop the index field before returning.
    matches = sorted(matches, key=lambda k: k["index"])
    return [{k: str(v) for k, v in match.items() if k != "index"} for match in matches]
def maybe_update_markdown_engines(linkifiers_key: int, email_gateway: bool) -> None:
    """Ensure md_engines has an up-to-date engine for this key pair.

    Refreshes the cached linkifier data (and any already-built engines
    using it) when the realm's linkifiers have changed, then builds the
    requested engine if it doesn't exist yet.
    """
    global linkifier_data

    linkifiers = linkifiers_for_realm(linkifiers_key)
    if linkifier_data.get(linkifiers_key) != linkifiers:
        # Linkifier data has changed: update the cache and rebuild any
        # existing Markdown engines using this set of linkifiers (we do
        # not create engines that don't already exist).
        linkifier_data[linkifiers_key] = linkifiers
        for gateway_flag in (True, False):
            if (linkifiers_key, gateway_flag) in md_engines:
                make_md_engine(linkifiers_key, gateway_flag)

    if (linkifiers_key, email_gateway) not in md_engines:
        # No engine cached for this key pair yet; create one.
        make_md_engine(linkifiers_key, email_gateway)
# We want to log Markdown parser failures, but shouldn't log the actual input
# message for privacy reasons.  The compromise is to replace all alphanumeric
# characters with 'x'.
#
# We also use repr() to improve reproducibility, and to escape terminal control
# codes, which can do surprisingly nasty things.
_privacy_re = re.compile(r"\w", flags=re.UNICODE)


def privacy_clean_markdown(content: str) -> str:
    """Return a repr() of *content* with every word character masked as 'x'."""
    masked = _privacy_re.sub("x", content)
    return repr(masked)
def get_possible_mentions_info(realm_id: int, mention_texts: Set[str]) -> List[FullNameInfo]:
    """Fetch id/full_name/email rows for active users matching mention texts.

    Supports the three mention syntaxes: **name**, **name|id**, and **|id**.
    Returns an empty list when no mention texts were found in the message.
    """
    if not mention_texts:
        return []

    name_re = r"(?P<full_name>.+)?\|(?P<mention_id>\d+)$"
    filters = set()
    for text in mention_texts:
        syntax_match = re.match(name_re, text)
        if syntax_match is None:
            # Plain **name** syntax.
            filters.add(Q(full_name__iexact=text))
        elif syntax_match.group("full_name"):
            # **name|id** syntax; mention_id cannot be null here.
            filters.add(
                Q(
                    full_name__iexact=syntax_match.group("full_name"),
                    id=syntax_match.group("mention_id"),
                )
            )
        else:
            # **|id** syntax.
            filters.add(Q(id=syntax_match.group("mention_id")))

    rows = (
        UserProfile.objects.filter(
            realm_id=realm_id,
            is_active=True,
        )
        .filter(
            functools.reduce(lambda a, b: a | b, filters),
        )
        .values(
            "id",
            "full_name",
            "email",
        )
    )
    return list(rows)
class MentionData:
    """Pre-fetched lookup tables for user/user-group mentions in a message."""

    def __init__(self, realm_id: int, content: str) -> None:
        mention_texts, has_wildcards = possible_mentions(content)
        info_rows = get_possible_mentions_info(realm_id, mention_texts)
        self.full_name_info = {row["full_name"].lower(): row for row in info_rows}
        self.user_id_info = {row["id"]: row for row in info_rows}
        self.init_user_group_data(realm_id=realm_id, content=content)
        self.has_wildcards = has_wildcards

    def message_has_wildcards(self) -> bool:
        """True if the message contains a wildcard mention (@all etc.)."""
        return self.has_wildcards

    def init_user_group_data(self, realm_id: int, content: str) -> None:
        """Populate group-name and group-membership lookup tables."""
        group_names = possible_user_group_mentions(content)
        self.user_group_name_info = get_user_group_name_info(realm_id, group_names)
        self.user_group_members: Dict[int, List[int]] = defaultdict(list)
        group_ids = [group.id for group in self.user_group_name_info.values()]

        if not group_ids:
            # Early-return to avoid the cost of hitting the ORM,
            # which shows up in profiles.
            return

        membership = UserGroupMembership.objects.filter(user_group_id__in=group_ids)
        for row in membership.values("user_group_id", "user_profile_id"):
            self.user_group_members[row["user_group_id"]].append(row["user_profile_id"])

    def get_user_by_name(self, name: str) -> Optional[FullNameInfo]:
        # warning: get_user_by_name is not dependable if two
        # users of the same full name are mentioned.  Use
        # get_user_by_id where possible.
        return self.full_name_info.get(name.lower())

    def get_user_by_id(self, id: int) -> Optional[FullNameInfo]:
        return self.user_id_info.get(id)

    def get_user_ids(self) -> Set[int]:
        """
        Returns the user IDs that might have been mentioned by this
        content.  Because this data structure has not parsed the message
        and knows nothing about escaping/code blocks, this overestimates
        the list of user ids.
        """
        return set(self.user_id_info.keys())

    def get_user_group(self, name: str) -> Optional[UserGroup]:
        return self.user_group_name_info.get(name.lower())

    def get_group_members(self, user_group_id: int) -> List[int]:
        return self.user_group_members.get(user_group_id, [])
def get_user_group_name_info(realm_id: int, user_group_names: Set[str]) -> Dict[str, UserGroup]:
    """Map lowercased group name -> UserGroup for the given names in the realm."""
    if not user_group_names:
        return {}
    groups = UserGroup.objects.filter(realm_id=realm_id, name__in=user_group_names)
    return {group.name.lower(): group for group in groups}
def get_stream_name_info(realm: Realm, stream_names: Set[str]) -> Dict[str, FullNameInfo]:
    """Map stream name -> {id, name} row for active streams matching *stream_names*."""
    if not stream_names:
        return {}

    name_filters = {Q(name=name) for name in stream_names}
    rows = (
        get_active_streams(
            realm=realm,
        )
        .filter(
            functools.reduce(lambda a, b: a | b, name_filters),
        )
        .values(
            "id",
            "name",
        )
    )
    return {row["name"]: row for row in rows}
def do_convert(
    content: str,
    realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
    message: Optional[Message] = None,
    message_realm: Optional[Realm] = None,
    sent_by_bot: bool = False,
    translate_emoticons: bool = False,
    mention_data: Optional[MentionData] = None,
    email_gateway: bool = False,
    no_previews: bool = False,
) -> str:
    """Convert Markdown to HTML, with Zulip-specific settings and hacks.

    Picks (or builds) the cached engine for the relevant realm, attaches
    per-message state to it, renders with a 5-second timeout, and always
    detaches the state afterwards.  Raises MarkdownRenderingException on
    any rendering failure or when the output is implausibly huge.
    """
    # This logic is a bit convoluted, but the overall goal is to support a range of use cases:
    # * Nothing is passed in other than content -> just run default options (e.g. for docs)
    # * message is passed, but no realm is -> look up realm from message
    # * message_realm is passed -> use that realm for Markdown purposes
    if message is not None:
        if message_realm is None:
            message_realm = message.get_realm()
    if message_realm is None:
        linkifiers_key = DEFAULT_MARKDOWN_KEY
    else:
        linkifiers_key = message_realm.id

    # Identifier used only in log/error messages below.
    if message and hasattr(message, "id") and message.id:
        logging_message_id = "id# " + str(message.id)
    else:
        logging_message_id = "unknown"

    if message is not None and message_realm is not None:
        if message_realm.is_zephyr_mirror_realm:
            if message.sending_client.name == "zephyr_mirror":
                # Use slightly customized Markdown processor for content
                # delivered via zephyr_mirror
                linkifiers_key = ZEPHYR_MIRROR_MARKDOWN_KEY

    maybe_update_markdown_engines(linkifiers_key, email_gateway)
    md_engine_key = (linkifiers_key, email_gateway)
    _md_engine = md_engines[md_engine_key]

    # Reset the parser; otherwise it will get slower over time.
    _md_engine.reset()

    # Filters such as UserMentionPattern need a message.
    _md_engine.zulip_message = message
    _md_engine.zulip_realm = message_realm
    _md_engine.zulip_db_data = None  # for now
    _md_engine.image_preview_enabled = image_preview_enabled(message, message_realm, no_previews)
    _md_engine.url_embed_preview_enabled = url_embed_preview_enabled(
        message, message_realm, no_previews
    )

    # Pre-fetch data from the DB that is used in the Markdown thread
    if message_realm is not None:
        # Here we fetch the data structures needed to render
        # mentions/stream mentions from the database, but only
        # if there is syntax in the message that might use them, since
        # the fetches are somewhat expensive and these types of syntax
        # are uncommon enough that it's a useful optimization.

        if mention_data is None:
            mention_data = MentionData(message_realm.id, content)

        stream_names = possible_linked_stream_names(content)
        stream_name_info = get_stream_name_info(message_realm, stream_names)

        if content_has_emoji_syntax(content):
            active_realm_emoji = message_realm.get_active_emoji()
        else:
            active_realm_emoji = {}

        _md_engine.zulip_db_data = {
            "realm_alert_words_automaton": realm_alert_words_automaton,
            "mention_data": mention_data,
            "active_realm_emoji": active_realm_emoji,
            "realm_uri": message_realm.uri,
            "sent_by_bot": sent_by_bot,
            "stream_names": stream_name_info,
            "translate_emoticons": translate_emoticons,
        }

    try:
        # Spend at most 5 seconds rendering; this protects the backend
        # from being overloaded by bugs (e.g. Markdown logic that is
        # extremely inefficient in corner cases) as well as user
        # errors (e.g. a linkifier that makes some syntax
        # infinite-loop).
        rendered_content = timeout(5, lambda: _md_engine.convert(content))

        # Throw an exception if the content is huge; this protects the
        # rest of the codebase from any bugs where we end up rendering
        # something huge.
        if len(rendered_content) > MAX_MESSAGE_LENGTH * 10:
            raise MarkdownRenderingException(
                f"Rendered content exceeds {MAX_MESSAGE_LENGTH * 10} characters (message {logging_message_id})"
            )
        return rendered_content
    except Exception:
        cleaned = privacy_clean_markdown(content)
        # NOTE: Don't change this message without also changing the
        # logic in logging_handlers.py or we can create recursive
        # exceptions.
        markdown_logger.exception(
            "Exception in Markdown parser; input (sanitized) was: %s\n (message %s)",
            cleaned,
            logging_message_id,
        )
        raise MarkdownRenderingException()
    finally:
        # These next three lines are slightly paranoid, since
        # we always set these right before actually using the
        # engine, but better safe then sorry.
        _md_engine.zulip_message = None
        _md_engine.zulip_realm = None
        _md_engine.zulip_db_data = None
# Rendering-time bookkeeping, accumulated across requests by
# markdown_stats_start / markdown_stats_finish below.
markdown_time_start = 0.0
markdown_total_time = 0.0
markdown_total_requests = 0
def get_markdown_time() -> float:
    """Return the total wall-clock seconds spent rendering Markdown so far."""
    return markdown_total_time
def get_markdown_requests() -> int:
    """Return the number of Markdown rendering requests counted so far."""
    return markdown_total_requests
def markdown_stats_start() -> None:
    """Record the wall-clock start time of a rendering request."""
    global markdown_time_start
    markdown_time_start = time.time()
def markdown_stats_finish() -> None:
    """Add the elapsed time since markdown_stats_start to the running totals."""
    global markdown_total_time
    global markdown_total_requests
    global markdown_time_start
    markdown_total_requests += 1
    markdown_total_time += time.time() - markdown_time_start
def markdown_convert(
    content: str,
    realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
    message: Optional[Message] = None,
    message_realm: Optional[Realm] = None,
    sent_by_bot: bool = False,
    translate_emoticons: bool = False,
    mention_data: Optional[MentionData] = None,
    email_gateway: bool = False,
    no_previews: bool = False,
) -> str:
    """Render *content* via do_convert while tracking aggregate timing stats.

    Thin wrapper around do_convert that brackets the call with
    markdown_stats_start/markdown_stats_finish.
    """
    markdown_stats_start()
    try:
        return do_convert(
            content,
            realm_alert_words_automaton,
            message,
            message_realm,
            sent_by_bot,
            translate_emoticons,
            mention_data,
            email_gateway,
            no_previews=no_previews,
        )
    finally:
        # BUGFIX: finalize stats in a finally block.  Previously a rendering
        # failure (do_convert raising MarkdownRenderingException) skipped
        # markdown_stats_finish, leaving the request uncounted and
        # markdown_time_start stale.
        markdown_stats_finish()
|
apache-2.0
| -5,365,257,474,454,156,000
| 38.076582
| 160
| 0.587632
| false
| 3.962188
| false
| false
| false
|
jiangdexiang/awesome-python-webapp
|
www/transwarp/db.py
|
1
|
8617
|
#!/usr/bin/env python
#-*-coding:utf8-*-
__author__ = 'jiangdexiang'
__version__ = '1.0'
__all__ = ['__author__', '__version__']
'''
Database operation module
'''
import threading
import time
import uuid
import functools
import logging
class Dict(dict):
    """
    Simple dict but support access as x.y style.

    >>> d1 = Dict()
    >>> d1['x'] = 100
    >>> d1.x
    100
    >>> d1['y'] = 200
    >>> d1.y
    200
    >>> d2 = Dict(a=1, b=2, c='3')
    >>> d2.c
    '3'
    >>> d2['empty']
    Traceback (most recent call last):
        ...
    KeyError: 'empty'
    """
    # BUGFIX: the doctests above previously referenced a nonexistent class
    # `Dirt` and had malformed Traceback/KeyError lines, so the
    # doctest.testmod() call in __main__ always failed.

    def __init__(self, names=(), values=(), **kw):
        # Pair up positional names/values (zip truncates to the shorter one),
        # then apply any keyword items on top.
        super(Dict, self).__init__(**kw)
        for k, v in zip(names, values):
            self[k] = v

    def __getattr__(self, key):
        # Attribute access falls back to item access; missing keys are
        # reported as AttributeError so hasattr() etc. behave correctly.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(r"'Dict' object has no attribute '%s'" % key)

    def __setattr__(self, key, value):
        # All attribute assignment is stored as dict items.
        self[key] = value
def next_id(t=None):
    """
    Return next id as a 50-char string: a 15-digit millisecond timestamp,
    the 32 hex digits of a random uuid4, and a literal '000' suffix.

    Args:
        t: unix timestamp, default to None and using time.time().
    """
    millis = int((time.time() if t is None else t) * 1000)
    return '%015d%s000' % (millis, uuid.uuid4().hex)
def _profiling(start, sql=''):
t = time.time() - start
if t > 0.1:
logging.warning('[PROFILING] [DB] %s: %s' % (t, sql))
else:
logging.info('[PROFILING] [DB] %s: %s' % (t, sql))
class DBError(Exception):
    """Base exception for all errors raised by this db module."""
    pass
class MultiColumnsError(DBError):
    """Raised when a single-column query (select_int) returns multiple columns."""
    pass
class _LasyConnection(object):
def __init__(self):
self.connection = None
def cursor(self):
if self.connection is None:
connection = engine.connect()
logging.info('open connection <%s>...' % hex(id(connection)))
self.connection = connection
return self.connection.cursor()
def commit(self):
self.connection.commit()
def rollback(self):
self.connection.rollback()
def cleanup(self):
if self.connection:
connection = self.connection
self.connection = None
logging.info('close connection <%s>...' % hex(id(connection)))
connection.close()
class _DbCtx(threading.local):
"""
Thread local object that holds connection info.
"""
def __init__(self):
self.connection = None
self.transactions = 0
def is_init(self):
return not self.connection is None
def init(self):
self.connection = _LasyConnection()
self.transactions = 0
def cleanup(self):
self.connection.cleanup()
self.connection = None
def cursor(self):
"""
return cursor
:return:
"""
return self.connection.cursor()
# Thread-local db context; every thread sees its own connection/transactions.
_db_ctx = _DbCtx()

# Global engine object, initialized exactly once by create_engine().
engine = None
class _Engine(object):
def __init__(self, connect):
self._connect = connect
def connect(self):
return self._connect()
def create_engine(user, password, database, host='127.0.0.1', port=3306, **kw):
    """
    Initialize the global mysql engine singleton.

    Extra keyword args override the sensible defaults (utf8, autocommit off).
    Raises DBError if the engine was already initialized.
    """
    # BUGFIX: the MySQL Connector/Python package is mysql.connector;
    # the original `import mysql.connection` always raised ImportError.
    import mysql.connector
    global engine
    if engine is not None:
        raise DBError('Engine is already initialized.')
    params = dict(user=user, password=password, database=database, host=host, port=port)
    defaults = dict(use_unicode=True, charset='utf8', collation='utf8_general_ci', autocommit=False)
    for k, v in defaults.iteritems():
        params[k] = kw.pop(k, v)
    params.update(kw)
    params['buffered'] = True
    engine = _Engine(lambda: mysql.connector.connect(**params))
    # test connection...
    logging.info('Init mysql engine <%s> ok.' % hex(id(engine)))
class _ConnectionCtx(object):
    """
    Context manager that guarantees a thread-local connection exists.

    Contexts may be nested; only the outermost one (the one that actually
    initialized the connection) cleans it up on exit.

    with connection():
        pass
        with connection():
            pass
    """

    def __enter__(self):
        global _db_ctx
        # Only the context that performs init() is responsible for cleanup.
        self.should_cleanup = False
        if not _db_ctx.is_init():
            _db_ctx.init()
            self.should_cleanup = True
        return self

    def __exit__(self, exctype, excvalue, traceback):
        global _db_ctx
        if self.should_cleanup:
            _db_ctx.cleanup()
def connections():
    """Return a _ConnectionCtx for use in a with-statement."""
    return _ConnectionCtx()
def with_connection(func):
    """
    Decorator that runs the wrapped function inside a connection context,
    so nested db calls reuse one connection.

    @with_connection
    def foo(*args, **kw):
        f1()
        f2()
        f3()
    """
    @functools.wraps(func)
    def _wrapper(*args, **kw):
        with _ConnectionCtx():
            return func(*args, **kw)

    return _wrapper
class _TransactionCtx(object):
    """
    _TransactionCtx object that can handle transactions.

    Nested usage is supported via a per-thread counter: inner contexts
    just bump the counter, and only the outermost one commits/rolls back.

    with _TransactionCtx():
        pass
    """
    def __enter__(self):
        global _db_ctx
        self.should_close_conn = False
        if not _db_ctx.is_init():
            # needs open a connection first:
            _db_ctx.init()
            self.should_close_conn = True
        # Inner (nested) transactions only increment the depth counter.
        _db_ctx.transactions += 1
        logging.info('Begin transaction...' if _db_ctx.transactions == 1 else 'join current transaction...')
        return self
    def __exit__(self, exctype, excvalue, traceback):
        global _db_ctx
        _db_ctx.transactions -= 1
        try:
            if _db_ctx.transactions == 0:
                # Outermost transaction: commit on success, rollback on error.
                if exctype is None:
                    self.commit()
                else:
                    self.rollback()
        finally:
            # Close the connection only if this context opened it.
            if self.should_close_conn:
                _db_ctx.cleanup()
    @staticmethod
    def commit():
        global _db_ctx
        logging.info('Commit transaction...')
        try:
            _db_ctx.connection.commit()
            logging.info('Commit OK.')
        except:
            # Commit failed: attempt a rollback, then re-raise the error.
            logging.warning('Commit failed. try rollback...')
            _db_ctx.connection.rollback()
            raise
    @staticmethod
    def rollback():
        global _db_ctx
        logging.warning('Rollback transaction...')
        _db_ctx.connection.rollback()
        logging.info('Rollback OK.')
def transaction():
    """Return a _TransactionCtx for use in a with-statement."""
    return _TransactionCtx()
def with_transaction(func):
    """
    Decorator that runs the wrapped function inside a transaction and
    logs its execution time via _profiling.
    """
    @functools.wraps(func)
    def _wrapper(*args, **kw):
        _start = time.time()
        try:
            with _TransactionCtx():
                return func(*args, **kw)
        finally:
            # BUGFIX: the original called _profiling(_start) after the
            # `return` statement, so it was unreachable dead code; the
            # finally block guarantees it runs even on exceptions.
            _profiling(_start)

    return _wrapper
def _select(sql, first, *args):
    """Execute select SQL; return one Dict row (first=True) or a list of Dicts.

    '?' placeholders in *sql* are translated to the driver's '%s' style.
    Returns None when first=True and no row matched.
    """
    global _db_ctx
    cursor = None
    names = []
    sql = sql.replace('?', '%s')
    logging.info('SQL: %s, ARGS: %s' % (sql, args))
    try:
        cursor = _db_ctx.connection.cursor()
        cursor.execute(sql, args)
        if cursor.description:
            # Column names come from the cursor's result description.
            names = [col[0] for col in cursor.description]
        if first:
            row = cursor.fetchone()
            return Dict(names, row) if row else None
        return [Dict(names, row) for row in cursor.fetchall()]
    finally:
        if cursor:
            cursor.close()
@with_connection
def select_one(sql, *args):
    """Execute select SQL; return the first row as a Dict, or None."""
    return _select(sql, True, *args)
@with_connection
def select_int(sql, *args):
    """Execute a select expected to yield a single column; return its value.

    Raises MultiColumnsError if the result row has more than one column.
    """
    row = _select(sql, True, *args)
    if len(row) != 1:
        raise MultiColumnsError('Expect only one column.')
    return row.values()[0]
@with_connection
def select(sql, *args):
    """Execute select SQL; return all rows as a list of Dicts."""
    return _select(sql, False, *args)
@with_connection
def _update(sql, *args):
    """Execute insert/update/delete SQL; return the affected row count.

    Auto-commits when no explicit transaction is active on this thread.
    """
    global _db_ctx
    cursor = None
    sql = sql.replace('?', '%s')
    logging.info('SQL: %s, ARGS: %s' % (sql, args))
    try:
        cursor = _db_ctx.connection.cursor()
        cursor.execute(sql, args)
        affected = cursor.rowcount
        if _db_ctx.transactions == 0:
            # No enclosing transaction: commit immediately.
            logging.info('auto commit')
            _db_ctx.connection.commit()
        return affected
    finally:
        if cursor:
            cursor.close()
def insert(table, **kw):
    """Insert one row, given as keyword args; return the affected row count."""
    cols, args = zip(*kw.iteritems())
    placeholders = ','.join(['?' for i in range(len(cols))])
    sql = 'insert into %s (%s) values (%s)' % (
        table, ','.join(['`%s`' % col for col in cols]), placeholders)
    return _update(sql, *args)
def update(sql, *args):
    """Execute insert/update/delete SQL; return the affected row count."""
    return _update(sql, *args)
if __name__ == '__main__':
    # Smoke test: requires a local MySQL server with a 'www-data' account
    # and a 'test' database; recreates the `user` table, then runs doctests.
    logging.basicConfig(level=logging.DEBUG)
    create_engine('www-data', 'www-data', 'test')
    update('drop table if exists user')
    update('create table user (id int primary key, name text, email text, passwd text, last_modified real)')
    import doctest
    doctest.testmod()
|
gpl-2.0
| 7,174,816,158,842,612,000
| 23.413598
| 118
| 0.559011
| false
| 3.812832
| false
| false
| false
|
fabteam1/komsukomsuhuhu
|
komsukomsuhuu/profiles/forms.py
|
1
|
2012
|
from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import UserCreationForm
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from profiles.models import CustomUser, UserLocation
class LoginForm(forms.Form):
    """Username/password form that authenticates during clean().

    On success the authenticated user is stored as self.user; on failure
    a ValidationError is raised.  Missing fields are left for the
    per-field required validation to report.
    """
    username = forms.CharField(required=True)
    password = forms.CharField(widget=forms.PasswordInput, required=True)

    def clean(self):
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        if not username or not password:
            return self.cleaned_data
        user = authenticate(username=username, password=password)
        if not user:
            raise ValidationError('Wrong username or password !')
        self.user = user
        return self.cleaned_data
class RegistrationForm(UserCreationForm):
    # Thin alias of Django's UserCreationForm — presumably kept as an
    # extension point for registration-specific fields; confirm before
    # removing.
    pass
class AdvancedRegistrationForm(UserCreationForm):
    """Registration form that also requires first/last name and a unique email."""
    first_name = forms.CharField(max_length=30, required=True)
    last_name = forms.CharField(max_length=30, required=True)

    class Meta:
        model = User
        fields = ('first_name', 'last_name', 'username', 'email')

    def clean_email(self):
        # Require an email and reject addresses already registered
        # (case-insensitive comparison).
        if not self.cleaned_data['email']:
            raise forms.ValidationError(u'Enter email.')
        if User.objects.filter(email__iexact=self.cleaned_data['email']):
            raise forms.ValidationError(
                u'''
                This email has already been in use. Please try with different email.
                '''
            )
        return self.cleaned_data['email']
class ChangeCustomUserDetails(forms.ModelForm):
    """ModelForm for editing a CustomUser's address, phone, and birthday."""

    class Meta:
        model = CustomUser
        fields = ('address', 'phone', 'birthDay')
class UserLocationForm(forms.ModelForm):
    """ModelForm capturing a user's longitude/latitude coordinates."""

    class Meta:
        model = UserLocation
        fields = ['longitude', 'latitude']
class UserStatusForm(forms.ModelForm):
    """ModelForm for updating only the CustomUser status field."""

    class Meta:
        model = CustomUser
        fields = ('status',)
|
mit
| 8,524,575,782,871,746,000
| 25.473684
| 84
| 0.657555
| false
| 4.471111
| false
| false
| false
|
induane/stomp.py3
|
stomp/connect.py
|
1
|
38302
|
import math
import random
import re
import socket
import sys
import threading
import time
import types
import xml.dom.minidom
import errno
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
# Candidate SSL protocol constant names, in preference order (newest TLS
# first).  BUGFIX: this was previously a frozenset, whose iteration order
# is arbitrary, so the DEFAULT_SSL_VERSION chosen by the loop below was
# nondeterministic across runs.  A tuple preserves the same membership
# semantics while making the preference order explicit.
protocols = (
    'PROTOCOL_TLSv1_2',
    'PROTOCOL_TLSv1_1',
    'PROTOCOL_TLSv1',
    'PROTOCOL_SSLv23',
    'PROTOCOL_SSLv3',
    'PROTOCOL_SSLv2',
)

DEFAULT_SSL_VERSION = None
SSL_AVAILABLE = True
try:
    import ssl
    from ssl import SSLError
except ImportError:
    SSL_AVAILABLE = False

if SSL_AVAILABLE:
    # Pick the first protocol constant this Python's ssl module provides.
    for protocol in protocols:
        try:
            DEFAULT_SSL_VERSION = getattr(ssl, protocol)
        except AttributeError:
            continue
        except SSLError:
            continue
        break
try:
from socket import SOL_SOCKET, SO_KEEPALIVE
from socket import SOL_TCP, TCP_KEEPIDLE, TCP_KEEPINTVL, TCP_KEEPCNT
LINUX_KEEPALIVE_AVAIL=True
except ImportError:
LINUX_KEEPALIVE_AVAIL=False
import exception
import listener
import utils
from backward import decode, encode, hasbyte, pack, socksend, NULL
try:
import uuid
except ImportError:
from backward import uuid
try:
from fractions import gcd
except ImportError:
from backward import gcd
import logging
log = logging.getLogger('stomp.py')
class Connection(object):
"""
Represents a STOMP client connection.
"""
# ========= PRIVATE MEMBERS =========
# List of all host names (unqualified, fully-qualified, and IP
# addresses) that refer to the local host (both loopback interface
# and external interfaces). This is used for determining
# preferred targets.
__localhost_names = [ "localhost", "127.0.0.1" ]
try:
__localhost_names.append(socket.gethostbyname(socket.gethostname()))
except:
pass
try:
__localhost_names.append(socket.gethostname())
except:
pass
try:
__localhost_names.append(socket.getfqdn(socket.gethostname()))
except:
pass
#
# Used to parse the STOMP "content-length" header lines,
#
__content_length_re = re.compile('^content-length[:]\\s*(?P<value>[0-9]+)', re.MULTILINE)
def __init__(self,
host_and_ports = [ ('localhost', 61613) ],
user = None,
passcode = None,
prefer_localhost = True,
try_loopback_connect = True,
reconnect_sleep_initial = 0.1,
reconnect_sleep_increase = 0.5,
reconnect_sleep_jitter = 0.1,
reconnect_sleep_max = 60.0,
reconnect_attempts_max = 3,
use_ssl = False,
ssl_key_file = None,
ssl_cert_file = None,
ssl_ca_certs = None,
ssl_cert_validator = None,
wait_on_receipt = False,
ssl_version = DEFAULT_SSL_VERSION,
timeout = None,
version = 1.0,
strict = True,
heartbeats = (0, 0),
keepalive = None,
vhost = None
):
"""
Initialize and start this connection.
\param host_and_ports
a list of (host, port) tuples.
\param prefer_localhost
if True and the local host is mentioned in the (host,
port) tuples, try to connect to this first
\param try_loopback_connect
if True and the local host is found in the host
tuples, try connecting to it using loopback interface
(127.0.0.1)
\param reconnect_sleep_initial
initial delay in seconds to wait before reattempting
to establish a connection if connection to any of the
hosts fails.
\param reconnect_sleep_increase
factor by which the sleep delay is increased after
each connection attempt. For example, 0.5 means
to wait 50% longer than before the previous attempt,
1.0 means wait twice as long, and 0.0 means keep
the delay constant.
\param reconnect_sleep_max
maximum delay between connection attempts, regardless
of the reconnect_sleep_increase.
\param reconnect_sleep_jitter
random additional time to wait (as a percentage of
the time determined using the previous parameters)
between connection attempts in order to avoid
stampeding. For example, a value of 0.1 means to wait
an extra 0%-10% (randomly determined) of the delay
calculated using the previous three parameters.
\param reconnect_attempts_max
maximum attempts to reconnect
\param use_ssl
connect using SSL to the socket. This wraps the
socket in a SSL connection. The constructor will
raise an exception if you ask for SSL, but it can't
find the SSL module.
\param ssl_cert_file
the path to a X509 certificate
\param ssl_key_file
the path to a X509 key file
\param ssl_ca_certs
the path to the a file containing CA certificates
to validate the server against. If this is not set,
server side certificate validation is not done.
\param ssl_cert_validator
function which performs extra validation on the client
certificate, for example checking the returned
certificate has a commonName attribute equal to the
hostname (to avoid man in the middle attacks).
The signature is:
(OK, err_msg) = validation_function(cert, hostname)
where OK is a boolean, and cert is a certificate structure
as returned by ssl.SSLSocket.getpeercert()
\param wait_on_receipt
if a receipt is specified, then the send method should wait
(block) for the server to respond with that receipt-id
before continuing
\param ssl_version
SSL protocol to use for the connection. This should be
one of the PROTOCOL_x constants provided by the ssl module.
The default is ssl.PROTOCOL_SSLv3
\param timeout
the timeout value to use when connecting the stomp socket
\param version
STOMP protocol version (1.0 or 1.1)
\param strict
if true, use the strict version of the protocol. For STOMP 1.1, this means
it will use the STOMP connect header, rather than CONNECT.
\param heartbeats
a tuple containing the heartbeat send and receive time in millis. (0,0)
if no heartbeats
\param keepalive
some operating systems support sending the occasional heart
beat packets to detect when a connection fails. This
parameter can either be set set to a boolean to turn on the
default keepalive options for your OS, or as a tuple of
values, which also enables keepalive packets, but specifies
options specific to your OS implementation
\param vhost
specify a virtual hostname to provide in the 'host' header of the connection
"""
sorted_host_and_ports = []
sorted_host_and_ports.extend(host_and_ports)
#
# If localhost is preferred, make sure all (host, port) tuples that refer to the local host come first in the list
#
if prefer_localhost:
sorted_host_and_ports.sort(key = self.is_localhost)
#
# If the user wishes to attempt connecting to local ports using the loopback interface, for each (host, port) tuple
# referring to a local host, add an entry with the host name replaced by 127.0.0.1 if it doesn't exist already
#
loopback_host_and_ports = []
if try_loopback_connect:
for host_and_port in sorted_host_and_ports:
if self.is_localhost(host_and_port) == 1:
port = host_and_port[1]
if (not ("127.0.0.1", port) in sorted_host_and_ports
and not ("localhost", port) in sorted_host_and_ports):
loopback_host_and_ports.append(("127.0.0.1", port))
#
# Assemble the final, possibly sorted list of (host, port) tuples
#
self.__host_and_ports = []
self.__host_and_ports.extend(loopback_host_and_ports)
self.__host_and_ports.extend(sorted_host_and_ports)
self.__recvbuf = ''
self.__listeners = {}
self.__reconnect_sleep_initial = reconnect_sleep_initial
self.__reconnect_sleep_increase = reconnect_sleep_increase
self.__reconnect_sleep_jitter = reconnect_sleep_jitter
self.__reconnect_sleep_max = reconnect_sleep_max
self.__reconnect_attempts_max = reconnect_attempts_max
self.__timeout = timeout
self.__connect_headers = {}
if user is not None and passcode is not None:
self.__connect_headers['login'] = user
self.__connect_headers['passcode'] = passcode
self.__socket = None
self.__socket_semaphore = threading.BoundedSemaphore(1)
self.__current_host_and_port = None
self.__receiver_thread_exit_condition = threading.Condition()
self.__receiver_thread_exited = False
self.__send_wait_condition = threading.Condition()
self.__connect_wait_condition = threading.Condition()
self.blocking = None
self.connected = False
# setup SSL
if use_ssl and not ssl:
raise Exception("SSL connection requested, but SSL library not found.")
self.__ssl = use_ssl
self.__ssl_cert_file = ssl_cert_file
self.__ssl_key_file = ssl_key_file
self.__ssl_ca_certs = ssl_ca_certs
self.__ssl_cert_validator = ssl_cert_validator
self.__ssl_version = ssl_version
self.__receipts = {}
self.__wait_on_receipt = wait_on_receipt
# protocol version
self.version = version
self.__strict = strict
# setup heartbeating
if version < 1.1 and heartbeats != (0, 0):
raise exception.ProtocolException('Heartbeats can only be set on a 1.1+ connection')
self.heartbeats = heartbeats
# used for 1.1 heartbeat messages (set to true every time a heartbeat message arrives)
self.__received_heartbeat = time.time()
# flag used when we receive the disconnect receipt
self.__disconnect_receipt = None
# function for creating threads used by the connection
self.create_thread_fc = default_create_thread
self.__keepalive = keepalive
self.vhost = vhost
def is_localhost(self, host_and_port):
"""
Return true if the specified host+port is a member of the 'localhost' list of hosts
"""
(host, port) = host_and_port
if host in Connection.__localhost_names:
return 1
else:
return 2
def override_threading(self, create_thread_fc):
"""
Override for thread creation. Use an alternate threading library by
setting this to a function with a single argument (which is the receiver loop callback).
The thread which is returned should be started (ready to run)
"""
self.create_thread_fc = create_thread_fc
#
# Manage the connection
#
def start(self):
"""
Start the connection. This should be called after all
listeners have been registered. If this method is not called,
no frames will be received by the connection.
"""
self.__running = True
self.__attempt_connection()
thread = self.create_thread_fc(self.__receiver_loop)
self.__notify('connecting')
def stop(self):
"""
Stop the connection. This is equivalent to calling
disconnect() but will do a clean shutdown by waiting for the
receiver thread to exit.
"""
self.disconnect()
self.__receiver_thread_exit_condition.acquire()
while not self.__receiver_thread_exited:
self.__receiver_thread_exit_condition.wait()
self.__receiver_thread_exit_condition.release()
def get_host_and_port(self):
"""
Return a (host, port) tuple indicating which STOMP host and
port is currently connected, or None if there is currently no
connection.
"""
return self.__current_host_and_port
def is_connected(self):
"""
Return true if the socket managed by this connection is connected
"""
try:
return self.__socket is not None and self.__socket.getsockname()[1] != 0 and self.connected
except socket.error:
return False
#
# Manage objects listening to incoming frames
#
def set_listener(self, name, listener):
"""
Set a named listener on this connection
\see listener::ConnectionListener
\param name the name of the listener
\param listener the listener object
"""
self.__listeners[name] = listener
def remove_listener(self, name):
"""
Remove a listener according to the specified name
\param name the name of the listener to remove
"""
del self.__listeners[name]
def get_listener(self, name):
"""
Return a named listener
\param name the listener to return
"""
if name in self.__listeners:
return self.__listeners[name]
else:
return None
#
# STOMP transmissions
#
def subscribe(self, headers={}, **keyword_headers):
"""
Send a SUBSCRIBE frame to subscribe to a queue
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
required_headers = [ 'destination' ]
if self.version >= 1.1:
required_headers.append('id')
self.__send_frame_helper('SUBSCRIBE', '', merged_headers, required_headers)
def unsubscribe(self, headers={}, **keyword_headers):
"""
Send an UNSUBSCRIBE frame to unsubscribe from a queue
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
self.__send_frame_helper('UNSUBSCRIBE', '', merged_headers, [ ('destination', 'id') ])
def send(self, message='', headers={}, **keyword_headers):
"""
Send a message (SEND) frame
"""
merged_headers = utils.merge_headers([headers, keyword_headers])
wait_on_receipt = self.__wait_on_receipt and 'receipt' in merged_headers.keys()
if wait_on_receipt:
self.__send_wait_condition.acquire()
try:
self.__send_frame_helper('SEND', message, merged_headers, [ 'destination' ])
self.__notify('send', headers, message)
# if we need to wait-on-receipt, then block until the receipt frame arrives
if wait_on_receipt:
receipt = merged_headers['receipt']
while receipt not in self.__receipts:
self.__send_wait_condition.wait()
del self.__receipts[receipt]
finally:
if wait_on_receipt:
self.__send_wait_condition.release()
def ack(self, headers={}, **keyword_headers):
"""
Send an ACK frame, to acknowledge receipt of a message
"""
self.__send_frame_helper('ACK', '', utils.merge_headers([headers, keyword_headers]), [ 'message-id' ])
def nack(self, headers={}, **keyword_headers):
"""
Send an NACK frame, to acknowledge a message was not successfully processed
"""
if self.version < 1.1:
raise RuntimeError('NACK is not supported with 1.0 connections')
self.__send_frame_helper('NACK', '', utils.merge_headers([headers, keyword_headers]), [ 'message-id' ])
def begin(self, headers={}, **keyword_headers):
"""
Send a BEGIN frame to start a transaction
"""
use_headers = utils.merge_headers([headers, keyword_headers])
if not 'transaction' in use_headers.keys():
use_headers['transaction'] = str(uuid.uuid4())
self.__send_frame_helper('BEGIN', '', use_headers, [ 'transaction' ])
return use_headers['transaction']
def abort(self, headers={}, **keyword_headers):
"""
Send an ABORT frame to rollback a transaction
"""
self.__send_frame_helper('ABORT', '', utils.merge_headers([headers, keyword_headers]), [ 'transaction' ])
def commit(self, headers={}, **keyword_headers):
"""
Send a COMMIT frame to commit a transaction (send pending messages)
"""
self.__send_frame_helper('COMMIT', '', utils.merge_headers([headers, keyword_headers]), [ 'transaction' ])
def connect(self, headers={}, **keyword_headers):
"""
Send a CONNECT frame to start a connection
"""
wait = False
if 'wait' in keyword_headers and keyword_headers['wait']:
wait = True
del keyword_headers['wait']
if self.version >= 1.1:
if self.__strict:
cmd = 'STOMP'
else:
cmd = 'CONNECT'
if self.vhost is not None:
headers['host'] = self.vhost
headers['accept-version'] = self.version
headers['heart-beat'] = '%s,%s' % self.heartbeats
else:
cmd = 'CONNECT'
self.__send_frame_helper(cmd, '', utils.merge_headers([self.__connect_headers, headers, keyword_headers]), [ ])
if wait:
self.__connect_wait_condition.acquire()
while not self.is_connected():
self.__connect_wait_condition.wait()
self.__connect_wait_condition.release()
def disconnect_socket(self):
self.__running = False
if self.__socket is not None:
if self.__ssl:
#
# Even though we don't want to use the socket, unwrap is the only API method which does a proper SSL shutdown
#
try:
self.__socket = self.__socket.unwrap()
except Exception:
#
# unwrap seems flaky on Win with the backported ssl mod, so catch any exception and log it
#
_, e, _ = sys.exc_info()
log.warn(e)
elif hasattr(socket, 'SHUT_RDWR'):
try:
self.__socket.shutdown(socket.SHUT_RDWR)
except socket.error:
_, e, _ = sys.exc_info()
log.warn('Unable to issue SHUT_RDWR on socket because of error "%s"' % e)
#
# split this into a separate check, because sometimes the socket is nulled between shutdown and this call
#
if self.__socket is not None:
try:
self.__socket.close()
except socket.error:
_, e, _ = sys.exc_info()
log.warn('Unable to close socket because of error "%s"' % e)
self.__current_host_and_port = None
def disconnect(self, send_disconnect=True, headers={}, **keyword_headers):
"""
Send a DISCONNECT frame to finish a connection
"""
if self.version >= 1.1 and 'receipt' not in headers:
headers['receipt'] = str(uuid.uuid4())
try:
self.__send_frame_helper('DISCONNECT', '', utils.merge_headers([self.__connect_headers, headers, keyword_headers]), [ ])
except exception.NotConnectedException:
_, e, _ = sys.exc_info()
self.disconnect_socket()
raise e
if 'receipt' in headers:
self.__disconnect_receipt = headers['receipt']
else:
self.disconnect_socket()
def __convert_dict(self, payload):
"""
Encode a python dictionary as a <map>...</map> structure.
"""
xmlStr = "<map>\n"
for key in payload:
xmlStr += "<entry>\n"
xmlStr += "<string>%s</string>" % key
xmlStr += "<string>%s</string>" % payload[key]
xmlStr += "</entry>\n"
xmlStr += "</map>"
return xmlStr
def __send_frame_helper(self, command, payload, headers, required_header_keys):
"""
Helper function for sending a frame after verifying that a
given set of headers are present.
\param command
the command to send
\param payload
the frame's payload
\param headers
a dictionary containing the frame's headers
\param required_header_keys
a sequence enumerating all required header keys. If an element in this sequence is itself
a tuple, that tuple is taken as a list of alternatives, one of which must be present.
\throws ArgumentError
if one of the required header keys is not present in the header map.
"""
for required_header_key in required_header_keys:
if type(required_header_key) == tuple:
found_alternative = False
for alternative in required_header_key:
if alternative in headers.keys():
found_alternative = True
if not found_alternative:
raise KeyError("Command %s requires one of the following headers: %s" % (command, str(required_header_key)))
elif not required_header_key in headers.keys():
raise KeyError("Command %s requires header %r" % (command, required_header_key))
self.__send_frame(command, headers, payload)
def __send_frame(self, command, headers={}, payload=''):
"""
Send a STOMP frame.
\param command
the frame command
\param headers
a map of headers (key-val pairs)
\param payload
the message payload
"""
if type(payload) == dict:
headers["transformation"] = "jms-map-xml"
payload = self.__convert_dict(payload)
if payload:
payload = encode(payload)
if hasbyte(0, payload):
headers.update({'content-length': len(payload)})
if self.__socket is not None:
try:
frame = [ ]
if command is not None:
frame.append(command + '\n')
for key, val in headers.items():
frame.append('%s:%s\n' % (key, val))
frame.append('\n')
if payload:
frame.append(payload)
if command is not None:
# only send the terminator if we're sending a command (heartbeats have no term)
frame.append(NULL)
frame = pack(frame)
self.__socket_semaphore.acquire()
try:
socksend(self.__socket, frame)
log.debug("Sent frame: type=%s, headers=%r, body=%r" % (command, headers, payload))
finally:
self.__socket_semaphore.release()
except Exception:
_, e, _ = sys.exc_info()
log.error("Error sending frame: %s" % e)
raise e
else:
raise exception.NotConnectedException()
def __notify(self, frame_type, headers=None, body=None):
"""
Utility function for notifying listeners of incoming and outgoing messages
\param frame_type
the type of message
\param headers
the map of headers associated with the message
\param body
the content of the message
"""
if frame_type == 'receipt':
# logic for wait-on-receipt notification
receipt = headers['receipt-id']
self.__send_wait_condition.acquire()
try:
self.__receipts[receipt] = None
self.__send_wait_condition.notify()
finally:
self.__send_wait_condition.release()
# received a stomp 1.1 disconnect receipt
if receipt == self.__disconnect_receipt:
self.disconnect_socket()
if frame_type == 'connected':
self.__connect_wait_condition.acquire()
self.connected = True
self.__connect_wait_condition.notify()
self.__connect_wait_condition.release()
if 'version' not in headers.keys():
if self.version >= 1.1:
log.warn('Downgraded STOMP protocol version to 1.0')
self.version = 1.0
if 'heart-beat' in headers.keys():
self.heartbeats = utils.calculate_heartbeats(headers['heart-beat'].replace(' ', '').split(','), self.heartbeats)
if self.heartbeats != (0,0):
default_create_thread(self.__heartbeat_loop)
elif frame_type == 'disconnected':
self.__connect_wait_condition.acquire()
self.connected = False
self.__connect_wait_condition.release()
for listener in self.__listeners.values():
if not listener: continue
if not hasattr(listener, 'on_%s' % frame_type):
log.debug('listener %s has no method on_%s' % (listener, frame_type))
continue
if frame_type == 'connecting':
listener.on_connecting(self.__current_host_and_port)
continue
elif frame_type == 'disconnected':
listener.on_disconnected()
continue
notify_func = getattr(listener, 'on_%s' % frame_type)
notify_func(headers, body)
def __receiver_loop(self):
"""
Main loop listening for incoming data.
"""
log.debug("Starting receiver loop")
try:
try:
while self.__running:
if self.__socket is None:
break
try:
try:
while self.__running:
frames = self.__read()
for frame in frames:
(frame_type, headers, body) = utils.parse_frame(frame)
log.debug("Received frame: %r, headers=%r, body=%r" % (frame_type, headers, body))
frame_type = frame_type.lower()
if frame_type in [ 'connected', 'message', 'receipt', 'error' ]:
self.__notify(frame_type, headers, body)
elif frame_type == 'heartbeat':
# no notifications needed
pass
else:
log.warning('Unknown response frame type: "%s" (frame length was %d)' % (frame_type, len(frame)))
finally:
try:
self.__socket.close()
except:
pass # ignore errors when attempting to close socket
self.__socket = None
self.__current_host_and_port = None
except exception.ConnectionClosedException:
if self.__running:
log.error("Lost connection")
self.__notify('disconnected')
#
# Clear out any half-received messages after losing connection
#
self.__recvbuf = ''
self.__running = False
break
except:
log.exception("An unhandled exception was encountered in the stomp receiver loop")
finally:
self.__receiver_thread_exit_condition.acquire()
self.__receiver_thread_exited = True
self.__receiver_thread_exit_condition.notifyAll()
self.__receiver_thread_exit_condition.release()
log.debug("Receiver loop ended")
def __heartbeat_loop(self):
"""
Loop for sending (and monitoring received) heartbeats
"""
send_sleep = self.heartbeats[0] / 1000
# receive gets an additional threshold of 3 additional seconds
receive_sleep = (self.heartbeats[1] / 1000) + 3
if send_sleep == 0:
sleep_time = receive_sleep
elif receive_sleep == 0:
sleep_time = send_sleep
else:
# sleep is the GCD of the send and receive times
sleep_time = gcd(send_sleep, receive_sleep) / 2.0
send_time = time.time()
receive_time = time.time()
while self.__running:
time.sleep(sleep_time)
if time.time() - send_time > send_sleep:
send_time = time.time()
log.debug('Sending a heartbeat message')
self.__send_frame(None)
if time.time() - receive_time > receive_sleep:
if time.time() - self.__received_heartbeat > receive_sleep:
log.debug('Heartbeat timeout')
# heartbeat timeout
for listener in self.__listeners.values():
listener.on_heartbeat_timeout()
self.disconnect_socket()
self.__connect_wait_condition.acquire()
self.connected = False
self.__connect_wait_condition.release()
def __read(self):
"""
Read the next frame(s) from the socket.
"""
fastbuf = StringIO()
while self.__running:
try:
try:
c = self.__socket.recv(1024)
except socket.error:
_, e, _ = sys.exc_info()
if e.args[0] in (errno.EAGAIN, errno.EINTR):
log.debug("socket read interrupted, restarting")
continue
raise
c = decode(c)
# reset the heartbeat for any received message
self.__received_heartbeat = time.time()
except Exception:
_, e, _ = sys.exc_info()
c = ''
if len(c) == 0:
raise exception.ConnectionClosedException()
fastbuf.write(c)
if '\x00' in c:
break
elif c == '\x0a':
# heartbeat (special case)
return c
self.__recvbuf += fastbuf.getvalue()
fastbuf.close()
result = []
if len(self.__recvbuf) > 0 and self.__running:
while True:
pos = self.__recvbuf.find('\x00')
if pos >= 0:
frame = self.__recvbuf[0:pos]
preamble_end = frame.find('\n\n')
if preamble_end >= 0:
content_length_match = Connection.__content_length_re.search(frame[0:preamble_end])
if content_length_match:
content_length = int(content_length_match.group('value'))
content_offset = preamble_end + 2
frame_size = content_offset + content_length
if frame_size > len(frame):
#
# Frame contains NUL bytes, need to read more
#
if frame_size < len(self.__recvbuf):
pos = frame_size
frame = self.__recvbuf[0:pos]
else:
#
# Haven't read enough data yet, exit loop and wait for more to arrive
#
break
result.append(frame)
self.__recvbuf = self.__recvbuf[pos+1:]
else:
break
return result
def __enable_keepalive(self):
def try_setsockopt(sock, name, fam, opt, val):
if val is None:
return True # no value to set always works
try:
sock.setsockopt(fam, opt, val)
log.debug('keepalive: set %r option to %r on socket' % (name, val))
except:
log.error('keepalive: unable to set %r option to %r on socket' % (name,val))
return False
return True
ka = self.__keepalive
if not ka:
return
if ka == True:
ka_sig = 'auto'
ka_args = ()
else:
try:
ka_sig = ka[0]
ka_args = ka[1:]
except Exception:
log.error('keepalive: bad specification %r' % (ka,))
return
if ka_sig == 'auto':
if LINUX_KEEPALIVE_AVAIL:
ka_sig = 'linux'
ka_args = None
log.debug('keepalive: autodetected linux-style support')
else:
log.error('keepalive: unable to detect any implementation, DISABLED!')
return
if ka_sig == 'linux':
log.debug('keepalive: activating linux-style support')
if ka_args is None:
log.debug('keepalive: using system defaults')
ka_args = (None, None, None)
lka_idle, lka_intvl, lka_cnt = ka_args
if try_setsockopt(self.__socket, 'enable', SOL_SOCKET, SO_KEEPALIVE, 1):
try_setsockopt(self.__socket, 'idle time', SOL_TCP, TCP_KEEPIDLE, lka_idle)
try_setsockopt(self.__socket, 'interval', SOL_TCP, TCP_KEEPINTVL, lka_intvl)
try_setsockopt(self.__socket, 'count', SOL_TCP, TCP_KEEPCNT, lka_cnt)
else:
log.error('keepalive: implementation %r not recognized or not supported' % ka_sig)
def __attempt_connection(self):
"""
Try connecting to the (host, port) tuples specified at construction time.
"""
sleep_exp = 1
connect_count = 0
while self.__running and self.__socket is None and connect_count < self.__reconnect_attempts_max:
for host_and_port in self.__host_and_ports:
try:
log.debug("Attempting connection to host %s, port %s" % host_and_port)
self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__enable_keepalive()
if self.__ssl: # wrap socket
if self.__ssl_ca_certs:
cert_validation = ssl.CERT_REQUIRED
else:
cert_validation = ssl.CERT_NONE
self.__socket = ssl.wrap_socket(self.__socket, keyfile = self.__ssl_key_file,
certfile = self.__ssl_cert_file, cert_reqs = cert_validation,
ca_certs = self.__ssl_ca_certs, ssl_version = self.__ssl_version)
self.__socket.settimeout(self.__timeout)
if self.blocking is not None:
self.__socket.setblocking(self.blocking)
self.__socket.connect(host_and_port)
#
# Validate server cert
#
if self.__ssl and self.__ssl_cert_validator:
cert = self.__socket.getpeercert()
(ok, errmsg) = apply(self.__ssl_cert_validator, (cert, host_and_port[0]))
if not ok:
raise SSLError("Server certificate validation failed: %s" % errmsg)
self.__current_host_and_port = host_and_port
log.info("Established connection to host %s, port %s" % host_and_port)
break
except socket.error:
self.__socket = None
if isinstance(sys.exc_info()[1], tuple):
exc = sys.exc_info()[1][1]
else:
exc = sys.exc_info()[1]
connect_count += 1
log.warning("Could not connect to host %s, port %s: %s" % (host_and_port[0], host_and_port[1], exc))
if self.__socket is None:
sleep_duration = (min(self.__reconnect_sleep_max,
((self.__reconnect_sleep_initial / (1.0 + self.__reconnect_sleep_increase))
* math.pow(1.0 + self.__reconnect_sleep_increase, sleep_exp)))
* (1.0 + random.random() * self.__reconnect_sleep_jitter))
sleep_end = time.time() + sleep_duration
log.debug("Sleeping for %.1f seconds before attempting reconnect" % sleep_duration)
while self.__running and time.time() < sleep_end:
time.sleep(0.2)
if sleep_duration < self.__reconnect_sleep_max:
sleep_exp += 1
if not self.__socket:
raise exception.ConnectFailedException()
def default_create_thread(callback):
    """
    Default thread factory: run *callback* on an already-started daemon
    thread (daemonized so a lingering receiver thread cannot block
    interpreter shutdown) and return it.
    """
    worker = threading.Thread(None, callback)
    worker.daemon = True
    worker.start()
    return worker
|
apache-2.0
| -1,741,751,368,077,238,500
| 36.698819
| 137
| 0.534593
| false
| 4.661312
| false
| false
| false
|
mode89/snn
|
izhikevich/da.py
|
1
|
3623
|
import matplotlib.pyplot as plt
import numpy
import random

random.seed(0)

# Dopamine-modulated STDP spiking network (after Izhikevich, 2007):
# a delayed reward is scheduled whenever neuron n2 fires shortly after n1,
# which should selectively strengthen the monitored n1 -> n2 synapse.
T = 100000            # simulation length, ms
N = 1000              # total neurons
Ne = int(N * 0.8)     # excitatory neurons
Ni = N - Ne           # inhibitory neurons
M = int(N * 0.1)      # outgoing synapses per neuron
D = 20                # maximal conduction delay, ms

# Izhikevich parameters: regular-spiking excitatory, fast-spiking inhibitory.
a = numpy.concatenate((
    0.02 * numpy.ones(Ne),
    0.1 * numpy.ones(Ni)
))
d = numpy.concatenate((
    8 * numpy.ones(Ne),
    2 * numpy.ones(Ni)
))

# generate post-synaptic connections; inhibitory neurons project only onto
# excitatory ones. (BUGFIX: dtype=int — the numpy.int alias was removed in
# NumPy 1.24 and raised AttributeError.)
post = numpy.empty((N, M), dtype=int)
for i in range(Ne):
    post[i,:] = random.sample(range(N), M)
for i in range(Ne, N):
    post[i,:] = random.sample(range(Ne), M)

# find pre-synaptic connections to excitatory neurons
pre = [[] for i in range(N)]
for i in range(Ne):
    for j in range(M):
        pre[post[i,j]].append(i)

# generate delays: excitatory synapses get a random delay, inhibitory act at once
delays = [[[] for i in range(D)] for j in range(N)]
for i in range(Ne):
    for j in range(M):
        delays[i][int(D * random.random())].append(post[i,j])
for i in range(Ne, N):
    for j in range(M):
        delays[i][0].append(post[i,j])

# generate matrix of synaptic weights
s = numpy.zeros((N, N))
for i in range(Ne):
    s[i, post[i,:]] = 6.0
for i in range(Ne, N):
    s[i, post[i,:]] = -5.0

v = -65 * numpy.ones(N)    # membrane potentials
u = 0.2 * v                # membrane recovery variables
firings = []               # spike raster: one array of fired indices per ms
STDP = numpy.zeros(N)      # per-neuron STDP trace
sm = 4.0                   # maximal synaptic weight
sd = numpy.zeros((N, N))   # eligibility traces (weight derivatives)
DA = 0                     # extracellular dopamine level
rew = []                   # scheduled reward times

# the monitored synapse n1 -> n2 starts silent
n1 = 700
n2 = post[n1, 0]
s[n1, n2] = 0.0
interval = 20              # coincidence window, ms
n1f = []                   # spike times of n1
n2f = []                   # spike times of n2

class data:
    # traces recorded every ms for the plots at the end
    n0 = 700
    n1 = post[n0, 0]
    stdp0 = []
    stdp1 = []
    s01 = []
    da = []

for t in range(T):
    print(t)
    # provide random input
    I = 13.0 * numpy.array([random.uniform(-0.5, 0.5) for i in range(N)])
    # identify fired neurons and reset them
    fired = numpy.argwhere(v >= 30)
    if fired.size > 0:
        v[fired] = -65.0
        u[fired] = u[fired] + d[fired]
    # deliver spikes to post-synaptic neurons, honoring conduction delays
    firings.append(fired)
    for time in range(min(D, len(firings))):
        for fired_neuron in firings[t - time]:
            post_neurons = delays[fired_neuron][time]
            if len(post_neurons) > 0:
                I[post_neurons] += s[fired_neuron, post_neurons]
    # update membrane potential (two 0.5 ms Euler sub-steps for stability)
    for i in range(2):
        v += 0.5 * ((0.04 * v + 5.0) * v + 140.0 - u + I)
    u += a * (0.2 * v - u)
    # update eligibility traces (only excitatory synapses are plastic)
    STDP[fired] = 0.1
    for fired_neuron in fired:
        if fired_neuron < Ne:
            post_neurons = post[fired_neuron,:]
            sd[fired_neuron, post_neurons] -= 1.2 * STDP[post_neurons]
            pre_neurons = pre[fired_neuron]
            sd[pre_neurons, fired_neuron] += STDP[pre_neurons]
    STDP *= 0.95
    DA *= 0.995
    # apply dopamine-gated plasticity every 10 ms, clipped to [0, sm]
    if t % 10 == 0:
        s[0:Ne,:] = numpy.maximum(0.0, numpy.minimum(sm,
            s[0:Ne,:] + (0.002 + DA) * sd[0:Ne,:]))
        sd *= 0.99
    if numpy.any(fired == n1):
        n1f.append(t)
    if numpy.any(fired == n2):
        n2f.append(t)
    # BUGFIX: also require n2f to be non-empty; the original indexed
    # n2f[-1] before n2 had ever fired, raising IndexError.
    if len(n1f) > 0 and len(n2f) > 0:
        if t - n1f[-1] < interval and n2f[-1] > n1f[-1]:
            print("Coincident spiking")
            # schedule a reward 1-3 s in the future
            rew.append(t + 1000 + int(2000 * random.random()))
    if any([it == t for it in rew]):
        print("Rewarding")
        DA += 0.5
    data.stdp0.append(STDP[data.n0])
    data.stdp1.append(STDP[data.n1])
    data.s01.append(s[data.n0, data.n1])
    data.da.append(DA)

# spike raster plus the recorded traces
x = []
y = []
for t in range(T):
    for fired in firings[t]:
        x.append(t)
        y.append(fired)
plt.subplot(411)
plt.scatter(x, y, color="black", marker=".")
plt.xlim(0, T)
plt.ylim(0, N)
plt.subplot(412)
plt.plot(
    range(T), data.stdp0,
    range(T), data.stdp1)
plt.xlim(0, T)
plt.subplot(413)
plt.plot(range(T), data.s01)
plt.xlim(0, T)
plt.subplot(414)
plt.plot(range(T), data.da)
plt.xlim(0, T)
plt.show()
|
mit
| 6,536,851,437,615,077,000
| 21.226994
| 73
| 0.553133
| false
| 2.567682
| false
| false
| false
|
bellhops/TapeDeck
|
tapedeck/deck/models.py
|
1
|
1249
|
import os
from binascii import hexlify
from django.db import models
def _createId():
return hexlify(os.urandom(8))
class Deck(models.Model):
    # random hex string primary key (see _createId)
    hash = models.CharField(max_length=256, primary_key=True, default=_createId)
    branch = models.ForeignKey('branch.Branch', on_delete=models.CASCADE)
    version = models.ForeignKey('version.Version', on_delete=models.CASCADE)
    # the deck this one superseded, forming a linked history per branch/version
    previous = models.OneToOneField('deck.Deck', on_delete=models.SET_NULL, blank=True, null=True, related_name='next')
    file = models.FileField(upload_to='decks')
    uploaded_at = models.DateTimeField(auto_now_add=True)
    active = models.BooleanField(default=True)

    def save(self, *args, **kwargs):
        """Expire other decks for this branch/version and chain this deck
        onto the previously active one.

        BUGFIX: exclude self from the lookup. Without it, re-saving an
        already-persisted deck could pick itself as the "active" deck,
        setting ``previous`` to itself and flipping its own ``active``
        flag in the DB before the in-memory row was rewritten.
        """
        overlapping_decks = Deck.objects.filter(
            branch=self.branch, version=self.version).exclude(pk=self.pk)
        if overlapping_decks.exists():
            try:
                active_deck = overlapping_decks.filter(active=True).latest('uploaded_at')
            except Deck.DoesNotExist:
                # no currently-active deck to chain onto; leave previous unset
                pass
            else:
                self.previous = active_deck
            overlapping_decks.update(active=False)
        super().save(*args, **kwargs)
|
mit
| -3,008,835,626,457,854,500
| 39.290323
| 119
| 0.669335
| false
| 3.866873
| false
| false
| false
|
qualitio/qualitio
|
qualitio/settings.py
|
1
|
5111
|
# Django settings for the Qualitio project.
import os
# Absolute path of the directory containing this settings module; used to
# anchor the database file, static files and templates.
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('Admin Qualitio', 'admin@qualitio.com'),
)
MANAGERS = ADMINS
# Development database: SQLite file next to the project sources.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_PATH, 'data.sqlite'),
    }
}
# Localization.
TIME_ZONE = 'Europe/Warsaw'
LANGUAGE_CODE = 'en'
SITE_ID = 1
USE_I18N = True
DATE_FORMAT = "d-m-Y"
DATETIME_FORMAT = "d-m-Y, H:i:s"
DATE_INPUT_FORMATS = ('%d-%m-%Y',)
# Static/media file locations.
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'static')
MEDIA_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static_admin/'
# Template sources: filesystem, per-app dirs and database-stored templates.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'dbtemplates.loader.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'qualitio.organizations.middleware.OrganizationMiddleware',
    'qualitio.organizations.middleware.ProjectMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'qualitio.core.middleware.LoginRequiredMiddleware',
    'qualitio.core.middleware.QueriesCounterMiddleware',
    'django.middleware.transaction.TransactionMiddleware',
)
ROOT_URLCONF = 'qualitio.urls'
LOGIN_REDIRECT_URL = "/"
LOGIN_URL = '/login/'
# URL patterns exempted from the LoginRequiredMiddleware; entries may be a
# regex string or a (regex, predicate(request)) pair.
LOGIN_EXEMPT_URLS = (
    (r'^$', lambda request: request.organization is None),
    r'^r/.*',
    r'^none/$',
    r'^static/',
    r'^login/',
    r'^inactive/',
    r'^admin/',
    r'^register/.*',
    r'^associate/*',
    r'^complete/*',
    r'^project/(?P<slug>[\w-]+)/report/external/*',
    r'^__debug__/.*',
    r'^api/.*',
    r'^googleapps_setup/$',
    r'^google_checkout/$',
    r'^paypal_ipn/$',
)
# URL patterns exempted from the ProjectMiddleware.
PROJECT_EXEMPT_URLS = (
    r'^static/.*',
    r'^admin/.*',
    r'^login/.*',
    r'^register/.*',
    r'^associate/*',
    r'^complete/*',
    r'^__debug__/.*',
    r'^api/.*',
    r'^project/new/.*',
)
# URL patterns exempted from the OrganizationMiddleware.
ORGANIZATION_EXEMPT_URLS = (
    r'^static/',
    r'^admin/',
)
TEMPLATE_DIRS = (
    os.path.join(PROJECT_PATH, 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.webdesign',
    'django.contrib.markup',
    'django.contrib.humanize',
    'mptt',
    'social_auth',
    'django_nose',
    'reversion',
    'south',
    'pagination',
    'compressor',
    'dbtemplates',
    'tastypie',
    'articles',
    'django_extensions',
    'qualitio.core.custommodel', # internal core django application
    'qualitio.core',
    'qualitio.organizations',
    'qualitio.require',
    'qualitio.report',
    'qualitio.execute',
    'qualitio.store',
    'qualitio.filter',
    'qualitio.actions',
    'qualitio.glossary',
    'qualitio.payments',
    'qualitio.customizations',
)
TEMPLATE_CONTEXT_PROCESSORS = ("django.contrib.auth.context_processors.auth",
                               "django.core.context_processors.debug",
                               "django.core.context_processors.i18n",
                               "django.core.context_processors.media",
                               "django.core.context_processors.request",
                               "django.contrib.messages.context_processors.messages",
                               "qualitio.core.context_processors.settings",
                               "qualitio.core.context_processors.development",
                               "qualitio.core.context_processors.core",
                               "qualitio.core.context_processors.module",
                               "qualitio.organizations.context_processors.main")
AUTH_PROFILE_MODULE = 'organizations.UserProfile'
# Authentication: Google/Google Apps social auth plus the organization-aware
# model backend.
SOCIAL_AUTH_IMPORT_BACKENDS = (
    'qualitio.googleapps.backends',
)
AUTHENTICATION_BACKENDS = (
    'qualitio.googleapps.backends.GoogleBackend',
    'qualitio.googleapps.backends.GoogleAppsBackend',
    'qualitio.organizations.auth.backends.OrganizationModelBackend',
)
MPTT_ADMIN_LEVEL_INDENT = 30
# Issue tracker integration (Bugzilla).
ISSUE_BACKEND = "qualitio.execute.backends.bugzilla"
ISSUE_BACKEND_ABSOLUTE_URL = "https://bugzilla.mozilla.org/show_bug.cgi?id=%s"
ISSUE_BACKEND_BUGZILLA_URL = "https://bugzilla.mozilla.org/"
SOUTH_TESTS_MIGRATE = False
# django-compressor / dbtemplates configuration.
COMPRESS_CSS_FILTERS = ['compressor.filters.css_default.CssAbsoluteFilter',
                        'compressor.filters.cssmin.CSSMinFilter']
COMPRESS = False
DBTEMPLATES_CACHE_BACKEND = 'dummy://127.0.0.1/'
DBTEMPLATES_USE_REVERSION = True
DBTEMPLATES_MEDIA_PREFIX = MEDIA_URL
DBTEMPLATES_USE_CODEMIRROR = False
DBTEMPLATES_AUTO_POPULATE_CONTENT = False
# Outgoing mail via Gmail SMTP (password expected in local_settings).
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'notifications@qualitio.com'
EMAIL_PORT = 587
DEFAULT_FROM_EMAIL = "Qualitio Notifications <notifications@qualitio.com>"
# Optional machine-specific overrides.
try:
    from local_settings import *
except ImportError:
    pass
|
gpl-3.0
| -5,309,156,436,536,011,000
| 24.683417
| 85
| 0.638231
| false
| 3.411883
| false
| true
| false
|
nrupatunga/PY-GOTURN
|
goturn/loader/loader_imagenet.py
|
1
|
5893
|
# Date: Nrupatunga: Tuesday 04 July 2017
# Email: nrupatunga@whodat.com
# Name: Nrupatunga
# Description: loading Imagenet dataset
from __future__ import print_function
import os
import cv2
import glob
from annotation import annotation
import xml.etree.ElementTree as ET
from ..logger.logger import setup_logger
from ..helper import config
kMaxRatio = 0.66
class loader_imagenet:

    """Loader for ImageNet DET images and their bounding-box annotations.

    Walks the annotation folder, parses the Pascal-VOC style XML files and
    exposes per-image lists of ``annotation`` objects for training.
    """

    def __init__(self, imagenet_folder, annotations_folder, logger):
        """Store the image/annotation roots and a logger.

        :param imagenet_folder: root directory of the JPEG images.
        :param annotations_folder: root directory of the XML annotations.
        :param logger: logger used for progress and error reporting.
        """
        self.logger = logger
        self.imagenet_folder = imagenet_folder
        self.annotations_folder = annotations_folder
        # Only logs on a bad path; construction still succeeds.
        if not os.path.isdir(imagenet_folder):
            logger.error('{} is not a valid directory'.format(imagenet_folder))

    def loaderImageNetDet(self):
        """Parse every annotation XML under the annotation root.

        :returns: list of per-image annotation lists; also cached on the
            instance as ``list_of_annotations_out`` / ``num_annotations``.
        """
        logger = self.logger
        imagenet_subdirs = sorted(self.find_subfolders(self.annotations_folder))
        num_annotations = 0
        list_of_annotations_out = []
        for i, imgnet_sub_folder in enumerate(imagenet_subdirs):
            annotations_files = sorted(glob.glob(os.path.join(self.annotations_folder, imgnet_sub_folder, '*.xml')))
            logger.info('Loading {}/{} - annotation file from folder = {}'.format(i + 1, len(imagenet_subdirs), imgnet_sub_folder))
            for ann in annotations_files:
                list_of_annotations, num_ann_curr = self.load_annotation_file(ann)
                num_annotations = num_annotations + num_ann_curr
                # Skip images whose boxes were all filtered out.
                if len(list_of_annotations) == 0:
                    continue
                list_of_annotations_out.append(list_of_annotations)
        logger.info('Found {} annotations from {} images'.format(num_annotations, len(list_of_annotations_out)))
        # save it for future use
        self.list_of_annotations_out = list_of_annotations_out
        self.num_annotations = num_annotations
        return list_of_annotations_out

    def find_subfolders(self, imagenet_folder):
        """Return the names of the immediate sub-directories of *imagenet_folder*.

        :param imagenet_folder: directory to scan (despite the name, callers
            pass the annotations root here as well).
        :returns: list of sub-directory names (not full paths).
        """
        return [dir_name for dir_name in os.listdir(imagenet_folder) if os.path.isdir(os.path.join(imagenet_folder, dir_name))]

    def load_annotation_file(self, annotation_file):
        """Parse one Pascal-VOC XML file into annotation objects.

        Boxes larger than kMaxRatio of the display size, or with invalid /
        degenerate coordinates, are dropped.

        :param annotation_file: path to the XML file.
        :returns: (list of annotation objects, number kept).
        """
        list_of_annotations = []
        num_annotations = 0
        root = ET.parse(annotation_file).getroot()
        folder = root.find('folder').text
        filename = root.find('filename').text
        size = root.find('size')
        disp_width = int(size.find('width').text)
        disp_height = int(size.find('height').text)
        for obj in root.findall('object'):
            bbox = obj.find('bndbox')
            xmin = int(bbox.find('xmin').text)
            xmax = int(bbox.find('xmax').text)
            ymin = int(bbox.find('ymin').text)
            ymax = int(bbox.find('ymax').text)
            width = xmax - xmin
            height = ymax - ymin
            # Reject boxes covering most of the image — poor tracking targets.
            if width > (kMaxRatio * disp_width) or height > (kMaxRatio * disp_height):
                continue
            # Reject out-of-range or degenerate boxes.
            if ((xmin < 0) or (ymin < 0) or (xmax <= xmin) or (ymax <= ymin)):
                continue
            objAnnotation = annotation()
            objAnnotation.setbbox(xmin, xmax, ymin, ymax)
            objAnnotation.setWidthHeight(disp_width, disp_height)
            objAnnotation.setImagePath(os.path.join(folder, filename))
            list_of_annotations.append(objAnnotation)
            num_annotations = num_annotations + 1
        return list_of_annotations, num_annotations

    def load_annotation(self, image_num, annotation_num):
        """Load one image and its (display-space corrected) bounding box.

        :param image_num: index into the cached per-image annotation lists.
        :param annotation_num: index of the annotation within that image.
        :returns: (image as loaded by cv2, scaled bbox).
        """
        logger = self.logger
        images = self.list_of_annotations_out
        list_annotations = images[image_num]
        random_ann = list_annotations[annotation_num]
        img_path = os.path.join(self.imagenet_folder, random_ann.image_path + '.JPEG')
        if config.DEBUG:
            # HACK: hardcoded sample image/box used for debugging only.
            img_path = "/media/nrupatunga/Data-Backup/DL/goturn/ILSVRC2014/ILSVRC2014_DET_train/ILSVRC2014_train_0005/ILSVRC2014_train_00059375.JPEG"
            random_ann.bbox.x1 = 243
            random_ann.bbox.y1 = 157
            random_ann.bbox.x2 = 278
            random_ann.bbox.y2 = 176
            random_ann.disp_height = 375
            random_ann.disp_width = 500
        image = cv2.imread(img_path)
        img_height = image.shape[0]
        img_width = image.shape[1]
        sc_factor_1 = 1.0
        if img_height != random_ann.disp_height or img_width != random_ann.disp_width:
            logger.info('Image Number = {}, Annotation Number = {}, Image file = {}'.format(image_num, annotation_num, img_path))
            logger.info('Image Size = {} x {}'.format(img_width, img_height))
            logger.info('Display Size = {} x {}'.format(random_ann.disp_width, random_ann.disp_height))
            sc_factor_1 = (img_height * 1.) / random_ann.disp_height
            sc_factor_2 = (img_width * 1.) / random_ann.disp_width
            logger.info('Factor: {} {}'.format(sc_factor_1, sc_factor_2))
        bbox = random_ann.bbox
        # NOTE(review): the height factor (sc_factor_1) is applied to both
        # x and y; sc_factor_2 (width factor) is computed but never used —
        # looks like x1/x2 should use sc_factor_2. Confirm before changing.
        bbox.x1 = bbox.x1 * sc_factor_1
        bbox.x2 = bbox.x2 * sc_factor_1
        bbox.y1 = bbox.y1 * sc_factor_1
        bbox.y2 = bbox.y2 * sc_factor_1
        return image, bbox
if '__main__' == __name__:
    # Smoke-test the loader against a local ILSVRC2014 checkout.
    logger = setup_logger(logfile=None)
    objLoaderImgNet = loader_imagenet('/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_train/', '/media/nrupatunga/data/datasets/ILSVRC2014/ILSVRC2014_DET_bbox_train/', logger)
    dict_list_of_annotations = objLoaderImgNet.loaderImageNetDet()
|
mit
| -4,537,307,078,829,376,500
| 36.535032
| 186
| 0.614628
| false
| 3.63093
| false
| false
| false
|
taschetto/computationalMethods
|
apoio/parse_svg.py
|
1
|
1979
|
import xml.etree.ElementTree as etree
import re
import sys
__author__ = 'Ramon Costi Fernandes <ramon.fernandes@acad.pucrs.br>'
SVG_namespace = "http://www.w3.org/2000/svg"
SVG_fname = ''
OUTPUT_fname = 'output.txt'
coordinates_list = []
output_list = []
#Instrucoes de uso.
def usage():
    """Print the usage instructions (in Portuguese) and exit with status 1."""
    prog = sys.argv[0]
    print("Como executar:\n")
    print("{} <{}>".format(prog, "SVG input file"))
    print("ou")
    print("{} <{}> <{}>".format(prog, "SVG input file", "OUTPUT file"))
    sys.exit(1)
# Remove as coordenadas duplicadas do arquivo SVG de entrada.
#Remove as coordenadas duplicadas do arquivo SVG de entrada.
def remove_duplicates(coord_list):
    """Drop duplicate coordinate strings, keeping first-seen order.

    The result is stored in the module-level ``coordinates_list`` (the
    original interface); nothing is returned.
    """
    global coordinates_list
    # Track seen items in a set: O(n) overall instead of the quadratic
    # ``item not in list`` scan, with identical output order.
    seen = set()
    unique = []
    for item in coord_list:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    coordinates_list = unique
#Enumera os pontos.
#Enumera os pontos.
def enumerate_coordinates():
    """Number each "x,y" string in ``coordinates_list``.

    Appends "<n> <x> <y>\n" lines (1-based) to the module-level
    ``output_list``.
    """
    # enumerate(..., start=1) replaces the manual counter; str.split is
    # equivalent to re.split(",") here and avoids the regex machinery.
    for count, item in enumerate(coordinates_list, start=1):
        coord = item.split(",")
        output_list.append("{} {} {}\n".format(count, coord[0], coord[1]))
#Gera o arquivo de saida.
#Gera o arquivo de saida.
def write_output_file():
    """Write every entry of ``output_list`` to ``OUTPUT_fname``.

    Each entry already ends with "\n"; the extra "\n" appended here is kept
    for backward compatibility (the original output had a blank line after
    each record).
    """
    # ``with`` guarantees the file is closed even if a write fails; the
    # original "".join(str(s) for s in item) joined the characters of a
    # string back into the same string, so the item is written directly.
    with open(OUTPUT_fname, "w+") as out:
        for item in output_list:
            out.write(item + "\n")
#Processa o arquivo XML de entrada.
#Processa o arquivo XML de entrada.
def parse_xml():
    """Extract "x,y" coordinate pairs from the first <path> of the SVG.

    Reads the file named by the module-level ``SVG_fname`` and stores the
    matches in the module-level ``coordinates_list``.
    """
    global coordinates_list
    tree = etree.parse(SVG_fname)
    root = tree.getroot()
    # The 'd' attribute of the first path element holds the outline data.
    coordinates = root.find('.//{%s}path' % SVG_namespace).get("d")
    # Match only "float,float" pairs; integer-valued coordinates without a
    # decimal point would not be captured by this pattern.
    coordinates_list = re.findall("[0-9]+\.[0-9]+,[0-9]+\.[0-9]+", coordinates)
if __name__ == "__main__":
    # argv: <svg input> [<output file>]; default output name is kept when
    # only the input is given.
    if len(sys.argv) < 2:
        usage()
    elif len(sys.argv) < 3:
        print("Gravando resultados no arquivo de saida \"{}\"\n".format(OUTPUT_fname))
    elif len(sys.argv) == 3:
        OUTPUT_fname = sys.argv[2]
        print("Gravando resultados no arquivo de saida \"{}\"\n".format(OUTPUT_fname))
    else:
        usage()
    SVG_fname = sys.argv[1]
    # Pipeline: parse SVG -> dedupe points -> number them -> write file.
    parse_xml()
    remove_duplicates(coordinates_list)
    enumerate_coordinates()
    write_output_file()
|
mit
| -5,315,818,081,049,413,000
| 25.756757
| 86
| 0.610915
| false
| 3.111635
| false
| false
| false
|
nextcloud/appstore
|
nextcloudappstore/api/v1/serializers.py
|
1
|
5412
|
from django.contrib.auth import get_user_model
from parler_rest.fields import TranslatedFieldsField
from parler_rest.serializers import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.fields import SerializerMethodField, DateTimeField
from nextcloudappstore.core.models import PhpExtensionDependency, \
DatabaseDependency, Category, AppAuthor, AppRelease, Screenshot, \
AppRating, App, NextcloudRelease
from nextcloudappstore.core.validators import HttpsUrlValidator
class PhpExtensionDependencySerializer(serializers.ModelSerializer):
    """Serializes a PHP extension dependency of an app release."""
    id = serializers.ReadOnlyField(source='php_extension.id')
    version_spec = SerializerMethodField()
    raw_version_spec = SerializerMethodField()

    class Meta:
        model = PhpExtensionDependency
        fields = ('id', 'version_spec', 'raw_version_spec')

    def get_version_spec(self, obj):
        # Specs are stored comma-separated; the API exposes them
        # space-separated.
        return obj.version_spec.replace(',', ' ')

    def get_raw_version_spec(self, obj):
        return obj.raw_version_spec.replace(',', ' ')
class DatabaseDependencySerializer(serializers.ModelSerializer):
    """Serializes a database dependency of an app release."""
    id = serializers.ReadOnlyField(source='database.id')
    version_spec = SerializerMethodField()
    raw_version_spec = SerializerMethodField()

    class Meta:
        model = DatabaseDependency
        fields = ('id', 'version_spec', 'raw_version_spec')

    def get_version_spec(self, obj):
        # Same comma -> space normalization as the PHP extension serializer.
        return obj.version_spec.replace(',', ' ')

    def get_raw_version_spec(self, obj):
        return obj.raw_version_spec.replace(',', ' ')
class CategorySerializer(TranslatableModelSerializer):
    """Serializes an app category with its translated names."""
    translations = TranslatedFieldsField(shared_model=Category)

    class Meta:
        model = Category
        fields = ('id', 'translations')
class NextcloudReleaseSerializer(serializers.ModelSerializer):
    """Serializes a Nextcloud server release entry."""
    class Meta:
        model = NextcloudRelease
        fields = ('has_release', 'version', 'is_supported')
class AuthorSerializer(serializers.ModelSerializer):
    """Serializes an app author's contact details."""
    class Meta:
        model = AppAuthor
        fields = ('name', 'mail', 'homepage')
class AppReleaseSerializer(serializers.ModelSerializer):
    """Serializes a single app release, including its dependencies.

    Version specs are stored comma-separated and exposed space-separated
    via the get_* methods below.
    """
    databases = DatabaseDependencySerializer(many=True, read_only=True,
                                             source='databasedependencies')
    php_extensions = \
        PhpExtensionDependencySerializer(many=True, read_only=True,
                                         source='phpextensiondependencies')
    php_version_spec = SerializerMethodField()
    platform_version_spec = SerializerMethodField()
    raw_php_version_spec = SerializerMethodField()
    raw_platform_version_spec = SerializerMethodField()
    translations = TranslatedFieldsField(shared_model=AppRelease)

    class Meta:
        model = AppRelease
        fields = (
            'version', 'php_extensions', 'databases', 'shell_commands',
            'php_version_spec', 'platform_version_spec', 'min_int_size',
            'download', 'created', 'licenses', 'last_modified', 'is_nightly',
            'raw_php_version_spec', 'raw_platform_version_spec', 'signature',
            'translations', 'signature_digest'
        )

    def get_platform_version_spec(self, obj):
        return obj.platform_version_spec.replace(',', ' ')

    def get_php_version_spec(self, obj):
        return obj.php_version_spec.replace(',', ' ')

    def get_raw_platform_version_spec(self, obj):
        return obj.raw_platform_version_spec.replace(',', ' ')

    def get_raw_php_version_spec(self, obj):
        return obj.raw_php_version_spec.replace(',', ' ')
class ScreenshotSerializer(serializers.ModelSerializer):
    """Serializes an app screenshot and its thumbnail URL."""
    class Meta:
        model = Screenshot
        fields = ('url', 'small_thumbnail')
class AppSerializer(serializers.ModelSerializer):
    """Serializes an app with its releases, screenshots and authors."""
    releases = AppReleaseSerializer(many=True, read_only=True)
    discussion = SerializerMethodField()
    screenshots = ScreenshotSerializer(many=True, read_only=True)
    authors = AuthorSerializer(many=True, read_only=True)
    translations = TranslatedFieldsField(shared_model=App)
    # Exposed as 'last_modified' but backed by the latest release timestamp.
    last_modified = DateTimeField(source='last_release')

    class Meta:
        model = App
        fields = (
            'id', 'categories', 'user_docs', 'admin_docs', 'developer_docs',
            'issue_tracker', 'website', 'created', 'last_modified', 'releases',
            'screenshots', 'translations', 'is_featured', 'authors',
            'rating_recent', 'rating_overall', 'rating_num_recent',
            'rating_num_overall', 'certificate', 'discussion'
        )

    def get_discussion(self, obj):
        return obj.discussion_url
class UserSerializer(serializers.ModelSerializer):
    """Serializes the minimal public fields of a user account."""
    class Meta:
        model = get_user_model()
        fields = ('id', 'first_name', 'last_name')
class AppRatingSerializer(serializers.ModelSerializer):
    """Serializes a user's rating (and translated comment) for an app."""
    user = UserSerializer(many=False, read_only=True)
    translations = TranslatedFieldsField(shared_model=AppRating)

    class Meta:
        model = AppRating
        fields = ('rating', 'rated_at', 'translations', 'user', 'app')
class AppReleaseDownloadSerializer(serializers.Serializer):
    """Input serializer for registering an app release download URL."""
    # Only HTTPS download URLs are accepted.
    download = serializers.URLField(validators=[HttpsUrlValidator()])
    signature = serializers.CharField()
    nightly = serializers.BooleanField(required=False, default=False)
class AppRegisterSerializer(serializers.Serializer):
    """Input serializer for registering a new app certificate."""
    certificate = serializers.CharField()
    signature = serializers.CharField()
|
agpl-3.0
| -6,720,910,243,370,764,000
| 34.84106
| 79
| 0.683666
| false
| 4.221529
| false
| false
| false
|
pomma89/Dessert
|
Dessert.Benchmarks/Common.py
|
1
|
2245
|
#
# Common.py
#
# Author(s):
# Alessio Parma <alessio.parma@gmail.com>
#
# Copyright (c) 2012-2016 Alessio Parma <alessio.parma@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import random
from Galois.MemoryRecorder import memory_usage
# Simulation length (abstract time units) and derived parameters.
simTime = 1000
# How often the memory recorder samples memory usage.
memRecFreq = simTime/5.0
# Bounds for the random per-process timeout.
minTimeout = simTime/100.0
maxTimeout = simTime/20.0
# Number of repetitions per process count and the counts to benchmark.
repetitionCount = 21
processCounts = range(500, 20500, 500)
import platform
# Tag used to label result files by platform.
if platform.system().lower().startswith("linux"):
    tag = "simpy-linux"
else:
    tag = "simpy-windows"
class Counter:
    """Counts simulation events and draws random process delays."""

    def __init__(self):
        self._rng = random.Random()
        self._count = 0

    def total(self):
        """Return how many events have been counted so far."""
        return self._count

    def increment(self):
        """Count one more event."""
        self._count += 1

    def randomDelay(self):
        """Return a uniform random delay in [minTimeout, maxTimeout]."""
        return self._rng.uniform(minTimeout, maxTimeout)
class Result:
    """Immutable record of one benchmark run: event count + memory usage."""

    def __init__(self, eventCount, avgMemUsage):
        self._events = eventCount
        self._mem = avgMemUsage

    def eventCount(self):
        """Return the number of events processed during the run."""
        return self._events

    def averageMemUsage(self):
        """Return the average memory usage observed during the run."""
        return self._mem
def memoryRecorder(env, tally):
    """SimPy process: sample memory usage every ``memRecFreq`` time units.

    :param env: simulation environment providing ``timeout``.
    :param tally: statistics collector with an ``observe`` method.
    """
    while True:
        yield env.timeout(memRecFreq)
        tally.observe(memory_usage())
|
mit
| -4,538,445,810,943,743,500
| 30.591549
| 79
| 0.712885
| false
| 3.853952
| false
| false
| false
|
google/clif
|
clif/python/proto.py
|
1
|
5489
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Generate CLIF extension C++ source for a protobuf.
PROTO -cOUTPATH/CCNAME \
-hOUTPATH/HNAME \
--strip_dir=SYSPATH \
SYSPATH/PKGPATH/NAME.proto
reads NAME.proto and generates C++ CCNAME source and HNAME header files.
"""
import argparse
import itertools
import sys
from clif.python import gen
from clif.python import clif_types as types
from clif.python.utils import proto_util
VALID_EXT = ['.proto']
gen.PY3OUTPUT = None # Generate version-agnostic headers.
FLAGS = None
class _ParseError(Exception):
  """Raised when the input .proto file cannot be parsed."""
  pass
def _ParseCommandline(doc, argv):
"""Define command-line flags and return parsed argv."""
parser = argparse.ArgumentParser(description=doc, add_help=False)
parser.add_argument('--source_dir', '-s', default='',
help=('The base of the source code tree to strip from'
' file names.'))
parser.add_argument('--strip_dir', '-d', default='',
help=('The base of the generated code tree to strip from'
' file names.'))
parser.add_argument('--ccdeps_out', '-c', help='output filename for base .cc')
parser.add_argument('--header_out', '-h', help='output filename for .h')
parser.add_argument('--allow_empty_package', action='store_true',
help=('Generate CLIF conversion library in ::clif '
'namespace, ADL will not work.'))
parser.add_argument('protobuf', nargs=1)
return parser.parse_args(argv[1:])
def _CppName(desc):
"""Return the fully qualified C++ name of the entity in |desc|."""
return '::'+desc.fqname.replace('.', '::')
def _PyName(desc, pkg):
"""Return the Python name of the entity in |desc| from proto package |pkg|."""
if not pkg: return desc.fqname
assert desc.fqname.startswith(pkg)
return desc.fqname[len(pkg)+1:] # Add 1 for '.' between pkg and name.
def CreatePyTypeInfo(desc, path,
                     package_required=True, generate_service_info=False):
  """Create the type objects from the proto file descriptor in |desc|.

  Args:
    desc: proto_util.ProtoFileInfo for the parsed .proto file.
    path: path of the .proto file without extension; used to derive the
      generated _pb2 module name.
    package_required: if True, a .proto without a package statement is an
      error (types would land in ::clif and ADL would not work).
    generate_service_info: also emit capsule types for service definitions.

  Returns:
    List of clif_types type objects for messages, enums and (optionally)
    services.

  Raises:
    ValueError: missing package statement while package_required is True.
  """
  # e.g. "a/b/my-file" -> "a.b.my_file_pb2"
  pypath = '' + path.replace('/', '.').replace('-', '_') + '_pb2'
  messages = []  # Proto messages.
  p = desc.PackageName()
  if p:
    n = '::'+p.replace('.', '::') + '::'
  else:
    if package_required:
      raise ValueError('Package statement required')
    n = '::'
  for m in desc.Messages():
    messages.append(types.ProtoType(_CppName(m), _PyName(m, p), pypath, ns=n))
  for e in desc.Enums():
    messages.append(types.ProtoEnumType(_CppName(e), _PyName(e, p), ns=n))
  if generate_service_info:
    for s in desc.Services():
      messages.append(types.CapsuleType(_CppName(s), _PyName(s, p), ns=n))
  return messages
def GenerateFrom(messages, proto_filename, clif_hdr, proto_hdr):
  """Traverse ast and generate output files.

  Writes the CLIF header (FLAGS.header_out) and the conversion-library
  source (FLAGS.ccdeps_out) for the given type objects.
  """
  with open(FLAGS.header_out, 'w') as hout:
    gen.WriteTo(hout, gen.Headlines(
        proto_filename, [proto_hdr, 'clif/python/postconv.h']))
    gen.WriteTo(hout, _GenHeader(messages))
  with open(FLAGS.ccdeps_out, 'w') as cout:
    gen.WriteTo(cout, gen.Headlines(
        proto_filename, ['clif/python/runtime.h',
                         'clif/python/types.h',
                         clif_hdr]))
    # Emit one group of type converters per C++ namespace.
    for ns, ts in itertools.groupby(messages, types.Namespace):
      if ns == '::':
        # Packageless protos fall back to the ::clif namespace.
        ns = 'clif'
      gen.WriteTo(cout, gen.TypeConverters(ns, ts))
def _GenHeader(messages):
  """Helper function for GenerateFrom.

  Yields the header-file lines: per-namespace blocks with each type's
  declaration. Packageless types (namespace '::') are emitted into the
  ::clif namespace directly; other namespaces import ::clif via a
  using-directive.
  """
  for ns, ts in itertools.groupby(messages, types.Namespace):
    yield ''
    if ns == '::':
      ns = 'clif'
      yield gen.OpenNs(ns)
    else:
      yield gen.OpenNs(ns)
      yield 'using namespace ::clif;'
    yield ''
    for t in ts:
      for s in t.GenHeader():
        yield s
    yield ''
    yield gen.CloseNs(ns)
def main(_):
  """Validate flags, parse the .proto file and generate both outputs.

  Raises:
    NameError: input file does not end in a recognized proto extension.
    _ParseError: proto_util failed to parse the input file.
  """
  assert FLAGS.ccdeps_out and FLAGS.header_out, ('Both output files '
                                                 '(-c, -h) must be specified.')
  assert not FLAGS.strip_dir.endswith('/')
  assert FLAGS.header_out.startswith(FLAGS.strip_dir)
  strip_dir = len(FLAGS.strip_dir)+1  # +1 for '/'
  # Header path relative to the generated-code root; used for #include.
  hdr = FLAGS.header_out[strip_dir:]
  name = src = FLAGS.protobuf[0]
  assert not FLAGS.source_dir.endswith('/')
  if FLAGS.source_dir and name.startswith(FLAGS.source_dir):
    name = name[len(FLAGS.source_dir)+1:]  # +1 for '/'
  # for/else: runs the else only when no extension matched.
  for ext in VALID_EXT:
    if name.endswith(ext):
      pypath = name[:-len(ext)]
      break
  else:
    raise NameError('Proto file should have any%s extension' % VALID_EXT)
  desc = proto_util.ProtoFileInfo(src, FLAGS.source_dir)
  if not desc:
    raise _ParseError(desc.ErrorMsg())
  messages = CreatePyTypeInfo(desc, pypath, not FLAGS.allow_empty_package)
  GenerateFrom(messages, name, hdr, pypath+'.pb.h')
def ParseFlags():
  """Parse sys.argv into the module-level FLAGS namespace."""
  global FLAGS
  # __doc__'s first line is the one-line tool description.
  FLAGS = _ParseCommandline(__doc__.splitlines()[0], sys.argv)
def Start():
  """Program entry point: parse flags, then run the generator."""
  ParseFlags()
  main(0)
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
  Start()
|
apache-2.0
| -3,793,998,925,067,095,000
| 32.469512
| 80
| 0.638914
| false
| 3.585238
| false
| false
| false
|
khchine5/book
|
lino_book/projects/homeworkschool/fixtures/demo.py
|
1
|
1546
|
# -*- coding: UTF-8 -*-
# Copyright 2012-2013 Luc Saffre
# License: BSD (see file COPYING for details)
from lino.utils.instantiator import Instantiator, i2d
from django.utils.translation import ugettext_lazy as _
from lino.api import dd
def objects():
    """Demo fixture: yield two beginner course lines (German and French).

    The commented-out block below is an earlier version that also created
    weekly time slots; kept for reference.
    """
    #~ slot = Instantiator('courses.Slot','name start_time end_time').build
    #~
    #~ kw = dict(monday=True,tuesday=True,wednesday=False,thursday=True,friday=True)
    #~ yield slot("Erste Stunde","16:00","17:00",**kw)
    #~ yield slot("Zweite Stunde","17:00","18:00",**kw)
    #~ yield slot("Dritte Stunde","18:00","19:00",**kw)
    #~
    #~ kw = dict(wednesday=True)
    #~ yield slot("Mittwochs 13 Uhr","13:00","14:00",**kw)
    #~ yield slot("Mittwochs 14 Uhr","14:00","15:00",**kw)
    #~ yield slot("Mittwochs 15 Uhr","15:00","16:00",**kw)
    #~ yield slot("Mittwochs 16 Uhr","16:00","17:00",**kw)
    #~ yield slot("Mittwochs 17 Uhr","17:00","18:00",**kw)
    #~ yield slot("Mittwochs 18 Uhr","18:00","19:00",**kw)
    courses = dd.resolve_app('courses')
    # Course line names are stored per-language via babelkw.
    yield courses.Line(**dd.babelkw('name',
                                    de=u"Deutsch Anfänger",
                                    fr=u"Allemand débutants",
                                    en=u"German beginners",
                                    ))
    yield courses.Line(**dd.babelkw('name',
                                    de=u"Französisch Anfänger",
                                    fr=u"Français débutants",
                                    en=u"French beginners",
                                    ))
bsd-2-clause
| 4,543,015,073,316,322,000
| 36.560976
| 84
| 0.529221
| false
| 3.188406
| false
| false
| false
|
s0lst1c3/eaphammer
|
local/hostapd-eaphammer/tests/hwsim/test_radius.py
|
1
|
70562
|
# RADIUS tests
# Copyright (c) 2013-2016, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import binascii
import hashlib
import hmac
import logging
logger = logging.getLogger()
import os
import select
import struct
import subprocess
import threading
import time
import hostapd
from utils import HwsimSkip, require_under_vm, skip_with_fips, alloc_fail, fail_test, wait_fail_trigger
from test_ap_hs20 import build_dhcp_ack
from test_ap_ft import ft_params1
def connect(dev, ssid, wait_connect=True):
    """Associate *dev* with *ssid* using WPA2-Enterprise / EAP-PSK."""
    dev.connect(ssid, key_mgmt="WPA-EAP", scan_freq="2412",
                eap="PSK", identity="psk.user@example.com",
                password_hex="0123456789abcdef0123456789abcdef",
                wait_connect=wait_connect)
@remote_compatible
def test_radius_auth_unreachable(dev, apdev):
    """RADIUS Authentication server unreachable"""
    params = hostapd.wpa2_eap_params(ssid="radius-auth")
    # Point the AP at a port where no RADIUS server is listening.
    params['auth_server_port'] = "18139"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-auth", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
    if ev is None:
        raise Exception("Timeout on EAP start")
    logger.info("Checking for RADIUS retries")
    # Give hostapd time to retransmit the Access-Request at least once.
    time.sleep(4)
    mib = hapd.get_mib()
    if "radiusAuthClientAccessRequests" not in mib:
        raise Exception("Missing MIB fields")
    if int(mib["radiusAuthClientAccessRetransmissions"]) < 1:
        raise Exception("Missing RADIUS Authentication retransmission")
    if int(mib["radiusAuthClientPendingRequests"]) < 1:
        raise Exception("Missing pending RADIUS Authentication request")
def test_radius_auth_unreachable2(dev, apdev):
    """RADIUS Authentication server unreachable (2)"""
    # Temporarily route the (non-local) server address via lo so the AP can
    # be configured; once the route is removed the address is unreachable.
    subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
    params = hostapd.wpa2_eap_params(ssid="radius-auth")
    params['auth_server_addr'] = "192.168.213.17"
    params['auth_server_port'] = "18139"
    hapd = hostapd.add_ap(apdev[0], params)
    subprocess.call(['ip', 'ro', 'del', '192.168.213.17', 'dev', 'lo'])
    connect(dev[0], "radius-auth", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
    if ev is None:
        raise Exception("Timeout on EAP start")
    logger.info("Checking for RADIUS retries")
    # Allow the retransmission timers to fire before reading counters.
    time.sleep(4)
    mib = hapd.get_mib()
    if "radiusAuthClientAccessRequests" not in mib:
        raise Exception("Missing MIB fields")
    logger.info("radiusAuthClientAccessRetransmissions: " + mib["radiusAuthClientAccessRetransmissions"])
def test_radius_auth_unreachable3(dev, apdev):
    """RADIUS Authentication server initially unreachable, but then available"""
    # A blackhole route makes the configured server address unreachable.
    subprocess.call(['ip', 'ro', 'replace', 'blackhole', '192.168.213.18'])
    params = hostapd.wpa2_eap_params(ssid="radius-auth")
    params['auth_server_addr'] = "192.168.213.18"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-auth", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"])
    if ev is None:
        raise Exception("Timeout on EAP start")
    subprocess.call(['ip', 'ro', 'del', 'blackhole', '192.168.213.18'])
    time.sleep(0.1)
    dev[0].request("DISCONNECT")
    # Switch the AP to a reachable server and verify the retry succeeds.
    hapd.set('auth_server_addr_replace', '127.0.0.1')
    dev[0].request("RECONNECT")
    dev[0].wait_connected()
def test_radius_acct_unreachable(dev, apdev):
    """RADIUS Accounting server unreachable"""
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "127.0.0.1"
    # Port with no accounting server listening.
    params['acct_server_port'] = "18139"
    params['acct_server_shared_secret'] = "radius"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-acct")
    logger.info("Checking for RADIUS retries")
    # Let accounting retransmissions accumulate before reading the MIB.
    time.sleep(4)
    mib = hapd.get_mib()
    if "radiusAccClientRetransmissions" not in mib:
        raise Exception("Missing MIB fields")
    if int(mib["radiusAccClientRetransmissions"]) < 2:
        raise Exception("Missing RADIUS Accounting retransmissions")
    if int(mib["radiusAccClientPendingRequests"]) < 2:
        raise Exception("Missing pending RADIUS Accounting requests")
def test_radius_acct_unreachable2(dev, apdev):
    """RADIUS Accounting server unreachable(2)"""
    # Temporary lo route lets the AP configure; deleting it makes the
    # accounting server address unreachable.
    subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "192.168.213.17"
    params['acct_server_port'] = "18139"
    params['acct_server_shared_secret'] = "radius"
    hapd = hostapd.add_ap(apdev[0], params)
    subprocess.call(['ip', 'ro', 'del', '192.168.213.17', 'dev', 'lo'])
    connect(dev[0], "radius-acct")
    logger.info("Checking for RADIUS retries")
    time.sleep(4)
    mib = hapd.get_mib()
    if "radiusAccClientRetransmissions" not in mib:
        raise Exception("Missing MIB fields")
    if int(mib["radiusAccClientRetransmissions"]) < 1 and int(mib["radiusAccClientPendingRequests"]) < 1:
        raise Exception("Missing pending or retransmitted RADIUS Accounting requests")
def test_radius_acct_unreachable3(dev, apdev):
    """RADIUS Accounting server initially unreachable, but then available"""
    require_under_vm()
    # Blackhole the server address so initial accounting requests fail.
    subprocess.call(['ip', 'ro', 'replace', 'blackhole', '192.168.213.18'])
    as_hapd = hostapd.Hostapd("as")
    # Snapshot the authentication server's counters for the delta check.
    as_mib_start = as_hapd.get_mib(param="radius_server")
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "192.168.213.18"
    params['acct_server_port'] = "1813"
    params['acct_server_shared_secret'] = "radius"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-acct")
    subprocess.call(['ip', 'ro', 'del', 'blackhole', '192.168.213.18'])
    time.sleep(0.1)
    dev[0].request("DISCONNECT")
    # Switch to the reachable local server and reconnect.
    hapd.set('acct_server_addr_replace', '127.0.0.1')
    dev[0].request("RECONNECT")
    dev[0].wait_connected()
    time.sleep(1)
    as_mib_end = as_hapd.get_mib(param="radius_server")
    req_s = int(as_mib_start['radiusAccServTotalResponses'])
    req_e = int(as_mib_end['radiusAccServTotalResponses'])
    if req_e <= req_s:
        raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_unreachable4(dev, apdev):
    """RADIUS Accounting server unreachable and multiple STAs"""
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "18139"
    params['acct_server_shared_secret'] = "radius"
    hapd = hostapd.add_ap(apdev[0], params)
    # Repeated connect/disconnect cycles exercise pending-request cleanup
    # while the accounting server never answers.
    for i in range(20):
        connect(dev[0], "radius-acct")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
def test_radius_acct(dev, apdev):
    """RADIUS Accounting"""
    # Snapshot server-side accounting counters so the deltas can be checked
    # at the end of the test.
    as_hapd = hostapd.Hostapd("as")
    as_mib_start = as_hapd.get_mib(param="radius_server")
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "1813"
    params['acct_server_shared_secret'] = "radius"
    # Extra RADIUS attributes (attr_id:syntax:value; s=string, d=integer)
    # added to Access-Request and Accounting-Request messages.
    params['radius_auth_req_attr'] = ["126:s:Operator", "77:s:testing",
                                      "62:d:1"]
    params['radius_acct_req_attr'] = ["126:s:Operator", "62:d:1",
                                      "77:s:testing"]
    hapd = hostapd.add_ap(apdev[0], params)
    # Associate three stations using different EAP methods/identities.
    connect(dev[0], "radius-acct")
    dev[1].connect("radius-acct", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="PAX", identity="test-class",
                   password_hex="0123456789abcdef0123456789abcdef")
    dev[2].connect("radius-acct", key_mgmt="WPA-EAP",
                   eap="GPSK", identity="gpsk-cui",
                   password="abcdefghijklmnop0123456789abcdef",
                   scan_freq="2412")
    logger.info("Checking for RADIUS counters")
    # Poll (up to ~1.1 s) until the AP has seen an Accounting-Response for
    # each of the three stations.
    count = 0
    while True:
        mib = hapd.get_mib()
        if int(mib['radiusAccClientResponses']) >= 3:
            break
        time.sleep(0.1)
        count += 1
        if count > 10:
            raise Exception("Did not receive Accounting-Response packets")
    if int(mib['radiusAccClientRetransmissions']) > 0:
        raise Exception("Unexpected Accounting-Request retransmission")
    # Verify the server-side counters moved as expected.
    as_mib_end = as_hapd.get_mib(param="radius_server")
    req_s = int(as_mib_start['radiusAccServTotalRequests'])
    req_e = int(as_mib_end['radiusAccServTotalRequests'])
    if req_e < req_s + 2:
        raise Exception("Unexpected RADIUS server acct MIB value")
    acc_s = int(as_mib_start['radiusAuthServAccessAccepts'])
    acc_e = int(as_mib_end['radiusAuthServAccessAccepts'])
    if acc_e < acc_s + 1:
        raise Exception("Unexpected RADIUS server auth MIB value")
def test_radius_acct_non_ascii_ssid(dev, apdev):
    """RADIUS Accounting and non-ASCII SSID"""
    # SSID configured via ssid2 as a hex string (contains a non-ASCII octet).
    hex_ssid = "740665007374"
    params = hostapd.wpa2_eap_params()
    params.update({'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius",
                   'ssid2': hex_ssid})
    hostapd.add_ap(apdev[0], params)
    dev[0].connect(ssid2=hex_ssid, key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="PSK", identity="psk.user@example.com",
                   password_hex="0123456789abcdef0123456789abcdef")
def test_radius_acct_pmksa_caching(dev, apdev):
    """RADIUS Accounting with PMKSA caching"""
    # Snapshot server-side counters for the delta checks at the end.
    as_hapd = hostapd.Hostapd("as")
    as_mib_start = as_hapd.get_mib(param="radius_server")
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "1813"
    params['acct_server_shared_secret'] = "radius"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-acct")
    dev[1].connect("radius-acct", key_mgmt="WPA-EAP", scan_freq="2412",
                   eap="PAX", identity="test-class",
                   password_hex="0123456789abcdef0123456789abcdef")
    # Reassociate both stations; PMKSA caching skips the full EAP exchange
    # but must still trigger new accounting sessions.
    for d in [dev[0], dev[1]]:
        d.request("REASSOCIATE")
        d.wait_connected(timeout=15, error="Reassociation timed out")
    # Poll (up to ~1.1 s) for Accounting-Responses covering all four
    # sessions (two initial associations + two reassociations).
    count = 0
    while True:
        mib = hapd.get_mib()
        if int(mib['radiusAccClientResponses']) >= 4:
            break
        time.sleep(0.1)
        count += 1
        if count > 10:
            raise Exception("Did not receive Accounting-Response packets")
    if int(mib['radiusAccClientRetransmissions']) > 0:
        raise Exception("Unexpected Accounting-Request retransmission")
    as_mib_end = as_hapd.get_mib(param="radius_server")
    req_s = int(as_mib_start['radiusAccServTotalRequests'])
    req_e = int(as_mib_end['radiusAccServTotalRequests'])
    if req_e < req_s + 2:
        raise Exception("Unexpected RADIUS server acct MIB value")
    acc_s = int(as_mib_start['radiusAuthServAccessAccepts'])
    acc_e = int(as_mib_end['radiusAuthServAccessAccepts'])
    if acc_e < acc_s + 1:
        raise Exception("Unexpected RADIUS server auth MIB value")
def test_radius_acct_interim(dev, apdev):
    """RADIUS Accounting interim update"""
    auth_server = hostapd.Hostapd("as")
    # Configure a 1 s interim accounting update interval.
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params.update({'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius",
                   'radius_acct_interim_interval': "1"})
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-acct")
    logger.info("Checking for RADIUS counters")
    # Over a ~4 s window, at least three interim updates should arrive.
    mib_before = auth_server.get_mib(param="radius_server")
    time.sleep(4.1)
    mib_after = auth_server.get_mib(param="radius_server")
    req_s = int(mib_before['radiusAccServTotalRequests'])
    req_e = int(mib_after['radiusAccServTotalRequests'])
    if req_e < req_s + 3:
        raise Exception("Unexpected RADIUS server acct MIB value (req_e=%d req_s=%d)" % (req_e, req_s))
def test_radius_acct_interim_unreachable(dev, apdev):
    """RADIUS Accounting interim update with unreachable server"""
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    # Port 18139 has no listener, so every accounting frame times out.
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "18139"
    params['acct_server_shared_secret'] = "radius"
    params['radius_acct_interim_interval'] = "1"
    hapd = hostapd.add_ap(apdev[0], params)
    start = hapd.get_mib()
    connect(dev[0], "radius-acct")
    # Log message typo fixed: "interium" -> "interim".
    logger.info("Waiting for interim accounting updates")
    time.sleep(3.1)
    end = hapd.get_mib()
    # With a 1 s interim interval and an unreachable server, at least two
    # client-side timeouts should accumulate over the ~3 s window.
    req_s = int(start['radiusAccClientTimeouts'])
    req_e = int(end['radiusAccClientTimeouts'])
    if req_e < req_s + 2:
        raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_interim_unreachable2(dev, apdev):
    """RADIUS Accounting interim update with unreachable server (retry)"""
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    # Port 18139 has no listener, so every accounting frame times out.
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "18139"
    params['acct_server_shared_secret'] = "radius"
    # Use long enough interim update interval to allow RADIUS retransmission
    # case (3 seconds) to trigger first.
    params['radius_acct_interim_interval'] = "4"
    hapd = hostapd.add_ap(apdev[0], params)
    start = hapd.get_mib()
    connect(dev[0], "radius-acct")
    # Log message typo fixed: "interium" -> "interim".
    logger.info("Waiting for interim accounting updates")
    time.sleep(7.5)
    end = hapd.get_mib()
    # Expect at least two client-side timeouts (retransmission + interim).
    req_s = int(start['radiusAccClientTimeouts'])
    req_e = int(end['radiusAccClientTimeouts'])
    if req_e < req_s + 2:
        raise Exception("Unexpected RADIUS server acct MIB value")
def test_radius_acct_ipaddr(dev, apdev):
    """RADIUS Accounting and Framed-IP-Address"""
    try:
        _test_radius_acct_ipaddr(dev, apdev)
    finally:
        # Best-effort cleanup of the bridge created by the helper; errors
        # (e.g. bridge never created) are discarded via /dev/null.
        cleanup_cmds = [['ip', 'link', 'set', 'dev', 'ap-br0', 'down'],
                        ['brctl', 'delbr', 'ap-br0']]
        for cmd in cleanup_cmds:
            subprocess.call(cmd, stderr=open('/dev/null', 'w'))
def _test_radius_acct_ipaddr(dev, apdev):
    # Open AP with accounting and proxy ARP enabled, bridged through ap-br0,
    # so that hostapd can snoop DHCP and learn the station's IPv4 address
    # (reported as Framed-IP-Address in accounting).
    params = {"ssid": "radius-acct-open",
              'acct_server_addr': "127.0.0.1",
              'acct_server_port': "1813",
              'acct_server_shared_secret': "radius",
              'proxy_arp': '1',
              'ap_isolate': '1',
              'bridge': 'ap-br0'}
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    try:
        hapd.enable()
    except:
        # For now, do not report failures due to missing kernel support
        raise HwsimSkip("Could not start hostapd - assume proxyarp not supported in kernel version")
    bssid = apdev[0]['bssid']
    # Zero forwarding delay so the bridge forwards frames immediately.
    subprocess.call(['brctl', 'setfd', 'ap-br0', '0'])
    subprocess.call(['ip', 'link', 'set', 'dev', 'ap-br0', 'up'])
    dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
    addr0 = dev[0].own_addr()
    # Inject a forged DHCPACK for the station through the bridge so the
    # snooping code records 192.168.1.123 for this client.
    pkt = build_dhcp_ack(dst_ll="ff:ff:ff:ff:ff:ff", src_ll=bssid,
                         ip_src="192.168.1.1", ip_dst="255.255.255.255",
                         yiaddr="192.168.1.123", chaddr=addr0)
    if "OK" not in hapd.request("DATA_TEST_FRAME ifname=ap-br0 " + binascii.hexlify(pkt).decode()):
        raise Exception("DATA_TEST_FRAME failed")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    hapd.disable()
def send_and_check_reply(srv, req, code, error_cause=0):
    """Send a RADIUS request and validate the response.

    Verifies the response code and, when error_cause is non-zero, that the
    reply carries a matching Error-Cause attribute.
    """
    reply = srv.SendPacket(req)
    logger.debug("RADIUS response from hostapd")
    for attr in list(reply.keys()):
        logger.debug("%s: %s" % (attr, reply[attr]))
    if reply.code != code:
        raise Exception("Unexpected response code")
    if not error_cause:
        return
    if 'Error-Cause' not in reply:
        raise Exception("Missing Error-Cause")
    if reply['Error-Cause'][0] != error_cause:
        raise Exception("Unexpected Error-Cause: {}".format(reply['Error-Cause']))
def test_radius_acct_psk(dev, apdev):
    """RADIUS Accounting - PSK"""
    auth_server = hostapd.Hostapd("as")
    params = hostapd.wpa2_params(ssid="radius-acct", passphrase="12345678")
    params.update({'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius"})
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("radius-acct", psk="12345678", scan_freq="2412")
def test_radius_acct_psk_sha256(dev, apdev):
    """RADIUS Accounting - PSK SHA256"""
    auth_server = hostapd.Hostapd("as")
    params = hostapd.wpa2_params(ssid="radius-acct", passphrase="12345678")
    params.update({"wpa_key_mgmt": "WPA-PSK-SHA256",
                   'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius"})
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("radius-acct", key_mgmt="WPA-PSK-SHA256",
                   psk="12345678", scan_freq="2412")
def test_radius_acct_ft_psk(dev, apdev):
    """RADIUS Accounting - FT-PSK"""
    auth_server = hostapd.Hostapd("as")
    params = ft_params1(ssid="radius-acct", passphrase="12345678")
    params.update({'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius"})
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("radius-acct", key_mgmt="FT-PSK",
                   psk="12345678", scan_freq="2412")
def test_radius_acct_ieee8021x(dev, apdev):
    """RADIUS Accounting - IEEE 802.1X"""
    skip_with_fips(dev[0])
    auth_server = hostapd.Hostapd("as")
    # Dynamic-WEP 802.1X configuration with accounting enabled.
    params = hostapd.radius_params()
    params.update({"ssid": "radius-acct-1x",
                   "ieee8021x": "1",
                   "wep_key_len_broadcast": "13",
                   "wep_key_len_unicast": "13",
                   'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius"})
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].connect("radius-acct-1x", key_mgmt="IEEE8021X", eap="PSK",
                   identity="psk.user@example.com",
                   password_hex="0123456789abcdef0123456789abcdef",
                   scan_freq="2412")
def test_radius_das_disconnect(dev, apdev):
    """RADIUS Dynamic Authorization Extensions - Disconnect"""
    try:
        import pyrad.client
        import pyrad.packet
        import pyrad.dictionary
        import radius_das
    except ImportError:
        raise HwsimSkip("No pyrad modules available")
    # AP with a Dynamic Authorization Server (RFC 5176) on UDP port 3799.
    params = hostapd.wpa2_eap_params(ssid="radius-das")
    params['radius_das_port'] = "3799"
    params['radius_das_client'] = "127.0.0.1 secret"
    params['radius_das_require_event_timestamp'] = "1"
    params['own_ip_addr'] = "127.0.0.1"
    params['nas_identifier'] = "nas.example.com"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-das")
    addr = dev[0].p2p_interface_addr()
    sta = hapd.get_sta(addr)
    # NOTE: 'id' and 'dict' shadow builtins; kept for byte-compatibility.
    id = sta['dot1xAuthSessionId']
    dict = pyrad.dictionary.Dictionary("dictionary.radius")
    srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
                              secret=b"secret", dict=dict)
    srv.retries = 1
    srv.timeout = 1
    # --- Requests that must be silently ignored (no response at all) ---
    logger.info("Disconnect-Request with incorrect secret")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"incorrect",
                                      User_Name="foo",
                                      NAS_Identifier="localhost",
                                      Event_Timestamp=int(time.time()))
    logger.debug(req)
    try:
        reply = srv.SendPacket(req)
        raise Exception("Unexpected response to Disconnect-Request")
    except pyrad.client.Timeout:
        logger.info("Disconnect-Request with incorrect secret properly ignored")
    logger.info("Disconnect-Request without Event-Timestamp")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      User_Name="psk.user@example.com")
    logger.debug(req)
    try:
        reply = srv.SendPacket(req)
        raise Exception("Unexpected response to Disconnect-Request")
    except pyrad.client.Timeout:
        logger.info("Disconnect-Request without Event-Timestamp properly ignored")
    logger.info("Disconnect-Request with non-matching Event-Timestamp")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      User_Name="psk.user@example.com",
                                      Event_Timestamp=123456789)
    logger.debug(req)
    try:
        reply = srv.SendPacket(req)
        raise Exception("Unexpected response to Disconnect-Request")
    except pyrad.client.Timeout:
        logger.info("Disconnect-Request with non-matching Event-Timestamp properly ignored")
    # --- Requests that must be NAKed with specific Error-Cause values:
    # 401=Unsupported Attribute, 403=NAS Identification Mismatch,
    # 407=Invalid Attribute Value, 503=Session Context Not Found,
    # 508=Multiple Session Selection Unsupported ---
    logger.info("Disconnect-Request with unsupported attribute")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      User_Name="foo",
                                      User_Password="foo",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 401)
    logger.info("Disconnect-Request with invalid Calling-Station-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      User_Name="foo",
                                      Calling_Station_Id="foo",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 407)
    logger.info("Disconnect-Request with mismatching User-Name")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      User_Name="foo",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with mismatching Calling-Station-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Calling_Station_Id="12:34:56:78:90:aa",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with mismatching Acct-Session-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Acct_Session_Id="12345678-87654321",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with mismatching Acct-Session-Id (len)")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Acct_Session_Id="12345678",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with mismatching Acct-Multi-Session-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Acct_Multi_Session_Id="12345678+87654321",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with mismatching Acct-Multi-Session-Id (len)")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Acct_Multi_Session_Id="12345678",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    logger.info("Disconnect-Request with no session identification attributes")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 503)
    # None of the above may have disconnected the station.
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
    if ev is not None:
        raise Exception("Unexpected disconnection")
    logger.info("Disconnect-Request with mismatching NAS-IP-Address")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="192.168.3.4",
                                      Acct_Session_Id=id,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 403)
    logger.info("Disconnect-Request with mismatching NAS-Identifier")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_Identifier="unknown.example.com",
                                      Acct_Session_Id=id,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, 403)
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
    if ev is not None:
        raise Exception("Unexpected disconnection")
    # --- Matching requests: each must be ACKed and actually disconnect the
    # station, which then reconnects automatically ---
    logger.info("Disconnect-Request with matching Acct-Session-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Acct_Session_Id=id,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].wait_disconnected(timeout=10)
    dev[0].wait_connected(timeout=10, error="Re-connection timed out")
    logger.info("Disconnect-Request with matching Acct-Multi-Session-Id")
    sta = hapd.get_sta(addr)
    multi_sess_id = sta['authMultiSessionId']
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Acct_Multi_Session_Id=multi_sess_id,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].wait_disconnected(timeout=10)
    dev[0].wait_connected(timeout=10, error="Re-connection timed out")
    logger.info("Disconnect-Request with matching User-Name")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_Identifier="nas.example.com",
                                      User_Name="psk.user@example.com",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].wait_disconnected(timeout=10)
    dev[0].wait_connected(timeout=10, error="Re-connection timed out")
    logger.info("Disconnect-Request with matching Calling-Station-Id")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      Calling_Station_Id=addr,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].wait_disconnected(timeout=10)
    # This disconnection must also clear the PMKSA entry, so the
    # reconnection has to run full EAP authentication again.
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED", "CTRL-EVENT-CONNECTED"])
    if ev is None:
        raise Exception("Timeout while waiting for re-connection")
    if "CTRL-EVENT-EAP-STARTED" not in ev:
        raise Exception("Unexpected skipping of EAP authentication in reconnection")
    dev[0].wait_connected(timeout=10, error="Re-connection timed out")
    logger.info("Disconnect-Request with matching Calling-Station-Id and non-matching CUI")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Calling_Station_Id=addr,
                                      Chargeable_User_Identity="foo@example.com",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=503)
    logger.info("Disconnect-Request with matching CUI")
    dev[1].connect("radius-das", key_mgmt="WPA-EAP",
                   eap="GPSK", identity="gpsk-cui",
                   password="abcdefghijklmnop0123456789abcdef",
                   scan_freq="2412")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      Chargeable_User_Identity="gpsk-chargeable-user-identity",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[1].wait_disconnected(timeout=10)
    dev[1].wait_connected(timeout=10, error="Re-connection timed out")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
    if ev is not None:
        raise Exception("Unexpected disconnection")
    connect(dev[2], "radius-das")
    logger.info("Disconnect-Request with matching User-Name - multiple sessions matching")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_Identifier="nas.example.com",
                                      User_Name="psk.user@example.com",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=508)
    logger.info("Disconnect-Request with User-Name matching multiple sessions, Calling-Station-Id only one")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_Identifier="nas.example.com",
                                      Calling_Station_Id=addr,
                                      User_Name="psk.user@example.com",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].wait_disconnected(timeout=10)
    dev[0].wait_connected(timeout=10, error="Re-connection timed out")
    ev = dev[2].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
    if ev is not None:
        raise Exception("Unexpected disconnection")
    # --- Disconnect-Requests targeting already-disassociated sessions ---
    logger.info("Disconnect-Request with matching Acct-Multi-Session-Id after disassociation")
    sta = hapd.get_sta(addr)
    multi_sess_id = sta['authMultiSessionId']
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=10)
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Acct_Multi_Session_Id=multi_sess_id,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
    if ev is None:
        raise Exception("Timeout on EAP start")
    dev[0].wait_connected(timeout=15)
    logger.info("Disconnect-Request with matching User-Name after disassociation")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=10)
    dev[2].request("DISCONNECT")
    dev[2].wait_disconnected(timeout=10)
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      User_Name="psk.user@example.com",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    logger.info("Disconnect-Request with matching CUI after disassociation")
    dev[1].request("DISCONNECT")
    dev[1].wait_disconnected(timeout=10)
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Chargeable_User_Identity="gpsk-chargeable-user-identity",
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    logger.info("Disconnect-Request with matching Calling-Station-Id after disassociation")
    dev[0].request("RECONNECT")
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
    if ev is None:
        raise Exception("Timeout on EAP start")
    dev[0].wait_connected(timeout=15)
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=10)
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Calling_Station_Id=addr,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
    logger.info("Disconnect-Request with mismatching Calling-Station-Id after disassociation")
    # Same identification as the previous request, but the session context
    # was already removed by it, so 503 (Session Context Not Found) is
    # expected now.
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Calling_Station_Id=addr,
                                      Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.DisconnectNAK, error_cause=503)
def add_message_auth_req(req):
    """Add a valid Message-Authenticator attribute (RFC 2869) to req.

    Computes HMAC-MD5 over the request with an all-zeros placeholder in the
    Message-Authenticator position, then replaces the placeholder with the
    resulting digest.
    """
    req.authenticator = req.CreateAuthenticator()
    # Fix: hmac.new() has required an explicit digestmod since Python 3.8;
    # RADIUS Message-Authenticator is defined as HMAC-MD5 keyed with the
    # shared secret.
    hmac_obj = hmac.new(req.secret, digestmod='md5')
    hmac_obj.update(struct.pack("B", req.code))
    hmac_obj.update(struct.pack("B", req.id))
    # request attributes
    req.AddAttribute("Message-Authenticator", 16*b"\x00")
    attrs = b''
    for code, datalst in sorted(req.items()):
        for data in datalst:
            attrs += req._PktEncodeAttribute(code, data)
    # Length
    flen = 4 + 16 + len(attrs)
    hmac_obj.update(struct.pack(">H", flen))
    hmac_obj.update(16*b"\x00") # all zeros Authenticator in calculation
    hmac_obj.update(attrs)
    # Drop the placeholder (attribute type 80) and add the real digest.
    del req[80]
    req.AddAttribute("Message-Authenticator", hmac_obj.digest())
def test_radius_das_disconnect_time_window(dev, apdev):
    """RADIUS Dynamic Authorization Extensions - Disconnect - time window"""
    try:
        import pyrad.client
        import pyrad.packet
        import pyrad.dictionary
        import radius_das
    except ImportError:
        raise HwsimSkip("No pyrad modules available")
    # DAS configured to require Event-Timestamp and Message-Authenticator
    # and to accept timestamps only within a 10 s window.
    params = hostapd.wpa2_eap_params(ssid="radius-das")
    params['radius_das_port'] = "3799"
    params['radius_das_client'] = "127.0.0.1 secret"
    params['radius_das_require_event_timestamp'] = "1"
    params['radius_das_require_message_authenticator'] = "1"
    params['radius_das_time_window'] = "10"
    params['own_ip_addr'] = "127.0.0.1"
    params['nas_identifier'] = "nas.example.com"
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-das")
    addr = dev[0].own_addr()
    sta = hapd.get_sta(addr)
    id = sta['dot1xAuthSessionId']
    dict = pyrad.dictionary.Dictionary("dictionary.radius")
    srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
                              secret=b"secret", dict=dict)
    srv.retries = 1
    srv.timeout = 1
    # Fixed copy-pasted log message: this step verifies that an
    # Event-Timestamp older than the 10 s window is silently ignored.
    logger.info("Disconnect-Request with too old Event-Timestamp")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Calling_Station_Id=addr,
                                      Event_Timestamp=int(time.time()) - 50)
    add_message_auth_req(req)
    logger.debug(req)
    try:
        reply = srv.SendPacket(req)
        raise Exception("Unexpected response to Disconnect-Request")
    except pyrad.client.Timeout:
        logger.info("Disconnect-Request with non-matching Event-Timestamp properly ignored")
    # Fixed copy-pasted log message: a current Event-Timestamp must be
    # accepted and ACKed.
    logger.info("Disconnect-Request with valid Event-Timestamp")
    req = radius_das.DisconnectPacket(dict=dict, secret=b"secret",
                                      NAS_IP_Address="127.0.0.1",
                                      NAS_Identifier="nas.example.com",
                                      Calling_Station_Id=addr,
                                      Event_Timestamp=int(time.time()))
    add_message_auth_req(req)
    send_and_check_reply(srv, req, pyrad.packet.DisconnectACK)
def test_radius_das_coa(dev, apdev):
    """RADIUS Dynamic Authorization Extensions - CoA"""
    try:
        import pyrad.client
        import pyrad.packet
        import pyrad.dictionary
        import radius_das
    except ImportError:
        raise HwsimSkip("No pyrad modules available")
    params = hostapd.wpa2_eap_params(ssid="radius-das")
    params.update({'radius_das_port': "3799",
                   'radius_das_client': "127.0.0.1 secret",
                   'radius_das_require_event_timestamp': "1"})
    hapd = hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-das")
    sta_addr = dev[0].p2p_interface_addr()
    session_id = hapd.get_sta(sta_addr)['dot1xAuthSessionId']
    attr_dict = pyrad.dictionary.Dictionary("dictionary.radius")
    srv = pyrad.client.Client(server="127.0.0.1", acctport=3799,
                              secret=b"secret", dict=attr_dict)
    srv.retries = 1
    srv.timeout = 1
    # hostapd does not currently support CoA-Request, so NAK is expected
    logger.info("CoA-Request with matching Acct-Session-Id")
    req = radius_das.CoAPacket(dict=attr_dict, secret=b"secret",
                               Acct_Session_Id=session_id,
                               Event_Timestamp=int(time.time()))
    send_and_check_reply(srv, req, pyrad.packet.CoANAK, error_cause=405)
def test_radius_ipv6(dev, apdev):
    """RADIUS connection over IPv6"""
    # Dedicated IPv6-enabled RADIUS server instance on apdev[1].
    params = {'ssid': 'as',
              'beacon_int': '2000',
              'radius_server_clients': 'auth_serv/radius_clients_ipv6.conf',
              'radius_server_ipv6': '1',
              'radius_server_auth_port': '18129',
              'radius_server_acct_port': '18139',
              'eap_server': '1',
              'eap_user_file': 'auth_serv/eap_user.conf',
              'ca_cert': 'auth_serv/ca.pem',
              'server_cert': 'auth_serv/server.pem',
              'private_key': 'auth_serv/server.key'}
    hostapd.add_ap(apdev[1], params)
    # AP on apdev[0] using IPv6 addressing toward that server.
    params = hostapd.wpa2_eap_params(ssid="radius-ipv6")
    params.update({'auth_server_addr': "::0",
                   'auth_server_port': "18129",
                   'acct_server_addr': "::0",
                   'acct_server_port': "18139",
                   'acct_server_shared_secret': "radius",
                   'own_ip_addr': "::0"})
    hostapd.add_ap(apdev[0], params)
    connect(dev[0], "radius-ipv6")
def test_radius_macacl(dev, apdev):
    """RADIUS MAC ACL"""
    # macaddr_acl=2 delegates the accept/deny decision to the RADIUS server.
    params = hostapd.radius_params()
    params.update({"ssid": "radius", "macaddr_acl": "2"})
    hostapd.add_ap(apdev[0], params)
    dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412")
    # Invalid VLAN ID from RADIUS server
    dev[2].connect("radius", key_mgmt="NONE", scan_freq="2412")
    dev[2].request("REMOVE_NETWORK all")
    dev[2].wait_disconnected()
    dev[2].connect("radius", key_mgmt="NONE", scan_freq="2412")
def test_radius_macacl_acct(dev, apdev):
    """RADIUS MAC ACL and accounting enabled"""
    params = hostapd.radius_params()
    params.update({"ssid": "radius",
                   "macaddr_acl": "2",
                   'acct_server_addr': "127.0.0.1",
                   'acct_server_port': "1813",
                   'acct_server_shared_secret': "radius"})
    hostapd.add_ap(apdev[0], params)
    for station in (dev[0], dev[1]):
        station.connect("radius", key_mgmt="NONE", scan_freq="2412")
    # Disconnect/reconnect one station to exercise accounting stop/start.
    dev[1].request("DISCONNECT")
    dev[1].wait_disconnected()
    dev[1].request("RECONNECT")
def test_radius_macacl_oom(dev, apdev):
    """RADIUS MAC ACL and OOM"""
    params = hostapd.radius_params()
    params.update({"ssid": "radius", "macaddr_acl": "2"})
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    # Trigger allocation failures at different points of the RADIUS-based
    # MAC ACL check; each station must still manage to connect.
    cases = [(dev[0], 1, "hostapd_allowed_address"),
             (dev[1], 2, "hostapd_allowed_address"),
             (dev[2], 2, "=hostapd_allowed_address")]
    for station, fail_count, func in cases:
        station.scan_for_bss(bssid, freq="2412")
        with alloc_fail(hapd, fail_count, func):
            station.connect("radius", key_mgmt="NONE", scan_freq="2412")
def test_radius_macacl_unreachable(dev, apdev):
    """RADIUS MAC ACL and server unreachable"""
    # Wrong auth server port: the MAC ACL query cannot be answered, so the
    # station must not be able to connect initially.
    params = hostapd.radius_params()
    params.update({'auth_server_port': "18139",
                   "ssid": "radius",
                   "macaddr_acl": "2"})
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")
    dev[0].connect("radius", key_mgmt="NONE", scan_freq="2412",
                   wait_connect=False)
    event = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=3)
    if event is not None:
        raise Exception("Unexpected connection")
    logger.info("Fix authentication server port")
    hapd.set("auth_server_port", "1812")
    hapd.disable()
    hapd.enable()
    dev[0].wait_connected()
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
def test_radius_failover(dev, apdev):
    """RADIUS Authentication and Accounting server failover"""
    # Route the (non-working) primary server address via loopback; replaced
    # with a 'prohibit' route below to make it explicitly unreachable.
    subprocess.call(['ip', 'ro', 'replace', '192.168.213.17', 'dev', 'lo'])
    as_hapd = hostapd.Hostapd("as")
    as_mib_start = as_hapd.get_mib(param="radius_server")
    params = hostapd.wpa2_eap_params(ssid="radius-failover")
    # Primary auth/acct servers (will be made unreachable).
    params["auth_server_addr"] = "192.168.213.17"
    params["auth_server_port"] = "1812"
    params["auth_server_shared_secret"] = "testing"
    params['acct_server_addr'] = "192.168.213.17"
    params['acct_server_port'] = "1813"
    params['acct_server_shared_secret'] = "testing"
    params['radius_retry_primary_interval'] = "20"
    hapd = hostapd.add_ap(apdev[0], params, no_enable=True)
    # Secondary (working, local) servers added via SET before enabling the
    # AP so hostapd has a failover target.
    hapd.set("auth_server_addr", "127.0.0.1")
    hapd.set("auth_server_port", "1812")
    hapd.set("auth_server_shared_secret", "radius")
    hapd.set('acct_server_addr', "127.0.0.1")
    hapd.set('acct_server_port', "1813")
    hapd.set('acct_server_shared_secret', "radius")
    hapd.enable()
    ev = hapd.wait_event(["AP-ENABLED", "AP-DISABLED"], timeout=30)
    if ev is None:
        raise Exception("AP startup timed out")
    if "AP-ENABLED" not in ev:
        raise Exception("AP startup failed")
    start = os.times()[4]
    try:
        # Make the primary unreachable; connection must succeed through the
        # secondary. Shorter authPeriod speeds up EAPOL retries.
        subprocess.call(['ip', 'ro', 'replace', 'prohibit', '192.168.213.17'])
        dev[0].request("SET EAPOL::authPeriod 5")
        connect(dev[0], "radius-failover", wait_connect=False)
        dev[0].wait_connected(timeout=20)
    finally:
        dev[0].request("SET EAPOL::authPeriod 30")
        subprocess.call(['ip', 'ro', 'del', '192.168.213.17'])
    as_mib_end = as_hapd.get_mib(param="radius_server")
    req_s = int(as_mib_start['radiusAccServTotalRequests'])
    req_e = int(as_mib_end['radiusAccServTotalRequests'])
    if req_e <= req_s:
        raise Exception("Unexpected RADIUS server acct MIB value")
    end = os.times()[4]
    try:
        subprocess.call(['ip', 'ro', 'replace', 'prohibit', '192.168.213.17'])
        dev[1].request("SET EAPOL::authPeriod 5")
        # Wait past radius_retry_primary_interval (20 s) so hostapd has
        # switched back to retrying the still-unreachable primary.
        if end - start < 21:
            time.sleep(21 - (end - start))
        connect(dev[1], "radius-failover", wait_connect=False)
        dev[1].wait_connected(timeout=20)
    finally:
        dev[1].request("SET EAPOL::authPeriod 30")
        subprocess.call(['ip', 'ro', 'del', '192.168.213.17'])
def run_pyrad_server(srv, t_events):
    """Thread entry point: run the pyrad test server until 'stop' is set."""
    srv.RunWithStop(t_events)
def test_radius_protocol(dev, apdev):
    """RADIUS Authentication protocol tests with a fake server"""
    try:
        import pyrad.server
        import pyrad.packet
        import pyrad.dictionary
    except ImportError:
        raise HwsimSkip("No pyrad modules available")

    class TestServer(pyrad.server.Server):
        # Minimal RADIUS server that always Access-Accepts and, depending on
        # the shared t_events flags, adds a valid, invalid, or duplicated
        # Message-Authenticator attribute to the reply.
        def _HandleAuthPacket(self, pkt):
            pyrad.server.Server._HandleAuthPacket(self, pkt)
            logger.info("Received authentication request")
            reply = self.CreateReplyPacket(pkt)
            reply.code = pyrad.packet.AccessAccept
            if self.t_events['msg_auth'].is_set():
                logger.info("Add Message-Authenticator")
                if self.t_events['wrong_secret'].is_set():
                    logger.info("Use incorrect RADIUS shared secret")
                    pw = b"incorrect"
                else:
                    pw = reply.secret
                # Fix: hmac.new() requires an explicit digestmod since
                # Python 3.8; Message-Authenticator is HMAC-MD5 (RFC 2869).
                hmac_obj = hmac.new(pw, digestmod='md5')
                hmac_obj.update(struct.pack("B", reply.code))
                hmac_obj.update(struct.pack("B", reply.id))
                # reply attributes
                reply.AddAttribute("Message-Authenticator", 16*b"\x00")
                attrs = reply._PktEncodeAttributes()
                # Length
                flen = 4 + 16 + len(attrs)
                hmac_obj.update(struct.pack(">H", flen))
                hmac_obj.update(pkt.authenticator)
                hmac_obj.update(attrs)
                if self.t_events['double_msg_auth'].is_set():
                    logger.info("Include two Message-Authenticator attributes")
                else:
                    del reply[80]
                reply.AddAttribute("Message-Authenticator", hmac_obj.digest())
            self.SendReplyPacket(pkt.fd, reply)

        def RunWithStop(self, t_events):
            # Poll-based variant of pyrad's Run() that can be stopped from
            # the main thread via the 'stop' event.
            self._poll = select.poll()
            self._fdmap = {}
            self._PrepareSockets()
            self.t_events = t_events
            while not t_events['stop'].is_set():
                for (fd, event) in self._poll.poll(1000):
                    if event == select.POLLIN:
                        try:
                            fdo = self._fdmap[fd]
                            self._ProcessInput(fdo)
                        except pyrad.server.ServerPacketError as err:
                            logger.info("pyrad server dropping packet: " + str(err))
                        except pyrad.packet.PacketError as err:
                            logger.info("pyrad server received invalid packet: " + str(err))
                    else:
                        logger.error("Unexpected event in pyrad server main loop")

    srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
                     authport=18138, acctport=18139)
    srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
                                                     b"radius",
                                                     "localhost")
    srv.BindToAddress("")
    t_events = {}
    t_events['stop'] = threading.Event()
    t_events['msg_auth'] = threading.Event()
    t_events['wrong_secret'] = threading.Event()
    t_events['double_msg_auth'] = threading.Event()
    t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
    t.start()

    try:
        params = hostapd.wpa2_eap_params(ssid="radius-test")
        params['auth_server_port'] = "18138"
        hapd = hostapd.add_ap(apdev[0], params)
        # Case 1: reply without Message-Authenticator.
        connect(dev[0], "radius-test", wait_connect=False)
        ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on EAP start")
        time.sleep(1)
        dev[0].request("REMOVE_NETWORK all")
        time.sleep(0.1)
        dev[0].dump_monitor()
        # Case 2: Message-Authenticator computed with wrong shared secret;
        # hostapd must discard the reply.
        t_events['msg_auth'].set()
        t_events['wrong_secret'].set()
        connect(dev[0], "radius-test", wait_connect=False)
        time.sleep(1)
        dev[0].request("REMOVE_NETWORK all")
        time.sleep(0.1)
        dev[0].dump_monitor()
        # Case 3: valid Message-Authenticator.
        t_events['wrong_secret'].clear()
        connect(dev[0], "radius-test", wait_connect=False)
        time.sleep(1)
        dev[0].request("REMOVE_NETWORK all")
        time.sleep(0.1)
        dev[0].dump_monitor()
        # Case 4: two Message-Authenticator attributes in one reply.
        t_events['double_msg_auth'].set()
        connect(dev[0], "radius-test", wait_connect=False)
        time.sleep(1)
    finally:
        t_events['stop'].set()
        t.join()
def build_tunnel_password(secret, authenticator, psk):
    """Encode *psk* as a RADIUS Tunnel-Password attribute value.

    Produces the RFC 2868 section 3.5 format: a Tag octet (0x00), a
    two-octet salt, and the MD5-keystream-encrypted, zero-padded
    plaintext (one length octet followed by the password).
    """
    salt = b"\xab\xcd"
    psk_bytes = psk.encode()
    # Plaintext: length octet + password, zero-padded to a multiple
    # of 16 octets (no padding when already an exact multiple).
    plain = struct.pack('B', len(psk_bytes)) + psk_bytes
    plain += b'\x00' * (-len(plain) % 16)
    encrypted = b''
    # Chained MD5 keystream: first block keyed by the Request
    # Authenticator + salt, subsequent blocks by the prior ciphertext.
    key = hashlib.md5(secret + authenticator + salt).digest()
    for ofs in range(0, len(plain), 16):
        block = bytes(p ^ k for p, k in zip(plain[ofs:ofs + 16], key))
        encrypted += block
        key = hashlib.md5(secret + block).digest()
    return b'\x00' + salt + encrypted
def start_radius_psk_server(psk, invalid_code=False, acct_interim_interval=0,
                            session_timeout=0, reject=False):
    """Start a fake pyrad RADIUS server (ports 18138/18139) that hands
    out *psk* in a Tunnel-Password attribute.

    Returns (thread, t_events); callers must set t_events['stop'] and
    join the thread when done.
    """
    try:
        import pyrad.server
        import pyrad.packet
        import pyrad.dictionary
    except ImportError:
        raise HwsimSkip("No pyrad modules available")

    class TestServer(pyrad.server.Server):
        def _HandleAuthPacket(self, pkt):
            pyrad.server.Server._HandleAuthPacket(self, pkt)
            logger.info("Received authentication request")
            reply = self.CreateReplyPacket(pkt)
            reply.code = pyrad.packet.AccessAccept
            if self.t_events['invalid_code']:
                # Deliberately respond with a code that is not valid
                # as a reply to an Access-Request.
                reply.code = pyrad.packet.AccessRequest
            if self.t_events['reject']:
                reply.code = pyrad.packet.AccessReject
            # Encrypt the PSK per RFC 2868 Tunnel-Password rules.
            data = build_tunnel_password(reply.secret, pkt.authenticator,
                                         self.t_events['psk'])
            reply.AddAttribute("Tunnel-Password", data)
            if self.t_events['acct_interim_interval']:
                reply.AddAttribute("Acct-Interim-Interval",
                                   self.t_events['acct_interim_interval'])
            if self.t_events['session_timeout']:
                reply.AddAttribute("Session-Timeout",
                                   self.t_events['session_timeout'])
            self.SendReplyPacket(pkt.fd, reply)

        def RunWithStop(self, t_events):
            # Poll-based main loop that can be interrupted via the
            # 'stop' event (pyrad's own Run() cannot be stopped).
            self._poll = select.poll()
            self._fdmap = {}
            self._PrepareSockets()
            self.t_events = t_events
            while not t_events['stop'].is_set():
                for (fd, event) in self._poll.poll(1000):
                    if event == select.POLLIN:
                        try:
                            fdo = self._fdmap[fd]
                            self._ProcessInput(fdo)
                        except pyrad.server.ServerPacketError as err:
                            logger.info("pyrad server dropping packet: " + str(err))
                        except pyrad.packet.PacketError as err:
                            logger.info("pyrad server received invalid packet: " + str(err))
                    else:
                        logger.error("Unexpected event in pyrad server main loop")

    srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
                     authport=18138, acctport=18139)
    srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
                                                     b"radius",
                                                     "localhost")
    srv.BindToAddress("")
    # Shared mutable state between the test and the server thread.
    t_events = {}
    t_events['stop'] = threading.Event()
    t_events['psk'] = psk
    t_events['invalid_code'] = invalid_code
    t_events['acct_interim_interval'] = acct_interim_interval
    t_events['session_timeout'] = session_timeout
    t_events['reject'] = reject
    t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
    t.start()
    return t, t_events
def hostapd_radius_psk_test_params():
    """Return hostapd config for a WPA2-PSK AP that fetches the PSK
    from the fake RADIUS server (macaddr_acl=2, wpa_psk_radius=2)."""
    cfg = hostapd.radius_params()
    cfg.update({'ssid': "test-wpa2-psk",
                'wpa': "2",
                'wpa_key_mgmt': "WPA-PSK",
                'rsn_pairwise': "CCMP",
                'macaddr_acl': '2',
                'wpa_psk_radius': '2',
                'auth_server_port': "18138"})
    return cfg
def test_radius_psk(dev, apdev):
    """WPA2 with PSK from RADIUS"""
    srv_thread, events = start_radius_psk_server("12345678")
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        # First station: PSK delivered by the RADIUS server.
        dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412")
        # Change the server-side PSK and connect a second station.
        events['psk'] = "0123456789abcdef"
        dev[1].connect("test-wpa2-psk", psk="0123456789abcdef",
                       scan_freq="2412")
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_invalid(dev, apdev):
    """WPA2 with invalid PSK from RADIUS"""
    # Server hands out a 7-character passphrase, which is too short to
    # be a valid PSK, so the connection must not complete.
    srv_thread, events = start_radius_psk_server("1234567")
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
                       wait_connect=False)
        time.sleep(1)
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_invalid2(dev, apdev):
    """WPA2 with invalid PSK (hexstring) from RADIUS"""
    # 64 characters long, but not valid hex digits -> unusable PSK.
    srv_thread, events = start_radius_psk_server(64 * 'q')
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
                       wait_connect=False)
        time.sleep(1)
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_hex_psk(dev, apdev):
    """WPA2 with PSK hexstring from RADIUS"""
    # Server also returns Acct-Interim-Interval and Session-Timeout.
    srv_thread, events = start_radius_psk_server(64 * '2',
                                                 acct_interim_interval=19,
                                                 session_timeout=123)
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].connect("test-wpa2-psk", raw_psk=64 * '2', scan_freq="2412")
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_unknown_code(dev, apdev):
    """WPA2 with PSK from RADIUS and unknown code"""
    # Server replies with AccessRequest, which is not a valid reply
    # code, so the station must not complete the connection.
    srv_thread, events = start_radius_psk_server(64 * '2', invalid_code=True)
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
                       wait_connect=False)
        time.sleep(1)
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_reject(dev, apdev):
    """WPA2 with PSK from RADIUS and reject"""
    srv_thread, events = start_radius_psk_server("12345678", reject=True)
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].connect("test-wpa2-psk", psk="12345678", scan_freq="2412",
                       wait_connect=False)
        # AccessReject from the server must surface as an auth reject.
        if dev[0].wait_event(["CTRL-EVENT-AUTH-REJECT"], timeout=10) is None:
            raise Exception("No CTRL-EVENT-AUTH-REJECT event")
        dev[0].request("DISCONNECT")
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_oom(dev, apdev):
    """WPA2 with PSK from RADIUS and OOM"""
    srv_thread, events = start_radius_psk_server(64 * '2')
    try:
        hapd = hostapd.add_ap(apdev[0], hostapd_radius_psk_test_params())
        dev[0].scan_for_bss(hapd.own_addr(), freq="2412")
        # Fail the allocation in the RADIUS ACL response handler.
        with alloc_fail(hapd, 1, "=hostapd_acl_recv_radius"):
            dev[0].connect("test-wpa2-psk", psk="12345678",
                           scan_freq="2412", wait_connect=False)
            wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
    finally:
        events['stop'].set()
        srv_thread.join()
def test_radius_psk_default(dev, apdev):
    """WPA2 with default PSK"""
    ssid = "test-wpa2-psk"
    cfg = hostapd.radius_params()
    cfg.update({'ssid': ssid,
                'wpa': "2",
                'wpa_key_mgmt': "WPA-PSK",
                'rsn_pairwise': "CCMP",
                'macaddr_acl': '2',
                'wpa_psk_radius': '1',
                'wpa_passphrase': 'qwertyuiop'})
    hapd = hostapd.add_ap(apdev[0], cfg)

    # wpa_psk_radius=1: RADIUS-supplied PSK is optional, so the
    # configured default passphrase works.
    dev[0].connect(ssid, psk="qwertyuiop", scan_freq="2412")
    dev[0].dump_monitor()
    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected()
    dev[0].dump_monitor()

    # wpa_psk_radius=2: a RADIUS PSK is required, so the same attempt
    # must now be rejected.
    hapd.disable()
    hapd.set("wpa_psk_radius", "2")
    hapd.enable()
    dev[0].connect(ssid, psk="qwertyuiop", scan_freq="2412",
                   wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-AUTH-REJECT"], timeout=10) is None:
        raise Exception("No CTRL-EVENT-AUTH-REJECT event")
    dev[0].request("DISCONNECT")
def test_radius_auth_force_client_addr(dev, apdev):
    """RADIUS client address specified"""
    cfg = hostapd.wpa2_eap_params(ssid="radius-auth")
    # Bind the RADIUS client socket to an explicit (valid) local
    # address; authentication must still succeed.
    cfg['radius_client_addr'] = "127.0.0.1"
    hapd = hostapd.add_ap(apdev[0], cfg)
    connect(dev[0], "radius-auth")
@remote_compatible
def test_radius_auth_force_invalid_client_addr(dev, apdev):
    """RADIUS client address specified and invalid address"""
    cfg = hostapd.wpa2_eap_params(ssid="radius-auth")
    #cfg['radius_client_addr'] = "10.11.12.14"
    # An IPv6 source address cannot reach the IPv4 test server, so
    # EAP starts but the connection must never complete.
    cfg['radius_client_addr'] = "1::2"
    hapd = hostapd.add_ap(apdev[0], cfg)
    connect(dev[0], "radius-auth", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-EAP-STARTED"]) is None:
        raise Exception("Timeout on EAP start")
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
def add_message_auth(req):
    """Add a valid Message-Authenticator attribute (RFC 3579) to the
    RADIUS request *req* in place.

    The value is HMAC-MD5 over the packet with the attribute itself
    zeroed out during the computation.
    """
    req.authenticator = req.CreateAuthenticator()
    # digestmod must be explicit: hmac.new() has required it since
    # Python 3.8 (the implicit MD5 default was removed).
    hmac_obj = hmac.new(req.secret, digestmod=hashlib.md5)
    hmac_obj.update(struct.pack("B", req.code))
    hmac_obj.update(struct.pack("B", req.id))
    # request attributes (zero placeholder for the HMAC itself)
    req.AddAttribute("Message-Authenticator", 16*b"\x00")
    attrs = req._PktEncodeAttributes()
    # Length
    flen = 4 + 16 + len(attrs)
    hmac_obj.update(struct.pack(">H", flen))
    hmac_obj.update(req.authenticator)
    hmac_obj.update(attrs)
    # Replace the zero placeholder (attribute type 80) with the HMAC.
    del req[80]
    req.AddAttribute("Message-Authenticator", hmac_obj.digest())
def test_radius_server_failures(dev, apdev):
    """RADIUS server failure cases"""
    try:
        import pyrad.client
        import pyrad.packet
        import pyrad.dictionary
    except ImportError:
        raise HwsimSkip("No pyrad modules available")

    # Renamed from 'dict' to avoid shadowing the builtin.
    radius_dict = pyrad.dictionary.Dictionary("dictionary.radius")
    client = pyrad.client.Client(server="127.0.0.1", authport=1812,
                                 secret=b"radius", dict=radius_dict)
    client.retries = 1
    client.timeout = 1

    # unexpected State attribute without a matching session
    req = client.CreateAuthPacket(code=pyrad.packet.AccessRequest,
                                  User_Name="foo")
    req['State'] = b'foo-state'
    add_message_auth(req)
    reply = client.SendPacket(req)
    if reply.code != pyrad.packet.AccessReject:
        raise Exception("Unexpected RADIUS response code " + str(reply.code))

    # no EAP-Message: the server drops the request silently, so the
    # client is expected to time out
    req = client.CreateAuthPacket(code=pyrad.packet.AccessRequest,
                                  User_Name="foo")
    add_message_auth(req)
    try:
        reply = client.SendPacket(req)
        raise Exception("Unexpected response")
    except pyrad.client.Timeout:
        pass
def test_ap_vlan_wpa2_psk_radius_required(dev, apdev):
    """AP VLAN with WPA2-PSK and RADIUS attributes required"""
    try:
        import pyrad.server
        import pyrad.packet
        import pyrad.dictionary
    except ImportError:
        raise HwsimSkip("No pyrad modules available")

    class TestServer(pyrad.server.Server):
        def _HandleAuthPacket(self, pkt):
            pyrad.server.Server._HandleAuthPacket(self, pkt)
            logger.info("Received authentication request")
            reply = self.CreateReplyPacket(pkt)
            reply.code = pyrad.packet.AccessAccept
            secret = reply.secret
            if self.t_events['extra'].is_set():
                # Extra attributes that are still not sufficient for
                # dynamic_vlan=2 (no tunnel/VLAN attributes).
                reply.AddAttribute("Chargeable-User-Identity", "test-cui")
                reply.AddAttribute("User-Name", "test-user")
            if self.t_events['long'].is_set():
                # Full tunnel attribute set -> VLAN 1 assignment.
                reply.AddAttribute("Tunnel-Type", 13)
                reply.AddAttribute("Tunnel-Medium-Type", 6)
                reply.AddAttribute("Tunnel-Private-Group-ID", "1")
            self.SendReplyPacket(pkt.fd, reply)

        def RunWithStop(self, t_events):
            # Poll-based main loop that can be interrupted via the
            # 'stop' event (pyrad's own Run() cannot be stopped).
            self._poll = select.poll()
            self._fdmap = {}
            self._PrepareSockets()
            self.t_events = t_events
            while not t_events['stop'].is_set():
                for (fd, event) in self._poll.poll(1000):
                    if event == select.POLLIN:
                        try:
                            fdo = self._fdmap[fd]
                            self._ProcessInput(fdo)
                        except pyrad.server.ServerPacketError as err:
                            logger.info("pyrad server dropping packet: " + str(err))
                        except pyrad.packet.PacketError as err:
                            logger.info("pyrad server received invalid packet: " + str(err))
                    else:
                        logger.error("Unexpected event in pyrad server main loop")

    srv = TestServer(dict=pyrad.dictionary.Dictionary("dictionary.radius"),
                     authport=18138, acctport=18139)
    srv.hosts["127.0.0.1"] = pyrad.server.RemoteHost("127.0.0.1",
                                                     b"radius",
                                                     "localhost")
    srv.BindToAddress("")
    t_events = {}
    t_events['stop'] = threading.Event()
    t_events['long'] = threading.Event()
    t_events['extra'] = threading.Event()
    t = threading.Thread(target=run_pyrad_server, args=(srv, t_events))
    t.start()

    try:
        ssid = "test-wpa2-psk"
        params = hostapd.radius_params()
        params['ssid'] = ssid
        params["wpa"] = "2"
        params["wpa_key_mgmt"] = "WPA-PSK"
        params["rsn_pairwise"] = "CCMP"
        params['macaddr_acl'] = '2'
        # dynamic_vlan=2: RADIUS VLAN attributes are mandatory.
        params['dynamic_vlan'] = "2"
        params['wpa_passphrase'] = '0123456789abcdefghi'
        params['auth_server_port'] = "18138"
        hapd = hostapd.add_ap(apdev[0], params)
        logger.info("connecting without VLAN")
        dev[0].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
                       wait_connect=False)
        ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                                "CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
        if ev is None:
            raise Exception("Timeout on connection attempt")
        if "CTRL-EVENT-CONNECTED" in ev:
            raise Exception("Unexpected success without vlan parameters")
        logger.info("connecting without VLAN failed as expected")
        logger.info("connecting without VLAN (CUI/User-Name)")
        t_events['extra'].set()
        dev[1].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
                       wait_connect=False)
        ev = dev[1].wait_event(["CTRL-EVENT-CONNECTED",
                                "CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
        if ev is None:
            raise Exception("Timeout on connection attempt")
        if "CTRL-EVENT-CONNECTED" in ev:
            raise Exception("Unexpected success without vlan parameters(2)")
        logger.info("connecting without VLAN failed as expected(2)")
        t_events['extra'].clear()
        t_events['long'].set()
        logger.info("connecting with VLAN")
        dev[2].connect(ssid, psk="0123456789abcdefghi", scan_freq="2412",
                       wait_connect=False)
        ev = dev[2].wait_event(["CTRL-EVENT-CONNECTED",
                                "CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=20)
        if ev is None:
            raise Exception("Timeout on connection attempt")
        if "CTRL-EVENT-SSID-TEMP-DISABLED" in ev:
            raise Exception("Unexpected failure with vlan parameters")
        logger.info("connecting with VLAN succeeded as expected")
    finally:
        t_events['stop'].set()
        t.join()
def test_radius_mppe_failure(dev, apdev):
    """RADIUS failure when adding MPPE keys"""
    # Local RADIUS/EAP authentication server on the second radio.
    srv_cfg = {"ssid": "as", "beacon_int": "2000",
               "radius_server_clients": "auth_serv/radius_clients.conf",
               "radius_server_auth_port": '18127',
               "eap_server": "1",
               "eap_user_file": "auth_serv/eap_user.conf",
               "ca_cert": "auth_serv/ca.pem",
               "server_cert": "auth_serv/server.pem",
               "private_key": "auth_serv/server.key"}
    authsrv = hostapd.add_ap(apdev[1], srv_cfg)

    ap_cfg = hostapd.wpa2_eap_params(ssid="test-wpa2-eap")
    ap_cfg['auth_server_port'] = "18127"
    hapd = hostapd.add_ap(apdev[0], ap_cfg)

    # Make the server fail to fetch randomness for the MPPE keys.
    with fail_test(authsrv, 1, "os_get_random;radius_msg_add_mppe_keys"):
        dev[0].connect("test-wpa2-eap", key_mgmt="WPA-EAP", eap="TTLS",
                       identity="user", anonymous_identity="ttls",
                       password="password",
                       ca_cert="auth_serv/ca.pem", phase2="autheap=GTC",
                       wait_connect=False, scan_freq="2412")
        dev[0].wait_disconnected()
        dev[0].request("REMOVE_NETWORK all")
def test_radius_acct_failure(dev, apdev):
    """RADIUS Accounting and failure to add attributes"""
    # Connection goes through, but the Accounting-Request cannot be
    # sent out: the 255-character NAS-Identifier does not fit into a
    # single RADIUS attribute.
    hapd = hostapd.add_ap(apdev[0],
                          {"ssid": "radius-acct-open",
                           'acct_server_addr': "127.0.0.1",
                           'acct_server_port': "1813",
                           'acct_server_shared_secret': "radius",
                           'nas_identifier': 255 * 'A'})
    dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
def test_radius_acct_failure_oom(dev, apdev):
    """RADIUS Accounting and failure to add attributes due to OOM"""
    # Oversized NAS-Identifier and extra request attributes stress the
    # attribute-encoding allocation paths.
    params = {"ssid": "radius-acct-open",
              'acct_server_addr': "127.0.0.1",
              'acct_server_port': "1813",
              'acct_server_shared_secret': "radius",
              'radius_acct_interim_interval': "1",
              'nas_identifier': 250*'A',
              'radius_acct_req_attr': ["126:s:" + 250*'B',
                                       "77:s:" + 250*'C',
                                       "127:s:" + 250*'D',
                                       "181:s:" + 250*'E']}
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")
    # OOM while building the accounting message
    with alloc_fail(hapd, 1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
        dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
        wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
    dev[1].scan_for_bss(bssid, freq="2412")
    # OOM while building the STA accounting report
    with alloc_fail(hapd, 1, "accounting_sta_report"):
        dev[1].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
        wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
        dev[1].request("REMOVE_NETWORK all")
        dev[1].wait_disconnected()
    # Targeted attribute-add failures at increasing call counts
    tests = [(1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"),
             (2, "radius_msg_add_attr;accounting_msg"),
             (3, "radius_msg_add_attr;accounting_msg")]
    for count, func in tests:
        with fail_test(hapd, count, func):
            dev[0].connect("radius-acct-open", key_mgmt="NONE",
                           scan_freq="2412")
            wait_fail_trigger(hapd, "GET_FAIL")
            dev[0].request("REMOVE_NETWORK all")
            dev[0].wait_disconnected()
    dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
    # Failure while generating the disconnect-time STA report
    with fail_test(hapd, 8,
                   "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_sta_report"):
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
        wait_fail_trigger(hapd, "GET_FAIL")
    # Failure while reporting accounting state during AP shutdown
    with fail_test(hapd, 1, "radius_msg_add_attr;=accounting_report_state"):
        hapd.disable()
def test_radius_acct_failure_oom_rsn(dev, apdev):
    """RADIUS Accounting in RSN and failure to add attributes due to OOM"""
    params = hostapd.wpa2_eap_params(ssid="radius-acct")
    params['acct_server_addr'] = "127.0.0.1"
    params['acct_server_port'] = "1813"
    params['acct_server_shared_secret'] = "radius"
    params['radius_acct_interim_interval'] = "1"
    # Oversized attributes to stress attribute-encoding allocations
    params['nas_identifier'] = 250*'A'
    params['radius_acct_req_attr'] = ["126:s:" + 250*'B',
                                      "77:s:" + 250*'C',
                                      "127:s:" + 250*'D',
                                      "181:s:" + 250*'E']
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = hapd.own_addr()
    dev[0].scan_for_bss(bssid, freq="2412")
    # OOM while building the accounting message for the first STA
    with alloc_fail(hapd, 1, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
        connect(dev[0], "radius-acct")
        wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
    dev[1].scan_for_bss(bssid, freq="2412")
    # OOM while building the STA accounting report
    with alloc_fail(hapd, 1, "accounting_sta_report"):
        connect(dev[1], "radius-acct")
        wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
    dev[2].scan_for_bss(bssid, freq="2412")
    connect(dev[2], "radius-acct")
    # Interim updates (1 s interval) keep hitting the failing
    # allocation points at increasing call counts.
    for i in range(1, 8):
        with alloc_fail(hapd, i, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_msg"):
            wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
    for i in range(1, 15):
        with alloc_fail(hapd, i, "radius_msg_add_attr;?radius_msg_add_attr_int32;=accounting_sta_report"):
            wait_fail_trigger(hapd, "GET_ALLOC_FAIL")
def test_radius_acct_failure_sta_data(dev, apdev):
    """RADIUS Accounting and failure to get STA data"""
    acct_cfg = {"ssid": "radius-acct-open",
                'acct_server_addr': "127.0.0.1",
                'acct_server_port': "1813",
                'acct_server_shared_secret': "radius"}
    hapd = hostapd.add_ap(apdev[0], acct_cfg)
    # Simulate a failure when fetching STA accounting statistics.
    with fail_test(hapd, 1, "accounting_sta_update_stats"):
        dev[0].connect("radius-acct-open", key_mgmt="NONE", scan_freq="2412")
        dev[0].request("DISCONNECT")
        dev[0].wait_disconnected()
        hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=1)
|
gpl-3.0
| -3,032,000,768,132,133,000
| 41.920925
| 108
| 0.593733
| false
| 3.437856
| true
| false
| false
|
OpenEntityMap/oem-client-anidb
|
examples/anidb_example.py
|
1
|
2245
|
from __future__ import print_function
import logging
logging.basicConfig(level=logging.DEBUG)
from oem import OemClient
from oem.media.show.identifier import EpisodeIdentifier
log = logging.getLogger(__name__)
def run():
    """Exercise OemClient anidb<->tvdb/imdb mappings with known
    identifiers, logging every result."""
    # Initialize client
    client = OemClient(['anidb'], 'package')

    def banner(title):
        # Section header identical to the original inline log calls.
        log.debug("\n%s\n%s\n%s", '=' * 60, title, '=' * 60)

    banner('Basic')
    for key, identifier in [('3', EpisodeIdentifier(1, 2)),
                            ('38', EpisodeIdentifier(1, 2)),
                            ('818', EpisodeIdentifier(0, 1)),
                            ('1041', EpisodeIdentifier(1, 45))]:
        log.debug(client['anidb'].to('tvdb').map(key, identifier))

    banner('Timeline')
    for progress in (34, 49, 50, 51, 64, 99, 100):
        log.debug(client['anidb'].to('tvdb').map(
            '10648', EpisodeIdentifier(1, 1, progress=progress)))

    # Movies
    banner('Movies')
    log.debug(client['anidb'].to('imdb').get(7103))
    log.debug(client['imdb'].to('anidb').get("tt1663145"))

    # Shows
    banner('Shows')
    log.debug(client['anidb'].to('tvdb').get(3))
    for tvdb_id in (70973, 71551, 103691, 136251, 137151, 138691):
        log.debug(client['tvdb'].to('anidb').get(tvdb_id))
if __name__ == '__main__':
    # Run example
    run()
    # Display call statistics (per-call timing collected by the
    # oem framework during the run above)
    from oem_framework.core.elapsed import Elapsed
    for line in Elapsed.format_statistics():
        print(line)
|
bsd-3-clause
| 7,731,758,222,099,678,000
| 34.634921
| 93
| 0.621381
| false
| 2.915584
| false
| false
| false
|
mdmintz/SeleniumBase
|
examples/raw_parameter_script.py
|
1
|
3034
|
""" The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
with Python or from a Python interactive interpreter. Based on
whether relative imports work or don't, the script can autodetect
how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
    # Running with Pytest / (finds test methods via autodiscovery)
    # Example run command: "pytest raw_parameter_script.py"
    from .my_first_test import MyTestClass  # (relative imports work: ".~")
except (ImportError, ValueError):
    # Running with pure Python OR from a Python interactive interpreter
    # Example run command: "python raw_parameter_script.py"
    from my_first_test import MyTestClass  # (relative imports DON'T work)

# Settings that the SeleniumBase Pytest plugin would otherwise
# initialize automatically from command-line options.
_DEFAULT_SETTINGS = {
    "browser": "chrome",
    "headless": False,
    "headed": False,
    "start_page": None,
    "locale_code": None,
    "servername": "localhost",
    "port": 4444,
    "data": None,
    "environment": "test",
    "user_agent": None,
    "incognito": False,
    "guest_mode": False,
    "devtools": False,
    "mobile_emulator": False,
    "device_metrics": None,
    "extension_zip": None,
    "extension_dir": None,
    "database_env": "test",
    "log_path": "latest_logs/",
    "archive_logs": False,
    "disable_csp": False,
    "enable_ws": False,
    "enable_sync": False,
    "use_auto_ext": False,
    "no_sandbox": False,
    "disable_gpu": False,
    "_reuse_session": False,
    "_crumbs": False,
    "visual_baseline": False,
    "maximize_option": False,
    "save_screenshot_after_test": False,
    "timeout_multiplier": None,
    "pytest_html_report": None,
    "with_db_reporting": False,
    "with_s3_logging": False,
    "js_checking_on": False,
    "report_on": False,
    "is_pytest": False,
    "slow_mode": False,
    "demo_mode": False,
    "time_limit": None,
    "demo_sleep": 1,
    "message_duration": 2,
    "block_images": False,
    "settings_file": None,
    "user_data_dir": None,
    "proxy_string": None,
    "swiftshader": False,
    "ad_block_on": False,
    "highlights": None,
    "check_js": False,
    "cap_file": None,
    "cap_string": None,
}

sb = MyTestClass("test_basic")
for _attr, _value in _DEFAULT_SETTINGS.items():
    setattr(sb, _attr, _value)

# Manually drive the test lifecycle that Pytest would normally manage.
sb.setUp()
try:
    sb.test_basic()
finally:
    sb.tearDown()
del sb
|
mit
| 8,546,046,157,528,285,000
| 33.477273
| 75
| 0.678972
| false
| 3.816352
| true
| false
| false
|
jfecroft/DOS
|
data/alkalis/jfec_k2/rovib.py
|
1
|
1600
|
import subprocess
import numpy as np
import os
import numpy.ma as ma
import re
from tempfile import mkstemp, mkdtemp
import shutil
import scipy.constants
#########################################
#replace will search through a file for a specific word and then replace that line
def replace(file, pattern, subst):
    """Rewrite *file*, replacing every line that starts with *pattern*
    with a Fortran-namelist-style line 'pattern = subst,'.

    The rewrite is done via a temporary file that atomically replaces
    the original. Context managers ensure all handles are closed even
    on error (the original leaked handles if an exception occurred).
    """
    p = re.compile(pattern)
    # Create temp file; write through the fd returned by mkstemp so
    # only one handle is ever open on it.
    fh, abs_path = mkstemp()
    with os.fdopen(fh, 'w') as new_file, open(file) as old_file:
        for line in old_file:
            # match() anchors at the start of the line, so only lines
            # beginning with the currently-edited variable are touched
            if p.match(line):
                line = pattern + ' = ' + str(subst) + ', \n'
            new_file.write(line)
    os.remove(file)
    shutil.move(abs_path, file)
#routine which calls the 1d_schrodinger eqn solver; the solver outputs
#all the levels below zero
def run_1d_schrodinger(inputfile_name,outputfile_name,L):
    """Run the external 1d_schrodinger.x solver for angular momentum L.

    outputfile_name is accepted but unused here; the solver writes its
    own output file (fort.10), which the caller renames afterwards.
    """
    home = os.getcwd()
    replace(inputfile_name, ' L', L) #editing inputfile such that L=L is called
    # NOTE(review): shell=True with a concatenated command string; this is
    # only safe because inputfile_name is a fixed local filename -- do not
    # pass untrusted input through this function.
    subprocess.call(home+"/1d_schrodinger.x < " + inputfile_name, stdout=open(os.devnull, 'w'), shell=True)
    return()
############################################
# Driver: run the solver for every L from 0 to lmax and archive each
# output file under a per-L name.
lmax = 100
inputfile = 'input_K2.txt'
outputfile = 'fort.10'
sys = 'kk'  # system label used in output file names (shadows the sys
            # module name, but that module is not imported here)
#generate to states of the dimer for different n upto nmax
for i in range(0,lmax+1):
    run_1d_schrodinger(inputfile,outputfile,i)
#    shutil.copyfile(outputfile,sys+'_results_j'+str(i)+'.dat')
    try:
        shutil.move(outputfile,sys+'_results_j'+str(i)+'.dat')
    except IOError:
        # Solver produced no output for this L; skip silently.
        pass
|
mit
| 8,721,651,068,161,395,000
| 30.372549
| 111
| 0.646875
| false
| 3.305785
| false
| false
| false
|
seleniumbase/SeleniumBase
|
examples/test_hack_search.py
|
1
|
1313
|
""" Testing the "self.set_attribute()" and "self.set_attributes()" methods
to modify a Google search into becoming a Bing search.
set_attribute() -> Modifies the attribute of the first matching element.
set_attributes() -> Modifies the attribute of all matching elements. """
from seleniumbase import BaseCase
class HackingTests(BaseCase):
    def test_hack_search(self):
        """Modify a live Google search page so the search form submits
        to Bing instead, then follow the Bing results to the
        SeleniumBase GitHub repository and its examples folder."""
        self.open("https://google.com/ncr")
        self.assert_element('input[title="Search"]')
        # Point the form at Bing and relabel the submit button(s).
        self.set_attribute('[action="/search"]', "action", "//bing.com/search")
        self.set_attributes('[value="Google Search"]', "value", "Bing Search")
        self.type('input[title="Search"]', "SeleniumBase GitHub")
        self.sleep(0.5)
        self.js_click('[value="Bing Search"]')
        # Now on Bing: verify and follow the first repo result.
        self.highlight("h1.b_logo")
        self.highlight_click('a[href*="github.com/seleniumbase/SeleniumBase"]')
        # The GitHub link opens in a new tab/window.
        self.switch_to_newest_window()
        self.assert_element('[href="/seleniumbase/SeleniumBase"]')
        self.assert_true("seleniumbase/SeleniumBase" in self.get_current_url())
        self.click('a[title="examples"]')
        self.assert_text("examples", "strong.final-path")
        self.highlight_click('[title="test_hack_search.py"]')
        self.assert_text("test_hack_search.py", "strong.final-path")
|
mit
| 5,558,656,236,004,526,000
| 49.5
| 79
| 0.657273
| false
| 3.698592
| false
| false
| false
|
campadrenalin/EJTP-lib-python
|
setup.py
|
1
|
3110
|
#!/usr/bin/env python
from setuptools import setup
long_desc = '''
Encrypted JSON Transport Protocol
---------------------------------
EJTP is an overlay protocol that allows the pluggable use of underlying transports, such as UDP, TCP, HTTP, IRC, Email and carrier pigeon to provide a cryptographically secure network of unreliable message forwarding. You can think of it as a bit like a more general-purpose and security-minded successor to XMPP, using JSON rather than XML as its frame medium.
On top of a simple frame format, EJTP boasts a consistent and simple format for describing encryption credentials, which is useful even without the rest of EJTP. The ejtp-crypto script makes it easy for other projects to take advantage of this pending a native port of ejtp.crypto to languages other than Python.
The intention of EJTP is to make it trivial to establish secure and NAT-oblivious distributed services across a common network of message relays. Your system only has to worry about exchanging encryption credentials and establishing a connection with a relay host, helping to pave the way toward distributed apps that run entirely in HTML5 (pending a port of the project to JS). You can be serverless *and* smartphone-friendly.
Optionally supports elliptic curve cryptography if the PyECC_ module is installed.
For more technical and in-depth information, visit the `Github project <https://github.com/campadrenalin/EJTP-lib-python>`_.
.. _PyECC: https://pypi.python.org/pypi/PyECC
'''
# Package metadata and layout for the ejtp distribution.
setup(
    name = 'ejtp',
    version = '0.9.7p1',
    description = 'Encrypted JSON Transport Protocol library',
    long_description = long_desc,
    author = 'Philip Horger',
    author_email = 'philip.horger@gmail.com',
    url = 'https://github.com/campadrenalin/EJTP-lib-python/',
    # Test fixtures shipped inside the ejtp.tests package
    package_data={
        'ejtp.tests' : ['examplecache.json', 'idents/*']
    },
    install_requires = [
        'pycrypto',
        'persei',
        'requests',
        'streql',
    ],
    classifiers = [
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)',
        'Environment :: Console',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Intended Audience :: Developers',
        'Topic :: Communications',
        'Topic :: Internet',
        'Topic :: Security :: Cryptography',
    ],
    # Command-line entry points installed alongside the library
    scripts = [
        'scripts/ejtpd',
        'scripts/ejtp-keygen',
        'scripts/ejtp-console',
        'scripts/ejtp-crypto',
        'scripts/ejtp-identity',
    ],
    packages = [
        'ejtp',
        'ejtp.applications',
        'ejtp.applications.ejforward',
        'ejtp.crypto',
        'ejtp.frame',
        'ejtp.identity',
        'ejtp.jacks',
        'ejtp.tests',
        'ejtp.util',
        'ejtp.vendor',
    ],
)
|
lgpl-3.0
| 5,118,365,220,916,456,000
| 39.38961
| 427
| 0.671704
| false
| 3.715651
| false
| false
| false
|
juliakreger/bifrost
|
playbooks/roles/ironic-install/files/parse_zuul_changes.py
|
1
|
2325
|
#!/usr/bin/env python
# (c) 2015, Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import re
import subprocess
import sys
# Apply Zuul-proposed gerrit changes on top of a local git checkout.
#
# argv: [1] repository_path  [2] review_url  [3] repository_name
#       [4] caret-separated list of "project:branch:ref" change specs
#
# NOTE: value comparisons below use ==, not `is`; identity comparison of
# small integers is a CPython implementation detail (and a SyntaxWarning
# on Python >= 3.8).
if len(sys.argv) == 1:
    print("ERROR: This script requires arguments!\n"
          "%s repository_path review_url repository_name "
          "zuul_changes" % sys.argv[0])
    sys.exit(1)

repo_path = sys.argv[1]
review_url = sys.argv[2]
repo_name = sys.argv[3]
change_list = str(sys.argv[4]).split('^')
# Only consider change specs that mention this repository.
applicable_changes = [x for x in change_list if repo_name in x]

try:
    for change in applicable_changes:
        (project, branch, ref) = change.split(':')
        if re.search(repo_name, project):
            # Switch to the target branch unless we are already on it.
            # NOTE(review): check_output returns bytes under Python 3 --
            # confirm the target interpreter is Python 2.
            if not re.search(branch, subprocess.check_output(
                    ['git', '-C', repo_path, 'status', '-s', '-b'])):
                command = ['git', '-C', repo_path, 'checkout', branch]
                subprocess.call(command, stdout=True)
            command = ['git', '-C', repo_path, 'fetch',
                       review_url + "/" + repo_name, ref]
            # subprocess.call returns an int exit status; 0 means success.
            if subprocess.call(command, stdout=True) == 0:
                if subprocess.call(
                        ['git', '-C', repo_path, 'cherry-pick',
                         '-n', 'FETCH_HEAD'], stdout=True) == 0:
                    print("Applied %s" % ref)
                else:
                    print("Failed to cherry pick %s on to %s branch %s"
                          % (ref, repo_name, branch))
                    sys.exit(1)
            else:
                print("Failed to download %s on to %s branch %s"
                      % (ref, repo_name, branch))
                sys.exit(1)
except Exception as e:
    print("Failed to process change: %s" % e)
|
apache-2.0
| -3,221,562,445,962,822,000
| 37.75
| 78
| 0.581505
| false
| 3.824013
| false
| false
| false
|
threedliams/CallbackBot
|
src/api/base.py
|
1
|
10608
|
import json
import os
#TODO: handle unicode better instead of just ignoring it
from unidecode import unidecode
from abc import ABC, abstractmethod
import src.util.callbackUtil
import src.data.messages
import src.data.polls
class API(ABC):
    """Abstract base class for a chat-service backend.

    Concrete subclasses implement the backend-specific accessors
    (author/content/channel extraction, client identity, server and
    channel enumeration, log retrieval, message editing and sending);
    this base class provides the shared event handlers (onReady,
    onMessage, onReaction*) that persist per-channel per-user message
    text and drive poll/markov features.

    NOTE(review): the handlers reference ``src.app``, which is not
    imported in this chunk's import block -- confirm it is in scope at
    runtime.
    """

    def __init__(self, token):
        # NOTE(review): `token` is accepted but never stored here;
        # presumably consumed by subclasses -- confirm.
        self.apiName = ""
        self.client = None
        # Readiness flags flipped by onReady() as the two-phase
        # (saved-from-disk, then live) text maps are populated.
        self.isSavedReady = False
        self.isLiveReady = False
        # channelID -> {username -> concatenated message text}
        self.savedChannelTextMap = {}
        self.liveChannelTextMap = {}
        self.markovModelCache = {}
        # Parsed contents of callbacks/callbacks.json (loaded in onReady).
        self.callbackData = {}
        # messageID -> poll message object, maintained by onReaction*.
        self.polls = {}
        super().__init__()

    # --- Backend-specific payload accessors (implemented per service) ---

    @abstractmethod
    def author(self, payload):
        pass

    @abstractmethod
    def authorName(self, payload):
        pass

    @abstractmethod
    def content(self, payload):
        pass

    @abstractmethod
    def messageChannel(self, payload):
        pass

    @abstractmethod
    def emoji(self, payload):
        pass

    @abstractmethod
    def reactionMessage(self, payload):
        pass

    @abstractmethod
    def messageID(self, payload):
        pass

    # --- Backend-specific client identity ---

    @abstractmethod
    def clientName(self):
        pass

    @abstractmethod
    def clientID(self):
        pass

    @abstractmethod
    def clientUser(self):
        pass

    # --- Backend-specific server/channel enumeration ---

    @abstractmethod
    def getServers(self):
        pass

    @abstractmethod
    def serverName(self, server):
        pass

    @abstractmethod
    def channels(self, server):
        pass

    @abstractmethod
    def channelName(self, channel):
        pass

    @abstractmethod
    def channelID(self, channel):
        pass

    @abstractmethod
    async def getLogs(self, channel):
        pass

    @abstractmethod
    async def editMessage(self, message, newContent):
        pass

    ################################################################################
    # onReady
    #
    # When the bot starts up, this runs all the startup functions
    #
    # Args:
    #
    #   None
    #
    # Returns - nothing
    ################################################################################
    async def onReady(self):
        print('Logged in as')
        print(self.clientName())
        print(self.clientID())
        print('------')
        rootFolder = "./servers/" + self.apiName + "/"
        callbackFile = "./callbacks/callbacks.json"
        #load callbackFile
        with open(callbackFile) as data_file:
            self.callbackData = json.load(data_file)
        servers = self.getServers()
        #preload any saved channels
        for server in servers:
            underscoredServerName = self.serverName(server).replace(" ", "_")
            if(os.path.isdir(rootFolder + underscoredServerName)):
                for channel in server.text_channels:
                    underscoredChannelName = self.channelName(channel).replace(" ", "_")
                    #TODO: channels with the same name on one server?
                    if(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
                        if not(channel.id in list(self.savedChannelTextMap.keys())):
                            self.savedChannelTextMap[self.channelID(channel)] = {}
                        # One file per user: <username>.txt holds that
                        # user's concatenated message text.
                        for fileName in os.listdir(rootFolder + underscoredServerName + "/" + underscoredChannelName):
                            f = open(rootFolder + underscoredServerName + "/" + underscoredChannelName + "/" + fileName, 'r')
                            #TODO: handle people with . in their name
                            self.savedChannelTextMap[self.channelID(channel)][fileName.split('.')[0]] = f.read()
        self.isSavedReady = True
        print("saved ready!")
        #catch up to current logs
        for server in servers:
            for channel in server.text_channels:
                if not(self.channelID(channel) in list(self.liveChannelTextMap.keys())):
                    self.liveChannelTextMap[self.channelID(channel)] = {}
                await self.getLogs(channel)
        #save current logs for next time
        for server in servers:
            underscoredServerName = self.serverName(server).replace(" ", "_")
            if not(os.path.isdir(rootFolder + underscoredServerName)):
                os.makedirs(rootFolder + underscoredServerName)
            if(os.path.isdir(rootFolder + underscoredServerName)):
                for channel in server.text_channels:
                    underscoredChannelName = self.channelName(channel).replace(" ", "_")
                    if not(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
                        os.makedirs(rootFolder + underscoredServerName + "/" + underscoredChannelName)
                    if(os.path.isdir(rootFolder + underscoredServerName + "/" + underscoredChannelName)):
                        for username in self.liveChannelTextMap[self.channelID(channel)].keys():
                            f = open(rootFolder + underscoredServerName + "/" + underscoredChannelName + "/" + username + ".txt", 'w')
                            f.write(self.liveChannelTextMap[self.channelID(channel)][username])
        self.isLiveReady = True
        for server in servers:
            for channel in server.text_channels:
                src.app.attemptMarkovCacheRefresh(self, channel.id, True)
        print("live ready!")

    ################################################################################
    # onMessage
    #
    # When someone sends a message in a channel with a bot, this function fires
    # so you can process the given message
    #
    # Args:
    #
    #   message - a Message object
    #
    # Returns - nothing
    ################################################################################
    async def onMessage(self, message):
        await src.util.callbackUtil.functionSwitcher(message)
        # Before the live map is caught up, record into the saved map so
        # nothing is lost during startup.
        if(self.isSavedReady and not self.isLiveReady):
            src.data.messages.saveMessage(message, self.savedChannelTextMap)
        if(self.isLiveReady):
            src.data.messages.saveMessage(message, self.liveChannelTextMap)
            src.app.attemptMarkovCacheRefresh(message.api, message.channelID)

    ################################################################################
    # onReactionAdd
    #
    # When someone adds a reaction in a channel with a bot, this function fires
    # so you can process the given reaction
    #
    # Args:
    #
    #   reaction - a Reaction object
    #
    #   username - the reacting user
    #
    # Returns - nothing
    ################################################################################
    async def onReactionAdd(self, reaction, username):
        message = self.reactionMessage(reaction)
        # Only react to messages we are tracking as polls.
        isPoll = False
        for pollID in self.polls:
            if(message.messageID == pollID):
                isPoll = True
        if not(isPoll):
            return
        newPoll = await self.editMessage(message, src.data.polls.addVote(message, reaction, username))
        # This either replaces the old poll with the new, or adds the new one
        self.polls[message.messageID] = newPoll

    ################################################################################
    # onReactionRemove
    #
    # When someone removes a reaction in a channel with a bot, this function fires
    # so you can process the given reaction
    #
    # Args:
    #
    #   reaction - a Reaction object
    #
    #   username - the reacting user
    #
    # Returns - nothing
    ################################################################################
    async def onReactionRemove(self, reaction, username):
        message = self.reactionMessage(reaction)
        # Only react to messages we are tracking as polls.
        isPoll = False
        for pollID in self.polls:
            if(message.messageID == pollID):
                isPoll = True
        if not(isPoll):
            return
        newPoll = await self.editMessage(message, src.data.polls.removeVote(message, reaction, username))
        # This either replaces the old poll with the new, or adds the new one
        self.polls[message.messageID] = newPoll

    ################################################################################
    # onReactionClear
    #
    # When someone clears a reaction in a channel with a bot, this function fires
    # so you can process the given reaction
    #
    # Args:
    #
    #   reaction - the Reaction object
    #
    #   username - the reacting user
    #
    # Returns - nothing
    ################################################################################
    async def onReactionClear(self, reaction, username):
        message = self.reactionMessage(reaction)
        # Only react to messages we are tracking as polls.
        isPoll = False
        for pollID in self.polls:
            if(message.messageID == pollID):
                isPoll = True
        if not(isPoll):
            return
        newPoll = await self.editMessage(message, src.data.polls.removeVote(message, reaction, username))
        # This either replaces the old poll with the new, or adds the new one
        self.polls[message.messageID] = newPoll

    ################################################################################
    # sendFile
    #
    # Sends the given file to the given channel
    #
    # Args:
    #
    #   message - a Message object
    #
    #   fileToSend - a string with the path of the file to send
    #
    # Return - nothing
    ################################################################################
    @abstractmethod
    async def sendFile(self, message, fileToSend):
        pass

    ################################################################################
    # addReaction
    #
    # Adds the given reaction to the given message
    #
    # Args:
    #
    #   message - a Message object
    #
    #   reactionToAdd - a string with the name of the emoji to add, found in
    #                   emojiDict
    #
    # Return - nothing
    ################################################################################
    @abstractmethod
    async def addReaction(self, message, reactionToAdd):
        pass

    ################################################################################
    # sendMessage
    #
    # Sends the given message to the given channel
    #
    # Args:
    #
    #   message - a Message object
    #
    #   messageToSend - a string message to send
    #
    # Return - nothing
    ################################################################################
    @abstractmethod
    async def sendMessage(self, message, messageToSend):
        pass
|
mit
| -6,713,889,131,034,812,000
| 30.954819
| 134
| 0.523284
| false
| 4.85048
| false
| false
| false
|
tantexian/sps-2014-12-4
|
sps/openstack/common/policy.py
|
1
|
21606
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. This is the original way of
expressing policies, but there now exists a new way: the policy
language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct code to perform that check. However, conjunction
operators are available, allowing for more expressiveness in crafting
policies.
As an example, take the following rule, expressed in the list-of-lists
representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
In the policy language, this becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import re
import urllib
import six
import urllib2
from sps.openstack.common.gettextutils import _
from sps.openstack.common import jsonutils
from sps.openstack.common import log as logging
LOG = logging.getLogger(__name__)
# Module-level rule store installed via set_rules(); None means "no rules
# loaded", which makes check() fail closed.
_rules = None
# Registry mapping check kinds (e.g. "role", "http", None for the default)
# to Check classes; populated by the register() decorator below.
_checks = {}
class Rules(dict):
    """A dict-like store of named policy rules.

    Unknown keys fall back to the configured default rule (see
    __missing__); the store can be built straight from JSON text.
    """

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Build a Rules instance from a JSON document of rule strings."""
        # Parse every JSON value into a Check tree up front.
        parsed = {}
        for name, raw_rule in jsonutils.loads(data).items():
            parsed[name] = parse_rule(raw_rule)
        return cls(parsed, default_rule)

    def __init__(self, rules=None, default_rule=None):
        """Initialize the store with optional initial rules and a default."""
        super(Rules, self).__init__(rules or {})
        self.default_rule = default_rule

    def __missing__(self, key):
        """Resolve unknown rule names through the default rule.

        Raises KeyError when no usable default rule is configured.
        """
        if not self.default_rule or self.default_rule not in self:
            raise KeyError(key)
        return self[self.default_rule]

    def __str__(self):
        """Dump a pretty-printed JSON representation of the rules."""
        canonical = {}
        for name, rule in self.items():
            # Singleton TrueCheck serializes as the empty string.
            canonical[name] = '' if isinstance(rule, TrueCheck) else str(rule)
        return jsonutils.dumps(canonical, indent=4)
# Really have to figure out a way to deprecate this
def set_rules(rules):
    """Set the rules in use for policy checks.

    :param rules: mapping of rule name -> Check (typically a Rules
                  instance) that subsequent check() calls will consult.
    """
    global _rules

    _rules = rules
# Ditto
def reset():
    """Clear the rules used for policy checks.

    After this, check() fails closed until set_rules() is called again.
    """
    global _rules

    _rules = None
def check(rule, target, creds, exc=None, *args, **kwargs):
    """
    Checks authorization of a rule against the target and credentials.

    :param rule: The rule to evaluate; either a rule name (looked up in
                 the module-level rule store) or a BaseCheck tree.
    :param target: As much information about the object being operated
                   on as possible, as a dictionary.
    :param creds: As much information about the user performing the
                  action as possible, as a dictionary.
    :param exc: Class of the exception to raise if the check fails.
                Any remaining arguments passed to check() (both
                positional and keyword arguments) will be passed to
                the exception class.  If exc is not provided, returns
                False.

    :return: Returns False if the policy does not allow the action and
             exc is not provided; otherwise, returns a value that
             evaluates to True.  Note: for rules using the "case"
             expression, this True value will be the specified string
             from the expression.
    """

    # Allow the rule to be a Check tree
    if isinstance(rule, BaseCheck):
        result = rule(target, creds)
    elif not _rules:
        # No rules to reference means we're going to fail closed
        result = False
    else:
        try:
            # Evaluate the rule
            result = _rules[rule](target, creds)
        except KeyError:
            # If the rule doesn't exist, fail closed
            result = False

    # If it is False, raise the exception if requested
    if exc and result is False:
        raise exc(*args, **kwargs)

    return result
class BaseCheck(object):
    """
    Abstract base class for Check classes.
    """

    # Python 2 style metaclass assignment; under Python 3 this line is
    # inert and the abstract methods are not enforced.
    # NOTE(review): confirm the target interpreter is Python 2.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __str__(self):
        """
        Retrieve a string representation of the Check tree rooted at
        this node.
        """

        pass

    @abc.abstractmethod
    def __call__(self, target, cred):
        """
        Perform the check.  Returns False to reject the access or a
        true value (not necessary True) to accept the access.
        """

        pass
class FalseCheck(BaseCheck):
    """Check implementing the '!' rule: unconditionally deny access."""

    def __str__(self):
        """Serialize back to the policy-language token for "deny"."""
        return "!"

    def __call__(self, target, cred):
        """Reject the access regardless of target or credentials."""
        return False
class TrueCheck(BaseCheck):
    """Check implementing the '@' rule: unconditionally allow access."""

    def __str__(self):
        """Serialize back to the policy-language token for "allow"."""
        return "@"

    def __call__(self, target, cred):
        """Accept the access regardless of target or credentials."""
        return True
class Check(BaseCheck):
    """
    A base class to allow for user-defined policy checks.

    Stores the "kind:match" pair of a rule; subclasses implement
    __call__ to give the pair its semantics.
    """

    def __init__(self, kind, match):
        """
        :param kind: The kind of the check, i.e., the field before the
                     ':'.
        :param match: The match of the check, i.e., the field after
                      the ':'.
        """

        self.kind = kind
        self.match = match

    def __str__(self):
        """Return a string representation of this check."""

        return "%s:%s" % (self.kind, self.match)
class NotCheck(BaseCheck):
    """
    A policy check that inverts the result of another policy check.
    Implements the "not" operator.
    """

    def __init__(self, rule):
        """
        Initialize the 'not' check.

        :param rule: The rule to negate.  Must be a Check.
        """

        self.rule = rule

    def __str__(self):
        """Return a string representation of this check."""

        return "not %s" % self.rule

    def __call__(self, target, cred):
        """
        Check the policy.  Returns the logical inverse of the wrapped
        check.
        """

        return not self.rule(target, cred)
class AndCheck(BaseCheck):
    """Composite check that passes only when every sub-rule passes.

    Implements the "and" operator of the policy language.
    """

    def __init__(self, rules):
        """Store the list of sub-rules to be conjoined.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized ' and '-joined expression."""
        parts = [str(sub_rule) for sub_rule in self.rules]
        return "(%s)" % ' and '.join(parts)

    def __call__(self, target, cred):
        """Evaluate the sub-rules, failing fast on the first rejection."""
        return all(sub_rule(target, cred) for sub_rule in self.rules)

    def add_check(self, rule):
        """Append one more sub-rule.

        Returns the AndCheck itself so calls can be chained.
        """
        self.rules.append(rule)
        return self
class OrCheck(BaseCheck):
    """Composite check that passes when at least one sub-rule passes.

    Implements the "or" operator of the policy language.
    """

    def __init__(self, rules):
        """Store the list of sub-rules to be disjoined.

        :param rules: A list of rules that will be tested.
        """
        self.rules = rules

    def __str__(self):
        """Render as a parenthesized ' or '-joined expression."""
        parts = [str(sub_rule) for sub_rule in self.rules]
        return "(%s)" % ' or '.join(parts)

    def __call__(self, target, cred):
        """Evaluate the sub-rules, succeeding fast on the first pass."""
        return any(sub_rule(target, cred) for sub_rule in self.rules)

    def add_check(self, rule):
        """Append one more sub-rule.

        Returns the OrCheck itself so calls can be chained.
        """
        self.rules.append(rule)
        return self
def _parse_check(rule):
    """
    Parse a single base check rule into an appropriate Check object.

    Invalid or unhandled rules fail closed (FalseCheck).
    """

    # Handle the special checks
    if rule == '!':
        return FalseCheck()
    elif rule == '@':
        return TrueCheck()

    try:
        kind, match = rule.split(':', 1)
    except Exception:
        # %(rule)s in the message is interpolated from locals()
        LOG.exception(_("Failed to understand rule %(rule)s") % locals())
        # If the rule is invalid, we'll fail closed
        return FalseCheck()

    # Find what implements the check; the None entry (if registered)
    # is the catch-all handler.
    if kind in _checks:
        return _checks[kind](kind, match)
    elif None in _checks:
        return _checks[None](kind, match)
    else:
        LOG.error(_("No handler for matches of kind %s") % kind)
        return FalseCheck()
def _parse_list_rule(rule):
    """
    Provided for backwards compatibility.  Translates the old
    list-of-lists syntax into a tree of Check objects.
    """

    # Empty rule defaults to True
    if not rule:
        return TrueCheck()

    # Outer list is joined by "or"; inner list by "and"
    or_list = []
    for inner_rule in rule:
        # Elide empty inner lists
        if not inner_rule:
            continue

        # Handle bare strings (basestring is Python 2 only)
        if isinstance(inner_rule, basestring):
            inner_rule = [inner_rule]

        # Parse the inner rules into Check objects
        and_list = [_parse_check(r) for r in inner_rule]

        # Append the appropriate check to the or_list
        if len(and_list) == 1:
            or_list.append(and_list[0])
        else:
            or_list.append(AndCheck(and_list))

    # If we have only one check, omit the "or"
    if not or_list:
        return FalseCheck()
    elif len(or_list) == 1:
        return or_list[0]

    return OrCheck(or_list)
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')


def _parse_tokenize(rule):
    """
    Tokenizer for the policy language.

    Yields (token_kind, value) pairs, where token_kind is one of
    '(', ')', 'and', 'or', 'not', 'string' or 'check'.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string.  Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).
    """

    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue

        # Handle leading parens on the token
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('

        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean

        # Handle trailing parens on the token
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)

        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                yield 'check', _parse_check(clean)

        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
class ParseStateMeta(type):
    """
    Metaclass for the ParseState class.  Facilitates identifying
    reduction methods.
    """

    def __new__(mcs, name, bases, cls_dict):
        """
        Create the class.  Injects the 'reducers' list, a list of
        tuples matching token sequences to the names of the
        corresponding reduction methods.
        """

        reducers = []

        # Collect every method tagged by the @reducer decorator.
        for key, value in cls_dict.items():
            if not hasattr(value, 'reducers'):
                continue
            for reduction in value.reducers:
                reducers.append((reduction, key))

        cls_dict['reducers'] = reducers

        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
    """Decorator marking a method as a reduction rule.

    The given *tokens* sequence is recorded on the function's
    ``reducers`` attribute; ParseStateMeta later collects these to
    build the parser's reduction table.  A method may carry several
    sequences by stacking the decorator.
    """
    trigger_sequence = list(tokens)

    def decorator(func):
        # Lazily create the per-function list of trigger sequences.
        existing = getattr(func, 'reducers', None)
        if existing is None:
            func.reducers = [trigger_sequence]
        else:
            existing.append(trigger_sequence)
        return func

    return decorator
class ParseState(object):
    """
    Implement the core of parsing the policy language.  Uses a greedy
    reduction algorithm to reduce a sequence of tokens into a single
    terminal, the value of which will be the root of the Check tree.

    Note: error reporting is rather lacking.  The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """

    # Python 2 style metaclass assignment (inert under Python 3, where
    # the 'reducers' table would not be built).
    # NOTE(review): confirm the target interpreter is Python 2.
    __metaclass__ = ParseStateMeta

    def __init__(self):
        """Initialize the ParseState."""

        # Parallel stacks: token kinds and their associated values.
        self.tokens = []
        self.values = []

    def reduce(self):
        """
        Perform a greedy reduction of the token stream.  If a reducer
        method matches, it will be executed, then the reduce() method
        will be called recursively to search for any more possible
        reductions.
        """

        for reduction, methname in self.reducers:
            # Match the tail of the token stack against the trigger.
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)

                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])

                # Update the tokens and values
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]

                # Check for any more reductions
                return self.reduce()

    def shift(self, tok, value):
        """Adds one more token to the state.  Calls reduce()."""

        self.tokens.append(tok)
        self.values.append(value)

        # Do a greedy reduce...
        self.reduce()

    @property
    def result(self):
        """
        Obtain the final result of the parse.  Raises ValueError if
        the parse failed to reduce to a single result.
        """

        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]

    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""

        return [('check', check)]

    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """
        Create an 'and_expr' from two checks joined by the 'and'
        operator.
        """

        return [('and_expr', AndCheck([check1, check2]))]

    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """
        Extend an 'and_expr' by adding one more check.
        """

        return [('and_expr', and_expr.add_check(check))]

    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """
        Create an 'or_expr' from two checks joined by the 'or'
        operator.
        """

        return [('or_expr', OrCheck([check1, check2]))]

    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """
        Extend an 'or_expr' by adding one more check.
        """

        return [('or_expr', or_expr.add_check(check))]

    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""

        return [('check', NotCheck(check))]
def _parse_text_rule(rule):
    """
    Translates a policy written in the policy language into a tree of
    Check objects.

    Unparseable rules fail closed (FalseCheck).
    """

    # Empty rule means always accept
    if not rule:
        return TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule
        LOG.exception(_("Failed to understand rule %(rule)r") % locals())

        # Fail closed
        return FalseCheck()
def parse_rule(rule):
    """
    Parses a policy rule into a tree of Check objects.

    Accepts either a policy-language string or the legacy
    list-of-lists form.
    """

    # If the rule is a string, it's in the policy language
    # (basestring is Python 2 only)
    if isinstance(rule, basestring):
        return _parse_text_rule(rule)
    return _parse_list_rule(rule)
def register(name, func=None):
    """
    Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc.  If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def decorator(check):
        """Record *check* under *name* and hand it back unchanged."""
        _checks[name] = check
        return check

    # Direct-call form register(name, func); otherwise act as a decorator.
    return decorator(func) if func else decorator
@register("rule")
class RuleCheck(Check):
    """Check that delegates to another named rule in the rule store."""

    def __call__(self, target, creds):
        """
        Recursively checks credentials based on the defined rules.
        """

        try:
            return _rules[self.match](target, creds)
        except KeyError:
            # We don't have any matching rule; fail closed
            return False
@register("role")
class RoleCheck(Check):
    """Check matching a role name (case-insensitive) in creds['roles']."""

    def __call__(self, target, creds):
        """Check that there is a matching role in the cred dict."""

        return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
    """Check that delegates the decision to a remote HTTP endpoint."""

    def __call__(self, target, creds):
        """
        Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        is exactly 'True'.
        """

        # self.match is a URL template interpolated from the target dict.
        url = ('http:' + self.match) % target
        data = {'target': jsonutils.dumps(target),
                'credentials': jsonutils.dumps(creds)}
        # urllib/urllib2 are Python 2 APIs (urllib.parse/urllib.request
        # in Python 3).
        post_data = urllib.urlencode(data)
        f = urllib2.urlopen(url, post_data)
        return f.read() == "True"
@register(None)
class GenericCheck(Check):
    """Fallback check comparing a credential field against a template."""

    def __call__(self, target, creds):
        """
        Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
        """

        # TODO(termie): do dict inspection via dot syntax
        match = self.match % target
        if self.kind in creds:
            # Compare as text to tolerate non-string credential values.
            return match == six.text_type(creds[self.kind])
        return False
|
apache-2.0
| 9,064,018,407,252,972,000
| 26.7
| 78
| 0.594048
| false
| 4.255663
| false
| false
| false
|
codecakes/algorithms_monk
|
search/numof_occurence_sorted.py
|
1
|
1383
|
def firstOccur(arr, N, x):
    """Return the index of the first occurrence of x in sorted arr[0:N].

    Returns -1 when x is absent (or N == 0).

    The original hand-rolled mid-nudging search was incorrect: it
    returned `righti` without checking the value whenever the nudged
    mid hit the right bound (e.g. firstOccur([1, 3], 2, 2) -> 1, and
    firstOccur([2, 2], 2, 2) -> 1 instead of 0), and crashed on an
    empty array.  This is a standard bisect_left-style search; `//`
    keeps integer division explicit on both Python 2 and 3.
    """
    lo, hi = 0, N
    while lo < hi:
        mid = (lo + hi) // 2
        if arr[mid] < x:
            lo = mid + 1
        else:
            hi = mid
    # lo is now the leftmost insertion point for x.
    return lo if lo < N and arr[lo] == x else -1
def lastOccur(arr, N, x):
    """Return the index of the last occurrence of x in sorted arr[0:N].

    Returns -1 when x is absent (or N == 0).

    Mirrors firstOccur(): the original mid-nudging loop could return a
    bound without verifying its value and crashed on an empty array.
    This is a standard bisect_right-style search; `//` keeps integer
    division explicit on both Python 2 and 3.
    """
    lo, hi = 0, N
    while lo < hi:
        mid = (lo + hi) // 2
        if arr[mid] <= x:
            lo = mid + 1
        else:
            hi = mid
    # lo is now the rightmost insertion point; the element before it
    # is the last occurrence, if any.
    return lo - 1 if lo > 0 and arr[lo - 1] == x else -1
def numOccur(arr, N, x):
    """Count occurrences of x in sorted arr[0:N]; returns -1 when absent.

    The original version always indexed arr[left_index] even when the
    sub-searches returned -1, relying on arr[-1] happening not to equal
    x; guard explicitly instead of indexing with -1.
    """
    left_index = firstOccur(arr, N, x)
    if left_index == -1:
        # x is not present at all.
        return -1
    right_index = lastOccur(arr, N, x)
    return right_index - left_index + 1
if __name__ == "__main__":
    # Demo: locate and count 2s and 90s in a small sorted array.
    # print(x) with a single argument behaves identically on
    # Python 2 and Python 3.
    arr = [2, 2, 2, 3, 46, 1, 5, 90, 90, 90]
    arr.sort()
    for target in (2, 90):
        print(firstOccur(arr, len(arr), target))
        print(lastOccur(arr, len(arr), target))
        print(numOccur(arr, len(arr), target))
|
mit
| 6,805,885,785,412,038,000
| 25.596154
| 97
| 0.52133
| false
| 3.019651
| false
| false
| false
|
freelan-developers/teapot
|
teapot/path.py
|
1
|
5726
|
"""
A teapot path-handling class.
"""
import os
import stat
import shutil
import errno
from contextlib import contextmanager
from functools import wraps
from teapot.log import LOGGER
from teapot.log import Highlight as hl
def from_user_path(path):
    """
    Perform all variables substitutions from the specified user path.

    Environment variables are expanded first, then '~', and the result
    is normalized.
    """
    expanded = os.path.expandvars(path)
    expanded = os.path.expanduser(expanded)
    return os.path.normpath(expanded)
def resolve_user_path(func):
    """
    A decorator that resolves user paths in the return value.
    """

    @wraps(func)
    def wrapped_func(*args, **kwargs):
        raw_result = func(*args, **kwargs)
        return from_user_path(raw_result)

    return wrapped_func
def read_path(value, base_path, default_path):
    """
    Read a path value from a string.

    Falls back to *default_path* when *value* is falsy; a relative
    result is resolved against *base_path*.
    """
    chosen = value if value else default_path
    cache_path = from_user_path(chosen)

    if not os.path.isabs(cache_path):
        cache_path = os.path.normpath(os.path.join(base_path, cache_path))

    return cache_path
@contextmanager
def chdir(path):
    """
    Changes the directory temporarily.

    Restores the previous working directory on exit.  Moving (and the
    accompanying debug logging) is skipped when *path* is already the
    current directory.
    """
    path = os.path.abspath(path)
    saved_dir = os.getcwd()

    if os.path.abspath(saved_dir) != os.path.abspath(path):
        LOGGER.debug(
            "Temporarily changing current directory from %s to %s",
            hl(saved_dir),
            hl(path),
        )
        os.chdir(path)

    try:
        yield
    finally:
        # Only restore if we actually moved.
        if os.path.abspath(saved_dir) != os.path.abspath(path):
            LOGGER.debug(
                "Changing back current directory from %s to %s",
                hl(path),
                hl(saved_dir),
            )
            os.chdir(saved_dir)
def mkdir(path):
    """
    Create the specified path.

    Does nothing if the path exists.
    """

    try:
        if not os.path.isdir(path):
            LOGGER.debug('Creating directory at %s.', hl(path))
            os.makedirs(path)
    except OSError as ex:
        # Tolerate a concurrent creation of the same directory;
        # re-raise anything else (including a non-directory occupying
        # the path).
        if ex.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def rmdir(path):
    """
    Delete the specified path if it exists.

    Does nothing if the path doesn't exist.  Removal is best-effort:
    any failure is logged as a warning, never raised.
    """

    try:
        LOGGER.info('Removing directory at %s.', hl(path))

        def onerror(func, path, excinfo):
            # Retry once after making the entry writable; read-only
            # files commonly block deletion (notably on Windows).
            if os.path.exists(path):
                LOGGER.debug('Was unable to delete "%s": %s', hl(path), excinfo[1])
                LOGGER.debug('Trying again after changing permissions...')
                os.chmod(path, stat.S_IWUSR)

                try:
                    func(path)
                except Exception as ex:
                    LOGGER.error('Unable to delete "%s": %s', hl(path), excinfo[1])
                    raise

        shutil.rmtree(path, ignore_errors=False, onerror=onerror)
    except Exception as ex:
        LOGGER.warning(ex)
def copytree(src, dst, symlinks=False, ignore=None, copy_function=shutil.copy2):
    # Local variant of shutil.copytree that lets the caller choose the
    # per-file copy function (e.g. os.link) and falls back to a real
    # shutil.copy2 when that function fails with an I/O error.
    # Errors are accumulated and raised at the end as a single Error, so
    # one bad file does not abort the rest of the tree.
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()
    # Destination must not already exist (same contract as shutil.copytree).
    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                # Recreate the symlink rather than copying its target.
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                try:
                    copy_function(srcname, dstname)
                # NOTE(review): WindowsError is undefined on POSIX Python 3;
                # confirm this module targets Python 2 or Windows-only paths.
                except (IOError, WindowsError):
                    shutil.copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except (IOError, os.error) as why:
            errors.append((srcname, dstname, str(why)))
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
    try:
        shutil.copystat(src, dst)
    except WindowsError:
        # can't copy file access times on Windows
        pass
    except OSError as why:
        # NOTE(review): extend() flattens this tuple into three separate
        # list items (mirrors an old shutil quirk); append() may be the
        # intent — confirm before relying on Error's payload shape.
        errors.extend((src, dst, str(why)))
    if errors:
        raise Error(errors)
@contextmanager
def temporary_copy(source_path, target_path, persistent=False):
    """
    Copy a source path to a target path.
    The target will be deleted upon exit, unless `persistent`
    is truthy.
    """
    try:
        if os.path.exists(target_path):
            # Clear any stale copy first: copytree requires that the
            # destination does not exist.
            rmdir(target_path)
        LOGGER.info('Copying %s to %s...', hl(source_path), hl(target_path))
        # Prefer hard links when the platform provides os.link (cheap and
        # storage-sharing); otherwise fall back to a real file copy.
        copytree(source_path, target_path, copy_function=getattr(os, 'link', shutil.copy2))
        yield target_path
    finally:
        # Cleanup runs even if the caller's block raised.
        if not persistent:
            rmdir(target_path)
        else:
            LOGGER.info('Not erasing temporary directory at %s.', hl(target_path))
def windows_to_unix_path(path):
    """
    Convert a Windows path to a UNIX path, in such a way that it can be used in
    MSys or Cygwin.
    """
    drive, tail = os.path.splitdrive(path)
    # 'C:' becomes '/C'; a drive-less path gets no prefix at all.
    prefix = '/' + drive[0] if drive else ''
    return prefix + tail.replace('\\', '/')
@contextmanager
def chdir(path):
    """
    Change the current directory.
    """
    # NOTE(review): this redefines the chdir() context manager declared
    # earlier in this module; being defined later, this simpler variant
    # (no same-directory short-circuit, yields the path) wins at import
    # time. Confirm the duplication is intentional.
    old_path = os.getcwd()
    LOGGER.debug('Moving to: %s', hl(path))
    os.chdir(path)
    try:
        yield path
    finally:
        # Restore the previous directory even if the caller's block raised.
        LOGGER.debug('Moving back to: %s', hl(old_path))
        os.chdir(old_path)
|
mit
| 7,732,962,308,856,751,000
| 23.470085
| 91
| 0.580685
| false
| 3.908532
| false
| false
| false
|
rsepassi/tensor2tensor
|
tensor2tensor/data_generators/translate_enmk.py
|
1
|
2362
|
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data generators for translation data-sets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.data_generators import translate
from tensor2tensor.utils import registry
import tensorflow as tf
FLAGS = tf.flags.FLAGS

# End-of-sentence marker.
EOS = text_encoder.EOS_ID

# For Macedonian-English the SETimes corpus
# from http://nlp.ffzg.hr/resources/corpora/setimes/ is used.
# The original dataset has 207,777 parallel sentences.
# For training the first 205,777 sentences are used.
# Each entry is [archive URL, (source filename, target filename) inside it].
_MKEN_TRAIN_DATASETS = [[
    "https://github.com/stefan-it/nmt-mk-en/raw/master/data/setimes.mk-en.train.tgz",  # pylint: disable=line-too-long
    ("train.mk", "train.en")
]]

# For development 1000 parallel sentences are used.
_MKEN_TEST_DATASETS = [[
    "https://github.com/stefan-it/nmt-mk-en/raw/master/data/setimes.mk-en.dev.tgz",  # pylint: disable=line-too-long
    ("dev.mk", "dev.en")
]]
@registry.register_problem
class TranslateEnmkSetimes32k(translate.TranslateProblem):
  """Problem spec for SETimes Mk-En translation."""

  @property
  def approx_vocab_size(self):
    # Target subword vocabulary size for the SubwordTextEncoder.
    return 2**15  # 32768

  @property
  def vocab_filename(self):
    # Shared vocab file name, parameterized by the vocab size above.
    return "vocab.mken.%d" % self.approx_vocab_size

  def source_data_files(self, dataset_split):
    # Select train vs. dev corpora, then split each
    # [url, (src_file, tgt_file)] entry into separate source-side and
    # target-side dataset specs, as the base generator expects.
    train = dataset_split == problem.DatasetSplit.TRAIN
    datasets = _MKEN_TRAIN_DATASETS if train else _MKEN_TEST_DATASETS
    source_datasets = [[item[0], [item[1][0]]] for item in datasets]
    target_datasets = [[item[0], [item[1][1]]] for item in datasets]
    return source_datasets + target_datasets
|
apache-2.0
| -664,401,956,210,390,800
| 33.231884
| 118
| 0.738781
| false
| 3.473529
| false
| false
| false
|
Larpon/DeadAscend
|
sbin/translation.py
|
1
|
1945
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
from __future__ import print_function
import os
import re
import sys
import json
import argparse
import fileinput
def translation_qml(file_path):
    """Build a QML snippet exposing scene-editor descriptions for translation.

    Reads a Tiled-style scene (JSON) file, collects the ``description``
    property of every object in every ``objectgroup`` layer, and emits a
    ``QtObject`` whose ``Component.onCompleted`` handler wraps each
    description in ``qsTranslate("fromEditor", ...)`` so Qt's translation
    tooling picks the strings up.

    A description that starts with ``[`` is parsed as a JSON list of
    strings, each of which gets its own qsTranslate() entry.

    :param file_path: path to the scene (JSON) file.
    :return: the generated QML source as a string.
    """
    # Close the file deterministically; the original json.load(open(...))
    # leaked the handle until GC.
    with open(file_path) as fh:
        scene = json.load(fh)

    pad = "        "
    entries = []
    for layer in scene['layers']:
        if layer['type'] != "objectgroup":
            continue
        for obj in layer['objects']:
            if 'properties' not in obj or 'description' not in obj['properties']:
                continue
            desc = obj['properties']['description']
            # startswith() is safe on an empty string, unlike desc[0]
            # which raised IndexError for empty descriptions.
            if desc.startswith('['):
                for d in json.loads(desc):
                    entries.append(pad + "qsTranslate(\"fromEditor\",\"" + d + "\")" + "\n")
            else:
                entries.append(pad + "qsTranslate(\"fromEditor\",\"" + desc + "\")" + "\n")

    qml_out = """import QtQuick 2.0
/*
 * Auto-generated by sbin/translation.py
 * Will export any scene editor descriptions to the game
*/
QtObject {
    Component.onCompleted: {
"""
    qml_out += "".join(entries)
    qml_out += """
    }
}"""
    return qml_out
def main(arguments):
    """CLI entry point: read a scene file and write the generated QML.

    Parses *arguments* for a single positional scene (JSON) file path,
    generates the translation QML for it and writes the result to
    ``Extra.qml`` in the current directory.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('scene_file', help="Scene (json) file")
    args = parser.parse_args(arguments)

    qml = translation_qml(args.scene_file)
    with open("Extra.qml", "w") as text_file:
        text_file.write(qml)
if __name__ == '__main__':
    # Delegate to main() with the CLI arguments (program name stripped);
    # its return value becomes the process exit status.
    sys.exit(main(sys.argv[1:]))
|
mit
| 3,238,838,331,313,436,700
| 24.933333
| 95
| 0.548586
| false
| 3.747592
| false
| false
| false
|
mattrobenolt/invoke
|
integration/main.py
|
1
|
4239
|
import os
import sys
from spec import Spec, trap, eq_, skip, ok_
from invoke import run
from invoke._version import __version__
from invoke.platform import WINDOWS
def _output_eq(cmd, expected):
    """Run *cmd* with output hidden and assert its stdout equals *expected*."""
    observed = run(cmd, hide=True).stdout
    return eq_(observed, expected)
class Main(Spec):
    """Integration checks that shell out to the real ``invoke``/``inv`` CLI."""

    def setup(self):
        # Enter integration/ so Invoke loads its local tasks.py
        os.chdir(os.path.dirname(__file__))

    @trap
    def basic_invocation(self):
        _output_eq("invoke print_foo", "foo\n")

    @trap
    def version_output(self):
        _output_eq("invoke --version", "Invoke {0}\n".format(__version__))

    @trap
    def help_output(self):
        ok_("Usage: inv[oke] " in run("invoke --help").stdout)

    @trap
    def shorthand_binary_name(self):
        # 'inv' is installed as a shorthand alias of 'invoke'.
        _output_eq("inv print_foo", "foo\n")

    @trap
    def explicit_task_module(self):
        _output_eq("inv --collection _explicit foo", "Yup\n")

    @trap
    def invocation_with_args(self):
        _output_eq(
            "inv print_name --name whatevs",
            "whatevs\n"
        )

    @trap
    def bad_collection_exits_nonzero(self):
        # A bad -c target must fail (exit 1) and report on stderr only.
        result = run("inv -c nope -l", warn=True)
        eq_(result.exited, 1)
        assert not result.stdout
        assert result.stderr

    def loads_real_user_config(self):
        # Temporarily drops a real ~/.invoke.yaml; always cleaned up below.
        path = os.path.expanduser("~/.invoke.yaml")
        try:
            with open(path, 'w') as fd:
                fd.write("foo: bar")
            _output_eq("inv print_config", "bar\n")
        finally:
            try:
                os.unlink(path)
            except OSError:
                pass

    def complex_nesting_under_ptys_doesnt_break(self):
        if WINDOWS:  # Not sure how to make this work on Windows
            return
        # GH issue 191
        substr = " hello\t\t\nworld with spaces"
        cmd = """ eval 'echo "{0}" ' """.format(substr)
        # PTYs translate \n to \r\n on output.
        expected = ' hello\t\t\r\nworld with spaces\r\n'
        eq_(run(cmd, pty=True, hide='both').stdout, expected)

    def KeyboardInterrupt_on_stdin_doesnt_flake(self):
        # E.g. inv test => Ctrl-C halfway => shouldn't get buffer API errors
        skip()

    # Grouping sub-class (spec-style nesting): decode-robustness checks.
    class funky_characters_in_stdout:
        def basic_nonstandard_characters(self):
            os.chdir('_support')
            # Crummy "doesn't explode with decode errors" test
            if WINDOWS:
                cmd = "type tree.out"
            else:
                cmd = "cat tree.out"
            run(cmd, hide='both')

        def nonprinting_bytes(self):
            # Seriously non-printing characters (i.e. non UTF8) also don't
            # asplode
            run("echo '\xff'", hide='both')

        def nonprinting_bytes_pty(self):
            if WINDOWS:
                return
            # PTY use adds another utf-8 decode spot which can also fail.
            run("echo '\xff'", pty=True, hide='both')

    def pty_puts_both_streams_in_stdout(self):
        if WINDOWS:
            return
        os.chdir('_support')
        err_echo = "{0} err.py".format(sys.executable)
        command = "echo foo && {0} bar".format(err_echo)
        r = run(command, hide='both', pty=True)
        # Under a PTY, stderr is merged into stdout.
        eq_(r.stdout, 'foo\r\nbar\r\n')
        eq_(r.stderr, '')

    def simple_command_with_pty(self):
        """
        Run command under PTY
        """
        # Most Unix systems should have stty, which asplodes when not run under
        # a pty, and prints useful info otherwise
        result = run('stty -a', hide=True, pty=True)
        # PTYs use \r\n, not \n, line separation
        ok_("\r\n" in result.stdout)
        eq_(result.pty, True)

    def pty_size_is_realistic(self):
        # When we don't explicitly set pty size, 'stty size' sees it as 0x0.
        # When we do set it, it should be some non 0x0, non 80x24 (the default)
        # value. (yes, this means it fails if you really do have an 80x24
        # terminal. but who does that?)
        size = run('stty size', hide=True, pty=True).stdout.strip()
        assert size != ""
        assert size != "0 0"
        # Apparently true-headless execution like Travis does that!
        if os.environ.get('TRAVIS', False):
            assert size == "24 80"
        else:
            assert size != "24 80"
|
bsd-2-clause
| 6,658,644,813,651,164,000
| 30.87218
| 79
| 0.557915
| false
| 3.623077
| false
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/operations/_express_route_cross_connections_operations.py
|
1
|
43844
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCrossConnectionsOperations(object):
"""ExpressRouteCrossConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def list(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ExpressRouteCrossConnectionListResult"]
        """Retrieves all the ExpressRouteCrossConnections in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteCrossConnectionListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnectionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the first-page GET (templated URL + api-version) or a
            # follow-up GET against the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a complete URL; no extra query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page; return (next-page link, items iterator).
            deserialized = self._deserialize('ExpressRouteCrossConnectionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, mapping known error statuses via error_map.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCrossConnections'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ExpressRouteCrossConnectionListResult"]
        """Retrieves all the ExpressRouteCrossConnections in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExpressRouteCrossConnectionListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnectionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the first-page GET (templated URL + api-version) or a
            # follow-up GET against the service-provided next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a complete URL; no extra query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page; return (next-page link, items iterator).
            deserialized = self._deserialize('ExpressRouteCrossConnectionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, mapping known error statuses via error_map.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        cross_connection_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCrossConnection"
        """Gets details about the specified ExpressRouteCrossConnection.

        :param resource_group_name: The name of the resource group (peering location of the circuit).
        :type resource_group_name: str
        :param cross_connection_name: The name of the ExpressRouteCrossConnection (service key of the
         circuit).
        :type cross_connection_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCrossConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Single synchronous GET; only 200 is a success for this operation.
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        cross_connection_name,  # type: str
        parameters,  # type: "_models.ExpressRouteCrossConnection"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCrossConnection"
        """Issue the initial PUT of the create-or-update long-running operation.

        Internal helper for :meth:`begin_create_or_update`; returns the
        deserialized first response, which the poller then drives to completion.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'ExpressRouteCrossConnection')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        cross_connection_name,  # type: str
        parameters,  # type: "_models.ExpressRouteCrossConnection"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.ExpressRouteCrossConnection"]
        """Update the specified ExpressRouteCrossConnection.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cross_connection_name: The name of the ExpressRouteCrossConnection.
        :type cross_connection_name: str
        :param parameters: Parameters supplied to the update express route crossConnection operation.
        :type parameters: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ExpressRouteCrossConnection or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnection"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # Fresh operation: issue the initial PUT. The identity lambda for
            # `cls` preserves the raw pipeline response for the poller.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                cross_connection_name=cross_connection_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call; the polling method
        # must not see them again.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO.
            deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of starting a new PUT.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'}  # type: ignore
    def update_tags(
        self,
        resource_group_name,  # type: str
        cross_connection_name,  # type: str
        cross_connection_parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ExpressRouteCrossConnection"
        """Updates an express route cross connection tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param cross_connection_name: The name of the cross connection.
        :type cross_connection_name: str
        :param cross_connection_parameters: Parameters supplied to update express route cross
         connection tags.
        :type cross_connection_parameters: ~azure.mgmt.network.v2020_08_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ExpressRouteCrossConnection, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnection
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_tags.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(cross_connection_parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        # Tags-only update is a synchronous PATCH (no LRO).
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ExpressRouteCrossConnection', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}'}  # type: ignore
    def _list_arp_table_initial(
        self,
        resource_group_name,  # type: str
        cross_connection_name,  # type: str
        peering_name,  # type: str
        device_path,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]
        """Issue the initial POST of the list-ARP-table long-running operation.

        Internal helper for ``begin_list_arp_table``. Returns the deserialized
        body on 200, or ``None`` on 202 (operation accepted, still running).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-08-01"
        accept = "application/json"

        # Construct URL
        url = self._list_arp_table_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'devicePath': self._serialize.url("device_path", device_path, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = result ready; 202 = accepted, result comes via polling.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/arpTables/{devicePath}'}  # type: ignore
def begin_list_arp_table(
    self,
    resource_group_name,  # type: str
    cross_connection_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCircuitsArpTableListResult"]
    """Start the long-running operation that fetches the currently advertised
    ARP table of an express route cross connection peering.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param cross_connection_name: The name of the ExpressRouteCrossConnection.
    :type cross_connection_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCircuitsArpTableListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body (or hand it to the caller's `cls`).
        deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if cont_token is None:
        # Kick off the operation; keep only the raw pipeline response.
        raw_result = self._list_arp_table_initial(
            resource_group_name=resource_group_name,
            cross_connection_name=cross_connection_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x, y, z: x,
            **kwargs
        )
        # These were consumed by the initial call; drop them before they
        # reach the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    if polling is True:
        polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling

    if cont_token:
        # Resume a previously started operation instead of issuing a new one.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/arpTables/{devicePath}'}  # type: ignore
def _list_routes_table_summary_initial(
    self,
    resource_group_name,  # type: str
    cross_connection_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
    """Issue the initial POST of the routes-table-summary LRO.

    Returns the deserialized body when the service answers 200, or ``None``
    when it answers 202 (operation still in progress).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-08-01"
    accept = "application/json"

    # Fill the URL template with serialized path arguments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._list_routes_table_summary_initial.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ExpressRouteCrossConnectionsRoutesTableSummaryListResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'}  # type: ignore
def begin_list_routes_table_summary(
    self,
    resource_group_name,  # type: str
    cross_connection_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
    """Start the long-running operation that fetches the route table summary
    of an express route cross connection peering.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param cross_connection_name: The name of the ExpressRouteCrossConnection.
    :type cross_connection_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCrossConnectionsRoutesTableSummaryListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCrossConnectionsRoutesTableSummaryListResult"]
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body (or hand it to the caller's `cls`).
        deserialized = self._deserialize('ExpressRouteCrossConnectionsRoutesTableSummaryListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if cont_token is None:
        # Kick off the operation; keep only the raw pipeline response.
        raw_result = self._list_routes_table_summary_initial(
            resource_group_name=resource_group_name,
            cross_connection_name=cross_connection_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x, y, z: x,
            **kwargs
        )
        # These were consumed by the initial call; drop them before they
        # reach the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    if polling is True:
        polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling

    if cont_token:
        # Resume a previously started operation instead of issuing a new one.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'}  # type: ignore
def _list_routes_table_initial(
    self,
    resource_group_name,  # type: str
    cross_connection_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]
    """Issue the initial POST of the routes-table LRO.

    Returns the deserialized body when the service answers 200, or ``None``
    when it answers 202 (operation still in progress).
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-08-01"
    accept = "application/json"

    # Fill the URL template with serialized path arguments.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self._list_routes_table_initial.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 202]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTables/{devicePath}'}  # type: ignore
def begin_list_routes_table(
    self,
    resource_group_name,  # type: str
    cross_connection_name,  # type: str
    peering_name,  # type: str
    device_path,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> LROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]
    """Start the long-running operation that fetches the currently advertised
    routes table of an express route cross connection peering.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param cross_connection_name: The name of the ExpressRouteCrossConnection.
    :type cross_connection_name: str
    :param peering_name: The name of the peering.
    :type peering_name: str
    :param device_path: The path of the device.
    :type device_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: Pass in True if you'd like the ARMPolling polling method,
     False for no polling, or your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of LROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ExpressRouteCircuitsRoutesTableListResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body (or hand it to the caller's `cls`).
        deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    if cont_token is None:
        # Kick off the operation; keep only the raw pipeline response.
        raw_result = self._list_routes_table_initial(
            resource_group_name=resource_group_name,
            cross_connection_name=cross_connection_name,
            peering_name=peering_name,
            device_path=device_path,
            cls=lambda x, y, z: x,
            **kwargs
        )
        # These were consumed by the initial call; drop them before they
        # reach the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'crossConnectionName': self._serialize.url("cross_connection_name", cross_connection_name, 'str'),
        'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
        'devicePath': self._serialize.url("device_path", device_path, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    if polling is True:
        polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling

    if cont_token:
        # Resume a previously started operation instead of issuing a new one.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCrossConnections/{crossConnectionName}/peerings/{peeringName}/routeTables/{devicePath}'}  # type: ignore
|
mit
| -2,148,993,289,227,831,800
| 51.257449
| 277
| 0.653043
| false
| 4.291699
| true
| false
| false
|
jolyonb/edx-platform
|
common/djangoapps/microsite_configuration/migrations/0001_initial.py
|
1
|
5579
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.db.models.deletion
import django.utils.timezone
import jsonfield.fields
import model_utils.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the microsite_configuration app.

    Creates the ``Microsite``, ``MicrositeHistory``,
    ``MicrositeOrganizationMapping`` and ``MicrositeTemplate`` tables, plus
    the django-simple-history style shadow tables (``Historical*``) that
    record every change to the mapping and template models.

    NOTE(review): applied migrations must stay byte-stable, so only
    comments are added here.
    """

    dependencies = [
        # django.contrib.sites must exist first (Site FKs below), and the
        # swappable user model is needed for the history_user FKs.
        ('sites', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='HistoricalMicrositeOrganizationMapping',
            fields=[
                # `id` mirrors the tracked row's PK (not a PK here);
                # `history_id` is the primary key of the audit row itself.
                ('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
                ('organization', models.CharField(max_length=63, db_index=True)),
                ('history_id', models.AutoField(serialize=False, primary_key=True)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
                # SET_NULL keeps audit rows when the acting user is deleted.
                ('history_user', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical microsite organization mapping',
            },
        ),
        migrations.CreateModel(
            name='HistoricalMicrositeTemplate',
            fields=[
                ('id', models.IntegerField(verbose_name='ID', db_index=True, auto_created=True, blank=True)),
                ('template_uri', models.CharField(max_length=255, db_index=True)),
                ('template', models.TextField()),
                ('history_id', models.AutoField(serialize=False, primary_key=True)),
                ('history_date', models.DateTimeField()),
                ('history_type', models.CharField(max_length=1, choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')])),
                ('history_user', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
                'verbose_name': 'historical microsite template',
            },
        ),
        migrations.CreateModel(
            name='Microsite',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('key', models.CharField(unique=True, max_length=63, db_index=True)),
                # Free-form per-microsite configuration blob.
                ('values', jsonfield.fields.JSONField(blank=True)),
                ('site', models.OneToOneField(related_name='microsite', to='sites.Site', on_delete=models.CASCADE)),
            ],
        ),
        migrations.CreateModel(
            name='MicrositeHistory',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # TimeStampedModel-style audit columns.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('key', models.CharField(unique=True, max_length=63, db_index=True)),
                ('values', jsonfield.fields.JSONField(blank=True)),
                ('site', models.OneToOneField(related_name='microsite_history', to='sites.Site', on_delete=models.CASCADE)),
            ],
            options={
                'verbose_name_plural': 'Microsite histories',
            },
        ),
        migrations.CreateModel(
            name='MicrositeOrganizationMapping',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # An organization maps to exactly one microsite.
                ('organization', models.CharField(unique=True, max_length=63, db_index=True)),
                ('microsite', models.ForeignKey(to='microsite_configuration.Microsite', on_delete=models.CASCADE)),
            ],
        ),
        migrations.CreateModel(
            name='MicrositeTemplate',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('template_uri', models.CharField(max_length=255, db_index=True)),
                ('template', models.TextField()),
                ('microsite', models.ForeignKey(to='microsite_configuration.Microsite', on_delete=models.CASCADE)),
            ],
        ),
        # FKs on the historical tables are added after Microsite exists;
        # DO_NOTHING + db_constraint=False is the simple-history convention
        # so audit rows survive deletion of the referenced microsite.
        migrations.AddField(
            model_name='historicalmicrositetemplate',
            name='microsite',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, db_constraint=False, blank=True, to='microsite_configuration.Microsite', null=True),
        ),
        migrations.AddField(
            model_name='historicalmicrositeorganizationmapping',
            name='microsite',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.DO_NOTHING, db_constraint=False, blank=True, to='microsite_configuration.Microsite', null=True),
        ),
        # A template URI is unique within a microsite.
        migrations.AlterUniqueTogether(
            name='micrositetemplate',
            unique_together=set([('microsite', 'template_uri')]),
        ),
    ]
|
agpl-3.0
| -6,011,930,392,601,583,000
| 51.140187
| 186
| 0.589353
| false
| 4.318111
| false
| false
| false
|
mbohlool/client-python
|
kubernetes/client/apis/apis_api.py
|
1
|
4259
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import ApiClient
class ApisApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    # NOTE(review): this generated client passes `async` as a keyword
    # argument, which became a reserved word in Python 3.7 — the module as
    # written only parses on older interpreters.  Kept byte-identical.

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is given.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_api_versions(self, **kwargs):
        """
        get available API versions
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_api_versions(async=True)
        >>> result = thread.get()
        :param async bool
        :return: V1APIGroupList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always unwrap the (data, status, headers) tuple for callers.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_api_versions_with_http_info(**kwargs)
        else:
            (data) = self.get_api_versions_with_http_info(**kwargs)
            return data

    def get_api_versions_with_http_info(self, **kwargs):
        """
        get available API versions
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_api_versions_with_http_info(async=True)
        >>> result = thread.get()
        :param async bool
        :return: V1APIGroupList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument the generated API does not know about.
        all_params = []
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_api_versions" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        # /apis/ takes no path/query/body parameters.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api('/apis/', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1APIGroupList',
                                        auth_settings=auth_settings,
                                        async=params.get('async'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
|
apache-2.0
| -4,531,586,328,895,382,000
| 33.346774
| 119
| 0.540268
| false
| 4.685369
| false
| false
| false
|
raphaelvalentin/Utils
|
spectre/syntax/analyse.py
|
1
|
10005
|
import os
from spectre.syntax import Instance, common
from string import *
import numpy as np
__all__ = ['Noise', 'Xf', 'Ac', 'Dc', 'Sweep', 'MonteCarlo', 'Sp', 'Transient', 'Pss' ]
class values(list):
    """A list of sweep values that renders in Spectre netlist syntax.

    ``str(values([1, 2, 'x']))`` -> ``"[1 2 x]"``.  Numeric entries are
    formatted with ``%g``; string entries (e.g. parameter names) are kept
    verbatim.
    """

    def __init__(self, it):
        # Accept any common sequence; a scalar is wrapped in a one-element
        # list.  Tuples are now accepted alongside list/ndarray (previously
        # a tuple was wrapped whole and broke __str__ formatting).
        if isinstance(it, (list, tuple, np.ndarray)):
            list.__init__(self, it)
        else:
            list.__init__(self, [it])

    def __str__(self):
        rendered = []
        for v in self:
            # isinstance (not `type(v) == str`) so str subclasses are also
            # emitted verbatim instead of hitting the "%g" branch.
            if isinstance(v, str):
                rendered.append(v)
            else:
                rendered.append("%g" % v)
        return "[{it}]".format(it=" ".join(rendered))
class Noise(Instance):
    """Spectre ``noise`` analysis statement."""
    __type__ = "analyse"
    __name__ = "noise"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'oprobe': '', 'iprobe': '', 'annotate': 'status', 'oppoint': 'raw_file'}

    def __init__(self, name='noise1', **parameters):
        self['name'] = name
        # Defaults first, then caller overrides.
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """Raw-file name produced by this analysis."""
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Xf(Instance):
    """Spectre ``xf`` (transfer function) analysis statement."""
    __type__ = "analyse"
    __name__ = "xf"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'annotate': 'status', 'oppoint': 'raw_file'}

    def __init__(self, name='xf1', **parameters):
        self['name'] = name
        # Defaults first, then caller overrides.
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """Raw-file name produced by this analysis."""
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Ac(Instance):
    """Spectre ``ac`` (small-signal) analysis statement.  No defaults."""
    __type__ = "analyse"
    __name__ = "ac"
    __pattern__ = "{name} %s {**parameters}" % __name__

    def __init__(self, name='ac1', **parameters):
        self['name'] = name
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """Raw-file name produced by this analysis."""
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Dc(Instance):
    """Spectre ``dc`` (operating point / sweep) analysis statement."""
    __type__ = "analyse"
    __name__ = "dc"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'oppoint': 'rawfile', 'maxiters': 150, 'maxsteps': 10000, 'annotate': 'status'}

    def __init__(self, name='dc1', **parameters):
        self['name'] = name
        # Defaults first, then caller overrides.
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """Raw-file name produced by this analysis."""
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Sweep(Instance):
    """Spectre ``sweep`` analysis; optionally wraps a nested child analysis
    executed at every sweep point."""
    __type__ = "analyse"
    __name__ = "sweep"
    __pattern__ = "{name} %s {**parameters} {child}" % __name__
    __default__ = {'oppoint': 'rawfile'}

    def __init__(self, name='swp1', child=None, **parameters):
        self['name'] = name
        self.update(self.__default__)
        self.update(parameters)
        self['child'] = child

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        child = ''
        if self['child']:
            child = "{\n" + str(self['child']) + "\n}"
        return self.__pattern__.format(**{'name': self['name'],
                          'child': child,
                          '**parameters': " ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name', 'child')])
                          })

    def getNumberOfPoints(self):
        """Number of sweep points implied by the parameters
        (explicit ``values`` list, ``lin``/``log`` counts, or ``step``)."""
        if 'values' in self:
            return len(self['values'])
        elif 'stop' in self:
            if 'lin' in self:
                return self['lin'] + 1
            elif 'log' in self:
                return self['log']
            elif 'step' in self:
                if 'start' in self:
                    return (self['stop'] - self['start']) / self['step'] + 1
                else:
                    return self['stop'] / self['step'] + 1
            else:
                # Spectre's default point count when only `stop` is given.
                return 50
        else:
            return 1

    def getRawFiles(self):
        """Names of the raw files this sweep produces (one per point when
        a child analysis is attached)."""
        raw_files = []
        if self['child']:
            for i in xrange(int(self.getNumberOfPoints())):
                # str.zfill replaces the Python-2-only string.zfill helper.
                raw_files.append("{name}-{number}_{childname}".format(name=self['name'], number=str(i).zfill(3), childname=self['child'].getRawFiles()[0]))
        else:
            # BUG FIX: previously appended a one-element *list*, returning
            # [[name]] instead of [name] like every sibling class.
            raw_files.append('{name}.{extension}'.format(name=self['name'], extension=self.__name__))
        return raw_files
class MonteCarlo(Instance):
    """Spectre ``montecarlo`` analysis wrapping a child analysis that is
    re-run ``numruns`` times."""
    __type__ = "analyse"
    __name__ = "montecarlo"
    __pattern__ = "{name} %s {**parameters} {child}" % __name__
    __default__ = {'variations': 'all', 'numruns': 3, 'savefamilyplots': 'yes', 'saveprocessparams': 'yes', 'processscalarfile': '\"process.dat\"'}

    def __init__(self, name='mc1', child=None, **parameters):
        self['name'] = name
        self.update(self.__default__)
        self.update(parameters)
        self['child'] = child

    def __str__(self):
        child = ''
        if self['child']:
            child = "{\n" + str(self['child']) + "\n}"
        return self.__pattern__.format(**{'name': self['name'],
                          'child': child,
                          '**parameters': " ".join(["%s=%s" % (k, v) for k, v in self.iteritems() if not k in ('name', 'child')])
                          })

    def getRawFiles(self):
        """Names of the raw files produced (one per run when a child
        analysis is attached; runs are numbered from 1)."""
        raw_files = []
        if self['child']:
            # int() guard matches Sweep.getRawFiles and tolerates a
            # numeric-but-not-int numruns value.
            for i in xrange(int(self.getNumberOfPoints())):
                # str.zfill replaces the Python-2-only string.zfill helper.
                raw_files.append("{name}-{number}_{childname}".format(name=self['name'], number=str(i + 1).zfill(3), childname=self['child'].getRawFiles()[0]))
        else:
            # BUG FIX: previously appended a one-element *list*, returning
            # [[name]] instead of [name] like every sibling class.
            raw_files.append('{name}.{extension}'.format(name=self['name'], extension='mc'))
        return raw_files

    def getNumberOfPoints(self):
        """Number of Monte-Carlo runs."""
        return self['numruns']
class Sp(Instance):
    """Spectre ``sp`` (S-parameter) analysis statement."""
    __type__ = "analyse"
    __name__ = "sp"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'annotate': 'status', 'paramtype': 'yz',
                   'oppoint': 'screen', 'datatype': 'realimag'}

    def __init__(self, name='sp1', **parameters):
        self['name'] = name
        # ROBUSTNESS: 'ports' used to be read unconditionally and raised
        # KeyError when omitted; it is now optional.  A list of port names
        # is rendered as a bracketed Spectre vector.
        if 'ports' in parameters and isinstance(parameters['ports'], list):
            parameters['ports'] = "[" + " ".join(parameters['ports']) + "]"
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        parameters = ["%s=%s" % (k, v)
                      for k, v in self.iteritems()
                      if k not in ('name', 'donoise')]
        # CONSISTENCY: match getRawFiles(), which enables noise output only
        # for True/'True'; the old truthiness test also fired for the
        # string 'False'.
        if self.get('donoise') in (True, 'True'):
            parameters.append('donoise=yes')
        return self.__pattern__.format(**{'name': self['name'],
                                          '**parameters': " ".join(parameters),
                                          })

    def getRawFiles(self):
        """Raw-file names: the sp results, plus the noise results when
        ``donoise`` is enabled."""
        if self.get('donoise') in (True, 'True'):
            return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__),
                    '{name}.noise.{extension}'.format(name=self['name'], extension=self.__name__)]
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Transient(Instance):
    """Spectre ``tran`` (transient) analysis statement."""
    __type__ = "analyse"
    __name__ = "tran"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'errpreset': 'conservative', 'write': 'spectre.ic', 'writefinal': 'spectre.fc', 'annotate': 'status', 'maxiters': 5}

    def __init__(self, name='tran1', **parameters):
        self['name'] = name
        # Defaults first, then caller overrides.
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """Raw-file name produced by this analysis."""
        return ['{name}.{extension}'.format(name=self['name'], extension=self.__name__)]
class Pss(Instance):
    """Spectre ``pss`` (periodic steady state) analysis statement."""
    __type__ = "analyse"
    __name__ = "pss"
    __pattern__ = "{name} %s {**parameters}" % __name__
    __default__ = {'errpreset': 'conservative', 'annotate': 'status'}

    def __init__(self, name='pss1', **parameters):
        self['name'] = name
        # Defaults first, then caller overrides.
        self.update(self.__default__)
        self.update(parameters)

    def __str__(self):
        if 'values' in self:
            self['values'] = values(self['values'])
        options = " ".join("%s=%s" % (key, val)
                           for key, val in self.iteritems() if key != 'name')
        return self.__pattern__.format(**{'name': self['name'], '**parameters': options})

    def getRawFiles(self):
        """PSS writes its frequency-domain results to <name>.fd.pss."""
        return ['{name}.fd.{extension}'.format(name=self['name'], extension=self.__name__)]
"""
liberal 1e-3 sigglobal traponly 3.5 0.001 period/50
moderate 1e-3 alllocal gear2only 3.5 0.001 period/200
conservative 1e-4 alllocal gear2only * 0.01 period/200
"""
|
gpl-2.0
| 4,857,034,511,372,369,000
| 39.836735
| 152
| 0.50015
| false
| 3.659473
| false
| false
| false
|
google-research/ssl_detection
|
third_party/tensorpack/tensorpack/input_source/input_source.py
|
1
|
25606
|
# -*- coding: utf-8 -*-
# File: input_source.py
import threading
from contextlib import contextmanager
from itertools import chain
import tensorflow as tf
from ..compat import tfv1
from ..callbacks.base import Callback, CallbackFactory
from ..callbacks.graph import RunOp
from ..dataflow import DataFlow, MapData, RepeatedData, DataFlowTerminated
from ..tfutils.common import get_op_tensor_name
from ..tfutils.dependency import dependency_of_fetches
from ..tfutils.summary import add_moving_summary
from ..tfutils.tower import get_current_tower_context
from ..utils import logger
from ..utils.concurrency import ShareSessionThread
from .input_source_base import InputSource, build_or_reuse_placeholder
try:
from tensorflow.python.ops.data_flow_ops import StagingArea
except ImportError:
pass
__all__ = ['PlaceholderInput', 'FeedInput', 'FeedfreeInput',
'QueueInput', 'BatchQueueInput',
'DummyConstantInput', 'TensorInput',
'ZMQInput', 'TFDatasetInput',
'StagingInput']
def _get_reset_callback(df):
    """Return a callback that resets *df*'s state once at graph-setup time."""
    def _reset(_):
        df.reset_state()
    return CallbackFactory(setup_graph=_reset)
def _make_feeds(placeholders, datapoint):
assert len(datapoint) == len(placeholders), \
"Size of datapoint and placeholders are different: {} != {}".format(
len(datapoint), len(placeholders))
if isinstance(datapoint, (list, tuple)):
return dict(zip(placeholders, datapoint))
elif isinstance(datapoint, dict):
ret = {p: datapoint[p.op.name] for p in placeholders}
return ret
else:
raise TypeError("Got a datapoint of type {}!".format(type(datapoint)))
class PlaceholderInput(InputSource):
    """
    Just produce placeholders as input tensors.
    """
    def __init__(self):
        pass

    def _setup(self, inputs):
        # One (possibly reused) placeholder per input spec.
        placehdrs = []
        for spec in inputs:
            placehdrs.append(build_or_reuse_placeholder(spec))
        self._all_placehdrs = placehdrs

    def _get_input_tensors(self):
        return self._all_placehdrs
class FeedInput(InputSource):
    """
    Input by iterating over a DataFlow and feed datapoints.

    Note:
        If `get_input_tensors()` is called more than one time, it will return the same placeholders (i.e. feed points)
        as the first time.
        Therefore you can't use it for data-parallel training.
    """

    class _FeedCallback(Callback):
        # Per-step callback: pulls the next datapoint from the dataflow and
        # turns it into a feed_dict for the upcoming session.run.
        def __init__(self, ds, placeholders):
            self._ds = ds
            self._itr = self._ds.__iter__()
            self._placeholders = placeholders

        def _before_run(self, _):
            dp = next(self._itr)
            assert len(dp) == len(self._placeholders), "[FeedInput] datapoints and inputs are of different length!"
            feed = _make_feeds(self._placeholders, dp)
            # No extra fetches — only the feed is injected into the run.
            return tfv1.train.SessionRunArgs(fetches=[], feed_dict=feed)

        def _reset(self):
            # Restart iteration from the beginning of the dataflow.
            self._itr = self._ds.__iter__()

    def __init__(self, ds, infinite=True):
        """
        Args:
            ds (DataFlow): the input DataFlow.
            infinite (bool): When set to False, will raise StopIteration when
                ds is exhausted.
        """
        if not isinstance(ds, DataFlow):
            raise ValueError("FeedInput takes a DataFlow! Got {}".format(ds))
        self.ds = ds
        if infinite:
            # -1 means repeat forever.
            self._iter_ds = RepeatedData(self.ds, -1)
        else:
            self._iter_ds = self.ds

    def _size(self):
        # Size of the underlying (non-repeated) dataflow.
        return len(self.ds)

    def _setup(self, inputs):
        # placeholders as input are always safe to reuse.
        self._all_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
        self._cb = self._FeedCallback(self._iter_ds, self._all_placehdrs)

    def _get_input_tensors(self):
        return self._all_placehdrs

    def _reset_state(self):
        self._cb._reset()

    def _get_callbacks(self):
        # Feed callback plus a setup-time reset of the dataflow.
        return [self._cb, _get_reset_callback(self._iter_ds)]
class FeedfreeInput(InputSource):
    """ Abstract base for input without feed,
    e.g. by queue or other operations. """

    def _reset_state(self):
        # Feed-free sources have nothing to reset by default; subclasses
        # override when they hold resettable state (e.g. TFDatasetInput).
        pass
# TODO enqueue_many? https://github.com/tensorflow/tensorflow/issues/7817#issuecomment-282053155
class EnqueueThread(ShareSessionThread):
    # Daemon thread that repeatedly pulls datapoints from a DataFlow and
    # enqueues them into a TF queue, until the queue is closed/cancelled or
    # the dataflow terminates. Supports pause/resume via an Event.
    def __init__(self, queue, ds, placehdrs):
        super(EnqueueThread, self).__init__()
        self.name = 'EnqueueThread ' + queue.name
        self.daemon = True

        self.dataflow = ds
        self.queue = queue
        self.placehdrs = placehdrs

        # Build the enqueue/close ops once, in the graph-construction thread.
        self.op = self.queue.enqueue(self.placehdrs)
        self.close_op = self.queue.close(cancel_pending_enqueues=True)

        # Cleared to pause the loop, set to resume it.
        self._running = threading.Event()
        self._running.set()
        # self._size = queue.size()

    def run(self):
        with self.default_sess():
            try:
                self.reinitialize_dataflow()
                while True:
                    # pausable loop
                    if not self._running.is_set():
                        self._running.wait()

                    dp = next(self._itr)
                    feed = _make_feeds(self.placehdrs, dp)
                    # _, sz = sess.run([self.op, self._sz], feed_dict=feed)
                    self.op.run(feed_dict=feed)
            except (tf.errors.CancelledError, tf.errors.OutOfRangeError):
                # Normal shutdown path: queue was closed or ran out.
                pass
            except DataFlowTerminated:
                logger.info("[EnqueueThread] DataFlow has terminated.")
            except Exception as e:
                # A session closed from another thread surfaces as RuntimeError;
                # that's expected at shutdown, so swallow it quietly.
                if isinstance(e, RuntimeError) and 'closed Session' in str(e):
                    pass
                else:
                    logger.exception("[EnqueueThread] Exception in thread {}:".format(self.name))
            finally:
                try:
                    self.close_op.run()
                except Exception:
                    pass
                logger.info("[EnqueueThread] Thread {} Exited.".format(self.name))

    def reinitialize_dataflow(self):
        # Obtain a fresh iterator over the dataflow.
        self._itr = self.dataflow.__iter__()

    def pause(self):
        self._running.clear()

    def resume(self):
        self._running.set()
class QueueInput(FeedfreeInput):
    """ Enqueue datapoints from a DataFlow to a TF queue.
        And the model receives dequeued tensors.
    """

    def __init__(self, ds, queue=None):
        """
        Args:
            ds(DataFlow): the input DataFlow.
            queue (tf.QueueBase): A :class:`tf.QueueBase` whose type
                should match the corresponding input signature of the model.
                Defaults to a FIFO queue of size 50.
        """
        if not isinstance(ds, DataFlow):
            raise ValueError("QueueInput takes a DataFlow! Got {}".format(ds))
        self.queue = queue
        self.ds = ds
        # Infinite view of the dataflow fed by the enqueue thread.
        self._inf_ds = RepeatedData(ds, -1)
        self._started = False

    def _size(self):
        return len(self.ds)

    def _setup(self, inputs):
        self._input_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
        assert len(self._input_placehdrs) > 0, \
            "QueueInput has to be used with some inputs!"
        with self.cached_name_scope():
            if self.queue is None:
                self.queue = tfv1.FIFOQueue(
                    50, [x.dtype for x in self._input_placehdrs],
                    name='input_queue')
            logger.info("Setting up the queue '{}' for CPU prefetching ...".format(self.queue.name))
            self.thread = EnqueueThread(self.queue, self._inf_ds, self._input_placehdrs)

            # Used by refill_queue() to drain the queue.
            self._dequeue_op = self.queue.dequeue(name='dequeue_for_reset')

    def refill_queue(self):
        """
        Clear the queue, then call dataflow.__iter__() again and fill into the queue.
        """
        self.thread.pause()  # pause enqueue

        opt = tfv1.RunOptions()
        opt.timeout_in_ms = 2000  # 2s
        sess = tfv1.get_default_session()
        # dequeue until empty
        try:
            while True:
                sess.run(self._dequeue_op, options=opt)
        except tf.errors.DeadlineExceededError:
            # Timeout means the queue is (effectively) drained.
            pass

        # reset dataflow, start thread
        self.thread.reinitialize_dataflow()
        self.thread.resume()

    def _create_ema_callback(self):
        """
        Create a hook-only callback which maintain EMA of the queue size.
        Also tf.summary.scalar the EMA.
        """
        with self.cached_name_scope():
            # in TF there is no API to get queue capacity, so we can only summary the size
            size = tf.cast(self.queue.size(), tf.float32, name='queue_size')
            size_ema_op = add_moving_summary(size, collection=None, decay=0.5)[0].op
            ret = RunOp(
                lambda: size_ema_op,
                run_before=False,
                run_as_trigger=False,
                run_step=True)
            ret.name_scope = "InputSource/EMA"
            return ret

    def _get_callbacks(self):
        from ..callbacks.concurrency import StartProcOrThread
        cb = StartProcOrThread(self.thread)
        return [cb, self._create_ema_callback(), _get_reset_callback(self._inf_ds)]

    def _get_input_tensors(self):
        # Dequeue on CPU; the consumer side copies to device as needed.
        with tf.device('/cpu:0'), self.cached_name_scope():
            ret = self.queue.dequeue(name='input_deque')
            if isinstance(ret, tf.Tensor):  # only one input
                ret = [ret]
            assert len(ret) == len(self._input_placehdrs)
            # Restore static shape info lost by the queue.
            for qv, v in zip(ret, self._input_placehdrs):
                qv.set_shape(v.get_shape())
            return ret
class BatchQueueInput(QueueInput):
    """ Enqueue datapoints from a DataFlow to a TF queue.
        And the model receives batches formed by concatenating
        dequeued tensors.
    """

    def __init__(self, ds, batch_size, queue=None):
        """
        Args:
            ds(DataFlow): the input DataFlow.
            batch_size(int): the batch size.
            queue (tf.QueueBase): A :class:`tf.QueueBase` whose type
                should match the corresponding input signature of the model.
                Defaults to a FIFO queue of size 3000.
        """
        super(BatchQueueInput, self).__init__(ds, queue)
        self.batch_size = int(batch_size)

    def _size(self):
        # Number of whole batches, not datapoints.
        return len(self.ds) // self.batch_size

    def _setup(self, inputs):
        logger.info("Setting up the queue for CPU prefetching ...")
        self.input_placehdrs = [build_or_reuse_placeholder(v) for v in inputs]
        assert len(self.input_placehdrs) > 0, \
            "BatchQueueInput has to be used with some input signature!"

        # prepare placeholders without the first (batch) dimension
        placehdrs_nobatch = []
        for p in self.input_placehdrs:
            placehdrs_nobatch.append(tfv1.placeholder(
                dtype=p.dtype, shape=p.get_shape().as_list()[1:],
                name=get_op_tensor_name(p.name)[0] + '-nobatch'))

        # dequeue_many requires fully-defined shapes.
        # BUGFIX: this was two separate statements, so the second string
        # literal was a dead expression and the assertion message was
        # silently truncated; join the halves with parentheses.
        shape_err = ("Use of BatchQueueInput requires inputs to have fully-defined "
                     "shapes except for the batch dimension")
        shapes = []
        for p in placehdrs_nobatch:
            assert p.get_shape().is_fully_defined(), shape_err
            shapes.append(p.get_shape())

        with self.cached_name_scope():
            if self.queue is None:
                # Use tfv1 for consistency with QueueInput (TF2-compat path).
                self.queue = tfv1.FIFOQueue(
                    3000, [x.dtype for x in self.input_placehdrs],
                    shapes=shapes,
                    name='input_queue')
            for shp in self.queue.shapes:
                assert shp.is_fully_defined(), shape_err

        self.thread = EnqueueThread(self.queue, self._inf_ds, placehdrs_nobatch)

    def _get_input_tensors(self):
        with tf.device('/cpu:0'), self.cached_name_scope():
            ret = self.queue.dequeue_many(self.batch_size, name='input_deque')
            if isinstance(ret, tf.Tensor):  # only one input
                ret = [ret]
            assert len(ret) == len(self.input_placehdrs)
            # Restore static shapes, pinning the batch dimension.
            for qv, v in zip(ret, self.input_placehdrs):
                shp = v.get_shape().as_list()
                shp[0] = self.batch_size
                qv.set_shape(shp)
            return ret
# TODO tensor inputs can be drained? look at the new dataset API.
class TensorInput(FeedfreeInput):
    """ Use inputs from a list of tensors, e.g. a TF data reading pipeline.
        The PTB training example shows how to use it.
    """

    def __init__(self, get_tensor_fn, size=None):
        """
        Args:
            get_tensor_fn ( -> [tf.Tensor]): a function which returns a list of input tensors
                (for example, [image, label]) when called.
                It will be called under a TowerContext and should return the inputs to be used in that tower.
                The returned tensors will be evaluated every iteration, it's your job to make sure it's possible.
            size(int): size of this input. Use None to leave it undefined.
        """
        if not callable(get_tensor_fn):
            raise ValueError("get_tensor_fn has to be a function! Got {}".format(get_tensor_fn))
        self.get_tensor_fn = get_tensor_fn
        if size is None:
            self._fixed_size = None
        else:
            fixed = int(size)
            assert fixed > 0
            self._fixed_size = fixed

    def _setup(self, input_signature):
        self._spec = input_signature

    def _size(self):
        if self._fixed_size is None:
            raise NotImplementedError("size of TensorInput is undefined!")
        return self._fixed_size

    def _get_input_tensors(self):
        with self.cached_name_scope():
            tensors = self.get_tensor_fn()
        assert isinstance(tensors, (list, tuple)), "get_tensor_fn needs to return a list!"
        assert len(tensors) == len(self._spec), \
            "get_tensor_fn returns {} tensors but there are {} inputs".format(len(tensors), len(self._spec))
        return tensors
class DummyConstantInput(TensorInput):
    """ Input with a constant zero tensor placed on GPU.
        Useful for debugging performance issues """
    def __init__(self, shapes):
        """
        Args:
            shapes (list[list]): a list of fully-specified shapes.
        """
        self.shapes = shapes
        logger.warn("Using dummy input for debug!")

        def fn():
            # Build one zero-constant per input spec, named per-tower so the
            # same graph can be replicated across towers.
            tlist = []
            ctx = get_current_tower_context()
            assert ctx is not None
            assert len(self.shapes) == len(self._spec)
            for idx, p in enumerate(self._spec):
                tlist.append(tf.constant(
                    0, dtype=p.dtype,
                    name='dummy-{}-{}'.format(p.name, ctx.index),
                    shape=self.shapes[idx]))
            return tlist
        super(DummyConstantInput, self).__init__(fn)
class ZMQInput(TensorInput):
    """
    Receive tensors from a ZMQ endpoint, with ops from https://github.com/tensorpack/zmq_ops.
    It works with :func:`dataflow.remote.send_dataflow_zmq(format='zmq_ops')`.
    """
    def __init__(self, end_point, hwm, bind=True):
        """
        Args:
            end_point (str): the ZMQ endpoint
            hwm (int): the ZMQ high-water-mark
            bind (bool): whether to bind (True) or connect to the endpoint.
        """
        self._end_point = end_point
        self._hwm = int(hwm)
        self._bind = bind

        def fn():
            # Pull one datapoint's worth of tensors and restore static shapes.
            ret = self._zmq_pull_socket.pull()
            assert len(ret) == len(self._spec)
            for qv, v in zip(ret, self._spec):
                qv.set_shape(v.shape)
            return ret
        super(ZMQInput, self).__init__(fn)

    def _setup(self, input_signature):
        super(ZMQInput, self)._setup(input_signature)
        assert len(input_signature) > 0, \
            "ZMQInput has to be used with input signature!"

        # Imported lazily: zmq_ops is an optional dependency.
        import zmq_ops
        self._zmq_pull_socket = zmq_ops.ZMQPullSocket(
            self._end_point,
            [x.dtype for x in input_signature],
            hwm=self._hwm,
            bind=self._bind)
class TFDatasetInput(FeedfreeInput):
    """
    Use a :class:`tf.data.Dataset` instance as input.

    Note:
        1. In training, the given dataset or dataflow has to be infinite
           (you can use :func:`repeat()`, or :class:`RepeatedData` ).
        2. TensorFlow may keep the dataflow alive even if the dataset is no
           longer used.
    """
    def __init__(self, dataset):
        """
        Args:
            dataset (tf.data.Dataset or DataFlow): a dataset, or a dataflow
                to be wrapped into one at setup time.
        """
        if isinstance(dataset, tf.data.Dataset):
            self._dataset = dataset
            self._dataflow = None
        elif isinstance(dataset, DataFlow):
            self._dataset = None
            self._dataflow = dataset
        else:
            raise ValueError("TFDatasetInput takes a tf.data.Dataset or DataFlow! Got {}".format(dataset))

    def _setup(self, input_signature):
        self._spec = input_signature
        if self._dataset is not None:
            # Validate the given dataset against the declared signature.
            # NOTE(review): output_types/output_shapes and
            # make_initializable_iterator are TF1-era APIs — confirm TF version.
            types = self._dataset.output_types
            spec_types = tuple(k.dtype for k in input_signature)
            assert len(types) == len(spec_types), \
                "Dataset and input signature have different length! {} != {}".format(
                    len(types), len(spec_types))
            assert types == spec_types, \
                "Data types of dataset and input signature don't match! {} != {}".format(
                    str(types), str(spec_types))
            shapes = self._dataset.output_shapes
            spec_shapes = [k.shape for k in input_signature]
            for idx, (s1, s2) in enumerate(zip(shapes, spec_shapes)):
                s2 = tf.TensorShape(s2)
                assert s2.is_compatible_with(s1), \
                    "Input signature '{}' has incompatible shape with dataset! {} vs {}".format(
                        input_signature[idx].name, s2, s1)
        else:
            # Wrap the dataflow into a dataset matching the signature dtypes.
            self._dataset = TFDatasetInput.dataflow_to_dataset(self._dataflow, [x.dtype for x in input_signature])

        self._iterator = self._dataset.make_initializable_iterator()
        self._init_op = self._iterator.initializer

    def _reset_state(self):
        # Re-run the iterator initializer to restart the dataset.
        self._init_op.run()

    def _get_input_tensors(self):
        spec_shapes = [k.shape for k in self._spec]
        ret = self._iterator.get_next()
        assert len(ret) == len(spec_shapes), \
            "Dataset returns {} tensors but there are {} inputs!".format(len(ret), len(spec_shapes))
        # Restore static shape info from the signature.
        for t, shp in zip(ret, spec_shapes):
            t.set_shape(shp)
        return ret

    @staticmethod
    def dataflow_to_dataset(df, types):
        """
        Wrap a dataflow to tf.data.Dataset.
        This function will also reset the dataflow.

        If the dataflow itself is finite, the returned dataset is also finite.
        Therefore, if used for training, you'll need to add `.repeat()` on the returned
        dataset.

        Args:
            df (DataFlow): a dataflow which produces lists
            types([tf.DType]): list of types

        Returns:
            (tf.data.Dataset)

        Note:
            TensorFlow may keep the dataflow alive even if the dataset is no
            longer used.
        """
        # TODO theoretically it can support dict
        assert isinstance(df, DataFlow), df
        assert isinstance(types, (list, tuple)), types
        # from_generator needs tuples, so map each datapoint list to a tuple.
        df = MapData(df, tuple)
        df.reset_state()
        ds = tf.data.Dataset.from_generator(
            df.get_data, tuple(types))
        return ds
class StagingInput(FeedfreeInput):
    """
    A wrapper around a feedfree input,
    to prefetch the input in StagingArea (on GPUs).

    It works by registering hooks to put & get tensors into the StagingArea.
    If `get_input_tensors` gets called multiple times,
    it requires that all outputs ever produced by this InputSource will be fetched together.

    This means that in multi-GPU training, you should ensure that each call on `hooked_sess.run`
    depends on either all input tensors on all GPUs, or no input tensors at all.
    As a result you cannot use this InputSource for :class:`InferenceRunner`.

    More than one StagingInput cannot be used together.
    """
    class StagingCallback(Callback):
        """
        A callback registered by this input source, to make sure stage/unstage
        is run at each step.
        """
        def __init__(self, input, nr_stage):
            self.nr_stage = nr_stage
            self._input = input
            self._initialized = False

        def _setup_graph(self):
            self.stage_op = self._input._get_stage_op()
            unstage_ops = self._input._get_unstage_ops()
            unstage_op = tf.group(*unstage_ops, name='unstage_all')
            # Used to detect whether a given run actually fetches the inputs.
            self._check_dependency_op = unstage_ops[0]
            self.fetches = tfv1.train.SessionRunArgs(
                fetches=[self.stage_op, unstage_op])

        def _prefill(self, sess):
            # Stage nr_stage elements ahead so get() never blocks at step 1.
            logger.info("Pre-filling StagingArea ...")
            for _ in range(self.nr_stage):
                self.stage_op.run(session=sess)
            logger.info("{} element{} put into StagingArea on each tower.".format(
                self.nr_stage, "s were" if self.nr_stage > 1 else " was"))

        def _before_run(self, ctx):
            # This has to happen once, right before the first iteration.
            # doing it in `before_train` may not work because QueueInput happens in before_train.
            if not self._initialized:
                self._initialized = True
                self._prefill(ctx.session)

            # Only step the stagingarea when the input is evaluated in this sess.run
            fetches = ctx.original_args.fetches
            if dependency_of_fetches(fetches, self._check_dependency_op):
                # note: this disable nesting of StagingInput
                return self.fetches

    def __init__(self, input, nr_stage=1, device=None):
        """
        Args:
            input (FeedfreeInput):
            nr_stage (int): number of elements to prefetch into each StagingArea, at the beginning.
                Since enqueue and dequeue are synchronized, prefetching 1 element should be sufficient.
            device (str or None): if not None, place the StagingArea on a specific device. e.g., '/cpu:0'.
                Otherwise, they are placed under where `get_inputs_tensors`
                gets called, which could be unspecified in case of simple trainers.
        """
        if not isinstance(input, FeedfreeInput):
            raise ValueError("StagingInput takes a FeedfreeInput! Got {}".format(input))
        if isinstance(input, StagingInput):
            raise ValueError("StagingInput cannot be nested!")

        self._input = input

        self._nr_stage = nr_stage
        self._areas = []        # one StagingArea per tower
        self._stage_ops = []    # one put() op per tower
        self._unstage_ops = []  # one list of get() outputs per tower
        self._device = device

    def _setup(self, inputs):
        self._input.setup(inputs)
        with self.cached_name_scope():
            pass  # just to cache the correct ns to use

    def _get_callbacks(self):
        cbs = self._input.get_callbacks()

        # this callback has to happen after others, so StagingInput can be stacked together
        cbs.append(
            StagingInput.StagingCallback(self, self._nr_stage))
        return cbs

    def _size(self):
        return self._input.size()

    @contextmanager
    def _device_ctx(self):
        # Optionally pin the StagingArea to self._device.
        if not self._device:
            yield
        else:
            with tf.device(self._device):
                yield

    def _get_input_tensors(self):
        inputs = self._input.get_input_tensors()
        with self._device_ctx():
            with self.cached_name_scope():
                # Putting variables to stagingarea will cause trouble
                dtypes = []
                for idx in range(len(inputs)):
                    dtype = inputs[idx].dtype
                    if dtype.base_dtype != dtype:  # is reference type
                        inputs[idx] = tf.identity(inputs[idx])
                    dtypes.append(dtype.base_dtype)

                # TODO tensorflow/benchmarks use static shapes here,
                # though it doesn't seem to help. We can use it when it's known.
                # Setting capacity to 1 to potentially save some memory, because we should
                # expect the consumers to run slower than the producer.
                stage = StagingArea(dtypes, shapes=None, capacity=1)

                # put & get automatically inherit the name scope from the area
                self._stage_ops.append(stage.put(inputs))
                self._areas.append(stage)
                outputs = stage.get()
                if isinstance(outputs, tf.Tensor):  # when size=1, TF doesn't return a list
                    outputs = [outputs]
                for vin, vout in zip(inputs, outputs):
                    vout.set_shape(vin.get_shape())
                self._unstage_ops.append(outputs)
                # self._size_ops.append(stage.size())
                return outputs

    def _get_stage_op(self):
        with self.cached_name_scope():
            return tf.group(*self._stage_ops)

    def _get_unstage_ops(self):
        with self.cached_name_scope():
            all_outputs = list(chain.from_iterable(self._unstage_ops))
            return all_outputs

    # for debugging only
    def _create_ema_callback(self):
        # NOTE(review): self._size_ops is never assigned (the append above is
        # commented out), so calling this would raise AttributeError — this is
        # debug-only dead code; confirm before using.
        def create_ema_op():
            with self.cached_name_scope():
                avg_size = tf.truediv(tf.add_n(self._size_ops), len(self._size_ops), name='avg_stagingarea_size')
                return add_moving_summary(avg_size, collection=None)[0].op
        return RunOp(
            create_ema_op,
            run_before=False,
            run_as_trigger=False,
            run_step=True)
|
apache-2.0
| 7,025,043,686,877,972,000
| 36.218023
| 118
| 0.582012
| false
| 4.073497
| false
| false
| false
|
r3n0us/irhelper
|
modules/cmds/vol_pslist_module.py
|
1
|
23357
|
import re
import collections
import json
import sys
sys.path.append(sys.path[0]+"/../../")
from modules.utils.helper import *
from modules.db import DBops as dbops
##TODO remove sqlite and create dbops
import sqlite3
result = {'status': True, 'message': '', 'cmd_results': '', 'errors': []}
def vol_pslist(project):
    """Run the Volatility process-listing pipeline for *project*.

    Runs pslist/psinfo2/verinfo/procdump plugins (skipping any whose result
    table already exists), runs exiftool over the dumped binaries, then
    analyses the processes and stores everything in the module-level
    ``result`` dict. Python 2 code (``except Exception, e``).
    """
    global result

    ######TEST AREA
    ######TEST AREA

    print_header("Executing vol_pslist...")
    rdb = dbops.DBOps(project.db_name)

    # Basic process list -> "PSList" table (skip if already collected).
    if not rdb.table_exists("PSList"):
        rc, result = execute_volatility_plugin(plugin_type="default",
                                               plugin_name="pslist",
                                               output="db",
                                               result=result,
                                               project=project,
                                               shell=False,
                                               dump=False,
                                               plugin_parms=None)
        if result['status']:
            debug("CMD completed")
        else:
            err(result['message'])

    print("Gathering more process info...")
    # Extended per-process info (full name, cmdline) via the contrib
    # psinfo2 plugin; output is parsed from stdout ("|"-separated lines).
    if not rdb.table_exists("psinfo2"):
        rc, result = execute_volatility_plugin(plugin_type="contrib",
                                               plugin_name="psinfo2",
                                               output="stdout",
                                               result=result,
                                               project=project,
                                               shell=False,
                                               dump=False,
                                               plugin_parms=None)
        if result['status']:
            debug("CMD completed")
        else:
            err(result['message'])

        if result['status']:
            processinfo_data = []
            for line in result['cmd_results'].split("\n"):
                try:
                    psinfo_line = line.rstrip("\n").split("|")
                    psinfo = {}
                    psinfo['process'] = psinfo_line[0]
                    psinfo['process_fullname'] = psinfo_line[1]
                    psinfo['pid'] = psinfo_line[2]
                    psinfo['ppid'] = psinfo_line[3]
                    psinfo['imagepath'] = psinfo_line[4]
                    # Normalize the cmdline field; quotes/spaces are mangled
                    # by the plugin output format.
                    psinfo['cmdline'] = psinfo_line[5].replace(" ","/").split("//")[0].replace("\/\"","|").replace("\"","")
                    if psinfo_line[2] == "4":
                        # PID 4 is the Windows System process; it has no PEB.
                        psinfo['process_fullname'] = "system"
                    processinfo_data.append(psinfo.copy())
                except Exception, e:
                    # Malformed/empty lines are logged and skipped.
                    err(e)
                    debug(line)

            _table_name = "psinfo2"
            rdb = dbops.DBOps(project.db_name)
            rdb.new_table(_table_name, {'process':'text','process_fullname':'text',
                                        'pid':'integer', 'ppid':'text','imagepath':'text',
                                        'cmdline':'text'})
            rdb.insert_into_table(_table_name, processinfo_data)

    # PE version info -> "VerInfo" table.
    if not rdb.table_exists("VerInfo"):
        rc, result = execute_volatility_plugin(plugin_type="default",
                                               plugin_name="verinfo",
                                               output="db",
                                               result=result,
                                               project=project,
                                               shell=False,
                                               dump=False,
                                               plugin_parms=None)
        if result['status']:
            debug("CMD completed")
        else:
            err(result['message'])

    ###Dump pslist processes in dump dir and run checks
    rc, result = execute_volatility_plugin(plugin_type="default",
                                           plugin_name="procdump",
                                           output="stdout",
                                           result=result,
                                           project=project,
                                           shell=True,
                                           dump=True,
                                           plugin_parms=None)

    ##Run exiftool and store information
    if not rdb.table_exists("exiftool"):
        #cmd = "exiftool -j pslist_dump/*"
        # -j = JSON output, -q = quiet; run over the whole dump directory.
        cmd_array = []
        cmd_array.append('exiftool')
        cmd_array.append('-j')
        cmd_array.append('-q')
        cmd_array.append(project.dump_dir)
        debug(cmd_array)
        try:
            rc = subprocess.check_output(cmd_array)
            result['status'] = True
            cmd_out = rc
        except subprocess.CalledProcessError as e:
            result['status'] = False
            result['message'] = "Exception: exiftool plugin failed!"
            err(result['message'])

        if result['status']:
            debug("Loading exiftool results to DB")
            try:
                jdata = json.loads(cmd_out)
                # Collect the union of keys across all rows so the table
                # schema covers every field exiftool produced.
                jdata_keys = []
                for i in jdata:
                    for n in i.keys():
                        if n not in jdata_keys:
                            jdata_keys.append(n)
                table_columns = {}
                for x in jdata_keys:
                    table_columns[x] = "text"
                _table_name = "exiftool"
                rdb = dbops.DBOps(project.db_name)
                rdb.new_table_from_keys(_table_name, table_columns)
                rdb.insert_into_table(_table_name, jdata)
                result['cmd_results'] = "PS info finished"
            except Exception as e:
                err("Error running exiftool")
                result['errors'].append(e)

    ##Now run the analyser code
    violations, plist = analyse_processes(project)
    result['cmd_results'] = {'violations': [], 'plist': [],
                             'plist_extended': [],
                             'suspicious_processes': [],}
    result['cmd_results']['plist'] = plist
    result['cmd_results']['violations'] = violations
    enrich_exif_with_shanon_entropy()
    calculate_md5()
    epslist_data = enrich_pslist(project, plist)
    result['cmd_results']['plist_extended'] = epslist_data

    # Cross-reference psxview/apihooks/malfind risk scores with the pslist
    # names to build the suspicious-process summary.
    risk_list = analyse_scan_processes(project)
    suspicious_plist = []
    for p in risk_list:
        suspicious_process = {}
        suspicious_process['pid'] = p
        suspicious_process['risk'] = risk_list[p]
        for i in plist:
            if str(i['pid']) == str(p):
                suspicious_process['name'] = i['name']
                break
        suspicious_plist.append(suspicious_process.copy())
    result['cmd_results']['suspicious_processes'] = suspicious_plist
def enrich_pslist(project, plist):
    """Join exiftool metadata rows with process names from *plist*.

    Args:
        project: current project; provides ``db_name``.
        plist: list of process dicts (as produced by ``analyse_processes``),
            each with at least ``pid`` and ``name`` keys.

    Returns:
        list[dict]: exiftool rows augmented with ``pid``, ``process_name``
        (when a matching pid is found) and ``sn_level`` (entropy class).
    """
    rdb = dbops.DBOps(project.db_name)
    query = "select FileName,CompanyName,OriginalFileName," \
            "FileDescription,FileSize,LegalCopyright,FileDescription,md5," \
            "InternalName,sentropy from exiftool"
    jdata = rdb.sqlite_query_to_json(query)
    for entry in jdata:
        # Dumped files are named "<exe>.<pid>.<ext>", so the second
        # dot-component is the pid — TODO confirm procdump naming convention.
        pid = entry['FileName'].split(".")[1]
        entry['pid'] = pid
        for e in plist:
            if str(pid) == str(e['pid']):
                entry['process_name'] = e['name']
        entry['sn_level'] = check_entropy_level(entry['sentropy'])
    return jdata
def calculate_md5(db_name="results.db"):
    """Compute and store the MD5 of every dumped file in the exiftool table.

    Adds an ``md5`` column (if missing) and fills it per row, keyed on
    ``SourceFile``. Hash failures (e.g. missing file) are logged and skipped.

    Args:
        db_name (str): path of the sqlite results database. Defaults to
            "results.db" for backward compatibility with existing callers.
    """
    print_header("Calculating MD5 of dumped files. This may take a while")
    rdb = dbops.DBOps(db_name)
    rdb.patch_table('exiftool', 'md5', 'text')
    rows = rdb.get_all_rows('exiftool')
    for rs in rows:
        try:
            md5 = md5sum(rs['SourceFile'])
            # Update the row we just hashed, keyed by its SourceFile.
            rdb.update_value("exiftool", "md5", str(md5),
                             "SourceFile", rs["SourceFile"])
        except Exception as e:
            err(e)
def enrich_exif_with_shanon_entropy(db_name="results.db"):
    '''
    The information returned from the exiftool and psinfo contains a lot of
    information about the extracted files. To have a more complete view of
    the extracted files we can also add entropy information.

    Adds a ``sentropy`` REAL column to the exiftool table (if missing) and
    fills it per row, keyed on ``SourceFile``.

    @param db_name: path of the sqlite results database (defaults to
        "results.db" for backward compatibility with existing callers)
    '''
    print_header("Calculating entropy of dumped files. This may take a while")
    get_a_cofee()
    rdb = dbops.DBOps(db_name)
    rdb.patch_table('exiftool', 'sentropy', 'REAL')
    rows = rdb.get_all_rows('exiftool')
    for rs in rows:
        try:
            sn = str(calculate_shanon_entropy_file(rs['SourceFile']))
            rdb.update_value("exiftool", "sentropy", sn,
                             "SourceFile", rs["SourceFile"])
        except Exception as e:
            # Previously swallowed silently; log like calculate_md5 does so
            # unreadable files are visible, while staying best-effort.
            err(e)
def analyse_processes(project):
    '''
    Check all running processes against a per-OS rule table to verify that
    each well-known Windows process has the expected parent, image path,
    session, priority etc. Also flags duplicate "singleton" processes and
    process names that closely resemble well-known ones (typosquatting).
    Some ideas like the rules format has been taken from DAMM - @ 504ENSICS Labs

    @param project: current project; provides get_volatility_profile()
    @return: (violations, target_process_list) — a list of violation dicts
        and the enriched process list built from psinfo2 + pslist.
    '''
    print_header("Analysing processes")
    violations = []
    violations_count = 0
    violation_message = {'process':'','rule': '','details':''}

    # Expected attributes of core Windows processes, per OS family.
    # NOTE(review): values are compared case-insensitively as strings below.
    known_processes_XP = {
        'system' : { 'pid' : 4, 'imagepath' : '', 'user_account' : 'Local System', 'parent' : 'none', 'singleton' : True, 'prio' : '8' },
        'smss.exe' : {'imagepath' : 'windows\System32\smss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'system', 'singleton' : True, 'session' : '', 'prio' : '11' },
        'lsass.exe' : {'imagepath' : 'windows\system32\lsass.exe', 'user_account' : 'Local System', 'parent' : 'winlogon.exe', 'singleton' : True, 'session' : '0', 'prio' : '9', 'childless' : True, 'starts_at_boot' : True, 'starts_at_boot' : True },
        'winlogon.exe' : {'imagepath' : 'windows\system32\winlogon.exe', 'user_account' : 'Local System', 'session' : '0', 'prio' : '13' },
        'csrss.exe' : {'imagepath' : 'windows\system32\csrss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'session' : '0', 'prio' : '13', 'starts_at_boot' : True },
        'services.exe' : {'imagepath' : 'windows\system32\services.exe' , 'parent' : 'winlogon.exe', 'session' : '0', 'prio' : '9', 'starts_at_boot' : True },
        'svchost.exe' : {'imagepath' : 'windows\System32\svchost.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM', 'LOCAL SERVICE', 'NETWORK SERVICE'], 'parent' : 'services.exe', 'singleton' : False, 'session' : '0', 'prio' : '8', 'starts_at_boot' : True },
        'explorer.exe' : {'imagepath' : 'windows\explorer.exe' , 'prio' : '8' },
    }

    ###Notes:
    ###wininit.exe starts from an instance of smss.exe that exits so most likely the parent does not exist
    known_processes_Vista = {
        'system' : { 'pid' : 4, 'image_path' : '', 'user_account' : 'Local System', 'parent' : 'none', 'singleton' : True, 'prio' : '8' },
        'smss.exe' : {'image_path' : 'windows\System32\smss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'system', 'singleton' : True, 'session' : '', 'prio' : '11' },
        'wininit.exe' : {'image_path' : 'windows\System32\wininit.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'none', 'session' : '0', 'children' : False, 'prio' : '13', 'starts_at_boot' : True },
        'lsass.exe' : {'image_path' : 'windows\system32\lsass.exe' , 'user_account' : 'Local System', 'parent' : 'wininit.exe', 'singleton' : True, 'session' : '0', 'prio' : '9', 'childless' : True, 'starts_at_boot' : True },
        'winlogon.exe' : {'image_path' : 'windows\system32\winlogon.exe' , 'user_account' : 'Local System', 'session' : '1' , 'prio' : '13'},
        'csrss.exe' : {'image_path' : 'windows\system32\csrss.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'prio' : '13', 'starts_at_boot' : True },
        'services.exe' : {'image_path' : 'windows\system32\services.exe' , 'parent' : 'wininit.exe', 'session' : '0', 'prio' : '9', 'starts_at_boot' : True },
        'svchost.exe' : {'image_path' : 'windows\System32\svchost.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM', 'LOCAL SERVICE', 'NETWORK SERVICE'], 'parent' : 'services.exe', 'singleton' : False, 'session' : '0', 'prio' : '8', 'starts_at_boot' : True },
        'lsm.exe' : {'image_path' : 'windows\System32\lsm.exe' , 'user_account' : ['NT AUTHORITY\SYSTEM'], 'parent' : 'wininit.exe', 'session' : '0', 'prio' : '8', 'childless' : True, 'starts_at_boot' : True },
        'explorer.exe' : {'image_path' : 'windows\explorer.exe' , 'prio' : '8' },
    }

    ##First we need to construct relevant process information structure so
    #we can easily verify them
    ##for every process in our running list
    ##{pid:2,ppid:3,path:xxx}
    ## check by name
    ## example:
    ## get the element with name system from our list and check if each key matches the required value
    #process_fullname|process |pid|ppid|imagepath |Hnds|Sess|Thds
    #NoPEB           |System  |4  |0   |NoPEB     |1003|-1  |65
    ##TODO: here we need a more novel approach for the violation checks
    ## to minimise false positives . Not all information is available sometimes
    ##First put all processes from pslist with enriched info into an array
    con = sqlite3.connect('results.db')
    con.row_factory = sqlite3.Row
    cur = con.cursor()
    # Join psinfo2 and pslist on pid; subquery resolves the parent's name.
    cur.execute('select psinfo2.process_fullname,psinfo2.process,psinfo2.pid,psinfo2.ppid,'
                'psinfo2.imagepath,pslist.hnds,pslist.sess,pslist.thds, '
                '(SELECT ps2.process_fullname FROM psinfo2 ps2 WHERE ps2.pid = psinfo2.ppid) AS parentname'
                ' from psinfo2 inner join pslist on psinfo2.pid = pslist.pid')
    rows = cur.fetchall()

    target_process_list = []
    full_pslist_dict = {}
    for rs in rows:
        ps = {}
        ps['pid'] = rs['pid']
        # Strip drive prefixes / "\??\" and normalize "systemroot" so the
        # path is comparable with the rule tables above.
        ps['imagepath'] = str(rs['imagepath']).lower().lstrip("c:/\/")
        ps['imagepath'] = str(ps['imagepath']).lstrip('??/\/\c:/\/\/')
        ps['imagepath'] = str(ps['imagepath']).replace('systemroot','windows')
        if ps['imagepath'] == "nopeb":
            ps['imagepath'] = ''
        ps['ppid'] = rs['ppid']
        ps['parent'] = str(rs['parentname']).lower()
        if rs['ppid'] == "4":
            ps['parent'] = "system"
        ps['name'] = rs['process'].lower()
        if rs['process'].lower() == "system":
            ps['fullname'] = str(rs['process']).lower()
        else:
            ps['fullname'] = rs['process_fullname'].lower()
        target_process_list.append(ps.copy())
        full_pslist_dict[ps['name']] = ps.copy()

    # Pick the rule table matching the memory image's Volatility profile.
    if str(project.get_volatility_profile()).startswith("WinXP") \
            or str(project.get_volatility_profile()).startswith("Win2003"):
        rule_list = known_processes_XP
    else:
        rule_list = known_processes_Vista

    # Rule check: for each known process, compare every rule attribute the
    # observed process also carries.
    for key in rule_list:
        for process in target_process_list:
            if re.search(process['name'], key, re.IGNORECASE):
                for check in rule_list[key]:
                    if check in process:
                        ###NOt all have peb information
                        if not str(process[check]).lower() == str(rule_list[key][check]).lower() and str(process[check]).lower() != "nopeb":
                            print("Violation detected on: [%s] Actual value: [%s] Expected value: [%s]" %(check, process[check], rule_list[key][check]))
                            print(process)
                            violations_count += 1
                            violation_message['id'] = violations_count
                            violation_message['process'] = process
                            violation_message['rule'] = check
                            violation_message['details'] = ("Violation detected on: [%s] Actual value: [%s] Expected value: [%s]" %(check,process[check],rule_list[key][check]))
                            violations.append(violation_message.copy())

    ##Check for singleton violations as DAMM call it
    processes = []
    for process in target_process_list:
        processes.append(str(process['name']).lower())
    counter=collections.Counter(processes)
    for key in rule_list:
        if key in processes and "singleton" in rule_list[key]:
            if int(counter[key]) > 1 and rule_list[key]['singleton']:
                print("Violation detected on: [singleton] condition from [%s] Actual value: [%s]" %(key,int(counter[key])))
                violations_count += 1
                violation_message['id'] = violations_count
                violation_message['process'] = full_pslist_dict[key]
                violation_message['rule'] = "[Singleton]"
                violation_message['details'] = ("Violation detected on: [singleton] condition from [%s] Actual value: [%s]" %(key,int(counter[key])))
                violations.append(violation_message.copy())
                print(full_pslist_dict[key])

    ####Lets try to detect similar wording in well known processes
    usual_suspects = ['smss.exe', 'wininit.exe','csrss.exe','svchost.exe',
                      'lsass.exe','lsm.exe','wmpnetwk.exe','wuauclt.exe']
    ##Injecting bad process names
    #target_process_list.append("scvhost.exe")
    #target_process_list.append("lsa.exe")
    # NOTE(review): `process` here is the whole dict, not process['name'];
    # score_jaro_distance presumably stringifies it — verify, this looks like
    # it should compare the name only.
    for process in target_process_list:
        for suspect in usual_suspects:
            flag, score = score_jaro_distance(process,suspect)
            if flag:
                print("Possible culrpit process detected: [%s] resembles to: [%s] Score: [%s]" %(process,suspect,score))
                violations_count += 1
                violation_message['id'] = violations_count
                violation_message['process'] = process
                violation_message['rule'] = "[Culrpit]"
                violation_message['details'] = ("Possible culrpit process detected: [%s] resembles to: [%s] Score: [%s]" %(process,suspect,score))
                violations.append(violation_message.copy())

    return violations, target_process_list
def analyse_scan_processes(_project):
    """Cross-reference psxview, apihooks and malfind output to score processes.

    Each volatility plugin is run only if its output table is not already
    cached in the project database.  Every PID flagged by a plugin earns one
    risk point, so the returned dict maps PID -> risk score in 1..3
    (3 = flagged by all three sources).

    :param _project: project context carrying the DB name and image settings
    :return: dict mapping PID -> integer risk score
    """
    global result
    print_header("Gathering information from scan process")
    rdb = dbops.DBOps(_project.db_name)

    def _ensure_plugin_table(table_name, plugin_name):
        # Run the given contrib plugin (results stored in the DB) unless its
        # table already exists from a previous run.
        global result
        if not rdb.table_exists(table_name):
            rc, result = execute_volatility_plugin(plugin_type="contrib",
                                                   plugin_name=plugin_name,
                                                   output="db",
                                                   result=result,
                                                   project=_project,
                                                   shell=False,
                                                   dump=False,
                                                   plugin_parms=None)
            if result['status']:
                debug("CMD completed")
            else:
                err(result['message'])

    _ensure_plugin_table("PsXview", "psxview")
    _ensure_plugin_table("ApiHooks", "apihooks")
    _ensure_plugin_table("Malfind", "malfind")
    ## PIDs flagged by each detection source.
    psxview = []
    apihooked = []
    malfinded = []
    process_risk = {}
    ## Processes visible to psscan (potentially hidden from pslist).
    #select * from psxview where pslist="False" and psscan="True" and exittime="";
    if rdb.table_exists("PsXview"):
        query = "select * from psxview where psscan=\"True\""
        jdata = rdb.sqlite_query_to_json(query)
        for entry in jdata:
            psxview.append(entry['PID'])
            process_risk[entry['PID']] = 1
    else:
        err("No PSXView data")
    ## Processes with hooked API functions.
    if rdb.table_exists("ApiHooks"):
        query = "select PID, Process, VictimModule, Function from ApiHooks"
        jdata = rdb.sqlite_query_to_json(query)
        for entry in jdata:
            apihooked.append(entry['PID'])
            process_risk[entry['PID']] = 2 if entry['PID'] in psxview else 1
    else:
        err("No ApiHooks data")
    ## Processes with suspicious (possibly injected) memory regions.
    if rdb.table_exists("Malfind"):
        query = "select Pid, Process from Malfind group by Pid"
        jdata = rdb.sqlite_query_to_json(query)
        for entry in jdata:
            malfinded.append(entry['Pid'])
            # Risk = 1 point per source that flagged this PID.  Bugfix: a PID
            # reported only by malfind previously received no score at all.
            process_risk[entry['Pid']] = 1 + int(entry['Pid'] in apihooked) + int(entry['Pid'] in psxview)
    else:
        err("No Malfind data")
    # Simple heuristic scoring; false positives are expected.
    debug("Process risk list:%s " % process_risk)
    return process_risk
def get_result():
    """Return the module-level ``result`` dict shared by the plugin helpers."""
    return result
def show_json(in_response):
    """Pretty-print *in_response* as indented JSON (debug/inspection helper)."""
    rendered = json.dumps(in_response, sort_keys=False, indent=4)
    print(rendered)
if __name__ == "__main__":
    # Standalone entry point.  Usage: script.py <memory_image> <volatility_profile>
    print("Python version: %s\n " %sys.version)
    DB_NAME = "results.db"
    set_debug(True)
    ## Get module parameters from the command line
    ## (no validation: missing arguments raise IndexError).
    image = sys.argv[1]
    profile = sys.argv[2]
    ## Build the project context (DB in the script's directory) and run pslist.
    current_wd = sys.path[0]
    project = Project(current_wd)
    project.init_db(DB_NAME)
    project.set_volatility_profile(profile)
    project.set_image_name(image)
    vol_pslist(project)
    show_json(get_result())
|
gpl-3.0
| 8,066,239,497,995,908,000
| 39.550347
| 258
| 0.518046
| false
| 3.958143
| false
| false
| false
|
jim-cooley/abletonremotescripts
|
remote-scripts/samples/Twister Ableton Script v1.2.2/Twister/Twister.py
|
1
|
78676
|
from __future__ import with_statement
import Live
import time
import math
import sys
from _Framework.ButtonElement import ButtonElement # Class representing a button a the controller
from _Framework.ButtonMatrixElement import ButtonMatrixElement # Class representing a 2-dimensional set of buttons
from _Framework.ChannelStripComponent import ChannelStripComponent # Class attaching to the mixer of a given track
from _Framework.ClipSlotComponent import ClipSlotComponent # Class representing a ClipSlot within Live
from _Framework.CompoundComponent import CompoundComponent # Base class for classes encompasing other components to form complex components
from _Framework.ControlElement import ControlElement # Base class for all classes representing control elements on a controller
from _Framework.ControlSurface import ControlSurface # Central base class for scripts based on the new Framework
from _Framework.ControlSurfaceComponent import ControlSurfaceComponent # Base class for all classes encapsulating functions in Live
from _Framework.DeviceComponent import DeviceComponent # Class representing a device in Live
from _Framework.EncoderElement import EncoderElement # Class representing a continuous control on the controller
from _Framework.InputControlElement import * # Base class for all classes representing control elements on a controller
from _Framework.MixerComponent import MixerComponent # Class encompassing several channel strips to form a mixer
from _Framework.ModeSelectorComponent import ModeSelectorComponent # Class for switching between modes, handle several functions with few controls
from _Framework.NotifyingControlElement import NotifyingControlElement # Class representing control elements that can send values
from _Framework.SceneComponent import SceneComponent # Class representing a scene in Live
from _Framework.SessionComponent import SessionComponent # Class encompassing several scene to cover a defined section of Live's session
from _Framework.SessionZoomingComponent import DeprecatedSessionZoomingComponent as SessionZoomingComponent # Class using a matrix of buttons to choose blocks of clips in the session
from _Framework.SliderElement import SliderElement # Class representing a slider on the controller
from VCM600.MixerComponent import MixerComponent
from VCM600.TrackFilterComponent import TrackFilterComponent
from _Framework.TransportComponent import TransportComponent # Class encapsulating all functions in Live's transport section
from _Mono_Framework.CodecEncoderElement import CodecEncoderElement
from _Mono_Framework.EncoderMatrixElement import EncoderMatrixElement
from _Mono_Framework.MonoChopperComponent import MonoChopperComponent
from _Mono_Framework.MonoBridgeElement import MonoBridgeElement
from _Mono_Framework.MonoButtonElement import MonoButtonElement
from _Mono_Framework.MonoEncoderElement import MonoEncoderElement
from _Mono_Framework.ResetSendsComponent import ResetSendsComponent
from _Mono_Framework.DetailViewControllerComponent import DetailViewControllerComponent
from _Mono_Framework.DeviceSelectorComponent import DeviceSelectorComponent
from _Mono_Framework.MonomodComponent import MonomodComponent
from _Mono_Framework.MonoDeviceComponent import MonoDeviceComponent
from _Mono_Framework.SwitchboardElement import SwitchboardElement
from _Mono_Framework.MonoClient import MonoClient
from _Mono_Framework.LiveUtils import *
from _Generic.Devices import *
from ModDevices import *
from Map import *
class ShiftModeComponent(ModeSelectorComponent):
    """Mode selector with toggle-off behaviour: pressing the button of the
    already-active mode returns to mode 0 (the base/unshifted mode)."""

    def __init__(self, script, callback, *a, **k):
        super(ShiftModeComponent, self).__init__(*a, **k)
        self._script = script
        self.update = callback  # host-supplied refresh callback, called on every mode change
        self._modes_buttons = []
        self._last_mode = 0
        self._set_protected_mode_index(0)

    def set_mode_buttons(self, buttons):
        """Attach the physical buttons used to select modes, replacing any
        previously registered set and its listeners."""
        for button in self._modes_buttons:
            button.remove_value_listener(self._mode_value)
        self._modes_buttons = []
        if (buttons != None):
            for button in buttons:
                # Bugfix: `isinstance(x, A or B)` only ever tested A (and
                # FlashingButtonElement is not imported in this module), so
                # check ButtonElement explicitly.
                assert isinstance(button, ButtonElement)
                identify_sender = True
                button.add_value_listener(self._mode_value, identify_sender)
                self._modes_buttons.append(button)

    def number_of_modes(self):
        return 5

    def set_mode(self, mode):
        assert isinstance(mode, int)
        mode += 1  # button index 0..3 maps to mode 1..4; mode 0 means "off"
        assert (mode in range(self.number_of_modes()))
        if (self._mode_index != mode):
            self._mode_index = mode
            self.update()
        elif (self._mode_index != 0):
            # Pressing the active mode's button again toggles back to mode 0.
            self._mode_index = 0
            self.update()

    def _mode_value(self, value, sender):
        assert (len(self._modes_buttons) > 0)
        assert isinstance(value, int)
        assert isinstance(sender, ButtonElement)
        assert (self._modes_buttons.count(sender) == 1)
        # Bugfix: compare by value, not identity (`value is not 0` relied on
        # CPython small-int caching).
        if ((value != 0) or (not sender.is_momentary())):
            self.set_mode(self._modes_buttons.index(sender))
class MonomodModeComponent(ModeSelectorComponent):
    __module__ = __name__
    __doc__ = ' Class for switching between modes, handle several functions with few controls '

    def __init__(self, script, *a, **k):
        super(MonomodModeComponent, self).__init__(*a, **k)
        self._script = script
        self._set_protected_mode_index(0)

    def set_mode_buttons(self, buttons):
        """Rebind the mode-select buttons, detaching listeners from any
        previously assigned set, then refresh their LEDs."""
        for button in self._modes_buttons:
            button.remove_value_listener(self._mode_value)
        self._modes_buttons = []
        if (buttons != None):
            for button in buttons:
                assert isinstance(button, ButtonElement)
                identify_sender = True
                button.add_value_listener(self._mode_value, identify_sender)
                self._modes_buttons.append(button)
        for index in range(len(self._modes_buttons)):
            # Light only the LED of the currently active mode.
            if (index == self._mode_index):
                self._modes_buttons[index].turn_on()
            else:
                self._modes_buttons[index].turn_off()

    def set_mode_toggle(self, button):
        """Assign (or clear, with None) the single mode-toggle button."""
        # Bugfix: `isinstance(x, A or B)` evaluated as `isinstance(x, A)` only
        # (and FlashingButtonElement is not imported in this module), so check
        # ButtonElement explicitly.
        assert ((button == None) or isinstance(button, ButtonElement))
        if (self._mode_toggle != None):
            self._mode_toggle.remove_value_listener(self._toggle_value)
        self._mode_toggle = button
        if (self._mode_toggle != None):
            self._mode_toggle.add_value_listener(self._toggle_value)

    def number_of_modes(self):
        return 2
class CntrlrDetailViewControllerComponent(DetailViewControllerComponent):
    """Detail-view navigation that additionally refreshes the selected device
    after an unshifted nav-button press."""

    def __init__(self, script, *a, **k):
        super(CntrlrDetailViewControllerComponent, self).__init__(*a, **k)
        self._script = script

    def _nav_value(self, value, sender):
        super(CntrlrDetailViewControllerComponent, self)._nav_value(value, sender)
        if (self.is_enabled() and (not self._shift_pressed)):
            if ((not sender.is_momentary()) or (value != 0)):
                # Removed dead local `modifier_pressed` (assigned, never read).
                # De Morgan: original `not (not A or not B)` is simply `A and B`.
                view = self.application().view
                if view.is_view_visible('Detail') and view.is_view_visible('Detail/DeviceChain'):
                    self._script._update_selected_device()
class CntrlrSwitchboardElement(SwitchboardElement):
    """Cntrlr-specific switchboard element; adds no behaviour of its own."""

    def __init__(self, *a, **k):
        # Bugfix: super() previously referenced the nonexistent name
        # `CntrlrSwitchboardComponent`, raising NameError on instantiation.
        super(CntrlrSwitchboardElement, self).__init__(*a, **k)
class CntrlrMonoDevice(MonoDeviceComponent):
    """Cntrlr-specific MonoDeviceComponent; currently a pure passthrough."""
    def __init__(self, *a, **k):
        super(CntrlrMonoDevice, self).__init__(*a, **k)
class CntrlrMonoClient(MonoClient):
    """Mod client tailored to the CNTRLR hardware.

    Translates grid/key/dial events between the m4l mod patch and the
    physical controls, and can optionally mirror each control's raw MIDI
    message to the patch (see ``set_raw_enabled``)."""

    def __init__(self, *a, **k):
        super(CntrlrMonoClient, self).__init__(*a, **k)
        self._raw = False  # when True, the _send_c_* methods also forward raw MIDI

    def _banner(self):
        # Suppress the banner output of the generic MonoClient.
        pass

    def disconnect_client(self, *a, **k):
        super(CntrlrMonoClient, self).disconnect_client(*a, **k)
        if not self._mod_dial == None:
            # Only release the dial if it is still bound to our mod parameter.
            if self._mod_dial._parameter is self._mod_dial_parameter:
                self._mod_dial.release_parameter()

    # -- send methods (controller -> m4l patch) --
    def _send_c_key(self, index, value, *a, **k):
        self._send('key', index, value)
        if self._raw is True:
            control = self._host._host._keys[index]
            if control != None:
                self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)

    def _send_c_grid(self, column, row, value, *a, **k):
        self._send('grid', column, row, value)
        if self._raw is True:
            control = self._host._host._grid.get_button(column, row)
            if control != None:
                self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)

    def _send_c_dial(self, column, row, value, *a, **k):
        self._send('dial', column, row, value)
        if self._raw is True:
            control = self._host._host._dial_matrix.get_dial(column, row)
            if control != None:
                self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)

    def _send_c_dial_button(self, column, row, value, *a, **k):
        if row > 0:
            # Row 0 is not a dial-button row on the CNTRLR layout.
            self._send('dial_button', column, row-1, value)
            if self._raw is True:
                control = self._host._host._dial_button_matrix.get_button(column, row)
                if control != None:
                    self._send('raw', control._msg_type + control._original_channel, control._original_identifier, value)

    # Non-CNTRLR send variants are deliberately disabled for this surface.
    def _send_key(self, *a):
        pass

    def _send_grid(self, *a):
        pass

    def _send_dial(self, *a):
        pass

    def _send_dial_button(self, *a):
        pass

    # -- receive methods (from m4l): thin aliases onto the _c_ variants --
    def receive_key(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_key(*a, **k)

    def receive_grid(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_grid(*a, **k)

    def receive_grid_row(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_grid_row(*a, **k)

    def receive_grid_column(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_grid_column(*a, **k)

    def receive_grid_all(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_grid_all(*a, **k)

    def receive_mask_key(self, *a, **k):
        super(CntrlrMonoClient, self).receive_mask_c_key(*a, **k)

    def receive_mask_grid(self, *a, **k):
        super(CntrlrMonoClient, self).receive_mask_c_grid(*a, **k)

    def receive_mask_column(self, *a, **k):
        super(CntrlrMonoClient, self).receive_mask_c_column(*a, **k)

    def receive_mask_row(self, *a, **k):
        # Bugfix: the original signature was (self, row, value) but the body
        # forwarded the undefined names *a/**k, raising NameError when called.
        super(CntrlrMonoClient, self).receive_mask_c_row(*a, **k)

    def receive_mask_all(self, *a, **k):
        super(CntrlrMonoClient, self).receive_mask_c_all(*a, **k)

    def receive_wheel(self, *a, **k):
        super(CntrlrMonoClient, self).receive_c_wheel(*a, **k)

    def set_local_ring_control(self, *a, **k):
        super(CntrlrMonoClient, self).set_c_local_ring_control(*a, **k)

    def set_absolute_mode(self, *a, **k):
        super(CntrlrMonoClient, self).set_c_absolute_mode(*a, **k)

    def receive_mod_color(self, val):
        # val == 1 is treated as "no color change" by the patch protocol
        # (presumed from usage here -- TODO confirm against the m4l patch).
        if val != 1:
            self._mod_color = val
            self._host.shift_update()

    # -- raw data integration --
    def set_raw_enabled(self, value):
        """Enable/disable mirroring of raw MIDI data to the patch."""
        self._raw = value > 0
        #self._host.log_message('raw enabled' + str(self._raw))
        if(self._raw is True):
            self._update_controls_dictionary()

    def receive_raw(self, Type, Identifier, value):
        """Dispatch a raw (type, identifier) message to the mapped control callback."""
        #self._host.log_message('recieve raw' + str(Type) + str(Identifier) + str(value))
        if self._controls[Type]:
            if Identifier in self._controls[Type]:
                self._controls[Type][Identifier](value)

    def _update_controls_dictionary(self):
        # Build the identifier -> callback lookup used by receive_raw().
        # Index 0 holds note-style controls (grid, keys, dial buttons),
        # index 1 holds CC-style controls (dials).
        if self._host._host != None:
            self._controls = [{}, {}]
            if self._control_defs['grid'] != None:
                for column in range(self._control_defs['grid'].width()):
                    for row in range(self._control_defs['grid'].height()):
                        button = self._control_defs['grid'].get_button(column, row)
                        if button != None:
                            self._controls[0][button._original_identifier]=self._make_grid_call(column, row)
            if self._control_defs['keys'] != None:
                for index in range(len(self._control_defs['keys'])):
                    key = self._control_defs['keys'][index]
                    if key != None:
                        self._controls[0][key._original_identifier]=self._make_key_call(index)
            if self._control_defs['dials'] != None:
                for index in range(12):
                    column = index%4
                    row = int(index/4)
                    dial = self._control_defs['dials'].get_dial(column, row)
                    if dial != None:
                        self._controls[1][dial._original_identifier]=self._make_dial_call(index)
            if self._control_defs['buttons'] != None:
                for index in range(8):
                    column = index%4
                    row = int(index/4)+1
                    button = self._control_defs['buttons'].get_button(column, row)
                    if button != None:
                        self._controls[0][button._original_identifier]=self._make_dial_button_call(index+4)

    def _make_grid_call(self, column, row):
        def receive_grid(value):
            #self._host.log_message('receive grid' + str(value) + str(column) + str(row))
            self.receive_c_grid(column, row, value)
        return receive_grid

    def _make_key_call(self, number):
        def receive_key(value):
            #self._host.log_message('receive key' + str(number) + str(value))
            self.receive_c_key(number, value)
        return receive_key

    def _make_dial_call(self, number):
        def receive_c_wheel(value):
            self.receive_wheel(number, 'value', value)
        # Bugfix: originally returned the undefined name `receive_wheel`,
        # raising NameError as soon as the mapped dial was moved.
        return receive_c_wheel

    def _make_dial_button_call(self, number):
        def receive_c_wheel(value):
            self.receive_wheel(number, 'white', value)
        # Bugfix: same undefined-name return as _make_dial_call above.
        return receive_c_wheel
class CntrlrMonomodComponent(MonomodComponent):
    """MonomodComponent specialised for the CNTRLR-style surface layout:
    a 4x4 grid, 32 keys, 24 knobs and a 4x3 matrix of ring encoders with
    push buttons."""

    def __init__(self, *a, **k):
        super(CntrlrMonomodComponent, self).__init__(*a, **k)

    # Generic send variants are disabled; the _c_ variants below are used.
    def _send_grid(self, *a):
        pass

    def _send_key(self, *a):
        pass

    def disconnect(self, *a, **k):
        self._release_mod_dials()
        super(CntrlrMonomodComponent, self).disconnect(*a, **k)

    def connect_to_clients(self, *a, **k):
        super(CntrlrMonomodComponent, self).connect_to_clients(*a, **k)
        for index in range(4):
            self._client[index]._mod_dial = (self._script._encoder[index]) #assign it a modDial so that we can control its modVolume from the unshifted CNTRLR

    def _select_client(self, *a, **k):
        super(CntrlrMonomodComponent, self)._select_client(*a, **k)
        # Mirror the newly selected client's ring/absolute preferences onto the script.
        self._script.set_local_ring_control(self._active_client._c_local_ring_control)
        self._script.set_absolute_mode(self._active_client._c_absolute_mode)
        self._active_client._device_component.update()

    def on_enabled_changed(self, *a, **k):
        super(CntrlrMonomodComponent, self).on_enabled_changed(*a, **k)
        if self._active_client != None:
            if self.is_enabled():
                self._active_client._device_component.update()
                self._script.set_absolute_mode(self._active_client._c_absolute_mode)
                self._script.set_local_ring_control(self._active_client._c_local_ring_control)
            else:
                # Disabled: free the encoders and restore default dial behaviour.
                for control in self._parameter_controls:
                    control.release_parameter()
                self._script.set_absolute_mode(1)
                self._script.set_local_ring_control(1)

    def _set_button_matrix(self, grid):
        assert isinstance(grid, (ButtonMatrixElement, type(None)))
        if grid != self._grid:
            if self._grid != None:
                self._grid.remove_value_listener(self._matrix_value)
            self._grid = grid
            if self._grid != None:
                self._grid.add_value_listener(self._matrix_value)
            self.update()
        return None

    def _matrix_value(self, value, x, y, is_momentary): #to be sent to client from controller
        assert (self._grid != None)
        assert (value in range(128))
        assert isinstance(is_momentary, type(False))
        if (self.is_enabled()):
            # Offset by the component's view origin into the client's grid space.
            self._active_client._send_c_grid(x + self._x, y + self._y, value)

    def _update_grid(self):
        if self.is_enabled() and self._grid != None:
            for column in range(4):
                for row in range(4):
                    self._send_c_grid(column, row, self._active_client._c_grid[column][row])

    def _alt_value(self, value):
        if self._shift_pressed == 0:
            self._alt_pressed = value != 0
            self._active_client._send('alt', int(self._alt_pressed))
            self.update()

    def _set_key_buttons(self, buttons):
        assert (buttons == None) or (isinstance(buttons, tuple))
        for key in self._keys:
            if key.value_has_listener(self._key_value):
                key.remove_value_listener(self._key_value)
        self._keys = []
        if buttons != None:
            assert len(buttons) == 32
            for button in buttons:
                assert isinstance(button, MonoButtonElement)
                self._keys.append(button)
                button.add_value_listener(self._key_value, True)

    def _key_value(self, value, sender):
        if self.is_enabled():
            self._active_client._send_c_key(self._keys.index(sender), int(value!=0))

    def _update_keys(self):
        for index in range(32):
            self._send_c_key(index, self._active_client._c_key[index])

    def _set_knobs(self, knobs):
        assert (knobs == None) or (isinstance(knobs, tuple))
        for knob in self._knobs:
            # Bugfix: was `knob.has_value_listener(...)`, which does not exist
            # on _Framework control elements (cf. `value_has_listener` used for
            # the keys above) and raised AttributeError on knob reassignment.
            if knob.value_has_listener(self._knob_value):
                knob.remove_value_listener(self._knob_value)
        self._knobs = []
        if knobs != None:
            assert len(knobs) == 24
            for knob in knobs:
                assert isinstance(knob, EncoderElement)
                self._knobs.append(knob)
                knob.add_value_listener(self._knob_value, True)

    def _knob_value(self, value, sender):
        if self.is_enabled():
            self._active_client._send_c_knob(self._knobs.index(sender), value)

    def _dial_matrix_value(self, value, x, y):
        if self.is_enabled() and self._active_client != None:
            if self._script._absolute_mode == 0:
                # In relative mode the raw value is mapped to an inc/dec token.
                value = RELATIVE[int(value == 1)]
            self._active_client._send_c_dial(x, y, value)

    def _dial_button_matrix_value(self, value, x, y, force):
        if (self.is_enabled()) and (self._active_client != None):
            self._active_client._send_c_dial_button(x, y, value)

    def _reset_encoder(self, coord):
        self._dial_matrix.get_dial(coord[0], coord[1])._reset_to_center()

    # -- CNTRLR specific methods (client -> controller) --
    def _send_c_grid(self, column, row, value): #to be sent to controller from client
        if self.is_enabled() and self._grid != None:
            # Only light buttons that fall inside the current 4x4 view window.
            if column in range(self._x, self._x + 4):
                if row in range(self._y, self._y + 4):
                    self._grid.get_button(column - self._x, row - self._y).send_value(int(self._colors[value]))

    def _send_c_key(self, index, value):
        if self.is_enabled():
            #if (self._shift_pressed > 0) or (self._locked > 0):
            #	self._grid.get_button(index, 7).send_value(int(self._colors[value]))
            if self._keys != None and len(self._keys) > index:
                self._keys[index].send_value(int(self._colors[value]))

    def _send_c_wheel(self, column, row, wheel, parameter=None): #to be sent to controller from client
        if self.is_enabled() and wheel != None:
            if column < 4 and row < 3:
                dial = self._dial_matrix.get_dial(column, row)
                if(parameter=='value'):
                    dial._ring_value = int(wheel['value'])
                dial._ring_mode = int(wheel['mode'])
                dial._ring_green = int(wheel['green']!=0)
                dial._ring_log = int(wheel['log'])
                if(parameter=='custom'):
                    dial._ring_custom = dial._calculate_custom(str(wheel['custom']))
                self._dial_button_matrix.send_value(column, row, wheel['white'])
                if(self._script._absolute_mode > 0) and (not self._active_client._device_component.is_enabled()):
                    dial.send_value(wheel['log'], True)

    def _update_c_wheel(self):
        if self._dial_button_matrix != None:
            for column in range(4):
                for row in range(3):
                    self._send_c_wheel(column, row, self._active_client._c_wheel[column][row])
                    if not self._active_client._device_component.is_enabled():
                        self._send_to_lcd(column, row, self._active_client._c_wheel[column][row])
                        #self._script.log_message('dial value update' +str(column) + str(row) + str(self._active_client._wheel[column][row]['value']))

    def set_c_local_ring_control(self, val = 1):
        self._c_local_ring_control = (val!=0)
        self._script.set_local_ring_control(self._c_local_ring_control)

    def set_c_absolute_mode(self, val=1):
        self._c_absolute_mode = (val!=0)
        self._script.set_absolute_mode(self._c_absolute_mode)

    def _release_mod_dials(self):
        if not self._client is None:
            for index in range(4): #for each of our 4 clients:
                if not self._client[index]._mod_dial == None: #if the client has a modDial assigned to it
                    self._client[index]._mod_dial.release_parameter() #remove the modDial's parameter assignment

    def _assign_mod_dials(self):
        if not self._client is None:
            for index in range(4): #recursion to contain all available clients
                param = self._client[index]._mod_dial_parameter() #param is a local variable, and we assign its value to the mod_dial_parameter (this is handled by each individual client module)
                #self._script.log_message('mod dial param ' + str(param))
                if not self._client[index]._mod_dial == None: #if the client has been assigned a mod dial (which it should have been in setup_mod() )
                    if not param == None: #if the param variable was properly assigned in the client module
                        self._client[index]._mod_dial.connect_to(param) #connect the physical control to the parameter (this should be the moddial parameter in the m4l patch)
                    else:
                        self._client[index]._mod_dial.release_parameter() #if the param was None, release the physical control from any assignments
            self._script.request_rebuild_midi_map()

    def _display_mod_colors(self):
        if not self._client is None:
            for index in range(4): #set up a recursion of 4
                self._script._shift_mode._modes_buttons[index].send_value(self._client[index]._mod_color) #update the modLEDs to display the color assigned to its contained mod
            if self._is_enabled:
                self._script._shift_mode._modes_buttons[self._client.index(self._active_client)].send_value(8)
        else:
            for index in range(4):
                self._script._shift_mode._modes_buttons[index].send_value(0)
class Twister(ControlSurface):
__module__ = __name__
__doc__ = " Monomodular controller script for Twister "
def __init__(self, *a, **k):
super(Twister, self).__init__(*a, **k)
"""MonoComponent specific variables - best not change these unless you know what you're doing"""
#self._version_check = 'b994'
self._host_name = 'Twister'
self._color_type = 'OhmRGB'
self._hosts = []
self.hosts = []
self._client = [None for index in range(4)]
self._active_client = None
self._rgb = 0 ##will change which color scheme is used, 0 is Livid 1 is AumHaa 2 is Monochrome(deprecated)
self._timer = 0 #used for flashing states, and is incremented by each call from self._update_display()
self._touched = 0 #used by the LCD patch to determine the last time a control was changed
self._local_ring_control = False #used by CodecEncoderElement to determine whether individual ring LEDs are addressable
self.set_local_ring_control(1) #initialize the local_control state of the encoder rings
self._absolute_mode = 1 #used by CodecEncoderElement to determine whether inc/dec or absolute changes are sent from CNTRLR
self.flash_status = 1 #used to determine whether button LED's use flashing states or not
self._device_selection_follows_track_selection = FOLLOW
with self.component_guard():
"""Initialization methods - comments included in the corresponding method"""
self._setup_monobridge()
self._setup_controls()
self._setup_transport_control()
self._setup_mixer_control()
self._setup_session_control()
self._assign_session_colors()
self._setup_device_control()
self._setup_device_selector()
self._setup_mod()
self._setup_switchboard()
self._setup_chopper()
self._setup_modes()
self.schedule_message(1, self._open_log)
self.song().view.add_selected_track_listener(self._update_selected_device) #Add a listener so that when the track content changes our device selection will aslo be updated
"""script initialization methods"""
def _open_log(self):
self.log_message("<<<<<<<<<<<<<<<<<<<<= " + str(self._host_name) + " log opened =>>>>>>>>>>>>>>>>>>>")
self.show_message(str(self._host_name) + ' Control Surface Loaded')
"""monobridge is used to send parameter names and values to the m4l LCD patch"""
def _setup_monobridge(self):
self._monobridge = MonoBridgeElement(self)
self._monobridge.name = 'MonoBridge'
def _setup_controls(self):
is_momentary = True #this variable will be used when sending arguments to the __init__ function of the modules we are creating instances of
self._fader = [None for index in range(8)]
self._dial_left = [None for index in range(12)]
self._dial_right = [None for index in range(12)]
self._encoder = [None for index in range(12)]
self._encoder_button = [None for index in range(12)]
self._grid = [None for index in range(16)]
self._button = [None for index in range(32)]
#self._side = [None for index in range(6)]
"""Now that we have our arrays, we can fill them with the controltypes that we'll be using."""
for index in range(8):
self._fader[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_FADERS[index], Live.MidiMap.MapMode.absolute, 'Fader_' + str(index), index, self)
self._knobs = []
for index in range(12):
self._dial_left[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_KNOBS_LEFT[index], Live.MidiMap.MapMode.absolute, 'Dial_Left_' + str(index), TW_KNOBS_LEFT[index], self)
self._knobs.append(self._dial_left[index])
for index in range(12):
self._dial_right[index] = MonoEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_KNOBS_RIGHT[index], Live.MidiMap.MapMode.absolute, 'Dial_Right_' + str(index), TW_KNOBS_RIGHT[index], self)
self._knobs.append(self._dial_right[index])
for index in range(12):
self._encoder[index] = CodecEncoderElement(MIDI_CC_TYPE, CHANNEL-1, TW_DIALS[index], Live.MidiMap.MapMode.absolute, 'Encoder_' + str(index), TW_DIALS[index], self)
for index in range(12):
self._encoder_button[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_DIAL_BUTTONS[index], 'Encoder_Button_' + str(index), self)
for index in range(16):
self._grid[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_GRID[index], 'Grid' + str(index), self)
for index in range(32):
self._button[index] = MonoButtonElement(is_momentary, MIDI_NOTE_TYPE, CHANNEL, TW_BUTTONS[index], 'Button_' + str(index), self)
"""We'll also need to assign some of our controls to ButtonMatrixElements so that we can use them with the Session Zoom and the Mod components"""
"""We use the same formula here: first we create the holders:"""
self._matrix = ButtonMatrixElement() #this is a standard _Framework object used by many of the other scripts
self._matrix.name = 'Matrix'
self._dial_matrix = EncoderMatrixElement(self) #this is a special Mono object, used specifically for the mod components
self._dial_matrix.name = 'Dial_Matrix'
self._dial_button_matrix = ButtonMatrixElement() #this is a special Mono object, used specifically for the mod components
self._dial_button_matrix.name = 'Dial_Button_Matrix'
"""And then we fill the with the control elements that are assigned to them"""
for row in range(4): #we have 4 rows, and 4 columns, forming the 4x4 grid in the center of the controller
button_row = [] #since the matrix is two dimensional, first we create the outer array,
for column in range(4):
button_row.append(self._grid[(row*4) + column]) #then we create the inner array. The process is the same for the other controls here.
self._matrix.add_row(tuple(button_row)) #add_row() is a method of the ButtonMatrixElement. You can look in its parent module to see how it works
for row in range(3):
dial_row = []
for column in range(4):
dial_row.append(self._encoder[(row*4) + column])
self._dial_matrix.add_row(tuple(dial_row))
for row in range(3):
dial_button_row = []
for column in range(4):
dial_button_row.append(self._encoder_button[(row*4) + column])
self._dial_button_matrix.add_row(tuple(dial_button_row))
self._key_matrix = ButtonMatrixElement()
button_row = [] #since we only use one row for the chopper, we can define a 1 dimensional button matrix for this one.
for column in range(16): #We use the ButtonMatrixObject because it takes care of setting up callbacks for all the buttons easily when we need them later
button_row.append(self._button[16 + column])
self._key_matrix.add_row(tuple(button_row))
"""the transport component allows us to assign controls to transport functions in Live"""
def _setup_transport_control(self):
self._transport = TransportComponent()
self._transport.name = 'Transport'
"""the mixer component corresponds and moves with our selection in Live, and allows us to assign physical controls"""
"""to Live's mixer functions without having to make all the links ourselves"""
def _setup_mixer_control(self):
is_momentary = True
self._num_tracks = (4) #A mixer is one-dimensional;
self._mixer = MixerComponent(4, 2, True, False) #These values represent the (Number_of_tracks, Number_of_returns, EQ_component, Filter_component)
self._mixer.name = 'Mixer' #We name everything that we might want to access in m4l
self._mixer.set_track_offset(0) #Sets start point for mixer strip (offset from left)
for index in range(4):
self._mixer.channel_strip(index).set_volume_control(self._fader[index]) #Since we gave our mixer 4 tracks above, we'll now assign our fader controls to it
self._mixer.channel_strip(index).name = 'Mixer_ChannelStrip_' + str(index) #We also name the individual channel_strip so we can access it
self._mixer.track_eq(index).name = 'Mixer_EQ_' + str(index) #We also name the individual EQ_component so we can access it
self._mixer.channel_strip(index)._invert_mute_feedback = True #This makes it so that when a track is muted, the corresponding button is turned off
self.song().view.selected_track = self._mixer.channel_strip(0)._track #set the selected strip to the first track, so that we don't, for example, try to assign a button to arm the master track, which would cause an assertion error
self._send_reset = ResetSendsComponent(self) #This creates a custom MonoComponent that allows us to reset all the sends on a track to zero with a single button
self._send_reset.name = 'Sends_Reset' #We name it so that we can access it from m4l
"""the session component represents a grid of buttons that can be used to fire, stop, and navigate clips in the session view"""
def _setup_session_control(self):
    """Create the SessionComponent: a 4x4 grid of buttons used to fire,
    stop and navigate clips in session view, plus the zooming (overview)
    component used while the shift button is held."""
    is_momentary = True
    num_tracks = 4 #we are working with a 4x4 grid,
    num_scenes = 4 #so the height and width are both set to 4
    # dedicated MIDI note buttons (channel 3) used to bank the session grid
    right_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 12)
    left_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 9)
    up_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 11)
    down_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, 3, 13)
    right_button.name = 'Bank_Select_Right_Button'
    left_button.name = 'Bank_Select_Left_Button'
    up_button.name = 'Bank_Select_Up_Button'
    down_button.name = 'Bank_Select_Down_Button'
    self._session = SessionComponent(num_tracks, num_scenes) #create the SessionComponent with the dimensions set above
    self._session.name = "Session" #named so we can access it in m4l
    self._session.set_offsets(0, 0) #initial offset: far left, top of the session grid
    self._session.set_track_bank_buttons(right_button, left_button) #track (horizontal) banking
    self._session.set_scene_bank_buttons(down_button, up_button) #scene (vertical) banking
    #self._session.set_stopped_value(STOP_CLIP[self._rgb]) #color shown when the stop_clip button is pressed; value comes from CNTRLR_Map.py, imported in the header of the script
    self._scene = [None for index in range(4)] #array holding the Scene subcomponents so we can reach them later
    for row in range(num_scenes): #fill the array with the objects created by the SessionComponent() call
        self._scene[row] = self._session.scene(row) #each session row is a SceneComponent
        self._scene[row].name = 'Scene_' + str(row) #named so we can access it in m4l
        for column in range(num_tracks): #name and colorize the contents of each scene
            # 'clip_slot' is just a local handle to each slot so we can set its properties
            clip_slot = self._scene[row].clip_slot(column)
            clip_slot.name = str(column) + '_Clip_Slot' + str(row) #named so we can access it in m4l
            clip_slot.set_triggered_to_play_value(CLIP_TRG_PLAY[self._rgb]) #triggered-to-play color
            clip_slot.set_triggered_to_record_value(CLIP_TRG_REC[self._rgb]) #triggered-to-record color
            clip_slot.set_stopped_value(CLIP_STOP[self._rgb]) #stopped color
            clip_slot.set_started_value(CLIP_STARTED[self._rgb]) #started color
            clip_slot.set_recording_value(CLIP_RECORDING[self._rgb]) #recording color
    self.set_highlighting_session_component(self._session) #show the session highlight ("red box") in Live
    self._session.set_mixer(self._mixer) #link the MixerComponent created in _setup_mixer_control() so both follow each other when either is navigated
    self._session_zoom = SessionZoomingComponent(self._session) #ZoomingComponent that allows navigation when the shift button is pressed
    self._session_zoom.name = 'Session_Overview' #named so we can access it in m4l
    self._session_zoom.set_stopped_value(ZOOM_STOPPED[self._rgb]) #zoom stopped color
    self._session_zoom.set_playing_value(ZOOM_PLAYING[self._rgb]) #zoom playing color
    self._session_zoom.set_selected_value(ZOOM_SELECTED[self._rgb]) #zoom selected color
    self._session_zoom.set_button_matrix(self._matrix) #the 4x4 ButtonMatrixElement created in _setup_controls()
    self._session_zoom.set_zoom_button(self._button[31]) #shift button switching between SessionComponent and SessionZoomingComponent
"""this section is used so that we can reassign the color properties of each state. Legacy, from the OhmModes script, to support either RGB or Monochrome"""
def _assign_session_colors(self):
    """Reassign the color properties of every clip-slot and zoom state.

    Legacy from the OhmModes script, kept to support either RGB or
    monochrome hardware (selected by self._rgb)."""
    num_tracks = 4
    num_scenes = 4
    #self._session.set_stopped_value(STOP_ALL[self._rgb])
    for scene_index in range(num_scenes):
        scene = self._scene[scene_index]
        for track_index in range(num_tracks):
            slot = scene.clip_slot(track_index)
            slot.set_triggered_to_play_value(CLIP_TRG_PLAY[self._rgb])
            slot.set_triggered_to_record_value(CLIP_TRG_REC[self._rgb])
            slot.set_stopped_value(CLIP_STOP[self._rgb])
            slot.set_started_value(CLIP_STARTED[self._rgb])
            slot.set_recording_value(CLIP_RECORDING[self._rgb])
    self._session_zoom.set_stopped_value(ZOOM_STOPPED[self._rgb])
    self._session_zoom.set_playing_value(ZOOM_PLAYING[self._rgb])
    self._session_zoom.set_selected_value(ZOOM_SELECTED[self._rgb])
    self.refresh_state()
"""the device component allows us to assign encoders to the selected device in Live"""
def _setup_device_control(self):
    """Create the DeviceComponent that binds encoders to the currently
    selected device in Live, plus the navigator used to move between
    devices."""
    self._device = DeviceComponent() #create the device component
    self._device.name = 'Device_Component' #named so we can access it in m4l
    # defeat undesirable DeviceComponent behavior that disables banking when no bank controls are assigned
    self._device._is_banking_enabled = self.device_is_banking_enabled(self._device)
    # monkey-patch set_device/update with our own wrapped versions
    self._device.set_device = self._device_set_device(self._device)
    self._device.update = self._device_update(self._device)
    # controls = bottom 8 encoders; [index+4] offsets past the first 4 encoders
    self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)]))
    # register with the main ControlSurface script: enables special updating,
    # e.g. locking the device component to the currently selected device
    self.set_device_component(self._device)
    # component taken from the APC scripts; moves from one device to another from the controller
    self._device_navigator = CntrlrDetailViewControllerComponent(self)
    self._device_navigator.name = 'Device_Navigator' #named so we can access it in m4l
    # ControlSurface property: device selection follows track selection; FOLLOW comes from CNTRLR_Map.py
    self._device_selection_follows_track_selection = FOLLOW
"""the device selector component allows the user to set buttons that will automatically select a device based on its name"""
"""its not used in the stock CNTRLR script, but it could easily be assigned to any buttons using the correct syntax"""
"""for more information, check out the documentation for the MonOhm script"""
def _setup_device_selector(self):
    """Create the DeviceSelectorComponent, which can map buttons to
    auto-select devices by name. Unused in the stock CNTRLR script but
    available for custom assignments (see the MonOhm script docs)."""
    self._device_selector = DeviceSelectorComponent(self)
    self._device_selector.name = 'Device_Selector'  # named for access from m4l
"""this section sets up the host environment that allows the controller to access different mods from the modButtons"""
def _setup_mod(self):
    """Set up the host environment that lets the controller access mods
    from the modButtons: one MonomodComponent host bridging the CNTRLR's
    controls to the m4l client patches, plus four client slots."""
    self._host = CntrlrMonomodComponent(self) #bridge between the CNTRLR's controls and the m4l client patches
    self._host.name = 'Cntrlr_Host' #named so we can access it
    # some controllers have more than one grid for clients, so hosts live in an
    # array; the CNTRLR holds only one
    self.hosts = [self._host]
    self._hosts = [self._host] #NOTE: redundant with self.hosts; original author flagged this for cleanup
    self._host._set_parameter_controls(self._encoder)
    for index in range(4): #create the clients that will connect to the actual m4l mods
        self._client[index] = CntrlrMonoClient(self, index) #create an instance, passing its index
        self._client[index].name = 'Client_' + str(index) #named so we can access it
        # a modDial lets us control the client's modVolume from the unshifted CNTRLR
        self._client[index]._mod_dial = (self._encoder[index])
        self._client[index]._device_component = MonoDeviceComponent(self._client[index], MOD_BANK_DICT, MOD_TYPES)
        # controls that raw data will be addressed at
        self._client[index]._control_defs = {'dials':self._dial_matrix, 'buttons':self._dial_button_matrix, 'grid':self._matrix, 'keys':self._button, 'knobs':self._knobs}
    self._active_client = self._client[0] #select the first client as the active one
    # active flag is used by MonomodComponent to determine the client's status when messaging it
    self._active_client._is_active = True
    self._host.connect_to_clients(self) #connect the host to the clients now that they are set up
"""the switchboard allows us to manage connections and disconnections between our clients and any mods that are currently installed in our Live project"""
def _setup_switchboard(self):
    """Create the switchboard element, which manages connections and
    disconnections between our clients and any mods currently installed
    in the Live project."""
    # pass the main script and the client array created in _setup_mod()
    self._switchboard = SwitchboardElement(self, self._client)
    self._switchboard.name = 'Switchboard'  # named so we can access it in m4l
"""the clipchopper component is a custom component we can access by switching modes"""
def _setup_chopper(self):
    """Create the clip-chopper component, a custom component reachable by
    switching modes."""
    # the mixer is passed so the chopper can navigate which clip is being manipulated
    self._chopper = MonoChopperComponent(self, self._mixer)
    self._chopper.name = 'Chopper'  # named so we can access it via m4l
    # controls = the key ButtonMatrixElement created in _setup_controls()
    self._chopper._set_button_matrix(self._key_matrix)
"""since there are many different configurations possible with the modButtons, we'll need to create a ModeSelectorComponent"""
"""to manage the different states of our controller"""
def _setup_modes(self):
    """Create the ShiftModeComponent that manages the controller's
    different modButton configurations (states)."""
    self._shift_mode = ShiftModeComponent(self, self.shift_update)  # shift_update is called on every mode change
    self._shift_mode.name = 'Mod_Mode'  # named so we can access it
    mode_buttons = [self._encoder_button[idx] for idx in range(4)]
    self._shift_mode.set_mode_buttons(mode_buttons)  # buttons used to change states
"""cntrlr modes"""
"""here we set up some methods that will be used to update the control assignments when we change between different modes"""
"""this method is called everytime we change modes. If we make any assignments in the other mode assignment methods, we"""
"""have to be sure to remove them in this function. This creates a 'blank slate' for all the CNTRLRs control elements"""
def deassign_live_controls(self):
    """Remove every control assignment made by the mode-assignment
    methods, creating a 'blank slate' for all of the CNTRLR's control
    elements. Called every time we change modes; anything assigned in the
    other mode methods must be removed here.

    NOTE (original author): the first deassignment section was missing
    from the original script — the assignments were made at __init__.
    """
    #for index in range(4):
    #	if self._encoder[index].value_has_listener(self._client[index]._mod_dial_value):
    #		self._encoder[index].remove_value_listener(self._client[index]._mod_dial_value)
    for index in range(4):
        self._mixer.channel_strip(index).set_volume_control(None)  # detach the 4 track faders
    for index in range(2):
        self._mixer.return_strip(index).set_volume_control(None)  # detach the return-strip faders
    self._mixer.master_strip().set_volume_control(None)  # detach the master fader
    self._mixer.set_prehear_volume_control(None)  # detach the prehear fader
    for index in range(4):  # left side of the mixer
        self._mixer.channel_strip(index).set_solo_button(None)  # remove solo button assignments
        self._mixer.channel_strip(index).set_arm_button(None)  # remove arm button assignments
        self._mixer.channel_strip(index).set_mute_button(None)  # remove mute button assignments
        self._mixer.channel_strip(index).set_select_button(None)  # remove select button assignments
    for column in range(4):
        for row in range(4):
            self._scene[row].clip_slot(column).set_launch_button(None)  # remove clip launch assignments
    self._send_reset.set_buttons(tuple([None for index in range(4)]))  # must be sent as a tuple
    self._session.set_stop_track_clip_buttons(None)  # remove clip_stop button assignments
    self._transport.set_play_button(None)  # remove the play button assignment
    self._transport.set_record_button(None)  # remove the record button assignment
    self._transport.set_stop_button(None)  # remove the stop button assignment
    for index in range(16):
        self._grid[index].set_on_off_values(127, 0)  # reset the grid buttons' on/off values
        self._grid[index].reset()  # turn the button LEDs off
    for index in range(32):
        self._button[index].set_on_off_values(127, 0)  # reset the key buttons' on/off values
        self._button[index].reset()  # turn the button LEDs off
        self._button[index].release_parameter()  # remove the parameter assigned to the keys
    for client in self._client:  # for each of our 4 clients:
        if not client._mod_dial == None:  # if the client has a modDial assigned
            client._mod_dial.release_parameter()  # remove the modDial's parameter assignment
    # (re)attach the bottom 8 encoders to the device component's controls
    self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)]))
    self._device_navigator.set_device_nav_buttons(None, None)  # remove the device nav buttons
    self._device_navigator.set_enabled(False)  # turn off the device navigator
    self._device.set_on_off_button(None)  # remove the device on/off button
    self._device.set_lock_button(None)  # remove the device lock button
    self._device.set_bank_nav_buttons(None, None)  # remove the device bank nav buttons
    self._device.set_enabled(False)  # turn off the device component
    self._session.set_enabled(False)  # turn off the session component
    self._session_zoom.set_enabled(False)  # turn off the zoom component
    for index in range(16):
        # set the grid's last_sent value to -1 so the next value received is always transmitted to the CNTRLR
        self._grid[index].clear_send_cache()
    for index in range(32):
        # same for the keys
        self._button[index].clear_send_cache()
    for index in range(12):
        self._device._parameter_controls = None
        self._encoder[index].release_parameter()
        # second argument True forces the update regardless of the last_sent property
        self._encoder[index].send_value(0, True)  # turn off all the encoder rings
        self._encoder[index].clear_send_cache()
    for index in range(8):
        self._encoder_button[index+4].send_value(0, True)  # turn off all the encoder LEDs (forced)
        self._encoder_button[index+4].clear_send_cache()
    self._session_zoom.set_zoom_button(None)  # remove the shift button from the ZoomingComponent
    self._host._release_mod_dials()
    # finally, tell the main script to rebuild its MIDI map and update the values in Live
    self.request_rebuild_midi_map()
def assign_live_controls(self):
    """Assign the CNTRLR's controls for the main (top-level, non-mod)
    Live mode: mixer buttons on the lower keys, clip launch on the 4x4
    grid, faders/dials on the mixer, and the bottom encoders on the
    device component."""
    """the following lines update all of the controls' last_sent properties, so that they forward the next value they receive regardless of whether or not it is the same as the last it recieved"""
    """we also reset the encoder rings and buttons, since the device component will not update them if it is not locked to a device in Live"""
    for index in range(16):
        self._grid[index].clear_send_cache()
    for index in range(32):
        self._button[index].clear_send_cache()
    for index in range(8):
        self._encoder_button[index+4].send_value(0, True)
        self._encoder_button[index+4].clear_send_cache()
    for index in range(8):
        self._encoder[index+4].send_value(0, True)
    for index in range(12):
        self._encoder[index].clear_send_cache()
    """here we assign the top encoders to the mod_dial, if it exists, in any connected mods"""
    self.schedule_message(4, self._assign_mod_dials)
    """here we assign the left side of our mixer's buttons on the lower 32 keys"""
    for index in range(4):  # assign all four track channel strips' controls
        self._button[index].set_on_value(SOLO[self._rgb])  # solo color from Map.py
        self._mixer.channel_strip(index).set_solo_button(self._button[index])
        self._button[index+4].set_on_value(ARM[self._rgb])  # arm color from Map.py
        self._mixer.channel_strip(index).set_arm_button(self._button[index+4])
        self._button[index+16].set_on_value(MUTE[self._rgb])  # mute color from Map.py
        self._mixer.channel_strip(index).set_mute_button(self._button[index+16])
        self._button[index+20].set_on_value(SELECT[self._rgb])  # select color from Map.py
        self._mixer.channel_strip(index).set_select_button(self._button[index+20])
    # tuple() builds an immutable sequence in-place for the component setters
    self._send_reset.set_buttons(tuple(self._button[index + 8] for index in range(4)))
    self._session.set_stop_track_clip_buttons(tuple(self._button[index+24] for index in range(4)))
    for index in range(4):
        self._button[index+8].send_value(SEND_RESET[self._rgb], True)  # light the send_reset buttons
        # stop_clip buttons get the same value for both on and off so they stay identifiable
        self._button[index+24].set_on_off_values(STOP_CLIP[self._rgb], STOP_CLIP[self._rgb])
        self._button[index+24].send_value(STOP_CLIP[self._rgb], True)  # light the stop clip buttons
    # play has separate on/off colors depending on whether play is engaged
    self._button[28].set_on_off_values(PLAY_ON[self._rgb], PLAY[self._rgb])
    self._transport.set_play_button(self._button[28])
    self._button[30].set_on_off_values(RECORD_ON[self._rgb], RECORD[self._rgb])
    self._transport.set_record_button(self._button[30])
    self._button[29].set_on_value(STOP[self._rgb])
    self._transport.set_stop_button(self._button[29])
    self._button[29].send_value(STOP_OFF[self._rgb], True)  # light the stop button
    #for index in range(4):
    #	self._button[index + 12].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
    #set the track bank buttons for the Session navigation controls
    #set the scnee bank buttons for the Session navigation controls
    #for index in range(4):
    #	self._button[index].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
    #self._session.set_track_bank_buttons(self._side[2], self._side[5])
    #self._session.set_scene_bank_buttons(self._side[4], self._side[6])
    """this section assigns the grid to the clip launch functionality of the SessionComponent"""
    for column in range(4):
        for row in range(4):
            # map grid button (row*4)+column to the clip slot at (row, column)
            self._scene[row].clip_slot(column).set_launch_button(self._grid[(row*4)+column])
    """this section assigns the faders and knobs"""
    for index in range(4):
        self._mixer.channel_strip(index).set_volume_control(self._fader[index])  # track faders
    for index in range(2):
        self._mixer.return_strip(index).set_volume_control(self._fader[index+4])  # return faders
    self._mixer.master_strip().set_volume_control(self._fader[7])  # master fader
    self._mixer.set_prehear_volume_control(self._fader[6])  # prehear fader
    for track in range(4):
        # the channelstripcomponent requires the send controls in a sequence
        channel_strip_send_controls = []
        for control in range(2):
            channel_strip_send_controls.append(self._dial_left[track + (control * 4)])
        self._mixer.channel_strip(track).set_send_controls(tuple(channel_strip_send_controls))
        self._mixer.channel_strip(track).set_pan_control(self._dial_left[track + 8])  # pan on the bottom row
        # gain controls defined in-place: (high, mid, low) dials for the track EQ
        self._mixer.track_eq(track).set_gain_controls(tuple([self._dial_right[track+8], self._dial_right[track+4], self._dial_right[track]]))
        self._mixer.track_eq(track).set_enabled(True)  # turn the eq component on
    """this section assigns the encoders and encoder buttons"""
    # bottom 8 encoders on the device component; [index+4] offsets past the first 4
    self._device.set_parameter_controls(tuple([self._encoder[index+4] for index in range(8)]))
    self._encoder_button[7].set_on_value(DEVICE_LOCK[self._rgb])
    self._device.set_lock_button(self._encoder_button[7])
    self._encoder_button[4].set_on_value(DEVICE_ON[self._rgb])
    self._device.set_on_off_button(self._encoder_button[4])
    for index in range(2):
        self._encoder_button[index + 8].set_on_value(DEVICE_NAV[self._rgb])
        self._encoder_button[index + 10].set_on_value(DEVICE_BANK[self._rgb])
    self._device_navigator.set_device_nav_buttons(self._encoder_button[10], self._encoder_button[11])
    self._device.set_bank_nav_buttons(self._encoder_button[8], self._encoder_button[9])
    self._session_zoom.set_zoom_button(self._button[31])  # lower-right key is the Zoom shift button
    """now we turn on and update some of the components we've just made assignments to"""
    self._device.set_enabled(True)
    self._device_navigator.set_enabled(True)
    self._session.set_enabled(True)
    self._session_zoom.set_enabled(True)
    # update() makes the device detect the selected device's parameters and show them on the encoder rings
    self._device.update()
    self._session.update()  # make the grid display the currently selected session region
"""this assigns the CNTRLR's controls on for 4th empty modSlot"""
"""these assignments mirror the main section; commenting is restricted to the differences"""
def assign_chopper_controls(self):
    """Assign the CNTRLR's controls for chopper mode (4th modSlot empty,
    CHOPPER_ENABLE set). These assignments mirror assign_live_controls();
    comments here are restricted to the differences — see that method for
    the rest."""
    # refresh all controls' last_sent caches so the next value received is
    # always forwarded, and reset the encoder rings/LEDs (the device
    # component won't update them unless locked to a device in Live)
    for index in range(16):
        self._grid[index].clear_send_cache()
    for index in range(32):
        self._button[index].clear_send_cache()
    for index in range(8):
        self._encoder_button[index+4].send_value(0, True)
        self._encoder_button[index+4].clear_send_cache()
    for index in range(12):
        self._encoder[index].send_value(0, True)
        self._encoder[index].clear_send_cache()
    # assign the top encoders to the mod_dial, if it exists, in any connected mods
    self.schedule_message(4, self._assign_mod_dials)
    # differs from assign_live_controls(): mute/select and stop-clip buttons
    # are moved to the first 12 keys
    for index in range(4):
        self._button[index].set_on_value(MUTE[self._rgb])
        self._mixer.channel_strip(index).set_mute_button(self._button[index])
        self._button[index+4].set_on_value(SELECT[self._rgb])
        self._mixer.channel_strip(index).set_select_button(self._button[index+4])
    self._session.set_stop_track_clip_buttons(tuple(self._button[index+8] for index in range(4)))
    for index in range(4):
        self._button[index + 8].set_on_off_values(STOP_CLIP[self._rgb], STOP_CLIP[self._rgb])
        self._button[index+8].send_value(STOP_CLIP[self._rgb], True)
    #for index in range(4):
    #	self._button[index + 12].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
    #self._session.set_scene_bank_buttons(self._button[13], self._button[12])
    #self._session.set_track_bank_buttons(self._button[15], self._button[14])
    #for index in range(4):
    #	self._side[index].set_on_off_values(SESSION_NAV[self._rgb], SESSION_NAV_OFF[self._rgb])
    #self._session.set_scene_bank_buttons(self._side[4], self._side[6])
    #self._session.set_track_bank_buttons(self._side[2], self._side[5])
    # the rest mirrors assign_live_controls()
    for index in range(2):
        self._mixer.return_strip(index).set_volume_control(self._fader[index+4])
    self._mixer.master_strip().set_volume_control(self._fader[7])
    self._mixer.set_prehear_volume_control(self._fader[6])
    for track in range(4):
        channel_strip_send_controls = []
        for control in range(2):
            channel_strip_send_controls.append(self._dial_left[track + (control * 4)])
        self._mixer.channel_strip(track).set_send_controls(tuple(channel_strip_send_controls))
        self._mixer.channel_strip(track).set_pan_control(self._dial_left[track + 8])
        # NOTE: the original declared an unused local `gain_controls = []` here; removed.
        self._mixer.track_eq(track).set_gain_controls(tuple([self._dial_right[track+8], self._dial_right[track+4], self._dial_right[track]]))
        self._mixer.track_eq(track).set_enabled(True)
    for column in range(4):
        for row in range(4):
            self._scene[row].clip_slot(column).set_launch_button(self._grid[(row*4)+column])
    self._encoder_button[7].set_on_value(DEVICE_LOCK[self._rgb])
    self._device.set_lock_button(self._encoder_button[7])
    self._encoder_button[4].set_on_value(DEVICE_ON[self._rgb])
    self._device.set_on_off_button(self._encoder_button[4])
    for index in range(2):
        self._encoder_button[index + 8].set_on_value(DEVICE_NAV[self._rgb])
        self._encoder_button[index + 10].set_on_value(DEVICE_BANK[self._rgb])
    self._device_navigator.set_device_nav_buttons(self._encoder_button[10], self._encoder_button[11])
    self._device.set_bank_nav_buttons(self._encoder_button[8], self._encoder_button[9])
    self._device.set_enabled(True)
    self._device_navigator.set_enabled(True)
    self._session.set_enabled(True)
    self._session_zoom.set_enabled(True)
    self._device.update()
    self._session.update()
    self.request_rebuild_midi_map()
"""function mode callbacks"""
def display_mod_colors(self):
    """Update the four mod LEDs to display the color assigned to each
    client's contained mod."""
    mode_buttons = self._shift_mode._modes_buttons
    clients = self._client
    for slot in range(4):
        mode_buttons[slot].send_value(clients[slot]._mod_color)
"""this method changes modes when we press a modButton. It is also called from Monomod when it needs to update the modDial assignments"""
def shift_update(self):
    """Reassign the controls when a modButton is pressed (mode change).
    Also called from Monomod when it needs to update modDial assignments.

    Mode 0 = main Live control mode; mode 4 with an empty 4th slot and
    CHOPPER_ENABLE = chopper mode; any other mode = modMode for the
    selected client.
    """
    #self.log_message('shift_update')
    # undo any channel reassignments by mapping back to channel 0 (the original channel)
    self.assign_alternate_mappings(0)
    self._chopper.set_enabled(False)  # disabled here; re-enabled below if we're in chopper mode
    for index in range(4):
        # update the modLEDs to display the color assigned to each contained mod
        self._shift_mode._modes_buttons[index].send_value(self._client[index]._mod_color)
    # FIX: was `is 0` — identity comparison with an int literal is
    # implementation-defined; use equality instead
    if self._shift_mode._mode_index == 0:  # main script mode selected
        self._host._set_dial_matrix(None, None)  # deassign the Monomod Component's dial matrix
        #self._host._set_knobs(None)
        self._host._set_button_matrix(None)  # deassign the Monomod Component's button matrix
        self._host._set_key_buttons(None)  # deassign the Monomod Component's key matrix
        self._host.set_enabled(False)  # disable the Monomod Component
        self.set_local_ring_control(1)  # sysex: put the CNTRLR in local ring mode
        self.assign_live_controls()  # assign the top-level control assignments
    elif CHOPPER_ENABLE and not self._host._client[3].is_connected() and self._shift_mode._mode_index == 4:
        # fourth mod button pressed and no mod installed there: chopper mode
        self.deassign_live_controls()
        for index in range(4):
            if self._shift_mode._mode_index == (index + 1):
                self._shift_mode._modes_buttons[index].send_value(1)  # light the LED below the chosen modButton
        self.schedule_message(4, self._assign_mod_dials)
        self._host._set_dial_matrix(None, None)
        self._host._set_button_matrix(None)
        self._host._set_key_buttons(None)
        self._host.set_enabled(False)
        self.set_local_ring_control(1)
        self.assign_chopper_controls()
        self._chopper.set_enabled(True)  # turn the Chopper Component on
    else:  # modMode
        self.deassign_live_controls()  # remove all assignments and refresh caches
        self.assign_mixer_controls()
        self._host.set_enabled(True)  # turn on the Monomod Component
        self._host._set_dial_matrix(self._dial_matrix, self._dial_button_matrix)  # assign the encoders
        #self._host._set_knobs(tuple(self._knobs))
        self._host._set_button_matrix(self._matrix)  # assign the 4x4 grid
        self._host._set_key_buttons(tuple(self._button))  # assign the lower buttons
        self._host._select_client(self._shift_mode._mode_index-1)  # client corresponding to the pressed button
        self._host.display_active_client()  # update the LEDs for the selected client
        for index in range(4):
            if self._shift_mode._mode_index == (index + 1):
                self._shift_mode._modes_buttons[index].send_value(1)  # light the chosen modButton's LED
        if not self._host._active_client.is_connected():
            # no mod in the selected slot: translate the controls to a
            # different MIDI channel on their way into Live
            self.assign_alternate_mappings(self._shift_mode._mode_index)
def assign_mixer_controls(self):
for index in range(4):
self._mixer.channel_strip(index).set_volume_control(self._fader[index]) #Since we gave our mixer 4 tracks above, we'll now assign our fader controls to it
for index in range(2):
self._mixer.return_strip(index).set_volume_control(self._fader[index+4]) #assign the right faders to control the volume of our return strips
self._mixer.master_strip().set_volume_control(self._fader[7]) #assign the far right fader to control our master channel strip
self._mixer.set_prehear_volume_control(self._fader[6]) #assign the remaining fader to control our prehear volume of the the master channel strip
"""assign alternate mappings to the controls when a modSlot is selected that doesn't contain a mod"""
def assign_alternate_mappings(self, chan):
chan = min(16, max(chan, 0))
for index in range(8):
self._encoder_button[index + 4].set_channel(chan) #set the contols channel to the methods second argument
self._encoder_button[index + 4].set_enabled(chan is 0) #if the channel is not 0, we need to disable the control so that it
self._encoder_button[index + 4].force_next_send()
for encoder in self._encoder: #is forwarded to Live, but not used by the script for internal processing
encoder.set_channel(chan)
encoder.set_enabled(chan is 0)
encoder.force_next_send()
for button in self._button:
button.set_channel(chan)
button.set_enabled(chan is 0)
button.force_next_send()
for cell in self._grid:
cell.set_channel(chan)
cell.set_enabled(chan is 0)
cell.force_next_send()
self.request_rebuild_midi_map()
"""reassign the original channel and identifier to all the controls that can be remapped through assign_alternate_mappings"""
def assign_original_mappings(self):
for index in range(8):
self._encoder_button[index + 4].set_channel(self._encoder_button[index + 4]._original_channel)
self._encoder_button[index + 4].set_enabled(True)
self._encoder_button[index + 4].force_next_send()
for encoder in self._encoder:
encoder.set_channel(encoder._original_channel)
encoder.set_enabled(True)
encoder.force_next_send()
for button in self._button:
button.set_channel(button._original_channel)
button.set_enabled(True)
button.force_next_send()
for cell in self._grid:
cell.set_channel(cell._original_channel)
cell.set_enabled(True)
cell.force_next_send()
self.request_rebuild_midi_map()
"""called on timer"""
def update_display(self):
super(Twister, self).update_display() #since we are overriding this from the inherited method, we need to call the original routine as well
self._timer = (self._timer + 1) % 256 #each 100/60ms, increase the self._timer property by one. Start over at 0 when we hit 256
if(self._local_ring_control is False): #if local rings are turned off, then we need to send the new values if they've changed
self.send_ring_leds()
self.flash() #call the flash method below
"""this method recurses through all the controls, causing them to flash depending on their stored values"""
def flash(self):
if(self.flash_status > 0):
for control in self.controls:
if isinstance(control, MonoButtonElement):
control.flash(self._timer)
"""m4l bridge"""
"""this is a method taken and modified from the MackieControl scripts"""
"""it takes a display string and modifies it to be a specified length"""
def generate_strip_string(self, display_string):
NUM_CHARS_PER_DISPLAY_STRIP = 12
if (not display_string):
return (' ' * NUM_CHARS_PER_DISPLAY_STRIP)
if ((len(display_string.strip()) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.endswith('dB') and (display_string.find('.') != -1))):
display_string = display_string[:-2]
if (len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)):
for um in [' ',
'i',
'o',
'u',
'e',
'a']:
while ((len(display_string) > (NUM_CHARS_PER_DISPLAY_STRIP - 1)) and (display_string.rfind(um, 1) != -1)):
um_pos = display_string.rfind(um, 1)
display_string = (display_string[:um_pos] + display_string[(um_pos + 1):])
else:
display_string = display_string.center((NUM_CHARS_PER_DISPLAY_STRIP - 1))
ret = u''
for i in range((NUM_CHARS_PER_DISPLAY_STRIP - 1)):
if ((ord(display_string[i]) > 127) or (ord(display_string[i]) < 0)):
ret += ' '
else:
ret += display_string[i]
ret += ' '
ret = ret.replace(' ', '_')
assert (len(ret) == NUM_CHARS_PER_DISPLAY_STRIP)
return ret
"""this method forwards display information from control elements to the LCD patch"""
def notification_to_bridge(self, name, value, sender):
if(isinstance(sender, (MonoEncoderElement, CodecEncoderElement))):
pn = str(self.generate_strip_string(name))
pv = str(self.generate_strip_string(value))
self._monobridge._send(sender.name, 'lcd_name', pn)
self._monobridge._send(sender.name, 'lcd_value', pv)
"""this method regulates parameter values from being sent on updates if the control has not actually been changed"""
def touched(self):
if self._touched is 0:
self._monobridge._send('touch', 'on')
self.schedule_message(2, self.check_touch)
self._touched +=1
"""this method is called by the LCD patch to determine whether any controls have been changed"""
def check_touch(self):
if self._touched > 5:
self._touched = 5
elif self._touched > 0:
self._touched -= 1
if self._touched is 0:
self._monobridge._send('touch', 'off')
else:
self.schedule_message(2, self.check_touch)
"""this is an unnused method. It provides a way to retrieve all the clip names belonging to the current session views clips"""
def get_clip_names(self):
clip_names = []
for scene in self._session._scenes:
for clip_slot in scene._clip_slots:
if clip_slot.has_clip() is True:
clip_names.append(clip_slot._clip_slot)##.clip.name)
return clip_slot._clip_slot
return clip_names
"""midi functionality"""
"""this method needs to be here so that Live knows what to do (nothing, in this case) when it receives sysex from the CNTRLR"""
def handle_sysex(self, midi_bytes):
pass
"""this method can be linked to from m4l, and provides a way to update the parameter value of an assigned DeviceComponent parameter control"""
def to_encoder(self, num, val):
rv=int(val*127)
self._device._parameter_controls[num].receive_value(rv)
p = self._device._parameter_controls[num]._parameter_to_map_to
newval = (val * (p.max - p.min)) + p.min
p.value = newval
"""this method sets the instance variable for local ring control, and sends the appropriate sysex string to change states on the CNTRLR"""
def set_local_ring_control(self, val = 1):
self._local_ring_control = (val!=0)
if(self._local_ring_control is True):
self._send_midi(tuple([240, 0, 1, 97, 8, 32, 0, 247]))
else:
self._send_midi(tuple([240, 0, 1, 97, 8, 32, 1, 247]))
"""this method sets the instance variable for absolute encoder changes, and sends the appropriate sysex string to change states on the CNTRLR"""
def set_absolute_mode(self, val = 1):
self._absolute_mode = (val!=0)
if self._absolute_mode is True:
self._send_midi(tuple([240, 0, 1, 97, 8, 17, 0, 0, 0, 0, 0, 0, 0, 0, 247]))
else:
self._send_midi(tuple([240, 0, 1, 97, 8, 17, 127, 127, 127, 127, 127, 127, 127, 127, 247]))
"""this method is used to update the individual elements of the encoder rings when the CNTRLR is in local ring control mode"""
def send_ring_leds(self):
if self._host._is_enabled == True:
leds = [240, 0, 1, 97, 8, 31]
for index in range(12):
wheel = self._encoder[index]
bytes = wheel._get_ring()
leds.append(bytes[0])
leds.append(int(bytes[1]) + int(bytes[2]))
leds.append(247)
self._send_midi(tuple(leds))
def _release_mod_dials(self):
if not self._client is None:
for client in self._client: #for each of our 4 clients:
if not client._mod_dial == None: #if the client has a modDial assigned to it
client._mod_dial.release_parameter() #remove the modDial's parameter assignment
def _assign_mod_dials(self):
if not self._client is None:
for client in self._client: #recursion to contain all available clients
param = client._mod_dial_parameter() #param is a local variable, and we assign its value to the mod_dial_parameter (this is handled by each individual client module)
if not client._mod_dial == None: #if the client has been assigned a mod dial (which it should have been in setup_mod() )
if not param == None: #if the param variable was properly assigned in the client module
client._mod_dial.connect_to(param) #connect the physical control to the parameter (this should be the moddial parameter in the m4l patch)
else:
client._mod_dial.release_parameter() #if the param was None, release the physical control from any assignments
self.request_rebuild_midi_map()
"""general functionality"""
"""this method is called by Live when it needs to disconnect. It's very important that any observers that were set up in the script are removed here"""
def disconnect(self):
"""clean things up on disconnect"""
if self.song().view.selected_track_has_listener(self._update_selected_device):
self.song().view.remove_selected_track_listener(self._update_selected_device)
self._hosts = []
self.log_message("<<<<<<<<<<<<<<<<<<<<<<<<< " + str(self._host_name) + " log closed >>>>>>>>>>>>>>>>>>>>>>>>>") #Create entry in log file
super(Cntrlr, self).disconnect()
"""this provides a hook that can be called from m4l to change the DeviceComponent's behavior"""
def device_follows_track(self, val):
self._device_selection_follows_track_selection = (val == 1)
return self
"""this is a customizationo of the inherited behavior of ControlSurface"""
def _update_selected_device(self):
if self._device_selection_follows_track_selection is True:
track = self.song().view.selected_track
device_to_select = track.view.selected_device
if device_to_select == None and len(track.devices) > 0:
device_to_select = track.devices[0]
if device_to_select != None:
self.song().view.select_device(device_to_select)
#self._device.set_device(device_to_select)
self.set_appointed_device(device_to_select)
#self._device_selector.set_enabled(True)
self.request_rebuild_midi_map()
return None
"""this provides a hook to get the current tracks length from other modules"""
def _get_num_tracks(self):
return self.num_tracks
"""device component methods and overrides"""
"""this closure replaces the default DeviceComponent update() method without requiring us to build an override class"""
"""it calls the _update_selected_device method of this script in addition to its normal routine"""
"""it also ensures a rebuilt midi_map; for some reason the Abe's pulled that part out of the post 8.22 scripts, and under certain circumstances"""
"""things don't work as expected anymore."""
def _device_update(self, device):
def _update():
#for client in self._client:
# if (device._device != None) and (client.device == device._device):
# device._bank_index = max(client._device_component._cntrl_offset, device._bank_index)
DeviceComponent.update(device)
self.request_rebuild_midi_map()
return _update
def _device_set_device(self, device_component):
def set_device(device):
is_monodevice = False
for client in self._client:
if (device != None) and (client.device == device):
is_monodevice = client
if is_monodevice != False:
#device = client._device_component._device
self.log_message('is monodevice' + str(device.name))
assert ((device == None) or isinstance(device, Live.Device.Device))
if ((not device_component._locked_to_device) and (device != device_component._device)):
if (device_component._device != None):
device_component._device.remove_name_listener(device_component._on_device_name_changed)
device_component._device.remove_parameters_listener(device_component._on_parameters_changed)
parameter = device_component._on_off_parameter()
if (parameter != None):
parameter.remove_value_listener(device_component._on_on_off_changed)
if (device_component._parameter_controls != None):
for control in device_component._parameter_controls:
control.release_parameter()
device_component._device = device
if (device_component._device != None):
device_component._bank_index = 0
device_component._device.add_name_listener(self._on_device_name_changed)
device_component._device.add_parameters_listener(self._on_parameters_changed)
parameter = device_component._on_off_parameter()
if (parameter != None):
parameter.add_value_listener(device_component._on_on_off_changed)
for key in device_component._device_bank_registry.keys():
if (key == device_component._device):
device_component._bank_index = device_component._device_bank_registry.get(key, 0)
del device_component._device_bank_registry[key]
break
device_component._bank_name = '<No Bank>' #added
device_component._bank_index = max(is_monodevice._cntrl_offset, device_component._bank_index)
device_component._on_device_name_changed()
device_component.update()
else:
DeviceComponent.set_device(device_component, device)
return set_device
"""this closure replaces the default ChannelStripComponent _on_cf_assign_changed() method without requiring us to build an override class"""
"""it allows us to change different colors to its assigned controls based on the crossfade assignment, which the default _Framework doesn't support"""
def mixer_on_cf_assign_changed(self, channel_strip):
def _on_cf_assign_changed():
if (channel_strip.is_enabled() and (channel_strip._crossfade_toggle != None)):
if (channel_strip._track != None) and (channel_strip._track in (channel_strip.song().tracks + channel_strip.song().return_tracks)):
if channel_strip._track.mixer_device.crossfade_assign == 1: #modified
channel_strip._crossfade_toggle.turn_off()
elif channel_strip._track.mixer_device.crossfade_assign == 0:
channel_strip._crossfade_toggle.send_value(1)
else:
channel_strip._crossfade_toggle.send_value(2)
return _on_cf_assign_changed
"""a closure fix for banking when we deassign the bank buttons and still want to change bank indexes"""
def device_is_banking_enabled(self, device):
def _is_banking_enabled():
return True
return _is_banking_enabled
# a
|
apache-2.0
| -6,756,718,938,802,655,000
| 51.451333
| 368
| 0.711958
| false
| 3.259155
| false
| false
| false
|
zenwarr/microhex
|
src/hex/struct.py
|
1
|
1291
|
class AbstractDataType(object):
def __init__(self, name):
self.name = name
self.fixedSize = True
def parse(self, cursor):
"""Should return Value structure"""
raise NotImplementedError()
class Integer(AbstractDataType):
def __init__(self, binary_format, signed=True):
pass
def parse(self, cursor):
return struct.unpack(...)
class ZeroString(AbstractDataType):
def __init__(self, encoding):
pass
def parse(self, cursor):
offset = 0
while not cursor.atEnd(offset) and cursor[offset] != 0:
pass
return self.fromEncoding(cursor[:offset])
class PascalString(AbstractDataType):
def __init__(self, encoding):
pass
def parse(self, cursor):
string_length = Integer(signed=False).parse(cursor).value
return self.fromEncoding(cursor[:string_length])
class Win32_UnicodeString(AbstractDataType):
pass
class Enumeration(AbstractDataType):
def __init__(self, primary_type, members):
pass
def parse(self, cursor):
value = self.primaryType.parse(cursor).value
if value in self.members:
return self.members[value]
class Structure(AbstractDataType):
def __init__(self, members):
pass
|
mit
| -4,922,127,411,357,640,000
| 22.053571
| 65
| 0.630519
| false
| 4.260726
| false
| false
| false
|
bitmazk/django-multilingual-news
|
multilingual_news/south_migrations/0014_auto__add_field_newsentry_author_user.py
|
1
|
15887
|
# flake8: noqa
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
no_dry_run = True
def forwards(self, orm):
# Adding field 'NewsEntry.author_user'
db.add_column(u'multilingual_news_newsentry', 'author_user',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='user_entries', null=True, to=orm['auth.User']),
keep_default=False)
for entry in orm['multilingual_news.NewsEntry'].objects.all():
entry.author_user = entry.author
def backwards(self, orm):
for entry in orm['multilingual_news.NewsEntry'].objects.all():
entry.author = entry.author_user
# Deleting field 'NewsEntry.author_user'
db.delete_column(u'multilingual_news_newsentry', 'author_user_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'multilingual_news.category': {
'Meta': {'object_name': 'Category'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['multilingual_news.Category']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '512'})
},
u'multilingual_news.categoryplugin': {
'Meta': {'object_name': 'CategoryPlugin', '_ormbases': ['cms.CMSPlugin']},
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['multilingual_news.Category']", 'symmetrical': 'False'}),
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'template_argument': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'multilingual_news.categorytranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'CategoryTranslation', 'db_table': "u'multilingual_news_category_translation'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['multilingual_news.Category']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'multilingual_news.newsentry': {
'Meta': {'ordering': "('-pub_date',)", 'object_name': 'NewsEntry'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_entries'", 'null': 'True', 'to': u"orm['auth.User']"}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'newsentries'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['multilingual_news.Category']"}),
'content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'multilingual_news_contents'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
'excerpt': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'multilingual_news_excerpts'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'blank': 'True'}),
'image_float': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'image_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'image_source_text': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_source_url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'image_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'multilingual_news.newsentrytranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'NewsEntryTranslation', 'db_table': "u'multilingual_news_newsentry_translation'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['multilingual_news.NewsEntry']"}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'meta_title': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '512'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
u'multilingual_news.recentplugin': {
'Meta': {'object_name': 'RecentPlugin', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'current_language_only': ('django.db.models.fields.BooleanField', [], {}),
'limit': ('django.db.models.fields.PositiveIntegerField', [], {})
}
}
complete_apps = ['multilingual_news']
|
mit
| 4,539,886,842,019,949,000
| 83.957219
| 218
| 0.563102
| false
| 3.603311
| false
| false
| false
|
gabberthomson/fm_finpy
|
ois_products.py
|
1
|
4520
|
from date_conventions import *
class OvernightIndexSwap:
''' We define the product by its:
- startDate
- endDate
- floatingLegNominal: the nominal used to compute the flows of the floating leg:
if positive the flows are received, negative paid
- fixedLegDates: the union of the start and end dates of all of the flows paid by the fixed leg (each flow has two
dates needed to computed the accrual period, the end date of the i-th flow coincides with the start
date of the i+1-th flow)
- fixedRate: the coupon paid/received in the fixed leg
- fixedLegNominal: the nominal used to compute the flows of the floating leg:
if positive the flows are received, negative paid
'''
def __init__(self, startDate, endDate, floatingLegNominal, fixedLegDates, fixedRate, fixedLegNominal):
# we want opposite signs for the two nominals: if one leg is paid, the other is received
# if this is not the case generates an error that will stop the program
if floatingLegNominal * fixedLegNominal > 0:
raise "Nominal must have opposite sign"
# store the input variables
self.startDate = startDate
self.endDate = endDate
self.fixedRate = fixedRate
self.fixedLegDates = fixedLegDates
self.floatingLegNominal = floatingLegNominal
self.fixedLegNominal = fixedLegNominal
# With this method we compute the value of the floating leg at the observation date of the discount curve
def npv_floating_leg(self, discountCurve):
# this formula comes from the fact that for OIS the evaluation method is still the same of
# the "old" world with just one single curve for forward rate estimation and flow discounting
floatingleg_npv = discountCurve.df(self.startDate) - discountCurve.df(self.endDate)
# We multiply the result for the nominal before returning it
return floatingleg_npv * self.floatingLegNominal
def npv_fixed_leg(self, discountCurve):
# we now evaluate the fixed leg
fixed_npv = 0
for i in range(len(self.fixedLegDates) - 1):
startPeriod = self.fixedLegDates[i]
endPeriod = self.fixedLegDates[i+1]
tau = dc_act360(startPeriod, endPeriod)
df = discountCurve.df(endPeriod)
fixed_npv = fixed_npv + df * tau * self.fixedRate
# We multiply the result for the nominal before returning it
return fixed_npv * self.fixedLegNominal
def npv(self, discountCurve):
# the npv is the sum of the floating and fixed leg values (taken with their sign)
floatingleg_npv = self.npv_floating_leg(discountCurve)
fixed_npv = self.npv_fixed_leg(discountCurve)
# we sum them (the nominal have opposite sign)
npv = fixed_npv + floatingleg_npv
return npv
# This function just makes life easier; it allows to create a standard OIS with less
# parameters because it uses some common conventions:
# - startDate: the start date of the swap
# - maturity: the maturity of the swap express as a number of months (2 year: 24 months)
# - fixedTenor: the frequency of the fixed leg expressed in months: semi-annual payments -> fixedTenor = 6
#               Market convention is 12 months
# - nominal: the absolute value nominal of the swap (1 is 1 Eur for example)
# - swapType: a string that can be "receiver" (it means that the fixed rate is received) or "payer"
def buildOIS(startDate, maturity, fixedTenor, fixedRate, nominal=1, swapType="receiver"):
    """Build an OvernightIndexSwap using standard market conventions.

    Raises
    ------
    ValueError
        If swapType is neither "receiver" nor "payer".
    """
    endDate = startDate + relativedelta(months=maturity)
    fixedLegDates = dates_generator(fixedTenor, startDate, endDate)
    if swapType == "receiver":
        # receiver: the fixed leg is an asset, the floating leg a liability
        fixedLegNominal = nominal
        floatingLegNominal = -nominal
    elif swapType == "payer":
        fixedLegNominal = -nominal
        floatingLegNominal = nominal
    else:
        # bug fix: raising a plain string ('raise "..."') is a TypeError
        # since Python 2.6 — exceptions must derive from BaseException
        raise ValueError("swapType not supported: %r" % swapType)
    ois = OvernightIndexSwap(startDate, endDate, floatingLegNominal, fixedLegDates, fixedRate, fixedLegNominal)
    return ois
from ir_curves import DiscountCurve
# Smoke test: price a toy 1-year OIS on a two-pillar discount curve.
# NOTE: this file uses Python 2 print-statement syntax.
if __name__ == '__main__':
    obsdate = date(2010,1,1)
    pillars = [date(2011,1,1), date(2012,1,1)]
    dfs = [0.9, 0.8]
    dc = DiscountCurve(obsdate, pillars, dfs)
    # we build an Overnight Index Swap with 1 year maturity and strike 8%
    startSwap = date(2010,2,1)
    maturity = 12
    ois = buildOIS(startSwap, maturity, 12, 0.08)
    print "Swap NPV:", ois.npv(dc)
|
mit
| -8,323,141,087,552,210,000
| 46.578947
| 120
| 0.685619
| false
| 3.735537
| false
| false
| false
|
lichengshuang/createvhost
|
python/others/others/scan.py
|
1
|
2273
|
#!/usr/bin/python
#-*- coding:utf-8 -*-
# Port-scan a host list with nmap and email an HTML report (Python 2 script).
import nmap
import re
import mytools as tool
import sys
from multiprocessing import Pool
from functools import partial
# Python 2 hack: re-expose sys.setdefaultencoding (deleted by site.py) so the
# UTF-8 report strings below can be mixed with byte strings without errors.
reload(sys)
sys.setdefaultencoding('utf8')
def nmScan(host, portrange, whitelist):
    """Scan one host's TCP ports with nmap and build an HTML report fragment.

    Parameters
    ----------
    host : str
        IP address to scan.
    portrange : str
        Range in the form 'start-end', e.g. '0-65535'.
    whitelist : list of int
        Ports expected to be open; any other open port is flagged as an alert.

    Returns
    -------
    str or None
        HTML snippet describing the host and each open port, or None when
        portrange is malformed.
    """
    p = re.compile(r"^(\d*)\-(\d*)$")
    portmatch = re.match(p, portrange)
    if not portmatch:
        help()
        # bug fix: previously execution fell through and scanned with an
        # invalid range anyway
        return None
    if host == '121.42.32.172':
        # special case: this host is only expected to expose SMTP
        whitelist = [25, ]
    result = ''
    nm = nmap.PortScanner()
    tmp = nm.scan(host, portrange)
    result = result + "<h2>ip地址:%s 主机名:[%s] ...... %s</h2><hr>" % (host, tmp['scan'][host]['hostname'], tmp['scan'][host]['status']['state'])
    try:
        ports = tmp['scan'][host]['tcp'].keys()
        for port in ports:
            if port not in whitelist:
                info = '<strong><font color=red>Alert:非预期端口</font><strong> '
            else:
                info = '<strong><font color=green>Info:正常开放端口</font><strong> '
            portinfo = "%s <strong>port</strong> : %s <strong>state</strong> : %s <strong>product<strong/> : %s <br>" % (info, port, tmp['scan'][host]['tcp'][port]['state'], tmp['scan'][host]['tcp'][port]['product'])
            result = result + portinfo
    except KeyError:
        # no 'tcp' section: nothing open for this host
        if whitelist:
            # bug fix: whitelist holds ints — ','.join(whitelist) raised
            # TypeError; convert each port to str first
            whitestr = ','.join(str(port) for port in whitelist)
            result = result + "未扫到开放端口!请检查%s端口对应的服务状态" % whitestr
        else:
            result = result + "扫描结果正常,无暴漏端口"
    return result
def help():
    """Print usage information for nmScan and return None."""
    # print(...) with a single argument behaves identically under Python 2
    print("Usage: nmScan(['127.0.0.1',],'0-65535')")
    return None
if __name__ == "__main__":
    hostlist = ['115.231.79.2']
    pool = Pool(5)
    # bind the fixed scan arguments so pool.map only varies the host
    nmargu = partial(nmScan,portrange='0-65535',whitelist=[])
    results = pool.map(nmargu,hostlist)
    #send email
    sender = 'gccmx@163.com'
    receiver = ['gccmx@qq.com',]
    subject = '服务器端口扫描'
    smtpserver = 'smtp.exmail.qq.com'
    smtpuser = 'gccmx@163.com'
    # SECURITY NOTE(review): SMTP credentials are hard-coded in source;
    # move them to an environment variable or config file
    smtppass = 'gccmx163'
    mailcontent = '<br>'.join(results)
    tool.sendemail(sender,receiver,subject,mailcontent,smtpserver,smtpuser,smtppass)
|
apache-2.0
| -457,130,135,378,470,200
| 36.982456
| 349
| 0.553349
| false
| 3.045007
| false
| false
| false
|
ubuntunux/PyEngine3D
|
PyEngine3D/Render/RenderInfo.py
|
1
|
2991
|
import math
from PyEngine3D.Utilities import *
def always_pass(*_args):
    """Null culling predicate: accepts any arguments and never culls.

    Culling functions in this module return True when the object should be
    culled; returning False here means everything is kept.
    """
    return False
def cone_sphere_culling_actor(camera, actor):
    """Cone/sphere culling test: True when the actor's bounding sphere lies
    outside the camera's view cone (i.e. the actor should be culled).

    Uses the camera as the cone apex (half-angle ``camera.half_cone``) and
    the actor's mesh radius scaled by its largest scale component.
    """
    to_actor = actor.transform.pos - camera.transform.pos
    dist = length(to_actor)
    if 0.0 < dist:
        to_actor /= dist
        # angle between the view direction and the actor, minus the cone half-angle
        # (-camera.transform.front is the forward direction here)
        rad = math.acos(np.dot(to_actor, -camera.transform.front)) - camera.half_cone
        # distance from the sphere center to the cone surface (approximate)
        projected_dist = dist * math.sin(rad)
        radius = actor.model.mesh.radius * max(actor.transform.scale)
        if 0.0 < rad and radius < projected_dist:
            # sphere entirely outside the cone's side
            return True
        elif HALF_PI < rad and radius < dist:
            # sphere entirely behind the camera
            return True
    return False
def view_frustum_culling_geometry(camera, light, actor, geometry_bound_box):
    """Frustum culling test for one geometry's bounding sphere.

    Returns True (cull it) when the sphere center projects beyond its own
    radius along any of the camera's first four frustum plane vectors.
    ``light`` and ``actor`` are unused but kept for the culling-func signature.
    """
    offset = geometry_bound_box.bound_center - camera.transform.pos
    return any(geometry_bound_box.radius < np.dot(frustum_vector, offset)
               for frustum_vector in camera.frustum_vectors[:4])
def shadow_culling(camera, light, actor, geometry_bound_box):
    """Shadow-frustum culling: True when the geometry's AABB, projected into
    the light's shadow clip space, lies entirely outside the [-1, 1] cube.

    ``camera`` and ``actor`` are unused but kept for the culling-func signature.
    """
    # project the two AABB corners (as homogeneous points) into shadow clip space
    bound_min = np.dot(np.array([geometry_bound_box.bound_min[0], geometry_bound_box.bound_min[1], geometry_bound_box.bound_min[2], 1.0], dtype=np.float32), light.shadow_view_projection)[: 3]
    bound_max = np.dot(np.array([geometry_bound_box.bound_max[0], geometry_bound_box.bound_max[1], geometry_bound_box.bound_max[2], 1.0], dtype=np.float32), light.shadow_view_projection)[: 3]
    # re-order per axis: projection may flip min/max
    minimum = np.minimum(bound_min, bound_max)
    maximum = np.maximum(bound_min, bound_max)
    # entirely outside on any axis -> cull
    if any(x < -1.0 for x in maximum) or any(1.0 < x for x in minimum):
        return True
    return False
def gather_render_infos(culling_func, camera, light, actor_list, solid_render_infos, translucent_render_infos):
    """Build RenderInfo entries for every visible, non-culled geometry.

    Each surviving geometry is appended to ``translucent_render_infos`` when
    its material instance is translucent, otherwise to ``solid_render_infos``
    (either list may be None to skip that category).
    """
    for actor in actor_list:
        # visibility is a per-actor flag; check it once instead of once per
        # geometry as the original did (behavior is identical)
        if not actor.visible:
            continue
        for i in range(actor.get_geometry_count()):
            if culling_func(camera, light, actor, actor.get_geometry_bound_box(i)):
                continue
            material_instance = actor.get_material_instance(i)
            render_info = RenderInfo()
            render_info.actor = actor
            render_info.geometry = actor.get_geometry(i)
            render_info.geometry_data = actor.get_geometry_data(i)
            render_info.gl_call_list = actor.get_gl_call_list(i)
            render_info.material = material_instance.material if material_instance else None
            render_info.material_instance = material_instance
            if render_info.material_instance is not None and render_info.material_instance.is_translucent():
                if translucent_render_infos is not None:
                    translucent_render_infos.append(render_info)
            elif solid_render_infos is not None:
                solid_render_infos.append(render_info)
class RenderInfo:
    """Plain record describing one geometry to render: the owning actor, the
    geometry buffers/call list, and its material (instance)."""

    # attributes initialized to None; gather_render_infos fills them in
    _FIELDS = ('actor', 'geometry', 'geometry_data', 'gl_call_list',
               'material', 'material_instance')

    def __init__(self):
        for field_name in self._FIELDS:
            setattr(self, field_name, None)
|
bsd-2-clause
| 8,852,743,141,399,126,000
| 37.844156
| 191
| 0.647944
| false
| 3.522968
| false
| false
| false
|
MarkusHackspacher/unknown-horizons
|
horizons/gui/tabs/tabwidget.py
|
1
|
5897
|
# ###################################################
# Copyright (C) 2008-2017 The Unknown Horizons Team
# team@unknown-horizons.org
# This file is part of Unknown Horizons.
#
# Unknown Horizons is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# ###################################################
import logging
import traceback
import weakref
from fife.extensions.pychan.widgets import Container, Icon
from horizons.gui.util import load_uh_widget
from horizons.gui.widgets.imagebutton import ImageButton
from horizons.util.changelistener import metaChangeListenerDecorator
from horizons.util.python.callback import Callback
@metaChangeListenerDecorator('remove')
class TabWidget:
    """Widget made up of several tabs (subpanels), switchable via tab buttons.

    Holds one pychan base widget with a column of tab buttons; exactly one
    tab (``current_tab``) is shown at a time.
    """
    log = logging.getLogger("gui.tabs.tabwidget")

    def __init__(self, ingame_gui, tabs=None, name=None, active_tab=None):
        """
        @param ingame_gui: IngameGui instance
        @param tabs: tab instances to show
        @param name: optional name for the tabwidget
        @param active_tab: int id of tab, 0 <= active_tab < len(tabs)
        """
        super().__init__() #TODO: check if this call is needed
        self.name = name
        self.ingame_gui = ingame_gui
        self._tabs = [] if not tabs else tabs
        self.current_tab = self._tabs[0] # Start with the first tab
        self.current_tab.ensure_loaded() # loading current_tab widget
        self.widget = load_uh_widget("tab_base.xml")
        self.widget.position_technique = 'right-239:top+209'
        self.content = self.widget.findChild(name='content')
        self._init_tab_buttons()
        # select a tab to show (first one is default)
        if active_tab is not None:
            self.show_tab(active_tab)

    def _init_tab_buttons(self):
        """Add enough tabbuttons for all widgets."""
        def on_tab_removal(tabwidget):
            # called when a tab is being removed (via weakref since tabs shouldn't have references to the parent tabwidget)
            # If one tab is removed, the whole tabwidget will die..
            # This is easy usually the desired behavior.
            if tabwidget():
                tabwidget().on_remove()
        # Load buttons
        for index, tab in enumerate(self._tabs):
            # don't add a reference to the tabwidget itself — only a weakref
            tab.add_remove_listener(Callback(on_tab_removal, weakref.ref(self)))
            container = Container(name="container_{}".format(index))
            background = Icon(name="bg_{}".format(index))
            button = ImageButton(name=str(index), size=(50, 50))
            # active tab gets the highlighted background/button images
            if self.current_tab is tab:
                background.image = tab.button_background_image_active
                button.path = tab.path_active
            else:
                background.image = tab.button_background_image
                button.path = tab.path
            button.capture(Callback(self.show_tab, index))
            if hasattr(tab, 'helptext') and tab.helptext:
                button.helptext = tab.helptext
            container.size = (50, 52)
            container.addChild(background)
            container.addChild(button)
            self.content.addChild(container)
        self.widget.size = (54, 55 * len(self._tabs))
        self.widget.adaptLayout()
        self._apply_layout_hack()

    def show_tab(self, number):
        """Used as callback function for the TabButtons.
        @param number: tab number that is to be shown.
        """
        if number >= len(self._tabs):
            # this usually indicates a non-critical error, therefore we can handle it without crashing
            traceback.print_stack()
            self.log.warning("Invalid tab number %s, available tabs: %s", number, self._tabs)
            return
        if self.current_tab.is_visible():
            self.current_tab.hide()
        new_tab = self._tabs[number]
        # restore the (still-)current tab's inactive button imagery
        old_bg = self.content.findChild(name="bg_{}".format(self._tabs.index(self.current_tab)))
        old_bg.image = self.current_tab.button_background_image
        name = str(self._tabs.index(self.current_tab))
        old_button = self.content.findChild(name=name)
        old_button.path = self.current_tab.path
        # NOTE(review): the active background is taken from current_tab (the
        # old tab), not new_tab — harmless if all tabs share the same image,
        # but looks like it was meant to be new_tab's; confirm
        new_bg = self.content.findChild(name="bg_{}".format(number))
        new_bg.image = self.current_tab.button_background_image_active
        new_button = self.content.findChild(name=str(number))
        new_button.path = new_tab.path_active
        self.current_tab = new_tab
        # important to display the tabs correctly in front
        self.widget.hide()
        self.show()
        self._apply_layout_hack()

    def _apply_layout_hack(self):
        # pychan layouting depends on time, it's usually in a better mood later.
        # this introduces some flickering, but fixes #916
        from horizons.extscheduler import ExtScheduler
        def do_apply_hack():
            # just query widget when executing, since if lazy loading is used, the widget
            # does not exist yet in the outer function
            self.current_tab.widget.adaptLayout()
        ExtScheduler().add_new_object(do_apply_hack, self, run_in=0)

    def _draw_widget(self):
        """Draws the widget, but does not show it automatically"""
        # place the tab panel just left of the button column
        self.current_tab.position = (self.widget.position[0] + self.widget.size[0] - 11,
                                     self.widget.position[1] - 52)
        self.current_tab.refresh()

    def show(self):
        """Show the current widget"""
        # show before drawing so that position_technique properly sets
        # button positions (which we want to draw our tabs relative to)
        self.widget.show()
        self._draw_widget()
        self.current_tab.show()
        self.ingame_gui.minimap_to_front()

    def hide(self, caller=None):
        """Hides current tab and this widget"""
        self.current_tab.hide()
        self.widget.hide()
|
gpl-2.0
| 8,626,222,870,787,321,000
| 37.542484
| 114
| 0.709852
| false
| 3.39298
| false
| false
| false
|
brclark-usgs/flopy
|
flopy/utils/reference.py
|
1
|
57544
|
"""
Module spatial referencing for flopy model objects
"""
import sys
import os
import numpy as np
import warnings
class SpatialReference(object):
    """
    a class to locate a structured model grid in x-y space

    Parameters
    ----------
    delr : numpy ndarray
        the model discretization delr vector
        (An array of spacings along a row)
    delc : numpy ndarray
        the model discretization delc vector
        (An array of spacings along a column)
    lenuni : int
        the length units flag from the discretization package
        (default 2)
    xul : float
        the x coordinate of the upper left corner of the grid
        Enter either xul and yul or xll and yll.
    yul : float
        the y coordinate of the upper left corner of the grid
        Enter either xul and yul or xll and yll.
    xll : float
        the x coordinate of the lower left corner of the grid
        Enter either xul and yul or xll and yll.
    yll : float
        the y coordinate of the lower left corner of the grid
        Enter either xul and yul or xll and yll.
    rotation : float
        the counter-clockwise rotation (in degrees) of the grid
    proj4_str: str
        a PROJ4 string that identifies the grid in space. warning: case
        sensitive!
    units : string
        Units for the grid. Must be either feet or meters
    epsg : int
        EPSG code that identifies the grid in space. Can be used in lieu of
        proj4. PROJ4 attribute will auto-populate if there is an internet
        connection(via get_proj4 method).
        See https://www.epsg-registry.org/ or spatialreference.org
    length_multiplier : float
        multiplier to convert model units to spatial reference units.
        delr and delc above will be multiplied by this value. (default=1.)

    Attributes
    ----------
    xedge : ndarray
        array of column edges
    yedge : ndarray
        array of row edges
    xgrid : ndarray
        numpy meshgrid of xedges
    ygrid : ndarray
        numpy meshgrid of yedges
    xcenter : ndarray
        array of column centers
    ycenter : ndarray
        array of row centers
    xcentergrid : ndarray
        numpy meshgrid of column centers
    ycentergrid : ndarray
        numpy meshgrid of row centers
    vertices : 1D array
        1D array of cell vertices for whole grid in C-style (row-major) order
        (same as np.ravel())

    Notes
    -----
    xul and yul can be explicitly (re)set after SpatialReference
    instantiation, but only before any of the other attributes and methods are
    accessed
    """
    # class-level defaults; instances shadow these via __setattr__
    xul, yul = None, None
    xll, yll = None, None
    rotation = 0.
    length_multiplier = 1.
    origin_loc = 'ul'  # or ll
    # fallback attribute values used by load() when no reference info is found
    defaults = {"xul": None, "yul": None, "rotation": 0.,
                "proj4_str": None,
                "units": None, "lenuni": 2, "length_multiplier": None}
    # MODFLOW LENUNI codes <-> unit names
    lenuni_values = {'undefined': 0,
                     'feet': 1,
                     'meters': 2,
                     'centimeters': 3}
    lenuni_text = {v:k for k, v in lenuni_values.items()}
    def __init__(self, delr=np.array([]), delc=np.array([]), lenuni=2,
                 xul=None, yul=None, xll=None, yll=None, rotation=0.0,
                 proj4_str=None, epsg=None, units=None,
                 length_multiplier=None):
        """See the class docstring for parameter descriptions."""
        # scalars are rejected: spacings must be per-row/per-column sequences
        for delrc in [delr, delc]:
            if isinstance(delrc, float) or isinstance(delrc, int):
                msg = ('delr and delcs must be an array or sequences equal in '
                       'length to the number of rows/columns.')
                raise TypeError(msg)
        self.delc = np.atleast_1d(np.array(delc)).astype(np.float64) # * length_multiplier
        self.delr = np.atleast_1d(np.array(delr)).astype(np.float64) # * length_multiplier
        # without spacings the upper-left origin cannot be converted to
        # lower-left; fall back to a zero lower-left origin
        if self.delr.sum() == 0 or self.delc.sum() == 0:
            if xll is None or yll is None:
                msg = ('Warning: no grid spacing or lower-left corner '
                       'supplied. Setting the offset with xul, yul requires '
                       'arguments for delr and delc. Origin will be set to '
                       'zero.')
                print(msg)
                xll, yll = 0, 0
                xul, yul = None, None
        self._lenuni = lenuni
        self._proj4_str = proj4_str
        self._epsg = epsg
        if epsg is not None:
            # look up the PROJ4 string for the EPSG code (may hit the network)
            self._proj4_str = getproj4(self._epsg)
        self.supported_units = ["feet", "meters"]
        self._units = units
        self._length_multiplier = length_multiplier
        self._reset()
        self.set_spatialreference(xul, yul, xll, yll, rotation)
    @property
    def xll(self):
        """x of the lower-left corner; derived from xul when the origin was
        given as upper-left."""
        if self.origin_loc == 'll':
            xll = self._xll if self._xll is not None else 0.
        elif self.origin_loc == 'ul':
            # calculate coords for lower left corner
            xll = self._xul - (np.sin(self.theta) * self.yedge[0] *
                               self.length_multiplier)
        return xll

    @property
    def yll(self):
        """y of the lower-left corner; derived from yul when the origin was
        given as upper-left."""
        if self.origin_loc == 'll':
            yll = self._yll if self._yll is not None else 0.
        elif self.origin_loc == 'ul':
            # calculate coords for lower left corner
            yll = self._yul - (np.cos(self.theta) * self.yedge[0] *
                               self.length_multiplier)
        return yll

    @property
    def xul(self):
        """x of the upper-left corner; derived from xll when the origin was
        given as lower-left."""
        if self.origin_loc == 'll':
            # calculate coords for upper left corner
            xul = self._xll + (np.sin(self.theta) * self.yedge[0] *
                               self.length_multiplier)
        if self.origin_loc == 'ul':
            # calculate coords for lower left corner
            xul = self._xul if self._xul is not None else 0.
        return xul

    @property
    def yul(self):
        """y of the upper-left corner; derived from yll when the origin was
        given as lower-left."""
        if self.origin_loc == 'll':
            # calculate coords for upper left corner
            yul = self._yll + (np.cos(self.theta) * self.yedge[0] *
                               self.length_multiplier)
        if self.origin_loc == 'ul':
            # calculate coords for lower left corner
            yul = self._yul if self._yul is not None else 0.
        return yul

    @property
    def proj4_str(self):
        """PROJ4 string; '+init=' is prepended to bare 'epsg:NNNN' strings and
        the epsg attribute is extracted from it as a side effect."""
        if self._proj4_str is not None and \
                "epsg" in self._proj4_str.lower():
            if "init" not in self._proj4_str.lower():
                proj4_str = "+init=" + self._proj4_str
            else:
                proj4_str = self._proj4_str
            # set the epsg if proj4 specifies it
            tmp = [i for i in self._proj4_str.split() if 'epsg' in i.lower()]
            self._epsg = int(tmp[0].split(':')[1])
        else:
            proj4_str = self._proj4_str
        return proj4_str

    @property
    def epsg(self):
        #don't reset the proj4 string here
        #because proj4 attribute may already be populated
        #(with more details than getproj4 would return)
        #instead reset proj4 when epsg is set
        #(on init or setattr)
        return self._epsg

    @property
    def lenuni(self):
        """MODFLOW LENUNI code of the model length units (see lenuni_values)."""
        return self._lenuni
def _parse_units_from_proj4(self):
units = None
try:
# need this because preserve_units doesn't seem to be
# working for complex proj4 strings. So if an
# epsg code was passed, we have no choice, but if a
# proj4 string was passed, we can just parse it
if "EPSG" in self.proj4_str.upper():
import pyproj
crs = pyproj.Proj(self.proj4_str,
preseve_units=True,
errcheck=True)
proj_str = crs.srs
else:
proj_str = self.proj4_str
# http://proj4.org/parameters.html#units
# from proj4 source code
# "us-ft", "0.304800609601219", "U.S. Surveyor's Foot",
# "ft", "0.3048", "International Foot",
if "units=m" in proj_str:
units = "meters"
elif "units=ft" in proj_str or \
"units=us-ft" in proj_str or \
"to_meters:0.3048" in proj_str:
units = "feet"
return units
except:
pass
    @property
    def units(self):
        """Spatial-reference length units ('meters' or 'feet'); parsed from
        proj4 when not set explicitly, defaulting to meters."""
        if self._units is not None:
            units = self._units.lower()
        else:
            units = self._parse_units_from_proj4()
        if units is None:
            #print("warning: assuming SpatialReference units are meters")
            units = 'meters'
        assert units in self.supported_units
        return units

    @property
    def length_multiplier(self):
        """Attempt to identify multiplier for converting from
        model units to sr units, defaulting to 1."""
        lm = None
        if self._length_multiplier is not None:
            lm = self._length_multiplier
        else:
            # derive the factor from model vs. spatial-reference units
            if self.model_length_units == 'feet':
                if self.units == 'meters':
                    lm = 0.3048
                elif self.units == 'feet':
                    lm = 1.
            elif self.model_length_units == 'meters':
                if self.units == 'feet':
                    lm = 1/.3048
                elif self.units == 'meters':
                    lm = 1.
            elif self.model_length_units == 'centimeters':
                if self.units == 'meters':
                    lm = 1/100.
                elif self.units == 'feet':
                    lm = 1/30.48
            else: # model units unspecified; default to 1
                lm = 1.
        return lm

    @property
    def model_length_units(self):
        """Model length units as text ('feet', 'meters', ...)."""
        return self.lenuni_text[self.lenuni]

    @property
    def bounds(self):
        """Return bounding box in shapely order."""
        xmin, xmax, ymin, ymax = self.get_extent()
        return xmin, ymin, xmax, ymax
    @staticmethod
    def load(namefile=None, reffile='usgs.model.reference'):
        """Attempts to load spatial reference information from
        the following files (in order):
        1) usgs.model.reference
        2) NAM file (header comment)
        3) SpatialReference.default dictionary
        """
        # NOTE(review): namefile=None would crash os.path.split below — the
        # default seems unusable; confirm callers always pass a path
        reffile = os.path.join(os.path.split(namefile)[0], reffile)
        d = SpatialReference.read_usgs_model_reference_file(reffile)
        if d is not None:
            return d
        d = SpatialReference.attribs_from_namfile_header(namefile)
        if d is not None:
            return d
        else:
            return SpatialReference.defaults

    @staticmethod
    def attribs_from_namfile_header(namefile):
        """Parse spatial reference attributes from the '#' comment header of a
        MODFLOW name file; unparseable items are silently skipped."""
        # check for reference info in the nam file header
        d = SpatialReference.defaults.copy()
        if namefile is None:
            return None
        header = []
        with open(namefile, 'r') as f:
            for line in f:
                if not line.startswith('#'):
                    break
                header.extend(line.strip().replace('#', '').split(';'))
        for item in header:
            if "xul" in item.lower():
                try:
                    d['xul'] = float(item.split(':')[1])
                except:
                    pass
            elif "yul" in item.lower():
                try:
                    d['yul'] = float(item.split(':')[1])
                except:
                    pass
            elif "rotation" in item.lower():
                try:
                    d['rotation'] = float(item.split(':')[1])
                except:
                    pass
            elif "proj4_str" in item.lower():
                try:
                    # re-join because the proj4 string itself contains ':'
                    proj4_str = ':'.join(item.split(':')[1:]).strip()
                    if proj4_str.lower() == 'none':
                        proj4_str = None
                    d['proj4_str'] = proj4_str
                except:
                    pass
            elif "start" in item.lower():
                try:
                    d['start_datetime'] = item.split(':')[1].strip()
                except:
                    pass
            # spatial reference length units
            elif "units" in item.lower():
                d['units'] = item.split(':')[1].strip()
            # model length units
            elif "lenuni" in item.lower():
                d['lenuni'] = int(item.split(':')[1].strip())
            # multiplier for converting from model length units to sr length units
            elif "length_multiplier" in item.lower():
                d['length_multiplier'] = float(item.split(':')[1].strip())
        return d
    @staticmethod
    def read_usgs_model_reference_file(reffile='usgs.model.reference'):
        """read spatial reference info from the usgs.model.reference file
        https://water.usgs.gov/ogw/policy/gw-model/modelers-setup.html

        Returns a dict of SpatialReference attributes, or None when the file
        does not exist."""
        ITMUNI = {0: "undefined", 1: "seconds", 2: "minutes", 3: "hours", 4: "days",
                  5: "years"}
        itmuni_values = {v: k for k, v in ITMUNI.items()}
        d = SpatialReference.defaults.copy()
        d.pop('proj4_str') # discard default to avoid confusion with epsg code if entered
        if os.path.exists(reffile):
            # NOTE(review): 'input' shadows the builtin, and line.strip()[0]
            # raises IndexError on blank lines — confirm reference files
            # never contain empty lines
            with open(reffile) as input:
                for line in input:
                    if line.strip()[0] != '#':
                        # keyword, then value (comments after '#' dropped)
                        info = line.strip().split('#')[0].split()
                        if len(info) > 1:
                            d[info[0].lower()] = ' '.join(info[1:])
            d['xul'] = float(d['xul'])
            d['yul'] = float(d['yul'])
            d['rotation'] = float(d['rotation'])
            # convert the model.reference text to a lenuni value
            # (these are the model length units)
            if 'length_units' in d.keys():
                d['lenuni'] = SpatialReference.lenuni_values[d['length_units']]
            if 'time_units' in d.keys():
                d['itmuni'] = itmuni_values[d['time_units']]
            if 'start_date' in d.keys():
                start_datetime = d.pop('start_date')
                if 'start_time' in d.keys():
                    start_datetime += ' {}'.format(d.pop('start_time'))
                d['start_datetime'] = start_datetime
            if 'epsg' in d.keys():
                try:
                    d['epsg'] = int(d['epsg'])
                except Exception as e:
                    raise Exception(
                        "error reading epsg code from file:\n" + str(e))
            # this prioritizes epsg over proj4 if both are given
            # (otherwise 'proj4' entry will be dropped below)
            elif 'proj4' in d.keys():
                d['proj4_str'] = d['proj4']
            # drop any other items that aren't used in sr class
            d = {k:v for k, v in d.items() if k.lower() in SpatialReference.defaults.keys()
                 or k.lower() in {'epsg', 'start_datetime', 'itmuni'}}
            return d
        else:
            return None
    def __setattr__(self, key, value):
        """Intercept attribute assignment: known georeference attributes are
        coerced and stored under their private names, and any such assignment
        invalidates the cached grid arrays via _reset()."""
        reset = True
        if key == "delr":
            super(SpatialReference, self). \
                __setattr__("delr", np.atleast_1d(np.array(value)))
        elif key == "delc":
            super(SpatialReference, self). \
                __setattr__("delc", np.atleast_1d(np.array(value)))
        elif key == "xul":
            super(SpatialReference, self). \
                __setattr__("_xul", float(value))
            # setting an upper-left coordinate switches the origin convention
            self.origin_loc = 'ul'
        elif key == "yul":
            super(SpatialReference, self). \
                __setattr__("_yul", float(value))
            self.origin_loc = 'ul'
        elif key == "xll":
            super(SpatialReference, self). \
                __setattr__("_xll", float(value))
            self.origin_loc = 'll'
        elif key == "yll":
            super(SpatialReference, self). \
                __setattr__("_yll", float(value))
            self.origin_loc = 'll'
        elif key == "length_multiplier":
            super(SpatialReference, self). \
                __setattr__("_length_multiplier", float(value))
            #self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
            #                yll=self.yll)
        elif key == "rotation":
            super(SpatialReference, self). \
                __setattr__("rotation", float(value))
            #self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
            #                yll=self.yll)
        elif key == "lenuni":
            super(SpatialReference, self). \
                __setattr__("_lenuni", int(value))
            #self.set_origin(xul=self.xul, yul=self.yul, xll=self.xll,
            #                yll=self.yll)
        elif key == "units":
            value = value.lower()
            assert value in self.supported_units
            super(SpatialReference, self). \
                __setattr__("_units", value)
        elif key == "proj4_str":
            super(SpatialReference, self). \
                __setattr__("_proj4_str", value)
            # reset the units and epsg
            units = self._parse_units_from_proj4()
            if units is not None:
                self._units = units
            self._epsg = None
        elif key == "epsg":
            super(SpatialReference, self). \
                __setattr__("_epsg", value)
            # reset the units and proj4
            self._units = None
            self._proj4_str = getproj4(self._epsg)
        else:
            # everything else is a plain attribute; no cache invalidation
            super(SpatialReference, self).__setattr__(key, value)
            reset = False
        if reset:
            self._reset()
def reset(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
return
def _reset(self):
self._xgrid = None
self._ygrid = None
self._ycentergrid = None
self._xcentergrid = None
self._vertices = None
return
    @property
    def nrow(self):
        """Number of rows (length of delc)."""
        return self.delc.shape[0]

    @property
    def ncol(self):
        """Number of columns (length of delr)."""
        return self.delr.shape[0]
def __eq__(self, other):
if not isinstance(other, SpatialReference):
return False
if other.xul != self.xul:
return False
if other.yul != self.yul:
return False
if other.rotation != self.rotation:
return False
if other.proj4_str != self.proj4_str:
return False
return True
@classmethod
def from_namfile(cls, namefile):
attribs = SpatialReference.attribs_from_namfile_header(namefile)
try:
attribs.pop("start_datetime")
except:
pass
return SpatialReference(**attribs)
@classmethod
def from_gridspec(cls, gridspec_file, lenuni=0):
f = open(gridspec_file, 'r')
raw = f.readline().strip().split()
nrow = int(raw[0])
ncol = int(raw[1])
raw = f.readline().strip().split()
xul, yul, rot = float(raw[0]), float(raw[1]), float(raw[2])
delr = []
j = 0
while j < ncol:
raw = f.readline().strip().split()
for r in raw:
if '*' in r:
rraw = r.split('*')
for n in range(int(rraw[0])):
delr.append(float(rraw[1]))
j += 1
else:
delr.append(float(r))
j += 1
delc = []
i = 0
while i < nrow:
raw = f.readline().strip().split()
for r in raw:
if '*' in r:
rraw = r.split('*')
for n in range(int(rraw[0])):
delc.append(float(rraw[1]))
i += 1
else:
delc.append(float(r))
i += 1
f.close()
return cls(np.array(delr), np.array(delc),
lenuni, xul=xul, yul=yul, rotation=rot)
@property
def attribute_dict(self):
return {"xul": self.xul, "yul": self.yul, "rotation": self.rotation,
"proj4_str": self.proj4_str}
    def set_spatialreference(self, xul=None, yul=None, xll=None, yll=None,
                             rotation=0.0):
        """
        set spatial reference - can be called from model instance

        Exactly one corner convention may be supplied: upper-left (xul, yul)
        or lower-left (xll, yll); mixing them raises ValueError.
        """
        if xul is not None and xll is not None:
            msg = ('Both xul and xll entered. Please enter either xul, yul or '
                   'xll, yll.')
            raise ValueError(msg)
        if yul is not None and yll is not None:
            msg = ('Both yul and yll entered. Please enter either xul, yul or '
                   'xll, yll.')
            raise ValueError(msg)
        # set the origin priority based on the left corner specified
        # (the other left corner will be calculated).  If none are specified
        # then default to upper left
        if xul is None and yul is None and xll is None and yll is None:
            self.origin_loc = 'ul'
            xul = 0.
            yul = self.delc.sum()
        elif xll is not None:
            self.origin_loc = 'll'
        else:
            self.origin_loc = 'ul'
        self.rotation = rotation
        # store both corner conventions; the properties derive whichever one
        # was not supplied
        self._xll = xll if xll is not None else 0.
        self._yll = yll if yll is not None else 0.
        self._xul = xul if xul is not None else 0.
        self._yul = yul if yul is not None else 0.
        #self.set_origin(xul, yul, xll, yll)
        return
def __repr__(self):
s = "xul:{0:<.10G}; yul:{1:<.10G}; rotation:{2:<G}; ". \
format(self.xul, self.yul, self.rotation)
s += "proj4_str:{0}; ".format(self.proj4_str)
s += "units:{0}; ".format(self.units)
s += "lenuni:{0}; ".format(self.lenuni)
s += "length_multiplier:{}".format(self.length_multiplier)
return s
    def set_origin(self, xul=None, yul=None, xll=None, yll=None):
        """Set the grid origin from whichever corner convention is active
        (origin_loc) and compute the other corner from rotation and extent.

        Note: assignments here go through the custom __setattr__ above.
        """
        if self.origin_loc == 'll':
            # calculate coords for upper left corner
            self._xll = xll if xll is not None else 0.
            self.yll = yll if yll is not None else 0.
            self.xul = self._xll + (np.sin(self.theta) * self.yedge[0] *
                                    self.length_multiplier)
            self.yul = self.yll + (np.cos(self.theta) * self.yedge[0] *
                                   self.length_multiplier)
        if self.origin_loc == 'ul':
            # calculate coords for lower left corner
            self.xul = xul if xul is not None else 0.
            self.yul = yul if yul is not None else 0.
            self._xll = self.xul - (np.sin(self.theta) * self.yedge[0] *
                                    self.length_multiplier)
            self.yll = self.yul - (np.cos(self.theta) * self.yedge[0] *
                                   self.length_multiplier)
        self._reset()
        return
    @property
    def theta(self):
        """Grid rotation in radians (sign flipped relative to `rotation`)."""
        return -self.rotation * np.pi / 180.

    @property
    def xedge(self):
        """Column edge x coordinates in model space (size ncol + 1)."""
        return self.get_xedge_array()

    @property
    def yedge(self):
        """Row edge y coordinates in model space (size nrow + 1)."""
        return self.get_yedge_array()

    @property
    def xgrid(self):
        """Meshgrid of x edges in real-world coordinates (cached)."""
        if self._xgrid is None:
            self._set_xygrid()
        return self._xgrid

    @property
    def ygrid(self):
        """Meshgrid of y edges in real-world coordinates (cached)."""
        if self._ygrid is None:
            self._set_xygrid()
        return self._ygrid

    @property
    def xcenter(self):
        """Cell-center x coordinates per column (model space)."""
        return self.get_xcenter_array()

    @property
    def ycenter(self):
        """Cell-center y coordinates per row (model space)."""
        return self.get_ycenter_array()

    @property
    def ycentergrid(self):
        """Meshgrid of cell-center y coordinates in real-world space (cached)."""
        if self._ycentergrid is None:
            self._set_xycentergrid()
        return self._ycentergrid

    @property
    def xcentergrid(self):
        """Meshgrid of cell-center x coordinates in real-world space (cached)."""
        if self._xcentergrid is None:
            self._set_xycentergrid()
        return self._xcentergrid

    def _set_xycentergrid(self):
        # build the model-space center meshgrid, then georeference it
        self._xcentergrid, self._ycentergrid = np.meshgrid(self.xcenter,
                                                           self.ycenter)
        self._xcentergrid, self._ycentergrid = self.transform(
            self._xcentergrid,
            self._ycentergrid)

    def _set_xygrid(self):
        # build the model-space edge meshgrid, then georeference it
        self._xgrid, self._ygrid = np.meshgrid(self.xedge, self.yedge)
        self._xgrid, self._ygrid = self.transform(self._xgrid, self._ygrid)
@staticmethod
def rotate(x, y, theta, xorigin=0., yorigin=0.):
"""
Given x and y array-like values calculate the rotation about an
arbitrary origin and then return the rotated coordinates. theta is in
degrees.
"""
# jwhite changed on Oct 11 2016 - rotation is now positive CCW
# theta = -theta * np.pi / 180.
theta = theta * np.pi / 180.
xrot = xorigin + np.cos(theta) * (x - xorigin) - np.sin(theta) * \
(y - yorigin)
yrot = yorigin + np.sin(theta) * (x - xorigin) + np.cos(theta) * \
(y - yorigin)
return xrot, yrot
    def transform(self, x, y, inverse=False):
        """
        Given x and y array-like values, apply rotation, scale and offset,
        to convert them from model coordinates to real-world coordinates.

        With inverse=True the operations are applied in reverse order to map
        real-world coordinates back to model coordinates.
        """
        if isinstance(x, list):
            x = np.array(x)
            y = np.array(y)
        if not np.isscalar(x):
            # copy so the in-place +=/-= below don't mutate caller arrays
            x, y = x.copy(), y.copy()
        if not inverse:
            # scale, translate to the lower-left corner, then rotate about it
            x *= self.length_multiplier
            y *= self.length_multiplier
            x += self.xll
            y += self.yll
            x, y = SpatialReference.rotate(x, y, theta=self.rotation,
                                           xorigin=self.xll, yorigin=self.yll)
        else:
            # inverse: un-rotate, un-translate, un-scale
            x, y = SpatialReference.rotate(x, y, -self.rotation,
                                           self.xll, self.yll)
            x -= self.xll
            y -= self.yll
            x /= self.length_multiplier
            y /= self.length_multiplier
        return x, y
def get_extent(self):
"""
Get the extent of the rotated and offset grid
Return (xmin, xmax, ymin, ymax)
"""
x0 = self.xedge[0]
x1 = self.xedge[-1]
y0 = self.yedge[0]
y1 = self.yedge[-1]
# upper left point
x0r, y0r = self.transform(x0, y0)
# upper right point
x1r, y1r = self.transform(x1, y0)
# lower right point
x2r, y2r = self.transform(x1, y1)
# lower left point
x3r, y3r = self.transform(x0, y1)
xmin = min(x0r, x1r, x2r, x3r)
xmax = max(x0r, x1r, x2r, x3r)
ymin = min(y0r, y1r, y2r, y3r)
ymax = max(y0r, y1r, y2r, y3r)
return (xmin, xmax, ymin, ymax)
    def get_grid_lines(self):
        """
        Get the grid lines as a list

        Each entry is [(x0, y0), (x1, y1)] in real-world coordinates;
        vertical (column) lines come first, then horizontal (row) lines.
        """
        xmin = self.xedge[0]
        xmax = self.xedge[-1]
        ymin = self.yedge[-1]
        ymax = self.yedge[0]
        lines = []
        # Vertical lines
        for j in range(self.ncol + 1):
            x0 = self.xedge[j]
            x1 = x0
            y0 = ymin
            y1 = ymax
            x0r, y0r = self.transform(x0, y0)
            x1r, y1r = self.transform(x1, y1)
            lines.append([(x0r, y0r), (x1r, y1r)])
        # horizontal lines
        for i in range(self.nrow + 1):
            x0 = xmin
            x1 = xmax
            y0 = self.yedge[i]
            y1 = y0
            x0r, y0r = self.transform(x0, y0)
            x1r, y1r = self.transform(x1, y1)
            lines.append([(x0r, y0r), (x1r, y1r)])
        return lines

    def get_grid_line_collection(self, **kwargs):
        """
        Get a LineCollection of the grid

        kwargs are passed straight to matplotlib's LineCollection.
        """
        from matplotlib.collections import LineCollection
        lc = LineCollection(self.get_grid_lines(), **kwargs)
        return lc
    def get_xcenter_array(self):
        """
        Return a numpy one-dimensional float array that has the cell center x
        coordinate for every column in the grid in model space - not offset or rotated.
        """
        # running edge position minus half a cell width
        x = np.add.accumulate(self.delr) - 0.5 * self.delr
        return x

    def get_ycenter_array(self):
        """
        Return a numpy one-dimensional float array that has the cell center x
        coordinate for every row in the grid in model space - not offset of rotated.
        """
        # y decreases with row index: measured down from the total height Ly
        Ly = np.add.reduce(self.delc)
        y = Ly - (np.add.accumulate(self.delc) - 0.5 *
                  self.delc)
        return y

    def get_xedge_array(self):
        """
        Return a numpy one-dimensional float array that has the cell edge x
        coordinates for every column in the grid in model space - not offset
        or rotated.  Array is of size (ncol + 1)
        """
        xedge = np.concatenate(([0.], np.add.accumulate(self.delr)))
        return xedge

    def get_yedge_array(self):
        """
        Return a numpy one-dimensional float array that has the cell edge y
        coordinates for every row in the grid in model space - not offset or
        rotated. Array is of size (nrow + 1)
        """
        # edges run from the total height down to 0 (row 0 is at the top)
        length_y = np.add.reduce(self.delc)
        yedge = np.concatenate(([length_y], length_y -
                                np.add.accumulate(self.delc)))
        return yedge
def write_gridSpec(self, filename):
""" write a PEST-style grid specification file
"""
f = open(filename, 'w')
f.write(
"{0:10d} {1:10d}\n".format(self.delc.shape[0], self.delr.shape[0]))
f.write("{0:15.6E} {1:15.6E} {2:15.6E}\n".format(self.xul, self.yul,
self.rotation))
for r in self.delr:
f.write("{0:15.6E} ".format(r))
f.write('\n')
for c in self.delc:
f.write("{0:15.6E} ".format(c))
f.write('\n')
return
    def write_shapefile(self, filename='grid.shp', epsg=None, prj=None):
        """Write a shapefile of the grid with just the row and column
        attributes.

        Parameters
        ----------
        filename : str
            name of the shapefile to write (default 'grid.shp')
        epsg : int, optional
            EPSG code for the projection; falls back to ``self.epsg`` when
            neither ``epsg`` nor ``prj`` is supplied
        prj : str, optional
            projection file to use instead of an EPSG code
        """
        from ..export.shapefile_utils import write_grid_shapefile2
        if epsg is None and prj is None:
            epsg = self.epsg
        write_grid_shapefile2(filename, self, array_dict={}, nan_val=-1.0e9,
                              epsg=epsg, prj=prj)
def get_vertices(self, i, j):
pts = []
xgrid, ygrid = self.xgrid, self.ygrid
pts.append([xgrid[i, j], ygrid[i, j]])
pts.append([xgrid[i + 1, j], ygrid[i + 1, j]])
pts.append([xgrid[i + 1, j + 1], ygrid[i + 1, j + 1]])
pts.append([xgrid[i, j + 1], ygrid[i, j + 1]])
pts.append([xgrid[i, j], ygrid[i, j]])
return pts
def get_rc(self, x, y):
"""Return the row and column of a point or sequence of points
in real-world coordinates.
Parameters
----------
x : scalar or sequence of x coordinates
y : scalar or sequence of y coordinates
Returns
-------
r : row or sequence of rows (zero-based)
c : column or sequence of columns (zero-based)
"""
if np.isscalar(x):
c = (np.abs(self.xcentergrid[0] - x)).argmin()
r = (np.abs(self.ycentergrid[:, 0] - y)).argmin()
else:
xcp = np.array([self.xcentergrid[0]] * (len(x)))
ycp = np.array([self.ycentergrid[:, 0]] * (len(x)))
c = (np.abs(xcp.transpose() - x)).argmin(axis=0)
r = (np.abs(ycp.transpose() - y)).argmin(axis=0)
return r, c
def get_grid_map_plotter(self):
"""
Create a QuadMesh plotting object for this grid
Returns
-------
quadmesh : matplotlib.collections.QuadMesh
"""
from matplotlib.collections import QuadMesh
verts = np.vstack((self.xgrid.flatten(), self.ygrid.flatten())).T
qm = QuadMesh(self.ncol, self.nrow, verts)
return qm
def plot_array(self, a, ax=None, **kwargs):
"""
Create a QuadMesh plot of the specified array using pcolormesh
Parameters
----------
a : np.ndarray
Returns
-------
quadmesh : matplotlib.collections.QuadMesh
"""
import matplotlib.pyplot as plt
if ax is None:
ax = plt.gca()
qm = ax.pcolormesh(self.xgrid, self.ygrid, a, **kwargs)
return qm
def export_array(self, filename, a, nodata=-9999,
fieldname='value',
**kwargs):
"""Write a numpy array to Arc Ascii grid
or shapefile with the model reference.
Parameters
----------
filename : str
Path of output file. Export format is determined by
file extention.
'.asc' Arc Ascii grid
'.shp' Shapefile
a : 2D numpy.ndarray
Array to export
nodata : scalar
Value to assign to np.nan entries (default -9999)
fieldname : str
Attribute field name for array values (shapefile export only).
(default 'values')
kwargs:
keyword arguments to np.savetxt (ascii)
or flopy.export.shapefile_utils.write_grid_shapefile2
Notes
-----
Rotated grids will be unrotated prior to export to Arc Ascii format,
using scipy.ndimage.rotate. As a result, their pixels will no longer
coincide exactly with the model grid.
"""
if filename.lower().endswith(".asc"):
if len(np.unique(self.delr)) != len(np.unique(self.delc)) != 1 \
or self.delr[0] != self.delc[0]:
raise ValueError('Arc ascii arrays require a uniform grid.')
xll, yll = self.xll, self.yll
cellsize = self.delr[0] * self.length_multiplier
a = a.copy()
a[np.isnan(a)] = nodata
if self.rotation != 0:
try:
from scipy.ndimage import rotate
a = rotate(a, self.rotation, cval=nodata)
height_rot, width_rot = a.shape
xmin, ymin, xmax, ymax = self.bounds
dx = (xmax - xmin) / width_rot
dy = (ymax - ymin) / height_rot
cellsize = dx
xll, yll = xmin, ymin
except ImportError:
print('scipy package required to export rotated grid.')
pass
filename = '.'.join(filename.split('.')[:-1]) + '.asc' # enforce .asc ending
nrow, ncol = a.shape
a[np.isnan(a)] = nodata
txt = 'ncols {:d}\n'.format(ncol)
txt += 'nrows {:d}\n'.format(nrow)
txt += 'xllcorner {:f}\n'.format(xll)
txt += 'yllcorner {:f}\n'.format(yll)
txt += 'cellsize {}\n'.format(cellsize)
txt += 'NODATA_value {:.0f}\n'.format(nodata)
with open(filename, 'w') as output:
output.write(txt)
with open(filename, 'ab') as output:
np.savetxt(output, a, **kwargs)
print('wrote {}'.format(filename))
elif filename.lower().endswith(".shp"):
from ..export.shapefile_utils import write_grid_shapefile2
epsg = kwargs.get('epsg', None)
prj = kwargs.get('prj', None)
if epsg is None and prj is None:
epsg = self.epsg
write_grid_shapefile2(filename, self, array_dict={fieldname: a}, nan_val=nodata,
epsg=epsg, prj=prj)
def contour_array(self, ax, a, **kwargs):
"""
Create a QuadMesh plot of the specified array using pcolormesh
Parameters
----------
ax : matplotlib.axes.Axes
ax to add the contours
a : np.ndarray
array to contour
Returns
-------
contour_set : ContourSet
"""
contour_set = ax.contour(self.xcentergrid, self.ycentergrid,
a, **kwargs)
return contour_set
    @property
    def vertices(self):
        """Per-cell vertex rings for the whole grid, computed lazily on
        first access and cached in ``self._vertices``."""
        if self._vertices is None:
            self._set_vertices()
        return self._vertices
    def _set_vertices(self):
        """populate vertices for the whole grid"""
        # row/column index of every cell, flattened so one vectorized
        # get_vertices call covers all cells at once
        jj, ii = np.meshgrid(range(self.ncol), range(self.nrow))
        jj, ii = ii.ravel(), ii.ravel() if False else (jj.ravel(), ii.ravel())
        # get_vertices yields a ring of 5 (x, y) pairs per cell; move the
        # cell axis first so each element of vrts is one cell's ring
        vrts = np.array(self.get_vertices(ii, jj)).transpose([2, 0, 1])
        self._vertices = [v.tolist() for v in vrts]  # conversion to lists
        """
        code above is 3x faster
        xgrid, ygrid = self.xgrid, self.ygrid
        ij = list(map(list, zip(xgrid[:-1, :-1].ravel(), ygrid[:-1, :-1].ravel())))
        i1j = map(list, zip(xgrid[1:, :-1].ravel(), ygrid[1:, :-1].ravel()))
        i1j1 = map(list, zip(xgrid[1:, 1:].ravel(), ygrid[1:, 1:].ravel()))
        ij1 = map(list, zip(xgrid[:-1, 1:].ravel(), ygrid[:-1, 1:].ravel()))
        self._vertices = np.array(map(list, zip(ij, i1j, i1j1, ij1, ij)))
        """
def interpolate(self, a, xi, method='nearest'):
"""
Use the griddata method to interpolate values from an array onto the
points defined in xi. For any values outside of the grid, use
'nearest' to find a value for them.
Parameters
----------
a : numpy.ndarray
array to interpolate from. It must be of size nrow, ncol
xi : numpy.ndarray
array containing x and y point coordinates of size (npts, 2). xi
also works with broadcasting so that if a is a 2d array, then
xi can be passed in as (xgrid, ygrid).
method : {'linear', 'nearest', 'cubic'}
method to use for interpolation (default is 'nearest')
Returns
-------
b : numpy.ndarray
array of size (npts)
"""
from scipy.interpolate import griddata
# Create a 2d array of points for the grid centers
points = np.empty((self.ncol * self.nrow, 2))
points[:, 0] = self.xcentergrid.flatten()
points[:, 1] = self.ycentergrid.flatten()
# Use the griddata function to interpolate to the xi points
b = griddata(points, a.flatten(), xi, method=method, fill_value=np.nan)
# if method is linear or cubic, then replace nan's with a value
# interpolated using nearest
if method != 'nearest':
bn = griddata(points, a.flatten(), xi, method='nearest')
idx = np.isnan(b)
b[idx] = bn[idx]
return b
def get_2d_vertex_connectivity(self):
"""
Create the cell 2d vertices array and the iverts index array. These
are the same form as the ones used to instantiate an unstructured
spatial reference.
Returns
-------
verts : ndarray
array of x and y coordinates for the grid vertices
iverts : list
a list with a list of vertex indices for each cell in clockwise
order starting with the upper left corner
"""
x = self.xgrid.flatten()
y = self.ygrid.flatten()
nrowvert = self.nrow + 1
ncolvert = self.ncol + 1
npoints = nrowvert * ncolvert
verts = np.empty((npoints, 2), dtype=np.float)
verts[:, 0] = x
verts[:, 1] = y
iverts = []
for i in range(self.nrow):
for j in range(self.ncol):
iv1 = i * ncolvert + j # upper left point number
iv2 = iv1 + 1
iv4 = (i + 1) * ncolvert + j
iv3 = iv4 + 1
iverts.append([iv1, iv2, iv3, iv4])
return verts, iverts
def get_3d_shared_vertex_connectivity(self, nlay, botm, ibound=None):
# get the x and y points for the grid
x = self.xgrid.flatten()
y = self.ygrid.flatten()
# set the size of the vertex grid
nrowvert = self.nrow + 1
ncolvert = self.ncol + 1
nlayvert = nlay + 1
nrvncv = nrowvert * ncolvert
npoints = nrvncv * nlayvert
# create and fill a 3d points array for the grid
verts = np.empty((npoints, 3), dtype=np.float)
verts[:, 0] = np.tile(x, nlayvert)
verts[:, 1] = np.tile(y, nlayvert)
istart = 0
istop = nrvncv
for k in range(nlay + 1):
verts[istart:istop, 2] = self.interpolate(botm[k],
verts[istart:istop, :2],
method='linear')
istart = istop
istop = istart + nrvncv
# create the list of points comprising each cell. points must be
# listed a specific way according to vtk requirements.
iverts = []
for k in range(nlay):
koffset = k * nrvncv
for i in range(self.nrow):
for j in range(self.ncol):
if ibound is not None:
if ibound[k, i, j] == 0:
continue
iv1 = i * ncolvert + j + koffset
iv2 = iv1 + 1
iv4 = (i + 1) * ncolvert + j + koffset
iv3 = iv4 + 1
iverts.append([iv4 + nrvncv, iv3 + nrvncv,
iv1 + nrvncv, iv2 + nrvncv,
iv4, iv3, iv1, iv2])
return verts, iverts
def get_3d_vertex_connectivity(self, nlay, botm, ibound=None):
if ibound is None:
ncells = nlay * self.nrow * self.ncol
ibound = np.ones((nlay, self.nrow, self.ncol), dtype=np.int)
else:
ncells = (ibound != 0).sum()
npoints = ncells * 8
verts = np.empty((npoints, 3), dtype=np.float)
iverts = []
ipoint = 0
for k in range(nlay):
for i in range(self.nrow):
for j in range(self.ncol):
if ibound[k, i, j] == 0:
continue
ivert = []
pts = self.get_vertices(i, j)
pt0, pt1, pt2, pt3, pt0 = pts
z = botm[k + 1, i, j]
verts[ipoint, 0:2] = np.array(pt1)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt2)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt0)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt3)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
z = botm[k, i, j]
verts[ipoint, 0:2] = np.array(pt1)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt2)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt0)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
verts[ipoint, 0:2] = np.array(pt3)
verts[ipoint, 2] = z
ivert.append(ipoint)
ipoint += 1
iverts.append(ivert)
return verts, iverts
class SpatialReferenceUnstructured(SpatialReference):
    """
    A class to locate an unstructured model grid in x-y space.

    Parameters
    ----------
    xc : ndarray
        one-dimensional array of cell-center x coordinates
    yc : ndarray
        one-dimensional array of cell-center y coordinates
    verts : ndarray
        2d array of x and y points.
    iverts : list of lists
        should be of len(ncells) with a list of vertex numbers for each cell
    ncpl : ndarray
        array containing the number of cells per layer.  ncpl.sum() must be
        equal to the total number of cells in the grid.
    layered : boolean
        flag to indicated that the grid is layered.  In this case, the
        vertices define the grid for a single layer, and all layers use this
        same grid.  In this case the ncpl value for each layer must equal
        len(iverts).  If not layered, then verts and iverts are specified
        for all cells and all layers in the grid.  In this case, ncpl.sum()
        must equal len(iverts).
    lenuni : int
        the length units flag from the discretization package
    proj4_str : str
        a PROJ4 string that identifies the grid in space.  warning: case
        sensitive!
    epsg : int
        EPSG code that identifies the grid in space.  Can be used in lieu of
        proj4.  PROJ4 attribute will auto-populate if there is an internet
        connection (via get_proj4 method).
        See https://www.epsg-registry.org/ or spatialreference.org
    units : string
        Units for the grid.  Must be either feet or meters
    length_multiplier : float
        multiplier to convert model units to spatial reference units.
        (default=1.)

    Attributes
    ----------
    xcenter : ndarray
        array of x cell centers
    ycenter : ndarray
        array of y cell centers
    """

    def __init__(self, xc, yc, verts, iverts, ncpl, layered=True, lenuni=1,
                 proj4_str="EPSG:4326", epsg=None, units=None,
                 length_multiplier=1.):
        self.xc = xc
        self.yc = yc
        self.verts = verts
        self.iverts = iverts
        self.ncpl = ncpl
        self.layered = layered
        self.lenuni = lenuni
        self._proj4_str = proj4_str
        self.epsg = epsg
        if epsg is not None:
            # resolve the PROJ4 string from the epsg code (network lookup)
            self._proj4_str = getproj4(epsg)
        self.supported_units = ["feet", "meters"]
        self._units = units
        self.length_multiplier = length_multiplier
        # set defaults
        self.xul = 0.
        self.yul = 0.
        self.rotation = 0.
        if self.layered:
            # a single layer grid that is reused for every layer
            assert all([n == len(iverts) for n in ncpl])
            assert self.xc.shape[0] == self.ncpl[0]
            assert self.yc.shape[0] == self.ncpl[0]
        else:
            msg = ('Length of iverts must equal ncpl.sum '
                   '({} {})'.format(len(iverts), ncpl))
            assert len(iverts) == ncpl.sum(), msg
            assert self.xc.shape[0] == self.ncpl.sum()
            assert self.yc.shape[0] == self.ncpl.sum()
        return

    def write_shapefile(self, filename='grid.shp'):
        """
        Write shapefile of the grid (not implemented for unstructured grids).

        Parameters
        ----------
        filename : string
            filename for shapefile
        """
        raise NotImplementedError()
        return

    def write_gridSpec(self, filename):
        """
        Write a PEST-style grid specification file (not implemented for
        unstructured grids).

        Parameters
        ----------
        filename : string
            filename for grid specification file
        """
        raise NotImplementedError()
        return

    @classmethod
    def from_gridspec(cls, fname):
        """
        Create a new SpatialReferenceUnstructured grid from a PEST
        grid specification file (not implemented).

        Parameters
        ----------
        fname : string
            File name for grid specification file

        Returns
        -------
        sru : flopy.utils.reference.SpatialReferenceUnstructured
        """
        raise NotImplementedError()
        return

    @classmethod
    def from_argus_export(cls, fname, nlay=1):
        """
        Create a new SpatialReferenceUnstructured grid from an Argus One
        Trimesh file

        Parameters
        ----------
        fname : string
            File name
        nlay : int
            Number of layers to create

        Returns
        -------
        sru : flopy.utils.reference.SpatialReferenceUnstructured
        """
        from ..utils.geometry import get_polygon_centroid
        f = open(fname, 'r')
        line = f.readline()
        ll = line.split()
        ncells, nverts = ll[0:2]
        ncells = int(ncells)
        nverts = int(nverts)
        # np.float was removed from numpy >= 1.24; float is equivalent
        verts = np.empty((nverts, 2), dtype=float)
        xc = np.empty((ncells), dtype=float)
        yc = np.empty((ncells), dtype=float)
        # read the vertices
        f.readline()
        for ivert in range(nverts):
            line = f.readline()
            ll = line.split()
            c, iv, x, y = ll[0:4]
            verts[ivert, 0] = x
            verts[ivert, 1] = y
        # read the cell information and create iverts, xc, and yc
        iverts = []
        for icell in range(ncells):
            line = f.readline()
            ll = line.split()
            ivlist = []
            for ic in ll[2:5]:
                ivlist.append(int(ic) - 1)
            # close each cell's vertex ring if the file left it open
            if ivlist[0] != ivlist[-1]:
                ivlist.append(ivlist[0])
            iverts.append(ivlist)
            xc[icell], yc[icell] = get_polygon_centroid(verts[ivlist, :])
        # close file and return spatial reference
        f.close()
        return cls(xc, yc, verts, iverts, np.array(nlay * [len(iverts)]))

    def __setattr__(self, key, value):
        # NOTE(review): deliberately skips SpatialReference in the MRO so the
        # parent's attribute interception does not run -- confirm intent
        super(SpatialReference, self).__setattr__(key, value)
        return

    def get_extent(self):
        """
        Get the extent of the grid

        Returns
        -------
        extent : tuple
            min and max grid coordinates (xmin, xmax, ymin, ymax)
        """
        xmin = self.verts[:, 0].min()
        xmax = self.verts[:, 0].max()
        ymin = self.verts[:, 1].min()
        ymax = self.verts[:, 1].max()
        return (xmin, xmax, ymin, ymax)

    def get_xcenter_array(self):
        """
        Return a numpy one-dimensional float array that has the cell center x
        coordinate for every cell in the grid in model space - not offset or
        rotated.
        """
        return self.xc

    def get_ycenter_array(self):
        """
        Return a numpy one-dimensional float array that has the cell center y
        coordinate for every cell in the grid in model space - not offset or
        rotated.
        """
        return self.yc

    def plot_array(self, a, ax=None):
        """
        Plot an array on the unstructured grid using patches.

        Parameters
        ----------
        a : np.ndarray

        Returns
        -------
        patch_collection : matplotlib.collections.PatchCollection
        """
        # import plt locally for consistency with the structured
        # SpatialReference.plot_array (plt was referenced here without a
        # visible import)
        import matplotlib.pyplot as plt
        from ..plot import plotutil
        if ax is None:
            ax = plt.gca()
        patch_collection = plotutil.plot_cvfd(self.verts, self.iverts, a=a,
                                              ax=ax)
        return patch_collection

    def get_grid_line_collection(self, **kwargs):
        """
        Get a patch collection of the grid
        """
        from ..plot import plotutil
        # NOTE(review): 'colors' is effectively a required kwarg here
        # (KeyError if omitted) -- confirm whether a default is wanted
        edgecolor = kwargs.pop('colors')
        pc = plotutil.cvfd_to_patch_collection(self.verts, self.iverts)
        pc.set(facecolor='none')
        pc.set(edgecolor=edgecolor)
        return pc

    def contour_array(self, ax, a, **kwargs):
        """
        Contour an array on the unstructured grid.

        Parameters
        ----------
        ax : matplotlib.axes.Axes
            ax to add the contours
        a : np.ndarray
            array to contour

        Returns
        -------
        contour_set : ContourSet
        """
        contour_set = ax.tricontour(self.xcenter, self.ycenter,
                                    a, **kwargs)
        return contour_set
class epsgRef:
    """Sets up a local database of projection file text referenced by epsg code.
    The database is located in the site packages folder in epsgref.py, which
    contains a dictionary, prj, of projection file text keyed by epsg value.
    """
    def __init__(self):
        # NOTE(review): writes into site-packages, which may not be writable
        # for system-wide installs -- confirm this is acceptable
        sp = [f for f in sys.path if f.endswith('site-packages')][0]
        self.location = os.path.join(sp, 'epsgref.py')
    def _remove_pyc(self):
        # remove the stale byte-compiled companion of epsgref.py, if any
        try:  # get rid of pyc file
            os.remove(self.location + 'c')
        except:
            pass
    def make(self):
        # create an empty database module if it does not exist yet
        if not os.path.exists(self.location):
            newfile = open(self.location, 'w')
            newfile.write('prj = {}\n')
            newfile.close()
    def reset(self, verbose=True):
        # delete the database (and its .pyc) and recreate it empty
        if os.path.exists(self.location):
            os.remove(self.location)
        self._remove_pyc()
        self.make()
        if verbose:
            print('Resetting {}'.format(self.location))
    def add(self, epsg, prj):
        """add an epsg code to epsgref.py"""
        with open(self.location, 'a') as epsgfile:
            epsgfile.write("prj[{:d}] = '{}'\n".format(epsg, prj))
    def remove(self, epsg):
        """removes an epsg entry from epsgref.py"""
        from epsgref import prj
        self.reset(verbose=False)
        if epsg in prj.keys():
            del prj[epsg]
        # rewrite the remaining entries into the fresh database
        for epsg, prj in prj.items():
            self.add(epsg, prj)
    @staticmethod
    def show():
        import importlib
        import epsgref
        # reload to pick up entries appended since the first import
        importlib.reload(epsgref)
        from epsgref import prj
        for k, v in prj.items():
            print('{}:\n{}\n'.format(k, v))
def getprj(epsg, addlocalreference=True, text='esriwkt'):
    """Gets projection file (.prj) text for given epsg code from
    spatialreference.org.  See: https://www.epsg-registry.org/

    Parameters
    ----------
    epsg : int
        epsg code for coordinate system
    addlocalreference : boolean
        adds the projection file text associated with epsg to a local
        database, epsgref.py, located in site-packages.

    Returns
    -------
    prj : str
        text for a projection (*.prj) file.
    """
    epsgfile = epsgRef()
    prj = None
    try:
        # consult the local cache first
        from epsgref import prj
        prj = prj.get(epsg)
    except Exception:
        # cache module missing or unreadable: (re)create it.  This was a
        # bare except, which also swallowed SystemExit/KeyboardInterrupt.
        epsgfile.make()
    if prj is None:
        # cache miss: fetch from spatialreference.org
        prj = get_spatialreference(epsg, text=text)
        if addlocalreference:
            epsgfile.add(epsg, prj)
    return prj
def get_spatialreference(epsg, text='esriwkt'):
    """Gets text for given epsg code and text format from spatialreference.org
    Fetches the reference text using the url:
        http://spatialreference.org/ref/epsg/<epsg code>/<text>/
    See: https://www.epsg-registry.org/

    Parameters
    ----------
    epsg : int
        epsg code for coordinate system
    text : str
        string added to url

    Returns
    -------
    str : reference text with newlines stripped, or a pyproj-style
        '+init=epsg:<code>' fallback when the code is not listed
    """
    from flopy.utils.flopy_io import get_url_text
    url = "http://spatialreference.org/ref/epsg/{0}/{1}/".format(epsg, text)
    err = ('No internet connection or epsg code {} '
           'not found on spatialreference.org.'.format(epsg))
    result = get_url_text(url, error_msg=err)
    if result is None:
        # epsg code not listed on spatialreference.org may still work
        # with pyproj
        return '+init=epsg:{}'.format(epsg)
    return result.replace("\n", "")
def getproj4(epsg):
    """Gets PROJ4 text for a given epsg code from spatialreference.org.
    See: https://www.epsg-registry.org/

    Parameters
    ----------
    epsg : int
        epsg code for coordinate system

    Returns
    -------
    prj : str
        PROJ4 string for the coordinate system.
    """
    return get_spatialreference(epsg, text='proj4')
|
bsd-3-clause
| 7,134,044,055,945,005,000
| 32.212842
| 94
| 0.501373
| false
| 4.018997
| false
| false
| false
|
privacyidea/privacyideaadm
|
privacyideautils/commands/audit.py
|
1
|
2377
|
# -*- coding: utf-8 -*-
#
# 2020-04-13 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# migrate to click
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import click
import datetime
import logging
from privacyideautils.clientutils import (showresult,
dumpresult,
privacyideaclient,
__version__)
@click.group()
@click.pass_context
def audit(ctx):
    """
    Manage the audit log. Basically fetch audit information.
    """
    # group container only; the subcommands do the actual work
    pass
@audit.command()
@click.pass_context
@click.option("--page", help="The page number to view", type=int)
@click.option("--rp", help="The number of entries per page", type=int)
@click.option("--sortname", help="The name of the column to sort by", default="number")
@click.option("--sortorder", help="The order to sort (desc, asc)",
              type=click.Choice(["desc", "asc"]), default="desc")
@click.option("--query", help="A search term to search for")
@click.option("--qtype", help="The column to search for")
def list(ctx, page, rp, sortname, sortorder, query, qtype):
    """
    List the audit log.

    Builds the search parameters from the supplied options, queries the
    privacyIDEA audit endpoint, and prints each audit row followed by the
    total number of matching entries.
    """
    client = ctx.obj["pi_client"]
    # forward only options with a truthy value; sortname/sortorder always
    # have defaults, so they are always sent (same behavior as the original
    # if-chain, minus the repetition)
    options = (("page", page), ("rp", rp), ("sortname", sortname),
               ("sortorder", sortorder), ("query", query), ("qtype", qtype))
    param = {key: value for key, value in options if value}
    resp = client.auditsearch(param)
    value = resp.data.get("result").get("value")
    auditdata = value.get("auditdata")
    count = value.get("count")
    for row in auditdata:
        print(row)
    print("Total: {0!s}".format(count))
|
agpl-3.0
| 7,420,498,847,606,918,000
| 32.942857
| 87
| 0.631313
| false
| 3.741732
| false
| false
| false
|
arikpoz/deep-visualization-toolbox
|
run_webui.py
|
1
|
1847
|
#! /usr/bin/env python
import os
import thread
from live_vis import LiveVis
from bindings import bindings
try:
    import settings
except:
    print '\nError importing settings.py. Check the error message below for more information.'
    print "If you haven't already, you'll want to open the settings_model_selector.py file"
    print 'and edit it to point to your caffe checkout.\n'
    raise
# bail out early if the caffe checkout path has not been configured
if not os.path.exists(settings.caffevis_caffe_root):
    raise Exception('ERROR: Set caffevis_caffe_root in settings.py first.')
import cv2
from flask import Flask, render_template, Response
# module-level Flask application that serves the live visualization
app = Flask(__name__)
@app.route('/')
def index():
    # serve the single-page UI shell
    return render_template('index.html')
def gen():
    # endless multipart-JPEG stream: one encoded frame per iteration,
    # delimited by the '--frame' boundary the Response declares
    while True:
        frame = get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')
def get_frame():
    # We are using Motion JPEG, but OpenCV defaults to capture raw images,
    # so we must encode it into JPEG in order to correctly display the
    # video stream.
    global lv
    # [:, :, ::-1] reverses the channel order of lv.window_buffer before
    # encoding (cv2.imencode expects BGR)
    ret, jpeg = cv2.imencode('.jpg', lv.window_buffer[:,:,::-1])
    return jpeg.tobytes()
@app.route('/video_feed')
def video_feed():
    # multipart/x-mixed-replace lets the browser replace each frame in place
    return Response(gen(), mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
    global lv
    def someFunc():
        # run the LiveVis event loop in a background thread
        print "someFunc was called"
        lv.run_loop()
    # Werkzeug's reloader imports this module twice; only start the
    # visualization on the reloader's child process
    if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
        # The reloader has already run - do what you want to do here
        lv = LiveVis(settings)
        help_keys, _ = bindings.get_key_help('help_mode')
        quit_keys, _ = bindings.get_key_help('quit')
        print '\n\nRunning toolbox. Push %s for help or %s to quit.\n\n' % (help_keys[0], quit_keys[0])
        thread.start_new_thread(someFunc, ())
    app.run(host='127.0.0.1', debug=True)
|
mit
| 5,724,847,422,443,700,000
| 25.768116
| 103
| 0.644288
| false
| 3.382784
| false
| false
| false
|
smtpinc/sendapi-python
|
lib/smtpcom/sendapi/api.py
|
1
|
2905
|
from smtpcom.sendapi.send import SendAPI
from smtpcom.sendapi.report import ReportAPI
from smtpcom.sendapi.campaign import CampaignAPI
from smtpcom.sendapi.template import TemplateAPI
class API(object):
    """Facade aggregating the send, report, campaign, and template sub-APIs
    behind a single object; every method forwards its arguments unchanged
    to the corresponding sub-API client."""
    def __init__(self, content_type='json'):
        # one client per API family; all share the same content type
        self.__report = ReportAPI(content_type)
        self.__template = TemplateAPI(content_type)
        self.__campaign = CampaignAPI(content_type)
        self.__email = SendAPI(content_type)
    # --- campaign management ---
    def create_campaign(self, *args, **kwargs):
        return self.__campaign.create_campaign(*args, **kwargs)
    def get_campaigns(self, *args, **kwargs):
        return self.__campaign.get_campaigns(*args, **kwargs)
    # --- template management ---
    def add_template(self, *args, **kwargs):
        return self.__template.add_template(*args, **kwargs)
    def update_template(self, *args, **kwargs):
        return self.__template.update_template(*args, **kwargs)
    def delete_campaign(self, campaign_id):
        return self.__campaign.delete_campaign(campaign_id)
    def update_campaign(self, campaign_id, campaign_name):
        return self.__campaign.update_campaign(campaign_id, campaign_name)
    def delete_template(self, template_id):
        return self.__template.delete_template(template_id)
    def get_template(self, template_id):
        return self.__template.get_template(template_id)
    def get_templates(self, count, page):
        return self.__template.get_templates(count, page)
    # --- reporting ---
    def export_clicks(self, *args, **kwargs):
        return self.__report.export_clicks(*args, **kwargs)
    def export_clicks_by_url(self, *args, **kwargs):
        return self.__report.export_clicks_by_url(*args, **kwargs)
    def get_clicks_by_url(self, *args, **kwargs):
        return self.__report.get_clicks_by_url(*args, **kwargs)
    def export_opens(self, *args, **kwargs):
        return self.__report.export_opens(*args, **kwargs)
    def export_summary_stats(self, *args, **kwargs):
        return self.__report.export_summary_stats(*args, **kwargs)
    def get_clicks(self, *args, **kwargs):
        return self.__report.get_clicks(*args, **kwargs)
    def get_failed_sends(self, *args, **kwargs):
        return self.__report.get_failed_sends(*args, **kwargs)
    def get_opens(self, *args, **kwargs):
        return self.__report.get_opens(*args, **kwargs)
    def update_realtime_reporting(self, *args, **kwargs):
        return self.__report.update_realtime_reporting(*args, **kwargs)
    def get_realtime_reporting(self):
        return self.__report.get_realtime_reporting()
    def get_senders(self):
        return self.__report.get_senders()
    def get_sends(self, count, page):
        return self.__report.get_sends(count, page)
    def get_summary_stats(self, *args, **kwargs):
        return self.__report.get_summary_stats(*args, **kwargs)
    # --- sending ---
    def send(self, *args, **kwargs):
        return self.__email.send(*args, **kwargs)
|
mit
| 8,842,121,755,625,203,000
| 34.864198
| 74
| 0.657143
| false
| 3.573186
| false
| false
| false
|
mabotech/mabo.io
|
py/vision/vision24/vision_crop2.py
|
1
|
6011
|
import time
import socket
import gevent
import numpy as np
import sys
import cv2
from load_config import LoadConfig
import cvlib
conf = LoadConfig("config.toml").config
"""
def match():
img = cv2.imread("box_in_scene2.png")#sys.argv[1])
temp = cv2.imread("box4.png")#sys.argv[2])
try:
dist = int(sys.argv[3])
except IndexError:
dist = 200
try:
num = int(sys.argv[4])
except IndexError:
num = -1
skp, tkp = findKeyPoints(img, temp, dist)
newimg = drawKeyPoints(img, temp, skp, tkp, num)
cv2.imshow("image", newimg)
cv2.waitKey(0)
"""
def supress(v, w):
    """Return True when circle v = (x, y, radius) has a plausible radius:
    smaller than half the crop width w and larger than 20 px.
    Implicitly returns None otherwise."""
    #v[0],v[1],
    print v
    if v[2] < w/2 and v[2] > 20:# and v[0] - v[2] >0 and v[1] - v[2]>0 :
        return True
def main():
    """Grab frames from the configured camera, crop a fixed square window,
    detect circles in the crop with a Hough transform, and display the
    annotated stream.  Press ESC in the display window to exit."""
    print conf
    target = cv2.imread(conf["app"]["target"])#sys.argv[2])
    #target = cv2.cvtColor(target, cv2.COLOR_BGR2GRAY)
    #print type(target)
    #cv2.NamedWindow("camera", 1)
    #capture = cv2.VideoCapture(0)
    capture = cv2.VideoCapture(conf["app"]["camera_uri"])
    i = 0
    # crop window: top-left corner pt1 and bottom-right corner pt2
    pt1 = (conf["app"]["crop_start"][0],conf["app"]["crop_start"][1])
    w = conf["app"]["corp_width"]
    pt2 = (pt1[0]+w,pt1[1]+w)
    debug = 1# conf["app"]["debug"]
    cp = [0,0]
    while True:
        #i = i +1
        #if i > 200:
        #    i = 0
        ret, img_read = capture.read() #cv.QueryFrame(capture)
        #if i == 1:
        #    pass
        # ret is False when the camera yielded no frame; wait and retry
        if ret == False:
            print ret,
            time.sleep(0.1)
            #raise(Exception("can't connect camera"))
        #mat=cv2.GetMat(img)
        #img_p = np.asarray(mat)
        #img_p = cv.CreateImage(cv.GetSize(img),cv.IPL_DEPTH_8U,1)
        #print dir(img)
        """
        im_gray = cv.CreateImage(cv.GetSize(img),cv.IPL_DEPTH_8U,1)
        cv.CvtColor(img,im_gray,cv.CV_RGB2GRAY)
        # Sobel operator
        dstSobel = cv.CreateMat(im_gray.height, im_gray.width, cv.CV_32FC1)
        # Sobel(src, dst, xorder, yorder, apertureSize = 3)
        cv.Sobel(im_gray,dstSobel,1,1,3)
        """
        #print ret
        try:
            # skp: source key points, tkp: target key points
            t1 = time.time()
            #img[200:400, 100:300] # Crop from x, y, w, h -> 100, 200, 300, 400
            #im[y1:y2, x1:x2]
            #
            crop_img = img_read[pt1[1]:pt2[1], pt1[0]:pt2[0]]
            #print(len(crop_img))
            distance = conf["app"]["distance"]
            #skp, tkp = cvlib.findKeyPoints(crop_img , target, distance)
            # keypoint matching disabled; skp is hard-coded truthy so the
            # Hough-circle branch below always runs
            skp = 1
            if skp == None:
                print("skp is none")
                img_read = cv2.medianBlur(img_read,5)
                img_read = cv2.cvtColor(img_read, cv2.COLOR_BGR2GRAY)
                cv2.imshow("camera", img_read)
                #continue
            else:
                print "==" * 20
                print "time:[%.3f]" %(time.time() - t1)
                #print "skp", len(skp)#, skp
                #print "tkp",len(tkp)#, tkp
                if debug:
                    # blur + grayscale the crop, then detect circles
                    crop_img = cv2.medianBlur(crop_img,5)
                    gray = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY)
                    circles = cv2.HoughCircles(gray, cv2.cv.CV_HOUGH_GRADIENT,
                                    30, ## dp
                                    200, ## minDist
                                    param1=100,
                                    param2=100, ##
                                    minRadius=70,
                                    maxRadius=200)
                    print circles
                    circles = np.uint16(np.around(circles))
                    j = 0
                    cv2.rectangle(img_read, pt1, pt2, (0,255,0))
                    # draw each plausible circle back onto the full frame,
                    # offset by the crop origin pt1
                    for i in circles[0,:]:
                        if supress(i, w):
                            j = j + 1
                            """if i[0] - cp[0] > 30 or i[1] - cp[1] > 30 :
                                pass
                            else:
                            """
                            cv2.circle(img_read,(pt1[0]+i[0],pt1[1]+i[1]),i[2],(0,255,0),2)
                            cv2.circle(img_read,(pt1[0]+i[0],pt1[1]+i[1]),2,(0,0,255),3)
                            cp = [ i[0], i[1] ]
                #newimg = cvlib.drawKeyPoints(img_read, target, skp, tkp, pt1, pt2, -1)
                cv2.imshow("camera", img_read)
                #gevent.sleep(1)
        except Exception as ex:
            # best-effort loop: log the error and keep grabbing frames
            print(ex)
            #gevent.sleep(3)
            continue
        #cv.ShowImage('camera', newimg)
        # image smoothing and subtraction
        # imageBlur = cv.CreateImage(cv.GetSize(im_gray), im_gray.depth, im_gray.nChannels)
        # # filering the original image
        # # Smooth(src, dst, smoothtype=CV_GAUSSIAN, param1=3, param2=0, param3=0, param4=0)
        # cv.Smooth(im_gray, imageBlur, cv.CV_BLUR, 11, 11)
        # diff = cv.CreateImage(cv.GetSize(im_gray), im_gray.depth, im_gray.nChannels)
        # # subtraction (original - filtered)
        # cv.AbsDiff(im_gray,imageBlur,diff)
        # cv.ShowImage('camera', diff)
        # ESC (keycode 27) exits the loop
        if cv2.waitKey(10) == 27:
            break
        #gevent.sleep(0.1)
    # cv2.destroyWindow("camera")
|
mit
| -8,267,754,926,154,588,000
| 25.955157
| 91
| 0.414241
| false
| 3.540047
| false
| false
| false
|
camilonova/sentry
|
src/sentry/utils/http.py
|
1
|
3666
|
"""
sentry.utils.http
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import six
import urllib
from django.conf import settings
from urlparse import urlparse, urljoin
def absolute_uri(url=None):
    """Join *url* onto SENTRY_URL_PREFIX; return the bare prefix when no
    url is given."""
    if not url:
        return settings.SENTRY_URL_PREFIX
    base = settings.SENTRY_URL_PREFIX.rstrip('/') + '/'
    return urljoin(base, url.lstrip('/'))
def safe_urlencode(params, doseq=0):
    """
    UTF-8-safe version of urllib.urlencode.

    The stdlib urlencode prior to Python 3.x chokes on UTF-8 values
    which can't fail down to ascii.
    """
    # Snippet originally from pysolr: https://github.com/toastdriven/pysolr
    if hasattr(params, "items"):
        params = params.items()

    encoded = []
    for key, value in params:
        key = key.encode("utf-8")
        if isinstance(value, six.string_types):
            encoded.append((key, value.encode("utf-8")))
        elif isinstance(value, (list, tuple)):
            # Encode each member of a multi-valued parameter individually.
            encoded.append((key, [item.encode("utf-8") for item in value]))
        else:
            # Non-string scalars are stringified and left unencoded.
            encoded.append((key, six.text_type(value)))
    return urllib.urlencode(encoded, doseq)
def is_same_domain(url1, url2):
    """
    Return True when the two urls share a netloc and can therefore be
    treated as if they're from the same (trusted) domain.
    """
    return urlparse(url1).netloc == urlparse(url2).netloc
def get_origins(project=None):
    """Return the frozenset of allowed origins, normalised to lowercase
    with any trailing slash removed.

    Project-level origins (option ``sentry:origins``) are merged with the
    globally configured ``SENTRY_ALLOW_ORIGIN`` values.
    """
    if settings.SENTRY_ALLOW_ORIGIN == '*':
        return frozenset(['*'])

    origins = []
    if settings.SENTRY_ALLOW_ORIGIN:
        origins = settings.SENTRY_ALLOW_ORIGIN.split(' ')

    if project:
        # TODO: we should cache this
        from sentry.plugins.helpers import get_option
        optval = get_option('sentry:origins', project)
        if optval:
            origins.extend(optval)

    # lowercase and strip the trailing slash from all origin values,
    # then drop any values that end up empty
    cleaned = (value.lower().rstrip('/') for value in origins)
    return frozenset(value for value in cleaned if value)
def is_valid_origin(origin, project=None):
    """
    Given an ``origin`` which matches a base URI (e.g. http://example.com)
    determine if a valid origin is present in the project settings.

    Origins may be defined in several ways:

    - http://domain.com[:port]: exact match for base URI (must include port)
    - *: allow any domain
    - *.domain.com: matches domain.com and all subdomains, on any port
    - domain.com: matches domain.com on any port
    """
    allowed = get_origins(project)

    # A wildcard entry trumps everything, including a missing origin header.
    if '*' in allowed:
        return True

    if not origin:
        return False

    # Matching is always case insensitive.
    origin = origin.lower()

    # Fast path: exact entry in the allow list.
    if origin in allowed:
        return True

    # Some user agents send the literal string 'null' as the origin
    # (e.g. for local files), which can never be valid here.
    if origin == 'null':
        return False

    parsed = urlparse(origin)
    if parsed.hostname is None:
        # No hostname at all -- the header is probably invalid.
        return False

    for pattern in allowed:
        if '://' in pattern:
            # Full URI pattern: support partial matches that may include
            # a path component.
            if origin.startswith(pattern):
                return True
        elif pattern[:2] == '*.':
            # Wildcard: check foo.domain.com as well as bare domain.com.
            if parsed.hostname.endswith(pattern[1:]) or parsed.hostname == pattern[2:]:
                return True
        elif parsed.hostname == pattern:
            # Bare hostname: matches on any port.
            return True
    return False
|
bsd-3-clause
| 5,602,243,267,741,755,000
| 26.358209
| 101
| 0.619749
| false
| 3.980456
| false
| false
| false
|
sauli6692/ibc-server
|
core/models/user.py
|
1
|
2164
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import PermissionsMixin
from django.contrib.auth.base_user import BaseUserManager, AbstractBaseUser
from django.utils.translation import ugettext_lazy as _
from .mixins.log_fields import LogFieldsMixin
class UserManager(BaseUserManager):
    """Manager that creates accounts keyed on ``username`` only."""

    use_in_migrations = True

    def _create_user(self, username, password, is_superuser, **extra_fields):
        """Create and persist a user with the given username and password.

        The account is created active, with ``last_login`` stamped to now.
        Raises ValueError when ``username`` is empty.
        """
        if not username:
            raise ValueError('The given username must be set')
        user = self.model(
            username=username,
            is_active=True,
            is_superuser=is_superuser,
            last_login=timezone.now(),
            **extra_fields
        )
        # Fix: removed a leftover debug ``print(user.__dict__)`` so account
        # internals are not written to stdout on every user creation.
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_user(self, username, password=None, **extra_fields):
        """Create a regular (non-superuser) account."""
        return self._create_user(username, password, False, **extra_fields)

    def create_superuser(self, username, password, **extra_fields):
        """Create a superuser account."""
        return self._create_user(username, password, True, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin, LogFieldsMixin):
    """Auth user tied one-to-one to a ``pmm.Member`` owner record.

    Display names are delegated to the owning Member; admin access is
    granted to superusers only (see ``is_staff``).
    """
    class Meta:
        verbose_name = _('User')
        verbose_name_plural = _('Users')
    # Authenticate by username; createsuperuser prompts for no extra fields.
    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = []
    objects = UserManager()
    username = models.CharField(_('User'), max_length=50, unique=True)
    is_active = models.BooleanField(_('Active'), default=True,)
    # Deleting the owning Member cascades to (deletes) this user.
    owner = models.OneToOneField(
        'pmm.Member',
        verbose_name=_('Owner'),
        on_delete=models.CASCADE,
        null=False
    )
    @property
    def is_staff(self):
        """Grant admin-site access to superusers only."""
        return self.is_superuser
    def __str__(self):
        return self.username
    def get_full_name(self):
        """Return the owning member's full name ("First Last")."""
        return '{0} {1}'.format(self.owner.first_name, self.owner.last_name).strip()
    def get_short_name(self):
        """Return the owning member's first name."""
        return self.owner.first_name
|
mit
| -6,175,051,724,267,287,000
| 29.055556
| 84
| 0.635397
| false
| 4.098485
| false
| false
| false
|
relic7/prodimages
|
python/pm_update_photodate_andmultistyles.py
|
1
|
8394
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os, sys, re, csv
def update_pm_photodate(colorstyle):
    """PUT a sample-image / photographed-date update to the PM photo service
    for the given 9-digit colorstyle, via a curl subprocess."""
    import subprocess
    endpoint = 'http://dmzimage01.l3.bluefly.com:8080/photo/{0}'.format(colorstyle)
    command = [
        "curl",
        '-d', "sample_image=Y",
        '-d', "photographed_date=now",
        "-X", "PUT",
        # NOTE(review): "-format" is not a standard curl flag -- confirm intent.
        "-format",
        endpoint,
    ]
    subprocess.call(command)
def found3digit_rename(filename):
    """Rename ``<style>_<anything>.<ext>`` to ``<style>_<n>.<ext>``.

    ``n`` is the smallest positive integer whose target name does not
    already exist in the file's directory, so repeated calls on files
    sharing a style produce ``_1``, ``_2``, ... without collisions.
    Colliding candidate names are printed as they are skipped.
    """
    import os
    destdir = os.path.dirname(filename)
    fname = os.path.basename(filename)
    style = fname.split('_')[0]
    ext = fname.split('.')[-1]
    incr = 1
    newname = os.path.join(destdir, "{0}_{1}.{2}".format(style, incr, ext))
    # Fix: the original loop re-tested the same candidate twice on the first
    # pass and contained a dead ``os.path.isfile(newname)`` statement whose
    # result was discarded; prints are now py2/py3-compatible.
    while os.path.isfile(newname):
        print(newname)
        incr += 1
        newname = os.path.join(destdir, "{0}_{1}.{2}".format(style, incr, ext))
    os.rename(filename, newname)
    return
def splitswim_updatepm(file_path):
    """Split a two-style ("swim pair") CR2 filename into its two 9-digit styles.

    Paths like ``.../123456789_Top234567891_1x.CR2`` encode a primary and a
    secondary style joined by a category word.  Returns the tuple
    ``(primarystyle, secondarystyle)`` on a match, otherwise None.
    """
    import re
    regex_fullmultistyle = re.compile(r'^.+?/[1-9][0-9]{8}_[b-zB-Z][a-zA-Z]{1,10}[1-9][0-9]{8}_[1-6].+?\.CR2')
    # Category words never start with 'a'/'A', so splitting on this pattern
    # cannot eat the leading digits of either style number.
    regex_split = re.compile(r'[b-zB-Z][a-zA-Z]{1,10}')
    if re.findall(regex_fullmultistyle, file_path):
        # Fix: the original printed "Multistyle".format(file_path), which
        # silently discarded the path (no placeholder); also prints are now
        # py2/py3-compatible.
        print("Multistyle {0}".format(file_path))
        try:
            fname = file_path.split('/')[-1]
            pieces = re.split(regex_split, fname)
            primarystyle = pieces[0][:9]
            secondarystyle = pieces[1][:9]
            return primarystyle, secondarystyle
        except OSError:
            print("FailedSwimSplit {}".format(file_path))
##############################RUN###########################
from PIL import Image
import os, sys, re, glob, datetime
# Build today's studio folder prefix as MMDDYY_ from the ISO date string.
todaysdate = str(datetime.date.today())
todaysfolder = "{0}{1}{2}_".format(todaysdate[5:7],todaysdate[8:10],todaysdate[2:4])
eFashion_root = '/mnt/Post_Ready/eFashionPush'
aPhoto_root = '/mnt/Post_Ready/aPhotoPush'
#rootdir = sys.argv[1]
#walkedout = recursive_dirlist(rootdir)
# 9-digit style, "_1" alt marker, 3-char extension (jpg/JPG/CR2 char classes).
regex = re.compile(r'.*?/[0-9]{9}_[1].*?\.[jpgJPGCR2]{3}$')
regex_raw = re.compile(r'.*?/RAW/.+?/[0-9]{9}_[1].*?\.[jpgJPGCR2]{3}$')
#regex = re.compile(r'.+?\.[jpgJPG]{3}$')
basedir = os.path.join('/mnt/Production_Raw/PHOTO_STUDIO_OUTPUT/ON_FIGURE/*/', todaysfolder + '*')
basedirstill = os.path.join(aPhoto_root, todaysfolder + '*')
flagged = ''
try:
    args = sys.argv
except:
    args = ''
# NOTE(review): when len(args) == 1 there is no sys.argv[1], so this branch
# raises IndexError -- the two branches look swapped; verify intent.
if args and len(args) == 1:
    globalldirs = os.path.abspath(sys.argv[1])
    #globexportdir = os.path.abspath(sys.argv[1])#glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
    #globstilldir = os.path.abspath(sys.argv[1])#'.'
    flagged = 'SET'# glob.glob(os.path.join(basedirstill, "*/*.jpg"))
elif args and len(args) > 1:
    globalldirs = args[1:]
    flagged = 'SET'
else:
    # No CLI args at all: fall back to globbing today's still-photo folders.
    #globrawdir = glob.glob(os.path.join(basedir, "*/*/*.CR2"))
    #globexportdir = glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
    globstilldir = glob.glob(os.path.join(basedirstill, "*/*.jpg"))
    #curl -d sample_image=Y -d photographed_date=now -X PUT http://dmzimage01.l3.bluefly.com:8080/photo/"$outName"
    globalldirs = globstilldir
# Collect the set of unique 9-digit colorstyles found in the input paths,
# handling swim-pair filenames, RAW paths, bare style numbers and everything
# else in turn.
colorstyles_unique = []
#stylestringsdict = {}
for line in globalldirs:
    #stylestringsdict_tmp = {}
    regex_fullmultistyle = re.compile(r'^.+?/[1-9][0-9]{8}_[b-zB-Z][a-zA-Z]{1,10}[1-9][0-9]{8}_[1-6].+?\.CR2')
    try:
        if re.findall(regex_fullmultistyle, line):
            # Swim pair: one shot encodes two styles; record both.
            swimpair = splitswim_updatepm(line)
            primarystyle = swimpair[0]
            secondarystyle = swimpair[1]
            #if primarystyle not in colorstyles_unique:
            print "YAY_SWIMTOP-->{0}".format(primarystyle)
            colorstyles_unique.append(primarystyle)
            colorstyles_unique = sorted(colorstyles_unique)
            #if secondarystyle not in colorstyles_unique:
            print "YAY_SWIMBOTTOM-->{0}".format(secondarystyle)
            colorstyles_unique.append(secondarystyle)
            colorstyles_unique = sorted(colorstyles_unique)
        elif re.findall(regex_raw,line):
            # RAW path: parse style/alt/shot from the filename components.
            try:
                file_path = line
                filename = file_path.split('/')[-1]
                colorstyle = filename.split('_')[0]
                alt = filename.split('_')[1]
                shot_ext = file_path.split('_')[-1]
                shot_number = shot_ext.split('.')[0]
                ext = shot_ext.split('.')[-1]
                ## Unique Styles Only
                if colorstyle not in colorstyles_unique:
                    print colorstyle
                    colorstyles_unique.append(colorstyle)
                    colorstyles_unique = sorted(colorstyles_unique)
                else:
                    print "Already Accounted {0}".format(colorstyle)
            except IOError:
                print "IOError on {0}".format(line)
            except AttributeError:
                print "AttributeError on {0}".format(line)
        ## If file_path doesnt match the Regular 9digit_# format, checks for 2 styles in 1 shot
        elif len(line) == 9 and line.isdigit():
            # Bare 9-digit style number given directly.
            colorstyle = line
            colorstyles_unique.append(colorstyle)
            colorstyles_unique = sorted(colorstyles_unique)
        else:
            try:
                file_path = line
                filename = file_path.split('/')[-1]
                colorstyle = filename.split('_')[0]
                alt = filename.split('_')[1]
                #shot_ext = file_path.split('_')[-1]
                #shot_number = shot_ext.split('.')[0]
                #ext = shot_ext.split('.')[-1]
                ## Unique Styles Only
                if colorstyle not in colorstyles_unique:
                    print colorstyle
                    colorstyles_unique.append(colorstyle)
                    colorstyles_unique = sorted(colorstyles_unique)
                else:
                    print "Already Accounted {0}".format(colorstyle)
            except IOError:
                print "IOError on {0}".format(line)
            except AttributeError:
                print "AttributeError on {0}".format(line)
    except:
        # Broad catch-all keeps one bad path from killing the whole run.
        print "Error appending {}".format(line)
        pass
############ Send Shots to PM API to update photodate
# De-duplicate and push each collected style; failures are reported but do
# not stop the remaining updates.
colorstyles_unique = set(sorted(colorstyles_unique))
for colorstyle in colorstyles_unique:
    try:
        update_pm_photodate(colorstyle)
    except:
        print "FAILED UPDATE for {0}".format(colorstyle)
########### Check for Exports Remove Shot Number & and Move to eFashionPush ##########
# Only runs when no CLI arguments were supplied (flagged stays '').
if not flagged:
    try:
        import shutil
        # NOTE(review): globexportdir is only ever assigned in commented-out
        # code above, so this reference raises NameError, silently swallowed
        # by the bare except below -- the whole export step may be dead; verify.
        if globexportdir:
            try:
                for f in globexportdir:
                    found3digit_rename(f)
            except:
                print 'Faild'
        ### Get ShootDir Name from last "f" in previous glob and rename ops, then create if not exist
        ## eFashionPush Dir to Create for Exports used below
        eFashion_name = file_path.split('/')[6]
        #eFashion_name = '121913'
        eFashion_dir = os.path.join(eFashion_root, eFashion_name)
        # if not os.path.isdir(eFashion_dir):
        #     os.makedirs(eFashion_dir, 16877)
        ## Refresh and Get Renamed files then copy to eFashion Dir
        globexportdir = glob.glob(os.path.join(basedir, "EXPORT/*/*.jpg"))
        if globexportdir:
            for f in globexportdir:
                shutil.copy2(f, eFashion_dir)
    except:
        pass
|
mit
| 3,212,388,557,149,291,500
| 33.979167
| 114
| 0.546938
| false
| 3.565845
| false
| false
| false
|
baklanovp/pystella
|
tests/test_band.py
|
1
|
8215
|
import unittest
import numpy as np
import pystella.rf.band as band
from pystella.rf.rad_func import Flux2MagAB, MagAB2Flux
from pystella.util.phys_var import phys
__author__ = 'bakl'
class BandTests(unittest.TestCase):
    """Unit tests for the pystella band registry: loading, aliases,
    zero points and effective wavelengths."""
    def test_load_names(self):
        bands = band.band_load_names()
        self.assertTrue(len(bands) > 0, "You have to see more bands. Not %d" % len(bands))
    def test_band_colors_name(self):
        bands = band.band_load_names()
        for bname in bands:
            self.assertTrue(bname in band.colors(), "You have not color for band: %s" % bname)
    def test_band_by_name(self):
        b = band.band_by_name("BesU")
        self.assertTrue(b.is_load, "The band should be loaded and with data")
    def test_aliases_load(self):
        band.Band.load_settings()
        aliases = band.band_get_aliases()
        self.assertTrue(len(aliases), "Should be more aliases.")
    def test_aliases(self):
        # 'U' is expected to be an alias of the Bessell U band.
        bo = band.band_by_name("BesU")
        ba = band.band_by_name("U")
        self.assertTrue(ba.is_load, "The band should be loaded and with data")
        self.assertCountEqual(bo.wl, ba.wl, msg="The alias wl should be the same as original")
        self.assertCountEqual(bo.resp_wl, ba.resp_wl, msg="The alias wl should be the same as original")
    def test_available_bands(self):
        bands = ['U', 'B', 'V', 'R', "I"]
        for n in bands:
            b = band.band_by_name(n)
            self.assertTrue(b is not None, "Band %s does not exist." % b)
        bands = ['g', 'i', 'r', 'u', "z"]
        for n in bands:
            b = band.band_by_name(n)
            self.assertTrue(b is not None, "Band %s does not exist." % b)
        # NOTE(review): bands4 is never used below -- leftover mapping of
        # band names to response files; verify whether it can be removed.
        bands4 = dict(UVM2="photonUVM2.dat", UVW1="photonUVW1.dat", UVW2="photonUVW2.dat",
                      SwiftU="photonU_Swift.dat", SwiftB="photonB_Swift.dat", SwiftV="photonV_Swift.dat")
        bands = ['SwiftU', 'SwiftB', 'SwiftV', 'UVM2', "UVW1", "UVW2"]
        for n in bands:
            b = band.band_by_name(n)
            self.assertTrue(b is not None, "Band %s does not exist." % n)
    def test_zero_point(self):
        zp = 0.748  # See filters.ini
        b = band.band_by_name('U')
        self.assertAlmostEqual(b.zp, zp, msg="Zero points of band %s equals %f. Should be %f" % (b.Name, b.zp, zp))
    def test_band_ubvri(self):
        # NOTE(review): plt.show() opens a GUI window and blocks -- this
        # "test" is really an interactive plot and will hang headless CI runs.
        import pylab as plt
        b = band.band_by_name(band.Band.NameUBVRI)
        plt.plot(b.wl * phys.cm_to_angs, b.resp_wl, band.colors(b.Name), label=b.Name, linewidth=2)
        plt.legend(loc=4)
        plt.ylabel('Amplitude Response')
        plt.xlabel('Wave [A]')
        plt.grid(linestyle=':')
        plt.show()
    def test_wl_eff(self):
        # Reference effective wavelengths in Angstrom; 3% tolerance below.
        # wl_eff = {'U': 3650, 'B': 4450, 'V': 5510, 'R': 6580, 'I': 8060}
        wl_eff = {'u': 3560, 'g': 4830, 'r': 6260, 'i': 7670, 'z': 8890,
                  'U': 3600, 'B': 4380, 'V': 5450, 'R': 6410, 'I': 7980, 'J': 12200, 'H': 16300,
                  'K': 21900}
        for bname, wl in wl_eff.items():
            b = band.band_by_name(bname)
            res = b.wl_eff_angs
            print('{} {:.0f} VS {:.0f}'.format(bname, res, wl))
            self.assertAlmostEqual(res, wl, delta=wl * 0.03, msg="The effective wavelength of band %s equals %f. "
                                                                 "Should be %f" % (b.Name, res, wl))
    def test_fwhm(self):
        # Reference FWHM values, given in Angstrom then converted to cm.
        # wl_eff = {'U': 3650, 'B': 4450, 'V': 5510, 'R': 6580, 'I': 8060}
        wl_fwhm = {'U': 660., 'B': 940., 'V': 880, 'R': 1380., 'I': 1490.,
                   'J': 2130., 'H': 3070., 'K': 3900.}  # AA
        # convert to cm
        wl_fwhm = {bn: wl * 1e-8 for bn, wl in wl_fwhm.items()}
        for bname, wl in wl_fwhm.items():
            b = band.band_by_name(bname)
            res = b.fwhm
            print('{} {:.3e} VS {:.3e}'.format(bname, res, wl))
            self.assertAlmostEqual(res, wl, delta=wl * 0.1,
                                   msg="The fwhm of band {} equals {:.3e}. Should be {:.3e}".format(b.Name, res, wl))
    def test_band_uniform(self):
        b = band.BandUni()
        self.assertTrue(np.any(b.resp_wl == 1), "Response values is equal 1. band: %s" % b.name)
        self.assertTrue(np.any(b.resp_fr == 1), "Response values is equal 1. band: %s" % b.name)
    def test_band_zp_vs_Jy(self):
        # Where both a zero point and a Jansky flux are configured, they
        # must agree to within 0.01 mag.
        bands = band.band_load_names()
        for bname in bands:
            b = band.band_by_name(bname)
            if b.is_zp and b.is_Jy:
                m_ab = Flux2MagAB(b.Jy * phys.jy_to_erg)
                self.assertAlmostEqual(m_ab, b.zp, msg="Band [%s] zp and Jy should be coincide each other. "
                                                       "zp=%f, m_zp(Jy) = %f, Jy = %f"
                                                       % (b.Name, b.zp, m_ab, b.Jy),
                                       delta=0.01)
    def test_zp_AB(self):
        # see https://www.gemini.edu/sciops/instruments/magnitudes-and-fluxes
        # see http://ssc.spitzer.caltech.edu/warmmission/propkit/pet/magtojy/
        # Round-trip sanity check of the AB conversions.
        qq = 1.
        qq1 = MagAB2Flux(Flux2MagAB(qq))
        self.assertAlmostEqual(qq, qq1, msg="MagAB2Flux(Flux2MagAB(x)) %f. Should be %f" % (qq, qq1), delta=0.05)
        # U
        f, ab = 1823., 0.748
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band U equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band U equals %f. Should be %f" % (m_ab, ab), delta=0.003)
        # B
        f, ab = 4260., -0.174
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band B equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band B equals %f. Should be %f" % (m_ab, ab), delta=0.003)
        # V
        f, ab = 3640., -0.0028  # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
        # f, ab = 3781., -0.044
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band V equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band V equals %f. Should be %f" % (m_ab, ab), delta=0.005)
        # R
        # NOTE(review): the messages for R, I and g below still say "band V" --
        # looks like a copy-paste slip; verify before relying on the output.
        f, ab = 3080., 0.18  # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band V equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band V equals %f. Should be %f" % (m_ab, ab), delta=0.005)
        # I
        f, ab = 2550., 0.38  # https://www.astro.umd.edu/~ssm/ASTR620/mags.html
        # f, ab = 3781., -0.044
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band V equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band V equals %f. Should be %f" % (m_ab, ab), delta=0.005)
        # J
        f, ab = 1600., 0.88970004336
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        print("Flux of Zero points of band u equals %f. m_zp = %f" % (f, m_ab))
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band u equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band u equals %f. Should be %f" % (m_ab, ab), delta=0.003)
        # g
        f, ab = 3991., -0.103
        m_ab = Flux2MagAB(f * phys.jy_to_erg)
        f_ab = MagAB2Flux(ab) / phys.jy_to_erg
        self.assertAlmostEqual(f_ab, f, msg="Zero points of band g equals %f. Should be %f" % (f_ab, f), delta=10.05)
        self.assertAlmostEqual(m_ab, ab, msg="Zero points of band g equals %f. Should be %f" % (m_ab, ab), delta=0.005)
def main():
    """Entry point: run the band test suite via the unittest runner."""
    unittest.main()


if __name__ == '__main__':
    main()
|
mit
| 4,692,399,836,042,024,000
| 45.676136
| 119
| 0.546683
| false
| 2.809508
| true
| false
| false
|
dceresoli/ce-espresso
|
test-suite/testcode/bin/testcode.py
|
1
|
32042
|
#!/usr/bin/env python2
'''testcode [options] [action1 [action2...]]
testcode is a simple framework for comparing output from (principally numeric)
programs to previous output to reveal regression errors or miscompilation.
Run a set of actions on a set of tests.
Available actions:
compare compare set of test outputs from a previous testcode
run against the benchmark outputs.
diff diff set of test outputs from a previous testcode
run against the benchmark outputs.
make-benchmarks create a new set of benchmarks and update the userconfig
file with the new benchmark id. Also runs the 'run'
action unless the 'compare' action is also given.
recheck compare a set of test outputs and rerun failed tests.
run run a set of tests and compare against the benchmark
outputs. Default action.
tidy Remove files from previous testcode runs from the test
directories.
Requires two configuration files, jobconfig and userconfig. See testcode
documentation for further details.'''
# copyright: (c) 2012 James Spencer
# license: modified BSD; see LICENSE for more details
import glob
import optparse
import os
import re
import subprocess
import sys
import threading
import time
try:
import testcode2
except ImportError:
# try to find testcode2 assuming it is being run directly from the source
# layout.
SCRIPT_DIR = os.path.abspath(os.path.dirname(sys.argv[0]))
TESTCODE2_LIB = os.path.join(SCRIPT_DIR, '../lib/')
sys.path.extend([TESTCODE2_LIB])
import testcode2
import testcode2.config
import testcode2.util
import testcode2.compatibility
import testcode2.exceptions
import testcode2.validation
#--- testcode initialisation ---
def init_tests(userconfig, jobconfig, test_id, reuse_id, executables=None,
        categories=None, nprocs=-1, benchmark=None, userconfig_options=None,
        jobconfig_options=None):
    '''Initialise tests from the configuration files and command-line options.

userconfig, executables, test_id and userconfig_options are passed to
testcode2.config.userconfig.

jobconfig and jobconfig_options are passed to testcode2.config.parse_jobconfig.

categories is passed to testcode2.config.select_tests.

test_id is used to set the test identifier.  If test_id is null and reused_id
is true, then the identifier is set to that of the last tests ran by testcode
otherwise a unique identifier based upon the date is used.

nprocs is the number of processors each test is run on.  If negative, the
defaults in the configuration files are used.

benchmark is the benchmark id labelling the set of benchmarks to compare the
tests too.  If None, the default in userconfig is used.

Returns:

user_options: dictionary containing user options specified in userconfig.
test_programs: dict of the test programs defined in userconfig.
tests: list of selected tests.
'''
    # Remember whether both config files exist so parse errors can be given
    # a friendlier hint when the user is simply in the wrong directory.
    config_exists = os.path.exists(userconfig) and os.path.exists(jobconfig)
    try:
        (user_options, test_programs) = testcode2.config.parse_userconfig(
                userconfig, executables, test_id, userconfig_options)
    except testcode2.exceptions.TestCodeError:
        err = str(sys.exc_info()[1])
        if not config_exists:
            err += (' Please run from a directory containing (or specify) the '
                   'userconfig file. Use ``--help`` to see available options.')
        raise testcode2.exceptions.TestCodeError(err)
    # Set benchmark if required.
    if benchmark:
        for key in test_programs:
            test_programs[key].benchmark = [benchmark]
    try:
        (tests, test_categories) = testcode2.config.parse_jobconfig(
                jobconfig, user_options, test_programs, jobconfig_options)
    except testcode2.exceptions.TestCodeError:
        err = str(sys.exc_info()[1])
        if not config_exists:
            err += (' Please run from a directory containing (or specify) the '
                   'jobconfig file. Use ``--help`` to see available options.')
        raise testcode2.exceptions.TestCodeError(err)
    # Set number of processors...
    # A non-negative override from the command line is clamped to each
    # test's configured [min_nprocs, max_nprocs] range.
    if nprocs >= 0:
        for test in tests:
            test.nprocs = nprocs
            if test.nprocs < test.min_nprocs:
                test.nprocs = test.min_nprocs
            if test.nprocs > test.max_nprocs:
                test.nprocs = test.max_nprocs
    # parse selected job categories from command line
    # Remove those tests which weren't run most recently if comparing.
    if categories:
        tests = testcode2.config.select_tests(tests, test_categories,
                categories, os.path.abspath(os.path.dirname(userconfig)))
    # Sort by path (as that's how they appear in the user's directory).
    tests.sort(key=lambda test: test.path)
    if not test_id:
        test_id = testcode2.config.get_unique_test_id(tests, reuse_id,
                user_options['date_fmt'])
    # Propagate the chosen identifier to every test program.
    for key in test_programs:
        test_programs[key].test_id = test_id
    return (user_options, test_programs, tests)
#--- create command line interface ---
def parse_cmdline_args(args):
    '''Parse command line arguments.

args: list of supplied arguments.

Returns:

options: object returned by optparse containing the options.
actions: list of testcode2 actions to run.
'''
    # Curse not being able to use argparse in order to support python <= 2.7!
    parser = optparse.OptionParser(usage=__doc__)

    allowed_actions = ['compare', 'run', 'diff', 'tidy', 'make-benchmarks',
            'recheck']

    parser.add_option('-b', '--benchmark', help='Set the file ID of the '
            'benchmark files.  Default: specified in the [user] section of the '
            'userconfig file.')
    parser.add_option('-c', '--category', action='append', default=[],
            help='Select the category/group of tests.  Can be specified '
            'multiple times.  Default: use the _default_ category if run is an '
            'action unless make-benchmarks is an action.  All other cases use '
            'the _all_ category by default.  The _default_ category contains '
            'all tests unless otherwise set in the jobconfig file.')
    parser.add_option('-e', '--executable', action='append', default=[],
            help='Set the executable(s) to be used to run the tests.  Can be'
            ' a path or name of an option in the userconfig file, in which'
            ' case all test programs are set to use that value, or in the'
            ' format program_name=value, which affects only the specified'
            ' program.')
    parser.add_option('-i', '--insert', action='store_true', default=False,
            help='Insert the new benchmark into the existing list of benchmarks'
            ' in userconfig rather than overwriting it.  Only relevant to the'
            ' make-benchmarks action.  Default: %default.')
    parser.add_option('--jobconfig', default='jobconfig', help='Set path to the'
            ' job configuration file.  Default: %default.')
    parser.add_option('--job-option', action='append', dest='job_option',
            default=[], nargs=3, help='Override/add setting to jobconfig.  '
            'Takes three arguments.  Format: section_name option_name value.  '
            'Default: none.')
    parser.add_option('--older-than', type='int', dest='older_than', default=14,
            help='Set the age (in days) of files to remove.  Only relevant to '
            'the tidy action.  Default: %default days.')
    parser.add_option('-p', '--processors', type='int', default=-1,
            dest='nprocs', help='Set the number of processors to run each test '
            'on.  Default: use settings in configuration files.')
    parser.add_option('-q', '--quiet', action='store_const', const=0,
            dest='verbose', default=1, help='Print only minimal output.  '
            'Default: False.')
    parser.add_option('-s', '--submit', dest='queue_system', default=None,
            help='Submit tests to a queueing system of the specified type.  '
            'Only PBS system is currently implemented.  Default: %default.')
    parser.add_option('-t', '--test-id', dest='test_id', help='Set the file ID '
            'of the test outputs.  Default: unique filename based upon date '
            'if running tests and most recent test_id if comparing tests.')
    parser.add_option('--total-processors', type='int', default=-1,
            dest='tot_nprocs', help='Set the total number of processors to use '
            'to run tests concurrently.  Relevant only to the run option.  '
            'Default: run all tests concurrently run if --submit is used; run '
            'tests sequentially otherwise.')
    parser.add_option('--userconfig', default='userconfig', help='Set path to '
            'the user configuration file.  Default: %default.')
    parser.add_option('--user-option', action='append', dest='user_option',
            default=[], nargs=3, help='Override/add setting to userconfig.  '
            'Takes three arguments.  Format: section_name option_name value.  '
            'Default: none.')
    parser.add_option('-v', '--verbose', default=1, action="count",
            dest='verbose', help='Increase verbosity of output.  Can be '
            'specified multiple times.')

    (options, args) = parser.parse_args(args)

    # Default action.
    if not args or ('make-benchmarks' in args and 'compare' not in args
            and 'run' not in args):
        # Run tests by default if no action provided.
        # Run tests before creating benchmark by default.
        args.append('run')

    # Default category.
    if not options.category:
        # We quietly filter out tests which weren't run last when diffing
        # or comparing.
        options.category = ['_all_']
        if 'run' in args and 'make-benchmarks' not in args:
            options.category = ['_default_']

    # Reject any positional argument that is not a known action.
    test_args = (arg not in allowed_actions for arg in args)
    if testcode2.compatibility.compat_any(test_args):
        print('At least one action is not understood: %s.' % (' '.join(args)))
        parser.print_usage()
        sys.exit(1)

    # Parse executable option to form dictionary in format expected by
    # parse_userconfig.
    exe = {}
    for item in options.executable:
        words = item.split('=')
        if len(words) == 1:
            # setting executable for all programs (unless otherwise specified)
            exe['_tc_all'] = words[0]
        else:
            # format: program_name=executable
            exe[words[0]] = words[1]
    options.executable = exe

    # Set FILESTEM if test_id refers to a benchmark file or the benchmark
    # refers to a test_id.
    # Prefixes 't:'/'b:' let test and benchmark ids reference each other's
    # file stems; running with a benchmark stem as the test stem is refused.
    filestem = testcode2.FILESTEM.copy()
    if options.benchmark and options.benchmark[:2] == 't:':
        filestem['benchmark'] = testcode2.FILESTEM['test']
        options.benchmark = options.benchmark[2:]
    if options.test_id and options.test_id[:2] == 'b:':
        filestem['test'] = testcode2.FILESTEM['benchmark']
        options.test_id = options.test_id[2:]
    if filestem['test'] != testcode2.FILESTEM['test'] and 'run' in args:
        print('Not allowed to set test filename to be a benchmark filename '
                'when running calculations.')
        sys.exit(1)
    testcode2.FILESTEM = filestem.copy()

    # Convert job-options and user-options to dict of dicsts format.
    for item in ['user_option', 'job_option']:
        uj_opt = getattr(options, item)
        opt = dict( (section, {}) for section in
                testcode2.compatibility.compat_set(opt[0] for opt in uj_opt) )
        for (section, option, value) in uj_opt:
            opt[section][option] = value
        setattr(options, item, opt)

    return (options, args)
#--- actions ---
def run_tests(tests, verbose=1, cluster_queue=None, tot_nprocs=0):
    '''Run tests.

tests: list of tests.
verbose: level of verbosity in output.
cluster_queue: name of cluster system to use.  If None, tests are run locally.
Currently only PBS is implemented.
tot_nprocs: total number of processors available to run tests on.  As many
tests (in a LIFO fashion from the tests list) are run at the same time as
possible without using more processors than this value.  If less than 1 and
cluster_queue is specified, then all tests are submitted to the cluster at
the same time.  If less than one and cluster_queue is not set, then
tot_nprocs is ignored and the tests are run sequentially (default).
'''
    def run_test_worker(semaphore, semaphore_lock, tests, *run_test_args):
        '''Launch a test after waiting until resources are available to run it.

semaphore: threading.Semaphore object containing the number of cores/processors
which can be used concurrently to run tests.
semaphore.lock: threading.Lock object used to restrict acquiring the semaphore
to one thread at a time.
tests: list of (serialized) tests to run in this thread.
run_test_args: arguments to pass to test.run_test method.
'''

        # Ensure that only one test attempts to register resources with the
        # semaphore at a time.  This restricts running the tests to a LIFO
        # fashion which is not perfect (we don't attempt to backfill with
        # smaller tests, for example) but is a reasonable and (most
        # importantly) simple first-order approach.
        for test in tests:
            semaphore_lock.acquire()
            # test.nprocs is <1 when program is run in serial.
            nprocs_used = max(1, test.nprocs)
            for i in range(nprocs_used):
                semaphore.acquire()
            semaphore_lock.release()
            test.run_test(*run_test_args)
            for i in range(nprocs_used):
                semaphore.release()

    # Check executables actually exist...
    compat = testcode2.compatibility
    executables = [test.test_program.exe for test in tests]
    executables = compat.compat_set(executables)
    for exe in executables:
        mswin = sys.platform.startswith('win') or sys.platform.startswith('cyg')
        # The test is not reliable if there's an unholy combination of windows
        # and cygwin being used to run the program.  We've already warned the
        # user (in config.set_program_name) that we struggled to find the
        # executable.
        if not os.path.exists(exe) and not mswin:
            err = 'Executable does not exist: %s.' % (exe)
            raise testcode2.exceptions.TestCodeError(err)

    if tot_nprocs <= 0 and cluster_queue:
        # Running on cluster.  Default to submitting all tests at once.
        tot_nprocs = sum(test.nprocs for test in tests)

    if tot_nprocs > 0:
        # Allow at most tot_nprocs cores to be used at once by tests.
        max_test_nprocs = max(test.nprocs for test in tests)
        if max_test_nprocs > tot_nprocs:
            err = ('Number of available cores less than the number required by '
                   'the largest test: at least %d needed, %d available.'
                   % (max_test_nprocs, tot_nprocs))
            raise testcode2.exceptions.TestCodeError(err)

        # Need to serialize tests that run in the same directory with wildcard
        # patterns in the output file--otherwise we can't figure out which
        # output file belongs to which test.  We might be able to for some
        # wildcards, but let's err on the side of caution.
        # NOTE(review): non-raw string relies on benign invalid escapes
        # ('\*', '\?'); consider an r'' literal -- verify before changing.
        wildcards = re.compile('.*(\*|\?|\[.*\]).*')
        # serialized_tests is a list of lists: each inner list runs in order
        # inside a single worker thread.
        serialized_tests = []
        test_store = {}
        for test in tests:
            if test.output and wildcards.match(test.output):
                if test.path in test_store:
                    test_store[test.path].append(test)
                else:
                    test_store[test.path] = [test]
            else:
                serialized_tests.append([test])
        for (key, stests) in test_store.items():
            if (len(stests) > 1) and verbose > 2:
                print('Warning: cannot run tests in %s concurrently.' % stests[0].path)
        serialized_tests += test_store.values()

        semaphore = threading.BoundedSemaphore(tot_nprocs)
        slock = threading.Lock()
        jobs = [threading.Thread(
                    target=run_test_worker,
                    args=(semaphore, slock, test, verbose, cluster_queue,
                          os.getcwd())
                    )
                for test in serialized_tests]
        for job in jobs:
            # daemonise so thread terminates when master dies
            try:
                job.setDaemon(True)
            except AttributeError:
                job.daemon = True
            job.start()

        # We avoid .join() which is blocking making it unresponsive to TERM
        while threading.activeCount() > 1:
            time.sleep(0.5)
    else:
        # run straight through, one at a time
        for test in tests:
            test.run_test(verbose, cluster_queue, os.getcwd())
def compare_tests(tests, verbose=1):
    '''Compare the output of each test against its benchmark.

    tests: list of tests.
    verbose: level of verbosity in output.

    Returns:

    number of tests not checked because the test output file does not exist.
    '''
    cwd = os.getcwd()
    missing = 0
    for test in tests:
        for (inp, args) in test.inputs_args:
            # Name of the output file this (input, args) job should have
            # produced, relative to the test directory.
            fname = testcode2.util.testcode_filename(
                testcode2.FILESTEM['test'],
                test.test_program.test_id, inp, args)
            out_path = os.path.join(test.path, fname)
            if not os.path.exists(out_path):
                # Cannot compare: the job was never run (or produced nothing).
                if 0 < verbose <= 2:
                    prefix = testcode2.util.info_line(test.path, inp, args, cwd)
                    print('%sNot checked.' % prefix)
                if verbose > 1:
                    print('Skipping comparison. '
                          'Test file does not exist: %s.\n' % out_path)
                missing += 1
            else:
                test.verify_job(inp, args, verbose, cwd)
    return missing
def recheck_tests(tests, verbose=1, cluster_queue=None, tot_nprocs=0):
    '''Check tests and re-run any failed/skipped tests.

    tests: list of tests.
    verbose: level of verbosity in output.
    cluster_queue: name of cluster system to use.  If None, tests are run
        locally.  Currently only PBS is implemented.
    tot_nprocs: total number of processors available to run tests on.  As many
        tests (in a LIFO fashion from the tests list) are run at the same time
        as possible without using more processors than this value.  If less
        than 1 and cluster_queue is specified, then all tests are submitted to
        the cluster at the same time.  If less than one and cluster_queue is
        not set, then tot_nprocs is ignored and the tests are run sequentially
        (default).

    Returns:

    not_checked: number of tests not checked due to missing test output.
    '''
    # Compact output joins status reports with a space; verbose output uses
    # blank lines between sections.
    if verbose == 0:
        sep = ' '
    else:
        sep = '\n\n'
    sys.stdout.write('Comparing tests to benchmarks:'+sep)
    # First pass: compare existing output; end_status(final=False) prints an
    # intermediate summary without the goodbye message.
    not_checked = compare_tests(tests, verbose)
    end_status(tests, not_checked, verbose, False)
    rerun_tests = []
    skip = testcode2.validation.Status(name='skipped')
    for test in tests:
        stat = test.get_status()
        # A test is re-run if any of its jobs failed or could not be assessed.
        if sum(stat[key] for key in ('failed', 'unknown')) != 0:
            rerun_tests.append(test)
        elif stat['ran'] != 0:
            # Passed tests are marked as skipped for the re-run summary.
            # mark tests as skipped using an internal API (naughty!)
            for inp_arg in test.inputs_args:
                test._update_status(skip, inp_arg)
    if verbose > 0:
        print('')
    # Second pass: re-run only the tests that failed the comparison.
    if rerun_tests:
        sys.stdout.write('Rerunning failed tests:'+sep)
        run_tests(rerun_tests, verbose, cluster_queue, tot_nprocs)
    return not_checked
def diff_tests(tests, diff_program, verbose=1):
    '''Diff test output against the selected benchmark for each test.

    tests: list of tests.
    diff_program: diff program to use (run via the shell).
    verbose: level of verbosity in output.

    Fixes over the previous version:
    - the "test file does not exist" message referenced ``benchmark`` even
      when benchmark selection had failed, in which case ``benchmark`` was
      unbound (NameError) or held a stale value from a previous iteration;
      the missing-benchmark case is now checked first.
    - the original working directory is now restored even if diffing raises.
    '''
    for test in tests:
        cwd = os.getcwd()
        os.chdir(test.path)
        try:
            for (inp, args) in test.inputs_args:
                have_benchmark = True
                benchmark = None
                err = None
                try:
                    benchmark = test.test_program.select_benchmark_file(
                            test.path, inp, args
                            )
                except testcode2.exceptions.TestCodeError:
                    err = sys.exc_info()[1]
                    have_benchmark = False
                test_file = testcode2.util.testcode_filename(
                        testcode2.FILESTEM['test'],
                        test.test_program.test_id, inp, args
                        )
                if not have_benchmark:
                    # Check this first: benchmark is undefined in this case,
                    # so no message may refer to it.
                    if verbose > 0:
                        print('Skipping diff with %s. %s' % (test.path, err))
                elif not os.path.exists(test_file):
                    if verbose > 0:
                        print('Skipping diff with %s in %s: %s does not exist.'
                                % (benchmark, test.path, test_file))
                else:
                    if verbose > 0:
                        print('Diffing %s and %s in %s.' %
                                (benchmark, test_file, test.path))
                    # Run the user's diff program via the shell and wait for
                    # it to finish (it may be interactive, e.g. vimdiff).
                    diff_cmd = '%s %s %s' % (diff_program, benchmark, test_file)
                    diff_popen = subprocess.Popen(diff_cmd, shell=True)
                    diff_popen.wait()
        finally:
            # Always restore the original working directory.
            os.chdir(cwd)
def tidy_tests(tests, ndays):
    '''Tidy up test directories by deleting old test output files.

    Prompts the user for confirmation before deleting anything.

    tests: list of tests.
    ndays: test files older than ndays are deleted.
    '''
    # Files modified before this time (seconds since the epoch) are deleted.
    epoch_time = time.time() - 86400*ndays
    test_globs = ['test.out*','test.err*']
    print(
            'Delete all %s files older than %s days from each job directory?'
            % (' '.join(test_globs), ndays)
         )
    ans = ''
    while ans != 'y' and ans != 'n':
        ans = testcode2.compatibility.compat_input('Confirm [y/n]: ')
    if ans == 'n':
        print('No files deleted.')
    else:
        for test in tests:
            cwd = os.getcwd()
            os.chdir(test.path)
            # Queue-submission scripts are also transient output: tidy them.
            if test.submit_template:
                file_globs = test_globs + [test.submit_template]
            else:
                file_globs = test_globs
            for file_glob in file_globs:
                for test_file in glob.glob(file_glob):
                    # Index -2 of the stat tuple is st_mtime
                    # (modification time).
                    if os.stat(test_file)[-2] < epoch_time:
                        os.remove(test_file)
            os.chdir(cwd)
def make_benchmarks(test_programs, tests, userconfig, copy_files_since,
        insert_id=False):
    '''Make a new set of benchmarks.

    tests: list of tests.
    userconfig: path to the userconfig file.  This is updated with the new
        benchmark id.
    copy_files_since: files produced since the timestamp (in seconds since the
        epoch) are copied to the testcode_data subdirectory in each test.
    insert_id: insert the new benchmark id into the existing list of benchmark
        ids in userconfig if True, otherwise overwrite the existing benchmark
        ids with the new benchmark id (default).
    '''
    # All tests passed?  If not, ask the user before benchmarking failures.
    statuses = [test.get_status() for test in tests]
    npassed = sum(status['passed'] for status in statuses)
    nran = sum(status['ran'] for status in statuses)
    if npassed != nran:
        ans = ''
        print('Not all tests passed.')
        while ans != 'y' and ans != 'n':
            ans = testcode2.compatibility.compat_input(
                'Create new benchmarks? [y/n] ')
        if ans != 'y':
            return None
    # Get vcs info.
    # vcs = {}
    # for (key, program) in test_programs.items():
    # if program.vcs and program.vcs.vcs:
    # vcs[key] = program.vcs.get_code_id()
    # else:
    # print('Program not under (known) version control system')
    # vcs[key] = testcode2.compatibility.compat_input(
    # 'Enter revision id for %s: ' % (key))
    # HACK: vcs detection above is disabled; ask the user for the id instead.
    code_id = testcode2.compatibility.compat_input(
        'Enter new revision id : ')
    # Benchmark label from vcs info.
    # if len(vcs) == 1:
    # benchmark = vcs.popitem()[1]
    # else:
    # benchmark = []
    # for (key, code_id) in vcs.items():
    # benchmark.append('%s-%s' % (key, code_id))
    # benchmark = '.'.join(benchmark)
    # HACK: the benchmark label is simply the user-supplied revision id.
    benchmark = code_id
    # Create benchmarks.
    for test in tests:
        test.create_new_benchmarks(benchmark, copy_files_since)
    # update userconfig file so future runs compare against the new benchmark.
    if userconfig:
        config = testcode2.compatibility.configparser.RawConfigParser()
        config.optionxform = str # Case sensitive file.
        config.read(userconfig)
        if insert_id:
            # Promote the new id to the front of the existing id list,
            # removing any duplicate occurrence first.
            ids = config.get('user', 'benchmark').split()
            if benchmark in ids:
                ids.remove(benchmark)
            ids.insert(0, benchmark)
            benchmark = ' '.join(ids)
        if len(benchmark.split()) > 1:
            print('Setting new benchmarks in userconfig to be: %s.' %
                    (benchmark))
        else:
            print('Setting new benchmark in userconfig to be: %s.' %
                    (benchmark))
        config.set('user', 'benchmark', benchmark)
        userconfig = open(userconfig, 'w')
        config.write(userconfig)
        userconfig.close()
#--- info output ---
def start_status(tests, running, verbose=1):
    '''Print a header containing useful information.

    tests: list of tests.
    running: true if tests are to be run.
    verbose: level of verbosity in output (no output if <1).
    '''
    if verbose <= 0:
        return
    if running:
        # Deduplicate: several tests typically share the same executable.
        unique_exes = testcode2.compatibility.compat_set(
            [test.test_program.exe for test in tests])
        for exe in unique_exes:
            print('Using executable: %s.' % (exe))
    # Every test shares the same test_id and benchmark list, so the first
    # test is representative.
    prog = tests[0].test_program
    print('Test id: %s.' % (prog.test_id))
    if len(prog.benchmark) > 1:
        print('Benchmarks: %s.' % (', '.join(prog.benchmark)))
    else:
        print('Benchmark: %s.' % (prog.benchmark[0]))
    print('')
def end_status(tests, not_checked=0, verbose=1, final=True):
    '''Print a footer containing useful information.

    tests: list of tests.
    not_checked: number of tests not checked (ie not run or compared).
    verbose: level of verbosity in output.  A summary footer is produced if
        greater than 0; otherwise a minimal status line is printed out.
    final: final call (so print a goodbye messge).

    Returns 0 if every test that ran passed and 1 otherwise.
    '''
    # Query each test's status exactly once; reused by the helpers below.
    statuses = [test.get_status() for test in tests]
    def pluralise(string, num):
        '''Return plural form (just by adding s) to string if num > 1.'''
        return string + 's' if num > 1 else string
    def count(stat_key):
        '''Total number of jobs across all tests with the given status.'''
        return sum(status[stat_key] for status in statuses)
    def format_subset(stat_key):
        '''Formatted, de-duplicated locations of tests with >=1 job of
        the given status, sorted by (name, path).'''
        pairs = sorted((test.name, test.path)
                       for (test, status) in zip(tests, statuses)
                       if status[stat_key] != 0)
        formatted = []
        for (name, path) in pairs:
            if os.path.abspath(name) == os.path.abspath(path):
                entry = name
            else:
                entry = '%s (test name: %s)' % (path, name)
            if entry not in formatted:
                formatted.append(entry)
        return formatted
    npassed = count('passed')
    nwarning = count('warning')
    nfailed = count('failed')
    nunknown = count('unknown')
    nskipped = count('skipped')
    nran = count('ran')
    failed_locs = format_subset('failed')
    warning_locs = format_subset('warning')
    skipped_locs = format_subset('skipped')
    # Warnings count as passes, but we note how many there were.
    npassed += nwarning
    # Skipped tests count as tests which weren't run at all.
    nran -= nskipped
    # Pedantic pluralisation of the summary nouns.
    warning = pluralise('warning', nwarning)
    ran_test = pluralise('test', nran)
    failed_test = pluralise('test', nfailed)
    skipped_test = pluralise('test', nskipped)
    # Parenthesised qualifier, e.g. " (2 warnings, 1 skipped)".
    extra = []
    if nwarning != 0:
        extra.append('%s %s' % (nwarning, warning))
    if nskipped != 0:
        extra.append('%s skipped' % (nskipped,))
    if nunknown != 0:
        extra.append('%s unknown' % (nunknown,))
    if not_checked != 0:
        extra.append('%s not checked' % (not_checked,))
    add_info_msg = ', '.join(extra)
    if add_info_msg:
        add_info_msg = ' (%s)' % (add_info_msg,)
    if nran == 0:
        print('No tests to run.')
    elif verbose > 0:
        if verbose < 2:
            print('') # Obsessive formatting.
        msg = '%s%s out of %s %s passed%s.'
        if final:
            msg = 'All done. %s' % (msg,)
        if npassed == nran:
            print(msg % ('', npassed, nran, ran_test, add_info_msg))
        else:
            print(msg % ('ERROR: only ', npassed, nran, ran_test, add_info_msg))
        if failed_locs:
            print('Failed %s in:\n\t%s' % (failed_test, '\n\t'.join(failed_locs)))
        if warning_locs:
            print('%s in:\n\t%s' % (warning.title(), '\n\t'.join(warning_locs)))
        if skipped_locs:
            print('Skipped %s in:\n\t%s' % (skipped_test, '\n\t'.join(skipped_locs)))
    else:
        print(' [%s/%s%s]'% (npassed, nran, add_info_msg))
    # Non-zero exit status signals that at least one ran test did not pass.
    if nran != npassed:
        return 1
    return 0
#--- main runner ---
def main(args):
    '''main controller procedure.

    Parses the command line, initialises the tests and dispatches to the
    requested action(s).  Returns the exit status (0 on success, 1 if any
    run/compared test failed).

    args: command-line arguments passed to testcode2.
    '''
    # Recorded so make-benchmarks can copy only files produced by this run.
    start_time = time.time()
    (options, actions) = parse_cmdline_args(args)
    # Shortcut names to options used multiple times.
    verbose = options.verbose
    userconfig = options.userconfig
    # Reuse the most recent test id when only inspecting existing output
    # (compare/diff/recheck) rather than generating new output with 'run'.
    reuse_id = 'run' not in actions and testcode2.compatibility.compat_any(
        [action in actions for action in ['compare', 'diff', 'recheck']]
    )
    (user_options, test_programs, tests) = init_tests(userconfig,
            options.jobconfig, options.test_id, reuse_id,
            options.executable, options.category, options.nprocs,
            options.benchmark, options.user_option,
            options.job_option)
    ret_val = 0
    # The header is noise when only tidying up.
    if not (len(actions) == 1 and 'tidy' in actions):
        start_status(tests, 'run' in actions, verbose)
    # Actions are not mutually exclusive; each updates ret_val in turn.
    if 'run' in actions:
        run_tests(tests, verbose, options.queue_system, options.tot_nprocs)
        ret_val = end_status(tests, 0, verbose)
    if 'recheck' in actions:
        not_checked = recheck_tests(tests, verbose,
                options.queue_system,options.tot_nprocs)
        ret_val = end_status(tests, not_checked, verbose)
    if 'compare' in actions:
        not_checked = compare_tests(tests, verbose)
        ret_val = end_status(tests, not_checked, verbose)
    if 'diff' in actions:
        diff_tests(tests, user_options['diff'], verbose)
    if 'tidy' in actions:
        tidy_tests(tests, options.older_than)
    if 'make-benchmarks' in actions:
        make_benchmarks(test_programs, tests, userconfig, start_time,
                options.insert)
    return ret_val
if __name__ == '__main__':
    try:
        sys.exit(main(sys.argv[1:]))
    except testcode2.exceptions.TestCodeError:
        # Expected, user-facing errors: print the message and exit non-zero
        # instead of dumping a traceback.
        err = sys.exc_info()[1]
        print(err)
        sys.exit(1)
|
gpl-2.0
| 2,921,529,414,655,211,000
| 38.803727
| 91
| 0.610324
| false
| 3.946059
| true
| false
| false
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/share/pyshared/orca/scripts/toolkits/WebKitGtk/script.py
|
1
|
18079
|
# Orca
#
# Copyright (C) 2010-2011 The Orca Team
#
# Author: Joanmarie Diggs <joanmarie.diggs@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
# Module metadata; the $...$ markers are historic VCS keyword-expansion
# placeholders.
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2010-2011 The Orca Team"
__license__ = "LGPL"
import pyatspi
import pyatspi.utils as utils
import orca.scripts.default as default
import orca.input_event as input_event
import orca.orca as orca
import orca.settings as settings
import orca.speechserver as speechserver
import orca.orca_state as orca_state
import orca.speech as speech
from orca.orca_i18n import _
import script_settings
from structural_navigation import StructuralNavigation
from braille_generator import BrailleGenerator
from speech_generator import SpeechGenerator
from script_utilities import Utilities
# Orca's (private) settings manager; used below to read user settings such
# as 'sayAllStyle' and 'enableSpeech'.
_settingsManager = getattr(orca, '_settingsManager')
########################################################################
# #
# The WebKitGtk script class. #
# #
########################################################################
class Script(default.Script):
    """Orca accessibility script for WebKitGtk applications.

    Extends the default script with document load/reload handling,
    caret and structural navigation, and a 'say all on load' option.
    """
    # Keys for which WebKitGtk performs its own caret navigation; see
    # skipObjectEvent below.
    CARET_NAVIGATION_KEYS = ['Left', 'Right', 'Up', 'Down', 'Home', 'End']
    def __init__(self, app, isBrowser=False):
        """Creates a new script for WebKitGtk applications.
        Arguments:
        - app: the application to create a script for.
        - isBrowser: True if app is a web browser (enables load
          announcements and say-all-on-load behaviour).
        """
        default.Script.__init__(self, app)
        # True while a document load is in progress (set by the
        # document:reload / load-complete / load-stopped listeners).
        self._loadingDocumentContent = False
        self._isBrowser = isBrowser
        # Created lazily by getAppPreferencesGUI.
        self.sayAllOnLoadCheckButton = None
    def getListeners(self):
        """Sets up the AT-SPI event listeners for this script."""
        listeners = default.Script.getListeners(self)
        listeners["document:reload"] = \
            self.onDocumentReload
        listeners["document:load-complete"] = \
            self.onDocumentLoadComplete
        listeners["document:load-stopped"] = \
            self.onDocumentLoadStopped
        listeners["object:state-changed:busy"] = \
            self.onStateChanged
        return listeners
    def setupInputEventHandlers(self):
        """Defines InputEventHandler fields for this script that can be
        called by the key and braille bindings."""
        default.Script.setupInputEventHandlers(self)
        self.inputEventHandlers.update(
            self.structuralNavigation.inputEventHandlers)
        self.inputEventHandlers["sayAllHandler"] = \
            input_event.InputEventHandler(
                Script.sayAll,
                # Translators: the Orca "SayAll" command allows the
                # user to press a key and have the entire document in
                # a window be automatically spoken to the user. If
                # the user presses any key during a SayAll operation,
                # the speech will be interrupted and the cursor will
                # be positioned at the point where the speech was
                # interrupted.
                #
                _("Speaks entire document."))
    def getKeyBindings(self):
        """Defines the key bindings for this script. Setup the default
        key bindings, then add one in for reading the input line.
        Returns an instance of keybindings.KeyBindings.
        """
        keyBindings = default.Script.getKeyBindings(self)
        # Merge in the structural navigation bindings (headings, links, ...).
        bindings = self.structuralNavigation.keyBindings
        for keyBinding in bindings.keyBindings:
            keyBindings.add(keyBinding)
        return keyBindings
    def getAppPreferencesGUI(self):
        """Return a GtkGrid containing the application unique configuration
        GUI items for the current application."""
        from gi.repository import Gtk
        grid = Gtk.Grid()
        grid.set_border_width(12)
        # Translators: when the user loads a new page in WebKit, they
        # can optionally tell Orca to automatically start reading a
        # page from beginning to end.
        #
        label = \
            _("Automatically start speaking a page when it is first _loaded")
        self.sayAllOnLoadCheckButton = \
            Gtk.CheckButton.new_with_mnemonic(label)
        self.sayAllOnLoadCheckButton.set_active(script_settings.sayAllOnLoad)
        grid.attach(self.sayAllOnLoadCheckButton, 0, 0, 1, 1)
        grid.show_all()
        return grid
    def setAppPreferences(self, prefs):
        """Write out the application specific preferences lines and set the
        new values.
        Arguments:
        - prefs: file handle for application preferences.
        """
        prefs.writelines("\n")
        prefix = "orca.scripts.toolkits.WebKitGtk.script_settings"
        prefs.writelines("import %s\n\n" % prefix)
        value = self.sayAllOnLoadCheckButton.get_active()
        prefs.writelines("%s.sayAllOnLoad = %s\n" % (prefix, value))
        # Apply immediately as well as persisting to the prefs file.
        script_settings.sayAllOnLoad = value
    def getBrailleGenerator(self):
        """Returns the braille generator for this script."""
        return BrailleGenerator(self)
    def getSpeechGenerator(self):
        """Returns the speech generator for this script."""
        return SpeechGenerator(self)
    def getStructuralNavigation(self):
        """Returns the 'structural navigation' class for this script."""
        types = self.getEnabledStructuralNavigationTypes()
        return StructuralNavigation(self, types, True)
    def getUtilities(self):
        """Returns the utilites for this script."""
        return Utilities(self)
    def onCaretMoved(self, event):
        """Called whenever the caret moves.
        Arguments:
        - event: the Event
        """
        lastKey, mods = self.utilities.lastKeyAndModifiers()
        # Caret moves caused by Tab are ignored here; focus handling
        # presents the new location.
        if lastKey in ['Tab', 'ISO_Left_Tab']:
            return
        # Special case: Down arrow from a focused link into its first child
        # only needs a braille refresh, not a focus change.
        if lastKey == 'Down' \
           and orca_state.locusOfFocus == event.source.parent \
           and event.source.getIndexInParent() == 0 \
           and orca_state.locusOfFocus.getRole() == pyatspi.ROLE_LINK:
            self.updateBraille(event.source)
            return
        if self.utilities.isWebKitGtk(orca_state.locusOfFocus):
            # Third argument False: update the locus of focus silently.
            orca.setLocusOfFocus(event, event.source, False)
        default.Script.onCaretMoved(self, event)
    def onDocumentReload(self, event):
        """Called when the reload button is hit for a web page."""
        if event.source.getRole() == pyatspi.ROLE_DOCUMENT_FRAME:
            self._loadingDocumentContent = True
    def onDocumentLoadComplete(self, event):
        """Called when a web page load is completed."""
        if event.source.getRole() != pyatspi.ROLE_DOCUMENT_FRAME:
            return
        self._loadingDocumentContent = False
        if not self._isBrowser:
            return
        # TODO: We need to see what happens in Epiphany on pages where focus
        # is grabbed rather than set the caret at the start. But for simple
        # content in both Yelp and Epiphany this is alright for now.
        obj, offset = self.setCaretAtStart(event.source)
        orca.setLocusOfFocus(event, obj, False)
        self.updateBraille(obj)
        if script_settings.sayAllOnLoad \
           and _settingsManager.getSetting('enableSpeech'):
            self.sayAll(None)
    def onDocumentLoadStopped(self, event):
        """Called when a web page load is interrupted."""
        if event.source.getRole() == pyatspi.ROLE_DOCUMENT_FRAME:
            self._loadingDocumentContent = False
    def onFocus(self, event):
        """Called whenever an object gets focus.
        Arguments:
        - event: the Event
        """
        obj = event.source
        role = obj.getRole()
        # A list item with children: let events from the children drive
        # the presentation instead.
        if role == pyatspi.ROLE_LIST_ITEM and obj.childCount:
            return
        # Focus on container/text roles is not presented directly.
        textRoles = [pyatspi.ROLE_HEADING,
                     pyatspi.ROLE_PANEL,
                     pyatspi.ROLE_PARAGRAPH,
                     pyatspi.ROLE_SECTION]
        if role in textRoles:
            return
        if role == pyatspi.ROLE_LINK and obj.childCount:
            # A link without a text interface: treat its first child as the
            # locus of focus instead.
            try:
                text = obj.queryText()
            except NotImplementedError:
                orca.setLocusOfFocus(event, obj[0])
        default.Script.onFocus(self, event)
    def onStateChanged(self, event):
        """Called whenever an object's state changes.
        Arguments:
        - event: the Event
        """
        # Only the document-frame 'busy' transitions are handled here; all
        # other state changes go to the default script.
        if not event.type.startswith("object:state-changed:busy"):
            default.Script.onStateChanged(self, event)
            return
        if not event.source \
           or event.source.getRole() != pyatspi.ROLE_DOCUMENT_FRAME \
           or not self._isBrowser:
            return
        # detail1 != 0 means the document frame became busy (loading).
        if event.detail1:
            # Translators: this is in reference to loading a web page
            # or some other content.
            #
            self.presentMessage(_("Loading. Please wait."))
        elif event.source.name:
            # Translators: this is in reference to loading a web page
            # or some other content.
            #
            self.presentMessage(_("Finished loading %s.") % event.source.name)
        else:
            # Translators: this is in reference to loading a web page
            # or some other content.
            #
            self.presentMessage(_("Finished loading."))
    def onTextSelectionChanged(self, event):
        """Called when an object's text selection changes.
        Arguments:
        - event: the Event
        """
        # The default script's method attempts to handle various and sundry
        # complications that simply do not apply here.
        #
        spokenRange = self.pointOfReference.get("spokenTextRange") or [0, 0]
        startOffset, endOffset = spokenRange
        self.speakTextSelectionState(event.source, startOffset, endOffset)
    def sayCharacter(self, obj):
        """Speak the character at the caret.
        Arguments:
        - obj: an Accessible object that implements the AccessibleText
          interface
        """
        # Separators have no meaningful character; speak the object instead.
        if obj.getRole() == pyatspi.ROLE_SEPARATOR:
            speech.speak(self.speechGenerator.generateSpeech(obj))
            return
        default.Script.sayCharacter(self, obj)
    def sayLine(self, obj):
        """Speaks the line of an AccessibleText object that contains the
        caret.
        Arguments:
        - obj: an Accessible object that implements the AccessibleText
          interface
        """
        default.Script.sayLine(self, obj)
        # Append the role name for roles the user should be told about
        # (e.g. "heading").
        rolesToSpeak = [pyatspi.ROLE_HEADING]
        if obj.getRole() in rolesToSpeak:
            speech.speak(self.speechGenerator.getRoleName(obj))
    def skipObjectEvent(self, event):
        """Gives us, and scripts, the ability to decide an event isn't
        worth taking the time to process under the current circumstances.
        Arguments:
        - event: the Event
        Returns True if we shouldn't bother processing this object event.
        """
        # Skip focus claims that merely result from WebKitGtk's own caret
        # navigation; genuine link focus is always processed.
        if event.type.startswith('object:state-changed:focused') \
           and event.detail1:
            if event.source.getRole() == pyatspi.ROLE_LINK:
                return False
            lastKey, mods = self.utilities.lastKeyAndModifiers()
            if lastKey in self.CARET_NAVIGATION_KEYS:
                return True
        return default.Script.skipObjectEvent(self, event)
    def useStructuralNavigationModel(self):
        """Returns True if we should do our own structural navigation.
        This should return False if we're in a form field, or not in
        document content.
        """
        # Roles whose focused widgets consume the navigation keys
        # themselves (form fields, menus, lists).
        doNotHandleRoles = [pyatspi.ROLE_ENTRY,
                            pyatspi.ROLE_TEXT,
                            pyatspi.ROLE_PASSWORD_TEXT,
                            pyatspi.ROLE_LIST,
                            pyatspi.ROLE_LIST_ITEM,
                            pyatspi.ROLE_MENU_ITEM]
        if not self.structuralNavigation.enabled:
            return False
        if not self.utilities.isWebKitGtk(orca_state.locusOfFocus):
            return False
        if orca_state.locusOfFocus.getRole() in doNotHandleRoles:
            states = orca_state.locusOfFocus.getState()
            if states.contains(pyatspi.STATE_FOCUSED):
                return False
        return True
    def setCaretAtStart(self, obj):
        """Attempts to set the caret at the specified offset in obj. Because
        this is not always possible, this method will attempt to locate the
        first place inside of obj in which the caret can be positioned.
        Arguments:
        - obj: the accessible object in which the caret should be placed.
        Returns the object and offset in which we were able to set the caret.
        Otherwise, None if we could not find a text object, and -1 if we were
        not able to set the caret.
        """
        def implementsText(obj):
            return 'Text' in utils.listInterfaces(obj)
        # Descend to the first descendant with a text interface if obj
        # itself has none.
        child = obj
        if not implementsText(obj):
            child = utils.findDescendant(obj, implementsText)
            if not child:
                return None, -1
        # Try each offset in turn until one accepts the caret.
        index = -1
        text = child.queryText()
        for i in xrange(text.characterCount):
            if text.setCaretOffset(i):
                index = i
                break
        return child, index
    def sayAll(self, inputEvent):
        """Speaks the contents of the document beginning with the present
        location. Overridden in this script because the sayAll could have
        been started on an object without text (such as an image).
        """
        if not self.utilities.isWebKitGtk(orca_state.locusOfFocus):
            return default.Script.sayAll(self, inputEvent)
        speech.sayAll(self.textLines(orca_state.locusOfFocus),
                      self.__sayAllProgressCallback)
        return True
    def getTextSegments(self, obj, boundary, offset=0):
        """Return [string, start, end, voice] segments for obj's text,
        walking boundary-sized chunks from offset to the end of the text.
        """
        segments = []
        text = obj.queryText()
        length = text.characterCount
        string, start, end = text.getTextAtOffset(offset, boundary)
        while string and offset < length:
            string = self.utilities.adjustForRepeats(string)
            voice = self.speechGenerator.getVoiceForString(obj, string)
            string = self.utilities.adjustForLinks(obj, string, start)
            segments.append([string, start, end, voice])
            # Advance to the next chunk.
            offset = end
            string, start, end = text.getTextAtOffset(offset, boundary)
        return segments
    def textLines(self, obj):
        """Creates a generator that can be used to iterate over each line
        of a text object, starting at the caret offset.
        Arguments:
        - obj: an Accessible that has a text specialization
        Returns an iterator that produces elements of the form:
        [SayAllContext, acss], where SayAllContext has the text to be
        spoken and acss is an ACSS instance for speaking the text.
        """
        document = utils.findAncestor(
            obj, lambda x: x.getRole() == pyatspi.ROLE_DOCUMENT_FRAME)
        allTextObjs = utils.findAllDescendants(
            document, lambda x: 'Text' in utils.listInterfaces(x))
        # Start from obj and keep only the topmost text objects (drop any
        # whose parent is itself in the list, to avoid speaking text twice).
        allTextObjs = allTextObjs[allTextObjs.index(obj):len(allTextObjs)]
        textObjs = filter(lambda x: x.parent not in allTextObjs, allTextObjs)
        if not textObjs:
            return
        # Chunk by line or by sentence depending on the user's SayAll style.
        boundary = pyatspi.TEXT_BOUNDARY_LINE_START
        sayAllStyle = _settingsManager.getSetting('sayAllStyle')
        if sayAllStyle == settings.SAYALL_STYLE_SENTENCE:
            boundary = pyatspi.TEXT_BOUNDARY_SENTENCE_START
        # Only the first object starts at the caret; the rest start at 0.
        offset = textObjs[0].queryText().caretOffset
        for textObj in textObjs:
            textSegments = self.getTextSegments(textObj, boundary, offset)
            roleInfo = self.speechGenerator.getRoleName(textObj)
            if roleInfo:
                roleName, voice = roleInfo
                textSegments.append([roleName, 0, -1, voice])
            for (string, start, end, voice) in textSegments:
                yield [speechserver.SayAllContext(textObj, string, start, end),
                       voice]
            offset = 0
    def __sayAllProgressCallback(self, context, progressType):
        # Progress events only track position; nothing to present.
        if progressType == speechserver.SayAllContext.PROGRESS:
            return
        obj = context.obj
        orca.setLocusOfFocus(None, obj, notifyScript=False)
        offset = context.currentOffset
        text = obj.queryText()
        if progressType == speechserver.SayAllContext.INTERRUPTED:
            # User stopped SayAll: leave the caret where speech stopped.
            text.setCaretOffset(offset)
            return
        # SayAllContext.COMPLETED doesn't necessarily mean done with SayAll;
        # just done with the current object. If we're still in SayAll, we do
        # not want to set the caret (and hence set focus) in a link we just
        # passed by.
        try:
            hypertext = obj.queryHypertext()
        except NotImplementedError:
            pass
        else:
            linkCount = hypertext.getNLinks()
            links = [hypertext.getLink(x) for x in range(linkCount)]
            if filter(lambda l: l.startIndex <= offset <= l.endIndex, links):
                return
        text.setCaretOffset(offset)
|
gpl-3.0
| 4,101,551,143,108,722,000
| 33.969052
| 79
| 0.61176
| false
| 4.330299
| false
| false
| false
|
googleads/google-ads-python
|
google/ads/googleads/v6/resources/types/search_term_view.py
|
1
|
2117
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v6.enums.types import search_term_targeting_status
# Register this module's messages with the proto-plus runtime; the manifest
# lists every message class defined below.
__protobuf__ = proto.module(
    package="google.ads.googleads.v6.resources",
    marshal="google.ads.googleads.v6",
    manifest={"SearchTermView",},
)
class SearchTermView(proto.Message):
    r"""A search term view with metrics aggregated by search term at
    the ad group level.
    Attributes:
        resource_name (str):
            Output only. The resource name of the search term view.
            Search term view resource names have the form:
            ``customers/{customer_id}/searchTermViews/{campaign_id}~{ad_group_id}~{URL-base64_search_term}``
        search_term (str):
            Output only. The search term.
        ad_group (str):
            Output only. The ad group the search term
            served in.
        status (google.ads.googleads.v6.enums.types.SearchTermTargetingStatusEnum.SearchTermTargetingStatus):
            Output only. Indicates whether the search
            term is currently one of your targeted or
            excluded keywords.
    """
    # NOTE: this file is generated; field numbers correspond to the wire
    # format and must not be changed by hand.
    resource_name = proto.Field(proto.STRING, number=1)
    # optional=True gives these scalar fields explicit presence tracking.
    search_term = proto.Field(proto.STRING, number=5, optional=True)
    ad_group = proto.Field(proto.STRING, number=6, optional=True)
    status = proto.Field(
        proto.ENUM,
        number=4,
        enum=search_term_targeting_status.SearchTermTargetingStatusEnum.SearchTermTargetingStatus,
    )
# Public API of this module: every message in the proto-plus manifest above.
__all__ = tuple(sorted(__protobuf__.manifest))
|
apache-2.0
| 4,458,826,763,105,366,500
| 33.145161
| 109
| 0.68871
| false
| 3.957009
| false
| false
| false
|
gr1d99/shopping-list
|
shopping_app/views.py
|
1
|
12925
|
"""This module contains all necessary views to power up shopping list web application"""
import time
import main
from flask import flash, redirect, render_template, request, session, url_for
from flask.views import View
from .db.shopping_list.shopping import ShoppingList
from .forms import (CreateShoppingItemForm, CreateShoppingListForm, LoginForm, RegistrationForm)
from .utils.helpers import (check_name, get_shl, check_duplicate_item_name,
change_shl_name, check_item, get_item, check_username,
check_email, get_user)
class RegisterView(View):
    """Handle user registration: GET renders the form, POST creates a user.

    Improvements: completes the previously truncated docstring, removes a
    duplicated (and discarded) form construction, and flattens the nested
    username/email availability checks into guard-style branches.
    """
    methods = ['GET', 'POST']

    def dispatch_request(self):
        """Render the registration form and create an account on POST.

        Redirects to the index page when the visitor is already logged in
        or after a successful registration; otherwise re-renders the form
        with any availability errors flashed.
        """
        if 'user' in session:
            flash(u'you are already logged in!', 'info')
            return redirect(url_for('index'))
        # One construction serves both GET rendering and POST validation.
        form = RegistrationForm(request.form)
        if request.method == 'POST' and form.validate():
            username = form.username.data
            email = form.email.data
            password = form.password.data
            errors = []
            # Username takes precedence over email in the error report,
            # matching the original check order.
            if check_username(username):
                errors.append('%(username)s already taken' % dict(username=username))
            elif check_email(email):
                errors.append('%(email)s already taken' % dict(email=email))
            else:
                user = main.APP.user_manager.create_user(username, email, password)
                main.APP_USERS.insert(0, user)
                flash(u'Success! you may now login using '
                      u'your username and password', 'success')
                return redirect(url_for('index'))
            flash(u'%(errors)s' % dict(errors=', '.join(errors)), 'warning')
        return render_template('register.html', title='Register', form=form)
class LoginView(View):
    """Handle user login: GET renders the form, POST checks credentials.

    Improvement: the form was previously constructed twice (the first
    instance was discarded on POST); it is now built once from request.form.
    """
    methods = ['GET', 'POST']

    def dispatch_request(self):
        """Render the login form and authenticate the user on POST."""
        if 'user' in session:
            flash(u'you are already logged in!', 'info')
            return redirect(url_for('index'))
        form = LoginForm(request.form)
        if request.method == 'POST':
            if form.validate():
                username = form.username.data
                # get_user() returns False when the username is unknown.
                user = get_user(username)
                if user is not False and user.verify_password(form.password.data):
                    session['user'] = username
                    flash(u'login successful', 'success')
                    return redirect(url_for('index'))
            # Deliberately vague message: do not reveal whether the
            # username exists.
            flash(u'incorrect username or password', 'info')
        return render_template('login.html', form=form, title='Login')
class LogoutView(View):
    """Log the current user out of the session."""
    methods = ['GET', ]

    def dispatch_request(self):
        """Remove the user from the session and redirect to the index page.

        Bug fix: the original returned before flashing, so the
        'successfully logged out!' message was only shown to visitors who
        were NOT logged in.  The flash now happens on the success path.
        """
        if 'user' in session:
            session.pop('user')
            flash(u'successfully logged out!', 'success')
        return redirect(url_for('index'))
class IndexView(View):
    """Render the public landing page."""
    methods = ['GET', ]

    def dispatch_request(self):
        """Render the home page, flagging whether a user is logged in."""
        is_auth = 'user' in session
        return render_template('index.html', is_auth=is_auth, title='Home Page')
class DashboardView(View):
    """Display the logged-in user's dashboard of shopping lists."""
    methods = ['GET', ]

    def dispatch_request(self):
        """Render the dashboard with only the current user's lists."""
        # Anonymous visitors are sent to the login page.
        if 'user' not in session:
            flash('you must be logged in, or create an account if you dont have one', 'warning')
            return redirect(url_for('login'))
        username = session.get('user')
        # Keep only the shopping lists created by this user.
        owned = [entry for entry in main.APP.shopping_list
                 if username == entry.get('shl').added_by]
        return render_template('dashboard.html', is_auth=True,
                               shopping_lists=owned, title='Dashboard',
                               username=username)
class CreateShoppingListView(View):
    """Create a new shopping list for the logged-in user."""
    methods = ['GET', 'POST']

    def dispatch_request(self):
        if 'user' not in session:
            flash(u'Warning!! you must be logged in', 'warning')
            return redirect(url_for('login'))
        form = CreateShoppingListForm()
        if request.method == 'POST':
            form = CreateShoppingListForm(request.form)
            if form.validate():
                name = form.name.data
                # check_name() is truthy when a list with this name exists.
                if not check_name(name):
                    user = session.get('user')
                    today = time.strftime("%x")
                    shl = ShoppingList()
                    shl.create(name, user, today)
                    main.APP.shopping_list.append({'name': name, 'shl': shl})
                    flash(u'Shopping list created', 'success')
                    return redirect(url_for('dashboard'))
                flash(u'Shopping list with that name already exists, '
                      u'try another name', 'warning')
            else:
                # Fix: this flash previously fired on every non-redirecting
                # POST, so a duplicate name showed BOTH the duplicate warning
                # and "Correct the errors". It now fires only when the form
                # itself failed validation.
                flash(u'Correct the errors', 'warning')
        # The login guard above guarantees an authenticated user here.
        return render_template('shopping_list/create-shopping-list.html', is_auth=True,
                               title='Create Shopping List', form=form)
class ShoppingListDetailView(View):
    """
    Show a single shopping list and handle creation of its shopping items.
    """
    methods = ['GET', 'POST']

    def dispatch_request(self):
        if 'user' not in session:  # check if user is logged in
            flash('you must be logged in, or create an account if you dont have one')
            return redirect(url_for('login'))
        form = CreateShoppingItemForm()
        name = request.args.get('name')
        if not check_name(name):
            flash(u'The requested shopping list does not exist!', 'danger')
            return redirect(url_for('dashboard'))
        shl = get_shl(name)
        if request.method == 'POST':
            form = CreateShoppingItemForm(request.form)
            if form.validate():
                item_name = form.item_name.data
                if check_duplicate_item_name(name, item_name):
                    flash(u"item with that name already exists", 'warning')
                else:
                    # Only instantiate the item once we know it will be kept.
                    shl_item = main.APP.shopping_item()
                    shl_item.create(item_name, float(form.quantity.data),
                                    float(form.price.data), False)
                    shl.get('shl').items.append(shl_item)
                    flash(u'Item successfully added', 'success')
                    return redirect(url_for('shopping-list-detail', name=name))
            else:
                # Fix: this flash previously fired on every non-redirecting
                # POST, so a duplicate item name showed BOTH warnings. It now
                # fires only when the form itself failed validation.
                flash(u'Please correct the errors below', 'warning')
        # The login guard above guarantees an authenticated user here.
        return render_template(
            'shopping_list/shopping-list-detail.html',
            obj=shl, form=form, is_auth=True, title=name.capitalize())
class UpdateShoppingListView(View):
    """
    Rename an existing shopping list.
    """
    methods = ['GET', 'POST']

    def dispatch_request(self):
        # Fix: this was the only view without the login guard, so
        # unauthenticated visitors could rename any list. Guard added for
        # consistency with every sibling view.
        if 'user' not in session:
            flash('you must be logged in, or create an account if you dont have one')
            return redirect(url_for('login'))
        name = request.args.get('name')
        form = CreateShoppingListForm(name=name)
        if not check_name(name):
            flash(u'The requested shopping list does not exist', 'danger')
            return redirect(url_for('dashboard'))
        if request.method == 'POST':
            form = CreateShoppingListForm(request.form)
            if form.validate():
                new_name = form.name.data
                shl = get_shl(name)
                shl.get('shl').update('name', new_name)
                # Keep the registry entry's cached name in sync with the list.
                change_shl_name(name, new_name)
                flash(u'Shopping list name changed successfully', 'success')
                return redirect(url_for('dashboard'))
        return render_template('shopping_list/shopping-list-edit.html', form=form, name=name)
class UpdateShoppingItemView(View):
    """
    Update a single shopping item (name, quantity, price, checked state).
    """
    methods = ['GET', 'POST']

    def dispatch_request(self):
        if 'user' not in session:  # check if user is logged in
            flash('you must be logged in, or create an account if you dont have one')
            return redirect(url_for('login'))
        name = request.args.get('sname')  # name of the shopping list
        item_name = request.args.get('iname')
        if not check_name(name):
            flash(u'The requested shopping list does not exist', 'warning')
            return redirect(url_for('dashboard'))
        if not check_item(name, item_name):
            flash(u'The requested shopping item does not exist', 'warning')
            return redirect(url_for('dashboard'))
        # Collect the item's current values to pre-populate the form.
        prev_data = {}
        for item in get_shl(name).get('shl').items:
            if item.name == item_name:
                prev_data.update({'name': item.name,
                                  'quantity': item.quantity,
                                  'price': item.price,
                                  'checked': item.checked})
                break
        if not prev_data:
            flash(u'The shopping item you are trying to update does not exist', 'danger')
            # Fix: the original only flashed and fell through, so the pop()
            # calls below raised KeyError on an empty dict (HTTP 500).
            return redirect(url_for('dashboard'))
        form = CreateShoppingItemForm(
            item_name=prev_data.pop('name'),
            quantity=prev_data.pop('quantity'),
            price=prev_data.pop('price'),
            checked=prev_data.pop('checked')
        )
        if request.method == 'POST':
            form = CreateShoppingItemForm(request.form)
            if form.validate():
                item = get_item(name, item_name)
                if item:
                    item.update('name', form.item_name.data)
                    item.update('quantity', float(form.quantity.data))
                    item.update('price', float(form.price.data))
                    item.update('checked', form.checked.data)
                flash(u'Item successfully updated', 'success')
                return redirect(url_for('shopping-list-detail', name=name))
        # The login guard above guarantees an authenticated user here.
        return render_template('shopping_list/shopping-item-edit.html', form=form,
                               item_name=item_name, is_auth=True,
                               title='Update %(item)s' % dict(item=item_name))
class RemoveShoppingListView(View):
    """Delete a single shopping list."""
    methods = ['GET', ]

    def dispatch_request(self):
        if 'user' not in session:  # check if user is logged in
            flash('you must be logged in, or create an account if you dont have one')
            return redirect(url_for('login'))
        name = request.args.get('name')
        # Fix: without this guard an unknown name makes get_shl() miss and
        # list.remove() raise ValueError (HTTP 500). Every sibling view
        # validates the name with check_name() first.
        if not check_name(name):
            flash(u'The requested shopping list does not exist', 'danger')
            return redirect(url_for('dashboard'))
        shl = get_shl(name)
        main.APP.shopping_list.remove(shl)
        flash(u'Success!! Shopping List removed', 'success')
        return redirect(url_for('dashboard'))
class RemoveShoppingItemView(View):
    """Remove one shopping item from a shopping list."""
    methods = ['GET', 'POST']

    def dispatch_request(self):
        if 'user' not in session:  # check if user is logged in
            flash('you must be logged in, or create an account if you dont have one')
            return redirect(url_for('login'))
        name = request.args.get('name')
        item_name = request.args.get('item_name')
        # Fix: validate the list name; get_shl() on an unknown name would
        # otherwise make the attribute access below blow up.
        if not check_name(name):
            flash(u'The requested shopping list does not exist', 'danger')
            return redirect(url_for('dashboard'))
        shl_items = get_shl(name).get('shl').items
        for item in shl_items:
            if item.name == item_name:
                # Safe despite the iteration: we return immediately after
                # the single removal.
                shl_items.remove(item)
                flash(u"Success!! Item succesfully removed", 'success')
                return redirect(url_for('shopping-list-detail', name=name))
        # Fix: the original fell off the end and returned None when the item
        # was not found, which Flask turns into a 500 error.
        flash(u'The requested shopping item does not exist', 'warning')
        return redirect(url_for('shopping-list-detail', name=name))
class AboutView(View):
    """Render the static about page."""
    methods = ['GET']

    def dispatch_request(self):
        # No authentication required; simply render the flat page.
        return render_template('flatpages/about.html', title='About')
|
mit
| 3,321,740,856,181,348,400
| 32.926509
| 96
| 0.554894
| false
| 4.237705
| false
| false
| false
|
overfl0/Bulletproof-Arma-Launcher
|
src/utils/filecache.py
|
1
|
1887
|
# Bulletproof Arma Launcher
# Copyright (C) 2017 Lukasz Taczuk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import unicode_literals
import errno
import hashlib
import os
from utils import paths
from utils import context
def get_cache_directory():
    """Return the directory where cached files are stored."""
    cache_dir = paths.get_launcher_directory('filecache')
    return cache_dir
def map_file(url):
    """Get the path where the file should be stored in the cache.

    The cache file name is the SHA-256 hex digest of the URL. The URL is
    encoded to UTF-8 first: hashlib requires bytes on Python 3 and rejects
    non-ASCII unicode on Python 2 (this module imports unicode_literals, so
    URLs here are unicode strings). ASCII URLs produce the same digest as
    before, so existing cache entries remain valid.
    """
    file_name = hashlib.sha256(url.encode('utf-8')).hexdigest()
    return os.path.join(get_cache_directory(), file_name)
def get_file(url):
    """Return the cached contents for *url*, or None on a cache miss.

    A missing cache file (ENOENT) is treated as a miss; any other I/O
    error is propagated to the caller.
    """
    path = map_file(url)
    try:
        # "with" replaces the original manual try/finally close and cannot
        # leak the handle if read() raises.
        with open(path, 'rb') as f:
            return f.read()
    except IOError as ex:
        if ex.errno == errno.ENOENT:  # No such file
            return None
        raise
def save_file(url, data):
    """Save the file contents to the cache.

    The contents are written to a temporary file which is then renamed
    over the final path, so a truncated file is never visible in the
    cache.
    """
    # Ensure the cache directory exists
    paths.mkdir_p(get_cache_directory())
    path = map_file(url)
    tmp_path = path + '_tmp'
    # "with" closes the handle even if write() raises (the original left
    # the file open on failure).
    with open(tmp_path, 'wb') as f:
        f.write(data)
    # Ensure the target does not exist (os.rename would raise on Windows)
    with context.ignore_nosuchfile_exception():
        os.unlink(path)
    os.rename(tmp_path, path)
|
gpl-3.0
| -3,351,975,119,354,723,300
| 23.506494
| 78
| 0.670906
| false
| 3.843177
| false
| false
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.