repo_name
stringlengths 5
92
| path
stringlengths 4
221
| copies
stringclasses 19
values | size
stringlengths 4
6
| content
stringlengths 766
896k
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 32
997
| alpha_frac
float64 0.25
0.96
| autogenerated
bool 1
class | ratio
float64 1.5
13.6
| config_test
bool 2
classes | has_no_keywords
bool 2
classes | few_assignments
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
flavour/tldrmp
|
modules/s3db/survey.py
|
1
|
135539
|
# -*- coding: utf-8 -*-
""" Sahana Eden Survey Tool
@copyright: 2011-2013 (c) Sahana Software Foundation
@license: MIT
ADAT - Assessment Data Analysis Tool
For more details see the blueprint at:
http://eden.sahanafoundation.org/wiki/BluePrint/SurveyTool/ADAT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
# NOTE: duplicate entries removed (survey_getAllSectionsForTemplate and
# survey_getAllQuestionsForTemplate were each listed twice)
__all__ = ["S3SurveyTemplateModel",
           "S3SurveyQuestionModel",
           "S3SurveyFormatterModel",
           "S3SurveySeriesModel",
           "S3SurveyCompleteModel",
           "S3SurveyTranslateModel",
           "survey_template_represent",
           "survey_series_represent",
           "survey_answer_list_represent",
           "survey_template_rheader",
           "survey_series_rheader",
           "survey_getAllSectionsForTemplate",
           "survey_getAllQuestionsForTemplate",
           "survey_buildQuestionnaireFromTemplate",
           "survey_buildQuestionnaireFromSeries",
           "survey_getTemplateFromSeries",
           "survey_getAllTemplates",
           "survey_getAllWidgetsForTemplate",
           "survey_getWidgetFromQuestion",
           "survey_getAllSectionsForSeries",
           "survey_getQuestionFromCode",
           "survey_getAllQuestionsForSeries",
           "survey_getAllQuestionsForComplete",
           "survey_save_answers_for_series",
           "survey_updateMetaData",
           "survey_getAllAnswersForQuestionInSeries",
           "survey_getQstnLayoutRules",
           "survey_getSeries",
           "survey_getSeriesName",
           "survey_getAllSeries",
           "survey_getAllTranslationsForTemplate",
           "survey_getAllTranslationsForSeries",
           "survey_build_template_summary",
           "survey_serieslist_dataTable_post",
           "survey_answerlist_dataTable_pre",
           "survey_answerlist_dataTable_post",
           "survey_json2py",
           "survey_json2list",
           ]
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
from gluon.dal import Row
from gluon.storage import Storage
from ..s3 import *
from s3chart import S3Chart
from s3survey import survey_question_type, \
survey_analysis_type, \
_debug
# =============================================================================
def json2py(jsonstr):
    """
        Utility function to convert a string in json to a python structure

        Python-style quoting (u'...' / '...') is first converted to the
        double quotes required by valid JSON.

        @param jsonstr: the JSON string (non-str input is returned unchanged)
        @return: the decoded python structure, or the original string if
                 it cannot be decoded
    """

    from xml.sax.saxutils import unescape

    if not isinstance(jsonstr, str):
        return jsonstr
    try:
        # Replace python-style quoting with valid JSON double quotes
        jsonstr = unescape(jsonstr, {"u'": '"'})
        jsonstr = unescape(jsonstr, {"'": '"'})
        pythonStructure = json.loads(jsonstr)
    except ValueError:
        # json.loads raises ValueError on invalid JSON; previously a bare
        # except also hid unrelated errors
        _debug("ERROR: attempting to convert %s using modules/s3db/survey/json2py.py" % (jsonstr))
        return jsonstr
    else:
        return pythonStructure

survey_json2py = json2py
# =============================================================================
def json2list(jsonstr):
    """
        Used to modify a json string to a python list.

        An empty string yields an empty list; a JSON array is decoded;
        anything else is treated as a comma-separated list.
    """

    if jsonstr == "":
        return []

    if jsonstr[0] == "[":
        values = json2py(jsonstr)
    else:
        values = jsonstr.split(",")

    # Always hand back a list, even for a single decoded value
    return values if isinstance(values, list) else [values]

survey_json2list = json2list
# =============================================================================
class S3SurveyTemplateModel(S3Model):
    """
        Template model

        The template model is a container for the question model
    """

    names = ["survey_template",
             "survey_template_id",
             "survey_section",
             "survey_template_status",
             ]

    def model(self):
        """
            Define the survey_template and survey_section tables,
            their CRUD strings, components and deduplicators.

            @return: Storage exposing survey_template_id (reusable field)
                     and survey_template_status (status code -> label)
        """

        T = current.T
        db = current.db

        template_status = {
            1: T("Pending"),
            2: T("Active"),
            3: T("Closed"),
            4: T("Master")
            }

        add_component = self.add_component
        configure = self.configure
        crud_strings = current.response.s3.crud_strings
        define_table = self.define_table

        # ---------------------------------------------------------------------
        # survey_template
        #
        # The template is the root table and acts as a container for
        # the questions that will be used in a survey.

        tablename = "survey_template"
        table = define_table(tablename,
                             Field("name", "string", length=120,
                                   notnull=True, unique=True,
                                   label = T("Template Name"),
                                   default="",
                                   ),
                             Field("description", "text", length=500,
                                   label = T("Description"),
                                   default=""),
                             Field("status", "integer",
                                   label = T("Status"),
                                   requires = IS_IN_SET(template_status,
                                                        zero=None),
                                   default=1,
                                   represent = lambda index: \
                                       template_status[index],
                                   readable=True,
                                   writable=False),
                             # Standard questions which may belong to all template
                             # competion_qstn: who completed the assessment
                             Field("competion_qstn", "string", length=200,
                                   label = T("Completion Question"),
                                   ),
                             # date_qstn: when it was completed (date)
                             Field("date_qstn", "string", length=200,
                                   label = T("Date Question"),
                                   ),
                             # time_qstn: when it was completed (time)
                             Field("time_qstn", "string", length=200,
                                   label = T("Time Question"),
                                   ),
                             # location_detail: json of the location question
                             # May consist of any of the following:
                             # L0, L1, L2, L3, L4, Lat, Lon
                             Field("location_detail", "string", length=200,
                                   label = T("Location Detail"),
                                   ),
                             # The priority question is the default question used
                             # to determine the priority of each point on the map.
                             # The data is stored as the question code.
                             Field("priority_qstn", "string", length=16,
                                   label = T("Default map question"),
                                   ),
                             *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Add Assessment Template"),
            title_display = T("Assessment Template Details"),
            title_list = T("Assessment Templates"),
            title_analysis_summary = T("Template Summary"),
            title_update = T("Edit Assessment Template"),
            title_question_details = T("Details of each question in the Template"),
            subtitle_create = T("Add a new Assessment Template"),
            subtitle_analysis_summary = T("Summary by Question Type - (The fewer text questions the better the analysis can be)"),
            label_list_button = T("List Assessment Templates"),
            label_create_button = T("Add a new Assessment Template"),
            label_delete_button = T("Delete this Assessment Template"),
            msg_record_created = T("Assessment Template added"),
            msg_record_modified = T("Assessment Template updated"),
            msg_record_deleted = T("Assessment Template deleted"),
            msg_list_empty = T("No Assessment Templates"))

        template_id = S3ReusableField("template_id", table,
                                      sortby="name",
                                      label=T("Template"),
                                      requires = IS_ONE_OF(db,
                                                           "survey_template.id",
                                                           self.survey_template_represent,
                                                           ),
                                      represent = self.survey_template_represent,
                                      ondelete = "CASCADE")
        # Components
        add_component("survey_series", survey_template="template_id")
        add_component("survey_translate", survey_template = "template_id")

        configure(tablename,
                  onvalidation = self.template_onvalidate,
                  onaccept = self.template_onaccept,
                  deduplicate = self.survey_template_duplicate,
                  )

        # ---------------------------------------------------------------------
        # survey_sections
        #
        # The questions can be grouped into sections this provides
        # the description of the section and
        # the position of the section within the template

        tablename = "survey_section"
        table = define_table(tablename,
                             Field("name", "string", length=120,
                                   notnull=True,
                                   default="",
                                   ),
                             Field("description", "text", length=500,
                                   default="",
                                   ),
                             Field("posn", "integer",
                                   ),
                             Field("cloned_section_id", "integer",
                                   readable=False,
                                   writable=False,
                                   ),
                             template_id(),
                             *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Add Template Section"),
            title_display = T("Template Section Details"),
            title_list = T("Template Sections"),
            title_update = T("Edit Template Section"),
            subtitle_create = T("Add a new Template Section"),
            label_list_button = T("List Template Sections"),
            label_create_button = T("Add a new Template Section"),
            label_delete_button = T("Delete this Template Section"),
            msg_record_created = T("Template Section added"),
            msg_record_modified = T("Template Section updated"),
            msg_record_deleted = T("Template Section deleted"),
            msg_list_empty = T("No Template Sections"))

        configure(tablename, orderby = tablename+".posn",
                  deduplicate=self.survey_section_duplicate
                  )

        # Pass names back to global scope (s3.*)
        return Storage(
            survey_template_id = template_id,
            survey_template_status = template_status,
        )

    # -------------------------------------------------------------------------
    @staticmethod
    def template_onvalidate(form):
        """
            It is not valid to re-import a template that already has a
            status of Active or higher
        """

        template_id = form.vars.id
        table = current.s3db.survey_template
        row = current.db(table.id == template_id).select(table.status,
                                                         limitby=(0, 1)
                                                         ).first()
        if row is not None and row.status > 1:
            return False
        return True

    # -------------------------------------------------------------------------
    @staticmethod
    def addQuestion(template_id, name, code, notes, type, posn, metadata=None):
        """
            Add a standard question to a template.

            The question (and its metadata) is created if not already
            present, attached to the special "Background Information"
            section, and linked into the template's question list.

            @param template_id: the template to attach the question to
            @param name: the question text
            @param code: the question code
            @param notes: enumerator help text
            @param type: the question type name (see survey_question_type)
            @param posn: position of the question within the template
            @param metadata: optional dict of descriptor -> value pairs
                             (was a mutable default argument)
        """

        db = current.db
        s3db = current.s3db

        if metadata is None:
            metadata = {}

        # Add the question to the database if it's not already there
        qstntable = s3db.survey_question
        query = (qstntable.name == name) & \
                (qstntable.code == code)
        record = db(query).select(qstntable.id, limitby=(0, 1)).first()
        if record:
            qstn_id = record.id
        else:
            qstn_id = qstntable.insert(name = name,
                                       code = code,
                                       notes = notes,
                                       type = type
                                       )
            qstn_metadata_table = s3db.survey_question_metadata
            for (descriptor, value) in metadata.items():
                qstn_metadata_table.insert(question_id = qstn_id,
                                           descriptor = descriptor,
                                           value = value
                                           )
        # Add these questions to the section: "Background Information"
        sectable = s3db.survey_section
        section_name = "Background Information"
        query = (sectable.name == section_name) & \
                (sectable.template_id == template_id)
        record = db(query).select(sectable.id, limitby=(0, 1)).first()
        if record:
            section_id = record.id
        else:
            section_id = sectable.insert(name = section_name,
                                         template_id = template_id,
                                         posn = 0 # special section with no position
                                         )
        # Add the question to the list of questions in the template
        qstn_list_table = s3db.survey_question_list
        query = (qstn_list_table.question_id == qstn_id) & \
                (qstn_list_table.template_id == template_id)
        # Fixed: select a field of the queried table (was qstntable.id,
        # i.e. a field of a table not in the query)
        record = db(query).select(qstn_list_table.id,
                                  limitby=(0, 1)).first()
        if not record:
            qstn_list_table.insert(question_id = qstn_id,
                                   template_id = template_id,
                                   section_id = section_id,
                                   posn = posn
                                   )

    # -------------------------------------------------------------------------
    @staticmethod
    def template_onaccept(form):
        """
            All of the standard questions will now be generated
            competion_qstn: who completed the assessment
            date_qstn: when it was completed (date)
            time_qstn: when it was completed (time)
            location_detail: json of the location question
                             May consist of any of the following:
                             L0, L1, L2, L3, L4, Lat, Lon
                             for json entry a question will be generated
            The code for each question will start with "STD-" followed by
            the type of question.
        """

        vars = form.vars
        if vars.id:
            template_id = vars.id
        else:
            return

        addQuestion = S3SurveyTemplateModel.addQuestion
        # Negative positions force the standard questions to appear first.
        # Initialised unconditionally: previously posn was only set inside
        # the competion_qstn branch, so "posn += 1" raised NameError when
        # that question was absent but a later one was present.
        posn = -10
        if vars.competion_qstn != None:
            name = vars.competion_qstn
            code = "STD-WHO"
            notes = "Who completed the assessment"
            type = "String"
            addQuestion(template_id, name, code, notes, type, posn)
        if vars.date_qstn != None:
            name = vars.date_qstn
            code = "STD-DATE"
            notes = "Date the assessment was completed"
            type = "Date"
            posn += 1
            addQuestion(template_id, name, code, notes, type, posn)
        if vars.time_qstn != None:
            name = vars.time_qstn
            code = "STD-TIME"
            notes = "Time the assessment was completed"
            type = "Time"
            posn += 1
            addQuestion(template_id, name, code, notes, type, posn)
        if vars.location_detail != None:
            locationList = json2py(vars.location_detail)
            if len(locationList) > 0:
                name = "The location P-code"
                code = "STD-P-Code"
                type = "String"
                posn += 1
                addQuestion(template_id, name, code, None, type, posn)
            for loc in locationList:
                if loc == "Lat":
                    name = "Latitude"
                elif loc == "Lon":
                    name = "Longitude"
                else:
                    name = loc
                code = "STD-%s" % loc
                if loc == "Lat" or loc == "Lon":
                    type = "Numeric"
                    metadata = {"Format": "nnn.nnnnnn"}
                else:
                    type = "Location"
                    metadata = {}
                posn += 1
                addQuestion(template_id, name, code, "", type, posn, metadata)

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_template_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for a record with a similar name, ignoring case
        """

        if job.tablename == "survey_template":
            table = job.table
            data = job.data
            name = "name" in data and data.name
            query = table.name.lower().like('%%%s%%' % name.lower())
            return duplicator(job, query)

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_section_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for a record with the same name
                - the same template
                - and the same position within the template
                - however if their is a record with position of zero then
                  that record should be updated

            NOTE(review): the query below matches on name and template
            only - posn is not part of the match; confirm against the
            docstring's stated rules.
        """

        if job.tablename == "survey_section":
            table = job.table
            data = job.data
            name = "name" in data and data.name
            template = "template_id" in data and data.template_id
            query = (table.name == name) & \
                    (table.template_id == template)
            return duplicator(job, query)
# =============================================================================
def survey_template_represent(id, row=None):
    """
        Display the template name rather than the id

        @param id: the survey_template record id
        @param row: the survey_template Row, if already loaded
        @return: the template name, NONE for a missing id, or
                 UNKNOWN_OPT if no record matches
    """

    if row:
        return row.name
    elif not id:
        return current.messages["NONE"]

    table = current.s3db.survey_template
    query = (table.id == id)
    record = current.db(query).select(table.name,
                                      limitby=(0, 1)).first()
    # Explicit None check instead of the previous bare try/except
    if record is not None:
        return record.name
    return current.messages.UNKNOWN_OPT
# =============================================================================
def survey_template_rheader(r, tabs=[]):
    """
        The template rheader

        Builds the resource header (tabs plus a table of the sections
        belonging to the template) for survey_template CRUD views.

        @param r: the S3Request
        @param tabs: ignored - the tab list is rebuilt locally
        @return: the rheader DIV, or None for non-HTML representations
    """

    if r.representation == "html":

        tablename, record = s3_rheader_resource(r)
        if tablename == "survey_template" and record:
            T = current.T
            s3db = current.s3db

            # Tabs
            tabs = [(T("Basic Details"), "read"),
                    (T("Question Details"),"templateRead/"),
                    (T("Question Summary"),"templateSummary/"),
                    #(T("Sections"), "section"),
                    ]
            if current.auth.s3_has_permission("create", "survey_translate"):
                tabs.append((T("Translate"),"translate"))
            rheader_tabs = s3_rheader_tabs(r, tabs)

            sectionTable = s3db.survey_section
            qlistTable = s3db.survey_question_list
            # "viewing" is set when reached via another resource,
            # formatted as "<tablename>.<id>"
            viewing = current.request.get_vars.get("viewing", None)
            if viewing:
                dummy, template_id = viewing.split(".")
            else:
                template_id = r.id

            query = (qlistTable.template_id == template_id) & \
                    (qlistTable.section_id == sectionTable.id)
            rows = current.db(query).select(sectionTable.id,
                                            sectionTable.name,
                                            orderby = qlistTable.posn)
            tsection = TABLE(_class="survey-section-list")
            lblSection = SPAN(T("Sections that are part of this template"),
                              _style="font-weight:bold;")
            if (rows.__len__() == 0):
                rsection = SPAN(T("As of yet, no sections have been added to this template."))
            else:
                rsection = TR()
                count = 0
                lastSection = ""
                # Lay the section names out four to a table row; the join
                # yields one row per question so consecutive duplicates of
                # the same section name are skipped
                for section in rows:
                    if section.name == lastSection:
                        continue
                    rsection.append(TD(section.name))
                    # Comment out the following until templates can be built online
                    #rsection.append(TD(A(section.name,
                    #                     _href=URL(c="survey",
                    #                               f="section",
                    #                               args="%s" % section.id))))
                    lastSection = section.name
                    count += 1
                    if count % 4 == 0:
                        tsection.append(rsection)
                        rsection=TR()
            # Append the final (possibly partial) row, or the empty message
            tsection.append(rsection)

            rheader = DIV(TABLE(
                          TR(
                             TH("%s: " % T("Name")),
                             record.name,
                             TH("%s: " % T("Status")),
                             s3db.survey_template_status[record.status],
                             ),
                          ),
                          lblSection,
                          tsection,
                          rheader_tabs)
            return rheader
    return None
# =============================================================================
def survey_getTemplateFromSeries(series_id):
    """
        Return the template data from the series_id passed in

        @ToDo: Remove wrapper
    """

    s3db = current.s3db
    series_table = s3db.survey_series
    template_table = s3db.survey_template
    query = (series_table.id == series_id) & \
            (template_table.id == series_table.template_id)
    return current.db(query).select(template_table.ALL,
                                    limitby=(0, 1)).first()
# =============================================================================
def survey_getAllTemplates():
    """
        Function to return all the templates on the database

        @ToDo: Remove wrapper
    """

    template_table = current.s3db.survey_template
    return current.db(template_table).select()
# =============================================================================
def survey_getAllWidgetsForTemplate(template_id):
    """
        Function to return the widgets for each question for the given
        template. The widgets are returned in a dict with the key being
        the question code.

        @param template_id: the survey_template record id
        @return: dict of question code -> widget object
    """

    s3db = current.s3db
    q_ltable = s3db.survey_question_list
    qsntable = s3db.survey_question
    query = (q_ltable.template_id == template_id) & \
            (q_ltable.question_id == qsntable.id)
    rows = current.db(query).select(qsntable.id,
                                    qsntable.code,
                                    qsntable.type,
                                    q_ltable.posn,
                                    )
    widgets = {}
    for row in rows:
        sqrow = row.survey_question
        qstnType = sqrow.type
        qstn_id = sqrow.id
        qstn_code = sqrow.code
        qstn_posn = row.survey_question_list.posn
        # Instantiate the widget class registered for this question type
        # and record the question's position on the widget
        widgetObj = survey_question_type[qstnType](qstn_id)
        widgets[qstn_code] = widgetObj
        widgetObj.question["posn"] = qstn_posn
        # (removed unused local "question = {}" from the original)
    return widgets
# =============================================================================
def survey_getAllSectionsForSeries(series_id):
    """
        Function to return the list of sections for the given series
        The sections are returned in the order of their position in the
        template.

        The data on each section is held in a dict and is as follows:
        section_id, name, template_id, and posn
    """

    series = survey_getSeries(series_id)
    return survey_getAllSectionsForTemplate(series.template_id)
# =============================================================================
def survey_buildQuestionnaireFromTemplate(template_id):
    """
        Build a form displaying all the questions for a given template_id

        @ToDo: Remove wrapper
    """

    qstns = survey_getAllQuestionsForTemplate(template_id)
    return buildQuestionsForm(qstns, readOnly=True)
# =============================================================================
def survey_getAllSectionsForTemplate(template_id):
    """
        function to return the list of sections for the given template
        The sections are returned in the order of their position in the
        template.

        The data on each section is held in a dict and is as follows:
        section_id, name, template_id, and posn
    """

    sectable = current.s3db.survey_section
    rows = current.db(sectable.template_id == template_id).select(
                                                sectable.id,
                                                sectable.name,
                                                sectable.template_id,
                                                sectable.posn,
                                                orderby = sectable.posn)
    # Repackage each Row as a plain dict
    return [{"section_id": row.id,
             "name": row.name,
             "template_id": row.template_id,
             "posn": row.posn,
             } for row in rows]
# =============================================================================
def survey_getWidgetFromQuestion(question_id):
    """
        Function that gets the right widget for the question
    """

    qtable = current.s3db.survey_question
    row = current.db(qtable.id == question_id).select(qtable.type,
                                                      limitby=(0, 1)
                                                      ).first()
    # Look up the widget class registered for this question type
    return survey_question_type[row.type](question_id)
# =============================================================================
def buildQuestionsForm(questions, complete_id=None, readOnly=False):
    """
        Create the form, hard-coded table layout :(

        @param questions: ordered list of question dicts, each with at
                          least "section", "qstn_id", "code" and "name"
                          keys (as returned by
                          survey_getAllQuestionsForTemplate)
        @param complete_id: a survey_complete record id; when given, the
                            stored answers are loaded into each widget
        @param readOnly: when True, render a static code/type/name table
                         instead of input widgets and omit the Save button
        @return: the FORM
    """

    form = FORM()
    table = None
    sectionTitle = ""
    for question in questions:
        if sectionTitle != question["section"]:
            # New section: separator (except before the first), then a
            # fresh scrollable DIV with a TABLE and a section header row.
            # NB "table" is rebound here - later appends target this
            # section's table.
            if sectionTitle != "":
                form.append(P())
                form.append(HR(_width="90%"))
                form.append(P())
            div = DIV(_class="survey_scrollable")
            table = TABLE()
            div.append(table)
            form.append(div)
            table.append(TR(TH(question["section"],
                               _colspan="2"),
                            _class="survey_section"))
            sectionTitle = question["section"]
        widgetObj = survey_getWidgetFromQuestion(question["qstn_id"])
        if readOnly:
            table.append(TR(TD(question["code"]),
                            TD(widgetObj.type_represent()),
                            TD(question["name"])
                            )
                         )
        else:
            if complete_id != None:
                # Pre-populate the widget with the previously saved answer
                widgetObj.loadAnswer(complete_id, question["qstn_id"])
            widget = widgetObj.display(question_id = question["qstn_id"])
            if widget != None:
                # A TABLE widget is wrapped in a spanning row; anything
                # else is assumed to be a ready-made row
                if isinstance(widget, TABLE):
                    table.append(TR(TD(widget, _colspan=2)))
                else:
                    table.append(widget)
    if not readOnly:
        button = INPUT(_type="submit", _name="Save", _value=current.T("Save"))
        form.append(button)
    return form
# =============================================================================
def survey_build_template_summary(template_id):
    """
        Build a dataTable summarising, per section, how many questions
        of each type the given template contains, with row and column
        totals.

        @param template_id: the survey_template record id
        @return: the dataTable FORM
    """

    from s3.s3data import S3DataTable
    T = current.T

    table = TABLE(_id="template_summary",
                  _class="dataTable display")
    hr = TR(TH(T("Position")), TH(T("Section")))
    # Map question type name -> its column position in the summary
    qstnTypeList = {}
    posn = 1
    for (key, type) in survey_question_type.items():
        if key == "Grid" or key == "GridChild":
            # Grid containers are not counted directly; their children
            # are tallied under their real question type below
            continue
        hr.append(TH(type().type_represent()))
        qstnTypeList[key] = posn
        posn += 1
    hr.append(TH(T("Total")))
    header = THEAD(hr)

    numOfQstnTypes = len(survey_question_type) - 1 # exclude the grid questions
    questions = survey_getAllQuestionsForTemplate(template_id)
    sectionTitle = ""
    line = []
    body = TBODY()
    section = 0
    total = ["", T("Total")] + [0]*numOfQstnTypes
    for question in questions:
        if sectionTitle != question["section"]:
            # Flush the previous section's counts before starting a new one
            if line != []:
                br = TR()
                for cell in line:
                    br.append(cell)
                body.append(br)
            section += 1
            sectionTitle = question["section"]
            line = [section, sectionTitle] + [0]*numOfQstnTypes
        if question["type"] == "Grid":
            continue
        if question["type"] == "GridChild":
            # get the real grid question type
            widgetObj = survey_getWidgetFromQuestion(question["qstn_id"])
            question["type"] = widgetObj.typeDescription
        # +1 skips the leading (position, section) cells of the row
        line[qstnTypeList[question["type"]]+1] += 1
        line[numOfQstnTypes+1] += 1
        total[qstnTypeList[question["type"]]+1] += 1
        total[numOfQstnTypes+1] += 1
    # Add the trailing row
    br = TR()
    for cell in line:
        br.append(cell)
    body.append(br)
    # Add the footer to the table
    foot = TFOOT()
    tr = TR()
    for cell in total:
        tr.append(TD(B(cell))) # don't use TH() otherwise dataTables will fail
    foot.append(tr)

    table.append(header)
    table.append(body)
    table.append(foot)

    # Turn off server side pagination
    s3 = current.response.s3
    s3.no_sspag = True
    s3.no_formats = True
    s3.dataTableID = None

    attr = S3DataTable.getConfigData()
    form = S3DataTable.htmlConfig(table,
                                  "template_summary",
                                  [[0, 'asc']], # order by
                                  "", # the filter string
                                  None, # the rfields
                                  dt_action_col = -1,
                                  **attr
                                  )
    return form
# =============================================================================
class S3SurveyQuestionModel(S3Model):
    """
        Question Model
    """

    names = ["survey_question",
             "survey_question_metadata",
             "survey_question_list",
             "survey_qstn_name_represent"
             ]

    def model(self):
        """
            Define the survey_question, survey_question_metadata and
            survey_question_list tables.

            @return: Storage exposing survey_qstn_name_represent
        """

        T = current.T
        s3 = current.response.s3

        configure = self.configure
        crud_strings = s3.crud_strings
        define_table = self.define_table

        # ---------------------------------------------------------------------
        # survey_question
        # Defines a question that will appear within a section, and thus belong
        # to the template.
        #
        # This holds the actual question and
        # A string code (unique within the template) is used to identify the question.
        #
        # It will have a type from the questionType dictionary.
        # This type will determine the options that can be associated with it.
        # A question can belong to many different sections.
        # The notes are to help the enumerator and will typically appear as a
        # footnote in the printed form.

        tablename = "survey_question"
        table = define_table(tablename,
                             Field("name", "string", length=200,
                                   notnull=True,
                                   represent = self.qstn_name_represent,
                                   ),
                             Field("code", "string", length=16,
                                   notnull=True,
                                   ),
                             Field("notes", "string", length=400
                                   ),
                             Field("type", "string", length=40,
                                   notnull=True,
                                   ),
                             Field("metadata", "text",
                                   ),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Add an Assessment Question"),
            title_display = T("Assessment Question Details"),
            title_list = T("Assessment Questions"),
            title_update = T("Edit Assessment Question"),
            subtitle_create = T("Add a new Assessment Question"),
            label_list_button = T("List Assessment Questions"),
            label_create_button = T("Add a new Assessment Question"),
            label_delete_button = T("Delete this Assessment Question"),
            msg_record_created = T("Assessment Question added"),
            msg_record_modified = T("Assessment Question updated"),
            msg_record_deleted = T("Assessment Question deleted"),
            msg_list_empty = T("No Assessment Questions"))

        configure(tablename,
                  onvalidation = self.question_onvalidate,
                  onaccept = self.question_onaccept,
                  deduplicate = self.survey_question_duplicate,
                  )

        # ---------------------------------------------------------------------
        # survey_question_metadata
        # referenced by
        #    the survey_question table and is used to manage
        #    the metadata that will be associated with a question type.
        #    For example: if the question type is option, then valid metadata
        #    might be:
        #    count: the number of options that will be presented: 3
        #    1 : the first option    : Female
        #    2 : the second option   : Male
        #    3 : the third option    : Not Specified
        #    So in the above case a question record will be associated with four
        #    question_metadata records.

        tablename = "survey_question_metadata"
        table = define_table(tablename,
                             Field("question_id",
                                   "reference survey_question",
                                   readable=False,
                                   writable=False
                                   ),
                             Field("descriptor",
                                   "string",
                                   length=20,
                                   notnull=True,
                                   ),
                             Field("value",
                                   "text",
                                   notnull=True,
                                   ),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Add Question Meta-Data"),
            title_display = T("Question Meta-Data Details"),
            title_list = T("Question Meta-Data"),
            title_update = T("Edit Question Meta-Data"),
            subtitle_create = T("Add new Question Meta-Data"),
            label_list_button = T("List Question Meta-Data"),
            label_create_button = T("Add new Question Meta-Data"),
            label_delete_button = T("Delete this Question Meta-Data"),
            msg_record_created = T("Question Meta-Data added"),
            msg_record_modified = T("Question Meta-Data updated"),
            msg_record_deleted = T("Question Meta-Data deleted"),
            msg_list_empty = T("No Question Meta-Data"),
            title_upload = T("Upload a Question List import file")
            )

        configure(tablename,
                  deduplicate = self.survey_question_metadata_duplicate
                  )

        # -------------------------------------------------------------------------
        # The survey_question_list table is a resolver between
        #    the survey_question and the survey_section tables.
        #
        # Along with ids mapping back to these tables
        # it will have a code that can be used to reference the question
        # it will have the position that the question will appear in the template

        tablename = "survey_question_list"
        table = define_table(tablename,
                             Field("posn",
                                   "integer",
                                   notnull=True,
                                   ),
                             self.survey_template_id(),
                             Field("question_id",
                                   "reference survey_question",
                                   readable=False,
                                   writable=False
                                   ),
                             Field("section_id",
                                   "reference survey_section",
                                   readable=False,
                                   writable=False
                                   ),
                             *s3_meta_fields()
                             )

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_upload = T("Upload an Assessment Template import file")
            )

        configure(tablename,
                  onaccept = self.question_list_onaccept,
                  deduplicate = self.survey_question_list_duplicate,
                  )

        # Pass names back to global scope (s3.*)
        # ---------------------------------------------------------------------
        return Storage(
            survey_qstn_name_represent = self.qstn_name_represent
        )

    # -------------------------------------------------------------------------
    @staticmethod
    def qstn_name_represent(value):
        """
            Return the question name, for locations in the gis hierarchy
            the localised name will be returned
        """

        if value in ("L0", "L1", "L2", "L3", "L4"):
            return current.gis.get_location_hierarchy(value)
        else:
            return value

    # -------------------------------------------------------------------------
    @staticmethod
    def question_onvalidate(form):
        """
            Any text with the metadata that is imported will be held in
            single quotes, rather than double quotes and so these need
            to be escaped to double quotes to make it valid JSON
        """

        from xml.sax.saxutils import unescape

        metadata = form.vars.metadata
        if metadata != None:
            # Write the converted value back so downstream processing
            # sees valid JSON (the original computed the unescaped
            # string but discarded it)
            form.vars.metadata = unescape(metadata, {"'":'"'})
        return True

    # -------------------------------------------------------------------------
    @staticmethod
    def question_onaccept(form):
        """
            All of the question metadata will be stored in the metadata
            field in a JSON format.
            They will then be inserted into the survey_question_metadata
            table pair will be a record on that table.
        """

        vars = form.vars
        if vars.metadata is None:
            return
        if vars.id:
            record = current.s3db.survey_question[vars.id]
        else:
            return
        if vars.metadata and \
           vars.metadata != "":
            survey_updateMetaData(record,
                                  vars.type,
                                  vars.metadata
                                  )

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_question_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for the question code
        """

        if job.tablename == "survey_question":
            table = job.table
            data = job.data
            code = "code" in data and data.code
            query = (table.code == code)
            return duplicator(job, query)

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_question_metadata_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for the question_id and descriptor
        """

        if job.tablename == "survey_question_metadata":
            table = job.table
            data = job.data
            question = "question_id" in data and data.question_id
            descriptor = "descriptor" in data and data.descriptor
            query = (table.descriptor == descriptor) & \
                    (table.question_id == question)
            return duplicator(job, query)

    # -------------------------------------------------------------------------
    @staticmethod
    def question_list_onaccept(form):
        """
            If a grid question is added to the the list then all of the
            grid children will need to be added as well
        """

        qstntable = current.s3db.survey_question
        try:
            vars = form.vars
            question_id = vars.question_id
            template_id = vars.template_id
            section_id = vars.section_id
            posn = vars.posn
        except:
            # NOTE(review): best-effort - incomplete form data is ignored
            return
        record = qstntable[question_id]
        try:
            type = record.type
        except:
            _debug("survey question missing type: %s" % record)
            return
        if type == "Grid":
            widgetObj = survey_question_type["Grid"]()
            widgetObj.insertChildrenToList(question_id,
                                           template_id,
                                           section_id,
                                           posn,
                                           )
        if type == "Location":
            widgetObj = survey_question_type["Location"]()
            widgetObj.insertChildrenToList(question_id,
                                           template_id,
                                           section_id,
                                           posn,
                                           )

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_question_list_duplicate(job):
        """
            Rules for finding a duplicate:
                - The template_id, question_id and section_id are the same
        """

        if job.tablename == "survey_question_list":
            table = job.table
            data = job.data
            tid = "template_id" in data and data.template_id
            qid = "question_id" in data and data.question_id
            sid = "section_id" in data and data.section_id
            query = (table.template_id == tid) & \
                    (table.question_id == qid) & \
                    (table.section_id == sid)
            return duplicator(job, query)
# =============================================================================
def survey_getQuestionFromCode(code, series_id=None):
    """
        Return the question with the given code.

        If series_id is given the lookup is restricted to that series,
        otherwise the first match across all series is used.

        @return: a dict with the keys qstn_id, code, name, type and posn,
                 empty if no matching question exists
    """
    s3db = current.s3db
    sertable = s3db.survey_series
    q_ltable = s3db.survey_question_list
    qsntable = s3db.survey_question
    query = (q_ltable.template_id == sertable.template_id) & \
            (q_ltable.question_id == qsntable.id) & \
            (qsntable.code == code)
    if series_id != None:
        # Restrict the join to the requested series
        query &= (sertable.id == series_id)
    record = current.db(query).select(qsntable.id,
                                      qsntable.code,
                                      qsntable.name,
                                      qsntable.type,
                                      q_ltable.posn,
                                      limitby=(0, 1)).first()
    question = {}
    if record != None:
        sq = record.survey_question
        question = {"qstn_id": sq.id,
                    "code": sq.code,
                    "name": sq.name,
                    "type": sq.type,
                    "posn": record.survey_question_list.posn,
                    }
    return question
# =============================================================================
def survey_getAllQuestionsForTemplate(template_id):
    """
        Return the list of questions for the given template, in the
        order of their position within the template.

        @return: a list of dicts, each with the keys
                 qstn_id, code, name, type, posn, section
    """
    s3db = current.s3db
    sectable = s3db.survey_section
    q_ltable = s3db.survey_question_list
    qsntable = s3db.survey_question
    query = (q_ltable.template_id == template_id) & \
            (q_ltable.section_id == sectable.id) & \
            (q_ltable.question_id == qsntable.id)
    rows = current.db(query).select(qsntable.id,
                                    qsntable.code,
                                    qsntable.name,
                                    qsntable.type,
                                    sectable.name,
                                    q_ltable.posn,
                                    orderby=(q_ltable.posn))
    name_represent = s3db.survey_qstn_name_represent
    questions = []
    append = questions.append
    for row in rows:
        sq = row.survey_question
        append({"qstn_id": sq.id,
                "code": sq.code,
                "name": name_represent(sq.name),
                "type": sq.type,
                "posn": row.survey_question_list.posn,
                "section": row.survey_section.name,
                })
    return questions
# =============================================================================
def survey_getAllQuestionsForSeries(series_id):
    """
        Return the list of questions for the template used by the
        given series, in the order of their position in the template.

        @return: a list of dicts, each with the keys
                 qstn_id, code, name, type, posn, section
    """
    table = current.s3db.survey_series
    row = current.db(table.id == series_id).select(table.template_id,
                                                   limitby=(0, 1)).first()
    # Delegate to the template-level lookup
    return survey_getAllQuestionsForTemplate(row.template_id)
# =============================================================================
def survey_getAllQuestionsForComplete(complete_id):
    """
        Return a tuple (questions, series_id) for the given completed
        response, where questions is the ordered question list of the
        series' template.
    """
    table = current.s3db.survey_complete
    row = current.db(table.id == complete_id).select(table.series_id,
                                                     limitby=(0, 1)).first()
    series_id = row.series_id
    # The questions come from the series' template
    return (survey_getAllQuestionsForSeries(series_id), series_id)
# =============================================================================
def survey_get_series_questions_of_type(questionList, type):
    """
        Filter a list of questions, returning those whose type is one
        of the given type(s).

        @param questionList: list of question dicts (as returned by
                             survey_getAllQuestionsForSeries)
        @param type: a single type name or a list/tuple of type names

        Link and GridChild questions are included when the type of
        their parent question matches; their name is then replaced by
        the widget's full name.
    """
    if isinstance(type, (list, tuple)):
        types = type
    else:
        # Must be (type,) not (type): a bare (type) is just the string
        # itself and the "in" test below would then match substrings,
        # e.g. "YesNo" would wrongly match "YesNoDontKnow"
        types = (type,)
    questions = []
    for question in questionList:
        if question["type"] in types:
            questions.append(question)
        elif question["type"] == "Link" or \
             question["type"] == "GridChild":
            widgetObj = survey_getWidgetFromQuestion(question["qstn_id"])
            if widgetObj.getParentType() in types:
                question["name"] = widgetObj.fullName()
                questions.append(question)
    return questions
# =============================================================================
def survey_getQuestionFromName(name, series_id):
    """
        Return the question for the given series with the name that
        matches the one passed in.

        If no question of that name exists, the name is checked against
        the current location hierarchy level names and, on a match, the
        lookup is retried with the level code (e.g. "L1").

        @return: a dict with the keys qstn_id, code, name, type, posn

        NOTE(review): if the name matches neither a question nor a
        location level, record remains None and the attribute access
        below raises AttributeError - presumably callers always pass a
        valid name; confirm before relying on this in new code.
    """
    s3db = current.s3db
    sertable = s3db.survey_series
    q_ltable = s3db.survey_question_list
    qsntable = s3db.survey_question
    query = (sertable.id == series_id) & \
            (q_ltable.template_id == sertable.template_id) & \
            (q_ltable.question_id == qsntable.id) & \
            (qsntable.name == name)
    record = current.db(query).select(qsntable.id,
                                      qsntable.code,
                                      qsntable.name,
                                      qsntable.type,
                                      q_ltable.posn,
                                      limitby=(0, 1)).first()
    if record == None:
        # Unable to get the record from the question name
        # It could be because the question is a location
        # So get the location names and then check
        locList = current.gis.get_all_current_levels()
        for row in locList.items():
            if row[1] == name:
                # Retry with the location level code instead of the label
                return survey_getQuestionFromName(row[0],series_id)
    question = {}
    sq = record.survey_question
    question["qstn_id"] = sq.id
    question["code"] = sq.code
    question["name"] = sq.name
    question["type"] = sq.type
    question["posn"] = record.survey_question_list.posn
    return question
# =============================================================================
def survey_updateMetaData (record, type, metadata):
    """
        Insert the metadata of a question into survey_question_metadata,
        one row per (descriptor, value) pair.

        @param record: the survey_question record the metadata belongs to
        @param type: the question type name
        @param metadata: either a JSON string or an already parsed dict
    """
    # The metadata can either be passed in as a JSON string
    # or as a parsed map - if it is a string load the map
    if isinstance(metadata, str):
        metadataList = json2py(metadata)
    else:
        metadataList = metadata
    metatable = current.s3db.survey_question_metadata
    question_id = record.id
    for (desc, value) in metadataList.items():
        if not isinstance(value, str):
            # web2py stomps all over a list so convert back to a string
            # before inserting it on the database
            value = json.dumps(value)
        metatable.insert(question_id = question_id,
                         descriptor = desc.strip(),
                         value = value.strip()
                         )
    if type == "Grid":
        # Grid questions manage their own child questions
        widgetObj = survey_question_type["Grid"]()
        widgetObj.insertChildren(record, metadataList)
# =============================================================================
class S3SurveyFormatterModel(S3Model):
    """
        The survey_formatter table defines the order in which the questions
        will be laid out when a formatted presentation is used.

        The idea is to be able to present the questions in a format that
        best uses the available space and is familiar to those using the
        tool.

        Examples of formatted presentation are the spreadsheet and the web
        form. This may be extended to PDF documents.

        The rules are held as a JSON record and describe where each question
        within the section should appear in terms of rows and columns. Each
        question is referenced by the question code.

        For example assume a section with the following eight questions:
        QSTN_1, QSTN_2, QSTN_3, QSTN_4, QSTN_5, QSTN_6, QSTN_7, QSTN_8
        Then to display them in three rows:
        [[QSTN_1, QSTN_2, QSTN_3], [QSTN_4, QSTN_5, QSTN_6], [QSTN_7, QSTN_8]]
        would present it as follows:
        QSTN_1, QSTN_2, QSTN_3,
        QSTN_4, QSTN_5, QSTN_6,
        QSTN_7, QSTN_8

        The order of the questions does not need to be preserved, thus:
        [[QSTN_1, QSTN_2], [QSTN_4, QSTN_5, QSTN_3], [QSTN_7, QSTN_8, QSTN_6]]
        would be valid, and give:
        QSTN_1, QSTN_2,
        QSTN_4, QSTN_5, QSTN_3,
        QSTN_7, QSTN_8, QSTN_6,

        ***NOTE***
        When importing this record with a CSV file the question code will be
        single quoted, rather than double quoted which JSON requires.
        This is because the whole rule needs to be double quoted. Code that
        extracts the records from the table will then need to change all
        single quotes to double quotes. This can be done as follows:

        rowList = json2py(rules)
    """
    names = ["survey_formatter"]
    def model(self):
        T = current.T
        # Presentation methods that a rule set can target
        survey_formatter_methods = {
            1: T("Default"),
            2: T("Web Form"),
            3: T("Spreadsheet"),
            4: T("PDF"),
        }
        # ---------------------------------------------------------------------
        tablename = "survey_formatter"
        table = self.define_table(tablename,
                                  self.survey_template_id(),
                                  Field("section_id", "reference survey_section",
                                        readable=False,
                                        writable=False
                                        ),
                                  Field("method", "integer",
                                        requires = IS_IN_SET(survey_formatter_methods,
                                                             zero=None),
                                        default=1,
                                        represent = lambda index: \
                                            survey_formatter_methods[index],
                                        readable=True,
                                        writable=False),
                                  # JSON rule set, see class docstring
                                  Field("rules", "text", default=""),
                                  *s3_meta_fields()
                                  )
        self.configure(tablename,
                       onaccept = self.formatter_onaccept,
                       deduplicate=self.survey_formatter_duplicate
                       )
        # ---------------------------------------------------------------------
        return Storage()
    # -------------------------------------------------------------------------
    @staticmethod
    def formatter_onaccept(form):
        """
            If this is the formatter rules for the Background Information
            section then add the standard questions to the layout
        """
        s3db = current.s3db
        section_id = form.vars.section_id
        sectionTbl = s3db.survey_section
        section_name = sectionTbl[section_id].name
        if section_name == "Background Information":
            col1 = []
            # Add the default layout
            templateTbl = s3db.survey_template
            template = templateTbl[form.vars.template_id]
            # NOTE(review): "competion_qstn" looks misspelt - presumably it
            # matches the field name as defined on survey_template; verify
            if template.competion_qstn != "":
                col1.append("STD-WHO")
            if template.date_qstn != "":
                col1.append("STD-DATE")
            if template.time_qstn != "":
                col1.append("STD-TIME")
            if "location_detail" in template:
                col2 = ["STD-P-Code"]
                locationList = json2py(template.location_detail)
                for loc in locationList:
                    col2.append("STD-%s" % loc)
                col = [col1, col2]
                rule = [{"columns":col}]
                ruleList = json2py(form.vars.rules)
                # Prepend the standard-question columns to the rules
                ruleList[:0]=rule
                rules = json.dumps(ruleList)
                db = current.db
                ftable = db.survey_formatter
                db(ftable.id == form.vars.id).update(rules = rules)
    # -------------------------------------------------------------------------
    @staticmethod
    def survey_formatter_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for a record with the same template_id and section_id
        """
        if job.tablename == "survey_formatter":
            table = job.table
            data = job.data
            tid = "template_id" in data and data.template_id
            sid = "section_id" in data and data.section_id
            query = (table.template_id == tid) & \
                    (table.section_id == sid)
            return duplicator(job, query)
# =============================================================================
def survey_getQstnLayoutRules(template_id,
                              section_id,
                              method = 1
                              ):
    """
        Return the layout rules for the questions of the given section
        within the template, for use when generating a formatted layout.

        A survey_formatter record matching the requested method is
        preferred; failing that, a default (method 1) record is used.
        If the section has no survey_formatter record at all, the posn
        field of the survey_question_list records is used instead.

        @return: a list of rows, each row being a list of question codes
    """
    db = current.db
    s3db = current.s3db
    # Search for layout rules on the survey_formatter table
    fmttable = s3db.survey_formatter
    query = (fmttable.template_id == template_id) & \
            (fmttable.section_id == section_id)
    rules = None
    drules = None # default rules
    for row in db(query).select(fmttable.method,
                                fmttable.rules):
        if row.method == method:
            rules = row.rules
            break
        elif row.method == 1:
            drules = row.rules
    if rules is None and drules is not None:
        # Fall back to the default method's rules
        rules = drules
    if rules:
        # Convert the JSON rules to python
        return json2py(rules)
    # No formatter rules: derive the layout from survey_question_list,
    # one question per row, ordered by position
    q_ltable = s3db.survey_question_list
    qsntable = s3db.survey_question
    query = (q_ltable.template_id == template_id) & \
            (q_ltable.section_id == section_id) & \
            (q_ltable.question_id == qsntable.id)
    rows = db(query).select(qsntable.code,
                            q_ltable.posn,
                            orderby=(q_ltable.posn))
    return [[row.survey_question.code] for row in rows]
# =============================================================================
class S3SurveySeriesModel(S3Model):
    """
        Series Model

        A series is one use of a template, e.g. for a particular event or
        data-collection exercise; it is the container for all the
        completed responses.
    """
    names = ["survey_series",
             "survey_series_status",
             ]
    def model(self):
        T = current.T
        person_id = self.pr_person_id
        pr_person_comment = self.pr_person_comment
        organisation_id = self.org_organisation_id
        s3_date_represent = S3DateTime.date_represent
        s3_date_format = current.deployment_settings.get_L10n_date_format()
        crud_strings = current.response.s3.crud_strings
        set_method = self.set_method
        if current.deployment_settings.get_org_autocomplete():
            org_widget = S3OrganisationAutocompleteWidget(default_from_profile=True)
        else:
            org_widget = None
        # ---------------------------------------------------------------------
        # The survey_series table is used to hold all uses of a template
        #
        # When a series is first created the template status will change from
        # Pending to Active and at the stage no further changes to the
        # template can be made.
        #
        # Typically a series will be created for an event, which may be a
        # response to a natural disaster, an exercise,
        # or regular data collection activity.
        #
        # The series is a container for all the responses for the event
        series_status = {
            1: T("Active"),
            2: T("Closed"),
        }
        tablename = "survey_series"
        table = self.define_table(tablename,
                                  Field("name", "string", length=120,
                                        default="",
                                        requires = IS_NOT_EMPTY()),
                                  Field("description", "text", default="", length=500),
                                  Field("status", "integer",
                                        requires = IS_IN_SET(series_status,
                                                             zero=None),
                                        default=1,
                                        represent = lambda index: series_status[index],
                                        readable=True,
                                        writable=False),
                                  # Template cannot be deleted while in use
                                  self.survey_template_id(empty=False,
                                                          ondelete="RESTRICT"),
                                  person_id(),
                                  organisation_id(widget = org_widget),
                                  Field("logo", "string", default="", length=512),
                                  Field("language", "string", default="en", length=8),
                                  Field("start_date", "date",
                                        requires = IS_EMPTY_OR(IS_DATE(format = s3_date_format)),
                                        represent = s3_date_represent,
                                        widget = S3DateWidget(),
                                        default=None),
                                  Field("end_date", "date",
                                        requires = IS_EMPTY_OR(IS_DATE(format = s3_date_format)),
                                        represent = s3_date_represent,
                                        widget = S3DateWidget(),
                                        default=None),
                                  #self.super_link("source_id", "doc_source_entity"),
                                  *s3_meta_fields())
        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Conduct a Disaster Assessment"),
            title_display = T("Details of Disaster Assessment"),
            title_list = T("Disaster Assessments"),
            title_update = T("Edit this Disaster Assessment"),
            title_analysis_summary = T("Disaster Assessment Summary"),
            title_analysis_chart = T("Disaster Assessment Chart"),
            title_map = T("Disaster Assessment Map"),
            subtitle_create = T("Add a new Disaster Assessment"),
            subtitle_analysis_summary = T("Summary of Completed Assessment Forms"),
            help_analysis_summary = T("Click on questions below to select them, then click 'Display Selected Questions' button to view the selected questions for all Completed Assessment Forms"),
            subtitle_analysis_chart = T("Select a label question and at least one numeric question to display the chart."),
            subtitle_map = T("Disaster Assessment Map"),
            label_list_button = T("List Disaster Assessments"),
            label_create_button = T("Add a new Disaster Assessment"),
            label_delete_button = T("Delete this Disaster Assessment"),
            msg_record_created = T("Disaster Assessment added"),
            msg_record_modified = T("Disaster Assessment updated"),
            msg_record_deleted = T("Disaster Assessment deleted"),
            msg_list_empty = T("No Disaster Assessments"))
        self.configure(tablename,
                       # After creation, jump straight to entering an assessment
                       create_next = URL(f="newAssessment",
                                         vars={"viewing":"survey_series.[id]"}),
                       onaccept = self.series_onaccept,
                       deduplicate = self.survey_series_duplicate,
                       )
        # Components
        self.add_component("survey_complete", survey_series="series_id")
        # Custom Methods
        set_method("survey", "series", method="summary", action=self.seriesSummary)
        set_method("survey", "series", method="graph", action=self.seriesGraph)
        set_method("survey", "series", method="map", action=self.seriesMap)
        set_method("survey", "series",
                   method="series_chart_download",
                   action=self.seriesChartDownload
                   )
        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        return Storage(
            survey_series_status = series_status,
        )
# -------------------------------------------------------------------------
@staticmethod
def series_onaccept(form):
"""
Ensure that the template status is set to Active
"""
if form.vars.template_id:
template_id = form.vars.template_id
else:
return
table = current.s3db.survey_template
current.db(table.id == template_id).update(status = 2)
# -------------------------------------------------------------------------
@staticmethod
def survey_series_duplicate(job):
"""
Rules for finding a duplicate:
- Look for a record with a similar name, ignoring case
"""
if job.tablename == "survey_series":
table = job.table
data = job.data
name = "name" in data and data.name
query = table.name.lower().like('%%%s%%' % name.lower())
return duplicator(job, query)
# -------------------------------------------------------------------------
    @staticmethod
    def seriesSummary(r, **attr):
        """
            Custom method to show a summary of the series.

            On GET: displays the question-selection form for the series.
            On POST (or when "mode" is in the request vars): displays the
            answers of the selected questions for all completed forms,
            also exportable as PDF/XLS.
        """
        db = current.db
        s3db = current.s3db
        request = current.request
        s3 = current.response.s3
        # Offset between question posn and the dataTable row ids used
        # by the selection form
        posn_offset = 11
        # Retain the rheader
        rheader = attr.get("rheader", None)
        if rheader:
            rheader = rheader(r)
            output = dict(rheader=rheader)
        else:
            output = dict()
        if request.env.request_method == "POST" \
           or "mode" in request.vars:
            # This means that the user has selected the questions and
            # Wants to display the details of the selected questions
            crud_strings = s3.crud_strings["survey_complete"]
            question_ids = []
            vars = request.vars
            if "mode" in vars:
                mode = vars["mode"]
                series_id = r.id
                if "selected" in vars:
                    selected = vars["selected"].split(",")
                else:
                    selected = []
                q_ltable = s3db.survey_question_list
                sertable = s3db.survey_series
                query = (sertable.id == series_id) & \
                        (sertable.template_id == q_ltable.template_id)
                questions = db(query).select(q_ltable.posn,
                                             q_ltable.question_id,
                                             orderby = q_ltable.posn)
                # Translate the selected row ids back to question ids,
                # honouring the Inclusive/Exclusive selection mode
                for question in questions:
                    qstn_posn = question.posn + posn_offset
                    if mode == "Inclusive":
                        if str(qstn_posn) in selected:
                            question_ids.append(str(question.question_id))
                    elif mode == "Exclusive":
                        if str(qstn_posn) not in selected:
                            question_ids.append(str(question.question_id))
                items = buildCompletedList(series_id, question_ids)
                if r.representation == "xls":
                    from ..s3.codecs.xls import S3XLS
                    exporter = S3XLS()
                    return exporter.encode(items,
                                           title=crud_strings.title_selected,
                                           use_colour=False
                                           )
                if r.representation == "html":
                    table = buildTableFromCompletedList(items)
                    #exporter = S3Exporter()
                    #table = exporter.html(items)
                    output["items"] = table
                    output["sortby"] = [[0, "asc"]]
                    # Export links preserve the current selection
                    url_pdf = URL(c="survey", f="series",
                                  args=[series_id, "summary.pdf"],
                                  vars = {"mode": mode,
                                          "selected": vars["selected"]}
                                  )
                    url_xls = URL(c="survey", f="series",
                                  args=[series_id, "summary.xls"],
                                  vars = {"mode": mode,
                                          "selected": vars["selected"]}
                                  )
                    s3.formats["pdf"] = url_pdf
                    s3.formats["xls"] = url_xls
            else:
                output["items"] = None
            output["title"] = crud_strings.title_selected
            output["subtitle"] = crud_strings.subtitle_selected
            output["help"] = ""
        else:
            crud_strings = s3.crud_strings["survey_series"]
            viewing = request.get_vars.get("viewing", None)
            if viewing:
                # "viewing" is of the form "<tablename>.<record id>"
                dummy, series_id = viewing.split(".")
            else:
                series_id = request.get_vars.get("series", None)
                if not series_id:
                    series_id = r.id
            form = buildSeriesSummary(series_id, posn_offset)
            output["items"] = form
            output["sortby"] = [[0, "asc"]]
            output["title"] = crud_strings.title_analysis_summary
            output["subtitle"] = crud_strings.subtitle_analysis_summary
            output["help"] = crud_strings.help_analysis_summary
            s3.dataTableBulkActionPosn = "top"
            s3.actions = None
        current.response.view = "survey/series_summary.html"
        return output
# -------------------------------------------------------------------------
@staticmethod
def getChartName():
"""
Create a Name for a Chart
"""
import hashlib
vars = current.request.vars
end_part = "%s_%s" % (vars.numericQuestion,
vars.labelQuestion)
h = hashlib.sha256()
h.update(end_part)
encoded_part = h.hexdigest()
chartName = "survey_series_%s_%s" % (vars.series, encoded_part)
return chartName
# -------------------------------------------------------------------------
@staticmethod
def seriesChartDownload(r, **attr):
"""
"""
from gluon.contenttype import contenttype
series_id = r.id
seriesName = survey_getSeriesName(series_id)
filename = "%s_chart.png" % seriesName
response = current.response
response.headers["Content-Type"] = contenttype(".png")
response.headers["Content-disposition"] = "attachment; filename=\"%s\"" % filename
chartFile = S3SurveySeriesModel.getChartName()
cached = S3Chart.getCachedFile(chartFile)
if cached:
return cached
# The cached version doesn't exist so regenerate it
output = dict()
vars = current.request.get_vars
if "labelQuestion" in vars:
labelQuestion = vars.labelQuestion
if "numericQuestion" in vars:
numQstnList = vars.numericQuestion
if not isinstance(numQstnList, (list, tuple)):
numQstnList = [numQstnList]
if (numQstnList != None) and (labelQuestion != None):
S3SurveySeriesModel.drawChart(output, series_id, numQstnList,
labelQuestion, outputFormat="png")
return output["chart"]
# -------------------------------------------------------------------------
    @staticmethod
    def seriesGraph(r, **attr):
        """
            Allows the user to select one string question and multiple numeric
            questions. The string question is used to group the numeric data,
            with the result displayed as a bar chart.

            For example:
                The string question can be Geographic area, and the numeric
                questions could be people injured and families displaced.
                Then the results will be grouped by each geographical area.
        """
        T = current.T
        request = current.request
        s3 = current.response.s3
        output = dict()
        # Draw the chart
        vars = request.vars
        if "viewing" in vars:
            # "viewing" is of the form "<tablename>.<record id>"
            dummy, series_id = vars.viewing.split(".")
        elif "series" in vars:
            series_id = vars.series
        else:
            series_id = r.id
        chartFile = S3SurveySeriesModel.getChartName()
        cachePath = S3Chart.getCachedPath(chartFile)
        if cachePath and request.ajax:
            # Serve the cached chart image directly
            return IMG(_src=cachePath)
        else:
            numQstnList = None
            labelQuestion = None
            post_vars = request.post_vars
            if post_vars is not None:
                if "labelQuestion" in post_vars:
                    labelQuestion = post_vars.labelQuestion
                if "numericQuestion" in post_vars:
                    numQstnList = post_vars.numericQuestion
                    if not isinstance(numQstnList, (list, tuple)):
                        numQstnList = [numQstnList]
                if (numQstnList != None) and (labelQuestion != None):
                    S3SurveySeriesModel.drawChart(output, series_id, numQstnList,
                                                  labelQuestion)
        if request.ajax == True and "chart" in output:
            return output["chart"]
        # retain the rheader
        rheader = attr.get("rheader", None)
        if rheader:
            rheader = rheader(r)
            output["rheader"] = rheader
        # ---------------------------------------------------------------------
        def addQstnChkboxToTR(numQstnList, qstn):
            """
                Build a table row with a checkbox for the given question,
                pre-checked if the question is in numQstnList
            """
            tr = TR()
            if numQstnList != None and qstn["code"] in numQstnList:
                tr.append(INPUT(_type="checkbox",
                                _name="numericQuestion",
                                _value=qstn["code"],
                                value=True,
                                )
                          )
            else:
                tr.append(INPUT(_type="checkbox",
                                _name="numericQuestion",
                                _value=qstn["code"],
                                )
                          )
            tr.append(LABEL(qstn["name"]))
            return tr
        if series_id == None:
            return output
        allQuestions = survey_getAllQuestionsForSeries(series_id)
        # Question types usable as the grouping (label) question
        labelTypeList = ("String",
                         "Option",
                         "YesNo",
                         "YesNoDontKnow",
                         "Location",
                         )
        labelQuestions = survey_get_series_questions_of_type (allQuestions, labelTypeList)
        lblQstns = []
        for question in labelQuestions:
            lblQstns.append(question["name"])
        numericTypeList = ("Numeric")
        form = FORM(_id="mapGraphForm")
        table = TABLE()
        labelQstn = SELECT(lblQstns, _name="labelQuestion", value=labelQuestion)
        table.append(TR(TH("%s:" % T("Select Label Question")), _class="survey_question"))
        table.append(labelQstn)
        table.append(TR(TH(T("Select Numeric Questions (one or more):")), _class="survey_question"))
        # First add the special questions
        specialQuestions = [{"code":"Count", "name" : T("Number of Completed Assessment Forms")}]
        innerTable = TABLE()
        for qstn in specialQuestions:
            tr = addQstnChkboxToTR(numQstnList, qstn)
            innerTable.append(tr)
        table.append(innerTable)
        # Now add the numeric questions
        numericQuestions = survey_get_series_questions_of_type (allQuestions, numericTypeList)
        innerTable = TABLE()
        for qstn in numericQuestions:
            tr = addQstnChkboxToTR(numQstnList, qstn)
            innerTable.append(tr)
        table.append(innerTable)
        form.append(table)
        series = INPUT(_type="hidden",
                       _id="selectSeriesID",
                       _name="series",
                       _value="%s" % series_id
                       )
        button = INPUT(_type="button", _id="chart_btn", _name="Chart", _value=T("Display Chart"))
        form.append(series)
        form.append(button)
        # Set up the javascript code for ajax interaction
        jurl = URL(r=request, c=r.prefix, f=r.function, args=request.args)
        s3.jquery_ready.append('''
$('#chart_btn').click(function(){
var data=$('#mapGraphForm').serialize()
var url='<a class="action-btn" href=series_chart_download?' + data + '>Download Chart</a>'
$.post('%s',data,function(data){
$('#survey_chart').empty();
$('#survey_chart').append(data);
$('#survey_chart_download').empty();
$('#survey_chart_download').append(url);
});
});
''' % jurl)
        output["showForm"] = P(T("Click on the chart to show/hide the form."))
        output["form"] = form
        output["title"] = s3.crud_strings["survey_series"].title_analysis_chart
        current.response.view = "survey/series_analysis.html"
        return output
# -------------------------------------------------------------------------
    @staticmethod
    def drawChart(output, series_id, numQstnList, labelQuestion, outputFormat=None):
        """
            Draw the bar chart for the series and add it to output["chart"].

            @param output: dict the chart (and a download link) is added to
            @param series_id: the series being charted
            @param numQstnList: codes of the numeric questions to chart;
                                the special code "Count" charts the number
                                of replies per label value
            @param labelQuestion: name of the question used for grouping
            @param outputFormat: image format passed on to S3Chart.draw,
                                 or None for the default
        """
        T = current.T
        getAnswers = survey_getAllAnswersForQuestionInSeries
        gqstn = survey_getQuestionFromName(labelQuestion, series_id)
        gqstn_id = gqstn["qstn_id"]
        ganswers = getAnswers(gqstn_id, series_id)
        dataList = []
        legendLabels = []
        for numericQuestion in numQstnList:
            if numericQuestion == "Count":
                # get the count of replies for the label question
                gqstn_type = gqstn["type"]
                analysisTool = survey_analysis_type[gqstn_type](gqstn_id, ganswers)
                map = analysisTool.uniqueCount()
                label = map.keys()
                data = map.values()
                legendLabels.append(T("Count of Question"))
            else:
                qstn = survey_getQuestionFromCode(numericQuestion, series_id)
                qstn_id = qstn["qstn_id"]
                qstn_type = qstn["type"]
                answers = getAnswers(qstn_id, series_id)
                analysisTool = survey_analysis_type[qstn_type](qstn_id, answers)
                label = analysisTool.qstnWidget.fullName()
                # Truncate long question names for the legend
                if len(label) > 20:
                    label = "%s..." % label[0:20]
                legendLabels.append(label)
                # Group the numeric answers by the label question's answers
                grouped = analysisTool.groupData(ganswers)
                aggregate = "Sum"
                filtered = analysisTool.filter(aggregate, grouped)
                (label, data) = analysisTool.splitGroupedData(filtered)
            if data != []:
                dataList.append(data)
        if dataList == []:
            output["chart"] = H4(T("There is insufficient data to draw a chart from the questions selected"))
        else:
            chartFile = S3SurveySeriesModel.getChartName()
            chart = S3Chart(path=chartFile, width=7.2)
            chart.asInt = True
            chart.survey_bar(labelQuestion,
                             dataList,
                             label,
                             legendLabels)
            if outputFormat == None:
                image = chart.draw()
            else:
                image = chart.draw(output=outputFormat)
            output["chart"] = image
            request = current.request
            chartLink = A(T("Download"),
                          _href=URL(c="survey",
                                    f="series",
                                    args=request.args,
                                    vars=request.vars
                                    )
                          )
            output["chartDownload"] = chartLink
# -------------------------------------------------------------------------
    @staticmethod
    def seriesMap(r, **attr):
        """
            Custom method to show the responses of a series (or of all
            series) on a map, coloured by the value of a selectable
            priority question.
        """
        import math
        from s3survey import S3AnalysisPriority
        T = current.T
        response = current.response
        s3 = response.s3
        request = current.request
        gis = current.gis
        # retain the rheader
        rheader = attr.get("rheader", None)
        if rheader:
            rheader = rheader(r)
            output = dict(rheader=rheader)
        else:
            output = dict()
        crud_strings = s3.crud_strings["survey_series"]
        viewing = request.get_vars.get("viewing", None)
        if viewing:
            # "viewing" is of the form "<tablename>.<record id>"
            dummy, series_id = viewing.split(".")
        else:
            series_id = request.get_vars.get("series", None)
            if not series_id:
                series_id = r.id
        if series_id == None:
            # No specific series requested: show all of them
            seriesList = []
            append = seriesList.append
            records = survey_getAllSeries()
            for row in records:
                append(row.id)
        else:
            seriesList = [series_id]
        pqstn = {}
        pqstn_name = request.post_vars.get("pqstn_name", None)
        if pqstn_name is None:
            pqstn = survey_getPriorityQuestionForSeries(series_id)
            if "name" in pqstn:
                pqstn_name = pqstn["name"]
        feature_queries = []
        bounds = {}
        # Build the drop down list of priority questions
        allQuestions = survey_getAllQuestionsForSeries(series_id)
        numericTypeList = ("Numeric")
        numericQuestions = survey_get_series_questions_of_type(allQuestions,
                                                               numericTypeList)
        numQstns = []
        for question in numericQuestions:
            numQstns.append(question["name"])
        form = FORM(_id="mapQstnForm")
        table = TABLE()
        if pqstn:
            priorityQstn = SELECT(numQstns, _name="pqstn_name",
                                  value=pqstn_name)
        else:
            priorityQstn = None
        # Set up the legend
        priorityObj = S3AnalysisPriority(range=[-.66, .66],
                                         colour={-1:"#888888", # grey
                                                  0:"#008000", # green
                                                  1:"#FFFF00", # yellow
                                                  2:"#FF0000", # red
                                                  },
                                         # Make Higher-priority show up more clearly
                                         opacity={-1:0.5,
                                                   0:0.6,
                                                   1:0.7,
                                                   2:0.8,
                                                   },
                                         image={-1:"grey",
                                                 0:"green",
                                                 1:"yellow",
                                                 2:"red",
                                                 },
                                         desc={-1:"No Data",
                                                0:"Low",
                                                1:"Average",
                                                2:"High",
                                                },
                                         zero = True)
        for series_id in seriesList:
            series_name = survey_getSeriesName(series_id)
            response_locations = getLocationList(series_id)
            if pqstn == {} and pqstn_name:
                # Find the selected priority question by name
                for question in numericQuestions:
                    if pqstn_name == question["name"]:
                        pqstn = question
            if pqstn != {}:
                pqstn_id = pqstn["qstn_id"]
                answers = survey_getAllAnswersForQuestionInSeries(pqstn_id,
                                                                  series_id)
                analysisTool = survey_analysis_type["Numeric"](pqstn_id,
                                                               answers)
                analysisTool.advancedResults()
            else:
                analysisTool = None
            if analysisTool != None and not math.isnan(analysisTool.mean):
                # Build the legend table from the priority bands
                pBand = analysisTool.priorityBand(priorityObj)
                legend = TABLE(
                           TR (TH(T("Marker Levels"), _colspan=3),
                               _class= "survey_question"),
                           )
                for key in priorityObj.image.keys():
                    tr = TR(TD(priorityObj.imageURL(request.application,
                                                    key)),
                            TD(priorityObj.desc(key)),
                            TD(priorityObj.rangeText(key, pBand)),
                            )
                    legend.append(tr)
                output["legend"] = legend
            if len(response_locations) > 0:
                for i in range( 0 , len( response_locations) ):
                    location = response_locations[i]
                    complete_id = location.complete_id
                    # Insert how we want this to appear on the map
                    url = URL(c="survey",
                              f="series",
                              args=[series_id,
                                    "complete",
                                    complete_id,
                                    "read"
                                    ]
                              )
                    location.shape = "circle"
                    location.size = 5
                    if analysisTool is None:
                        priority = -1
                    else:
                        priority = analysisTool.priority(complete_id,
                                                         priorityObj)
                    location.colour = priorityObj.colour[priority]
                    location.opacity = priorityObj.opacity[priority]
                    location.popup_url = url
                    location.popup_label = response_locations[i].name
                feature_queries.append({ "name": "%s: Assessments" % series_name,
                                         "query": response_locations,
                                         "active": True })
                if bounds == {}:
                    bounds = (gis.get_bounds(features=response_locations))
                else:
                    new_bounds = gis.get_bounds(features=response_locations)
                    # Where is merge_bounds defined!?
                    # NOTE(review): merge_bounds is not defined in this
                    # module - this branch (multiple series with locations)
                    # would raise a NameError; verify before relying on it
                    bounds = merge_bounds([bounds, new_bounds])
        if bounds == {}:
            bounds = gis.get_bounds()
        map = gis.show_map(feature_queries = feature_queries,
                           #height = 600,
                           #width = 720,
                           bbox = bounds,
                           #collapsed = True,
                           catalogue_layers = True,
                           )
        series = INPUT(_type="hidden",
                       _id="selectSeriesID",
                       _name="series",
                       _value="%s" % series_id
                       )
        table.append(TR(TH("%s:" % T("Display Question on Map")),
                        _class="survey_question"))
        table.append(priorityQstn)
        table.append(series)
        form.append(table)
        button = INPUT(_type="submit", _name="Chart",
                       _value=T("Update Map"))
        # REMOVED until we have dynamic loading of maps.
        #button = INPUT(_type="button", _id="map_btn", _name="Map_Btn", _value=T("Select the Question"))
        #jurl = URL(r=request, c=r.prefix, f=r.function, args=request.args)
        #s3.jquery_ready.append('''
        #$('#map_btn').click(function(){
        #    $.post('%s',$('#mapQstnForm').serialize(),function(data){
        #        obj = jQuery.parseJSON(data);
        #        $('#survey_map-legend').empty();
        #        $('#survey_map-legend').append(obj.legend);
        #        $('#survey_map-container').empty();
        #        $('#survey_map-container').append(obj.map);
        #    });
        #});
        #''' % jurl)
        form.append(button)
        output["title"] = crud_strings.title_map
        output["subtitle"] = crud_strings.subtitle_map
        output["instructions"] = T("Click on a marker to see the Completed Assessment Form")
        output["form"] = form
        output["map"] = map
        response.view = "survey/series_map.html"
        return output
# =============================================================================
def survey_serieslist_dataTable_post(r):
    """
        Replace the default dataTable action buttons with a single
        Open/Update button linking to the series summary
    """
    #S3CRUD.action_buttons(r)
    summary_url = URL(c="survey", f="series",
                      args=["[id]", "summary"]
                      )
    current.response.s3.actions = [
        {"label": current.messages.UPDATE,
         "_class": "action-btn edit",
         "url": summary_url,
         },
        ]
# =============================================================================
def survey_series_represent(value):
    """
        Represent a survey_series id as the series name.

        @param value: the survey_series record id
        @return: the series name, or "" if no such series exists

        The original implementation raised AttributeError when the id
        matched no row (e.g. stale reference or empty field); now it
        degrades to an empty string, matching survey_getSeriesName().
    """
    table = current.s3db.survey_series
    row = current.db(table.id == value).select(table.name,
                                               limitby=(0, 1)).first()
    if row is None:
        return ""
    return row.name
# =============================================================================
def survey_series_rheader(r):
    """
        Build the resource header (rheader) for a survey series page.

        Renders: the tab bar, a count of completed assessments, a form
        to export the (optionally translated) assessment form as
        spreadsheet/document, and a link to export all response data.

        @param r: the S3Request
        @return: the rheader DIV, or None for non-HTML representations
    """
    if r.representation == "html":
        tablename, record = s3_rheader_resource(r)
        if not record:
            # No record in the request - fall back to the ?series= var
            series_id = current.request.vars.series
            record = survey_getSeries(series_id)
        if record != None:
            T = current.T
            s3db = current.s3db
            # Tabs
            tabs = [(T("Details"), None),
                    (T("Completed Assessments"), "complete"),
                    (T("Summary"), "summary"),
                    (T("Chart"), "graph"),
                    (T("Map"), "map"),
                    ]
            if current.auth.s3_has_permission("create", "survey_complete"):
                tabs.insert(1, (T("Enter Completed Assessment"), "newAssessment/"))
            rheader_tabs = s3_rheader_tabs(r, tabs)
            # Count of completed assessment forms for this series
            completeTable = s3db.survey_complete
            qty = current.db(completeTable.series_id == record.id).count()
            tsection = TABLE(_class="survey-complete-list")
            lblSection = T("Number of Completed Assessment Forms")
            rsection = TR(TH(lblSection), TD(qty))
            tsection.append(rsection)
            # Export form: language selection + download buttons
            urlexport = URL(c="survey", f="series_export_formatted",
                            args=[record.id])
            tranForm = FORM(_action=urlexport)
            translationList = survey_getAllTranslationsForSeries(record.id)
            if len(translationList) > 0:
                # Radio buttons to pick the export language
                tranTable = TABLE()
                tr = TR(INPUT(_type='radio',
                              _name='translationLanguage',
                              _value="Default",
                              _checked=True,
                              ),
                        LABEL("Default"))
                colCnt = 1
                for translation in translationList:
                    # include a maximum of 4 translation languages per row
                    if colCnt == 4:
                        tranTable.append(tr)
                        tr = TR()
                        colCnt = 0
                    tr.append(INPUT(_type="radio",
                                    _name="translationLanguage",
                                    _value=translation["code"],
                                    ))
                    tr.append(LABEL(translation["language"]))
                    colCnt += 1
                if colCnt != 0:
                    # Flush the last, partially filled row
                    tranTable.append(tr)
                tranForm.append(tranTable)
            export_xls_btn = INPUT(_type="submit",
                                   _id="export_xls_btn",
                                   _name="Export_Spreadsheet",
                                   _value=T("Download Assessment Form Spreadsheet"),
                                   _class="action-btn"
                                   )
            tranForm.append(export_xls_btn)
            try:
                # only add the Export to Word button up if PyRTF is installed
                from PyRTF import Document
                export_rtf_btn = INPUT(_type="submit",
                                       _id="export_rtf_btn",
                                       _name="Export_Word",
                                       _value=T("Download Assessment Form Document"),
                                       _class="action-btn"
                                       )
                tranForm.append(export_rtf_btn)
            except:
                pass
            urlimport = URL(c="survey",
                            f="export_all_responses",
                            args=[record.id],
                            )
            buttons = DIV(A(T("Export all Completed Assessment Data"),
                            _href=urlimport,
                            _id="All_resposnes",
                            _class="action-btn"
                            ),
                          )
            # Assemble: summary table, count section, export form,
            # export-all button, then the tab bar
            rheader = DIV(TABLE(
                          TR(TH("%s: " % T("Template")),
                             survey_template_represent(record.template_id),
                             TH("%s: " % T("Name")),
                             record.name,
                             TH("%s: " % T("Status")),
                             s3db.survey_series_status[record.status],
                             ),
                              ),
                          tsection,
                          tranForm,
                          buttons,
                          rheader_tabs)
            return rheader
    return None
# =============================================================================
def survey_getSeries(series_id):
    """
        Fetch the full survey_series row for the given id.

        @param series_id: the series record id
        @return: the Row, or None if no such series exists
    """
    stable = current.s3db.survey_series
    query = (stable.id == series_id)
    return current.db(query).select(limitby=(0, 1)).first()
# =============================================================================
def survey_getSeriesName(series_id):
    """
        Return the name of a series given its id.

        @param series_id: the series record id
        @return: the series name, or "" if the series doesn't exist

        The original used a bare try/except around the attribute access,
        which also swallowed unrelated errors; an explicit None check
        keeps the same "" fallback without hiding real failures.
    """
    table = current.s3db.survey_series
    row = current.db(table.id == series_id).select(table.name,
                                                   limitby=(0, 1)).first()
    if row is None:
        return ""
    return row.name
# =============================================================================
def survey_getAllSeries():
    """
        Return a Rows object holding every series in the database.
    """
    stable = current.s3db.survey_series
    all_series = current.db(stable.id > 0).select()
    return all_series
# =============================================================================
def survey_buildQuestionnaireFromSeries(series_id, complete_id=None):
    """
        Build a form displaying all the questions for a given series_id.

        If complete_id is also provided then the stored responses to
        each completed question will be displayed alongside.
    """
    series_questions = survey_getAllQuestionsForSeries(series_id)
    return buildQuestionsForm(series_questions, complete_id)
# =============================================================================
def survey_save_answers_for_series(series_id, complete_id, vars):
    """
        Persist the submitted answers for a completed series.

        Thin wrapper: looks up the series' questions, then delegates
        to saveAnswers().
    """
    series_questions = survey_getAllQuestionsForSeries(series_id)
    return saveAnswers(series_questions, series_id, complete_id, vars)
# =============================================================================
def saveAnswers(questions, series_id, complete_id, vars):
    """
        Serialise the submitted answers as '"code","answer"' lines and
        store them in the survey_complete table.

        @param questions: list of question dicts (each needs a "code" key)
        @param series_id: the series the answers belong to
        @param complete_id: id of an existing survey_complete record to
                            update, or None to insert a new record
        @param vars: the submitted form vars, keyed by question code
        @return: the id of the inserted/updated survey_complete record
    """
    # Build the answer_list: one '"code","answer"' line per answered
    # question; join once instead of repeated string concatenation
    lines = ['"%s","%s"\n' % (question["code"], vars[question["code"]])
             for question in questions
             if question["code"] in vars and vars[question["code"]] != ""]
    text = "".join(lines)
    table = current.s3db.survey_complete
    if complete_id is None:
        # Insert into database (avoid shadowing the builtin "id")
        record_id = table.insert(series_id = series_id, answer_list = text)
        S3SurveyCompleteModel.completeOnAccept(record_id)
        return record_id
    # Update the existing complete_id record
    current.db(table.id == complete_id).update(answer_list = text)
    S3SurveyCompleteModel.completeOnAccept(complete_id)
    return complete_id
# =============================================================================
def survey_getPriorityQuestionForSeries(series_id):
    """
        Return the priority question of the series' template, or None
        when the series has no template record.
    """
    template = survey_getTemplateFromSeries(series_id)
    if template is None:
        return None
    priority_code = template["priority_qstn"]
    return survey_getQuestionFromCode(priority_code, series_id)
# =============================================================================
def buildSeriesSummary(series_id, posn_offset):
    """
        Build the series summary dataTable: one row per (non-grid)
        question with its position, name, type (plus chart button where
        available) and answer summary, wrapped in a form that carries
        the series id.

        @param series_id: the series to summarise
        @param posn_offset: offset added to each question position so
                            that all generated element ids are positive
    """
    from s3.s3data import S3DataTable
    T = current.T
    table = TABLE(_id="series_summary",
                  _class="dataTable display")
    hr = TR(TH(""), # Bulk action column
            TH(T("Position")),
            TH(T("Question")),
            TH(T("Type")),
            TH(T("Summary"))
            )
    header = THEAD(hr)
    questions = survey_getAllQuestionsForSeries(series_id)
    line = []
    body = TBODY()
    for question in questions:
        # Grid container questions have no answers of their own
        if question["type"] == "Grid":
            continue
        question_id = question["qstn_id"]
        widgetObj = survey_getWidgetFromQuestion(question_id)
        br = TR()
        posn = int(question["posn"])+posn_offset
        br.append(TD(INPUT(_id="select%s" % posn,
                           _type="checkbox",
                           _class="bulkcheckbox",
                           )))
        br.append(posn) # add an offset to make all id's +ve
        br.append(widgetObj.fullName())
        #br.append(question["name"])
        type = widgetObj.type_represent()
        answers = survey_getAllAnswersForQuestionInSeries(question_id,
                                                          series_id)
        analysisTool = survey_analysis_type[question["type"]](question_id,
                                                              answers)
        chart = analysisTool.chartButton(series_id)
        cell = TD()
        cell.append(type)
        if chart:
            cell.append(chart)
        br.append(cell)
        # count() must run before summary() so totals are populated
        analysisTool.count()
        br.append(analysisTool.summary())
        body.append(br)
    table.append(header)
    table.append(body)
    s3 = current.response.s3
    # Turn off server side pagination
    s3.no_sspag = True
    # Turn multi-select on
    s3.dataTableBulkActions = [current.T("Display Selected Questions")]
    attr = S3DataTable.getConfigData()
    form = S3DataTable.htmlConfig(table,
                                  "series_summary",
                                  [[0, 'asc']], # order by
                                  "", # the filter string
                                  None, # the rfields
                                  **attr
                                  )
    # Hidden field so the bulk-action POST knows which series this is
    series = INPUT(_type="hidden", _id="selectSeriesID", _name="series",
                   _value="%s" % series_id)
    form.append(series)
    return form
# =============================================================================
class S3SurveyCompleteModel(S3Model):
    """
        Completed Surveys Model

        Defines survey_complete (one record per completed response,
        answers stored as a '"code","answer"' text blob) and
        survey_answer (one record per individual answer, populated
        from the blob in the onaccept).
    """

    names = ["survey_complete",
             "survey_answer",
             ]

    def model(self):
        """
            Define the survey_complete and survey_answer tables
        """

        T = current.T
        configure = self.configure
        crud_strings = current.response.s3.crud_strings
        define_table = self.define_table

        # ---------------------------------------------------------------------
        # The survey_complete table holds all of the answers for a completed
        # response. It has a link back to the series this response belongs to.
        #
        # Whilst this table holds all of the answers in a text field during
        # the onaccept each answer is extracted and then stored in the
        # survey_answer table. This process of moving the answers to a
        # separate table makes it easier to analyse the answers
        # for a given question across all responses.
        tablename = "survey_complete"
        table = define_table(tablename,
                             Field("series_id", "reference survey_series",
                                   represent = survey_series_represent,
                                   label = T("Series"),
                                   readable=False,
                                   writable=False
                                   ),
                             Field("answer_list", "text",
                                   represent = survey_answer_list_represent
                                   ),
                             Field("location", "text",
                                   readable=False,
                                   writable=False
                                   ),
                             *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            title_create = T("Enter Completed Assessment Form"),
            title_display = T("Completed Assessment Form Details"),
            title_list = T("Completed Assessment Forms"),
            title_update = T("Edit Completed Assessment Form"),
            title_selected = T("Selected Questions for all Completed Assessment Forms"),
            subtitle_create = T("Enter Completed Assessment Form"),
            subtitle_selected = T("Selected Questions for all Completed Assessment Forms"),
            label_list_button = T("List Completed Assessment Forms"),
            label_create_button = T("Add a new Completed Assessment Form"),
            label_delete_button = T("Delete this Completed Assessment Form"),
            msg_record_created = T("Completed Assessment Form entered"),
            msg_record_modified = T("Completed Assessment Form updated"),
            msg_record_deleted = T("Completed Assessment Form deleted"),
            msg_list_empty = T("No Completed Assessment Forms"),
            title_upload = T("Upload the Completed Assessment Form")
            )

        configure(tablename,
                  onvalidation = self.complete_onvalidate,
                  onaccept = self.complete_onaccept,
                  deduplicate=self.survey_complete_duplicate,
                  )

        self.add_component("survey_complete",
                           survey_series = dict(joinby="series_id",
                                                multiple=True)
                           )

        # ---------------------------------------------------------------------
        # The survey_answer table holds the answer for a single response
        # of a given question.
        tablename = "survey_answer"
        table = define_table(tablename,
                             Field("complete_id", "reference survey_complete",
                                   readable=False,
                                   writable=False
                                   ),
                             Field("question_id", "reference survey_question",
                                   readable=True,
                                   writable=False
                                   ),
                             Field("value", "text",
                                   readable=True,
                                   writable=True
                                   ),
                             *s3_meta_fields())

        crud_strings[tablename] = Storage(
            title_create = T("Add Assessment Answer"),
            title_display = T("Assessment Answer Details"),
            title_list = T("Assessment Answers"),
            title_update = T("Edit Assessment Answer"),
            subtitle_create = T("Add a new Assessment Answer"),
            label_list_button = T("List Assessment Answers"),
            label_create_button = T("Add a new Assessment Answer"),
            label_delete_button = T("Delete this Assessment Answer"),
            msg_record_created = T("Assessment Answer added"),
            msg_record_modified = T("Assessment Answer updated"),
            msg_record_deleted = T("Assessment Answer deleted"),
            msg_list_empty = T("No Assessment Answers"))

        configure(tablename,
                  onaccept = self.answer_onaccept,
                  deduplicate = self.survey_answer_duplicate
                  )

        # ---------------------------------------------------------------------
        # No names exported to response.s3
        return Storage()

    # -------------------------------------------------------------------------
    @staticmethod
    def extractAnswerFromAnswerList(answerList, qstnCode):
        """
            function to extract the answer for the question code
            passed in from the list of answers. This is in a CSV
            format created by the XSL stylesheet or by the function
            saveAnswers()
        """
        start = answerList.find(qstnCode)
        if start == -1:
            return None
        # +3 skips the closing quote, comma and opening quote of the
        # '"code","answer"' pair to land on the answer text
        start = start + len(qstnCode) + 3
        end = answerList.find('"', start)
        answer = answerList[start:end]
        return answer

    # -------------------------------------------------------------------------
    @staticmethod
    def complete_onvalidate(form):
        """
            Validate a completed response: series_id and answer_list
            must be present, and every answered question code must
            belong to the series' question list.
        """
        T = current.T
        vars = form.vars
        if "series_id" not in vars or vars.series_id == None:
            form.errors.series_id = T("Series details missing")
            return False
        if "answer_list" not in vars or vars.answer_list == None:
            form.errors.answer_list = T("The answers are missing")
            return False
        series_id = vars.series_id
        answer_list = vars.answer_list
        qstn_list = survey_getAllQuestionsForSeries(series_id)
        qstns = []
        for qstn in qstn_list:
            qstns.append(qstn["code"])
        # Each line is '"code","answer"'; extract the code between the
        # opening quote and the '","' separator
        answerList = answer_list.splitlines(True)
        for answer in answerList:
            qstn_code = answer[1:answer.find('","')]
            if qstn_code not in qstns:
                msg = "%s: %s" % (T("Unknown question code"), qstn_code)
                if answer_list not in form.errors:
                    form.errors.answer_list = msg
                else:
                    form.errors.answer_list += msg
        return True

    # -------------------------------------------------------------------------
    @staticmethod
    def complete_onaccept(form):
        """
            All of the answers will be stored in the answer_list in the
            format "code","answer"
            They will then be inserted into the survey_answer table
            each item will be a record on that table.

            This will also extract the default location question as
            defined by the template and store this in the location field
        """
        if form.vars.id:
            S3SurveyCompleteModel.completeOnAccept(form.vars.id)

    # -------------------------------------------------------------------------
    @staticmethod
    def completeOnAccept(complete_id):
        """
            Post-process a completed response: purge the cached charts
            for the series, explode answer_list into survey_answer
            records and store the default location representation.

            @param complete_id: id of the survey_complete record
        """
        # Get the basic data that is needed
        s3db = current.s3db
        rtable = s3db.survey_complete
        atable = s3db.survey_answer
        record = rtable[complete_id]
        series_id = record.series_id
        # Charts for this series are now stale
        purgePrefix = "survey_series_%s" % series_id
        S3Chart.purgeCache(purgePrefix)
        if series_id == None:
            return
        # Save all the answers from answerList in the survey_answer table
        answerList = record.answer_list
        S3SurveyCompleteModel.importAnswers(complete_id, answerList)
        # Extract the default template location question and save the
        # answer in the location field
        templateRec = survey_getTemplateFromSeries(series_id)
        locDetails = templateRec["location_detail"]
        if not locDetails:
            return
        widgetObj = get_default_location(complete_id)
        if widgetObj:
            current.db(rtable.id == complete_id).update(location = widgetObj.repr())
        locations = get_location_details(complete_id)
        S3SurveyCompleteModel.importLocations(locations)

    # -------------------------------------------------------------------------
    @staticmethod
    def importAnswers(id, list):
        """
            private function used to save the answer_list stored in
            survey_complete into answer records held in survey_answer

            NB: the parameter names shadow the builtins id/list;
            "id" is the complete_id, "list" the raw answer_list text.
        """
        import csv
        import os
        try:
            from cStringIO import StringIO # Faster, where available
        except:
            from StringIO import StringIO
        strio = StringIO()
        strio.write(list)
        strio.seek(0)
        answer = []
        append = answer.append
        reader = csv.reader(strio)
        for row in reader:
            if row != None:
                # Prefix each (code, value) row with the complete_id
                row.insert(0, id)
                append(row)
        from tempfile import TemporaryFile
        csvfile = TemporaryFile()
        writer = csv.writer(csvfile)
        writerow = writer.writerow
        writerow(["complete_id", "question_code", "value"])
        for row in answer:
            writerow(row)
        csvfile.seek(0)
        # Import via the standard CSV stylesheet for survey_answer
        xsl = os.path.join("applications",
                           current.request.application,
                           "static",
                           "formats",
                           "s3csv",
                           "survey",
                           "answer.xsl")
        resource = current.s3db.resource("survey_answer")
        resource.import_xml(csvfile, stylesheet = xsl, format="csv",)

    # -------------------------------------------------------------------------
    @staticmethod
    def importLocations(location_dict):
        """
            private function used to save the locations to gis.location

            @param location_dict: dict of standard location question
                                  codes to answer-loaded widgets, as
                                  returned by get_location_details()
        """
        import csv
        import os
        lastLocWidget = None
        codeList = ["STD-L0","STD-L1","STD-L2","STD-L3","STD-L4"]
        headingList = ["Country",
                       "ADM1_NAME",
                       "ADM2_NAME",
                       "ADM3_NAME",
                       "ADM4_NAME"
                       ]
        cnt = 0
        answer = []
        headings = []
        aappend = answer.append
        happend = headings.append
        # One column per answered Lx level, in hierarchy order
        for loc in codeList:
            if loc in location_dict:
                aappend(location_dict[loc].repr())
                lastLocWidget = location_dict[loc]
                happend(headingList[cnt])
            cnt += 1
        # Check that we have at least one location question answered
        if lastLocWidget == None:
            return
        codeList = ["STD-P-Code","STD-Lat","STD-Lon"]
        for loc in codeList:
            if loc in location_dict:
                aappend(location_dict[loc].repr())
            else:
                aappend("")
        from tempfile import TemporaryFile
        csvfile = TemporaryFile()
        writer = csv.writer(csvfile)
        headings += ["Code2", "Lat", "Lon"]
        writer.writerow(headings)
        writer.writerow(answer)
        csvfile.seek(0)
        # Import via the standard CSV stylesheet for gis_location
        xsl = os.path.join("applications",
                           current.request.application,
                           "static",
                           "formats",
                           "s3csv",
                           "gis",
                           "location.xsl")
        resource = current.s3db.resource("gis_location")
        resource.import_xml(csvfile, stylesheet = xsl, format="csv",)

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_complete_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for a record with the same name, answer_list
        """
        if job.tablename == "survey_complete":
            table = job.table
            data = job.data
            answers = "answer_list" in data and data.answer_list
            query = (table.answer_list == answers)
            try:
                return duplicator(job, query)
            except:
                # if this is part of an import then the select will throw an error
                # if the question code doesn't exist.
                # This can happen during an import if the wrong file is used.
                return

    # -------------------------------------------------------------------------
    @staticmethod
    def answer_onaccept(form):
        """
            Some question types may require additional processing

            Gives the question's widget a chance to rewrite the stored
            value via its onaccept() hook.
        """
        vars = form.vars
        if vars.complete_id and vars.question_id:
            atable = current.s3db.survey_answer
            complete_id = vars.complete_id
            question_id = vars.question_id
            value = vars.value
            widgetObj = survey_getWidgetFromQuestion(question_id)
            newValue = widgetObj.onaccept(value)
            if newValue != value:
                query = (atable.question_id == question_id) & \
                        (atable.complete_id == complete_id)
                current.db(query).update(value = newValue)

    # -------------------------------------------------------------------------
    @staticmethod
    def survey_answer_duplicate(job):
        """
            Rules for finding a duplicate:
                - Look for a record with the same complete_id and question_id
        """
        if job.tablename == "survey_answer":
            table = job.table
            data = job.data
            qid = "question_id" in data and data.question_id
            rid = "complete_id" in data and data.complete_id
            query = (table.question_id == qid) & \
                    (table.complete_id == rid)
            return duplicator(job, query)
# =============================================================================
def survey_answerlist_dataTable_pre():
    """
        The answer list has been removed for the moment. Currently it
        displays all answers for a summary it would be better to
        be able to display just a few select answers
    """
    fields = ["created_on", "series_id", "location", "modified_by"]
    current.s3db.configure("survey_complete", list_fields=fields)
# =============================================================================
def survey_answerlist_dataTable_post(r):
    """
        Replace the default action buttons with a single edit button
        that opens the completed response for update.
    """
    #S3CRUD.action_buttons(r)
    edit_url = URL(c="survey", f="series",
                   args=[r.id, "complete", "[id]", "update"])
    edit_action = dict(label=current.messages["UPDATE"],
                       _class="action-btn edit",
                       url=edit_url)
    current.response.s3.actions = [edit_action]
# =============================================================================
def survey_answer_list_represent(value):
    """
        Display the answer list in a formatted table.
        Displaying the full question (rather than the code)
        and the answer.

        @param value: the raw answer_list text ('"code","answer"' lines)
        @return: a TABLE of (question name, answer) rows

        Fixed: no longer shadows the builtin "list"; dropped the
        redundant intermediate variable.
    """
    db = current.db
    qtable = current.s3db.survey_question
    result = TABLE()
    # Cache of question code -> question name lookups
    questions = {}
    xml_decode = S3Codec.xml_decode
    for line in value.splitlines():
        line = xml_decode(line)
        (question, answer) = line.split(",", 1)
        question = question.strip("\" ")
        if question in questions:
            question = questions[question]
        else:
            query = (qtable.code == question)
            qstn = db(query).select(qtable.name,
                                    limitby=(0, 1)).first()
            if not qstn:
                # Unknown question code - skip this line
                continue
            questions[question] = qstn.name
            question = qstn.name
        answer = answer.strip("\" ")
        result.append(TR(TD(B(question)), TD(answer)))
    return result
# =============================================================================
def get_location_details(complete_id):
    """
        Return a dict mapping each answered standard location question
        code to its widget, with the stored answer already loaded.
    """
    db = current.db
    s3db = current.s3db
    comtable = s3db.survey_complete
    qsntable = s3db.survey_question
    answtable = s3db.survey_answer
    base_query = (answtable.question_id == qsntable.id) & \
                 (answtable.complete_id == comtable.id)
    codes = ("STD-P-Code",
             "STD-L0", "STD-L1", "STD-L2", "STD-L3", "STD-L4",
             "STD-Lat", "STD-Lon")
    locations = {}
    for code in codes:
        row = db(base_query & (qsntable.code == code)).select(qsntable.id,
                                                              limitby=(0, 1)
                                                              ).first()
        if not row:
            continue
        widget = survey_getWidgetFromQuestion(row.id)
        widget.loadAnswer(complete_id, row.id)
        locations[code] = widget
    return locations
# =============================================================================
def get_default_location(complete_id):
    """
        Walk the standard location hierarchy (most specific level
        first) and return the answer-loaded widget for the first
        answered location question, or None when none were answered.
    """
    db = current.db
    s3db = current.s3db
    comtable = s3db.survey_complete
    qsntable = s3db.survey_question
    answtable = s3db.survey_answer
    base_query = (answtable.question_id == qsntable.id) & \
                 (answtable.complete_id == comtable.id)
    for code in ("STD-L4", "STD-L3", "STD-L2", "STD-L1", "STD-L0"):
        row = db(base_query & (qsntable.code == code)).select(qsntable.id,
                                                              limitby=(0, 1)
                                                              ).first()
        if row:
            widget = survey_getWidgetFromQuestion(row.id)
            widget.loadAnswer(complete_id, row.id)
            return widget
    return None
# =============================================================================
def survey_getAllAnswersForQuestionInSeries(question_id, series_id):
    """
        Return all answers to a given question within the specified
        series, as a list of dicts with keys answer_id, value and
        complete_id.
    """
    s3db = current.s3db
    ctable = s3db.survey_complete
    atable = s3db.survey_answer
    query = (atable.question_id == question_id) & \
            (atable.complete_id == ctable.id) & \
            (ctable.series_id == series_id)
    rows = current.db(query).select(atable.id,
                                    atable.value,
                                    atable.complete_id)
    return [{"answer_id": row.id,
             "value": row.value,
             "complete_id": row.complete_id,
             } for row in rows]
# =============================================================================
def buildTableFromCompletedList(dataSource):
    """
        Render a completed-responses dataset as a configured dataTable.

        dataSource layout: row 0 holds the column headers, row 1 the
        column types (not rendered), rows 2+ the data.
    """
    table = TABLE(_id="completed_list",
                  _class="dataTable display")
    header_row = TR()
    for title in dataSource[0]:
        header_row.append(TH(title))
    table.append(THEAD(header_row))
    body = TBODY()
    for data_row in dataSource[2:]:
        tr = TR()
        for answer in data_row:
            tr.append(TD(answer))
        body.append(tr)
    table.append(body)
    # Turn off server side pagination
    current.response.s3.no_sspag = True
    attr = S3DataTable.getConfigData()
    return S3DataTable.htmlConfig(table,
                                  "completed_list",
                                  [[0, 'asc']], # order by
                                  "", # the filter string
                                  None, # the rfields
                                  **attr
                                  )
# =============================================================================
def buildCompletedList(series_id, question_id_list):
    """
        build a list of completed items for the series including
        just the questions in the list passed in

        The list will come in three parts.
        1) The first row is the header (list of field labels)
        2) The seconds row is the type of each column
        3) The remaining rows are the data

        @param series_id: The id of the series
        @param question_id_list: The list of questions to display
    """
    db = current.db
    qtable = current.s3db.survey_question
    headers = []
    happend = headers.append
    types = []
    items = []
    qstn_posn = 0
    rowLen = len(question_id_list)
    # Maps a complete_id to its row index in items, so each completed
    # response occupies exactly one data row
    complete_lookup = {}
    for question_id in question_id_list:
        answers = survey_getAllAnswersForQuestionInSeries(question_id,
                                                          series_id)
        widgetObj = survey_getWidgetFromQuestion(question_id)
        question = db(qtable.id == question_id).select(qtable.name,
                                                       limitby=(0, 1)).first()
        happend(question.name)
        types.append(widgetObj.db_type())
        for answer in answers:
            complete_id = answer["complete_id"]
            if complete_id in complete_lookup:
                row = complete_lookup[complete_id]
            else:
                # First answer seen for this response: allocate a new
                # row pre-filled with empty strings
                row = len(complete_lookup)
                complete_lookup[complete_id]=row
                items.append([''] * rowLen)
            # qstn_posn is the column for the current question
            items[row][qstn_posn] = widgetObj.repr(answer["value"])
        qstn_posn += 1
    return [headers] + [types] + items
# =============================================================================
def getLocationList(series_id):
    """
        Get a list of the LatLons for each Response in a Series

        Parses each response's raw answer_list for the standard
        lat/lon/Lx questions; when no valid lat/lon was given, falls
        back to the response's default location question.

        @param series_id: the series whose responses to locate
        @return: list of Row-like location objects with lat, lon,
                 name and complete_id attributes
    """
    response_locations = []
    rappend = response_locations.append
    codeList = ["STD-L4", "STD-L3", "STD-L2", "STD-L1", "STD-L0"]
    table = current.s3db.survey_complete
    rows = current.db(table.series_id == series_id).select(table.id,
                                                           table.answer_list)
    for row in rows:
        lat = None
        lon = None
        name = None
        answer_list = row.answer_list.splitlines()
        answer_dict = {}
        for line in answer_list:
            (question, answer) = line.split(",", 1)
            question = question.strip('"')
            if question in codeList:
                # Store to get the name
                answer_dict[question] = answer.strip('"')
            elif question == "STD-Lat":
                try:
                    lat = float(answer.strip('"'))
                except:
                    pass
                else:
                    # Discard out-of-range latitudes
                    if lat < -90.0 or lat > 90.0:
                        lat = None
            elif question == "STD-Lon":
                try:
                    lon = float(answer.strip('"'))
                except:
                    pass
                else:
                    # Discard out-of-range longitudes
                    if lon < -180.0 or lon > 180.0:
                        lon = None
            else:
                # Not relevant here
                continue
        for locCode in codeList:
            # Retrieve the name of the lowest Lx
            if locCode in answer_dict:
                name = answer_dict[locCode]
                break
        if lat and lon:
            # We have sufficient data to display on the map
            location = Row()
            location.lat = lat
            location.lon = lon
            location.name = name
            location.complete_id = row.id
            rappend(location)
        else:
            # The lat & lon were not added to the assessment so try and get one
            locWidget = get_default_location(row.id)
            if locWidget:
                complete_id = locWidget.question["complete_id"]
                if "answer" not in locWidget.question:
                    continue
                answer = locWidget.question["answer"]
                if locWidget != None:
                    record = locWidget.getLocationRecord(complete_id, answer)
                    if len(record.records) == 1:
                        location = record.records[0].gis_location
                        location.complete_id = complete_id
                        rappend(location)
    return response_locations
# =============================================================================
class S3SurveyTranslateModel(S3Model):
    """
        Translations Model

        Holds one record per language a template has been translated
        into, including the uploaded translation spreadsheet.
    """

    # NB: class-level import; translate_onaccept() also re-imports
    # these names locally before use
    from gluon.languages import read_dict, write_dict

    names = ["survey_translate"]

    def model(self):
        """
            Define the survey_translate table
        """

        T = current.T

        # ---------------------------------------------------------------------
        # The survey_translate table holds the details of the language
        # for which the template has been translated into.
        LANG_HELP = T("This is the full name of the language and will be displayed to the user when selecting the template language.")
        CODE_HELP = T("This is the short code of the language and will be used as the name of the file. This should be the ISO 639 code.")
        tablename = "survey_translate"
        table = self.define_table(tablename,
                                  self.survey_template_id(),
                                  Field("language",
                                        readable=True,
                                        writable=True,
                                        comment = DIV(_class="tooltip",
                                                      _title="%s|%s" % (T("Language"),
                                                                        LANG_HELP))
                                        ),
                                  Field("code",
                                        readable=True,
                                        writable=True,
                                        comment = DIV(_class="tooltip",
                                                      _title="%s|%s" % (T("Language Code"),
                                                                        CODE_HELP))
                                        ),
                                  Field("file", "upload",
                                        autodelete=True),
                                  Field("filename",
                                        readable=False,
                                        writable=False),
                                  *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            title_create = T("Add new translation language"),
        )

        self.configure(tablename,
                       onaccept = self.translate_onaccept,
                       )
        # ---------------------------------------------------------------------
        return Storage()

    # -------------------------------------------------------------------------
    @staticmethod
    def translate_onaccept(form):
        """
            If the translation spreadsheet has been uploaded then
            it needs to be processed.

            The translation strings need to be extracted from
            the spreadsheet and inserted into the language file.
        """
        if "file" in form.vars:
            try:
                import xlrd
            except ImportError:
                print >> sys.stderr, "ERROR: xlrd & xlwt modules are needed for importing spreadsheets"
                return None
            from gluon.languages import read_dict, write_dict

            T = current.T
            request = current.request
            response = current.response

            msgNone = T("No translations exist in spreadsheet")
            upload_file = request.post_vars.file
            upload_file.file.seek(0)
            openFile = upload_file.file.read()
            lang = form.record.language
            code = form.record.code
            try:
                workbook = xlrd.open_workbook(file_contents=openFile)
            except:
                msg = T("Unable to open spreadsheet")
                response.error = msg
                response.flash = None
                return
            try:
                # The workbook must contain a sheet named after the language
                sheetL = workbook.sheet_by_name(lang)
            except:
                msg = T("Unable to find sheet %(sheet_name)s in uploaded spreadsheet") % \
                    dict(sheet_name=lang)
                response.error = msg
                response.flash = None
                return
            if sheetL.ncols == 1:
                response.warning = msgNone
                response.flash = None
                return
            count = 0
            lang_fileName = "applications/%s/uploads/survey/translations/%s.py" % \
                (request.application, code)
            try:
                # Merge into any existing language file
                strings = read_dict(lang_fileName)
            except:
                strings = dict()
            # Column 0 = original string, column 1 = translation
            for row in xrange(1, sheetL.nrows):
                original = sheetL.cell_value(row, 0)
                translation = sheetL.cell_value(row, 1)
                if (original not in strings) or translation != "":
                    strings[original] = translation
                    count += 1
            write_dict(lang_fileName, strings)
            if count == 0:
                response.warning = msgNone
                response.flash = None
            else:
                response.flash = T("%(count_of)d translations have been imported to the %(language)s language file") % \
                    dict(count_of=count, language=lang)
# =============================================================================
def survey_getAllTranslationsForTemplate(template_id):
    """
        Return every survey_translate row linked to the template.
    """
    ttable = current.s3db.survey_translate
    query = (ttable.template_id == template_id)
    return current.db(query).select()
# =============================================================================
def survey_getAllTranslationsForSeries(series_id):
    """
        Function to return all the translations for the given series

        Looks up the series' template and delegates to
        survey_getAllTranslationsForTemplate().

        @return: the translation rows; an empty list when the series
                 id is unknown (the original raised AttributeError
                 on row.template_id in that case)
    """
    table = current.s3db.survey_series
    row = current.db(table.id == series_id).select(table.template_id,
                                                   limitby=(0, 1)).first()
    if row is None:
        return []
    return survey_getAllTranslationsForTemplate(row.template_id)
# =============================================================================
# Generic function called by the duplicator methods to determine if the
# record already exists on the database.
def duplicator(job, query):
    """
        This callback will be called when importing records it will look
        to see if the record being imported is a duplicate.

        @param job: An S3ImportJob object which includes all the details
                    of the record being imported
        @param query: the DAL query used to look for an existing record

        If the record is a duplicate then it will set the job method to update
    """
    table = job.table
    original = current.db(query).select(table.id,
                                        limitby=(0, 1)).first()
    if original is not None:
        job.id = original.id
        job.data.id = original.id
        job.method = job.METHOD.UPDATE
# END =========================================================================
|
mit
| 4,581,564,324,637,390,300
| 38.911366
| 195
| 0.471481
| false
| 4.893635
| false
| false
| false
|
AIFDR/inasafe-django
|
django_project/realtime/migrations/0050_reporttemplate.py
|
2
|
1641
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the ``ReportTemplate`` model for uploaded QGIS report
    template files (*.qpt)."""

    dependencies = [
        ('realtime', '0049_auto_20180320_0406'),
    ]

    operations = [
        migrations.CreateModel(
            name='ReportTemplate',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('timestamp', models.DateTimeField(help_text='The time the template uploaded.', verbose_name='Timestamp')),
                ('version', models.CharField(default=None, max_length=10, blank=True, help_text='Version number of the template.', null=True, verbose_name='Template version')),
                ('notes', models.CharField(default=None, max_length=255, blank=True, help_text='Notes of the report template.', null=True, verbose_name='Template Notes')),
                ('language', models.CharField(default=b'id', help_text='The language ID of the report', max_length=4, verbose_name='Language ID')),
                ('hazard', models.CharField(default=None, help_text='The hazard type of the template.', max_length=25, verbose_name='Hazard Type')),
                ('template_file', models.FileField(help_text='Template file formatted as qgis template file (*.qpt).', upload_to=b'', verbose_name='Template File')),
                ('owner', models.IntegerField(default=0, help_text='The owner/uploader of the template.', verbose_name='Owner')),
            ],
            options={
                'verbose_name_plural': 'Report Templates',
            },
        ),
    ]
|
bsd-2-clause
| -3,155,160,990,121,272,300
| 53.7
| 176
| 0.622182
| false
| 4.229381
| false
| false
| false
|
supernifty/mgsa
|
mgsa/analyze_bam.py
|
1
|
2071
|
import argparse
import collections
import numpy
import sys
import bio
import config
parser = argparse.ArgumentParser(description='Analyze BAM')
parser.add_argument('bam', metavar='bam', help='bam file to analyze')
parser.add_argument('--buckets', metavar='buckets', type=int, default=10, help='number of buckets')
parser.add_argument('--max_sample', metavar='max_sample', type=int, default=-1, help='max number of samples in each group')
parser.add_argument('--skip', metavar='skip', type=int, default=0, help='skip the first reads')
args = parser.parse_args()
bam = bio.BamReaderExternal( config.BAM_TO_SAM, args.bam )
stats = bio.SamStats( bam, max_sample=args.max_sample, skip=skip )
# gc
buckets = numpy.linspace(0, 1, args.buckets + 1)
mapped_buckets = bio.bucket( filter( None, stats.mapped['gc'] ), buckets )
unmapped_buckets = bio.bucket( filter( None, stats.unmapped['gc'] ), buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '========== GC content =========='
print 'GC %%: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
# entropy
mapped_buckets = bio.bucket( stats.mapped['entropy'], buckets )
unmapped_buckets = bio.bucket( stats.unmapped['entropy'], buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '\n========== Entropy =========='
print 'Mapped: min: %.2f max: %.2f' % ( min( stats.mapped['entropy'] ), max( stats.mapped['entropy'] ) )
print 'Unmapped: min: %.2f max: %.2f' % ( min( stats.unmapped['entropy'] ), max( stats.unmapped['entropy'] ) )
print 'Entropy: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
|
mit
| 6,292,994,006,158,052,000
| 45.022222
| 123
| 0.649445
| false
| 3.059084
| false
| false
| false
|
minlexx/pyevemon
|
esi_client/models/get_characters_character_id_mail_labels_forbidden.py
|
1
|
3097
|
# coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetCharactersCharacterIdMailLabelsForbidden(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    def __init__(self, error=None):
        """
        GetCharactersCharacterIdMailLabelsForbidden - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
            and the value is attribute type.
        :param dict attributeMap: The key is attribute name
            and the value is json key in definition.
        """
        # attribute name -> swagger type name
        self.swagger_types = {
            'error': 'str'
        }
        # attribute name -> JSON key in the API definition
        self.attribute_map = {
            'error': 'error'
        }
        self._error = error

    @property
    def error(self):
        """
        Gets the error of this GetCharactersCharacterIdMailLabelsForbidden.
        Forbidden message

        :return: The error of this GetCharactersCharacterIdMailLabelsForbidden.
        :rtype: str
        """
        return self._error

    @error.setter
    def error(self, error):
        """
        Sets the error of this GetCharactersCharacterIdMailLabelsForbidden.
        Forbidden message

        :param error: The error of this GetCharactersCharacterIdMailLabelsForbidden.
        :type: str
        """
        self._error = error

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # one level of serialisation for nested models in lists
                result[attr] = [item.to_dict()
                                if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # one level of serialisation for nested models in dict values
                result[attr] = dict(
                    (key, val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items())
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, GetCharactersCharacterIdMailLabelsForbidden):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
gpl-3.0
| -9,000,179,383,787,860,000
| 25.470085
| 84
| 0.539554
| false
| 4.636228
| false
| false
| false
|
dafrito/trac-mirror
|
trac/ticket/default_workflow.py
|
1
|
21721
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2009 Edgewall Software
# Copyright (C) 2006 Alec Thomas
# Copyright (C) 2007 Eli Carter
# Copyright (C) 2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Eli Carter
import pkg_resources
from ConfigParser import RawConfigParser
from StringIO import StringIO
from genshi.builder import tag
from trac.config import Configuration, ConfigSection
from trac.core import *
from trac.env import IEnvironmentSetupParticipant
from trac.perm import PermissionSystem
from trac.ticket.api import ITicketActionController, TicketSystem
from trac.ticket.model import Resolution
from trac.util.text import obfuscate_email_address
from trac.util.translation import _, tag_, cleandoc_
from trac.web.chrome import Chrome, add_script, add_script_data
from trac.wiki.macros import WikiMacroBase
# -- Utilities for the ConfigurableTicketWorkflow
def parse_workflow_config(rawactions):
    """Given a list of options from [ticket-workflow], return a dict
    mapping each action name to a dict of normalized attributes.

    Raw options come in two shapes:
      * base name:  ``action = old,states,here -> newstate``
      * attribute:  ``action.attribute = value``
    """
    actions = {}
    for option, value in rawactions:
        parts = option.split('.')
        name = parts[0]
        if name not in actions:
            actions[name] = {'oldstates': '', 'newstate': ''}
        if len(parts) == 1:
            # Base name, of the syntax: old,states,here -> newstate
            try:
                oldstates, newstate = [x.strip() for x in value.split('->')]
            except ValueError:
                continue  # Syntax error, a warning will be logged later
            actions[name]['newstate'] = newstate
            actions[name]['oldstates'] = oldstates
        else:
            # Attribute assignment; reuse the split computed above instead
            # of re-splitting the option name a second time.
            name, attribute = parts
            actions[name][attribute] = value

    def as_list(attributes, key):
        """Split a comma-separated attribute value into a clean list."""
        value = attributes.get(key, '')
        return [item for item in (x.strip() for x in value.split(',')) if item]

    # Fill in the defaults for every action, and normalize them to the
    # desired types.  (`attributes` is now passed explicitly to as_list
    # instead of being captured late from the loop variable.)
    for action, attributes in actions.items():
        # Default the 'name' attribute to the name used in the ini file
        attributes.setdefault('name', action)
        # If not specified, an action is not the default.
        attributes['default'] = int(attributes.get('default', 0))
        # If operations are not specified, that means no operations
        attributes['operations'] = as_list(attributes, 'operations')
        # If no permissions are specified, then no permissions are needed
        attributes['permissions'] = as_list(attributes, 'permissions')
        # Normalize the oldstates
        attributes['oldstates'] = as_list(attributes, 'oldstates')
    return actions
def get_workflow_config(config):
    """Usually passed self.config, this will return the parsed
    ticket-workflow section.
    """
    return parse_workflow_config(list(config.options('ticket-workflow')))
def load_workflow_config_snippet(config, filename):
    """Load the ticket-workflow section from the given file (expected to
    be in the 'workflows' tree) into the provided config.
    """
    path = pkg_resources.resource_filename('trac.ticket',
                                           'workflows/%s' % filename)
    snippet = Configuration(path)
    for name, value in snippet.options('ticket-workflow'):
        config.set('ticket-workflow', name, value)
class ConfigurableTicketWorkflow(Component):
    """Ticket action controller which provides actions according to a
    workflow defined in trac.ini.

    The workflow is defined in the `[ticket-workflow]` section of the
    [wiki:TracIni#ticket-workflow-section trac.ini] configuration file.
    """

    ticket_workflow_section = ConfigSection('ticket-workflow',
        """The workflow for tickets is controlled by plugins. By default,
        there's only a `ConfigurableTicketWorkflow` component in charge.
        That component allows the workflow to be configured via this section
        in the `trac.ini` file. See TracWorkflow for more details.
        (''since 0.11'')""")

    def __init__(self, *args, **kwargs):
        # Parse the [ticket-workflow] config section once, at component
        # creation time.
        self.actions = get_workflow_config(self.config)
        if not '_reset' in self.actions:
            # Special action that gets enabled if the current status no longer
            # exists, as no other action can then change its state. (#5307)
            self.actions['_reset'] = {
                'default': 0,
                'name': 'reset',
                'newstate': 'new',
                'oldstates': [],  # Will not be invoked unless needed
                'operations': ['reset_workflow'],
                'permissions': []}
        self.log.debug('Workflow actions at initialization: %s\n' %
                       str(self.actions))
        # Warn about misconfigured actions that can never transition.
        for name, info in self.actions.iteritems():
            if not info['newstate']:
                self.log.warning("Ticket workflow action '%s' doesn't define "
                                 "any transitions", name)

    implements(ITicketActionController, IEnvironmentSetupParticipant)

    # IEnvironmentSetupParticipant methods

    def environment_created(self):
        """When an environment is created, we provide the basic-workflow,
        unless a ticket-workflow section already exists.
        """
        if not 'ticket-workflow' in self.config.sections():
            load_workflow_config_snippet(self.config, 'basic-workflow.ini')
            self.config.save()
            self.actions = get_workflow_config(self.config)

    def environment_needs_upgrade(self, db):
        """The environment needs an upgrade if there is no [ticket-workflow]
        section in the config.
        """
        return not list(self.config.options('ticket-workflow'))

    def upgrade_environment(self, db):
        """Insert a [ticket-workflow] section using the original-workflow"""
        load_workflow_config_snippet(self.config, 'original-workflow.ini')
        self.config.save()
        self.actions = get_workflow_config(self.config)
        info_message = """
==== Upgrade Notice ====
The ticket Workflow is now configurable.
Your environment has been upgraded, but configured to use the original
workflow. It is recommended that you look at changing this configuration to use
basic-workflow.
Read TracWorkflow for more information (don't forget to 'wiki upgrade' as well)
"""
        self.log.info(info_message.replace('\n', ' ').replace('==', ''))
        print info_message

    # ITicketActionController methods

    def get_ticket_actions(self, req, ticket):
        """Returns a list of (weight, action) tuples that are valid for this
        request and this ticket."""
        # Get the list of actions that can be performed
        # Determine the current status of this ticket.  If this ticket is in
        # the process of being modified, we need to base our information on the
        # pre-modified state so that we don't try to do two (or more!) steps at
        # once and get really confused.
        status = ticket._old.get('status', ticket['status']) or 'new'
        ticket_perm = req.perm(ticket.resource)
        allowed_actions = []
        for action_name, action_info in self.actions.items():
            oldstates = action_info['oldstates']
            if oldstates == ['*'] or status in oldstates:
                # This action is valid in this state.  Check permissions.
                required_perms = action_info['permissions']
                if self._is_action_allowed(ticket_perm, required_perms):
                    allowed_actions.append((action_info['default'],
                                            action_name))
        if not (status in ['new', 'closed'] or \
                    status in TicketSystem(self.env).get_all_status()) \
                and 'TICKET_ADMIN' in ticket_perm:
            # State no longer exists - add a 'reset' action if admin.
            allowed_actions.append((0, '_reset'))
        return allowed_actions

    def _is_action_allowed(self, ticket_perm, required_perms):
        # An action with no required permissions is open to everyone;
        # otherwise any single listed permission suffices.
        if not required_perms:
            return True
        for permission in required_perms:
            if permission in ticket_perm:
                return True
        return False

    def get_all_status(self):
        """Return a list of all states described by the configuration.
        """
        all_status = set()
        for action_name, action_info in self.actions.items():
            all_status.update(action_info['oldstates'])
            all_status.add(action_info['newstate'])
        # '*' is a wildcard, '' comes from unset oldstates - neither is a
        # real state.
        all_status.discard('*')
        all_status.discard('')
        return all_status

    def render_ticket_action_control(self, req, ticket, action):
        """Return (label, control, hint) markup for the given action.

        `control` is the genshi fragment rendered next to the action radio
        button, `hint` a human-readable summary of what will happen.
        """
        self.log.debug('render_ticket_action_control: action "%s"' % action)
        this_action = self.actions[action]
        status = this_action['newstate']
        operations = this_action['operations']
        # Base the current owner on the pre-modification value, if any.
        current_owner_or_empty = ticket._old.get('owner', ticket['owner'])
        current_owner = current_owner_or_empty or '(none)'
        if not (Chrome(self.env).show_email_addresses
                or 'EMAIL_VIEW' in req.perm(ticket.resource)):
            format_user = obfuscate_email_address
        else:
            format_user = lambda address: address
        current_owner = format_user(current_owner)

        control = [] # default to nothing
        hints = []
        if 'reset_workflow' in operations:
            control.append(tag("from invalid state "))
            hints.append(_("Current state no longer exists"))
        if 'del_owner' in operations:
            hints.append(_("The ticket will be disowned"))
        if 'set_owner' in operations:
            id = 'action_%s_reassign_owner' % action
            selected_owner = req.args.get(id, req.authname)
            # Candidate owners come from the action config, or from users
            # with TICKET_MODIFY when restrict_owner is on, else free text.
            if this_action.has_key('set_owner'):
                owners = [x.strip() for x in
                          this_action['set_owner'].split(',')]
            elif self.config.getbool('ticket', 'restrict_owner'):
                perm = PermissionSystem(self.env)
                owners = perm.get_users_with_permission('TICKET_MODIFY')
                owners.sort()
            else:
                owners = None

            if owners == None:
                owner = req.args.get(id, req.authname)
                control.append(tag_('to %(owner)s',
                                    owner=tag.input(type='text', id=id,
                                                    name=id, value=owner)))
                hints.append(_("The owner will be changed from "
                               "%(current_owner)s to the specified user",
                               current_owner=current_owner))
            elif len(owners) == 1:
                owner = tag.input(type='hidden', id=id, name=id,
                                  value=owners[0])
                formatted_owner = format_user(owners[0])
                control.append(tag_('to %(owner)s ',
                                    owner=tag(formatted_owner, owner)))
                if ticket['owner'] != owners[0]:
                    hints.append(_("The owner will be changed from "
                                   "%(current_owner)s to %(selected_owner)s",
                                   current_owner=current_owner,
                                   selected_owner=formatted_owner))
            else:
                control.append(tag_('to %(owner)s', owner=tag.select(
                    [tag.option(x, value=x,
                                selected=(x == selected_owner or None))
                     for x in owners],
                    id=id, name=id)))
                hints.append(_("The owner will be changed from "
                               "%(current_owner)s to the selected user",
                               current_owner=current_owner))
        elif 'set_owner_to_self' in operations and \
                ticket._old.get('owner', ticket['owner']) != req.authname:
            hints.append(_("The owner will be changed from %(current_owner)s "
                           "to %(authname)s", current_owner=current_owner,
                           authname=req.authname))
        if 'set_resolution' in operations:
            # Resolutions come from the action config, else from the
            # environment's defined resolutions.
            if this_action.has_key('set_resolution'):
                resolutions = [x.strip() for x in
                               this_action['set_resolution'].split(',')]
            else:
                resolutions = [val.name for val in Resolution.select(self.env)]
            if not resolutions:
                raise TracError(_("Your workflow attempts to set a resolution "
                                  "but none is defined (configuration issue, "
                                  "please contact your Trac admin)."))
            id = 'action_%s_resolve_resolution' % action
            if len(resolutions) == 1:
                resolution = tag.input(type='hidden', id=id, name=id,
                                       value=resolutions[0])
                control.append(tag_('as %(resolution)s',
                                    resolution=tag(resolutions[0],
                                                   resolution)))
                hints.append(_("The resolution will be set to %(name)s",
                               name=resolutions[0]))
            else:
                selected_option = req.args.get(id,
                        TicketSystem(self.env).default_resolution)
                control.append(tag_('as %(resolution)s',
                                    resolution=tag.select(
                    [tag.option(x, value=x,
                                selected=(x == selected_option or None))
                     for x in resolutions],
                    id=id, name=id)))
                hints.append(_("The resolution will be set"))
        if 'del_resolution' in operations:
            hints.append(_("The resolution will be deleted"))
        if 'leave_status' in operations:
            control.append(_('as %(status)s ',
                             status= ticket._old.get('status',
                                                     ticket['status'])))
            if len(operations) == 1:
                hints.append(_("The owner will remain %(current_owner)s",
                               current_owner=current_owner)
                             if current_owner_or_empty else
                             _("The ticket will remain with no owner"))
        else:
            if status != '*':
                hints.append(_("Next status will be '%(name)s'", name=status))
        return (this_action['name'], tag(*control), '. '.join(hints) + '.'
                if hints else '')

    def get_ticket_changes(self, req, ticket, action):
        """Return the dict of field changes this action would apply."""
        this_action = self.actions[action]

        # Enforce permissions
        if not self._has_perms_for_action(req, this_action, ticket.resource):
            # The user does not have any of the listed permissions, so we won't
            # do anything.
            return {}

        updated = {}
        # Status changes
        status = this_action['newstate']
        if status != '*':
            updated['status'] = status

        for operation in this_action['operations']:
            if operation == 'reset_workflow':
                updated['status'] = 'new'
            elif operation == 'del_owner':
                updated['owner'] = ''
            elif operation == 'set_owner':
                newowner = req.args.get('action_%s_reassign_owner' % action,
                                        this_action.get('set_owner', '').strip())
                # If there was already an owner, we get a list, [new, old],
                # but if there wasn't we just get new.
                if type(newowner) == list:
                    newowner = newowner[0]
                updated['owner'] = newowner
            elif operation == 'set_owner_to_self':
                updated['owner'] = req.authname
            elif operation == 'del_resolution':
                updated['resolution'] = ''
            elif operation == 'set_resolution':
                newresolution = req.args.get('action_%s_resolve_resolution' % \
                                             action,
                                this_action.get('set_resolution', '').strip())
                updated['resolution'] = newresolution

            # leave_status is just a no-op here, so we don't look for it.
        return updated

    def apply_action_side_effects(self, req, ticket, action):
        # This controller has no side effects beyond the field changes
        # returned by get_ticket_changes().
        pass

    def _has_perms_for_action(self, req, action, resource):
        # True when the action lists no permissions, or the user holds at
        # least one of them on the given resource.
        required_perms = action['permissions']
        if required_perms:
            for permission in required_perms:
                if permission in req.perm(resource):
                    break
            else:
                # The user does not have any of the listed permissions
                return False
        return True

    # Public methods (for other ITicketActionControllers that want to use
    #                 our config file and provide an operation for an action)

    def get_actions_by_operation(self, operation):
        """Return a list of all actions with a given operation
        (for use in the controller's get_all_status())
        """
        actions = [(info['default'], action) for action, info
                   in self.actions.items()
                   if operation in info['operations']]
        return actions

    def get_actions_by_operation_for_req(self, req, ticket, operation):
        """Return list of all actions with a given operation that are valid
        in the given state for the controller's get_ticket_actions().

        If state='*' (the default), all actions with the given operation are
        returned.
        """
        # Be sure to look at the original status.
        status = ticket._old.get('status', ticket['status'])
        actions = [(info['default'], action) for action, info
                   in self.actions.items()
                   if operation in info['operations'] and
                      ('*' in info['oldstates'] or
                       status in info['oldstates']) and
                      self._has_perms_for_action(req, info, ticket.resource)]
        return actions
class WorkflowMacro(WikiMacroBase):
    # Translation domain for the macro description below.
    _domain = 'messages'
    _description = cleandoc_(
    """Render a workflow graph.
    This macro accepts a TracWorkflow configuration and renders the states
    and transitions as a directed graph. If no parameters are given, the
    current ticket workflow is rendered. In WikiProcessors mode the `width`
    and `height` arguments can be specified.
    (Defaults: `width = 800` and `heigth = 600`)
    Examples:
    {{{
        [[Workflow()]]
        [[Workflow(go = here -> there; return = there -> here)]]
        {{{
        #!Workflow width=700 height=700
        leave = * -> *
        leave.operations = leave_status
        leave.default = 1
        accept = new,assigned,accepted,reopened -> accepted
        accept.permissions = TICKET_MODIFY
        accept.operations = set_owner_to_self
        resolve = new,assigned,accepted,reopened -> closed
        resolve.permissions = TICKET_MODIFY
        resolve.operations = set_resolution
        reassign = new,assigned,accepted,reopened -> assigned
        reassign.permissions = TICKET_MODIFY
        reassign.operations = set_owner
        reopen = closed -> reopened
        reopen.permissions = TICKET_CREATE
        reopen.operations = del_resolution
        }}}
        }}}
    """)

    def expand_macro(self, formatter, name, text, args):
        """Render the graph for the given macro invocation.

        `text` is an optional TracWorkflow snippet; `args` is the
        WikiProcessor argument dict (width/height), or None when the macro
        is used inline.
        """
        if not text:
            # No body: graph the environment's configured workflow.
            raw_actions = self.config.options('ticket-workflow')
        else:
            if args is None:
                # Inline macro form: actions are separated by ';'.
                text = '\n'.join([line.lstrip() for line in text.split(';')])
            if not '[ticket-workflow]' in text:
                text = '[ticket-workflow]\n' + text
            parser = RawConfigParser()
            parser.readfp(StringIO(text))
            raw_actions = list(parser.items('ticket-workflow'))
        actions = parse_workflow_config(raw_actions)
        # Collect the set of distinct states on either side of a transition.
        states = list(set(
            [state for action in actions.itervalues()
             for state in action['oldstates']] +
            [action['newstate'] for action in actions.itervalues()]))
        action_names = actions.keys()
        # Edges are (old state index, new state index, action index) triples
        # consumed by the client-side graph renderer.
        edges = []
        for name, action in actions.items():
            new_index = states.index(action['newstate'])
            name_index = action_names.index(name)
            for old_state in action['oldstates']:
                old_index = states.index(old_state)
                edges.append((old_index, new_index, name_index))

        args = args or {}
        graph = {'nodes': states, 'actions': action_names, 'edges': edges,
                 'width': args.get('width', 800),
                 'height': args.get('height', 600)}
        graph_id = '%012x' % id(graph)
        req = formatter.req
        add_script(req, 'common/js/excanvas.js', ie_if='IE')
        add_script(req, 'common/js/workflow_graph.js')
        add_script_data(req, {'graph_%s' % graph_id: graph})
        # The actual drawing happens in JavaScript; this div is the anchor.
        return tag.div(_("Enable JavaScript to display the workflow graph."),
                       class_='trac-workflow-graph system-message',
                       id='trac-workflow-graph-%s' % graph_id)
|
bsd-3-clause
| -5,058,617,368,730,896,000
| 42.268924
| 79
| 0.569173
| false
| 4.506432
| true
| false
| false
|
cs207-project/TimeSeries
|
procs/_corr.py
|
1
|
4794
|
import numpy.fft as nfft
import numpy as np
import timeseries as ts
from scipy.stats import norm
# import pyfftw
import sys
#sys.path.append("/Users/yuhantang/CS207/TimeSeries/procs")
from .interface import *
def createfromlist(l):
    """Copy a Python sequence into a freshly allocated darray
    (the C-extension array type used by four1)."""
    arr = new_darray(len(l))
    for idx, val in enumerate(l):
        darray_set(arr, idx, val)
    return arr
def tsmaker(m, s, j):
    """Return (meta, TimeSeries): a Gaussian bump pdf(t; m, s) on
    t in [0, 1) plus noise of amplitude j, with random metadata."""
    meta = {
        'order': int(np.random.choice([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])),
        'blarg': int(np.random.choice([1, 2])),
    }
    t = np.arange(0.0, 1.0, 0.01)
    v = norm.pdf(t, m, s) + j * np.random.randn(100)
    return meta, ts.TimeSeries(t, v)
def random_ts(a):
    """Return a TimeSeries of 100 uniform random values scaled by a,
    sampled on t in [0, 1)."""
    times = np.arange(0.0, 1.0, 0.01)
    values = a * np.random.random(100)
    return ts.TimeSeries(times, values)
def stand(x, m, s):
    """Standardize x by subtracting mean m and dividing by std s."""
    centered = x - m
    return centered / s
def ccor(ts1, ts2):
    """given two standardized time series, compute their cross-correlation
    using FFT

    The series are marshalled into the 1-based, real/imaginary-interleaved
    double arrays expected by the C `four1` routine, transformed, multiplied
    with conjugation, and transformed back.  Returns a numpy array of
    correlation values normalized by the series length.
    """
    # Transform length: the next power of 2 at or above the series length,
    # e.g. 110 -> 128.
    # BUGFIX: this previously used np.log (natural log), which produced a
    # transform size *smaller* than the input (100 -> 2**5 = 32); np.log2
    # yields the intended next power of two (100 -> 2**7 = 128).
    next_2 = int(2**np.ceil(np.log2(len(ts1.values()))))
    #
    ts1_value = ts1.values()
    ts2_value = ts2.values()
    ts1_container,ts2_container = [],[]
    # Interleave each series with zero imaginary parts.
    ts1_zero_container = [0]*len(ts1.values())
    ts2_zero_container = [0]*len(ts2.values())
    ts1_c_array,ts2_c_array = [None]*(len(ts1.values())*2),[None]*(len(ts2.values())*2)
    ts1_c_array[::2] = ts1_value
    ts1_c_array[1::2] = ts1_zero_container
    ts2_c_array[::2] = ts2_value
    ts2_c_array[1::2] = ts2_zero_container
    # Zero-pad to the transform length and shift to 1-based indexing, as
    # required by the numerical-recipes style four1 routine.
    for i in range(len(ts1_c_array)+1,next_2*2):
        ts1_c_array.append(np.double(0))
    for i in range(len(ts2_c_array)+1,next_2*2):
        ts2_c_array.append(np.double(0))
    ts1_c_array.insert(0,0)
    ts2_c_array.insert(0,0)
    ts1_c_array = createfromlist(np.double(ts1_c_array))
    ts2_c_array = createfromlist(np.double(ts2_c_array))
    # Forward FFTs, in place.
    four1(ts1_c_array,next_2,1)
    four1(ts2_c_array,next_2,1)
    for i in range(len(ts2.values())*2+1):
        ts1_container.append(darray_get(ts1_c_array,i))
    for j in range(len(ts1.values())*2+1):
        ts2_container.append(darray_get(ts2_c_array,j))
    # Reassemble complex spectra from the interleaved layout.
    ts1_fft = np.asarray(ts1_container[1::2]) + 1j * np.asarray(ts1_container[2::2])
    ts2_fft = np.asarray(ts2_container[1::2]) + 1j * np.asarray(ts2_container[2::2])
    ts1_fft = ts1_fft[:len(ts1)+1]
    ts2_fft = ts2_fft[:len(ts2)+1]
    # ifft part: multiply by the conjugate spectrum and transform back.
    ts1_ts2_conj = ts1_fft * np.conj(ts2_fft)
    ts1_ts2_ifft_container = [0]*len(ts1_ts2_conj)*2
    ts1_ts2_ifft_container[::2] = ts1_ts2_conj.real
    ts1_ts2_ifft_container[1::2] = ts1_ts2_conj.imag
    for i in range(len(ts1_ts2_conj)+1, next_2 *2):
        ts1_ts2_ifft_container.append(0)
    ts1_ts2_ifft_container.insert(0,0)
    ts1_ts2_ifft_container = createfromlist(ts1_ts2_ifft_container)
    four1(ts1_ts2_ifft_container, next_2, -1)
    ts1_ts2_ifft_container_python = []
    for i in range(len(ts1_ts2_conj)*2+1):
        ts1_ts2_ifft_container_python.append(darray_get(ts1_ts2_ifft_container,i))
    ccor_value = np.asarray(ts1_ts2_ifft_container_python[1::2])
    # Normalize by the series length.
    return 1/len(ts1) * ccor_value
def max_corr_at_phase(ts1, ts2):
    """Return (lag index, value) of the maximum cross-correlation
    between two standardized time series."""
    correlations = ccor(ts1, ts2)
    best = np.argmax(correlations)
    return best, correlations[best]
#The equation for the kernelized cross correlation is given at
#http://www.cs.tufts.edu/~roni/PUB/ecml09-tskernels.pdf
#normalize the kernel there by np.sqrt(K(x,x)K(y,y)) so that the correlation
#of a time series with itself is 1.
def kernel_corr(ts1, ts2, mult=1):
    "compute a kernelized correlation so that we can get a real distance"
    # Exponentiated cross-correlation, normalized by the self-correlation
    # kernels so a series correlates with itself at exactly 1.
    numerator = np.sum(np.exp(ccor(ts1, ts2) * mult))
    self1 = np.sum(np.exp(ccor(ts1, ts1) * mult))
    self2 = np.sum(np.exp(ccor(ts2, ts2) * mult))
    return numerator / np.sqrt(self1 * self2)
#this is for a quick and dirty test of these functions
#you might need to add procs to pythonpath for this to work
if __name__ == "__main__":
    # Quick manual smoke test: two similar noisy Gaussian bumps should show
    # high kernelized correlation; two unrelated random series should not.
    print("HI")
    _, t1 = tsmaker(0.5, 0.1, 0.01)
    _, t2 = tsmaker(0.5, 0.1, 0.01)
    print(t1.mean(), t1.std(), t2.mean(), t2.std())
    import matplotlib.pyplot as plt
    plt.plot(t1)
    plt.plot(t2)
    plt.show()
    # Standardize before correlating, as ccor() expects.
    standts1 = stand(t1, t1.mean(), t1.std())
    standts2 = stand(t2, t2.mean(), t2.std())
    #print(type(standts1),'this is the type=================*********')
    #assert 1 == 2
    idx, mcorr = max_corr_at_phase(standts1, standts2)
    print(idx, mcorr)
    sumcorr = kernel_corr(standts1, standts2, mult=10)
    print(sumcorr)
    # Repeat with two unrelated uniform-random series.
    t3 = random_ts(2)
    t4 = random_ts(3)
    plt.plot(t3)
    plt.plot(t4)
    plt.show()
    standts3 = stand(t3, t3.mean(), t3.std())
    standts4 = stand(t4, t4.mean(), t4.std())
    idx, mcorr = max_corr_at_phase(standts3, standts4)
    print(idx, mcorr)
    sumcorr = kernel_corr(standts3, standts4, mult=10)
    print(sumcorr)
|
mit
| 2,104,750,453,322,507,300
| 29.929032
| 103
| 0.623905
| false
| 2.495575
| false
| false
| false
|
antiface/ThinkBayes2
|
code/cookie3.py
|
1
|
1095
|
"""This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import thinkbayes2
class Cookie(thinkbayes2.Suite):
    """A map from string bowl ID to probablity."""

    def Likelihood(self, data, hypo):
        """The likelihood of the data under the hypothesis.

        data: string cookie type
        hypo: string bowl ID
        """
        count = hypo[data]
        like = count / hypo.Total()
        if like:
            # Drawing without replacement: remove the observed cookie
            # from the hypothesized bowl.
            hypo[data] -= 1
        return like
def main():
    """Run the two-bowl cookie problem, drawing without replacement."""
    bowl1 = thinkbayes2.Hist(dict(vanilla=30, chocolate=10))
    bowl2 = thinkbayes2.Hist(dict(vanilla=20, chocolate=20))
    pmf = Cookie([bowl1, bowl2])

    def observe(title, flavor):
        # Update on one observed cookie and print the posterior.
        print(title)
        pmf.Update(flavor)
        for hypo, prob in pmf.Items():
            print(hypo, prob)

    observe('After 1 vanilla', 'vanilla')
    observe('\nAfter 1 vanilla, 1 chocolate', 'chocolate')
if __name__ == '__main__':
main()
|
gpl-2.0
| 7,652,526,682,298,288,000
| 23.333333
| 60
| 0.628311
| false
| 3.220588
| false
| false
| false
|
dvro/scikit-protopy
|
protopy/base.py
|
1
|
4528
|
"""Base and mixin classes for instance reduction techniques"""
# Author: Dayvid Victor <dvro@cin.ufpe.br>
# License: BSD Style
import warnings
from abc import ABCMeta, abstractmethod
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.neighbors.classification import KNeighborsClassifier
from sklearn.utils import check_array
from sklearn.externals import six
class InstanceReductionWarning(UserWarning):
    """Warning category emitted by instance reduction estimators."""
    pass
# Make sure that NeighborsWarning are displayed more than once
warnings.simplefilter("always", InstanceReductionWarning)
class InstanceReductionBase(six.with_metaclass(ABCMeta, BaseEstimator)):
    """Base class for instance reduction estimators."""

    @abstractmethod
    def __init__(self):
        # Abstract: concrete techniques define their own constructor
        # (hyper-parameters, etc.).
        pass
class InstanceReductionMixin(InstanceReductionBase, ClassifierMixin):
"""Mixin class for all instance reduction techniques"""
def set_classifier(self):
"""Sets the classified to be used in the instance reduction process
and classification.
Parameters
----------
classifier : classifier, following the KNeighborsClassifier style
(default = KNN)
y : array-like, shape = [n_samples]
Labels for X.
Returns
-------
P : array-like, shape = [indeterminated, n_features]
Resulting training set.
q : array-like, shape = [indertaminated]
Labels for P
"""
self.classifier = classifier
def reduce_data(self, X, y):
"""Perform the instance reduction procedure on the given training data.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training set.0
y : array-like, shape = [n_samples]
Labels for X.
Returns
-------
X_ : array-like, shape = [indeterminated, n_features]
Resulting training set.
y_ : array-like, shape = [indertaminated]
Labels for X_
"""
pass
def fit(self, X, y, reduce_data=True):
"""
Fit the InstanceReduction model according to the given training data.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Note that centroid shrinking cannot be used with sparse matrices.
y : array, shape = [n_samples]
Target values (integers)
reduce_data : bool, flag indicating if the reduction would be performed
"""
self.X = X
self.y = y
if reduce_data:
self.reduce_data(X, y)
return self
def predict(self, X, n_neighbors=1):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
Notes
-----
The default prediction is using KNeighborsClassifier, if the
instance reducition algorithm is to be performed with another
classifier, it should be explicited overwritten and explained
in the documentation.
"""
X = check_array(X)
if not hasattr(self, "X_") or self.X_ is None:
raise AttributeError("Model has not been trained yet.")
if not hasattr(self, "y_") or self.y_ is None:
raise AttributeError("Model has not been trained yet.")
if self.classifier == None:
self.classifier = KNeighborsClassifier(n_neighbors=n_neighbors)
self.classifier.fit(self.X_, self.y_)
return self.classifier.predict(X)
def predict_proba(self, X):
    """Return probability estimates for the test data X
    after a given prototype selection algorithm.

    Parameters
    ----------
    X : array, shape = (n_samples, n_features)
        A 2-D array representing the test points.

    Returns
    -------
    p : array of shape = [n_samples, n_classes], or a list of n_outputs
        of such arrays if n_outputs > 1.
        The class probabilities of the input samples. Classes are ordered
        by lexicographic order.

    Raises
    ------
    AttributeError
        If the reduced training set (``X_``/``y_``) has not been computed.
    """
    # Consistency with predict(): fail with the same clear error when the
    # model is unfitted, and fall back to the same default classifier
    # instead of crashing on a None classifier.
    if getattr(self, "X_", None) is None or getattr(self, "y_", None) is None:
        raise AttributeError("Model has not been trained yet.")
    if self.classifier is None:
        self.classifier = KNeighborsClassifier()
    self.classifier.fit(self.X_, self.y_)
    return self.classifier.predict_proba(X)
|
bsd-2-clause
| -6,141,545,886,733,990,000
| 28.38961
| 79
| 0.607601
| false
| 4.548744
| false
| false
| false
|
vguzmanp/cloud-in-one
|
main_crypto.py
|
1
|
1392
|
#!/usr/bin/env python3
import getpass
import argparse
import shutil
from core.databaseManager import DatabaseManager
from core.securityModule import SecurityModule
def processFile(file_in_name, file_out_name, encrypt_flag):
    """Encrypt or decrypt one file through the CLOUD-IN-ONE security module.

    Prompts interactively for the username and password, then reads
    file_in_name, transforms it (encrypt when encrypt_flag is True,
    decrypt otherwise) and writes the result to file_out_name.
    """
    username = input("CLOUD-IN-ONE Username: ")
    passwd = getpass.getpass()
    # An in-memory database is enough: nothing needs to persist for a
    # one-shot encrypt/decrypt run.
    security = SecurityModule(DatabaseManager(':memory:'), username, passwd)
    with open(file_in_name, 'rb') as source:
        transform = security.encrypt if encrypt_flag else security.decrypt
        result = transform(source)
    with open(file_out_name, 'wb') as sink:
        result.seek(0)
        shutil.copyfileobj(result, sink)
    result.close()
def main():
    """Parse command-line arguments and dispatch to processFile."""
    parser = argparse.ArgumentParser()
    mode = parser.add_mutually_exclusive_group(required=True)
    mode.add_argument("-d", "--decrypt", action="store_true")
    mode.add_argument("-e", "--encrypt", action="store_true")
    parser.add_argument("file", help="the file to encrypt / decrypt")
    parser.add_argument("file_output", help="name of the destination file")
    args = parser.parse_args()
    # Exactly one of -e/-d is set (required, mutually exclusive group), so
    # this is equivalent to the original two-step flag computation.
    processFile(args.file, args.file_output, args.encrypt or not args.decrypt)


if __name__ == '__main__':
    main()
|
mit
| 120,298,394,218,790,240
| 28
| 75
| 0.666667
| false
| 3.712
| false
| false
| false
|
PaesslerAG/django-performance-testing
|
settings.py
|
1
|
1267
|
# Django settings for autodata project.
# Minimal settings module used to exercise django-performance-testing.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
TEMPLATES = [
    {
        # Standard Django template engine; templates are discovered inside
        # installed apps (APP_DIRS) rather than from explicit DIRS.
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
                # list if you haven't customized them:
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# NOTE(review): no NAME is configured for the sqlite3 backend -- presumably
# acceptable for this test-only settings module; confirm before reuse.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
    }
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'mq%31q+sjj^)m^tvy(klwqw6ksv7du2yzdf9-django_performance_testing'
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django_performance_testing',
    'testapp',
)
STATIC_URL = '/static/'
# NOTE(review): ROOT_URLCONF is deliberately None -- tests presumably never
# resolve URLs through this settings module; confirm.
ROOT_URLCONF = None
|
bsd-3-clause
| 3,754,765,840,320,147,000
| 27.155556
| 78
| 0.598264
| false
| 3.874618
| false
| false
| false
|
owlabs/incubator-airflow
|
airflow/models/taskreschedule.py
|
1
|
3374
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""TaskReschedule tracks rescheduled task instances."""
from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, String, asc
from airflow.models.base import Base, ID_LEN
from airflow.utils.db import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
class TaskReschedule(Base):
    """
    TaskReschedule tracks rescheduled task instances.
    """
    __tablename__ = "task_reschedule"

    id = Column(Integer, primary_key=True)
    # (task_id, dag_id, execution_date) identifies the task instance this
    # reschedule belongs to; together with try_number it identifies one
    # rescheduled attempt.
    task_id = Column(String(ID_LEN), nullable=False)
    dag_id = Column(String(ID_LEN), nullable=False)
    execution_date = Column(UtcDateTime, nullable=False)
    try_number = Column(Integer, nullable=False)
    # Window during which the task ran before being rescheduled.
    start_date = Column(UtcDateTime, nullable=False)
    end_date = Column(UtcDateTime, nullable=False)
    # Elapsed seconds between start_date and end_date (see __init__).
    duration = Column(Integer, nullable=False)
    # When the task is due to run again.
    reschedule_date = Column(UtcDateTime, nullable=False)
    __table_args__ = (
        # Non-unique lookup index matching find_for_task_instance's filter.
        Index('idx_task_reschedule_dag_task_date', dag_id, task_id, execution_date,
              unique=False),
        # Delete reschedule rows automatically when their task instance goes.
        ForeignKeyConstraint([task_id, dag_id, execution_date],
                             ['task_instance.task_id', 'task_instance.dag_id',
                              'task_instance.execution_date'],
                             name='task_reschedule_dag_task_date_fkey',
                             ondelete='CASCADE')
    )

    def __init__(self, task, execution_date, try_number, start_date, end_date,
                 reschedule_date):
        """Record one reschedule event for `task`.

        `duration` is derived as (end_date - start_date).total_seconds();
        note it is a float assigned to an Integer column and is truncated
        by the database layer.
        """
        self.dag_id = task.dag_id
        self.task_id = task.task_id
        self.execution_date = execution_date
        self.try_number = try_number
        self.start_date = start_date
        self.end_date = end_date
        self.reschedule_date = reschedule_date
        self.duration = (self.end_date - self.start_date).total_seconds()

    @staticmethod
    @provide_session
    def find_for_task_instance(task_instance, session):
        """
        Returns all task reschedules for the task instance and try number,
        in ascending order.

        :param task_instance: the task instance to find task reschedules for
        :type task_instance: airflow.models.TaskInstance
        """
        TR = TaskReschedule
        return (
            session
            .query(TR)
            .filter(TR.dag_id == task_instance.dag_id,
                    TR.task_id == task_instance.task_id,
                    TR.execution_date == task_instance.execution_date,
                    TR.try_number == task_instance.try_number)
            .order_by(asc(TR.id))
            .all()
        )
|
apache-2.0
| -3,710,862,593,287,448,000
| 38.694118
| 83
| 0.648785
| false
| 4.026253
| false
| false
| false
|
cykerway/wmwm
|
setup.py
|
1
|
7315
|
#!/usr/bin/env python3
'''
setuptools based setup module;
see <https://packaging.python.org/en/latest/distributing.html>;
'''
from os import path
from setuptools import find_packages
from setuptools import setup
here = path.abspath(path.dirname(__file__))

# The long description shown on the PyPI project page is sourced from the
# README next to this script.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    # -- core metadata (see the setuptools keyword reference) --
    name='awd',
    version='1.3.4',
    url='https://github.com/cykerway/awd',
    author='Cyker Way',
    author_email='cykerway@example.com',
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),

    # -- PyPI page content --
    description='a window director;',
    long_description=long_description,
    long_description_content_type='text/markdown',

    # Trove classifiers; see <https://pypi.org/classifiers/>.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: End Users/Desktop',
        'Topic :: Desktop Environment',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    keywords='window layout',

    # No bundled package data or extra data files at the moment.
    package_data={},
    data_files=[],

    # Runtime dependencies installed alongside the package.
    install_requires=[
        'argparse-ext',
        'ewmh-ext',
        'logging-ext',
        'python-xlib',
    ],
    # No optional extras defined yet.
    extras_require={},

    # Console entry point: `awd` runs awd.__main__:main.
    entry_points={
        'console_scripts': [
            'awd=awd.__main__:main',
        ],
    },

    # Extra links rendered on the PyPI sidebar.
    project_urls={
        'Bug Reports': 'https://github.com/cykerway/awd/issues',
        'Source': 'https://github.com/cykerway/awd/',
    },
)
|
gpl-3.0
| -8,959,277,930,186,001,000
| 30.530172
| 136
| 0.579357
| false
| 3.930682
| true
| false
| false
|
nicko96/Chrome-Infra
|
glyco/glucose/install.py
|
1
|
8024
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import httplib2
import logging
import os
import sys
import urllib
from glucose import util
LOGGER = logging.getLogger(__name__)
DEFAULT_CACHE = os.path.join(os.path.expanduser('~'), '.glyco_wheelcache')
def get_sha1_from_filename(filename, verbose=True):
    """Extract the claimed sha1 from the filename.

    Also verify the name matches the wheel convention.

    Args:
      filename (str): path to a local file.
      verbose (bool): print messages only if True.
    Returns: claimed_hash(str) or None if no hash can be found.
    """
    # Only the base name matters; the directory is irrelevant to the check.
    basename = os.path.split(filename)[-1]
    wheel_info = util.WHEEL_FILE_RE.match(basename)
    if not wheel_info:
        if verbose:
            print >> sys.stderr, 'Invalid file name for wheel: %s' % basename
        return None
    if not wheel_info.group('build'):
        if verbose:
            print >> sys.stderr, ('No hash could be found in the filename.\n'
                                  'Has this file been generated with Glyco?\n'
                                  '%s' % basename)
        return None
    # Glyco stores the hash as the part after '_' in the wheel "build" tag.
    return wheel_info.group('build').split('_')[1]
def has_valid_sha1(filename, verbose=True):
    """Verify the hash of a whl file created by Glyco.

    Args:
      filename (str): path to a whl file.
      verbose (bool): print messages only if True.

    Returns:
      matches (bool): True if the file content and the name match.
    """
    expected = get_sha1_from_filename(filename, verbose=verbose)
    if not expected:
        return False
    with open(filename, 'rb') as wheel:
        actual = hashlib.sha1(wheel.read()).hexdigest()
    return actual == expected
def get_install_list(packages):
    """Consolidate the list of things to install.

    Each package spec becomes a dict with keys 'location' (normalized URI),
    'location_type' ('http', 'file', or 'ERROR') and 'error' (message or
    None).

    Args:
      packages (list of str): local paths or https/gs URLs.
    """
    install_list = []
    for package in packages:
        entry = {'location': package, 'location_type': 'ERROR', 'error': None}
        # Let's support only https. Security matters.
        if package.startswith('http://'):
            entry['error'] = ('Non-secure http is not supported, '
                              'please use https: %s' % package)
        elif package.startswith('https://'):
            entry['location_type'] = 'http'
        elif package.startswith('gs://'):
            # TODO(pgervais): handle Cloud Storage properly.
            entry['location_type'] = 'http'
            entry['location'] = ('https://storage.googleapis.com/'
                                 + package[len('gs://'):])
        elif os.path.isfile(package):
            entry['location_type'] = 'file'
            entry['location'] = 'file://%s' % urllib.pathname2url(
                os.path.abspath(package))
        else:
            entry['error'] = ('Cannot find this file locally: %s\n'
                              'If you did not specify a file but an URI, '
                              'then the protocol is probably not supported.'
                              % os.path.abspath(package))
        install_list.append(entry)
    return install_list
def fetch_packages(install_list, requester=httplib2.Http(),
                   cache=DEFAULT_CACHE, verbose=True):
    """Make sure there is a local copy of all packages.

    All paths returned by this function point at existing wheel files, with
    correct hashes.

    Args:
      install_list (list of dict): return value of get_install_list.
      requester (httplib2.Http): object to use to send http requests.
      cache (str): path to a local directory used to store wheel files
        downloaded from a remote storage.
      verbose (bool): print messages only if True.

    Returns:
      paths (list of strings): path to each local wheel file.

    Raises:
      ValueError: if any file failed to download or failed hash validation.
    """
    # NOTE(review): the default `requester` is a mutable default argument,
    # created once at import time and shared by every call -- confirm the
    # connection reuse is intentional.
    if not os.path.isdir(cache):
        os.mkdir(cache)
    paths = []
    # Flipped to False on any failure; the whole batch is rejected at the
    # end so a partial set of wheels is never returned.
    all_valid = True
    for source in install_list:
        if source['location_type'] == 'file':
            assert source['location'].startswith('file://')
            filename = source['location'][len('file://'):]
            # FIXME(pgervais): convert to a windows path (/ -> \) and unquote.
            if not has_valid_sha1(filename, verbose=verbose):
                if verbose:
                    print >> sys.stderr, ("File content does not match hash for %s"
                                          % filename)
                all_valid = False
            else:
                paths.append(filename)
        elif source['location_type'] == 'http':
            # This is an URL so the path separator is necessarily /
            base_filename = source['location'].split('/')[-1]
            filename = os.path.join(cache, base_filename)
            if not os.path.exists(filename):
                # Try to download file to local cache
                resp, content = requester.request(source['location'], 'GET')
                if resp['status'] == '200':
                    # Write to a temp name then rename, so a partial write
                    # never leaves a truncated wheel under the final name.
                    temp_filename = os.path.join(cache, base_filename + '.tmp')
                    try:
                        with open(temp_filename, 'wb') as f:
                            f.write(content)
                        os.rename(temp_filename, filename)
                    except OSError:
                        if os.path.isfile(temp_filename):
                            os.remove(temp_filename)
                else:
                    if verbose:
                        print >> sys.stderr, ("Got status %s when talking to %s" %
                                              (resp['status'], source['location']))
                    all_valid = False
            # We have to test again for existence since the download
            # could have failed.
            if os.path.exists(filename) and not has_valid_sha1(filename,
                                                               verbose=verbose):
                if verbose:
                    print >> sys.stderr, ("File content does not match hash for %s"
                                          % filename)
                all_valid = False
                # The file is bad anyway, there's no point in keeping it around.
                # Plus we probably want to retry the download some time in the future.
                os.remove(filename)
            else:
                # NOTE(review): when the download failed, this appends a
                # non-existent path -- harmless only because all_valid is
                # False and a ValueError is raised below before returning.
                paths.append(filename)
    if not all_valid:
        raise ValueError('Some errors occurred when getting wheel files.')
    return paths
def install(args):
    """Install wheel files.

    Validates the command-line package specs, fetches/validates every wheel,
    then pip-installs them offline (--no-index) into args.install_dir from
    inside a throwaway virtualenv. Returns None on success or when there is
    nothing to do, and a non-zero int on error (argparse-style exit codes).
    """
    if not args.packages:
        print 'No packages have been provided on the command-line, doing nothing.'
        return
    if not args.install_dir:
        print >> sys.stderr, ('No destination directory specified, aborting. \n'
                              'Use the --install-dir option to specify it')
        return 2
    install_list = get_install_list(args.packages)
    # Collect every spec-level problem so the user sees them all at once.
    error_msgs = [d['error'] for d in install_list if 'error' in d and d['error']]
    if error_msgs:
        print >> sys.stderr, ('\n'.join(error_msgs))
        print >> sys.stderr, 'Aborting (no packages installed)'
        return 1
    try:
        package_paths = fetch_packages(install_list)
    except ValueError:
        # fetch_packages already printed the specifics when verbose.
        print >> sys.stderr, 'Aborting (no packages installed)'
        return 1
    if not os.path.isdir(args.install_dir):
        os.mkdir(args.install_dir)
    # --no-index guarantees pip only uses the wheels validated above.
    with util.Virtualenv() as venv:
        cmd = (['pip', 'install', '--no-index', '--target', args.install_dir]
               + package_paths)
        LOGGER.debug('Running %s', ' '.join(cmd))
        venv.check_call(cmd)
def add_subparser(subparsers):
    """Register the 'install' command, plus 'lysis' as a synonym (and pun).

    Args:
      subparsers: output of argparse.ArgumentParser.add_subparsers()
    """
    install_parser = subparsers.add_parser('install',
                                           help='Install wheel files to a local '
                                                'directory (synonym of lysis)')
    lysis_parser = subparsers.add_parser('lysis',
                                         help='Install wheel files to a local '
                                              'directory (synonym of install)')
    # Both commands share the same handler and the same options.
    for subparser in (install_parser, lysis_parser):
        subparser.set_defaults(command=install)
        subparser.add_argument('--install-dir', '-i',
                               help='Directory where to install packages')
        subparser.add_argument('packages', metavar='PACKAGE', nargs='*',
                               help='Wheel files to install (path)')
|
bsd-3-clause
| 3,545,463,092,387,777,500
| 32.157025
| 80
| 0.61154
| false
| 4.056623
| false
| false
| false
|
laumann/servo
|
components/script/dom/bindings/codegen/CodegenRust.py
|
1
|
243619
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
from collections import defaultdict
import operator
import re
import string
import textwrap
import functools
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLType,
IDLInterfaceMember,
IDLUndefinedValue,
)
from Configuration import (
MemberIsUnforgeable,
getModuleFromObject,
getTypesFromCallback,
getTypesFromDescriptor,
getTypesFromDictionary,
)
AUTOGENERATED_WARNING_COMMENT = \
"/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
def replaceFileIfChanged(filename, newContents):
    """Write newContents to filename and report whether it was updated.

    XXXjdm: comparing against the old contents doesn't play well with make
    right now -- changing CodegenRust.py would otherwise cause many
    autogenerated bindings to be regenerated perpetually until the result
    actually differs. So the write is currently unconditional and this
    always returns True.
    """
    with open(filename, 'wb') as out:
        out.write(newContents)
    return True
def toStringBool(arg):
    """Return the lowercase boolean literal for arg's truthiness."""
    return "true" if arg else "false"
def toBindingNamespace(arg):
    """Return the binding namespace name for interface `arg`.

    "Foo" -> "FooBinding"; "Foo_workers" -> "FooBinding_workers".

    Implemented without a regex: under Python >= 3.7 the original
    re.sub("((_workers)?$)", ...) also matched a second, zero-width time
    after a non-empty "_workers" match (empty matches adjacent to a
    previous match are now substituted), yielding the wrong
    "FooBinding_workersBinding".
    """
    suffix = "_workers"
    if arg.endswith(suffix):
        return arg[:-len(suffix)] + "Binding" + suffix
    return arg + "Binding"
def stripTrailingWhitespace(text):
    """Strip trailing whitespace from every line, preserving a final newline."""
    stripped = [line.rstrip() for line in text.splitlines()]
    body = '\n'.join(stripped)
    # splitlines() drops the terminating newline, so restore it if present.
    return body + '\n' if text.endswith('\n') else body
def MakeNativeName(name):
    """Capitalize the first character of `name` (IDL name -> native name)."""
    return "%s%s" % (name[0].upper(), name[1:])
# Map from IDL builtin type tags to the Rust types used to represent them.
# NOTE(review): restricted float/double map to Finite<f32>/Finite<f64>,
# presumably a wrapper enforcing the finite (no NaN/Inf) invariant -- see
# the Servo Finite type for confirmation.
builtinNames = {
    IDLType.Tags.bool: 'bool',
    IDLType.Tags.int8: 'i8',
    IDLType.Tags.int16: 'i16',
    IDLType.Tags.int32: 'i32',
    IDLType.Tags.int64: 'i64',
    IDLType.Tags.uint8: 'u8',
    IDLType.Tags.uint16: 'u16',
    IDLType.Tags.uint32: 'u32',
    IDLType.Tags.uint64: 'u64',
    IDLType.Tags.unrestricted_float: 'f32',
    IDLType.Tags.float: 'Finite<f32>',
    IDLType.Tags.unrestricted_double: 'f64',
    IDLType.Tags.double: 'Finite<f64>'
}

# IDL type tags treated as numeric. Note this deliberately lists only the
# integer and *unrestricted* floating tags; the restricted (finite)
# float/double tags are absent.
numericTags = [
    IDLType.Tags.int8, IDLType.Tags.uint8,
    IDLType.Tags.int16, IDLType.Tags.uint16,
    IDLType.Tags.int32, IDLType.Tags.uint32,
    IDLType.Tags.int64, IDLType.Tags.uint64,
    IDLType.Tags.unrestricted_float,
    IDLType.Tags.unrestricted_double
]
def unwrapCastableObject(descriptor, source, codeOnFailure, conversionFunction):
    """Return the Rust match expression unwrapping `source`.

    Produces code calling `conversionFunction` on `source` based on the
    passed-in descriptor; `codeOnFailure` is the code run in the Err arm.
    """
    failure = CGIndenter(CGGeneric(codeOnFailure), 8).define()
    return ("match %s(%s) {\n"
            "    Ok(val) => val,\n"
            "    Err(()) => {\n"
            "%s\n"
            "    }\n"
            "}") % (conversionFunction, source, failure)
# Matches the start of any line whose first character is neither a newline
# nor '#': blank lines and #-prefixed lines are therefore never indented.
lineStartDetector = re.compile("^(?=[^\n#])", re.MULTILINE)


def indent(s, indentLevel=2):
    """Indent C++ code by `indentLevel` spaces.

    Weird secret feature: lines that start with # (such as #include lines
    or #ifdef/#endif) are left unindented, as are blank lines.
    """
    if not s:
        return s
    return lineStartDetector.sub(" " * indentLevel, s)
# dedent() and fill() are often called on the same string multiple
# times. We want to memoize their return values so we don't keep
# recomputing them all the time.
def memoize(fn):
    """
    Decorator to memoize a function of one argument. The cache just
    grows without bound.

    A distinct sentinel marks cache misses, so functions that
    legitimately return None are still cached (the original `get(arg)`
    conflated a miss with a cached None and re-invoked fn every call).
    """
    cache = {}
    missing = object()

    @functools.wraps(fn)
    def wrapper(arg):
        retval = cache.get(arg, missing)
        if retval is missing:
            retval = cache[arg] = fn(arg)
        return retval
    return wrapper
@memoize
def dedent(s):
    """Dedent `s`, first dropping a single leading blank line if present.

    Memoized because templates are dedented repeatedly (see memoize).
    """
    return textwrap.dedent(s[1:] if s.startswith('\n') else s)
# This works by transforming the fill()-template to an equivalent
# string.Template.
# Captures: (leading spaces)$*{name}(optional trailing newline).
fill_multiline_substitution_re = re.compile(r"( *)\$\*{(\w+)}(\n)?")


@memoize
def compile_fill_template(template):
    """
    Helper function for fill(). Given the template string passed to fill(),
    do the reusable part of template processing and return a pair (t,
    argModList) that can be used every time fill() is called with that
    template argument.

    argsModList is list of tuples that represent modifications to be
    made to args. Each modification has, in order: i) the arg name,
    ii) the modified name, iii) the indent depth.
    """
    t = dedent(template)
    # Templates must be whole lines (or a single line fragment with no
    # embedded newline); anything else indicates a malformed literal.
    assert t.endswith("\n") or "\n" not in t
    argModList = []

    def replace(match):
        """
        Replaces a line like '    $*{xyz}\n' with '${xyz_n}',
        where n is the indent depth, and add a corresponding entry to
        argModList.

        Note that this needs to close over argModList, so it has to be
        defined inside compile_fill_template().
        """
        indentation, name, nl = match.groups()
        depth = len(indentation)
        # Check that $*{xyz} appears by itself on a line.
        prev = match.string[:match.start()]
        if (prev and not prev.endswith("\n")) or nl is None:
            raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name)
        # Now replace this whole line of template with the indented equivalent.
        # Encoding the depth in the name lets fill() indent the value later.
        modified_name = name + "_" + str(depth)
        argModList.append((name, modified_name, depth))
        return "${" + modified_name + "}"

    t = re.sub(fill_multiline_substitution_re, replace, t)
    return (string.Template(t), argModList)
def fill(template, **args):
    """
    Convenience function for filling in a multiline template.

    `fill(template, name1=v1, name2=v2)` is a lot like
    `string.Template(template).substitute({"name1": v1, "name2": v2})`,
    except the template is dedented first (a leading blank line is
    stripped), and the special form `$*{name}` — which must appear alone
    on a line, optionally indented with spaces — pastes in a multiline
    value indented to match. An empty value for `$*{name}` produces no
    output at all, not even a blank line.
    """
    t, argModList = compile_fill_template(template)
    # Rewrite each multi-line argument under its depth-encoded name so the
    # compiled string.Template can substitute it directly.
    for name, modified_name, depth in argModList:
        value = args[name]
        if value and not value.endswith("\n"):
            raise ValueError("Argument %s with value %r is missing a newline" % (name, value))
        args[modified_name] = indent(value, depth)
    return t.substitute(args)
class CGThing():
    """
    Abstract base class for things that spit out code.
    """
    def __init__(self):
        # No state to initialize yet.
        pass

    def define(self):
        """Produce code for a Rust file."""
        raise NotImplementedError  # Subclasses must override.
class CGMethodCall(CGThing):
"""
A class to generate selection of a method signature from a set of
signatures and generation of a call to that signature.
"""
def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
CGThing.__init__(self)
methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)
def requiredArgCount(signature):
arguments = signature[1]
if len(arguments) == 0:
return 0
requiredArgs = len(arguments)
while requiredArgs and arguments[requiredArgs - 1].optional:
requiredArgs -= 1
return requiredArgs
signatures = method.signatures()
def getPerSignatureCall(signature, argConversionStartsAt=0):
signatureIndex = signatures.index(signature)
return CGPerSignatureCall(signature[0], argsPre, signature[1],
nativeMethodName + '_' * signatureIndex,
static, descriptor,
method, argConversionStartsAt)
if len(signatures) == 1:
# Special case: we can just do a per-signature method call
# here for our one signature and not worry about switching
# on anything.
signature = signatures[0]
self.cgRoot = CGList([getPerSignatureCall(signature)])
requiredArgs = requiredArgCount(signature)
if requiredArgs > 0:
code = (
"if argc < %d {\n"
" throw_type_error(cx, \"Not enough arguments to %s.\");\n"
" return false;\n"
"}" % (requiredArgs, methodName))
self.cgRoot.prepend(
CGWrapper(CGGeneric(code), pre="\n", post="\n"))
return
# Need to find the right overload
maxArgCount = method.maxArgCount
allowedArgCounts = method.allowedArgCounts
argCountCases = []
for argCount in allowedArgCounts:
possibleSignatures = method.signaturesForArgCount(argCount)
if len(possibleSignatures) == 1:
# easy case!
signature = possibleSignatures[0]
argCountCases.append(CGCase(str(argCount), getPerSignatureCall(signature)))
continue
distinguishingIndex = method.distinguishingIndexForArgCount(argCount)
# We can't handle unions at the distinguishing index.
for (returnType, args) in possibleSignatures:
if args[distinguishingIndex].type.isUnion():
raise TypeError("No support for unions as distinguishing "
"arguments yet: %s",
args[distinguishingIndex].location)
# Convert all our arguments up to the distinguishing index.
# Doesn't matter which of the possible signatures we use, since
# they all have the same types up to that point; just use
# possibleSignatures[0]
caseBody = [
CGArgumentConverter(possibleSignatures[0][1][i],
i, "args", "argc", descriptor)
for i in range(0, distinguishingIndex)]
# Select the right overload from our set.
distinguishingArg = "args.get(%d)" % distinguishingIndex
def pickFirstSignature(condition, filterLambda):
sigs = filter(filterLambda, possibleSignatures)
assert len(sigs) < 2
if len(sigs) > 0:
call = getPerSignatureCall(sigs[0], distinguishingIndex)
if condition is None:
caseBody.append(call)
else:
caseBody.append(CGGeneric("if " + condition + " {"))
caseBody.append(CGIndenter(call))
caseBody.append(CGGeneric("}"))
return True
return False
# First check for null or undefined
pickFirstSignature("%s.isNullOrUndefined()" % distinguishingArg,
lambda s: (s[1][distinguishingIndex].type.nullable() or
s[1][distinguishingIndex].type.isDictionary()))
# Now check for distinguishingArg being an object that implements a
# non-callback interface. That includes typed arrays and
# arraybuffers.
interfacesSigs = [
s for s in possibleSignatures
if (s[1][distinguishingIndex].type.isObject() or
s[1][distinguishingIndex].type.isNonCallbackInterface())]
# There might be more than one of these; we need to check
# which ones we unwrap to.
if len(interfacesSigs) > 0:
# The spec says that we should check for "platform objects
# implementing an interface", but it's enough to guard on these
# being an object. The code for unwrapping non-callback
# interfaces and typed arrays will just bail out and move on to
# the next overload if the object fails to unwrap correctly. We
# could even not do the isObject() check up front here, but in
# cases where we have multiple object overloads it makes sense
# to do it only once instead of for each overload. That will
# also allow the unwrapping test to skip having to do codegen
# for the null-or-undefined case, which we already handled
# above.
caseBody.append(CGGeneric("if %s.get().is_object() {" %
(distinguishingArg)))
for idx, sig in enumerate(interfacesSigs):
caseBody.append(CGIndenter(CGGeneric("loop {")))
type = sig[1][distinguishingIndex].type
# The argument at index distinguishingIndex can't possibly
# be unset here, because we've already checked that argc is
# large enough that we can examine this argument.
info = getJSToNativeConversionInfo(
type, descriptor, failureCode="break;", isDefinitelyObject=True)
template = info.template
declType = info.declType
testCode = instantiateJSToNativeConversionTemplate(
template,
{"val": distinguishingArg},
declType,
"arg%d" % distinguishingIndex)
# Indent by 4, since we need to indent further than our "do" statement
caseBody.append(CGIndenter(testCode, 4))
# If we got this far, we know we unwrapped to the right
# interface, so just do the call. Start conversion with
# distinguishingIndex + 1, since we already converted
# distinguishingIndex.
caseBody.append(CGIndenter(
getPerSignatureCall(sig, distinguishingIndex + 1), 4))
caseBody.append(CGIndenter(CGGeneric("}")))
caseBody.append(CGGeneric("}"))
# XXXbz Now we're supposed to check for distinguishingArg being
# an array or a platform object that supports indexed
# properties... skip that last for now. It's a bit of a pain.
pickFirstSignature("%s.get().isObject() && IsArrayLike(cx, &%s.get().toObject())" %
(distinguishingArg, distinguishingArg),
lambda s:
(s[1][distinguishingIndex].type.isArray() or
s[1][distinguishingIndex].type.isSequence() or
s[1][distinguishingIndex].type.isObject()))
# Check for Date objects
# XXXbz Do we need to worry about security wrappers around the Date?
pickFirstSignature("%s.get().isObject() && JS_ObjectIsDate(cx, &%s.get().toObject())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isDate() or
s[1][distinguishingIndex].type.isObject()))
# Check for vanilla JS objects
# XXXbz Do we need to worry about security wrappers?
pickFirstSignature("%s.get().is_object() && !is_platform_object(%s.get().to_object())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isCallback() or
s[1][distinguishingIndex].type.isCallbackInterface() or
s[1][distinguishingIndex].type.isDictionary() or
s[1][distinguishingIndex].type.isObject()))
# The remaining cases are mutually exclusive. The
# pickFirstSignature calls are what change caseBody
# Check for strings or enums
if pickFirstSignature(None,
lambda s: (s[1][distinguishingIndex].type.isString() or
s[1][distinguishingIndex].type.isEnum())):
pass
# Check for primitives
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isPrimitive()):
pass
# Check for "any"
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isAny()):
pass
else:
# Just throw; we have no idea what we're supposed to
# do with this.
caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))
argCountCases.append(CGCase(str(argCount),
CGList(caseBody, "\n")))
overloadCGThings = []
overloadCGThings.append(
CGGeneric("let argcount = cmp::min(argc, %d);" %
maxArgCount))
overloadCGThings.append(
CGSwitch("argcount",
argCountCases,
CGGeneric("throw_type_error(cx, \"Not enough arguments to %s.\");\n"
"return false;" % methodName)))
# XXXjdm Avoid unreachable statement warnings
# overloadCGThings.append(
# CGGeneric('panic!("We have an always-returning default case");\n'
# 'return false;'))
self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
pre="\n")
    def define(self):
        # Emit the accumulated overload-dispatch code built in __init__.
        return self.cgRoot.define()
def dictionaryHasSequenceMember(dictionary):
    """
    Return whether the given IDL dictionary (or any of its ancestors) has a
    member whose type is, or contains, a sequence.
    """
    for member in dictionary.members:
        if typeIsSequenceOrHasSequenceMember(member.type):
            return True
    parent = dictionary.parent
    return parent and dictionaryHasSequenceMember(parent)
def typeIsSequenceOrHasSequenceMember(type):
    """
    Return whether *type* is a sequence, or (for arrays, dictionaries and
    unions) transitively contains a sequence.
    """
    if type.nullable():
        # Look through the nullable wrapper at the inner type.
        type = type.inner
    if type.isSequence():
        return True
    if type.isArray():
        return typeIsSequenceOrHasSequenceMember(type.inner)
    if type.isDictionary():
        return dictionaryHasSequenceMember(type.inner)
    if type.isUnion():
        return any(typeIsSequenceOrHasSequenceMember(m.type)
                   for m in type.flatMemberTypes)
    return False
def typeNeedsRooting(type, descriptorProvider):
    """
    Return whether values of *type* must be rooted by the caller, as recorded
    on the interface's descriptor.  Non-interface types never need rooting.
    """
    if not type.isGeckoInterface():
        return False
    interfaceName = type.unroll().inner.identifier.name
    return descriptorProvider.getDescriptor(interfaceName).needsRooting
def union_native_type(t):
    """Return the generated Rust type path for the union type *t*."""
    return "UnionTypes::%s" % t.unroll().name
class JSToNativeConversionInfo():
    """
    Describes one JS-to-native conversion.

    Attributes:
      template:     conversion code; ``${val}`` is a placeholder for a handle
                    to the JS::Value being converted.
      default:      Rust code for the IDL default value, or None.
      declType:     CGThing naming the native type converted to, or None when
                    the conversion code is used as-is.
      needsRooting: whether the caller must root the result.
    """
    def __init__(self, template, default=None, declType=None,
                 needsRooting=False):
        assert isinstance(template, str)
        assert declType is None or isinstance(declType, CGThing)
        self.template = template
        self.default = default
        self.declType = declType
        self.needsRooting = needsRooting
def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None,
                                isDefinitelyObject=False,
                                isMember=False,
                                isArgument=False,
                                invalidEnumValueFatal=True,
                                defaultValue=None,
                                treatNullAs="Default",
                                isEnforceRange=False,
                                isClamp=False,
                                exceptionCode=None,
                                allowTreatNonObjectAsNull=False,
                                isCallbackReturnValue=False,
                                sourceDescription="value"):
    """
    Get a template for converting a JS value to a native object based on the
    given type and descriptor.  If failureCode is given, then we're actually
    testing whether we can convert the argument to the desired type.  That
    means that failures to convert due to the JS value being the wrong type of
    value need to use failureCode instead of throwing exceptions.  Failures to
    convert that are due to JS exceptions (from toString or valueOf methods) or
    out of memory conditions need to throw exceptions no matter what
    failureCode is.

    If isDefinitelyObject is True, that means we know the value
    isObject() and we have no need to recheck that.

    if isMember is True, we're being converted from a property of some
    JS object, not from an actual method argument, so we can't rely on
    our jsval being rooted or outliving us in any way.  Any caller
    passing true needs to ensure that it is handled correctly in
    typeIsSequenceOrHasSequenceMember.

    invalidEnumValueFatal controls whether an invalid enum value conversion
    attempt will throw (if true) or simply return without doing anything (if
    false).

    If defaultValue is not None, it's the IDL default value for this conversion

    If isEnforceRange is true, we're converting an integer and throwing if the
    value is out of range.

    If isClamp is true, we're converting an integer and clamping if the
    value is out of range.

    If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
    extended attributes on nullable callback functions will be honored.

    The return value from this function is an object of JSToNativeConversionInfo consisting of four things:

    1)  A string representing the conversion code.  This will have template
        substitution performed on it as follows:

          ${val} replaced by an expression for the JS::Value in question

    2)  A string or None representing Rust code for the default value (if any).

    3)  A CGThing representing the native C++ type we're converting to
        (declType).  This is allowed to be None if the conversion code is
        supposed to be used as-is.

    4)  A boolean indicating whether the caller has to root the result.
    """
    # We should not have a defaultValue if we know we're an object
    assert not isDefinitelyObject or defaultValue is None

    # If exceptionCode is not set, we'll just rethrow the exception we got.
    # Note that we can't just set failureCode to exceptionCode, because setting
    # failureCode will prevent pending exceptions from being set in cases when
    # they really should be!
    if exceptionCode is None:
        exceptionCode = "return false;"

    needsRooting = typeNeedsRooting(type, descriptorProvider)

    # Wrap a (template, declType) pair into the returned info object, checking
    # that a default is supplied exactly when the IDL declares one.
    def handleOptional(template, declType, default):
        assert (defaultValue is None) == (default is None)
        return JSToNativeConversionInfo(template, default, declType, needsRooting=needsRooting)

    # Unfortunately, .capitalize() on a string will lowercase things inside the
    # string, which we do not want.
    def firstCap(string):
        return string[0].upper() + string[1:]

    # Helper functions for dealing with failures due to the JS value being the
    # wrong type of value.
    def onFailureNotAnObject(failureCode):
        return CGWrapper(
            CGGeneric(
                failureCode or
                ('throw_type_error(cx, "%s is not an object.");\n'
                 '%s' % (firstCap(sourceDescription), exceptionCode))),
            post="\n")

    def onFailureNotCallable(failureCode):
        return CGWrapper(
            CGGeneric(
                failureCode or
                ('throw_type_error(cx, \"%s is not callable.\");\n'
                 '%s' % (firstCap(sourceDescription), exceptionCode))))

    # A helper function for handling null default values. Checks that the
    # default value, if it exists, is null.
    def handleDefaultNull(nullValue):
        if defaultValue is None:
            return None
        if not isinstance(defaultValue, IDLNullValue):
            raise TypeError("Can't handle non-null default value here")
        assert type.nullable() or type.isDictionary()
        return nullValue

    # A helper function for wrapping up the template body for
    # possibly-nullable objecty stuff
    def wrapObjectTemplate(templateBody, nullValue, isDefinitelyObject, type,
                           failureCode=None):
        if not isDefinitelyObject:
            # Handle the non-object cases by wrapping up the whole
            # thing in an if cascade.
            templateBody = (
                "if ${val}.get().is_object() {\n" +
                CGIndenter(CGGeneric(templateBody)).define() + "\n")
            if type.nullable():
                templateBody += (
                    "} else if ${val}.get().is_null_or_undefined() {\n"
                    "    %s\n") % nullValue
            templateBody += (
                "} else {\n" +
                CGIndenter(onFailureNotAnObject(failureCode)).define() +
                "}")
        return templateBody

    assert not (isEnforceRange and isClamp)  # These are mutually exclusive

    if type.isArray():
        raise TypeError("Can't handle array arguments yet")

    if type.isSequence():
        # Use the same type that for return values
        declType = getRetvalDeclarationForType(type, descriptorProvider)
        config = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
        templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
                        "    Ok(value) => value,\n"
                        "    Err(()) => { %s },\n"
                        "}" % (config, exceptionCode))
        return handleOptional(templateBody, declType, handleDefaultNull("None"))

    if type.isUnion():
        declType = CGGeneric(union_native_type(type))
        if type.nullable():
            declType = CGWrapper(declType, pre="Option<", post=" >")
        templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
                        "    Ok(value) => value,\n"
                        "    Err(()) => { %s },\n"
                        "}" % exceptionCode)
        return handleOptional(templateBody, declType, handleDefaultNull("None"))

    if type.isGeckoInterface():
        assert not isEnforceRange and not isClamp
        descriptor = descriptorProvider.getDescriptor(
            type.unroll().inner.identifier.name)

        # Callback interfaces are held behind Rc and constructed straight from
        # the JS object; no unwrap step is needed.
        if descriptor.interface.isCallback():
            name = descriptor.nativeType
            declType = CGWrapper(CGGeneric(name), pre="Rc<", post=">")
            template = "%s::new(${val}.get().to_object())" % name
            if type.nullable():
                declType = CGWrapper(declType, pre="Option<", post=">")
                template = wrapObjectTemplate("Some(%s)" % template, "None",
                                              isDefinitelyObject, type,
                                              failureCode)
            return handleOptional(template, declType, handleDefaultNull("None"))

        # Choose the unwrap helper and native type depending on whether this
        # is a variadic member or a plain argument.
        conversionFunction = "root_from_handlevalue"
        descriptorType = descriptor.returnType
        if isMember == "Variadic":
            conversionFunction = "native_from_handlevalue"
            descriptorType = descriptor.nativeType
        elif isArgument:
            descriptorType = descriptor.argumentType

        templateBody = ""
        if descriptor.interface.isConsequential():
            raise TypeError("Consequential interface %s being used as an "
                            "argument" % descriptor.interface.identifier.name)

        if failureCode is None:
            substitutions = {
                "sourceDescription": sourceDescription,
                "interface": descriptor.interface.identifier.name,
                "exceptionCode": exceptionCode,
            }
            unwrapFailureCode = string.Template(
                'throw_type_error(cx, "${sourceDescription} does not '
                'implement interface ${interface}.");\n'
                '${exceptionCode}').substitute(substitutions)
        else:
            unwrapFailureCode = failureCode

        templateBody = unwrapCastableObject(
            descriptor, "${val}", unwrapFailureCode, conversionFunction)

        declType = CGGeneric(descriptorType)
        if type.nullable():
            templateBody = "Some(%s)" % templateBody
            declType = CGWrapper(declType, pre="Option<", post=">")

        templateBody = wrapObjectTemplate(templateBody, "None",
                                          isDefinitelyObject, type, failureCode)

        return handleOptional(templateBody, declType, handleDefaultNull("None"))

    if type.isSpiderMonkeyInterface():
        raise TypeError("Can't handle SpiderMonkey interface arguments yet")

    if type.isDOMString():
        nullBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
        conversionCode = (
            "match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
            "    Ok(strval) => strval,\n"
            "    Err(_) => { %s },\n"
            "}" % (nullBehavior, exceptionCode))

        if defaultValue is None:
            default = None
        elif isinstance(defaultValue, IDLNullValue):
            assert type.nullable()
            default = "None"
        else:
            assert defaultValue.type.tag() == IDLType.Tags.domstring
            default = 'DOMString::from("%s")' % defaultValue.value
            if type.nullable():
                default = "Some(%s)" % default

        declType = "DOMString"
        if type.nullable():
            declType = "Option<%s>" % declType

        return handleOptional(conversionCode, CGGeneric(declType), default)

    if type.isUSVString():
        assert not isEnforceRange and not isClamp
        conversionCode = (
            "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
            "    Ok(strval) => strval,\n"
            "    Err(_) => { %s },\n"
            "}" % exceptionCode)

        if defaultValue is None:
            default = None
        elif isinstance(defaultValue, IDLNullValue):
            assert type.nullable()
            default = "None"
        else:
            assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.usvstring)
            default = 'USVString("%s".to_owned())' % defaultValue.value
            if type.nullable():
                default = "Some(%s)" % default

        declType = "USVString"
        if type.nullable():
            declType = "Option<%s>" % declType

        return handleOptional(conversionCode, CGGeneric(declType), default)

    if type.isByteString():
        assert not isEnforceRange and not isClamp
        conversionCode = (
            "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
            "    Ok(strval) => strval,\n"
            "    Err(_) => { %s },\n"
            "}" % exceptionCode)

        declType = CGGeneric("ByteString")
        if type.nullable():
            declType = CGWrapper(declType, pre="Option<", post=">")

        return handleOptional(conversionCode, declType, handleDefaultNull("None"))

    if type.isEnum():
        assert not isEnforceRange and not isClamp
        if type.nullable():
            raise TypeError("We don't support nullable enumerated arguments "
                            "yet")
        enum = type.inner.identifier.name
        if invalidEnumValueFatal:
            handleInvalidEnumValueCode = exceptionCode
        else:
            handleInvalidEnumValueCode = "return true;"

        template = (
            "match find_enum_string_index(cx, ${val}, %(values)s) {\n"
            "    Err(_) => { %(exceptionCode)s },\n"
            "    Ok(None) => { %(handleInvalidEnumValueCode)s },\n"
            "    Ok(Some(index)) => {\n"
            "        //XXXjdm need some range checks up in here.\n"
            "        mem::transmute(index)\n"
            "    },\n"
            "}" % {"values": enum + "Values::strings",
                   "exceptionCode": exceptionCode,
                   "handleInvalidEnumValueCode": handleInvalidEnumValueCode})

        if defaultValue is not None:
            assert defaultValue.type.tag() == IDLType.Tags.domstring
            default = "%s::%s" % (enum, getEnumValueName(defaultValue.value))
        else:
            default = None

        return handleOptional(template, CGGeneric(enum), default)

    if type.isCallback():
        assert not isEnforceRange and not isClamp
        assert not type.treatNonCallableAsNull()
        assert not type.treatNonObjectAsNull() or type.nullable()
        assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()

        callback = type.unroll().callback
        declType = CGGeneric('%s::%s' % (getModuleFromObject(callback), callback.identifier.name))
        finalDeclType = CGTemplatedType("Rc", declType)
        conversion = CGCallbackTempRoot(declType.define())

        if type.nullable():
            declType = CGTemplatedType("Option", declType)
            finalDeclType = CGTemplatedType("Option", finalDeclType)
            conversion = CGWrapper(conversion, pre="Some(", post=")")

        if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
            if not isDefinitelyObject:
                haveObject = "${val}.get().is_object()"
                template = CGIfElseWrapper(haveObject,
                                           conversion,
                                           CGGeneric("None")).define()
            else:
                template = conversion
        else:
            template = CGIfElseWrapper("IsCallable(${val}.get().to_object())",
                                       conversion,
                                       onFailureNotCallable(failureCode)).define()
            template = wrapObjectTemplate(
                template,
                "None",
                isDefinitelyObject,
                type,
                failureCode)

        if defaultValue is not None:
            assert allowTreatNonObjectAsNull
            assert type.treatNonObjectAsNull()
            assert type.nullable()
            assert isinstance(defaultValue, IDLNullValue)
            default = "None"
        else:
            default = None

        return JSToNativeConversionInfo(template, default, finalDeclType, needsRooting=needsRooting)

    if type.isAny():
        assert not isEnforceRange and not isClamp

        declType = ""
        default = ""
        if isMember == "Dictionary":
            # TODO: Need to properly root dictionaries
            # https://github.com/servo/servo/issues/6381
            declType = CGGeneric("JSVal")

            if defaultValue is None:
                default = None
            elif isinstance(defaultValue, IDLNullValue):
                default = "NullValue()"
            elif isinstance(defaultValue, IDLUndefinedValue):
                default = "UndefinedValue()"
            else:
                raise TypeError("Can't handle non-null, non-undefined default value here")
        else:
            declType = CGGeneric("HandleValue")

            if defaultValue is None:
                default = None
            elif isinstance(defaultValue, IDLNullValue):
                default = "HandleValue::null()"
            elif isinstance(defaultValue, IDLUndefinedValue):
                default = "HandleValue::undefined()"
            else:
                raise TypeError("Can't handle non-null, non-undefined default value here")

        return handleOptional("${val}", declType, default)

    if type.isObject():
        assert not isEnforceRange and not isClamp
        # TODO: Need to root somehow
        # https://github.com/servo/servo/issues/6382
        declType = CGGeneric("*mut JSObject")
        templateBody = wrapObjectTemplate("${val}.get().to_object()",
                                          "ptr::null_mut()",
                                          isDefinitelyObject, type, failureCode)
        return handleOptional(templateBody, declType,
                              handleDefaultNull("ptr::null_mut()"))

    if type.isDictionary():
        if failureCode is not None:
            raise TypeError("Can't handle dictionaries when failureCode is not None")
        # There are no nullable dictionaries
        assert not type.nullable()

        typeName = "%s::%s" % (CGDictionary.makeModuleName(type.inner),
                               CGDictionary.makeDictionaryName(type.inner))
        declType = CGGeneric(typeName)
        template = ("match %s::new(cx, ${val}) {\n"
                    "    Ok(dictionary) => dictionary,\n"
                    "    Err(_) => { %s },\n"
                    "}" % (typeName, exceptionCode))

        return handleOptional(template, declType, handleDefaultNull("%s::empty(cx)" % typeName))

    if type.isVoid():
        # This one only happens for return values, and its easy: Just
        # ignore the jsval.
        return JSToNativeConversionInfo("", None, None, needsRooting=False)

    if not type.isPrimitive():
        raise TypeError("Need conversion for argument type '%s'" % str(type))

    # Remaining cases are primitives (integers, floats, booleans).
    conversionBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)

    if failureCode is None:
        # NOTE(review): this assignment appears unused below — the primitive
        # template only interpolates exceptionCode.  Confirm before removing.
        failureCode = 'return false'

    declType = CGGeneric(builtinNames[type.tag()])
    if type.nullable():
        declType = CGWrapper(declType, pre="Option<", post=">")

    template = (
        "match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
        "    Ok(v) => v,\n"
        "    Err(_) => { %s }\n"
        "}" % (conversionBehavior, exceptionCode))

    if defaultValue is not None:
        if isinstance(defaultValue, IDLNullValue):
            assert type.nullable()
            defaultStr = "None"
        else:
            tag = defaultValue.type.tag()
            if tag in [IDLType.Tags.float, IDLType.Tags.double]:
                defaultStr = "Finite::wrap(%s)" % defaultValue.value
            elif tag in numericTags:
                defaultStr = str(defaultValue.value)
            else:
                assert tag == IDLType.Tags.bool
                defaultStr = toStringBool(defaultValue.value)

            if type.nullable():
                defaultStr = "Some(%s)" % defaultStr
    else:
        defaultStr = None

    return handleOptional(template, declType, defaultStr)
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
                                            declType, declName):
    """
    Take the templateBody and declType as returned by
    getJSToNativeConversionInfo, a set of replacements as required by the
    strings in such a templateBody, and a declName, and generate code that
    binds the converted value to a stack variable named declName (or emits
    the bare conversion when declType is None).
    """
    conversion = CGGeneric(string.Template(templateBody).substitute(replacements))
    result = CGList([], "\n")
    if declType is None:
        result.append(conversion)
    else:
        # let <declName>: <declType> = <conversion>;
        pieces = [CGGeneric("let "),
                  CGGeneric(declName),
                  CGGeneric(": "),
                  declType,
                  CGGeneric(" = "),
                  conversion,
                  CGGeneric(";")]
        result.append(CGList(pieces))
    # Trailing empty entry yields an extra newline after the conversion.
    result.append(CGGeneric(""))
    return result
def convertConstIDLValueToJSVal(value):
    """
    Map an IDL constant value to the ConstantVal variant string used by the
    generated Rust code.

    Raises TypeError for value types with no representation.
    """
    if isinstance(value, IDLNullValue):
        return "NullVal"
    tag = value.type.tag()
    if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
               IDLType.Tags.uint16, IDLType.Tags.int32]:
        return "IntVal(%s)" % (value.value)
    if tag == IDLType.Tags.uint32:
        return "UintVal(%s)" % (value.value)
    if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
        # 64-bit constants don't fit in a JS int; represent as a double.
        return "DoubleVal(%s)" % (value.value)
    if tag == IDLType.Tags.bool:
        return "BoolVal(true)" if value.value else "BoolVal(false)"
    if tag in [IDLType.Tags.unrestricted_float, IDLType.Tags.float,
               IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
        return "DoubleVal(%s)" % (value.value)
    # Use %-formatting rather than "+" here: concatenating a str with the
    # IDLType object raised "can only concatenate str", masking this message.
    raise TypeError("Const value of unhandled type: %s" % value.type)
class CGArgumentConverter(CGThing):
    """
    A class that takes an IDL argument object, its index in the
    argument list, and the argv and argc strings and generates code to
    unwrap the argument to the right native type.
    """
    def __init__(self, argument, index, args, argc, descriptorProvider,
                 invalidEnumValueFatal=True):
        CGThing.__init__(self)
        # An argument with a default value must also be optional.
        assert not argument.defaultValue or argument.optional

        replacer = {
            "index": index,
            "argc": argc,
            "args": args
        }
        # ${val} resolves to "<args>.get(<index>)" for non-variadic arguments.
        replacementVariables = {
            "val": string.Template("${args}.get(${index})").substitute(replacer),
        }

        info = getJSToNativeConversionInfo(
            argument.type,
            descriptorProvider,
            invalidEnumValueFatal=invalidEnumValueFatal,
            defaultValue=argument.defaultValue,
            treatNullAs=argument.treatNullAs,
            isEnforceRange=argument.enforceRange,
            isClamp=argument.clamp,
            isMember="Variadic" if argument.variadic else False,
            allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())
        template = info.template
        default = info.default
        declType = info.declType

        if not argument.variadic:
            if argument.optional:
                condition = "{args}.get({index}).is_undefined()".format(**replacer)
                if argument.defaultValue:
                    # Use the IDL default when the argument was not passed.
                    assert default
                    template = CGIfElseWrapper(condition,
                                               CGGeneric(default),
                                               CGGeneric(template)).define()
                else:
                    # No default: wrap the converted value in Option<>.
                    assert not default
                    declType = CGWrapper(declType, pre="Option<", post=">")
                    template = CGIfElseWrapper(condition,
                                               CGGeneric("None"),
                                               CGGeneric("Some(%s)" % template)).define()
            else:
                assert not default

            self.converter = instantiateJSToNativeConversionTemplate(
                template, replacementVariables, declType, "arg%d" % index)
        else:
            # Variadic: convert each trailing argument in a loop, collecting
            # into a vector named arg<index>.
            assert argument.optional
            variadicConversion = {
                "val": string.Template("${args}.get(variadicArg)").substitute(replacer),
            }
            innerConverter = [instantiateJSToNativeConversionTemplate(
                template, variadicConversion, declType, "slot")]

            arg = "arg%d" % index
            if argument.type.isGeckoInterface():
                # Interface values go into a rooted vector.
                vec = "RootedVec::new()"
                innerConverter.append(CGGeneric("%s.push(JS::from_ref(&*slot));" % arg))
            else:
                vec = "vec![]"
                innerConverter.append(CGGeneric("%s.push(slot);" % arg))
            inner = CGIndenter(CGList(innerConverter, "\n"), 8).define()

            self.converter = CGGeneric("""\
let mut %(arg)s = %(vec)s;
if %(argc)s > %(index)s {
    %(arg)s.reserve(%(argc)s as usize - %(index)s);
    for variadicArg in %(index)s..%(argc)s {
%(inner)s
    }
}""" % {'arg': arg, 'argc': argc, 'index': index, 'inner': inner, 'vec': vec})

    def define(self):
        # Emit the generated argument-conversion code.
        return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return true;', pre=''):
    """
    Reflect a Rust value into JS.

      * 'jsvalRef': a MutableHandleValue in which to store the result
                    of the conversion;
      * 'result': the name of the variable in which the Rust value is stored;
      * 'successCode': the code to run once we have done the conversion.
      * 'pre': code to run before the conversion if rooting is necessary
    """
    lines = [pre, "(%s).to_jsval(cx, %s);" % (result, jsvalRef)]
    if successCode:
        lines.append(successCode)
    return "\n".join(lines)
def typeNeedsCx(type, retVal=False):
    """
    Return whether converting values of *type* requires a JSContext.
    None (no type) never needs one.
    """
    if type is None:
        return False
    unwrapped = type.inner if type.nullable() else type
    if unwrapped.isSequence() or unwrapped.isArray():
        unwrapped = unwrapped.inner
    if unwrapped.isUnion():
        return any(typeNeedsCx(member)
                   for member in unwrapped.unroll().flatMemberTypes)
    if retVal and unwrapped.isSpiderMonkeyInterface():
        return True
    return unwrapped.isAny() or unwrapped.isObject()
# Returns a conversion behavior suitable for a type
def getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs):
    """
    Return the Rust conversion-configuration expression passed to
    FromJSValConvertible::from_jsval for the given IDL type.
    """
    if type.isSequence():
        # Configure based on the element type.
        return getConversionConfigForType(type.unroll(), isEnforceRange,
                                          isClamp, treatNullAs)
    if type.isDOMString():
        assert not isEnforceRange and not isClamp
        behaviors = {
            "Default": "StringificationBehavior::Default",
            "EmptyString": "StringificationBehavior::Empty",
        }
        if treatNullAs not in behaviors:
            raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
        if type.nullable():
            # Note: the actual behavior passed here doesn't matter for
            # nullable strings.
            return "StringificationBehavior::Default"
        return behaviors[treatNullAs]
    if type.isInteger():
        if isEnforceRange:
            return "ConversionBehavior::EnforceRange"
        if isClamp:
            return "ConversionBehavior::Clamp"
        return "ConversionBehavior::Default"
    assert not isEnforceRange and not isClamp
    return "()"
# Returns a CGThing containing the type of the return value.
def getRetvalDeclarationForType(returnType, descriptorProvider):
    """
    Return a CGThing naming the native Rust type used to hold a return value
    of the given IDL type; nullable types are wrapped in Option<>.
    """
    def inOption(result):
        # Wrap the result in Option<> when the IDL type is nullable.
        if returnType.nullable():
            result = CGWrapper(result, pre="Option<", post=">")
        return result

    if returnType is None or returnType.isVoid():
        # Nothing to declare
        return CGGeneric("()")
    if returnType.isPrimitive() and returnType.tag() in builtinNames:
        return inOption(CGGeneric(builtinNames[returnType.tag()]))
    if returnType.isDOMString():
        return inOption(CGGeneric("DOMString"))
    if returnType.isUSVString():
        return inOption(CGGeneric("USVString"))
    if returnType.isByteString():
        return inOption(CGGeneric("ByteString"))
    if returnType.isEnum():
        return inOption(CGGeneric(returnType.unroll().inner.identifier.name))
    if returnType.isGeckoInterface():
        descriptor = descriptorProvider.getDescriptor(
            returnType.unroll().inner.identifier.name)
        return inOption(CGGeneric(descriptor.returnType))
    if returnType.isCallback():
        callback = returnType.unroll().callback
        return inOption(CGGeneric(
            'Rc<%s::%s>' % (getModuleFromObject(callback),
                            callback.identifier.name)))
    if returnType.isUnion():
        return inOption(CGGeneric(union_native_type(returnType)))
    # TODO: Return the value through a MutableHandleValue outparam
    # https://github.com/servo/servo/issues/6307
    if returnType.isAny():
        return CGGeneric("JSVal")
    if returnType.isObject() or returnType.isSpiderMonkeyInterface():
        return CGGeneric("*mut JSObject")
    if returnType.isSequence():
        element = getRetvalDeclarationForType(returnType.unroll(),
                                              descriptorProvider)
        return inOption(CGWrapper(element, pre="Vec<", post=">"))
    if returnType.isDictionary():
        nullable = returnType.nullable()
        dictName = returnType.inner.name if nullable else returnType.name
        result = CGGeneric(dictName)
        if typeNeedsRooting(returnType, descriptorProvider):
            raise TypeError("We don't support rootable dictionaries return values")
        if nullable:
            result = CGWrapper(result, pre="Option<", post=">")
        return result
    raise TypeError("Don't know how to declare return value for %s" %
                    returnType)
class PropertyDefiner:
    """
    A common superclass for defining things on prototype objects.

    Subclasses should implement generateArray to generate the actual arrays of
    things we're defining.  They should also set self.regular to the list of
    things exposed to web pages.
    """
    def __init__(self, descriptor, name):
        self.descriptor = descriptor
        self.name = name

    def variableName(self):
        # Name of the generated static spec array ("s" + defining name).
        return "s" + self.name

    def length(self):
        return len(self.regular)

    def __str__(self):
        # We only need to generate id arrays for things that will end
        # up used via ResolveProperty or EnumerateProperties.
        return self.generateArray(self.regular, self.variableName())

    def generatePrefableArray(self, array, name, specTemplate, specTerminator,
                              specType, getDataTuple):
        """
        Generate one static spec array.

        array is an array of interface members as passed to generateArray
        name is the name as passed to generateArray
        specTemplate is a template for each entry of the spec array
        specTerminator is a terminator for the spec array (inserted at the end
          of the array), or None
        specType is the actual typename of our spec
        getDataTuple is a callback function that takes an array entry and
          returns a tuple suitable for substitution into specTemplate.
        """
        assert len(array) != 0
        specs = [specTemplate % getDataTuple(member) for member in array]
        if specTerminator:
            specs.append(specTerminator)

        return (("const %s: &'static [%s] = &[\n" +
                 ",\n".join(specs) + "\n" +
                 "];\n") % (name, specType))
# The length of a method is the minimum of the lengths of the
# argument lists of all its overloads.
def methodLength(method):
    """Return the WebIDL 'length' of *method*: the smallest count of
    required (non-optional, non-variadic) arguments over its overloads."""
    def requiredArgCount(arguments):
        return sum(1 for arg in arguments
                   if not arg.optional and not arg.variadic)
    return min(requiredArgCount(arguments)
               for _, arguments in method.signatures())
class MethodDefiner(PropertyDefiner):
    """
    A class for defining methods on a prototype object.
    """
    def __init__(self, descriptor, name, static, unforgeable):
        # A method set is either static or unforgeable, never both.
        assert not (static and unforgeable)
        PropertyDefiner.__init__(self, descriptor, name)

        # FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
        #       We should be able to check for special operations without an
        #       identifier. For now we check if the name starts with __

        # Ignore non-static methods for callback interfaces
        if not descriptor.interface.isCallback() or static:
            methods = [m for m in descriptor.interface.members if
                       m.isMethod() and m.isStatic() == static and
                       not m.isIdentifierLess() and
                       MemberIsUnforgeable(m, descriptor) == unforgeable]
        else:
            methods = []
        self.regular = [{"name": m.identifier.name,
                         "methodInfo": not m.isStatic(),
                         "length": methodLength(m)} for m in methods]

        # FIXME Check for an existing iterator on the interface first.
        # Indexed getters get a self-hosted @@iterator using ArrayValues.
        if any(m.isGetter() and m.isIndexed() for m in methods):
            self.regular.append({"name": '@@iterator',
                                 "methodInfo": False,
                                 "selfHostedName": "ArrayValues",
                                 "length": 0})

        # A stringifier operation is surfaced as a zero-argument toString; it
        # is added to the unforgeable set when the whole interface is
        # [Unforgeable], otherwise to the regular set.
        isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable"))
        if not static and unforgeable == isUnforgeableInterface:
            stringifier = descriptor.operations['Stringifier']
            if stringifier:
                self.regular.append({
                    "name": "toString",
                    "nativeName": stringifier.identifier.name,
                    "length": 0,
                })
        self.unforgeable = unforgeable

    def generateArray(self, array, name):
        if len(array) == 0:
            return ""

        flags = "JSPROP_ENUMERATE"
        if self.unforgeable:
            flags += " | JSPROP_PERMANENT | JSPROP_READONLY"

        # Build the tuple substituted into the JSFunctionSpec template for a
        # single method entry.
        def specData(m):
            # TODO: Use something like JS_FNSPEC
            # https://github.com/servo/servo/issues/6391
            if "selfHostedName" in m:
                selfHostedName = '%s as *const u8 as *const libc::c_char' % str_to_const_array(m["selfHostedName"])
                assert not m.get("methodInfo", True)
                accessor = "None"
                jitinfo = "0 as *const JSJitInfo"
            else:
                selfHostedName = "0 as *const libc::c_char"
                if m.get("methodInfo", True):
                    identifier = m.get("nativeName", m["name"])
                    # Go through an intermediate type here, because it's not
                    # easy to tell whether the methodinfo is a JSJitInfo or
                    # a JSTypedMethodJitInfo here. The compiler knows, though,
                    # so let it do the work.
                    jitinfo = "&%s_methodinfo as *const _ as *const JSJitInfo" % identifier
                    accessor = "Some(generic_method)"
                else:
                    jitinfo = "0 as *const JSJitInfo"
                    accessor = 'Some(%s)' % m.get("nativeName", m["name"])
            if m["name"].startswith("@@"):
                # Well-known symbol: encode the SymbolCode in the name slot.
                return ('(SymbolCode::%s as i32 + 1)'
                        % m["name"][2:], accessor, jitinfo, m["length"], flags, selfHostedName)
            return (str_to_const_array(m["name"]), accessor, jitinfo, m["length"], flags, selfHostedName)

        return self.generatePrefableArray(
            array, name,
            '    JSFunctionSpec {\n'
            '        name: %s as *const u8 as *const libc::c_char,\n'
            '        call: JSNativeWrapper { op: %s, info: %s },\n'
            '        nargs: %s,\n'
            '        flags: (%s) as u16,\n'
            '        selfHostedName: %s\n'
            '    }',
            '    JSFunctionSpec {\n'
            '        name: 0 as *const libc::c_char,\n'
            '        call: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
            '        nargs: 0,\n'
            '        flags: 0,\n'
            '        selfHostedName: 0 as *const libc::c_char\n'
            '    }',
            'JSFunctionSpec',
            specData)
class AttrDefiner(PropertyDefiner):
    """
    A class for defining attributes (JSPropertySpec entries) on a
    prototype object, for the given (static, unforgeable) combination.
    """
    def __init__(self, descriptor, name, static, unforgeable):
        assert not (static and unforgeable)
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.descriptor = descriptor
        self.regular = [
            m
            for m in descriptor.interface.members if
            m.isAttr() and m.isStatic() == static and
            MemberIsUnforgeable(m, descriptor) == unforgeable
        ]
        self.static = static
        self.unforgeable = unforgeable

    def generateArray(self, array, name):
        """
        Emit the static JSPropertySpec array named `name`; returns ""
        when there is nothing to define.
        """
        if len(array) == 0:
            return ""

        flags = "JSPROP_ENUMERATE | JSPROP_SHARED"
        if self.unforgeable:
            flags += " | JSPROP_READONLY | JSPROP_PERMANENT"

        def getter(attr):
            # Static attributes call the native getter directly; instance
            # attributes dispatch through the generic (possibly lenient-this)
            # getter plus per-attribute JIT info.
            if self.static:
                accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name)
                jitinfo = "0 as *const JSJitInfo"
            else:
                if attr.hasLenientThis():
                    accessor = "generic_lenient_getter"
                else:
                    accessor = "generic_getter"
                jitinfo = "&%s_getterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
            return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
                    % {"info": jitinfo,
                       "native": accessor})

        def setter(attr):
            # Readonly attributes (without [PutForwards]) get a null setter.
            if attr.readonly and not attr.getExtendedAttribute("PutForwards"):
                return "JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }"
            if self.static:
                accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name)
                jitinfo = "0 as *const JSJitInfo"
            else:
                if attr.hasLenientThis():
                    accessor = "generic_lenient_setter"
                else:
                    accessor = "generic_setter"
                jitinfo = "&%s_setterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
            return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
                    % {"info": jitinfo,
                       "native": accessor})

        def specData(attr):
            return (str_to_const_array(attr.identifier.name), flags, getter(attr),
                    setter(attr))

        return self.generatePrefableArray(
            array, name,
            '    JSPropertySpec {\n'
            '        name: %s as *const u8 as *const libc::c_char,\n'
            '        flags: ((%s) & 0xFF) as u8,\n'
            '        getter: %s,\n'
            '        setter: %s\n'
            '    }',
            '    JSPropertySpec {\n'
            '        name: 0 as *const libc::c_char,\n'
            '        flags: 0,\n'
            '        getter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
            '        setter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }\n'
            '    }',
            'JSPropertySpec',
            specData)
class ConstDefiner(PropertyDefiner):
    """
    A class for definining constants on the interface object
    """
    def __init__(self, descriptor, name):
        PropertyDefiner.__init__(self, descriptor, name)
        self.name = name
        self.regular = [member for member in descriptor.interface.members
                        if member.isConst()]

    def generateArray(self, array, name):
        """Emit the ConstantSpec array, or "" when there are no constants."""
        if not array:
            return ""

        def specData(const):
            # (name literal, JSVal expression) for the template below.
            return (str_to_const_array(const.identifier.name),
                    convertConstIDLValueToJSVal(const.value))

        return self.generatePrefableArray(
            array, name,
            '    ConstantSpec { name: %s, value: %s }',
            None,
            'ConstantSpec',
            specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them. The zero-width lookahead means the
# match consumes no characters, so substitution inserts the indent.
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
    """
    A class that takes another CGThing and generates code that indents that
    CGThing by some number of spaces. The default indent is four spaces.
    Empty lines are left unindented (see lineStartDetector).
    """
    def __init__(self, child, indentLevel=4):
        CGThing.__init__(self)
        self.child = child
        self.indent = indentLevel * " "

    def define(self):
        text = self.child.define()
        if not text:
            return text
        return lineStartDetector.sub(self.indent, text)
class CGWrapper(CGThing):
    """
    Generic CGThing that wraps another CGThing with fixed `pre` and `post`
    text. With reindent=True, continuation lines of the child are padded
    to line up under the `pre` text.
    """
    def __init__(self, child, pre="", post="", reindent=False):
        CGThing.__init__(self)
        self.child = child
        self.pre = pre
        self.post = post
        self.reindent = reindent

    def define(self):
        body = self.child.define()
        if self.reindent:
            # We don't use lineStartDetector because we don't want to
            # insert whitespace at the beginning of our _first_ line.
            padding = " " * len(self.pre)
            body = stripTrailingWhitespace(body.replace("\n", "\n" + padding))
        return self.pre + body + self.post
class CGImports(CGWrapper):
    """
    Generates the appropriate import/use statements.

    Prepends `child` with a `#![allow(...)]` attribute (unless all warnings
    are enabled) plus one sorted, de-duplicated `use` line per import,
    including a `dom::types::*` import for every non-callback interface
    type reachable from the given descriptors and callbacks.
    """
    def __init__(self, child, descriptors, callbacks, imports, ignored_warnings=None):
        """
        Adds a set of imports.
        """
        if ignored_warnings is None:
            # Default lints silenced in generated code.
            ignored_warnings = [
                'non_camel_case_types',
                'non_upper_case_globals',
                'unused_imports',
                'unused_variables',
                'unused_assignments',
            ]

        def componentTypes(type):
            # Unwrap nullables and flatten unions into their member types.
            if type.nullable():
                type = type.unroll()
            if type.isUnion():
                return type.flatMemberTypes
            return [type]

        def isImportable(type):
            # Only non-callback, non-builtin interfaces are importable
            # from dom::types.
            if not type.isType():
                assert type.isInterface()
                return not type.isCallback()
            return type.isNonCallbackInterface() and not type.builtin

        def relatedTypesForSignatures(method):
            # Collect return and argument types across all overloads.
            types = []
            for (returnType, arguments) in method.signatures():
                types += componentTypes(returnType)
                for arg in arguments:
                    types += componentTypes(arg.type)
            return types

        def getIdentifier(t):
            if t.isType():
                return t.inner.identifier
            assert t.isInterface()
            return t.identifier

        types = []
        for d in descriptors:
            types += [d.interface]
            # Consider regular members, named constructors, the constructor,
            # and (for proxies) the special operations.
            members = d.interface.members + d.interface.namedConstructors
            constructor = d.interface.ctor()
            if constructor:
                members += [constructor]
            if d.proxy:
                members += [o for o in d.operations.values() if o]
            for m in members:
                if m.isMethod():
                    types += relatedTypesForSignatures(m)
                elif m.isAttr():
                    types += componentTypes(m.type)
        for c in callbacks:
            types += relatedTypesForSignatures(c)
        imports += ['dom::types::%s' % getIdentifier(t).name for t in types if isImportable(t)]

        statements = []
        if len(ignored_warnings) > 0:
            statements.append('#![allow(%s)]' % ','.join(ignored_warnings))
        # Deterministic output: de-duplicate and sort the use statements.
        statements.extend('use %s;' % i for i in sorted(set(imports)))

        CGWrapper.__init__(self, child,
                           pre='\n'.join(statements) + '\n\n')
class CGIfWrapper(CGWrapper):
    """
    Wraps a CGThing in a Rust `if <condition> { ... }` block, indenting
    the body by one level.
    """
    def __init__(self, condition, child):
        condWrapper = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
                                reindent=True)
        CGWrapper.__init__(self, CGIndenter(child), pre=condWrapper.define(),
                           post="\n}")
class CGTemplatedType(CGWrapper):
    """Wraps a CGThing in `templateName<...>` generic-type syntax."""
    def __init__(self, templateName, child):
        CGWrapper.__init__(self, child, pre="%s<" % templateName, post=">")
class CGNamespace(CGWrapper):
    """
    Wraps a CGThing in a Rust `mod` declaration, optionally `pub`.
    """
    def __init__(self, namespace, child, public=False):
        visibility = "pub " if public else ""
        CGWrapper.__init__(self, child,
                           pre="%smod %s {\n" % (visibility, namespace),
                           post="} // mod %s" % namespace)

    @staticmethod
    def build(namespaces, child, public=False):
        """
        Static helper method to build multiple wrapped namespaces.
        """
        if not namespaces:
            return child
        wrapped = CGNamespace.build(namespaces[1:], child, public=public)
        return CGNamespace(namespaces[0], wrapped, public=public)
def DOMClassTypeId(desc):
    """
    Return the Rust expression for this descriptor's TopTypeId value,
    encoding the interface's position in its prototype chain as nested
    InheritTypes enum variants.
    """
    chain = desc.prototypeChain
    path = "::dom::bindings::codegen::InheritTypes"
    inner = ""
    if desc.hasDescendants():
        if desc.interface.getExtendedAttribute("Abstract"):
            # Abstract interfaces with descendants have no own variant.
            return "%s::TopTypeId::Abstract" % path
        name = desc.interface.identifier.name
        inner = "(%s::%sTypeId::%s)" % (path, name, name)
    elif len(chain) == 1:
        # Leaf interface with no inheritance in either direction.
        return "%s::TopTypeId::Alone" % path
    # Nest one enum variant per (parent, child) link, innermost first.
    backwards = list(reversed(chain))
    for child, parent in zip(backwards, backwards[1:]):
        inner = "(%s::%sTypeId::%s%s)" % (path, parent, child, inner)
    return "%s::TopTypeId::%s%s" % (path, chain[0], inner)
def DOMClass(descriptor):
    """
    Build the Rust `DOMClass` struct literal for `descriptor`, with its
    fixed-length interface chain, type id, and heap-size function.
    """
    protoList = ['PrototypeList::ID::' + proto for proto in descriptor.prototypeChain]
    # Pad out the list to the right length with ID::Last so we
    # guarantee that all the lists are the same length. ID::Last
    # is never the ID of any prototype, so it's safe to use as
    # padding.
    protoList.extend(['PrototypeList::ID::Last'] * (descriptor.config.maxProtoChainLength - len(protoList)))
    prototypeChainString = ', '.join(protoList)
    heapSizeOf = 'heap_size_of_raw_self_and_children::<%s>' % descriptor.interface.identifier.name
    return """\
DOMClass {
    interface_chain: [ %s ],
    type_id: %s,
    heap_size_of: %s as unsafe fn(_) -> _,
}""" % (prototypeChainString, DOMClassTypeId(descriptor), heapSizeOf)
class CGDOMJSClass(CGThing):
    """
    Generate a DOMJSClass for a given descriptor
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        """
        Emit the `static Class: DOMJSClass` definition. Globals use the
        JSAPI global trace hook and extra global slots; weak-referenceable
        objects get an extra reserved slot for the weak-box pointer.
        """
        traceHook = 'Some(%s)' % TRACE_HOOK_NAME
        if self.descriptor.isGlobal():
            assert not self.descriptor.weakReferenceable
            traceHook = "Some(js::jsapi::JS_GlobalObjectTraceHook)"
            flags = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
            slots = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
        else:
            flags = "0"
            if self.descriptor.weakReferenceable:
                # Slot 1 holds the WeakBox pointer (DOM_WEAK_SLOT).
                slots = "2"
            else:
                slots = "1"
        return """\
static Class: DOMJSClass = DOMJSClass {
    base: js::jsapi::Class {
        name: %s as *const u8 as *const libc::c_char,
        flags: JSCLASS_IS_DOMJSCLASS | JSCLASS_IMPLEMENTS_BARRIERS | %s |
               (((%s) & JSCLASS_RESERVED_SLOTS_MASK) <<
                JSCLASS_RESERVED_SLOTS_SHIFT), //JSCLASS_HAS_RESERVED_SLOTS(%s),
        addProperty: None,
        delProperty: None,
        getProperty: None,
        setProperty: None,
        enumerate: None,
        resolve: None,
        convert: None,
        finalize: Some(%s),
        call: None,
        hasInstance: None,
        construct: None,
        trace: %s,
        spec: js::jsapi::ClassSpec {
            createConstructor: None,
            createPrototype: None,
            constructorFunctions: 0 as *const js::jsapi::JSFunctionSpec,
            constructorProperties: 0 as *const js::jsapi::JSPropertySpec,
            prototypeFunctions: 0 as *const js::jsapi::JSFunctionSpec,
            prototypeProperties: 0 as *const js::jsapi::JSPropertySpec,
            finishInit: None,
            flags: 0,
        },
        ext: js::jsapi::ClassExtension {
            outerObject: %s,
            innerObject: None,
            isWrappedNative: false,
            weakmapKeyDelegateOp: None,
            objectMovedOp: None,
        },
        ops: js::jsapi::ObjectOps {
            lookupProperty: None,
            defineProperty: None,
            hasProperty: None,
            getProperty: None,
            setProperty: None,
            getOwnPropertyDescriptor: None,
            deleteProperty: None,
            watch: None,
            unwatch: None,
            getElements: None,
            enumerate: None,
            thisObject: %s,
            funToString: None,
        },
    },
    dom_class: %s
};""" % (str_to_const_array(self.descriptor.interface.identifier.name),
         flags, slots, slots,
         FINALIZE_HOOK_NAME, traceHook,
         self.descriptor.outerObjectHook,
         self.descriptor.outerObjectHook,
         CGGeneric(DOMClass(self.descriptor)).define())
def str_to_const_array(s):
    """Render `s` as a NUL-terminated Rust byte-string literal."""
    return 'b"{}\\0"'.format(s)
class CGPrototypeJSClass(CGThing):
    """
    Generate the `static PrototypeClass: JSClass` for an interface's
    prototype object. One reserved slot is added when the interface has
    unforgeable members (to hold the unforgeable holder object).
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype")
        slotCount = 0
        if self.descriptor.hasUnforgeableMembers:
            # Slot for DOM_PROTO_UNFORGEABLE_HOLDER_SLOT.
            slotCount += 1
        return """\
static PrototypeClass: JSClass = JSClass {
    name: %(name)s as *const u8 as *const libc::c_char,
    flags:
        // JSCLASS_HAS_RESERVED_SLOTS(%(slotCount)s)
        (%(slotCount)s & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT,
    addProperty: None,
    delProperty: None,
    getProperty: None,
    setProperty: None,
    enumerate: None,
    resolve: None,
    convert: None,
    finalize: None,
    call: None,
    hasInstance: None,
    construct: None,
    trace: None,
    reserved: [0 as *mut libc::c_void; 26]
};
""" % {'name': name, 'slotCount': slotCount}
class CGInterfaceObjectJSClass(CGThing):
    """
    Generate the `static InterfaceObjectClass` for an interface's
    constructor object. Interfaces without a [Constructor] get a hook
    that always throws.
    """
    def __init__(self, descriptor):
        assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        if self.descriptor.interface.ctor():
            constructor = CONSTRUCT_HOOK_NAME
        else:
            constructor = "throwing_constructor"
        args = {
            "constructor": constructor,
            "hasInstance": HASINSTANCE_HOOK_NAME,
            "name": self.descriptor.interface.identifier.name,
        }
        return """\
static InterfaceObjectClass: NonCallbackInterfaceObjectClass =
    NonCallbackInterfaceObjectClass::new(%(constructor)s, %(hasInstance)s,
                                         fun_to_string);
""" % args
class CGList(CGThing):
    """
    Concatenates the output of a sequence of CGThings, with an optional
    joiner string between non-empty pieces ("\n" is a common joiner).
    None children are skipped.
    """
    def __init__(self, children, joiner=""):
        CGThing.__init__(self)
        self.children = children
        self.joiner = joiner

    def append(self, child):
        self.children.append(child)

    def prepend(self, child):
        self.children.insert(0, child)

    def join(self, generator):
        # Drop empty strings so the joiner never appears twice in a row.
        return self.joiner.join(piece for piece in generator if len(piece) > 0)

    def define(self):
        return self.join(child.define()
                         for child in self.children if child is not None)
class CGIfElseWrapper(CGList):
    """Wraps two CGThings in a Rust `if ... { ... } else { ... }` block."""
    def __init__(self, condition, ifTrue, ifFalse):
        elseBranch = CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")
        CGList.__init__(self, [CGIfWrapper(condition, ifTrue), elseBranch])
class CGGeneric(CGThing):
    """
    A CGThing that emits a fixed, precomputed string.
    """
    def __init__(self, text):
        self.text = text

    def define(self):
        return self.text
class CGCallbackTempRoot(CGGeneric):
    """Emits the expression that temporarily roots a callback of type `name`."""
    def __init__(self, name):
        CGGeneric.__init__(self, "%s::new(${val}.get().to_object())" % name)
def getAllTypes(descriptors, dictionaries, callbacks):
    """
    Generate all the types we're dealing with. For each type, a tuple
    containing type, descriptor, dictionary is yielded. The
    descriptor and dictionary can be None if the type does not come
    from a descriptor or dictionary; they will never both be non-None.
    """
    for desc in descriptors:
        for ty in getTypesFromDescriptor(desc):
            yield (ty, desc, None)
    for dct in dictionaries:
        for ty in getTypesFromDictionary(dct):
            yield (ty, None, dct)
    for cb in callbacks:
        for ty in getTypesFromCallback(cb):
            yield (ty, None, None)
def UnionTypes(descriptors, dictionaries, callbacks, config):
    """
    Returns a CGList containing CGUnionStructs for every union.
    """
    # Fixed set of `use` lines the generated union module always needs.
    imports = [
        'dom::bindings::codegen::PrototypeList',
        'dom::bindings::conversions::FromJSValConvertible',
        'dom::bindings::conversions::ToJSValConvertible',
        'dom::bindings::conversions::ConversionBehavior',
        'dom::bindings::conversions::root_from_handlevalue',
        'dom::bindings::conversions::StringificationBehavior',
        'dom::bindings::error::throw_not_in_union',
        'dom::bindings::js::Root',
        'dom::bindings::str::USVString',
        'dom::types::*',
        'js::jsapi::JSContext',
        'js::jsapi::{HandleValue, MutableHandleValue}',
        'js::jsval::JSVal',
        'util::str::DOMString',
    ]
    # Now find all the things we'll need as arguments and return values because
    # we need to wrap or unwrap them.
    unionStructs = dict()
    for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks):
        # A type comes from at most one of descriptor/dictionary.
        assert not descriptor or not dictionary
        t = t.unroll()
        if not t.isUnion():
            continue
        name = str(t)
        # De-duplicate unions that appear in several signatures.
        if name not in unionStructs:
            provider = descriptor or config.getDescriptorProvider()
            unionStructs[name] = CGList([
                CGUnionStruct(t, provider),
                CGUnionConversionStruct(t, provider)
            ])
    # Sort unionStructs by key, retrieve value
    unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0)))
    return CGImports(CGList(unionStructs, "\n\n"), [], [], imports, ignored_warnings=[])
class Argument():
    """
    A class for outputting the type and name of an argument, optionally
    marked `mut`.
    """
    def __init__(self, argType, name, default=None, mutable=False):
        self.argType = argType
        self.name = name
        self.default = default
        self.mutable = mutable

    def declare(self):
        """Render the argument as it appears in a Rust fn signature."""
        parts = []
        if self.mutable:
            parts.append('mut ')
        parts.append(self.name)
        if self.argType:
            parts.append(': ' + self.argType)
        # XXXjdm Support default arguments somehow :/
        # if self.default is not None:
        #     string += " = " + self.default
        return ''.join(parts)

    def define(self):
        return self.argType + ' ' + self.name
class CGAbstractMethod(CGThing):
    """
    An abstract class for generating code for a method. Subclasses
    should override definition_body to create the actual code.

    descriptor is the descriptor for the interface the method is associated with

    name is the name of the method as a string

    returnType is the IDLType of the return value

    args is a list of Argument objects

    inline should be True to generate an inline method, whose body is
    part of the declaration.

    alwaysInline should be True to generate an inline method annotated with
    MOZ_ALWAYS_INLINE.

    If templateArgs is not None it should be a list of strings containing
    template arguments, and the function will be templatized using those
    arguments.

    docs is None or documentation for the method in a string.
    """
    def __init__(self, descriptor, name, returnType, args, inline=False,
                 alwaysInline=False, extern=False, pub=False, templateArgs=None,
                 unsafe=False, docs=None):
        CGThing.__init__(self)
        self.descriptor = descriptor
        self.name = name
        self.returnType = returnType
        self.args = args
        self.alwaysInline = alwaysInline
        self.extern = extern
        self.templateArgs = templateArgs
        self.pub = pub
        self.unsafe = unsafe
        self.docs = docs

    def _argstring(self):
        # Comma-separated declarations of all arguments.
        return ', '.join([a.declare() for a in self.args])

    def _template(self):
        # Generic parameter list, or '' for non-templated methods.
        if self.templateArgs is None:
            return ''
        return '<%s>\n' % ', '.join(self.templateArgs)

    def _docs(self):
        # Render self.docs as Rust `///` doc-comment lines.
        if self.docs is None:
            return ''
        lines = self.docs.splitlines()
        return ''.join('/// %s\n' % line for line in lines)

    def _decorators(self):
        # Attributes and qualifiers preceding `fn`.
        decorators = []
        if self.alwaysInline:
            decorators.append('#[inline]')
        if self.extern:
            decorators.append('unsafe')
            decorators.append('extern')
        if self.pub:
            decorators.append('pub')
        if not decorators:
            return ''
        return ' '.join(decorators) + ' '

    def _returnType(self):
        return (" -> %s" % self.returnType) if self.returnType != "void" else ""

    def define(self):
        body = self.definition_body()
        # Method will already be marked `unsafe` if `self.extern == True`
        if self.unsafe and not self.extern:
            body = CGWrapper(CGIndenter(body), pre="unsafe {\n", post="\n}")
        return CGWrapper(CGIndenter(body),
                         pre=self.definition_prologue(),
                         post=self.definition_epilogue()).define()

    def definition_prologue(self):
        # `<docs><decorators>fn name<T>(args) -> ret {`
        return "%s%sfn %s%s(%s)%s {\n" % (self._docs(), self._decorators(),
                                          self.name, self._template(),
                                          self._argstring(), self._returnType())

    def definition_epilogue(self):
        return "\n}\n"

    def definition_body(self):
        raise NotImplementedError  # Override me!
def CreateBindingJSObject(descriptor, parent=None):
    """
    Emit the Rust statements that allocate the JS reflector object for
    `descriptor` — as a proxy, a global, or a plain DOM object — and stash
    the native pointer in its reserved slot. `parent` names the scope
    variable used when creating a proxy. Leaves `raw` (the native pointer)
    and `obj` (the rooted JS object) in scope.
    """
    create = "let raw = Box::into_raw(object);\nlet _rt = RootedTraceable::new(&*raw);\n"
    if descriptor.proxy:
        assert not descriptor.isGlobal()
        create += """
let handler = RegisterBindings::proxy_handlers[PrototypeList::Proxies::%s as usize];
let private = RootedValue::new(cx, PrivateValue(raw as *const libc::c_void));
let obj = NewProxyObject(cx, handler,
                         private.handle(),
                         proto.ptr, %s.get(),
                         ptr::null_mut(), ptr::null_mut());
assert!(!obj.is_null());
let obj = RootedObject::new(cx, obj);\
""" % (descriptor.name, parent)
    elif descriptor.isGlobal():
        # Globals carry their native pointer via create_dom_global rather
        # than an explicit reserved-slot write.
        create += ("let obj = RootedObject::new(\n"
                   "    cx,\n"
                   "    create_dom_global(\n"
                   "        cx,\n"
                   "        &Class.base as *const js::jsapi::Class as *const JSClass,\n"
                   "        raw as *const libc::c_void,\n"
                   "        Some(%s))\n"
                   ");\n"
                   "assert!(!obj.ptr.is_null());" % TRACE_HOOK_NAME)
    else:
        create += ("let obj = RootedObject::new(cx, JS_NewObjectWithGivenProto(\n"
                   "    cx, &Class.base as *const js::jsapi::Class as *const JSClass, proto.handle()));\n"
                   "assert!(!obj.ptr.is_null());\n"
                   "\n"
                   "JS_SetReservedSlot(obj.ptr, DOM_OBJECT_SLOT,\n"
                   "                   PrivateValue(raw as *const libc::c_void));")
    if descriptor.weakReferenceable:
        # Initialize the weak-box slot to null; filled in lazily on first
        # weak reference.
        create += """
JS_SetReservedSlot(obj.ptr, DOM_WEAK_SLOT, PrivateValue(ptr::null()));"""
    return create
def InitUnforgeablePropertiesOnHolder(descriptor, properties):
    """
    Define the unforgeable properties on the unforgeable holder for
    the interface represented by descriptor.

    properties is a PropertyArrays instance.
    """
    attrTemplate = "define_properties(cx, unforgeable_holder.handle(), %s).unwrap();"
    methodTemplate = "define_methods(cx, unforgeable_holder.handle(), %s).unwrap();"
    pieces = []
    # Only emit a define_* call for arrays that actually have entries.
    for template, array in ((attrTemplate, properties.unforgeable_attrs),
                            (methodTemplate, properties.unforgeable_methods)):
        if array.length() > 0:
            pieces.append(CGGeneric(template % array.variableName()))
    return CGList(pieces, "\n")
def CopyUnforgeablePropertiesToInstance(descriptor):
    """
    Copy the unforgeable properties from the unforgeable holder for
    this interface to the instance object we have.

    Returns "" when the interface has no unforgeable members.
    """
    if not descriptor.hasUnforgeableMembers:
        return ""
    copyCode = ""
    # For proxies, we want to define on the expando object, not directly on the
    # reflector, so we can make sure we don't get confused by named getters.
    if descriptor.proxy:
        copyCode += """\
let expando = RootedObject::new(cx, ensure_expando_object(cx, obj.handle()));
"""
        obj = "expando"
    else:
        obj = "obj"
    # We can't do the fast copy for globals, because we can't allocate the
    # unforgeable holder for those with the right JSClass. Luckily, there
    # aren't too many globals being created.
    if descriptor.isGlobal():
        copyFunc = "JS_CopyPropertiesFrom"
    else:
        copyFunc = "JS_InitializePropertiesFromCompatibleNativeObject"
    copyCode += """\
let mut unforgeable_holder = RootedObject::new(cx, ptr::null_mut());
unforgeable_holder.handle_mut().set(
    JS_GetReservedSlot(proto.ptr, DOM_PROTO_UNFORGEABLE_HOLDER_SLOT).to_object());
assert!(%(copyFunc)s(cx, %(obj)s.handle(), unforgeable_holder.handle()));
""" % {'copyFunc': copyFunc, 'obj': obj}
    return copyCode
class CGWrapMethod(CGAbstractMethod):
    """
    Class that generates the FooBinding::Wrap function for non-callback
    interfaces.
    """
    def __init__(self, descriptor):
        assert not descriptor.interface.isCallback()
        # Globals are their own scope, so they take no `scope` argument.
        if not descriptor.isGlobal():
            args = [Argument('*mut JSContext', 'cx'), Argument('GlobalRef', 'scope'),
                    Argument("Box<%s>" % descriptor.concreteType, 'object')]
        else:
            args = [Argument('*mut JSContext', 'cx'),
                    Argument("Box<%s>" % descriptor.concreteType, 'object')]
        retval = 'Root<%s>' % descriptor.concreteType
        CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
                                  pub=True, unsafe=True)

    def definition_body(self):
        """
        Emit the body: create the JS reflector, copy unforgeable
        properties onto it, link it to the native object, and return a
        rooted reference.
        """
        unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor)
        if not self.descriptor.isGlobal():
            create = CreateBindingJSObject(self.descriptor, "scope")
            return CGGeneric("""\
let _ar = JSAutoRequest::new(cx);
let scope = scope.reflector().get_jsobject();
assert!(!scope.get().is_null());
assert!(((*JS_GetClass(scope.get())).flags & JSCLASS_IS_GLOBAL) != 0);
let mut proto = RootedObject::new(cx, ptr::null_mut());
let _ac = JSAutoCompartment::new(cx, scope.get());
GetProtoObject(cx, scope, scope, proto.handle_mut());
assert!(!proto.ptr.is_null());
%(createObject)s
%(copyUnforgeable)s
(*raw).init_reflector(obj.ptr);
Root::from_ref(&*raw)""" % {'copyUnforgeable': unforgeable, 'createObject': create})
        else:
            create = CreateBindingJSObject(self.descriptor)
            return CGGeneric("""\
let _ar = JSAutoRequest::new(cx);
%(createObject)s
let _ac = JSAutoCompartment::new(cx, obj.ptr);
let mut proto = RootedObject::new(cx, ptr::null_mut());
GetProtoObject(cx, obj.handle(), obj.handle(), proto.handle_mut());
JS_SetPrototype(cx, obj.handle(), proto.handle());
%(copyUnforgeable)s
(*raw).init_reflector(obj.ptr);
let ret = Root::from_ref(&*raw);
RegisterBindings::Register(cx, obj.handle());
ret""" % {'copyUnforgeable': unforgeable, 'createObject': create})
class CGIDLInterface(CGThing):
    """
    Class for codegen of an implementation of the IDLInterface trait.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        interface = self.descriptor.interface
        name = self.descriptor.name
        # Interfaces with descendants must check their position in the
        # interface chain; leaf interfaces can compare class pointers.
        if (interface.getUserData("hasConcreteDescendant", False) or
                interface.getUserData("hasProxyDescendant", False)):
            depth = self.descriptor.prototypeDepth
            check = "class.interface_chain[%s] == PrototypeList::ID::%s" % (depth, name)
        elif self.descriptor.proxy:
            check = "class as *const _ == &Class as *const _"
        else:
            check = "class as *const _ == &Class.dom_class as *const _"
        return """\
impl IDLInterface for %(name)s {
    #[inline]
    fn derives(class: &'static DOMClass) -> bool {
        %(check)s
    }
}
impl PartialEq for %(name)s {
    fn eq(&self, other: &%(name)s) -> bool {
        self as *const %(name)s == &*other
    }
}
""" % {'check': check, 'name': name}
class CGAbstractExternMethod(CGAbstractMethod):
    """
    Abstract base class for codegen of implementation-only (no
    declaration) static methods, emitted as `unsafe extern fn`.
    """
    def __init__(self, descriptor, name, returnType, args):
        CGAbstractMethod.__init__(self, descriptor, name, returnType, args,
                                  inline=False, extern=True)
class PropertyArrays():
    """
    Aggregates every property-definer array for one interface descriptor:
    methods, attributes, and constants, in static/non-static and
    forgeable/unforgeable variants.
    """
    def __init__(self, descriptor):
        self.static_methods = MethodDefiner(descriptor, "StaticMethods",
                                            static=True, unforgeable=False)
        self.static_attrs = AttrDefiner(descriptor, "StaticAttributes",
                                        static=True, unforgeable=False)
        self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False)
        self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods",
                                                 static=False, unforgeable=True)
        self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False)
        self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes",
                                             static=False, unforgeable=True)
        self.consts = ConstDefiner(descriptor, "Constants")

    @staticmethod
    def arrayNames():
        """Attribute names of all arrays, in emission order."""
        return [
            "static_methods",
            "static_attrs",
            "methods",
            "unforgeable_methods",
            "attrs",
            "unforgeable_attrs",
            "consts",
        ]

    def variableNames(self):
        """Map each array name to its generated Rust variable name."""
        names = {}
        for array in self.arrayNames():
            names[array] = getattr(self, array).variableName()
        return names

    def __str__(self):
        # Concatenated definitions of every array.
        define = ""
        for array in self.arrayNames():
            define += str(getattr(self, array))
        return define
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
    """
    Generate the CreateInterfaceObjects method for an interface descriptor.

    properties should be a PropertyArrays instance.
    """
    def __init__(self, descriptor, properties):
        args = [Argument('*mut JSContext', 'cx')]
        if not descriptor.interface.isCallback():
            args += [Argument('HandleObject', 'global'),
                     Argument('*mut ProtoOrIfaceArray', 'cache')]
        args.append(Argument('HandleObject', 'receiver'))
        CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args,
                                  unsafe=True)
        self.properties = properties

    def definition_body(self):
        """
        Emit the body: create the prototype object, optionally the
        interface (constructor) object, named constructors, and the
        unforgeable-property holder, caching created objects in `cache`.
        """
        name = self.descriptor.interface.identifier.name
        if self.descriptor.interface.isCallback():
            # Callback interfaces only expose their constants.
            assert not self.descriptor.interface.ctor() and self.descriptor.interface.hasConstants()
            return CGGeneric("""\
create_callback_interface_object(cx, receiver, sConstants, %s);""" % str_to_const_array(name))
        protoChain = self.descriptor.prototypeChain
        # The prototype's proto is Object.prototype for root interfaces,
        # otherwise the parent interface's prototype object.
        if len(protoChain) == 1:
            getPrototypeProto = "prototype_proto.ptr = JS_GetObjectPrototype(cx, global)"
        else:
            getPrototypeProto = ("%s::GetProtoObject(cx, global, receiver, prototype_proto.handle_mut())" %
                                 toBindingNamespace(self.descriptor.prototypeChain[-2]))
        code = [CGGeneric("""\
let mut prototype_proto = RootedObject::new(cx, ptr::null_mut());
%s;
assert!(!prototype_proto.ptr.is_null());""" % getPrototypeProto)]
        # Substitution values: each property array renders as Some(name),
        # None, or (for consts) a slice expression.
        properties = {"id": name}
        for arrayName in self.properties.arrayNames():
            array = getattr(self.properties, arrayName)
            if arrayName == "consts":
                if array.length():
                    properties[arrayName] = array.variableName()
                else:
                    properties[arrayName] = "&[]"
            elif array.length():
                properties[arrayName] = "Some(%s)" % array.variableName()
            else:
                properties[arrayName] = "None"
        code.append(CGGeneric("""
let mut prototype = RootedObject::new(cx, ptr::null_mut());
create_interface_prototype_object(cx,
                                  prototype_proto.handle(),
                                  &PrototypeClass,
                                  %(methods)s,
                                  %(attrs)s,
                                  %(consts)s,
                                  prototype.handle_mut());
assert!(!prototype.ptr.is_null());
(*cache)[PrototypeList::ID::%(id)s as usize] = prototype.ptr;
if <*mut JSObject>::needs_post_barrier(prototype.ptr) {
    <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::ID::%(id)s as isize));
}""" % properties))
        if self.descriptor.interface.hasInterfaceObject():
            properties["name"] = str_to_const_array(name)
            if self.descriptor.interface.ctor():
                properties["constructor"] = CONSTRUCT_HOOK_NAME
                properties["length"] = methodLength(self.descriptor.interface.ctor())
            else:
                properties["constructor"] = "throwing_constructor"
                properties["length"] = 0
            # The interface object's proto is the parent's constructor, or
            # Function.prototype for root interfaces.
            if self.descriptor.interface.parent:
                parentName = toBindingNamespace(self.descriptor.getParentName())
                code.append(CGGeneric("""
let mut interface_proto = RootedObject::new(cx, ptr::null_mut());
%s::GetConstructorObject(cx, global, receiver, interface_proto.handle_mut());""" % parentName))
            else:
                code.append(CGGeneric("""
let interface_proto = RootedObject::new(cx, JS_GetFunctionPrototype(cx, global));"""))
            code.append(CGGeneric("""\
assert!(!interface_proto.ptr.is_null());
let mut interface = RootedObject::new(cx, ptr::null_mut());
create_noncallback_interface_object(cx,
                                    receiver,
                                    interface_proto.handle(),
                                    &InterfaceObjectClass,
                                    %(static_methods)s,
                                    %(static_attrs)s,
                                    %(consts)s,
                                    prototype.handle(),
                                    %(name)s,
                                    %(length)s,
                                    interface.handle_mut());
assert!(!interface.ptr.is_null());""" % properties))
            if self.descriptor.hasDescendants():
                code.append(CGGeneric("""\
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.ptr;
if <*mut JSObject>::needs_post_barrier(prototype.ptr) {
    <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize));
}""" % properties))
        constructors = self.descriptor.interface.namedConstructors
        if constructors:
            decl = "let named_constructors: [(NonNullJSNative, &'static [u8], u32); %d]" % len(constructors)
            specs = []
            for constructor in constructors:
                hook = CONSTRUCT_HOOK_NAME + "_" + constructor.identifier.name
                name = str_to_const_array(constructor.identifier.name)
                length = methodLength(constructor)
                specs.append(CGGeneric("(%s as NonNullJSNative, %s, %d)" % (hook, name, length)))
            values = CGIndenter(CGList(specs, "\n"), 4)
            code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];"))
            code.append(CGGeneric("create_named_constructors(cx, receiver, &named_constructors, prototype.handle());"))
        if self.descriptor.hasUnforgeableMembers:
            # We want to use the same JSClass and prototype as the object we'll
            # end up defining the unforgeable properties on in the end, so that
            # we can use JS_InitializePropertiesFromCompatibleNativeObject to do
            # a fast copy. In the case of proxies that's null, because the
            # expando object is a vanilla object, but in the case of other DOM
            # objects it's whatever our class is.
            #
            # Also, for a global we can't use the global's class; just use
            # nullpr and when we do the copy off the holder we'll take a slower
            # path. This also means that we don't need to worry about matching
            # the prototype.
            if self.descriptor.proxy or self.descriptor.isGlobal():
                holderClass = "ptr::null()"
                holderProto = "HandleObject::null()"
            else:
                holderClass = "&Class.base as *const js::jsapi::Class as *const JSClass"
                holderProto = "prototype.handle()"
            code.append(CGGeneric("""
let mut unforgeable_holder = RootedObject::new(cx, ptr::null_mut());
unforgeable_holder.handle_mut().set(
    JS_NewObjectWithoutMetadata(cx, %(holderClass)s, %(holderProto)s));
assert!(!unforgeable_holder.ptr.is_null());
""" % {'holderClass': holderClass, 'holderProto': holderProto}))
            code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties))
            code.append(CGGeneric("""\
JS_SetReservedSlot(prototype.ptr, DOM_PROTO_UNFORGEABLE_HOLDER_SLOT,
                   ObjectValue(&*unforgeable_holder.ptr))"""))
        return CGList(code, "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
    """
    A method for getting a per-interface object (a prototype object or interface
    constructor object).
    """
    def __init__(self, descriptor, name, idPrefix="", pub=False):
        # Signature: (cx, global, receiver, rval). |rval| receives the cached
        # or freshly-created per-interface object.
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'),
                Argument('HandleObject', 'receiver'),
                Argument('MutableHandleObject', 'rval')]
        CGAbstractMethod.__init__(self, descriptor, name,
                                  'void', args, pub=pub, unsafe=True)
        # Index into the global's proto-or-iface cache array, e.g.
        # "PrototypeList::ID::Foo" or "PrototypeList::Constructor::Foo".
        self.id = idPrefix + "::" + self.descriptor.name

    def definition_body(self):
        # Look the object up in the global's cache; on a miss, create all of
        # this interface's objects (which populates the cache) and re-read.
        return CGGeneric("""
/* global and receiver are usually the same, but they can be different
   too. For example a sandbox often has an xray wrapper for a window as the
   prototype of the sandbox's global. In that case receiver is the xray
   wrapper and global is the sandbox's global.
 */
assert!(((*JS_GetClass(global.get())).flags & JSCLASS_DOM_GLOBAL) != 0);

/* Check to see whether the interface objects are already installed */
let proto_or_iface_array = get_proto_or_iface_array(global.get());
rval.set((*proto_or_iface_array)[%(id)s as usize]);
if !rval.get().is_null() {
    return;
}

CreateInterfaceObjects(cx, global, proto_or_iface_array, receiver);
rval.set((*proto_or_iface_array)[%(id)s as usize]);
assert!(!rval.get().is_null());
""" % {"id": self.id})
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface prototype object.
    """
    def __init__(self, descriptor):
        # Prototypes are cached under PrototypeList::ID. The getter is only
        # public when some other interface inherits from this one.
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
                                         "PrototypeList::ID", pub=descriptor.hasDescendants())

    def definition_body(self):
        # Prepend an explanatory comment to the shared lookup/create body.
        return CGList([
            CGGeneric("""\
/* Get the interface prototype object for this class.  This will create the
   object as needed. */"""),
            CGGetPerInterfaceObject.definition_body(self),
        ])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
    """
    A method for getting the interface constructor object.
    """
    def __init__(self, descriptor):
        # Constructors are cached under PrototypeList::Constructor.
        CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
                                         "PrototypeList::Constructor",
                                         pub=descriptor.hasDescendants())

    def definition_body(self):
        # Prepend an explanatory comment to the shared lookup/create body.
        return CGList([
            CGGeneric("""\
/* Get the interface object for this class.  This will create the object as
   needed. */"""),
            CGGetPerInterfaceObject.definition_body(self),
        ])
class CGDefineProxyHandler(CGAbstractMethod):
    """
    A method to create and cache the proxy trap for a given interface.
    """
    def __init__(self, descriptor):
        # Only proxy-based interfaces (indexed/named getters etc.) get one.
        assert descriptor.proxy
        CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler',
                                  '*const libc::c_void', [],
                                  pub=True, unsafe=True)

    def define(self):
        return CGAbstractMethod.define(self)

    def definition_body(self):
        # Use the generated, interface-specific traps only when the IDL
        # declares the corresponding special operations; otherwise fall back
        # to the generic proxyhandler implementations.
        customDefineProperty = 'proxyhandler::define_property'
        if self.descriptor.operations['IndexedSetter'] or self.descriptor.operations['NamedSetter']:
            customDefineProperty = 'defineProperty'

        customDelete = 'proxyhandler::delete'
        if self.descriptor.operations['NamedDeleter']:
            customDelete = 'delete'

        # Build the ProxyTraps struct literal; %s slots are the two custom
        # traps above plus the trace/finalize hooks for this interface.
        body = """\
let traps = ProxyTraps {
    enter: None,
    getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
    defineProperty: Some(%s),
    ownPropertyKeys: Some(own_property_keys),
    delete_: Some(%s),
    enumerate: None,
    preventExtensions: Some(proxyhandler::prevent_extensions),
    isExtensible: Some(proxyhandler::is_extensible),
    has: None,
    get: Some(get),
    set: None,
    call: None,
    construct: None,
    getPropertyDescriptor: Some(get_property_descriptor),
    hasOwn: Some(hasOwn),
    getOwnEnumerablePropertyKeys: None,
    nativeCall: None,
    hasInstance: None,
    objectClassIs: None,
    className: Some(className),
    fun_toString: None,
    boxedValue_unbox: None,
    defaultValue: None,
    trace: Some(%s),
    finalize: Some(%s),
    objectMoved: None,
    isCallable: None,
    isConstructor: None,
};

CreateProxyHandler(&traps, &Class as *const _ as *const _)\
""" % (customDefineProperty, customDelete, TRACE_HOOK_NAME, FINALIZE_HOOK_NAME)
        return CGGeneric(body)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
    """
    A method for resolve hooks to try to lazily define the interface object for
    a given interface.
    """
    def __init__(self, descriptor):
        assert descriptor.interface.hasInterfaceObject()
        args = [
            Argument('*mut JSContext', 'cx'),
            Argument('HandleObject', 'global'),
        ]
        CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface', 'void', args, pub=True)

    def define(self):
        return CGAbstractMethod.define(self)

    def definition_body(self):
        if self.descriptor.interface.isCallback():
            # Callback interfaces have no prototype; just create the
            # interface object directly.
            code = "CreateInterfaceObjects(cx, global);"
        else:
            # Getting the prototype creates it (and the interface object)
            # as a side effect if it doesn't exist yet.
            code = """\
let mut proto = RootedObject::new(cx, ptr::null_mut());
GetProtoObject(cx, global, global, proto.handle_mut());
assert!(!proto.ptr.is_null());
"""
        return CGGeneric("assert!(!global.get().is_null());\n" + code)
def needCx(returnType, arguments, considerTypes):
    """
    Decide whether the generated native call needs a JSContext argument.

    True when type-based consideration is enabled and either the return type
    or any argument type requires a JSContext (per typeNeedsCx).
    """
    if not considerTypes:
        return False
    if typeNeedsCx(returnType, True):
        return True
    return any(typeNeedsCx(arg.type) for arg in arguments)
class CGCallGenerator(CGThing):
    """
    A class to generate an actual call to a C++ object. Assumes that the C++
    object is stored in a variable whose name is given by the |object| argument.

    errorResult should be a string for the value to return in case of an
    exception from the native code, or None if no error reporting is needed.
    """
    def __init__(self, errorResult, arguments, argsPre, returnType,
                 extendedAttributes, descriptorProvider, nativeMethodName,
                 static, object="this"):
        CGThing.__init__(self)

        assert errorResult is None or isinstance(errorResult, str)

        # Fallible calls return Result<T, Error> and need unwrapping below.
        isFallible = errorResult is not None

        result = getRetvalDeclarationForType(returnType, descriptorProvider)
        if isFallible:
            result = CGWrapper(result, pre="Result<", post=", Error>")

        args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
        for (a, name) in arguments:
            # XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
            if a.type.isDictionary():
                name = "&" + name
            args.append(CGGeneric(name))

        # Prepend "cx" when any involved type needs a JSContext and the
        # caller didn't already supply one via argsPre.
        needsCx = needCx(returnType, (a for (a, _) in arguments), True)

        if "cx" not in argsPre and needsCx:
            args.prepend(CGGeneric("cx"))

        # Build up our actual call
        self.cgRoot = CGList([], "\n")

        call = CGGeneric(nativeMethodName)
        if static:
            # Static methods are called as Interface::Method(...).
            call = CGWrapper(call, pre="%s::" % descriptorProvider.interface.identifier.name)
        else:
            # Instance methods are called as this.Method(...).
            call = CGWrapper(call, pre="%s." % object)
        call = CGList([call, CGWrapper(args, pre="(", post=")")])

        self.cgRoot.append(CGList([
            CGGeneric("let result: "),
            result,
            CGGeneric(" = "),
            call,
            CGGeneric(";"),
        ]))

        if isFallible:
            # On Err, throw a DOM exception on cx and bail out with
            # |errorResult| (the JSNative failure value).
            if static:
                glob = ""
            else:
                glob = "        let global = global_root_from_reflector(this);\n"
            self.cgRoot.append(CGGeneric(
                "let result = match result {\n"
                "    Ok(result) => result,\n"
                "    Err(e) => {\n"
                "%s"
                "        throw_dom_exception(cx, global.r(), e);\n"
                "        return%s;\n"
                "    },\n"
                "};" % (glob, errorResult)))

    def define(self):
        return self.cgRoot.define()
class CGPerSignatureCall(CGThing):
    """
    This class handles the guts of generating code for a particular
    call signature.  A call signature consists of four things:

    1) A return type, which can be None to indicate that there is no
       actual return value (e.g. this is an attribute setter) or an
       IDLType if there's an IDL type involved (including |void|).
    2) An argument list, which is allowed to be empty.
    3) A name of a native method to call.
    4) Whether or not this method is static.

    We also need to know whether this is a method or a getter/setter
    to do error reporting correctly.

    The idlNode parameter can be either a method or an attr. We can query
    |idlNode.identifier| in both cases, so we can be agnostic between the two.
    """
    # XXXbz For now each entry in the argument list is either an
    # IDLArgument or a FakeArgument, but longer-term we may want to
    # have ways of flagging things like JSContext* or optional_argc in
    # there.

    def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
                 descriptor, idlNode, argConversionStartsAt=0,
                 getter=False, setter=False):
        CGThing.__init__(self)
        self.returnType = returnType
        self.descriptor = descriptor
        self.idlNode = idlNode
        # Extended attributes ([Throws]/infallible etc.) drive fallibility.
        self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
                                                                   getter=getter,
                                                                   setter=setter)
        self.argsPre = argsPre
        self.arguments = arguments
        self.argCount = len(arguments)
        cgThings = []
        # Emit a JS->native converter per argument; invalid enum values are
        # only fatal outside of setters.
        cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgs(),
                                             self.getArgc(), self.descriptor,
                                             invalidEnumValueFatal=not setter) for
                         i in range(argConversionStartsAt, self.argCount)])

        errorResult = None
        if self.isFallible():
            # JSNative failure value returned when the native call errs.
            errorResult = " false"

        cgThings.append(CGCallGenerator(
            errorResult,
            self.getArguments(), self.argsPre, returnType,
            self.extendedAttributes, descriptor, nativeMethodName,
            static))
        self.cgRoot = CGList(cgThings, "\n")

    def getArgs(self):
        # Name of the CallArgs binding; empty when there's nothing to read.
        return "args" if self.argCount > 0 else ""

    def getArgc(self):
        return "argc"

    def getArguments(self):
        # Pair each IDL argument with the Rust expression naming its
        # converted value ("argN", with ".r()" for rooted DOM interfaces).
        def process(arg, i):
            argVal = "arg" + str(i)
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".r()"
            return argVal
        return [(a, process(a, i)) for (i, a) in enumerate(self.arguments)]

    def isFallible(self):
        return 'infallible' not in self.extendedAttributes

    def wrap_return_value(self):
        # Wrap the native result into args.rval() for return to JS.
        return wrapForType('args.rval()')

    def define(self):
        return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
    """
    A class to generate code for a Rust `match` statement.

    Takes three constructor arguments: an expression, a list of cases,
    and an optional default.

    Each case is a CGCase. The default is a CGThing for the body of
    the default (`_`) arm, if any.
    """
    def __init__(self, expression, cases, default=None):
        indented_cases = [CGIndenter(case) for case in cases]
        CGList.__init__(self, indented_cases, "\n")

        # "match <expression> {" header ahead of the arms.
        header = CGWrapper(CGGeneric(expression), pre="match ", post=" {")
        self.prepend(header)

        # Optional catch-all arm, then the closing brace.
        if default is not None:
            default_arm = CGWrapper(CGIndenter(default),
                                    pre="_ => {\n",
                                    post="\n}")
            self.append(CGIndenter(default_arm))
        self.append(CGGeneric("}"))
class CGCase(CGList):
    """
    A class to generate code for a single `match` arm.

    Takes three constructor arguments: an expression, a CGThing for
    the body (allowed to be None if there is no body), and an optional
    argument (defaulting to False) for whether to fall through.
    Fall-through is not supported and raises TypeError.
    """
    def __init__(self, expression, body, fallThrough=False):
        if fallThrough:
            raise TypeError("fall through required but unsupported")
        CGList.__init__(self, [], "\n")
        # "<expression> => {" header, indented body, closing brace.
        self.append(CGWrapper(CGGeneric(expression), post=" => {"))
        self.append(CGIndenter(CGList([body], "\n")))
        self.append(CGGeneric("}"))
class CGGetterCall(CGPerSignatureCall):
    """
    A class to generate a native object getter call for a particular IDL
    getter.
    """
    def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
        # Getters take no IDL arguments; the attribute type is the return type.
        CGPerSignatureCall.__init__(self, returnType, argsPre, [],
                                    nativeMethodName, attr.isStatic(), descriptor,
                                    attr, getter=True)
class FakeArgument():
    """
    Quacks like an IDLArgument, so that attribute setters and special
    operations can reuse the regular method-call machinery.

    It always models a plain, required, non-variadic argument with no
    default value and no [EnforceRange]/[Clamp].
    """
    def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
        self.type = type
        self.treatNullAs = interfaceMember.treatNullAs
        self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull
        # Fixed characteristics of a fake argument.
        self.optional = False
        self.variadic = False
        self.defaultValue = None
        self.enforceRange = False
        self.clamp = False

    def allowTreatNonCallableAsNull(self):
        # Mirrors IDLArgument's API; driven by the constructor flag.
        return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
    """
    A class to generate a native object setter call for a particular IDL
    setter.
    """
    def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
        # A setter has no return type and exactly one argument: the new
        # value, modeled with a FakeArgument of the attribute's type.
        CGPerSignatureCall.__init__(self, None, argsPre,
                                    [FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)],
                                    nativeMethodName, attr.isStatic(), descriptor, attr,
                                    setter=True)

    def wrap_return_value(self):
        # We have no return value
        return "\nreturn true;"

    def getArgc(self):
        # Setters always receive exactly one argument.
        return "1"
class CGAbstractStaticBindingMethod(CGAbstractMethod):
    """
    Common class to generate the JSNatives for all our static methods, getters
    and setters.  This will generate the function declaration and unwrap the
    global object.  Subclasses are expected to override the generate_code
    function to do the rest of the work.  This function should return a
    CGThing which is already properly indented.
    """
    def __init__(self, descriptor, name):
        # Standard JSNative signature: (cx, argc, vp) -> bool.
        args = [
            Argument('*mut JSContext', 'cx'),
            Argument('libc::c_uint', 'argc'),
            Argument('*mut JSVal', 'vp'),
        ]
        CGAbstractMethod.__init__(self, descriptor, name, "bool", args, extern=True)

    def definition_body(self):
        # Root the global from the callee object before the subclass body.
        preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
""")
        return CGList([preamble, self.generate_code()])

    def generate_code(self):
        raise NotImplementedError  # Override me!
class CGSpecializedMethod(CGAbstractExternMethod):
    """
    A class for generating the C++ code for a specialized method that the JIT
    can call with lower overhead.
    """
    def __init__(self, descriptor, method):
        self.method = method
        name = method.identifier.name
        # JIT-callable signature: (cx, _obj, this, args) -> bool.
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('*const JSJitMethodCallArgs', 'args')]
        CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)

    def definition_body(self):
        nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
                                                        self.method)
        # Dereference the raw pointers, expose argc, then delegate to the
        # generic method-call machinery.
        return CGWrapper(CGMethodCall([], nativeName, self.method.isStatic(),
                                      self.descriptor, self.method),
                         pre="let this = &*this;\n"
                             "let args = &*args;\n"
                             "let argc = args._base.argc_;\n")

    @staticmethod
    def makeNativeName(descriptor, method):
        # Resolve [BinaryName]/internal renames before casing conversion.
        name = method.identifier.name
        nativeName = descriptor.binaryNameFor(name)
        if nativeName == name:
            nativeName = descriptor.internalNameFor(name)
        return MakeNativeName(nativeName)
class CGStaticMethod(CGAbstractStaticBindingMethod):
    """
    A class for generating the Rust code for an IDL static method.
    """
    def __init__(self, descriptor, method):
        self.method = method
        name = method.identifier.name
        CGAbstractStaticBindingMethod.__init__(self, descriptor, name)

    def generate_code(self):
        nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
                                                        self.method)
        # Static methods take the rooted global (set up by the base class's
        # preamble) as their first native argument.
        setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
        call = CGMethodCall(["global.r()"], nativeName, True, self.descriptor, self.method)
        return CGList([setupArgs, call])
class CGSpecializedGetter(CGAbstractExternMethod):
    """
    A class for generating the code for a specialized attribute getter
    that the JIT can call with lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'get_' + descriptor.internalNameFor(attr.identifier.name)
        # JIT-callable getter signature: (cx, _obj, this, args) -> bool.
        args = [Argument('*mut JSContext', 'cx'),
                Argument('HandleObject', '_obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('JSJitGetterCallArgs', 'args')]
        CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)

    def definition_body(self):
        nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
                                                        self.attr)
        # Dereference |this| then delegate to the generic getter call.
        return CGWrapper(CGGetterCall([], self.attr.type, nativeName,
                                      self.descriptor, self.attr),
                         pre="let this = &*this;\n")

    @staticmethod
    def makeNativeName(descriptor, attr):
        name = attr.identifier.name
        nativeName = descriptor.binaryNameFor(name)
        if nativeName == name:
            nativeName = descriptor.internalNameFor(name)
        nativeName = MakeNativeName(nativeName)
        infallible = ('infallible' in
                      descriptor.getExtendedAttributes(attr, getter=True))
        # Nullable or fallible getters use the "GetFoo" naming convention.
        if attr.type.nullable() or not infallible:
            return "Get" + nativeName
        return nativeName
class CGStaticGetter(CGAbstractStaticBindingMethod):
    """
    A class for generating the C++ code for an IDL static attribute getter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'get_' + attr.identifier.name
        CGAbstractStaticBindingMethod.__init__(self, descriptor, name)

    def generate_code(self):
        nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
                                                        self.attr)
        # The rooted global (from the base-class preamble) is the receiver.
        setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
        call = CGGetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
                            self.attr)
        return CGList([setupArgs, call])
class CGSpecializedSetter(CGAbstractExternMethod):
    """
    A class for generating the code for a specialized attribute setter
    that the JIT can call with lower overhead.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'set_' + descriptor.internalNameFor(attr.identifier.name)
        # JIT-callable setter signature: (cx, obj, this, args) -> bool.
        args = [Argument('*mut JSContext', 'cx'),
                Argument('HandleObject', 'obj'),
                Argument('*const %s' % descriptor.concreteType, 'this'),
                Argument('JSJitSetterCallArgs', 'args')]
        CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)

    def definition_body(self):
        nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
                                                        self.attr)
        # Dereference |this| then delegate to the generic setter call.
        return CGWrapper(CGSetterCall([], self.attr.type, nativeName,
                                      self.descriptor, self.attr),
                         pre="let this = &*this;\n")

    @staticmethod
    def makeNativeName(descriptor, attr):
        # Setters always use the "SetFoo" naming convention.
        name = attr.identifier.name
        nativeName = descriptor.binaryNameFor(name)
        if nativeName == name:
            nativeName = descriptor.internalNameFor(name)
        return "Set" + MakeNativeName(nativeName)
class CGStaticSetter(CGAbstractStaticBindingMethod):
    """
    A class for generating the C++ code for an IDL static attribute setter.
    """
    def __init__(self, descriptor, attr):
        self.attr = attr
        name = 'set_' + attr.identifier.name
        CGAbstractStaticBindingMethod.__init__(self, descriptor, name)

    def generate_code(self):
        nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
                                                        self.attr)
        # A setter requires at least one argument; throw a TypeError
        # otherwise before attempting any conversion.
        checkForArg = CGGeneric(
            "let args = CallArgs::from_vp(vp, argc);\n"
            "if argc == 0 {\n"
            "    throw_type_error(cx, \"Not enough arguments to %s setter.\");\n"
            "    return false;\n"
            "}" % self.attr.identifier.name)
        call = CGSetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
                            self.attr)
        return CGList([checkForArg, call])
class CGSpecializedForwardingSetter(CGSpecializedSetter):
    """
    A class for generating the code for an IDL [PutForwards] attribute
    forwarding setter: reads this attribute's current value and sets the
    named property on that object instead.
    """
    def __init__(self, descriptor, attr):
        CGSpecializedSetter.__init__(self, descriptor, attr)

    def definition_body(self):
        attrName = self.attr.identifier.name
        forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0]
        # JS_GetProperty and JS_SetProperty can only deal with ASCII
        assert all(ord(c) < 128 for c in attrName)
        assert all(ord(c) < 128 for c in forwardToAttrName)
        # Get obj.<attrName>; require it to be an object; then forward the
        # incoming value to <attrName>.<forwardToAttrName>.
        return CGGeneric("""\
let mut v = RootedValue::new(cx, UndefinedValue());
if !JS_GetProperty(cx, obj, %s as *const u8 as *const libc::c_char, v.handle_mut()) {
    return false;
}
if !v.ptr.is_object() {
    throw_type_error(cx, "Value.%s is not an object.");
    return false;
}
let target_obj = RootedObject::new(cx, v.ptr.to_object());
JS_SetProperty(cx, target_obj.handle(), %s as *const u8 as *const libc::c_char, args.get(0))
""" % (str_to_const_array(attrName), attrName, str_to_const_array(forwardToAttrName)))
class CGMemberJITInfo(CGThing):
    """
    A class for generating the JITInfo for a property that points to
    our specialized getter and setter.
    """
    def __init__(self, descriptor, member):
        self.member = member
        self.descriptor = descriptor

    def defineJitInfo(self, infoName, opName, opType, infallible, movable,
                      aliasSet, alwaysInSlot, lazilyInSlot, slotIndex,
                      returnTypes, args):
        """
        aliasSet is a JSJitInfo::AliasSet value, without the "JSJitInfo::" bit.

        args is None if we don't want to output argTypes for some
        reason (e.g. we have overloads or we're not a method) and
        otherwise an iterable of the arguments for this method.
        """
        assert not movable or aliasSet != "AliasEverything"  # Can't move write-aliasing things
        assert not alwaysInSlot or movable  # Things always in slots had better be movable

        def jitInfoInitializer(isTypedMethod):
            # Struct literal for the JSJitInfo bitfield; the bit order must
            # match JSJitInfo::new_bitfield_1.
            initializer = fill(
                """
                JSJitInfo {
                    call: ${opName} as *const ::libc::c_void,
                    protoID: PrototypeList::ID::${name} as u16,
                    depth: ${depth},
                    _bitfield_1:
                        JSJitInfo::new_bitfield_1(
                            OpType::${opType} as u8,
                            AliasSet::${aliasSet} as u8,
                            JSValueType::${returnType} as u8,
                            ${isInfallible},
                            ${isMovable},
                            ${isAlwaysInSlot},
                            ${isLazilyCachedInSlot},
                            ${isTypedMethod},
                            ${slotIndex} as u16,
                        )
                }
                """,
                opName=opName,
                name=self.descriptor.name,
                depth=self.descriptor.interface.inheritanceDepth(),
                opType=opType,
                aliasSet=aliasSet,
                returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
                                  ""),
                isInfallible=toStringBool(infallible),
                isMovable=toStringBool(movable),
                isAlwaysInSlot=toStringBool(alwaysInSlot),
                isLazilyCachedInSlot=toStringBool(lazilyInSlot),
                isTypedMethod=toStringBool(isTypedMethod),
                slotIndex=slotIndex)
            return initializer.rstrip()

        if args is not None:
            # Typed methods additionally carry a JSVAL-type list for each
            # argument, terminated by ArgTypeListEnd.
            argTypes = "%s_argTypes" % infoName
            args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args]
            args.append("ArgType::ArgTypeListEnd as i32")
            argTypesDecl = (
                "const %s: [i32; %d] = [ %s ];\n" %
                (argTypes, len(args), ", ".join(args)))
            return fill(
                """
                $*{argTypesDecl}
                const ${infoName}: JSTypedMethodJitInfo = JSTypedMethodJitInfo {
                    base: ${jitInfo},
                    argTypes: &${argTypes} as *const _ as *const ArgType,
                };
                """,
                argTypesDecl=argTypesDecl,
                infoName=infoName,
                jitInfo=indent(jitInfoInitializer(True)),
                argTypes=argTypes)

        return ("\n"
                "const %s: JSJitInfo = %s;\n"
                % (infoName, jitInfoInitializer(False)))

    def define(self):
        """
        Emit the JITInfo constant(s) for this member: getter (+ setter when
        writable or [PutForwards]) for attributes, or a method info for
        operations.  Raises TypeError for any other member kind.
        """
        if self.member.isAttr():
            internalMemberName = self.descriptor.internalNameFor(self.member.identifier.name)
            getterinfo = ("%s_getterinfo" % internalMemberName)
            getter = ("get_%s" % internalMemberName)
            getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
            movable = self.mayBeMovable() and getterinfal
            aliasSet = self.aliasSet()

            isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot")
            if self.member.slotIndex is not None:
                assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached")
                isLazilyCachedInSlot = not isAlwaysInSlot
                slotIndex = memberReservedSlot(self.member)  # noqa:FIXME: memberReservedSlot is not defined
                # We'll statically assert that this is not too big in
                # CGUpdateMemberSlotsMethod, in the case when
                # isAlwaysInSlot is true.
            else:
                isLazilyCachedInSlot = False
                slotIndex = "0"

            result = self.defineJitInfo(getterinfo, getter, "Getter",
                                        getterinfal, movable, aliasSet,
                                        isAlwaysInSlot, isLazilyCachedInSlot,
                                        slotIndex,
                                        [self.member.type], None)
            if (not self.member.readonly or self.member.getExtendedAttribute("PutForwards")):
                setterinfo = ("%s_setterinfo" % internalMemberName)
                setter = ("set_%s" % internalMemberName)
                # Setters are always fallible, since they have to do a typed unwrap.
                result += self.defineJitInfo(setterinfo, setter, "Setter",
                                             False, False, "AliasEverything",
                                             False, False, "0",
                                             [BuiltinTypes[IDLBuiltinType.Types.void]],
                                             None)
            return result
        if self.member.isMethod():
            methodinfo = ("%s_methodinfo" % self.member.identifier.name)
            method = ("%s" % self.member.identifier.name)

            # Methods are infallible if they are infallible, have no arguments
            # to unwrap, and have a return type that's infallible to wrap up for
            # return.
            sigs = self.member.signatures()
            if len(sigs) != 1:
                # Don't handle overloading. If there's more than one signature,
                # one of them must take arguments.
                methodInfal = False
                args = None
                movable = False
            else:
                sig = sigs[0]
                # For methods that affect nothing, it's OK to set movable to our
                # notion of infallible on the C++ side, without considering
                # argument conversions, since argument conversions that can
                # reliably throw would be effectful anyway and the jit doesn't
                # move effectful things.
                hasInfallibleImpl = "infallible" in self.descriptor.getExtendedAttributes(self.member)
                movable = self.mayBeMovable() and hasInfallibleImpl
                # XXXbz can we move the smarts about fallibility due to arg
                # conversions into the JIT, using our new args stuff?
                if (len(sig[1]) != 0):
                    # We have arguments or our return-value boxing can fail
                    methodInfal = False
                else:
                    methodInfal = hasInfallibleImpl
                # For now, only bother to output args if we're side-effect-free.
                if self.member.affects == "Nothing":
                    args = sig[1]
                else:
                    args = None

            aliasSet = self.aliasSet()
            result = self.defineJitInfo(methodinfo, method, "Method",
                                        methodInfal, movable, aliasSet,
                                        False, False, "0",
                                        [s[0] for s in sigs], args)
            return result
        # Fixed: message previously referred to the nonexistent
        # "CGPropertyJITInfo".
        raise TypeError("Illegal member type to CGMemberJITInfo")

    def mayBeMovable(self):
        """
        Returns whether this attribute or method may be movable, just
        based on Affects/DependsOn annotations.
        """
        affects = self.member.affects
        dependsOn = self.member.dependsOn
        assert affects in IDLInterfaceMember.AffectsValues
        assert dependsOn in IDLInterfaceMember.DependsOnValues
        # Things that are DependsOn=DeviceState are not movable, because we
        # don't want them coalesced with each other or loop-hoisted, since
        # their return value can change even if nothing is going on from our
        # point of view.
        return (affects == "Nothing" and
                (dependsOn != "Everything" and dependsOn != "DeviceState"))

    def aliasSet(self):
        """Returns the alias set to store in the jitinfo.  This may not be the
        effective alias set the JIT uses, depending on whether we have enough
        information about our args to allow the JIT to prove that effectful
        argument conversions won't happen.
        """
        dependsOn = self.member.dependsOn
        assert dependsOn in IDLInterfaceMember.DependsOnValues

        if dependsOn == "Nothing" or dependsOn == "DeviceState":
            assert self.member.affects == "Nothing"
            return "AliasNone"
        if dependsOn == "DOMState":
            assert self.member.affects == "Nothing"
            return "AliasDOMSets"
        return "AliasEverything"

    @staticmethod
    def getJSReturnTypeTag(t):
        """Map an IDL return type to the JSVAL_TYPE_* tag the JIT expects."""
        if t.nullable():
            # Sometimes it might return null, sometimes not
            return "JSVAL_TYPE_UNKNOWN"
        if t.isVoid():
            # No return, every time
            return "JSVAL_TYPE_UNDEFINED"
        if t.isArray():
            # No idea yet
            assert False
        if t.isSequence():
            return "JSVAL_TYPE_OBJECT"
        if t.isMozMap():
            return "JSVAL_TYPE_OBJECT"
        if t.isGeckoInterface():
            return "JSVAL_TYPE_OBJECT"
        if t.isString():
            return "JSVAL_TYPE_STRING"
        if t.isEnum():
            return "JSVAL_TYPE_STRING"
        if t.isCallback():
            return "JSVAL_TYPE_OBJECT"
        if t.isAny():
            # The whole point is to return various stuff
            return "JSVAL_TYPE_UNKNOWN"
        if t.isObject():
            return "JSVAL_TYPE_OBJECT"
        if t.isSpiderMonkeyInterface():
            return "JSVAL_TYPE_OBJECT"
        if t.isUnion():
            u = t.unroll()
            if u.hasNullableType:
                # Might be null or not
                return "JSVAL_TYPE_UNKNOWN"
            return reduce(CGMemberJITInfo.getSingleReturnType,
                          u.flatMemberTypes, "")
        if t.isDictionary():
            return "JSVAL_TYPE_OBJECT"
        if t.isDate():
            return "JSVAL_TYPE_OBJECT"
        if not t.isPrimitive():
            raise TypeError("No idea what type " + str(t) + " is.")
        tag = t.tag()
        if tag == IDLType.Tags.bool:
            return "JSVAL_TYPE_BOOLEAN"
        if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
                   IDLType.Tags.int16, IDLType.Tags.uint16,
                   IDLType.Tags.int32]:
            return "JSVAL_TYPE_INT32"
        if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
                   IDLType.Tags.unrestricted_float, IDLType.Tags.float,
                   IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
            # These all use JS_NumberValue, which can return int or double.
            # But TI treats "double" as meaning "int or double", so we're
            # good to return JSVAL_TYPE_DOUBLE here.
            return "JSVAL_TYPE_DOUBLE"
        if tag != IDLType.Tags.uint32:
            raise TypeError("No idea what type " + str(t) + " is.")
        # uint32 is sometimes int and sometimes double.
        return "JSVAL_TYPE_DOUBLE"

    @staticmethod
    def getSingleReturnType(existingType, t):
        """Fold one more member type into an accumulated JSVAL_TYPE_* tag."""
        type = CGMemberJITInfo.getJSReturnTypeTag(t)
        if existingType == "":
            # First element of the list; just return its type
            return type

        if type == existingType:
            return existingType
        if ((type == "JSVAL_TYPE_DOUBLE" and
             existingType == "JSVAL_TYPE_INT32") or
            (existingType == "JSVAL_TYPE_DOUBLE" and
             type == "JSVAL_TYPE_INT32")):
            # Promote INT32 to DOUBLE as needed
            return "JSVAL_TYPE_DOUBLE"
        # Different types
        return "JSVAL_TYPE_UNKNOWN"

    @staticmethod
    def getJSArgType(t):
        """Map an IDL argument type to an ArgType bitmask expression."""
        assert not t.isVoid()
        if t.nullable():
            # Sometimes it might return null, sometimes not
            return "ArgType::Null as i32 | %s" % CGMemberJITInfo.getJSArgType(t.inner)
        if t.isArray():
            # No idea yet
            assert False
        if t.isSequence():
            return "ArgType::Object as i32"
        if t.isGeckoInterface():
            return "ArgType::Object as i32"
        if t.isString():
            return "ArgType::String as i32"
        if t.isEnum():
            return "ArgType::String as i32"
        if t.isCallback():
            return "ArgType::Object as i32"
        if t.isAny():
            # The whole point is to return various stuff
            return "ArgType::Any as i32"
        if t.isObject():
            return "ArgType::Object as i32"
        if t.isSpiderMonkeyInterface():
            return "ArgType::Object as i32"
        if t.isUnion():
            u = t.unroll()
            # NOTE(review): this emits "JSJitInfo::Null" while every other
            # branch uses the "ArgType::" enum -- confirm against the
            # generated Rust whether this should be "ArgType::Null as i32".
            type = "JSJitInfo::Null as i32" if u.hasNullableType else ""
            return reduce(CGMemberJITInfo.getSingleArgType,
                          u.flatMemberTypes, type)
        if t.isDictionary():
            return "ArgType::Object as i32"
        if t.isDate():
            return "ArgType::Object as i32"
        if not t.isPrimitive():
            raise TypeError("No idea what type " + str(t) + " is.")
        tag = t.tag()
        if tag == IDLType.Tags.bool:
            return "ArgType::Boolean as i32"
        if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
                   IDLType.Tags.int16, IDLType.Tags.uint16,
                   IDLType.Tags.int32]:
            return "ArgType::Integer as i32"
        if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
                   IDLType.Tags.unrestricted_float, IDLType.Tags.float,
                   IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
            # These all use JS_NumberValue, which can return int or double.
            # But TI treats "double" as meaning "int or double", so we're
            # good to return JSVAL_TYPE_DOUBLE here.
            return "ArgType::Double as i32"
        if tag != IDLType.Tags.uint32:
            raise TypeError("No idea what type " + str(t) + " is.")
        # uint32 is sometimes int and sometimes double.
        return "ArgType::Double as i32"

    @staticmethod
    def getSingleArgType(existingType, t):
        """Fold one more member type into an accumulated ArgType expression."""
        type = CGMemberJITInfo.getJSArgType(t)
        if existingType == "":
            # First element of the list; just return its type
            return type

        if type == existingType:
            return existingType
        return "%s | %s" % (existingType, type)
def getEnumValueName(value):
    """
    Map a WebIDL enum string value to a Rust-safe variant name.

    Some enum values can be empty strings. Others might have weird
    characters in them. Deal with the former by returning "_empty",
    deal with possible name collisions from that by throwing if the
    enum value is actually "_empty", and throw on any value
    containing non-ASCII chars for now. Replace all chars other than
    [0-9A-Za-z_] with '_'.

    Raises SyntaxError for non-ASCII values, digit-leading values,
    C++-reserved names, and the literal "_empty".
    """
    # re.search (not re.match) so a non-printable/non-ASCII char *anywhere*
    # in the value is rejected, as the comment above intends; re.match only
    # inspected the first character.
    if re.search("[^\x20-\x7E]", value):
        raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
    if re.match("^[0-9]", value):
        raise SyntaxError('Enum value "' + value + '" starts with a digit')
    value = re.sub(r'[^0-9A-Za-z_]', '_', value)
    # NOTE(review): this anchored match only rejects a *leading* "_[A-Z]" or
    # "__"; the C++ spec also reserves identifiers containing "__" anywhere.
    # Confirm whether interior "__" should be rejected too before changing.
    if re.match("^_[A-Z]|__", value):
        raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
    if value == "_empty":
        raise SyntaxError('"_empty" is not an IDL enum value we support yet')
    if value == "":
        return "_empty"
    return MakeNativeName(value)
class CGEnum(CGThing):
    """
    Generates the Rust enum for a WebIDL enum plus a companion
    "<Name>Values" module containing the string table and a
    ToJSValConvertible impl that converts a variant back to its JS string.
    """
    def __init__(self, enum):
        CGThing.__init__(self)

        # The Rust enum declaration; variant names are sanitized via
        # getEnumValueName.
        decl = """\
#[repr(usize)]
#[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf)]
pub enum %s {
    %s
}
""" % (enum.identifier.name, ",\n    ".join(map(getEnumValueName, enum.values())))

        # The companion module: original IDL strings (indexed by variant
        # discriminant) and the JS conversion.
        inner = """\
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::{JSContext, MutableHandleValue};
use js::jsval::JSVal;

pub const strings: &'static [&'static str] = &[
    %s,
];

impl ToJSValConvertible for super::%s {
    unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
        strings[*self as usize].to_jsval(cx, rval);
    }
}
""" % (",\n    ".join(['"%s"' % val for val in enum.values()]), enum.identifier.name)

        self.cgRoot = CGList([
            CGGeneric(decl),
            CGNamespace.build([enum.identifier.name + "Values"],
                              CGIndenter(CGGeneric(inner)), public=True),
        ])

    def define(self):
        return self.cgRoot.define()
def convertConstIDLValueToRust(value):
    """
    Convert an IDL constant value to its Rust literal representation.

    Numeric tags are rendered with str(); booleans via toStringBool.
    Raises TypeError for any tag without a Rust literal form.
    """
    tag = value.type.tag()
    if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
               IDLType.Tags.int16, IDLType.Tags.uint16,
               IDLType.Tags.int32, IDLType.Tags.uint32,
               IDLType.Tags.int64, IDLType.Tags.uint64,
               IDLType.Tags.unrestricted_float, IDLType.Tags.float,
               IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
        return str(value.value)

    if tag == IDLType.Tags.bool:
        return toStringBool(value.value)

    # str(value.type): concatenating the IDLType object directly would itself
    # raise "can only concatenate str" and mask the intended error message.
    raise TypeError("Const value of unhandled type: " + str(value.type))
class CGConstant(CGThing):
    """Generates `pub const NAME: TYPE = VALUE;` lines for IDL constants."""

    def __init__(self, constants):
        CGThing.__init__(self)
        self.constants = constants

    def define(self):
        # One declaration per constant; the Rust type comes from the
        # builtinNames table keyed by the constant's IDL type tag.
        decls = [
            CGGeneric("pub const %s: %s = %s;\n"
                      % (const.identifier.name,
                         builtinNames[const.value.type.tag()],
                         convertConstIDLValueToRust(const.value)))
            for const in self.constants
        ]
        return CGIndenter(CGList(decls)).define()
def getUnionTypeTemplateVars(type, descriptorProvider):
    """
    Compute the per-member-type variables used when generating the Rust
    enum for a union: the variant name, the Rust type it wraps, and the
    JS-to-native conversion code used by the TryConvertTo<Name> helper.
    """
    # For dictionaries and sequences we need to pass None as the failureCode
    # for getJSToNativeConversionInfo.
    # Also, for dictionaries we would need to handle conversion of
    # null/undefined to the dictionary correctly.
    if type.isDictionary() or type.isSequence():
        raise TypeError("Can't handle dictionaries or sequences in unions")

    if type.isGeckoInterface():
        name = type.inner.identifier.name
        typeName = descriptorProvider.getDescriptor(name).returnType
    elif type.isEnum():
        name = type.inner.identifier.name
        typeName = name
    elif type.isArray() or type.isSequence():
        # Sequences were already rejected above, so in practice only
        # arrays can reach this branch.
        name = str(type)
        # XXXjdm dunno about typeName here
        typeName = "/*" + type.name + "*/"
    elif type.isDOMString():
        name = type.name
        typeName = "DOMString"
    elif type.isUSVString():
        name = type.name
        typeName = "USVString"
    elif type.isPrimitive():
        name = type.name
        typeName = builtinNames[type.tag()]
    else:
        name = type.name
        typeName = "/*" + type.name + "*/"

    # failureCode returns Ok(None) so the caller can fall through and try
    # the next member type of the union.
    info = getJSToNativeConversionInfo(
        type, descriptorProvider, failureCode="return Ok(None);",
        exceptionCode='return Err(());',
        isDefinitelyObject=True)
    template = info.template

    assert not type.isObject()
    jsConversion = string.Template(template).substitute({
        "val": "value",
    })
    # A successful conversion is wrapped in Ok(Some(...)).
    jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")

    return {
        "name": name,
        "typeName": typeName,
        "jsConversion": jsConversion,
    }
class CGUnionStruct(CGThing):
    """
    Generates the Rust enum representing an IDL union type (one variant
    per flattened member type) together with its ToJSValConvertible impl.
    """
    def __init__(self, type, descriptorProvider):
        # Nullable unions are handled elsewhere; this type must be plain.
        assert not type.nullable()
        assert not type.hasNullableType

        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def define(self):
        templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
                           self.type.flatMemberTypes)
        # Variant lines, e.g. "eLong(i32),".
        enumValues = [
            "    e%s(%s)," % (v["name"], v["typeName"]) for v in templateVars
        ]
        # Match arms delegating to the inner value's to_jsval.
        enumConversions = [
            "            %s::e%s(ref inner) => inner.to_jsval(cx, rval),"
            % (self.type, v["name"]) for v in templateVars
        ]
        return ("""\
pub enum %s {
%s
}
impl ToJSValConvertible for %s {
    unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
        match *self {
%s
        }
    }
}
""") % (self.type, "\n".join(enumValues), self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
    """
    Generates the FromJSValConvertible impl for an IDL union plus one
    TryConvertTo<Name> helper per member type.  Each helper attempts one
    member conversion and returns Ok(None) on a soft mismatch so that
    from_jsval can fall through to the next member.
    """
    def __init__(self, type, descriptorProvider):
        assert not type.nullable()
        assert not type.hasNullableType

        CGThing.__init__(self)
        self.type = type
        self.descriptorProvider = descriptorProvider

    def from_jsval(self):
        """Build the CGThing for the from_jsval method of the impl."""
        memberTypes = self.type.flatMemberTypes
        names = []
        conversions = []

        # Non-callback interface members: tried first, under an
        # is_object() guard.
        interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
        if len(interfaceMemberTypes) > 0:
            def get_name(memberType):
                # NOTE(review): this tests self.type (the union itself),
                # not memberType, so it presumably always falls through to
                # memberType.name — confirm whether memberType was intended.
                if self.type.isGeckoInterface():
                    return memberType.inner.identifier.name

                return memberType.name

            def get_match(name):
                return (
                    "match %s::TryConvertTo%s(cx, value) {\n"
                    "    Err(_) => return Err(()),\n"
                    "    Ok(Some(value)) => return Ok(%s::e%s(value)),\n"
                    "    Ok(None) => (),\n"
                    "}\n") % (self.type, name, self.type, name)

            typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
            interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
            names.extend(typeNames)
        else:
            interfaceObject = None

        # The following member categories are not supported yet; each one
        # raises immediately if present so unsupported IDL fails loudly.
        arrayObjectMemberTypes = filter(lambda t: t.isArray() or t.isSequence(), memberTypes)
        if len(arrayObjectMemberTypes) > 0:
            assert len(arrayObjectMemberTypes) == 1
            raise TypeError("Can't handle arrays or sequences in unions.")
        else:
            arrayObject = None

        dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
        if len(dateObjectMemberTypes) > 0:
            assert len(dateObjectMemberTypes) == 1
            raise TypeError("Can't handle dates in unions.")
        else:
            dateObject = None

        callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
        if len(callbackMemberTypes) > 0:
            assert len(callbackMemberTypes) == 1
            raise TypeError("Can't handle callbacks in unions.")
        else:
            callbackObject = None

        dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
        if len(dictionaryMemberTypes) > 0:
            raise TypeError("No support for unwrapping dictionaries as member "
                            "of a union")
        else:
            dictionaryObject = None

        # Both are always None here (the raises above fire otherwise).
        if callbackObject or dictionaryObject:
            assert False, "Not currently supported"
        else:
            nonPlatformObject = None

        objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
        if len(objectMemberTypes) > 0:
            raise TypeError("Can't handle objects in unions.")
        else:
            object = None

        hasObjectTypes = interfaceObject or arrayObject or dateObject or nonPlatformObject or object
        if hasObjectTypes:
            # Only interface members can currently populate this branch.
            assert interfaceObject

            templateBody = CGList([interfaceObject], "\n")
            conversions.append(CGIfWrapper("value.get().is_object()", templateBody))

        # At most one primitive/string/enum member is allowed; it is tried
        # after the object-typed members.
        otherMemberTypes = [
            t for t in memberTypes if t.isPrimitive() or t.isString() or t.isEnum()
        ]
        if len(otherMemberTypes) > 0:
            assert len(otherMemberTypes) == 1
            memberType = otherMemberTypes[0]
            if memberType.isEnum():
                name = memberType.inner.identifier.name
            else:
                name = memberType.name
            match = (
                "match %s::TryConvertTo%s(cx, value) {\n"
                "    Err(_) => return Err(()),\n"
                "    Ok(Some(value)) => return Ok(%s::e%s(value)),\n"
                "    Ok(None) => (),\n"
                "}\n") % (self.type, name, self.type, name)
            conversions.append(CGGeneric(match))
            names.append(name)

        # If nothing matched, throw listing the names that were tried.
        conversions.append(CGGeneric(
            "throw_not_in_union(cx, \"%s\");\n"
            "Err(())" % ", ".join(names)))
        method = CGWrapper(
            CGIndenter(CGList(conversions, "\n\n")),
            pre="unsafe fn from_jsval(cx: *mut JSContext,\n"
                "                     value: HandleValue, _option: ()) -> Result<%s, ()> {\n" % self.type,
            post="\n}")
        return CGWrapper(
            CGIndenter(CGList([
                CGGeneric("type Config = ();"),
                method,
            ], "\n")),
            pre="impl FromJSValConvertible for %s {\n" % self.type,
            post="\n}")

    def try_method(self, t):
        """Build one TryConvertTo<Name> helper for member type `t`."""
        templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
        returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
        jsConversion = templateVars["jsConversion"]

        return CGWrapper(
            CGIndenter(jsConversion, 4),
            pre="unsafe fn TryConvertTo%s(cx: *mut JSContext, value: HandleValue) -> %s {\n" % (t.name, returnType),
            post="\n}")

    def define(self):
        from_jsval = self.from_jsval()
        methods = CGIndenter(CGList([
            self.try_method(t) for t in self.type.flatMemberTypes
        ], "\n\n"))
        return """
%s
impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
    """
    Base class for the pieces that make up a CGClass (members, methods,
    constructors, bases).  Holds a name and a visibility; subclasses must
    implement declare() and define().
    """

    def __init__(self, name, visibility):
        self.name = name
        self.visibility = visibility

    def declare(self, cgClass):
        # Subclasses must override.
        assert False

    def define(self, cgClass):
        # Subclasses must override.
        assert False
class ClassBase(ClassItem):
    """A base-class entry of a CGClass; declares as "<visibility> <name>"."""

    def __init__(self, name, visibility='pub'):
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return '{0} {1}'.format(self.visibility, self.name)

    def define(self, cgClass):
        # Bases only appear in the struct declaration, never out-of-line.
        return ''
class ClassMethod(ClassItem):
    """
    A method of a CGClass, rendered as a Rust `fn` inside the impl block.
    Several parameters (inline, virtual, const, override, templateArgs)
    are leftovers from the C++ codegen this was ported from.
    """
    def __init__(self, name, returnType, args, inline=False, static=False,
                 virtual=False, const=False, bodyInHeader=False,
                 templateArgs=None, visibility='public', body=None,
                 breakAfterReturnDecl="\n",
                 breakAfterSelf="\n", override=False):
        """
        override indicates whether to flag the method as MOZ_OVERRIDE
        """
        assert not override or virtual
        assert not (override and static)
        self.returnType = returnType
        self.args = args
        # NOTE(review): `inline` is deliberately forced off and
        # `bodyInHeader` forced on below, ignoring the parameters —
        # presumably because Rust output always carries the body inline.
        # Confirm before honouring the parameters.
        self.inline = False
        self.static = static
        self.virtual = virtual
        self.const = const
        self.bodyInHeader = True
        self.templateArgs = templateArgs
        self.body = body
        self.breakAfterReturnDecl = breakAfterReturnDecl
        self.breakAfterSelf = breakAfterSelf
        self.override = override
        ClassItem.__init__(self, name, visibility)

    def getDecorators(self, declaring):
        # Space-separated decorator prefix ("inline", "static", "virtual").
        decorators = []
        if self.inline:
            decorators.append('inline')
        if declaring:
            if self.static:
                decorators.append('static')
            if self.virtual:
                decorators.append('virtual')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getBody(self):
        # Override me or pass a string to constructor
        assert self.body is not None
        return self.body

    def declare(self, cgClass):
        templateClause = '<%s>' % ', '.join(self.templateArgs) \
                         if self.bodyInHeader and self.templateArgs else ''
        args = ', '.join([a.declare() for a in self.args])
        if self.bodyInHeader:
            body = CGIndenter(CGGeneric(self.getBody())).define()
            body = ' {\n' + body + '\n}'
        else:
            body = ';'

        return string.Template(
            "${decorators}%s"
            "${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
            (self.breakAfterReturnDecl, self.breakAfterSelf)
        ).substitute({
            'templateClause': templateClause,
            'decorators': self.getDecorators(True),
            'returnType': (" -> %s" % self.returnType) if self.returnType else "",
            'name': self.name,
            'const': ' const' if self.const else '',
            'override': ' MOZ_OVERRIDE' if self.override else '',
            'args': args,
            'body': body,
            'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
        })

    def define(self, cgClass):
        # Methods are always emitted in declare() (bodyInHeader is True).
        pass
class ClassConstructor(ClassItem):
    """
    Used for adding a constructor to a CGClass.

    args is a list of Argument objects that are the arguments taken by the
    constructor.

    inline should be True if the constructor should be marked inline.

    bodyInHeader should be True if the body should be placed in the class
    declaration in the header.

    visibility determines the visibility of the constructor (public,
    protected, private), defaults to private.

    explicit should be True if the constructor should be marked explicit.

    baseConstructors is a list of strings containing calls to base constructors,
    defaults to None.

    body contains a string with the code for the constructor, defaults to empty.
    """
    def __init__(self, args, inline=False, bodyInHeader=False,
                 visibility="priv", explicit=False, baseConstructors=None,
                 body=""):
        self.args = args
        # NOTE(review): `inline` is deliberately forced off here —
        # presumably a leftover from the C++ codegen this was ported from.
        self.inline = False
        self.bodyInHeader = bodyInHeader
        self.explicit = explicit
        self.baseConstructors = baseConstructors or []
        self.body = body
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        # Space-separated decorator prefix ("explicit", "inline").
        decorators = []
        if self.explicit:
            decorators.append('explicit')
        if self.inline and declaring:
            decorators.append('inline')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getInitializationList(self, cgClass):
        # C++-style member-initializer list; only used by the define()
        # path below, which is itself C++-flavoured legacy.
        items = [str(c) for c in self.baseConstructors]
        for m in cgClass.members:
            if not m.static:
                initialize = m.body
                if initialize:
                    items.append(m.name + "(" + initialize + ")")

        if len(items) > 0:
            return '\n  : ' + ',\n    '.join(items)
        return ''

    def getBody(self, cgClass):
        # Rust constructor body: allocate the callback object in an Rc and
        # initialize its parent with the first constructor argument.
        initializers = ["    parent: %s" % str(self.baseConstructors[0])]
        return (self.body + (
            "let mut ret = Rc::new(%s {\n"
            "%s\n"
            "});\n"
            "// Note: callback cannot be moved after calling init.\n"
            "match Rc::get_mut(&mut ret) {\n"
            "    Some(ref mut callback) => callback.parent.init(%s),\n"
            "    None => unreachable!(),\n"
            "};\n"
            "ret") % (cgClass.name, '\n'.join(initializers), self.args[0].name))

    def declare(self, cgClass):
        args = ', '.join([a.declare() for a in self.args])
        body = '    ' + self.getBody(cgClass)
        body = stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'
        body = ' {\n' + body + '}'

        return string.Template("""\
pub fn ${decorators}new(${args}) -> Rc<${className}>${body}
""").substitute({'decorators': self.getDecorators(True),
                 'className': cgClass.getNameString(),
                 'args': args,
                 'body': body})

    def define(self, cgClass):
        if self.bodyInHeader:
            return ''

        args = ', '.join([a.define() for a in self.args])

        # BUG FIX: getBody() requires the cgClass argument; the original
        # called `self.getBody()` here, which raised TypeError whenever
        # define() actually ran (bodyInHeader False).
        body = '    ' + self.getBody(cgClass)
        body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n  '))
        if len(body) > 0:
            body += '\n'

        return string.Template("""\
${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({'decorators': self.getDecorators(False),
                 'className': cgClass.getNameString(),
                 'args': args,
                 'initializationList': self.getInitializationList(cgClass),
                 'body': body})
class ClassMember(ClassItem):
    """A data member of a CGClass; declares as a struct field."""

    def __init__(self, name, type, visibility="priv", static=False,
                 body=None):
        self.type = type
        self.static = static
        self.body = body
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        # Field line inside the struct declaration.
        return '%s %s: %s,\n' % (self.visibility, self.name, self.type)

    def define(self, cgClass):
        # Only static members get an out-of-line (C++-style) definition.
        if not self.static:
            return ''
        initializer = " = " + self.body if self.body else ""
        return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
                                   self.name, initializer)
class CGClass(CGThing):
    """
    Emits a Rust `pub struct` declaration plus an `impl` block from lists
    of ClassItem pieces (bases, members, constructors, methods, ...).
    Many parameters (templateArgs, destructor, disallowCopyConstruction)
    are C++ codegen leftovers that are rarely used for Rust output.
    """
    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls; safe only as long as callers never mutate them.
    def __init__(self, name, bases=[], members=[], constructors=[],
                 destructor=None, methods=[],
                 typedefs=[], enums=[], unions=[], templateArgs=[],
                 templateSpecialization=[],
                 disallowCopyConstruction=False, indent='',
                 decorators='',
                 extradeclarations=''):
        CGThing.__init__(self)
        self.name = name
        self.bases = bases
        self.members = members
        self.constructors = constructors
        # We store our single destructor in a list, since all of our
        # code wants lists of members.
        self.destructors = [destructor] if destructor else []
        self.methods = methods
        self.typedefs = typedefs
        self.enums = enums
        self.unions = unions
        self.templateArgs = templateArgs
        self.templateSpecialization = templateSpecialization
        self.disallowCopyConstruction = disallowCopyConstruction
        self.indent = indent
        self.decorators = decorators
        self.extradeclarations = extradeclarations

    def getNameString(self):
        # Class name with any template specialization appended.
        className = self.name
        if self.templateSpecialization:
            className = className + \
                '<%s>' % ', '.join([str(a) for a
                                    in self.templateSpecialization])
        return className

    def define(self):
        result = ''
        if self.templateArgs:
            templateArgs = [a.declare() for a in self.templateArgs]
            templateArgs = templateArgs[len(self.templateSpecialization):]
            result = result + self.indent + 'template <%s>\n' % ','.join([str(a) for a in templateArgs])

        if self.templateSpecialization:
            specialization = \
                '<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
        else:
            specialization = ''

        myself = ''
        if self.decorators != '':
            myself += self.decorators + '\n'
        myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
        result += myself

        assert len(self.bases) == 1  # XXjdm Can we support multiple inheritance?

        result += ' {\n'

        # Single inheritance is modelled as an embedded "parent" field.
        # NOTE(review): this mutates self.members, so define() is not
        # idempotent — calling it twice would prepend "parent" twice.
        if self.bases:
            self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members

        result += CGIndenter(CGGeneric(self.extradeclarations),
                             len(self.indent)).define()

        def declareMembers(cgClass, memberList):
            # Concatenate the indented declarations of each item.
            result = ''

            for member in memberList:
                declaration = member.declare(cgClass)
                declaration = CGIndenter(CGGeneric(declaration)).define()
                result = result + declaration
            return result

        if self.disallowCopyConstruction:
            # C++-only: emit deleted copy constructor/assignment.
            class DisallowedCopyConstructor(object):
                def __init__(self):
                    self.visibility = "private"

                def declare(self, cgClass):
                    name = cgClass.getNameString()
                    return ("%s(const %s&) MOZ_DELETE;\n"
                            "void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))

            disallowedCopyConstructors = [DisallowedCopyConstructor()]
        else:
            disallowedCopyConstructors = []

        # First pass: items that live inside the struct declaration.
        # (The separator element of each tuple is currently unused.)
        order = [(self.enums, ''), (self.unions, ''),
                 (self.typedefs, ''), (self.members, '')]

        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += self.indent + '}\n\n'
        result += 'impl %s {\n' % self.name

        # Second pass: items that live inside the impl block.
        order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n)')]
        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += "}"
        return result
class CGProxySpecialOperation(CGPerSignatureCall):
    """
    Base class for classes for calling an indexed or named special operation
    (don't use this directly, use the derived classes below).
    """
    def __init__(self, descriptor, operation):
        # `operation` comes in as the operation name ("IndexedGetter",
        # "NamedSetter", ...) and is rebound to the IDL operation object.
        nativeName = MakeNativeName(descriptor.binaryNameFor(operation))
        operation = descriptor.operations[operation]
        assert len(operation.signatures()) == 1
        signature = operation.signatures()[0]

        (returnType, arguments) = signature

        # We pass len(arguments) as the final argument so that the
        # CGPerSignatureCall won't do any argument conversion of its own.
        CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
                                    False, descriptor, operation,
                                    len(arguments))

        if operation.isSetter() or operation.isCreator():
            # arguments[0] is the index or name of the item that we're setting.
            argument = arguments[1]
            info = getJSToNativeConversionInfo(
                argument.type, descriptor, treatNullAs=argument.treatNullAs,
                exceptionCode="return false;")
            template = info.template
            declType = info.declType

            templateValues = {
                "val": "value.handle()",
            }
            # Convert the incoming JS value (taken from the property
            # descriptor) into the native argument before the call.
            self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
                template, templateValues, declType, argument.identifier.name))
            self.cgRoot.prepend(CGGeneric("let value = RootedValue::new(cx, desc.get().value);"))
        elif operation.isGetter():
            # Getters report whether the item exists via `found`.
            self.cgRoot.prepend(CGGeneric("let mut found = false;"))

    def getArguments(self):
        def process(arg):
            argVal = arg.identifier.name
            if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
                argVal += ".r()"
            return argVal
        args = [(a, process(a)) for a in self.arguments]
        if self.idlNode.isGetter():
            # Getters take an extra trailing out-param: &mut found.
            args.append((FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean],
                                      self.idlNode),
                         "&mut found"))
        return args

    def wrap_return_value(self):
        # Only wrap the result when the item was actually found.
        if not self.idlNode.isGetter() or self.templateValues is None:
            return ""

        wrap = CGGeneric(wrapForType(**self.templateValues))
        wrap = CGIfWrapper("found", wrap)
        return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed getter. If templateValues is not None
    the returned value will be wrapped with wrapForType using templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        # templateValues is consumed by wrap_return_value in the base class.
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedOperation(CGProxySpecialOperation):
    """
    Class to generate a call to a named operation.  Prepends code that
    binds the accessed property name and unwraps the proxied object.
    """

    def __init__(self, descriptor, name):
        CGProxySpecialOperation.__init__(self, descriptor, name)

    def define(self):
        # Our first argument is the id we're getting.
        argName = self.arguments[0].identifier.name
        preamble = (
            "let %s = jsid_to_str(cx, id);\n"
            "let this = UnwrapProxy(proxy);\n"
            "let this = &*this;\n" % argName)
        return preamble + CGProxySpecialOperation.define(self)
class CGProxyNamedGetter(CGProxyNamedOperation):
    """
    Class to generate a call to an named getter. If templateValues is not None
    the returned value will be wrapped with wrapForType using templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        # templateValues is consumed by wrap_return_value in the base class.
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedPresenceChecker(CGProxyNamedGetter):
    """
    Class to generate a call that checks whether a named property exists.
    For now, we just delegate to CGProxyNamedGetter
    """
    def __init__(self, descriptor):
        # No templateValues: we only care about `found`, not the value.
        CGProxyNamedGetter.__init__(self, descriptor)
class CGProxyNamedSetter(CGProxyNamedOperation):
    """
    Class to generate a call to a named setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyNamedDeleter(CGProxyNamedOperation):
    """
    Class to generate a call to a named deleter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedDeleter')
class CGProxyUnwrap(CGAbstractMethod):
    """
    Generates the UnwrapProxy helper: extracts the native object pointer
    from a proxy's private slot.
    """
    def __init__(self, descriptor):
        args = [Argument('HandleObject', 'obj')]
        CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy",
                                  '*const ' + descriptor.concreteType, args,
                                  alwaysInline=True, unsafe=True)

    def definition_body(self):
        # Xray-wrapper handling and the IsProxy assertion from the Gecko
        # original are left commented out.
        return CGGeneric("""\
/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
    obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(*obj.ptr).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
    """
    Generates the `getOwnPropertyDescriptor` proxy-handler hook: tries
    the indexed getter, then the expando object, then the named getter,
    and reports "no property" otherwise.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
                Argument('HandleId', 'id'),
                Argument('MutableHandle<JSPropertyDescriptor>', 'desc')]
        CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
                                        "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        indexedSetter = self.descriptor.operations['IndexedSetter']

        get = ""
        if indexedGetter or indexedSetter:
            get = "let index = get_array_index_from_id(cx, id);\n"

        if indexedGetter:
            # The descriptor is read-only iff there is no indexed setter.
            readonly = toStringBool(self.descriptor.operations['IndexedSetter'] is None)
            fillDescriptor = ("desc.get().value = result_root.ptr;\n"
                              "fill_property_descriptor(&mut *desc.ptr, *proxy.ptr, %s);\n"
                              "return true;" % readonly)
            templateValues = {
                'jsvalRef': 'result_root.handle_mut()',
                'successCode': fillDescriptor,
                'pre': 'let mut result_root = RootedValue::new(cx, UndefinedValue());'
            }
            get += ("if let Some(index) = index {\n" +
                    "    let this = UnwrapProxy(proxy);\n" +
                    "    let this = &*this;\n" +
                    CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
                    "}\n")

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            # Read-only iff there is no named setter.
            readonly = toStringBool(self.descriptor.operations['NamedSetter'] is None)
            fillDescriptor = ("desc.get().value = result_root.ptr;\n"
                              "fill_property_descriptor(&mut *desc.ptr, *proxy.ptr, %s);\n"
                              "return true;" % readonly)
            templateValues = {
                'jsvalRef': 'result_root.handle_mut()',
                'successCode': fillDescriptor,
                'pre': 'let mut result_root = RootedValue::new(cx, UndefinedValue());'
            }
            # Once we start supporting OverrideBuiltins we need to make
            # ResolveOwnProperty or EnumerateOwnProperties filter out named
            # properties that shadow prototype properties.
            namedGet = ("\n" +
                        "if RUST_JSID_IS_STRING(id) && !has_property_on_prototype(cx, proxy, id) {\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() + "\n" +
                        "}\n")
        else:
            namedGet = ""

        return get + """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if !expando.ptr.is_null() {
    if !JS_GetPropertyDescriptorById(cx, expando.handle(), id, desc) {
        return false;
    }
    if !desc.get().obj.is_null() {
        // Pretend the property lives on the wrapper.
        desc.get().obj = *proxy.ptr;
        return true;
    }
}
""" + namedGet + """\
desc.get().obj = ptr::null_mut();
return true;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
# TODO(Issue 5876)
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
    """
    Generates the `defineProperty` proxy-handler hook: routes indexed
    defines to the IndexedSetter, named defines to the NamedSetter (or
    silently accepts/rejects them when only a getter exists), then falls
    back to proxyhandler::define_property.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
                Argument('HandleId', 'id'),
                Argument('Handle<JSPropertyDescriptor>', 'desc'),
                Argument('*mut ObjectOpResult', 'opresult')]
        CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        # Renamed from `set` to avoid shadowing the builtin.
        body = ""

        indexedSetter = self.descriptor.operations['IndexedSetter']
        if indexedSetter:
            body += ("let index = get_array_index_from_id(cx, id);\n" +
                     "if let Some(index) = index {\n" +
                     "    let this = UnwrapProxy(proxy);\n" +
                     "    let this = &*this;\n" +
                     CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
                     "    return true;\n" +
                     "}\n")
        elif self.descriptor.operations['IndexedGetter']:
            # Indexed getter but no setter: indexed defines always fail.
            body += ("if get_array_index_from_id(cx, id).is_some() {\n" +
                     "    return false;\n" +
                     "    //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
                     "}\n") % self.descriptor.name

        namedSetter = self.descriptor.operations['NamedSetter']
        if namedSetter:
            if self.descriptor.hasUnforgeableMembers:
                raise TypeError("Can't handle a named setter on an interface that has "
                                "unforgeables. Figure out how that should work!")
            body += ("if RUST_JSID_IS_STRING(id) {\n" +
                     CGIndenter(CGProxyNamedSetter(self.descriptor)).define() +
                     "    (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
                     "    return true;\n" +
                     "} else {\n" +
                     "    return false;\n" +
                     "}\n")
        elif self.descriptor.operations['NamedGetter']:
            # BUG FIX: the original applied `% (name, name)` to this whole
            # concatenated string, which contains no %-conversions, so this
            # branch raised TypeError at code-generation time whenever it
            # was reached.  The interface name is now substituted only into
            # the one (commented-out) line that mentions it.  The branch is
            # also guarded on NamedGetter, since CGProxyNamedGetter cannot
            # be constructed without one.
            # TODO(Issue 5876)
            no_setter_comment = (
                "        //return js::IsInNonStrictPropertySet(cx)\n"
                "        //       ? opresult.succeed()\n"
                "        //       : ThrowErrorMessage(cx, MSG_NO_NAMED_SETTER, \"%s\");\n"
                % self.descriptor.name)
            body += ("if RUST_JSID_IS_STRING(id) {\n" +
                     CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
                     "    if (found) {\n" +
                     no_setter_comment +
                     "        (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
                     "        return true;\n" +
                     "    }\n" +
                     "    (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
                     "    return true;\n" +
                     "}\n")

        body += "return proxyhandler::define_property(%s);" % ", ".join(a.name for a in self.args)
        return body

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_delete(CGAbstractExternMethod):
    """
    Generates the `delete` proxy-handler hook: runs the NamedDeleter (if
    any) before delegating to proxyhandler::delete.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
                Argument('HandleId', 'id'),
                Argument('*mut ObjectOpResult', 'res')]
        CGAbstractExternMethod.__init__(self, descriptor, "delete", "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        # NOTE: `set` shadows the builtin; kept as-is for fidelity.
        set = ""
        if self.descriptor.operations['NamedDeleter']:
            if self.descriptor.hasUnforgeableMembers:
                raise TypeError("Can't handle a deleter on an interface that has "
                                "unforgeables. Figure out how that should work!")
            set += CGProxyNamedDeleter(self.descriptor).define()
        set += "return proxyhandler::delete(%s);" % ", ".join(a.name for a in self.args)
        return set

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_ownPropertyKeys(CGAbstractExternMethod):
    """
    Generates the `own_property_keys` proxy-handler hook: collects the
    indexed keys, then the supported named-property names, then any keys
    on the expando object.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'),
                Argument('HandleObject', 'proxy'),
                Argument('*mut AutoIdVector', 'props')]
        CGAbstractExternMethod.__init__(self, descriptor, "own_property_keys", "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        body = dedent(
            """
            let unwrapped_proxy = UnwrapProxy(proxy);
            """)

        if self.descriptor.operations['IndexedGetter']:
            # Indexed keys: 0 .. Length().
            body += dedent(
                """
                for i in 0..(*unwrapped_proxy).Length() {
                    let rooted_jsid = RootedId::new(cx, int_to_jsid(i as i32));
                    AppendToAutoIdVector(props, rooted_jsid.handle().get());
                }
                """)

        if self.descriptor.operations['NamedGetter']:
            # Named keys: interned strings from SupportedPropertyNames().
            body += dedent(
                """
                for name in (*unwrapped_proxy).SupportedPropertyNames() {
                    let cstring = CString::new(name).unwrap();
                    let jsstring = JS_InternString(cx, cstring.as_ptr());
                    let rooted = RootedString::new(cx, jsstring);
                    let jsid = INTERNED_STRING_TO_JSID(cx, rooted.handle().get());
                    let rooted_jsid = RootedId::new(cx, jsid);
                    AppendToAutoIdVector(props, rooted_jsid.handle().get());
                }
                """)

        # Finally, anything the script stored on the expando object.
        body += dedent(
            """
            let expando = get_expando_object(proxy);
            if !expando.is_null() {
                let rooted_expando = RootedObject::new(cx, expando);
                GetPropertyKeys(cx, rooted_expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
            }
            return true;
            """)

        return body

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
    """
    Generates the `hasOwn` proxy-handler hook: checks the indexed getter,
    then the expando object, then the named getter; writes the answer to
    the `bp` out-parameter.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
                Argument('HandleId', 'id'), Argument('*mut bool', 'bp')]
        CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            indexed = ("let index = get_array_index_from_id(cx, id);\n" +
                       "if let Some(index) = index {\n" +
                       "    let this = UnwrapProxy(proxy);\n" +
                       "    let this = &*this;\n" +
                       CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
                       "    *bp = found;\n" +
                       "    return true;\n" +
                       "}\n\n")
        else:
            indexed = ""

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            named = ("if RUST_JSID_IS_STRING(id) && !has_property_on_prototype(cx, proxy, id) {\n" +
                     CGIndenter(CGProxyNamedGetter(self.descriptor)).define() + "\n" +
                     "    *bp = found;\n"
                     "    return true;\n"
                     "}\n" +
                     "\n")
        else:
            named = ""

        return indexed + """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
if !expando.ptr.is_null() {
    let mut b = true;
    let ok = JS_HasPropertyById(cx, expando.handle(), id, &mut b);
    *bp = b;
    if !ok || *bp {
        return ok;
    }
}
""" + named + """\
*bp = false;
return true;"""

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
    """
    Generates the `get` proxy-handler hook: indexed getter (or expando),
    then the prototype chain, then the named getter, finally undefined.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
                Argument('HandleObject', 'receiver'), Argument('HandleId', 'id'),
                Argument('MutableHandleValue', 'vp')]
        CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args)
        self.descriptor = descriptor

    def getBody(self):
        # Shared snippet: forward the get to the expando object when it
        # has the property.
        getFromExpando = """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
if !expando.ptr.is_null() {
    let mut hasProp = false;
    if !JS_HasPropertyById(cx, expando.handle(), id, &mut hasProp) {
        return false;
    }
    if hasProp {
        return JS_ForwardGetPropertyTo(cx, expando.handle(), id, receiver, vp);
    }
}"""

        templateValues = {
            'jsvalRef': 'vp',
            'successCode': 'return true;',
        }

        indexedGetter = self.descriptor.operations['IndexedGetter']
        if indexedGetter:
            getIndexedOrExpando = ("let index = get_array_index_from_id(cx, id);\n" +
                                   "if let Some(index) = index {\n" +
                                   "    let this = UnwrapProxy(proxy);\n" +
                                   "    let this = &*this;\n" +
                                   CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
            getIndexedOrExpando += """\
    // Even if we don't have this index, we don't forward the
    // get on to our expando object.
} else {
    %s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n    ')))
        else:
            getIndexedOrExpando = getFromExpando + "\n"

        namedGetter = self.descriptor.operations['NamedGetter']
        if namedGetter:
            getNamed = ("if RUST_JSID_IS_STRING(id) {\n" +
                        CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
                        "}\n")
        else:
            getNamed = ""

        return """\
//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !get_property_on_prototype(cx, proxy, id, &mut found, vp) {
    return false;
}
if found {
    return true;
}
%s
*vp.ptr = UndefinedValue();
return true;""" % (getIndexedOrExpando, getNamed)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_className(CGAbstractExternMethod):
    """
    Generates the `className` proxy-handler hook, returning the interface
    name as a static C string.
    """
    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_proxy')]
        CGAbstractExternMethod.__init__(self, descriptor, "className", "*const i8", args)
        self.descriptor = descriptor

    def getBody(self):
        return '%s as *const u8 as *const i8' % str_to_const_array(self.descriptor.name)

    def definition_body(self):
        return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
    """
    Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
    'this' unwrapping as it assumes that the unwrapped type is always known.
    """
    def __init__(self, descriptor, name, returnType, args):
        CGAbstractExternMethod.__init__(self, descriptor, name, returnType,
                                        args)

    def definition_body_prologue(self):
        # Bind `this` to the native object before running subclass code.
        return CGGeneric("""
let this = native_from_object::<%s>(obj).unwrap();
""" % self.descriptor.concreteType)

    def definition_body(self):
        return CGList([
            self.definition_body_prologue(),
            self.generate_code(),
        ])

    def generate_code(self):
        # Subclasses supply the hook-specific body.
        raise NotImplementedError  # Override me!
def finalizeHook(descriptor, hookName, context):
    """
    Build the Rust body of the finalize hook for `descriptor`'s concrete type:
    tear down global/weak-reference bookkeeping first, then drop the boxed
    native object recovered by the class-hook prologue (`this`).
    """
    release = ""
    if descriptor.isGlobal():
        # Globals carry extra embedder state that must be released first.
        release += """\
finalize_global(obj);
"""
    elif descriptor.weakReferenceable:
        # Clear the WeakBox so outstanding weak references observe None, and
        # free the box itself once no weak handles remain (count drops to 0).
        release += """\
let weak_box_ptr = JS_GetReservedSlot(obj, DOM_WEAK_SLOT).to_private() as *mut WeakBox<%s>;
if !weak_box_ptr.is_null() {
let count = {
let weak_box = &*weak_box_ptr;
assert!(weak_box.value.get().is_some());
assert!(weak_box.count.get() > 0);
weak_box.value.set(None);
let count = weak_box.count.get() - 1;
weak_box.count.set(count);
count
};
if count == 0 {
mem::drop(Box::from_raw(weak_box_ptr));
}
}
""" % descriptor.concreteType
    # Reconstitute the Box and let it drop; `this` may be null for the
    # unforgeable holder object, which owns no native.
    release += """\
if !this.is_null() {
// The pointer can be null if the object is the unforgeable holder of that interface.
let _ = Box::from_raw(this as *mut %s);
}
debug!("%s finalize: {:p}", this);\
""" % (descriptor.concreteType, descriptor.concreteType)
    return release
class CGClassTraceHook(CGAbstractClassHook):
    """
    A hook to trace through our native object; used for GC and CC
    """

    def __init__(self, descriptor):
        arguments = [Argument('*mut JSTracer', 'trc'),
                     Argument('*mut JSObject', 'obj')]
        CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void',
                                     arguments)
        self.traceGlobal = descriptor.isGlobal()

    def generate_code(self):
        trace = CGGeneric("if this.is_null() { return; } // GC during obj creation\n"
                          "(*this).trace(%s);" % self.args[0].name)
        parts = [trace]
        if self.traceGlobal:
            # Globals additionally trace their embedder-side roots.
            parts.append(CGGeneric("trace_global(trc, obj);"))
        return CGList(parts, "\n")
class CGClassConstructHook(CGAbstractExternMethod):
    """
    JS-visible constructor for our objects
    """
    def __init__(self, descriptor, constructor=None):
        # Named constructors get a suffixed hook name so each one is distinct;
        # the default case uses the interface's own ctor.
        args = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'), Argument('*mut JSVal', 'vp')]
        name = CONSTRUCT_HOOK_NAME
        if constructor:
            name += "_" + constructor.identifier.name
        else:
            constructor = descriptor.interface.ctor()
            assert constructor
        CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
        self.constructor = constructor

    def definition_body(self):
        # Root the global from the callee, then dispatch to the native
        # constructor via the usual overload-resolution machinery.
        preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
let args = CallArgs::from_vp(vp, argc);
""")
        name = self.constructor.identifier.name
        nativeName = MakeNativeName(self.descriptor.binaryNameFor(name))
        callGenerator = CGMethodCall(["global.r()"], nativeName, True,
                                     self.descriptor, self.constructor)
        return CGList([preamble, callGenerator])
class CGClassHasInstanceHook(CGAbstractExternMethod):
    """Generates the `hasInstance` hook, backing `instanceof` for this interface."""

    def __init__(self, descriptor):
        args = [Argument('*mut JSContext', 'cx'),
                Argument('HandleObject', 'obj'),
                Argument('MutableHandleValue', 'value'),
                Argument('*mut bool', 'rval')]
        # Only non-callback interfaces with an interface object get this hook.
        assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
        CGAbstractExternMethod.__init__(self, descriptor, HASINSTANCE_HOOK_NAME,
                                        'bool', args)

    def definition_body(self):
        # Delegate to the shared has_instance helper, keyed by prototype ID
        # and depth in the prototype chain.
        id = "PrototypeList::ID::%s" % self.descriptor.interface.identifier.name
        return CGGeneric("""\
match has_instance(cx, obj, value.handle(), %(id)s, %(index)s) {
Ok(result) => {
*rval = result;
true
}
Err(()) => false,
}
""" % {"id": id, "index": self.descriptor.prototypeDepth})
class CGClassFunToStringHook(CGAbstractExternMethod):
    """
    A hook to convert functions to strings.
    """

    def __init__(self, descriptor):
        arguments = [Argument('*mut JSContext', 'cx'),
                     Argument('HandleObject', '_obj'),
                     Argument('u32', '_indent')]
        CGAbstractExternMethod.__init__(self, descriptor, "fun_to_string",
                                        '*mut JSString', arguments)

    def definition_body(self):
        # Emit the conventional "[native code]" stringification for the
        # interface's constructor function.
        ifaceName = self.descriptor.interface.identifier.name
        source = str_to_const_array("function %s() {\\n [native code]\\n}" % ifaceName)
        return CGGeneric(
            "JS_NewStringCopyZ(cx, %s as *const _ as *const libc::c_char)" % source)
class CGClassFinalizeHook(CGAbstractClassHook):
    """
    A hook for finalize, used to release our native object.
    """

    def __init__(self, descriptor):
        arguments = [Argument('*mut FreeOp', '_fop'),
                     Argument('*mut JSObject', 'obj')]
        CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
                                     'void', arguments)

    def generate_code(self):
        # The actual teardown code is produced by the shared finalizeHook
        # helper, parameterized on the obj argument's name.
        body = finalizeHook(self.descriptor, self.name, self.args[0].name)
        return CGGeneric(body)
class CGDOMJSProxyHandlerDOMClass(CGThing):
    """Emits the static DOMClass record used by this interface's proxy handler."""

    def __init__(self, descriptor):
        CGThing.__init__(self)
        self.descriptor = descriptor

    def define(self):
        return "static Class: DOMClass = %s;\n" % DOMClass(self.descriptor)
class CGInterfaceTrait(CGThing):
    """
    Generates the `FooMethods` Rust trait declaring one method per IDL
    operation/attribute (plus proxy-handler hooks for proxies), which the
    hand-written DOM implementation must implement.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)

        def attribute_arguments(needCx, argument=None):
            # Getters may need a JSContext; setters additionally take `value`.
            if needCx:
                yield "cx", "*mut JSContext"

            if argument:
                yield "value", argument_type(descriptor, argument)

        def members():
            # Yield (name, arguments, return-type) for every trait method.
            for m in descriptor.interface.members:
                if (m.isMethod() and not m.isStatic() and
                        (not m.isIdentifierLess() or m.isStringifier())):
                    name = CGSpecializedMethod.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m)
                    # Overloads get '_'-suffixed names (one underscore per index).
                    for idx, (rettype, arguments) in enumerate(m.signatures()):
                        arguments = method_arguments(descriptor, rettype, arguments)
                        rettype = return_type(descriptor, rettype, infallible)
                        yield name + ('_' * idx), arguments, rettype
                elif m.isAttr() and not m.isStatic():
                    name = CGSpecializedGetter.makeNativeName(descriptor, m)
                    infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
                    yield (name,
                           attribute_arguments(typeNeedsCx(m.type, True)),
                           return_type(descriptor, m.type, infallible))

                    if not m.readonly:
                        # Writable attributes also get a setter entry.
                        name = CGSpecializedSetter.makeNativeName(descriptor, m)
                        infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
                        if infallible:
                            rettype = "()"
                        else:
                            rettype = "ErrorResult"
                        yield name, attribute_arguments(typeNeedsCx(m.type, False), m.type), rettype

            if descriptor.proxy:
                for name, operation in descriptor.operations.iteritems():
                    if not operation or operation.isStringifier():
                        continue

                    # We should only have one signature here anyway.
                    assert len(operation.signatures()) == 1
                    rettype, arguments = operation.signatures()[0]

                    infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
                    if operation.isGetter():
                        # Indexed/named getters report presence via a trailing
                        # `found: &mut bool` out-parameter.
                        arguments = method_arguments(descriptor, rettype, arguments, trailing=("found", "&mut bool"))

                        # If this interface 'supports named properties', then we
                        # should be able to access 'supported property names'
                        #
                        # WebIDL, Second Draft, section 3.2.4.5
                        # https://heycam.github.io/webidl/#idl-named-properties
                        if operation.isNamed():
                            yield "SupportedPropertyNames", [], "Vec<DOMString>"
                    else:
                        arguments = method_arguments(descriptor, rettype, arguments)
                    rettype = return_type(descriptor, rettype, infallible)
                    yield name, arguments, rettype

        def fmt(arguments):
            return "".join(", %s: %s" % argument for argument in arguments)

        methods = [
            CGGeneric("fn %s(&self%s) -> %s;\n" % (name, fmt(arguments), rettype))
            for name, arguments, rettype in members()
        ]
        if methods:
            self.cgRoot = CGWrapper(CGIndenter(CGList(methods, "")),
                                    pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
                                    post="}")
        else:
            # No members: emit nothing rather than an empty trait.
            self.cgRoot = CGGeneric("")

    def define(self):
        return self.cgRoot.define()
class CGWeakReferenceableTrait(CGThing):
    """Emits the (empty) WeakReferenceable impl for weakly-referenceable interfaces."""

    def __init__(self, descriptor):
        CGThing.__init__(self)
        assert descriptor.weakReferenceable
        ifaceName = descriptor.interface.identifier.name
        self.code = "impl WeakReferenceable for %s {}" % ifaceName

    def define(self):
        return self.code
class CGDescriptor(CGThing):
    """
    Aggregates all codegen pieces for one interface descriptor: member
    wrappers and JIT info, class hooks, constructors, property arrays,
    interface-object setup, proxy-handler hooks, and the *Methods trait.
    """
    def __init__(self, descriptor):
        CGThing.__init__(self)

        # Callback interfaces never have a concrete reflector class.
        assert not descriptor.concrete or not descriptor.interface.isCallback()

        cgThings = []
        if not descriptor.interface.isCallback():
            cgThings.append(CGGetProtoObjectMethod(descriptor))
        if descriptor.interface.hasInterfaceObject() and descriptor.hasDescendants():
            cgThings.append(CGGetConstructorObjectMethod(descriptor))

        # One wrapper (and, for non-static members, JIT info) per member.
        for m in descriptor.interface.members:
            if (m.isMethod() and
                    (not m.isIdentifierLess() or m == descriptor.operations["Stringifier"])):
                if m.isStatic():
                    assert descriptor.interface.hasInterfaceObject()
                    cgThings.append(CGStaticMethod(descriptor, m))
                elif not descriptor.interface.isCallback():
                    cgThings.append(CGSpecializedMethod(descriptor, m))
                    cgThings.append(CGMemberJITInfo(descriptor, m))
            elif m.isAttr():
                if m.stringifier:
                    raise TypeError("Stringifier attributes not supported yet. "
                                    "See https://github.com/servo/servo/issues/7590\n"
                                    "%s" % m.location)

                if m.isStatic():
                    assert descriptor.interface.hasInterfaceObject()
                    cgThings.append(CGStaticGetter(descriptor, m))
                elif not descriptor.interface.isCallback():
                    cgThings.append(CGSpecializedGetter(descriptor, m))

                if not m.readonly:
                    if m.isStatic():
                        assert descriptor.interface.hasInterfaceObject()
                        cgThings.append(CGStaticSetter(descriptor, m))
                    elif not descriptor.interface.isCallback():
                        cgThings.append(CGSpecializedSetter(descriptor, m))
                elif m.getExtendedAttribute("PutForwards"):
                    # Readonly attribute that forwards assignment to a target.
                    cgThings.append(CGSpecializedForwardingSetter(descriptor, m))

                if (not m.isStatic() and not descriptor.interface.isCallback()):
                    cgThings.append(CGMemberJITInfo(descriptor, m))

        # Concrete classes need GC hooks.
        if descriptor.concrete:
            cgThings.append(CGClassFinalizeHook(descriptor))
            cgThings.append(CGClassTraceHook(descriptor))

        if descriptor.interface.hasInterfaceObject():
            if descriptor.interface.ctor():
                cgThings.append(CGClassConstructHook(descriptor))
            for ctor in descriptor.interface.namedConstructors:
                cgThings.append(CGClassConstructHook(descriptor, ctor))
            if not descriptor.interface.isCallback():
                cgThings.append(CGInterfaceObjectJSClass(descriptor))
            cgThings.append(CGClassHasInstanceHook(descriptor))
            cgThings.append(CGClassFunToStringHook(descriptor))

        if not descriptor.interface.isCallback():
            cgThings.append(CGPrototypeJSClass(descriptor))

        properties = PropertyArrays(descriptor)
        cgThings.append(CGGeneric(str(properties)))
        cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties))

        # IDL constants go into a Foo_Constants namespace.
        cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
                                          CGConstant(m for m in descriptor.interface.members if m.isConst()),
                                          public=True))

        if descriptor.interface.hasInterfaceObject():
            cgThings.append(CGDefineDOMInterfaceMethod(descriptor))

        if descriptor.proxy:
            cgThings.append(CGDefineProxyHandler(descriptor))

        if descriptor.concrete:
            if descriptor.proxy:
                # Proxy-based interfaces get the full set of proxy-handler hooks.
                # cgThings.append(CGProxyIsProxy(descriptor))
                cgThings.append(CGProxyUnwrap(descriptor))
                cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
                cgThings.append(CGDOMJSProxyHandler_ownPropertyKeys(descriptor))
                cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
                cgThings.append(CGDOMJSProxyHandler_className(descriptor))
                cgThings.append(CGDOMJSProxyHandler_get(descriptor))
                cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))

                if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
                    cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))

                # We want to prevent indexed deleters from compiling at all.
                assert not descriptor.operations['IndexedDeleter']

                if descriptor.operations['NamedDeleter']:
                    cgThings.append(CGDOMJSProxyHandler_delete(descriptor))

                # cgThings.append(CGDOMJSProxyHandler(descriptor))
                # cgThings.append(CGIsMethod(descriptor))
                pass
            else:
                cgThings.append(CGDOMJSClass(descriptor))
                pass

            cgThings.append(CGWrapMethod(descriptor))

        if not descriptor.interface.isCallback():
            if descriptor.concrete or descriptor.hasDescendants():
                cgThings.append(CGIDLInterface(descriptor))
            cgThings.append(CGInterfaceTrait(descriptor))
            if descriptor.weakReferenceable:
                cgThings.append(CGWeakReferenceableTrait(descriptor))

        cgThings = CGList(cgThings, "\n")
        # self.cgRoot = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
        #                         cgThings),
        #                         post='\n')
        self.cgRoot = cgThings

    def define(self):
        return self.cgRoot.define()
class CGNonNamespacedEnum(CGThing):
    """
    Generates a plain Rust enum from `names`, numbering variants from
    `first` and terminating with an automatically-computed `Last` entry.
    Optional `repr` and `deriving` attributes are prepended when given.
    """

    def __init__(self, enumName, names, first, comment="", deriving="", repr=""):
        # Only the first variant carries an explicit value; the rest follow on.
        entries = ["%s = %s" % (names[0], first)] + names[1:]

        # `Last` sits one past the final real variant.
        entries.append('Last = ' + str(first + len(entries)))

        # Indent each variant one level inside the braces.
        entries = [' ' + e for e in entries]

        body = ',\n'.join(entries)
        enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, body)
        if repr:
            enumstr = ('#[repr(%s)]\n' % repr) + enumstr
        if deriving:
            enumstr = ('#[derive(%s)]\n' % deriving) + enumstr

        # Surround the declaration with blank lines.
        self.node = CGWrapper(CGGeneric(enumstr), pre='\n', post='\n')

    def define(self):
        return self.node.define()
class CGDictionary(CGThing):
    """
    Generates the Rust struct for an IDL dictionary plus its `new`/`empty`
    constructors (JS value -> struct) and a ToJSValConvertible impl
    (struct -> JS object).
    """
    def __init__(self, dictionary, descriptorProvider):
        self.dictionary = dictionary
        # Only generatable if every dictionary we depend on is, too.
        if all(CGDictionary(d, descriptorProvider).generatable for
               d in CGDictionary.getDictionaryDependencies(dictionary)):
            self.generatable = True
        else:
            self.generatable = False
            # Nothing else to do here
            return
        # Pair each member with its JS->native conversion recipe.
        self.memberInfo = [
            (member,
             getJSToNativeConversionInfo(member.type,
                                         descriptorProvider,
                                         isMember="Dictionary",
                                         defaultValue=member.defaultValue,
                                         exceptionCode="return Err(());"))
            for member in dictionary.members]

    def define(self):
        if not self.generatable:
            return ""
        return self.struct() + "\n" + self.impl()

    def struct(self):
        # Emit the struct declaration; a parent dictionary becomes an embedded
        # `parent` field rather than Rust inheritance.
        d = self.dictionary
        if d.parent:
            inheritance = " pub parent: %s::%s,\n" % (self.makeModuleName(d.parent),
                                                      self.makeClassName(d.parent))
        else:
            inheritance = ""
        memberDecls = [" pub %s: %s," %
                       (self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
                       for m in self.memberInfo]

        return (string.Template(
                "pub struct ${selfName} {\n" +
                "${inheritance}" +
                "\n".join(memberDecls) + "\n" +
                "}").substitute({"selfName": self.makeClassName(d),
                                 "inheritance": inheritance}))

    def impl(self):
        d = self.dictionary
        if d.parent:
            # Parent members are parsed first from the same JS value.
            initParent = "parent: try!(%s::%s::new(cx, val)),\n" % (
                self.makeModuleName(d.parent),
                self.makeClassName(d.parent))
        else:
            initParent = ""

        def memberInit(memberInfo):
            # Struct-literal initializer line for one member.
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            conversion = self.getMemberConversion(memberInfo, member.type)
            return CGGeneric("%s: %s,\n" % (name, conversion.define()))

        def memberInsert(memberInfo):
            # Rust statements writing one member back onto a JS object.
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            insertion = ("let mut %s = RootedValue::new(cx, UndefinedValue());\n"
                         "self.%s.to_jsval(cx, %s.handle_mut());\n"
                         "set_dictionary_property(cx, obj.handle(), \"%s\", %s.handle()).unwrap();"
                         % (name, name, name, name, name))
            return CGGeneric("%s\n" % insertion)

        memberInits = CGList([memberInit(m) for m in self.memberInfo])
        memberInserts = CGList([memberInsert(m) for m in self.memberInfo])
        return string.Template(
            "impl ${selfName} {\n"
            " pub unsafe fn empty(cx: *mut JSContext) -> ${selfName} {\n"
            " ${selfName}::new(cx, HandleValue::null()).unwrap()\n"
            " }\n"
            " pub unsafe fn new(cx: *mut JSContext, val: HandleValue) -> Result<${selfName}, ()> {\n"
            " let object = if val.get().is_null_or_undefined() {\n"
            " RootedObject::new(cx, ptr::null_mut())\n"
            " } else if val.get().is_object() {\n"
            " RootedObject::new(cx, val.get().to_object())\n"
            " } else {\n"
            " throw_type_error(cx, \"Value not an object.\");\n"
            " return Err(());\n"
            " };\n"
            " Ok(${selfName} {\n"
            "${initParent}"
            "${initMembers}"
            " })\n"
            " }\n"
            "}\n"
            "\n"
            "impl ToJSValConvertible for ${selfName} {\n"
            " unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {\n"
            " let obj = RootedObject::new(cx, JS_NewObject(cx, ptr::null()));\n"
            "${insertMembers}"
            " rval.set(ObjectOrNullValue(obj.ptr))\n"
            " }\n"
            "}\n").substitute({
                "selfName": self.makeClassName(d),
                "initParent": CGIndenter(CGGeneric(initParent), indentLevel=12).define(),
                "initMembers": CGIndenter(memberInits, indentLevel=12).define(),
                "insertMembers": CGIndenter(memberInserts, indentLevel=8).define(),
            })

    @staticmethod
    def makeDictionaryName(dictionary):
        return dictionary.identifier.name

    def makeClassName(self, dictionary):
        return self.makeDictionaryName(dictionary)

    @staticmethod
    def makeModuleName(dictionary):
        return getModuleFromObject(dictionary)

    def getMemberType(self, memberInfo):
        # Optional members with no default become Option<T>.
        member, info = memberInfo
        declType = info.declType
        if member.optional and not member.defaultValue:
            declType = CGWrapper(info.declType, pre="Option<", post=">")
        return declType.define()

    def getMemberConversion(self, memberInfo, memberType):
        """
        Build the Rust expression that reads one dictionary property and
        converts it, falling back to the default (or raising a TypeError for
        missing required members).
        """
        def indent(s):
            return CGIndenter(CGGeneric(s), 8).define()

        member, info = memberInfo
        templateBody = info.template
        default = info.default
        replacements = {"val": "rval.handle()"}
        conversion = string.Template(templateBody).substitute(replacements)
        if memberType.isAny():
            conversion = "%s.get()" % conversion

        assert (member.defaultValue is None) == (default is None)
        if not member.optional:
            assert default is None
            default = ("throw_type_error(cx, \"Missing required member \\\"%s\\\".\");\n"
                       "return Err(());") % member.identifier.name
        elif not default:
            # Optional without default: wrap present values in Some.
            default = "None"
            conversion = "Some(%s)" % conversion

        conversion = (
            "{\n"
            "let mut rval = RootedValue::new(cx, UndefinedValue());\n"
            "match try!(get_dictionary_property(cx, object.handle(), \"%s\", rval.handle_mut())) {\n"
            " true => {\n"
            "%s\n"
            " },\n"
            " false => {\n"
            "%s\n"
            " },\n"
            "}\n}") % (member.identifier.name, indent(conversion), indent(default))
        return CGGeneric(conversion)

    @staticmethod
    def makeMemberName(name):
        # Can't use Rust keywords as member names.
        if name == "type":
            return name + "_"
        return name

    @staticmethod
    def getDictionaryDependencies(dictionary):
        # Parent plus every dictionary-typed member.
        deps = set()
        if dictionary.parent:
            deps.add(dictionary.parent)
        for member in dictionary.members:
            if member.type.isDictionary():
                deps.add(member.type.unroll().inner)
        return deps
class CGRegisterProtos(CGAbstractMethod):
    """Generates `Register`, which defines every registered DOM interface on a global."""

    def __init__(self, config):
        CGAbstractMethod.__init__(
            self, None, 'Register', 'void',
            [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global')],
            unsafe=False, pub=True)
        self.config = config

    def definition_body(self):
        lines = []
        for desc in self.config.getDescriptors(hasInterfaceObject=True, register=True):
            lines.append(CGGeneric(
                "codegen::Bindings::%sBinding::DefineDOMInterface(cx, global);" % desc.name))
        return CGList(lines, "\n")
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
    def __init__(self, descriptors):
        docs = "Create the global vtables used by the generated DOM bindings to implement JS proxies."
        CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
                                  unsafe=True, pub=True, docs=docs)
        self.descriptors = descriptors

    def definition_body(self):
        # One slot assignment per proxy-based interface.
        template = ("proxy_handlers[Proxies::%s as usize] = "
                    "codegen::Bindings::%sBinding::DefineProxyHandler();")
        assignments = [CGGeneric(template % (desc.name, desc.name))
                       for desc in self.descriptors]
        return CGList(assignments, "\n")
class CGRegisterProxyHandlers(CGThing):
    """Emits the proxy-handler vtable array plus the method that populates it."""

    def __init__(self, config):
        proxies = config.getDescriptors(proxy=True)
        count = len(proxies)
        array_decl = CGGeneric(
            "pub static mut proxy_handlers: [*const libc::c_void; %d] = [0 as *const libc::c_void; %d];"
            % (count, count))
        self.root = CGList([array_decl,
                            CGRegisterProxyHandlersMethod(proxies)], "\n")

    def define(self):
        return self.root.define()
class CGBindingRoot(CGThing):
    """
    Root codegen class for binding generation. Instantiate the class, and call
    declare or define to generate header or cpp code (respectively).
    """
    def __init__(self, config, prefix, webIDLFile):
        descriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                            hasInterfaceObject=True)
        # We also want descriptors that have an interface prototype object
        # (isCallback=False), but we don't want to include a second copy
        # of descriptors that we also matched in the previous line
        # (hence hasInterfaceObject=False).
        descriptors.extend(config.getDescriptors(webIDLFile=webIDLFile,
                                                 hasInterfaceObject=False,
                                                 isCallback=False))
        dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
        mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
        callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
                                                    isCallback=True)
        enums = config.getEnums(webIDLFile)

        # Nothing defined in this IDL file -> nothing to generate.
        if not (descriptors or dictionaries or mainCallbacks or callbackDescriptors or enums):
            self.root = None
            return

        # Do codegen for all the enums.
        cgthings = [CGEnum(e) for e in enums]

        # Do codegen for all the dictionaries.
        cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
                         for d in dictionaries])

        # Do codegen for all the callbacks.
        cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
                                CGCallbackFunctionImpl(c)], "\n")
                        for c in mainCallbacks)

        # Do codegen for all the descriptors
        cgthings.extend([CGDescriptor(x) for x in descriptors])

        # Do codegen for all the callback interfaces.
        cgthings.extend(CGList([CGCallbackInterface(x),
                                CGCallbackFunctionImpl(x.interface)], "\n")
                        for x in callbackDescriptors)

        # And make sure we have the right number of newlines at the end
        curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")

        # Add imports
        curr = CGImports(curr, descriptors + callbackDescriptors, mainCallbacks, [
            'js',
            'js::{JSCLASS_GLOBAL_SLOT_COUNT, JSCLASS_IMPLEMENTS_BARRIERS}',
            'js::{JSCLASS_IS_DOMJSCLASS, JSCLASS_IS_GLOBAL, JSCLASS_RESERVED_SLOTS_MASK}',
            'js::{JSCLASS_RESERVED_SLOTS_SHIFT, JSITER_HIDDEN, JSITER_OWNONLY}',
            'js::{JSITER_SYMBOLS, JSPROP_ENUMERATE, JSPROP_PERMANENT, JSPROP_READONLY}',
            'js::{JSPROP_SHARED, JS_CALLEE}',
            'js::error::throw_type_error',
            'js::jsapi::{AliasSet, ArgType, AutoIdVector, CallArgs, FreeOp}',
            'js::jsapi::{GetGlobalForObjectCrossCompartment , GetPropertyKeys, Handle}',
            'js::jsapi::{HandleId, HandleObject, HandleValue, HandleValueArray}',
            'js::jsapi::{INTERNED_STRING_TO_JSID, IsCallable, JS_CallFunctionValue}',
            'js::jsapi::{JS_ComputeThis, JS_CopyPropertiesFrom, JS_ForwardGetPropertyTo}',
            'js::jsapi::{JS_GetClass, JS_GetFunctionPrototype, JS_GetGlobalForObject}',
            'js::jsapi::{JS_GetObjectPrototype, JS_GetProperty, JS_GetPropertyById}',
            'js::jsapi::{JS_GetPropertyDescriptorById, JS_GetReservedSlot, JS_HasProperty}',
            'js::jsapi::{JS_HasPropertyById, JS_InitializePropertiesFromCompatibleNativeObject}',
            'js::jsapi::{JS_InternString, JS_IsExceptionPending, JS_NewObject, JS_NewObjectWithGivenProto}',
            'js::jsapi::{JS_NewObjectWithoutMetadata, JS_NewStringCopyZ, JS_SetProperty}',
            'js::jsapi::{JS_SetPrototype, JS_SetReservedSlot, JS_WrapValue, JSAutoCompartment}',
            'js::jsapi::{JSAutoRequest, JSContext, JSClass, JSFreeOp, JSFunctionSpec}',
            'js::jsapi::{JSJitGetterCallArgs, JSJitInfo, JSJitMethodCallArgs, JSJitSetterCallArgs}',
            'js::jsapi::{JSNative, JSObject, JSNativeWrapper, JSPropertyDescriptor, JSPropertySpec}',
            'js::jsapi::{JSString, JSTracer, JSType, JSTypedMethodJitInfo, JSValueType}',
            'js::jsapi::{ObjectOpResult, OpType, MutableHandle, MutableHandleObject}',
            'js::jsapi::{MutableHandleValue, RootedId, RootedObject, RootedString}',
            'js::jsapi::{RootedValue, SymbolCode, jsid}',
            'js::jsval::JSVal',
            'js::jsval::{ObjectValue, ObjectOrNullValue, PrivateValue}',
            'js::jsval::{NullValue, UndefinedValue}',
            'js::glue::{CallJitMethodOp, CallJitGetterOp, CallJitSetterOp, CreateProxyHandler}',
            'js::glue::{GetProxyPrivate, NewProxyObject, ProxyTraps}',
            'js::glue::{RUST_FUNCTION_VALUE_TO_JITINFO}',
            'js::glue::{RUST_JS_NumberValue, RUST_JSID_IS_STRING, int_to_jsid}',
            'js::glue::AppendToAutoIdVector',
            'js::rust::{GCMethods, define_methods, define_properties}',
            'dom::bindings',
            'dom::bindings::global::{GlobalRef, global_root_from_object, global_root_from_reflector}',
            'dom::bindings::interface::{NonCallbackInterfaceObjectClass, create_callback_interface_object}',
            'dom::bindings::interface::{create_interface_prototype_object, create_named_constructors}',
            'dom::bindings::interface::{create_noncallback_interface_object, has_instance}',
            'dom::bindings::js::{JS, Root, RootedReference}',
            'dom::bindings::js::{OptionalRootedReference}',
            'dom::bindings::reflector::{Reflectable}',
            'dom::bindings::utils::{ConstantSpec, DOMClass, DOMJSClass}',
            'dom::bindings::utils::{DOM_PROTO_UNFORGEABLE_HOLDER_SLOT, JSCLASS_DOM_GLOBAL}',
            'dom::bindings::utils::{NonNullJSNative, ProtoOrIfaceArray, create_dom_global}',
            'dom::bindings::utils::{finalize_global, find_enum_string_index, generic_getter}',
            'dom::bindings::utils::{generic_lenient_getter, generic_lenient_setter}',
            'dom::bindings::utils::{generic_method, generic_setter, get_array_index_from_id}',
            'dom::bindings::utils::{get_dictionary_property, get_property_on_prototype}',
            'dom::bindings::utils::{get_proto_or_iface_array, has_property_on_prototype}',
            'dom::bindings::utils::{is_platform_object, set_dictionary_property}',
            'dom::bindings::utils::{throwing_constructor, trace_global}',
            'dom::bindings::utils::ConstantVal::{IntVal, UintVal}',
            'dom::bindings::trace::{JSTraceable, RootedTraceable}',
            'dom::bindings::callback::{CallbackContainer,CallbackInterface,CallbackFunction}',
            'dom::bindings::callback::{CallSetup,ExceptionHandling}',
            'dom::bindings::callback::wrap_call_this_object',
            'dom::bindings::conversions::{ConversionBehavior, DOM_OBJECT_SLOT, IDLInterface}',
            'dom::bindings::conversions::{FromJSValConvertible, StringificationBehavior}',
            'dom::bindings::conversions::{ToJSValConvertible, jsid_to_str, native_from_handlevalue}',
            'dom::bindings::conversions::{native_from_object, private_from_object, root_from_object}',
            'dom::bindings::conversions::{root_from_handleobject, root_from_handlevalue}',
            'dom::bindings::codegen::{PrototypeList, RegisterBindings, UnionTypes}',
            'dom::bindings::codegen::Bindings::*',
            'dom::bindings::error::{Fallible, Error, ErrorResult}',
            'dom::bindings::error::Error::JSFailed',
            'dom::bindings::error::throw_dom_exception',
            'dom::bindings::proxyhandler',
            'dom::bindings::proxyhandler::{ensure_expando_object, fill_property_descriptor}',
            'dom::bindings::proxyhandler::{get_expando_object, get_property_descriptor}',
            'dom::bindings::num::Finite',
            'dom::bindings::str::ByteString',
            'dom::bindings::str::USVString',
            'dom::bindings::trace::RootedVec',
            'dom::bindings::weakref::{DOM_WEAK_SLOT, WeakBox, WeakReferenceable}',
            'mem::heap_size_of_raw_self_and_children',
            'libc',
            'util::str::DOMString',
            'std::borrow::ToOwned',
            'std::cmp',
            'std::mem',
            'std::num',
            'std::ptr',
            'std::str',
            'std::rc',
            'std::rc::Rc',
            'std::default::Default',
            'std::ffi::CString',
        ])

        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)

        # Store the final result.
        self.root = curr

    def define(self):
        if not self.root:
            return None
        return stripTrailingWhitespace(self.root.define())
def argument_type(descriptorProvider, ty, optional=False, defaultValue=None, variadic=False):
    """Return the Rust type string used for an IDL argument of type `ty`."""
    declType = getJSToNativeConversionInfo(
        ty, descriptorProvider, isArgument=True).declType

    if variadic:
        # Variadic interface arguments arrive as slices; other variadics are
        # collected into a Vec.
        if ty.isGeckoInterface():
            declType = CGWrapper(declType, pre="&[", post="]")
        else:
            declType = CGWrapper(declType, pre="Vec<", post=">")
    elif optional and not defaultValue:
        declType = CGWrapper(declType, pre="Option<", post=">")

    if ty.isDictionary():
        # Dictionaries are passed by reference.
        declType = CGWrapper(declType, pre="&")

    return declType.define()
def method_arguments(descriptorProvider, returnType, arguments, passJSBits=True, trailing=None):
    """Yield (name, Rust type) pairs for a native method's argument list."""
    # A leading JSContext argument is added only when the signature needs one.
    if needCx(returnType, arguments, passJSBits):
        yield "cx", "*mut JSContext"

    for arg in arguments:
        rustType = argument_type(descriptorProvider, arg.type, arg.optional,
                                 arg.defaultValue, arg.variadic)
        yield CGDictionary.makeMemberName(arg.identifier.name), rustType

    if trailing:
        yield trailing
def return_type(descriptorProvider, rettype, infallible):
    """Return the Rust return-type string, wrapping fallible results in Fallible<>."""
    decl = getRetvalDeclarationForType(rettype, descriptorProvider)
    if infallible:
        return decl.define()
    return CGWrapper(decl, pre="Fallible<", post=">").define()
class CGNativeMember(ClassMethod):
    """
    A ClassMethod describing the native signature of one IDL member, with
    return type and argument list derived from the IDL signature.
    """
    def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
                 breakAfter=True, passJSBitsAsNeeded=True, visibility="public"):
        """
        If passJSBitsAsNeeded is false, we don't automatically pass in a
        JSContext* or a JSObject* based on the return and argument types.
        """
        self.descriptorProvider = descriptorProvider
        self.member = member
        self.extendedAttrs = extendedAttrs
        self.passJSBitsAsNeeded = passJSBitsAsNeeded
        breakAfterSelf = "\n" if breakAfter else ""
        ClassMethod.__init__(self, name,
                             self.getReturnType(signature[0]),
                             self.getArgs(signature[0], signature[1]),
                             static=member.isStatic(),
                             # Mark our getters, which are attrs that
                             # have a non-void return type, as const.
                             const=(not member.isStatic() and member.isAttr() and
                                    not signature[0].isVoid()),
                             breakAfterSelf=breakAfterSelf,
                             visibility=visibility)

    def getReturnType(self, type):
        # Fallibility comes from the 'infallible' extended attribute.
        infallible = 'infallible' in self.extendedAttrs
        typeDecl = return_type(self.descriptorProvider, type, infallible)
        return typeDecl

    def getArgs(self, returnType, argList):
        # method_arguments yields (name, type); Argument takes (type, name).
        return [Argument(arg[1], arg[0]) for arg in method_arguments(self.descriptorProvider,
                                                                     returnType,
                                                                     argList,
                                                                     self.passJSBitsAsNeeded)]
class CGCallback(CGClass):
    """
    Base codegen class for callback functions/interfaces: wraps each
    this-handling method in public `name_` (explicit thisObj) and `name__`
    (null thisObj) variants around the private implementation.
    """
    def __init__(self, idlObject, descriptorProvider, baseName, methods,
                 getters=[], setters=[]):
        self.baseName = baseName
        self._deps = idlObject.getDeps()
        name = idlObject.identifier.name
        # For our public methods that needThisHandling we want most of the
        # same args and the same return type as what CallbackMember
        # generates. So we want to take advantage of all its
        # CGNativeMember infrastructure, but that infrastructure can't deal
        # with templates and most especially template arguments. So just
        # cheat and have CallbackMember compute all those things for us.
        realMethods = []
        for method in methods:
            if not method.needThisHandling:
                realMethods.append(method)
            else:
                realMethods.extend(self.getMethodImpls(method))
        CGClass.__init__(self, name,
                         bases=[ClassBase(baseName)],
                         constructors=self.getConstructors(),
                         methods=realMethods + getters + setters,
                         decorators="#[derive(JSTraceable, PartialEq)]")

    def getConstructors(self):
        return [ClassConstructor(
            [Argument("*mut JSObject", "aCallback")],
            bodyInHeader=True,
            visibility="pub",
            explicit=False,
            baseConstructors=[
                "%s::new()" % self.baseName
            ])]

    def getMethodImpls(self, method):
        assert method.needThisHandling
        args = list(method.args)
        # Strip out the JSContext*/JSObject* args
        # that got added.
        assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
        assert args[1].name == "aThisObj" and args[1].argType == "HandleObject"
        args = args[2:]
        # Record the names of all the arguments, so we can use them when we call
        # the private method.
        argnames = [arg.name for arg in args]
        argnamesWithThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
        argnamesWithoutThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
        # Now that we've recorded the argnames for our call to our private
        # method, insert our optional argument for deciding whether the
        # CallSetup should re-throw exceptions on aRv.
        args.append(Argument("ExceptionHandling", "aExceptionHandling",
                             "ReportExceptions"))

        # And now insert our template argument.
        argsWithoutThis = list(args)
        args.insert(0, Argument("&T", "thisObj"))

        # And the self argument
        method.args.insert(0, Argument(None, "&self"))
        args.insert(0, Argument(None, "&self"))
        argsWithoutThis.insert(0, Argument(None, "&self"))

        # Shared preamble: enter the callback's compartment; a null context
        # means the call cannot proceed.
        setupCall = ("let s = CallSetup::new(self, aExceptionHandling);\n"
                     "if s.get_context().is_null() {\n"
                     " return Err(JSFailed);\n"
                     "}\n")

        bodyWithThis = string.Template(
            setupCall +
            "let mut thisObjJS = RootedObject::new(s.get_context(), ptr::null_mut());\n"
            "wrap_call_this_object(s.get_context(), thisObj, thisObjJS.handle_mut());\n"
            "if thisObjJS.ptr.is_null() {\n"
            " return Err(JSFailed);\n"
            "}\n"
            "return ${methodName}(${callArgs});").substitute({
                "callArgs": ", ".join(argnamesWithThis),
                "methodName": 'self.' + method.name,
            })
        bodyWithoutThis = string.Template(
            setupCall +
            "let thisObjJS = RootedObject::new(s.get_context(), ptr::null_mut());"
            "return ${methodName}(${callArgs});").substitute({
                "callArgs": ", ".join(argnamesWithoutThis),
                "methodName": 'self.' + method.name,
            })
        return [ClassMethod(method.name + '_', method.returnType, args,
                            bodyInHeader=True,
                            templateArgs=["T: Reflectable"],
                            body=bodyWithThis,
                            visibility='pub'),
                ClassMethod(method.name + '__', method.returnType, argsWithoutThis,
                            bodyInHeader=True,
                            body=bodyWithoutThis,
                            visibility='pub'),
                method]

    def deps(self):
        return self._deps
# We're always fallible
def callbackGetterName(attr, descriptor):
    """Name of the generated native getter for a callback-interface attribute."""
    binaryName = descriptor.binaryNameFor(attr.identifier.name)
    return "Get" + MakeNativeName(binaryName)
def callbackSetterName(attr, descriptor):
    """Name of the generated native setter for a callback-interface attribute."""
    binaryName = descriptor.binaryNameFor(attr.identifier.name)
    return "Set" + MakeNativeName(binaryName)
class CGCallbackFunction(CGCallback):
    """Codegen for a callback function type, built on CallbackFunction."""

    def __init__(self, callback, descriptorProvider):
        call = CallCallback(callback, descriptorProvider)
        CGCallback.__init__(self, callback, descriptorProvider,
                            "CallbackFunction", methods=[call])

    def getConstructors(self):
        # No constructors beyond those the base class provides.
        return CGCallback.getConstructors(self)
class CGCallbackFunctionImpl(CGGeneric):
    """
    Emits the CallbackContainer and ToJSValConvertible impls for a callback
    type, delegating to the parent CallbackFunction/CallbackInterface.
    """
    def __init__(self, callback):
        impl = string.Template("""\
impl CallbackContainer for ${type} {
fn new(callback: *mut JSObject) -> Rc<${type}> {
${type}::new(callback)
}
fn callback(&self) -> *mut JSObject {
self.parent.callback()
}
}
impl ToJSValConvertible for ${type} {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
self.callback().to_jsval(cx, rval);
}
}\
""").substitute({"type": callback.identifier.name})
        CGGeneric.__init__(self, impl)
class CGCallbackInterface(CGCallback):
    """Codegen for a WebIDL callback interface: wraps its non-static
    attributes and operations in getter/setter/operation helpers."""

    def __init__(self, descriptor):
        iface = descriptor.interface

        attrs = []
        for member in iface.members:
            if member.isAttr() and not member.isStatic():
                attrs.append(member)
        getters = [CallbackGetter(attr, descriptor) for attr in attrs]
        setters = [CallbackSetter(attr, descriptor)
                   for attr in attrs if not attr.readonly]

        ops = []
        for member in iface.members:
            if not member.isMethod() or member.isStatic() or member.isIdentifierLess():
                continue
            for sig in member.signatures():
                ops.append(CallbackOperation(member, sig, descriptor))

        # Sanity check: a JS-implemented callback interface must not also
        # declare a constructor.
        assert not iface.isJSImplemented() or not iface.ctor()
        CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
                            ops, getters=getters, setters=setters)
class FakeMember():
    """Minimal stand-in for an IDL member, used by callback codegen where
    the real member kind (attribute vs. operation) does not matter."""

    def __init__(self):
        # Callers read this field; "Default" mirrors the usual
        # [TreatNullAs] value -- confirm against CGNativeMember's consumers.
        self.treatNullAs = "Default"

    def isAttr(self):
        """Never an attribute."""
        return False

    def isMethod(self):
        """Never a method."""
        return False

    def isStatic(self):
        """Never static."""
        return False

    def getExtendedAttribute(self, name):
        """Report no extended attributes, whatever *name* is asked for."""
        return None
class CallbackMember(CGNativeMember):
    # Base codegen for one callable member of a callback (the Call method,
    # an operation, a getter, or a setter).  Generates the Rust body that
    # converts IDL arguments to JS values, performs the JS call supplied by
    # a subclass, and converts the JS result back to the IDL return type.
    def __init__(self, sig, name, descriptorProvider, needThisHandling):
        """
        needThisHandling is True if we need to be able to accept a specified
        thisObj, False otherwise.
        """
        self.retvalType = sig[0]
        self.originalSig = sig
        args = sig[1]
        self.argCount = len(args)
        if self.argCount > 0:
            # Check for variadic arguments
            lastArg = args[self.argCount - 1]
            if lastArg.variadic:
                # Runtime argument count: fixed args plus the variadic slice.
                self.argCountStr = (
                    "(%d - 1) + %s.len()" % (self.argCount,
                                             lastArg.identifier.name))
            else:
                self.argCountStr = "%d" % self.argCount
        self.needThisHandling = needThisHandling
        # If needThisHandling, we generate ourselves as private and the caller
        # will handle generating public versions that handle the "this" stuff.
        visibility = "priv" if needThisHandling else "pub"
        # We don't care, for callback codegen, whether our original member was
        # a method or attribute or whatnot.  Just always pass FakeMember()
        # here.
        CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
                                name, (self.retvalType, args),
                                extendedAttrs={},
                                passJSBitsAsNeeded=False,
                                visibility=visibility)
        # We have to do all the generation of our body now, because
        # the caller relies on us throwing if we can't manage it.
        self.exceptionCode = "return Err(JSFailed);"
        self.body = self.getImpl()
    def getImpl(self):
        # Assemble the full method body from the pieces supplied by the
        # subclass hooks (rval decl, arg conversions, the call itself, and
        # the result conversion).
        replacements = {
            "declRval": self.getRvalDecl(),
            "returnResult": self.getResultConversion(),
            "convertArgs": self.getArgConversions(),
            "doCall": self.getCall(),
            "setupCall": self.getCallSetup(),
        }
        if self.argCount > 0:
            replacements["argCount"] = self.argCountStr
            replacements["argvDecl"] = string.Template(
                "let mut argv = vec![UndefinedValue(); ${argCount}];\n"
            ).substitute(replacements)
        else:
            # Avoid weird 0-sized arrays
            replacements["argvDecl"] = ""
        # Newlines and semicolons are in the values
        pre = string.Template(
            "${setupCall}"
            "${declRval}"
            "${argvDecl}").substitute(replacements)
        body = string.Template(
            "${convertArgs}"
            "${doCall}"
            "${returnResult}").substitute(replacements)
        # Everything runs inside an `unsafe` block in the generated Rust.
        return CGWrapper(CGIndenter(CGList([
            CGGeneric(pre),
            CGGeneric(body),
        ], "\n"), 4), pre="unsafe {\n", post="\n}").define()
    def getResultConversion(self):
        # Convert the JS value left in `rval` back into the IDL return type.
        replacements = {
            "val": "rval.handle()",
        }
        info = getJSToNativeConversionInfo(
            self.retvalType,
            self.descriptorProvider,
            exceptionCode=self.exceptionCode,
            isCallbackReturnValue="Callback",
            # XXXbz we should try to do better here
            sourceDescription="return value")
        template = info.template
        declType = info.declType
        convertType = instantiateJSToNativeConversionTemplate(
            template, replacements, declType, "rvalDecl")
        if self.retvalType is None or self.retvalType.isVoid():
            retval = "()"
        elif self.retvalType.isAny():
            retval = "rvalDecl.get()"
        else:
            retval = "rvalDecl"
        return "%s\nOk(%s)\n" % (convertType.define(), retval)
    def getArgConversions(self):
        # Just reget the arglist from self.originalSig, because our superclasses
        # just have way to many members they like to clobber, so I can't find a
        # safe member name to store it in.
        argConversions = [self.getArgConversion(i, arg) for (i, arg)
                          in enumerate(self.originalSig[1])]
        # Do them back to front, so our argc modifications will work
        # correctly, because we examine trailing arguments first.
        argConversions.reverse()
        argConversions = [CGGeneric(c) for c in argConversions]
        if self.argCount > 0:
            argConversions.insert(0, self.getArgcDecl())
        # And slap them together.
        return CGList(argConversions, "\n\n").define() + "\n\n"
    def getArgConversion(self, i, arg):
        # Emit code that stores IDL argument `i` into argv, handling
        # variadic and optional-without-default arguments specially.
        argval = arg.identifier.name
        if arg.variadic:
            # Each element of the variadic slice gets its own argv slot.
            argval = argval + "[idx].get()"
            jsvalIndex = "%d + idx" % i
        else:
            jsvalIndex = "%d" % i
            if arg.optional and not arg.defaultValue:
                # Optional args without defaults arrive as Option<_>.
                argval += ".clone().unwrap()"
        conversion = wrapForType(
            "argv_root.handle_mut()", result=argval,
            successCode="argv[%s] = argv_root.ptr;" % jsvalIndex,
            pre="let mut argv_root = RootedValue::new(cx, UndefinedValue());")
        if arg.variadic:
            conversion = string.Template(
                "for idx in 0..${arg}.len() {\n" +
                CGIndenter(CGGeneric(conversion)).define() + "\n"
                "}"
            ).substitute({"arg": arg.identifier.name})
        elif arg.optional and not arg.defaultValue:
            # Absent trailing optionals shrink argc; non-trailing absent
            # optionals are passed as `undefined`.
            conversion = (
                CGIfWrapper("%s.is_some()" % arg.identifier.name,
                            CGGeneric(conversion)).define() +
                " else if argc == %d {\n"
                "    // This is our current trailing argument; reduce argc\n"
                "    argc -= 1;\n"
                "} else {\n"
                "    argv[%d] = UndefinedValue();\n"
                "}" % (i + 1, i))
        return conversion
    def getArgs(self, returnType, argList):
        args = CGNativeMember.getArgs(self, returnType, argList)
        if not self.needThisHandling:
            # Since we don't need this handling, we're the actual method that
            # will be called, so we need an aRethrowExceptions argument.
            args.append(Argument("ExceptionHandling", "aExceptionHandling",
                                 "ReportExceptions"))
            return args
        # We want to allow the caller to pass in a "this" object, as
        # well as a JSContext.
        return [Argument("*mut JSContext", "cx"),
                Argument("HandleObject", "aThisObj")] + args
    def getCallSetup(self):
        if self.needThisHandling:
            # It's been done for us already
            return ""
        # NOTE(review): this branch emits C++-style setup code, not Rust;
        # looks like a Gecko leftover -- confirm whether it is ever reached.
        return (
            "CallSetup s(CallbackPreserveColor(), aRv, aExceptionHandling);\n"
            "JSContext* cx = s.get_context();\n"
            "if (!cx) {\n"
            "    return Err(JSFailed);\n"
            "}\n")
    def getArgcDecl(self):
        # `argc` only needs to be mutable when trailing optionals may
        # decrement it (see getArgConversion).
        if self.argCount <= 1:
            return CGGeneric("let argc = %s;" % self.argCountStr)
        return CGGeneric("let mut argc = %s;" % self.argCountStr)
    @staticmethod
    def ensureASCIIName(idlObject):
        # Reject names the generated code cannot represent safely:
        # non-printable/non-ASCII characters and embedded double quotes.
        type = "attribute" if idlObject.isAttr() else "operation"
        if re.match("[^\x20-\x7E]", idlObject.identifier.name):
            raise SyntaxError('Callback %s name "%s" contains non-ASCII '
                              "characters. We can't handle that. %s" %
                              (type, idlObject.identifier.name,
                               idlObject.location))
        if re.match('"', idlObject.identifier.name):
            raise SyntaxError("Callback %s name '%s' contains "
                              "double-quote character. We can't handle "
                              "that. %s" %
                              (type, idlObject.identifier.name,
                               idlObject.location))
class CallbackMethod(CallbackMember):
    # A CallbackMember invoked as a JS function call.  Subclasses supply
    # getThisObj() and getCallableDecl() to pick the `this` value and the
    # callable expression.
    def __init__(self, sig, name, descriptorProvider, needThisHandling):
        CallbackMember.__init__(self, sig, name, descriptorProvider,
                                needThisHandling)
    def getRvalDecl(self):
        # Root a JS value to receive the call's return value.
        return "let mut rval = RootedValue::new(cx, UndefinedValue());\n"
    def getCall(self):
        # Emit the JS_CallFunctionValue invocation; argv/argc come from the
        # conversions generated by the base class.
        replacements = {
            "thisObj": self.getThisObj(),
            "getCallable": self.getCallableDecl()
        }
        if self.argCount > 0:
            replacements["argv"] = "argv.as_ptr()"
            replacements["argc"] = "argc"
        else:
            # No arguments: pass a null pointer and a zero length.
            replacements["argv"] = "ptr::null_mut()"
            replacements["argc"] = "0"
        return string.Template(
            "${getCallable}"
            "let rootedThis = RootedObject::new(cx, ${thisObj});\n"
            "let ok = JS_CallFunctionValue(\n"
            "    cx, rootedThis.handle(), callable.handle(),\n"
            "    &HandleValueArray {\n"
            "        length_: ${argc} as ::libc::size_t,\n"
            "        elements_: ${argv}\n"
            "    }, rval.handle_mut());\n"
            "if !ok {\n"
            "    return Err(JSFailed);\n"
            "}\n").substitute(replacements)
class CallCallback(CallbackMethod):
    """Codegen for invoking a plain callback function value."""

    def __init__(self, callback, descriptorProvider):
        signature = callback.signatures()[0]
        CallbackMethod.__init__(self, signature, "Call", descriptorProvider,
                                needThisHandling=True)

    def getThisObj(self):
        # The caller supplies the `this` object explicitly.
        return "aThisObj.get()"

    def getCallableDecl(self):
        # The callback object itself is the callable.
        return "let callable = RootedValue::new(cx, ObjectValue(&*self.parent.callback()));\n"
class CallbackOperationBase(CallbackMethod):
    """
    Common class for implementing various callback operations.
    """
    def __init__(self, signature, jsName, nativeName, descriptor, singleOperation):
        # singleOperation: True when this is the lone operation of a
        # single-operation callback interface, in which case the callback
        # object itself may be directly callable.
        self.singleOperation = singleOperation
        self.methodName = jsName
        CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation)
    def getThisObj(self):
        if not self.singleOperation:
            return "self.parent.callback()"
        # This relies on getCallableDecl declaring a boolean
        # isCallable in the case when we're a single-operation
        # interface.
        return "if isCallable { aThisObj.get() } else { self.parent.callback() }"
    def getCallableDecl(self):
        # Look the operation up as a property on the callback object, or use
        # the object itself when it is directly callable.
        replacements = {
            "methodName": self.methodName
        }
        getCallableFromProp = string.Template(
            'RootedValue::new(cx, try!(self.parent.get_callable_property(cx, "${methodName}")))'
        ).substitute(replacements)
        if not self.singleOperation:
            # NOTE(review): this branch emits C++-style code ("JS::Rooted"),
            # unlike the Rust output elsewhere -- confirm it is unreachable
            # or fix the emitted text.
            return 'JS::Rooted<JS::Value> callable(cx);\n' + getCallableFromProp
        return (
            'let isCallable = IsCallable(self.parent.callback());\n'
            'let callable =\n' +
            CGIndenter(
                CGIfElseWrapper('isCallable',
                                CGGeneric('RootedValue::new(cx, ObjectValue(&*self.parent.callback()))'),
                                CGGeneric(getCallableFromProp))).define() + ';\n')
class CallbackOperation(CallbackOperationBase):
    """
    Codegen actual WebIDL operations on callback interfaces.
    """

    def __init__(self, method, signature, descriptor):
        self.ensureASCIIName(method)
        jsName = method.identifier.name
        nativeName = MakeNativeName(descriptor.binaryNameFor(jsName))
        singleOperation = descriptor.interface.isSingleOperationInterface()
        CallbackOperationBase.__init__(self, signature, jsName, nativeName,
                                       descriptor, singleOperation)
class CallbackGetter(CallbackMember):
    """Codegen for reading an attribute of a callback interface object."""
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        CallbackMember.__init__(self,
                                (attr.type, []),
                                # Fix: callbackGetterName takes (attr,
                                # descriptor); the descriptor is needed to
                                # resolve [BinaryName] mappings.  The old
                                # one-argument call raised TypeError.
                                callbackGetterName(attr, descriptor),
                                descriptor,
                                needThisHandling=False)
    def getRvalDecl(self):
        # NOTE(review): emits C++-style code, unlike the Rust output
        # elsewhere -- possibly a dead Gecko leftover; confirm.
        return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"
    def getCall(self):
        replacements = {
            "attrName": self.attrName
        }
        return string.Template(
            'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
            '    return Err(JSFailed);\n'
            '}\n').substitute(replacements)
class CallbackSetter(CallbackMember):
    """Codegen for writing an attribute of a callback interface object."""
    def __init__(self, attr, descriptor):
        self.ensureASCIIName(attr)
        self.attrName = attr.identifier.name
        CallbackMember.__init__(self,
                                (BuiltinTypes[IDLBuiltinType.Types.void],
                                 [FakeArgument(attr.type, attr)]),
                                # Fix: callbackSetterName takes (attr,
                                # descriptor); the descriptor is needed to
                                # resolve [BinaryName] mappings.  The old
                                # one-argument call raised TypeError.
                                callbackSetterName(attr, descriptor),
                                descriptor,
                                needThisHandling=False)
    def getRvalDecl(self):
        # We don't need an rval
        return ""
    def getCall(self):
        # NOTE(review): emits C++-style code (MOZ_ASSERT/mCallback), unlike
        # the Rust output elsewhere -- possibly a dead Gecko leftover.
        replacements = {
            "attrName": self.attrName,
            "argv": "argv.handleAt(0)",
        }
        return string.Template(
            'MOZ_ASSERT(argv.length() == 1);\n'
            'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
            '    return Err(JSFailed);\n'
            '}\n').substitute(replacements)
    def getArgcDecl(self):
        # Setters take exactly one argument; no argc variable is needed.
        return None
class GlobalGenRoots():
    """
    Roots for global codegen.
    To generate code, call the method associated with the target, and then
    call the appropriate define/declare method.
    """
    @staticmethod
    def PrototypeList(config):
        # Prototype ID enum.
        # Emits PrototypeList.rs: proto/constructor ID enums, the interface
        # name table, and the proxy enum.
        interfaces = config.getDescriptors(isCallback=False)
        protos = [d.name for d in interfaces]
        constructors = [d.name for d in interfaces if d.hasDescendants()]
        proxies = [d.name for d in config.getDescriptors(proxy=True)]
        return CGList([
            CGGeneric(AUTOGENERATED_WARNING_COMMENT),
            CGGeneric("pub const PROTO_OR_IFACE_LENGTH: usize = %d;\n" % (len(protos) + len(constructors))),
            CGGeneric("pub const MAX_PROTO_CHAIN_LENGTH: usize = %d;\n\n" % config.maxProtoChainLength),
            CGNonNamespacedEnum('ID', protos, 0, deriving="PartialEq, Copy, Clone", repr="u16"),
            # Constructor IDs are numbered after the prototype IDs.
            CGNonNamespacedEnum('Constructor', constructors, len(protos),
                                deriving="PartialEq, Copy, Clone", repr="u16"),
            CGWrapper(CGIndenter(CGList([CGGeneric('"' + name + '"') for name in protos],
                                        ",\n"),
                                 indentLevel=4),
                      pre="static INTERFACES: [&'static str; %d] = [\n" % len(protos),
                      post="\n];\n\n"),
            CGGeneric("pub fn proto_id_to_name(proto_id: u16) -> &'static str {\n"
                      "    debug_assert!(proto_id < ID::Last as u16);\n"
                      "    INTERFACES[proto_id as usize]\n"
                      "}\n\n"),
            CGNonNamespacedEnum('Proxies', proxies, 0, deriving="PartialEq, Copy, Clone"),
        ])
    @staticmethod
    def RegisterBindings(config):
        # TODO - Generate the methods we want
        # Emits RegisterBindings.rs: the proto/proxy-handler registration code.
        code = CGList([
            CGRegisterProtos(config),
            CGRegisterProxyHandlers(config),
        ], "\n")
        return CGImports(code, [], [], [
            'dom::bindings::codegen',
            'dom::bindings::codegen::PrototypeList::Proxies',
            'js::jsapi::JSContext',
            'js::jsapi::HandleObject',
            'libc',
        ], ignored_warnings=[])
    @staticmethod
    def InterfaceTypes(config):
        # Emits re-exports of every registered non-callback interface type.
        descriptors = [d.name for d in config.getDescriptors(register=True, isCallback=False)]
        curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(), name)) for name in descriptors])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def Bindings(config):
        # Emits the `pub mod` list for every generated binding/callback/
        # dictionary module, sorted for stable output.
        descriptors = (set(d.name + "Binding" for d in config.getDescriptors(register=True)) |
                       set(getModuleFromObject(d) for d in config.callbacks) |
                       set(getModuleFromObject(d) for d in config.getDictionaries()))
        curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def InheritTypes(config):
        # Emits InheritTypes.rs: Castable/DerivedFrom impls plus the TypeId
        # enums describing the interface inheritance hierarchy.
        descriptors = config.getDescriptors(register=True, isCallback=False)
        imports = [CGGeneric("use dom::types::*;\n"),
                   CGGeneric("use dom::bindings::conversions::{DerivedFrom, get_dom_class};\n"),
                   CGGeneric("use dom::bindings::inheritance::Castable;\n"),
                   CGGeneric("use dom::bindings::js::{JS, LayoutJS, Root};\n"),
                   CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
                   CGGeneric("use dom::bindings::reflector::Reflectable;\n"),
                   CGGeneric("use js::jsapi::JSTracer;\n\n"),
                   CGGeneric("use std::mem;\n\n")]
        allprotos = []
        topTypes = []
        hierarchy = defaultdict(list)
        for descriptor in descriptors:
            name = descriptor.name
            chain = descriptor.prototypeChain
            upcast = descriptor.hasDescendants()
            downcast = len(chain) != 1
            if upcast and not downcast:
                topTypes.append(name)
            if not upcast:
                # No other interface will implement DeriveFrom<Foo> for this Foo, so avoid
                # implementing it for itself.
                chain = chain[:-1]
            # Implement `DerivedFrom<Bar>` for `Foo`, for all `Bar` that `Foo` inherits from.
            if chain:
                allprotos.append(CGGeneric("impl Castable for %s {}\n" % name))
            for baseName in chain:
                allprotos.append(CGGeneric("impl DerivedFrom<%s> for %s {}\n" % (baseName, name)))
            if chain:
                allprotos.append(CGGeneric("\n"))
            if downcast:
                hierarchy[descriptor.getParentName()].append(name)
        typeIdCode = []
        topTypeVariants = [
            ("ID used by abstract interfaces.", "Abstract"),
            ("ID used by interfaces that are not castable.", "Alone"),
        ]
        topTypeVariants += [
            ("ID used by interfaces that derive from %s." % typeName, "%s(%sTypeId)" % (typeName, typeName))
            for typeName in topTypes
        ]
        topTypeVariantsAsStrings = [CGGeneric("/// %s\n%s," % variant) for variant in topTypeVariants]
        typeIdCode.append(CGWrapper(CGIndenter(CGList(topTypeVariantsAsStrings, "\n"), 4),
                                    pre="#[derive(Clone, Copy, Debug)]\npub enum TopTypeId {\n",
                                    post="\n}\n\n"))
        def type_id_variant(name):
            # If `name` is present in the hierarchy keys', that means some other interfaces
            # derive from it and this enum variant should have an argument with its own
            # TypeId enum.
            return "%s(%sTypeId)" % (name, name) if name in hierarchy else name
        # Python 2 dict API (iteritems); use .items() if porting to Python 3.
        for base, derived in hierarchy.iteritems():
            variants = []
            if not config.getInterface(base).getExtendedAttribute("Abstract"):
                variants.append(CGGeneric(base))
            variants += [CGGeneric(type_id_variant(derivedName)) for derivedName in derived]
            derives = "Clone, Copy, Debug"
            if base != 'EventTarget' and base != 'HTMLElement':
                derives += ", PartialEq"
            typeIdCode.append(CGWrapper(CGIndenter(CGList(variants, ",\n"), 4),
                                        pre="#[derive(%s)]\npub enum %sTypeId {\n" % (derives, base),
                                        post="\n}\n\n"))
            if base in topTypes:
                typeIdCode.append(CGGeneric("""\
impl %(base)s {
    pub fn type_id(&self) -> &'static %(base)sTypeId {
        let domclass = unsafe {
            get_dom_class(self.reflector().get_jsobject().get()).unwrap()
        };
        match domclass.type_id {
            TopTypeId::%(base)s(ref type_id) => type_id,
            _ => unreachable!(),
        }
    }
}
""" % {'base': base}))
        curr = CGList(imports + typeIdCode + allprotos)
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        return curr
    @staticmethod
    def UnionTypes(config):
        # Emits the generated union type definitions.
        curr = UnionTypes(config.getDescriptors(),
                          config.getDictionaries(),
                          config.getCallbacks(),
                          config)
        # Add the auto-generated comment.
        curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
        # Done.
        return curr
|
mpl-2.0
| 7,539,845,005,135,834,000
| 38.54212
| 119
| 0.575087
| false
| 4.216175
| false
| false
| false
|
rwl/puddle
|
puddle/resource/action/properties_action.py
|
1
|
3749
|
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for viewing resource properties.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from enthought.io.api import File
from enthought.traits.api import Bool, Instance
from enthought.traits.ui.api import View, Item, Group
from enthought.pyface.action.api import Action
#------------------------------------------------------------------------------
# "PropertiesAction" class:
#------------------------------------------------------------------------------
class PropertiesAction(Action):
    """ Defines an action for viewing resource properties.
    """

    # The action's name (displayed on menus/tool bar tools etc):
    name = "P&roperties"

    # Keyboard accelerator:
    accelerator = "Alt+Enter"

    def perform(self, event):
        """ Perform the action.
        """
        selections = self.window.selection
        if not selections:
            return
        selection = selections[0]
        if not isinstance(selection, File):
            return
        selection.edit_traits(parent=self.window.control,
                              view=self._create_resource_view(selection),
                              kind="livemodal")

    def _create_resource_view(self, selection):
        """ Creates a resource view.
        """
        # Parentheses make the precedence explicit: format the name, then
        # append the extension (same result as before).
        title = ("Properties for %s" % selection.name) + selection.ext
        return View(
            Item(name="absolute_path", style="readonly"),
            # FIXME: Readonly boolean editor is just blank
            # Item(name="exists", style="readonly"),
            # Item(name="is_file", style="readonly"),
            # Item(name="is_folder", style="readonly"),
            # Item(name="is_package", style="readonly"),
            # Item(name="is_readonly", style="readonly"),
            Item(name="mime_type", style="readonly"),
            Item(name="url", style="readonly"),
            title=title,
            icon=self.window.application.icon)
# EOF -------------------------------------------------------------------------
|
mit
| -6,371,690,640,377,739,000
| 41.123596
| 79
| 0.51107
| false
| 5.236034
| false
| false
| false
|
endlessm/endless-ndn
|
eos_data_distribution/parallel.py
|
1
|
2456
|
# -*- Mode:python; coding: utf-8; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (C) 2016 Endless Mobile, Inc.
# Author: Niv Sardi <xaiki@endlessm.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# A copy of the GNU Lesser General Public License is in the file COPYING.
import logging
from gi.repository import GObject
logger = logging.getLogger(__name__)
class Batch(GObject.GObject):
    """Run a set of workers in parallel; emit 'complete' once every worker
    has emitted its own 'complete' signal.

    Each worker must expose start() and a GObject 'complete' signal.
    """

    __gsignals__ = {
        'complete': (GObject.SIGNAL_RUN_FIRST, None, ()),
    }

    def __init__(self, workers, type="Batch"):
        # `type` shadows the builtin but is kept for caller compatibility;
        # it is only used as a label in log messages.
        super(Batch, self).__init__()
        self._type = type
        self._incomplete_workers = set(workers)
        for worker in self._incomplete_workers:
            worker.connect('complete', self._on_batch_complete)

    def start(self):
        if not self._incomplete_workers:
            logger.info('%s complete: no workers', self._type)
            self.emit('complete')
            return
        # Iterate over a snapshot: a worker that completes synchronously
        # inside start() would otherwise mutate the set mid-iteration and
        # raise "Set changed size during iteration".
        for worker in list(self._incomplete_workers):
            worker.start()

    def _on_batch_complete(self, worker):
        # Signal handler: drop the finished worker; fire 'complete' when the
        # last one is done.
        logger.info("%s complete: %s", self._type, worker)
        self._incomplete_workers.remove(worker)
        if not self._incomplete_workers:
            self.emit('complete')
if __name__ == '__main__':
    # Demo/smoke test: fetch `count` files in parallel over NDN and quit the
    # GLib main loop once every consumer reports completion.
    import argparse
    from . import utils
    from gi.repository import GLib
    from ndn.file import FileConsumer
    parser = argparse.ArgumentParser()
    parser.add_argument("-o", "--output")
    parser.add_argument("-c", "--count", default=10, type=int)
    args = utils.parse_args(parser=parser)
    loop = GLib.MainLoop()
    # One consumer per index: both the NDN name and the output path get an
    # "-<i>" suffix.  (args.name is presumably added by utils.parse_args.)
    consumers = [FileConsumer("%s-%s"%(args.name, i), "%s-%s"%(args.output, i))
                 for i in range(args.count)]
    batch = Batch(workers=consumers)
    batch.connect('complete', lambda *a: loop.quit())
    batch.start()
    loop.run()
|
lgpl-3.0
| -2,895,144,727,426,026,000
| 32.643836
| 81
| 0.65513
| false
| 3.8375
| false
| false
| false
|
SP2RC-Coding-Club/Codes
|
13_07_2017/3D_slab_modes.py
|
1
|
35096
|
#import pdb # pause code for debugging at pdb.set_trace()
import numpy as np
import toolbox as tool
import slab_functions as sf
from pysac.plot.mayavi_seed_streamlines import SeedStreamline
import matplotlib.pyplot as plt
from mayavi import mlab
import gc
#import move_seed_points as msp
import mayavi_plotting_functions as mpf
import dispersion_diagram
import img2vid as i2v
from functools import partial
import os
# ================================
# Preamble: set mode options and view parameters
# ================================
# What mode do you want? OPTIONS:
# Available oscillation modes for the slab visualisation.
mode_options = ['slow-kink-surf', 'slow-saus-surf', 'slow-saus-body-3',
                'slow-kink-body-3', 'slow-saus-body-2', 'slow-kink-body-2',
                'slow-saus-body-1', 'slow-kink-body-1', 'fast-saus-body-1',
                'fast-kink-body-1', 'fast-saus-body-2', 'fast-kink-body-2',
                'fast-saus-body-3', 'fast-kink-body-3', 'fast-kink-surf',
                'fast-saus-surf', 'shear-alfven', 'shear-alfven-broadband']
# Which angle shall we view from? OPTIONS:
view_options = ['front', 'front-parallel', 'top', 'top-parallel', 'front-top',
                'front-side', 'front-top-side']
# Uniform lighting?
#uniform_light = True
uniform_light = False
# Default every visualisation module to off; selected ones are switched on
# below.
show_density = False
show_density_pert = False
show_mag = False
show_mag_scale = False
show_mag_fade = False
show_mag_vec = False
show_vel_front = False
show_vel_front_pert = False
show_vel_top = False
show_vel_top_pert = False
show_disp_top = False
show_disp_front = False
show_axes = False
show_axis_labels = False
show_mini_axis = False
show_boundary = False
# Uncomment the parameter you would like to see.
# No density perturbations or vel/disp pert for alfven modes.
#show_density = True
#show_density_pert = True
show_mag = True
#show_mag_scale = True #must also have show_mag = True
#show_mag_fade = True
#show_mag_vec = True
#show_vel_front = True
#show_vel_front_pert = True
#show_vel_top = True
#show_vel_top_pert = True
#show_disp_top = True
#show_disp_front = True
show_axes = True
#show_axis_labels = True
show_mini_axis = True
show_boundary = True
# Visualisation modules in string form for file-names
vis_modules = [show_density, show_density_pert, show_mag, show_mag_scale,
               show_mag_fade, show_mag_vec, show_vel_front, show_vel_front_pert,
               show_vel_top, show_vel_top_pert, show_disp_top, show_disp_front]
vis_modules_strings = ['show_density', 'show_density_pert', 'show_mag', 'show_mag_scale',
                       'show_mag_fade', 'show_mag_vec', 'show_vel_front', 'show_vel_front_pert',
                       'show_vel_top', 'show_vel_top_pert', 'show_disp_top', 'show_disp_front']
vis_mod_string = ''
# Build a file-name tag from the enabled modules, dropping the 'show_'
# prefix (the [5:] slice) and joining with underscores.
for i, j in enumerate(vis_modules):
    if vis_modules[i]:
        vis_mod_string = vis_mod_string + vis_modules_strings[i][5:] + '_'
# Set to True if you would like the dispersion diagram with chosen mode highlighted.
show_dispersion = False
#show_dispersion = True
# Wanna see the animation? Of course you do
#show_animation = False
show_animation = True
# Basic plot to see which eigensolutions have been found.
show_quick_plot = False
#show_quick_plot = True
# Video resolution
#res = (1920,1080) # There is a problem with this resolution- height must be odd number - Mayavi bug apparently
res = tuple(101 * np.array((16,9)))
#res = tuple(51 * np.array((16,9)))
#res = tuple(21 * np.array((16,9)))
number_of_frames = 1
# Frames per second of output video
fps = 20
#save_images = False
save_images = True
make_video = False
#make_video = True
# Where should I save the animation images/videos?
# NOTE(review): the next line's result is discarded (dead statement), and
# os.chdir moves the process to the parent directory as a side effect.
os.path.abspath(os.curdir)
os.chdir('..')
save_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_animations')
# Where should I save the dispersion diagrams?
save_dispersion_diagram_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_dispersion_diagrams')
# ================================
# Visualisation set-up
# ================================
# Variable definitions (for reference):
# x = k*x
# y = k*y
# z = k*z
# W = omega/k
# K = k*x_0
# t = omega*t
# Loop through selected modes
for mode_ind in [0]:#range(8,14): # for all others. REMEMBER SBB pparameters
#for mode_ind in [14,15]: #for fast body surf. REMEMBER SBS parameters
#for mode_ind in [16, 17]:
#for mode_ind in [13]: #for an individual mode
#for mode_ind in range(2,14):
if mode_ind not in range(len(mode_options)):
raise NameError('Mode not in mode_options')
# (note that fast surface modes, i.e. 14 and 15, can only be
# found with SBS parameters in slab_functions...)
mode = mode_options[mode_ind]
# Specify oscillation parameters
if 'slow' in mode and 'surf' in mode or 'alfven' in mode:
K = 2.
elif 'slow' in mode and 'body' in mode:
K = 8.
elif 'fast' in mode and 'body-1' in mode:
K = 8.
elif 'fast' in mode and 'body-2' in mode:
K = 15.
elif 'fast' in mode and 'body-3' in mode:
K = 22.
elif 'fast' in mode and 'surf' in mode:
K = 8.
else:
raise NameError('Mode not found')
# Specify density ratio R1 := rho_1 / rho_0
# R1 = 1.5 # Higher denisty on left than right
# R1 = 1.8
# R1 = 1.9 # Disp_diagram will only work for R1=1.5, 1.8, 2.0
R1 = 2. # Symmetric slab
# Reduce number of variables in dispersion relation
disp_rel_partial = partial(sf.disp_rel_asym, R1=R1)
# find eigenfrequencies W (= omega/k) within the range Wrange for the given parameters.
Wrange1 = np.linspace(0., sf.cT, 11)
Wrange2 = np.linspace(sf.cT, sf.c0, 401)
Wrange3 = np.linspace(sf.c0, sf.c2, 11)
Woptions_slow_surf = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange1, args=None).transpose())
Woptions_slow_body = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange2, args=None).transpose())
Woptions_fast = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange3, args=None).transpose())
# Remove W values that are very close to characteristic speeds - these are spurious solutions
tol = 1e-2
indices_to_rm = []
for i, w in enumerate(Woptions_slow_surf):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < 0 or w > sf.cT:
indices_to_rm.append(i)
Woptions_slow_surf = np.delete(Woptions_slow_surf, indices_to_rm)
Woptions_slow_surf.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_slow_body):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.cT or w > sf.c0:
indices_to_rm.append(i)
Woptions_slow_body = np.delete(Woptions_slow_body, indices_to_rm)
Woptions_slow_body.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_fast):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.c0 or w > min(sf.c1, sf.c2):
indices_to_rm.append(i)
Woptions_fast = np.delete(Woptions_fast, indices_to_rm)
Woptions_fast.sort()
# remove any higher order slow body modes - we only want to do the first 3 saus/kink
if len(Woptions_slow_body) > 6:
Woptions_slow_body = np.delete(Woptions_slow_body, range(len(Woptions_slow_body) - 6))
Woptions = np.concatenate((Woptions_slow_surf, Woptions_slow_body, Woptions_fast))
# set W to be the eigenfrequency for the requested mode
if 'fast-saus-body' in mode or 'fast-kink-surf' in mode:
W = Woptions_fast[-2]
elif 'fast-kink-body' in mode or 'fast-saus-surf' in mode:
W = Woptions_fast[-1]
elif 'slow' in mode and 'surf' in mode:
W = Woptions_slow_surf[mode_ind]
elif 'slow' in mode and 'body' in mode:
W = Woptions_slow_body[mode_ind-2]
if 'alfven' in mode:
W = sf.vA
else:
W = np.real(W)
# Quick plot to see if we are hitting correct mode
if show_quick_plot:
plt.plot([K] * len(Woptions), Woptions, '.')
plt.plot(K+0.5, W, 'go')
plt.xlim([0,23])
plt.show()
# ================================
# Dispersion diagram
# ================================
if show_dispersion:
if 'alfven' in mode:
raise NameError('Disperion plot requested for an alfven mode. Cant do that.')
dispersion_diagram.dispersion_diagram(mode_options, mode,
disp_rel_partial, K, W, R1)
# plt.tight_layout() # seems to make it chop the sides off with this
plt.savefig(os.path.join(save_dispersion_diagram_directory, 'R1_' + str(R1) + '_' + mode + '.png') )
plt.close()
# ================================
# Animation
# ================================
if show_animation:
print('Starting ' + mode)
# set grid parameters
xmin = -2.*K
xmax = 2.*K
ymin = 0.
ymax = 4.
zmin = 0.
zmax = 2*np.pi
# You can change ny but be careful changing nx, nz.
nx = 300#100 #100 #300 gives us reduced bouncing of field lines for the same video size, but there is significant computational cost.
ny = 300#100 #100 #100#20 #100
nz = 300#100 #100
nt = number_of_frames
if nz % nt != 0:
print("nt doesnt divide nz so there may be a problem with chopping in z direction for each time step")
t_start = 0.
t_end = zmax
t = t_start
xvals = np.linspace(xmin, xmax, nx)
yvals = np.linspace(ymin, ymax, ny)
zvals = np.linspace(zmin, zmax, nz, endpoint=False) # A fudge to give the height as exactly one wavelength
x_spacing = max(nx, ny, nz) / nx
y_spacing = max(nx, ny, nz) / ny
z_spacing = max(nx, ny, nz) / nz
# For masking points for plotting vector fields- have to do it manually due to Mayavi bug
mod = int(4 * nx / 100)
mod_y = int(np.ceil(mod / y_spacing))
# Get the data xi=displacement, v=velocity, b=mag field
if show_disp_top or show_disp_front:
xixvals = np.real(np.repeat(sf.xix(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xizvals = np.real(np.repeat(sf.xiz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xiyvals = np.real(np.repeat(sf.xiy(mode, xvals, zvals, t, W, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front or show_vel_top:
vxvals = np.real(np.repeat(sf.vx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.real(np.repeat(sf.vy(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front_pert or show_vel_top_pert:
vxvals = np.real(np.repeat(sf.vx_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.zeros_like(vxvals)
# Axis is defined on the mag field so we have to set up this data
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
# displacement at the right and left boundaries
if show_boundary:
xix_boundary_r_vals = np.real(np.repeat(K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='r')[:, np.newaxis], ny, axis=1))
xix_boundary_l_vals = np.real(np.repeat(-K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='l')[:, np.newaxis], ny, axis=1))
if show_density:
rho_vals = np.real(np.repeat(sf.rho(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
if show_density_pert:
rho_vals = np.real(np.repeat(sf.rho_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
# ================================
# Starting figure and visualisation modules
# ================================
zgrid_zy, ygrid_zy = np.mgrid[0:nz:(nz)*1j,
0:ny:(ny)*1j]
fig = mlab.figure(size=res) # (1920, 1080) for 1080p , tuple(101 * np.array((16,9))) #16:9 aspect ratio for video upload
# Spacing of grid so that we can display a visualisation cube without having the same number of grid points in each dimension
spacing = np.array([x_spacing, z_spacing, y_spacing])
if show_density or show_density_pert:
# Scalar field density
rho = mlab.pipeline.scalar_field(rho_vals_t, name="density", figure=fig)
rho.spacing = spacing
mpf.volume_red_blue(rho, rho_vals_t)
#Masking points
# Thin out the vector data (keeping every mod-th / mod_y-th point) so the
# arrow glyphs on the cut planes are not overcrowded.
if show_mag_vec:
    bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
                                                                                    'front', mod, mod_y)
if show_disp_top:
    xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
                                                                                 'top', mod, mod_y)
if show_disp_front:
    xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
                                                                                       'front', mod, mod_y)
if show_vel_top or show_vel_top_pert:
    vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
                                                                              'top', mod, mod_y)
if show_vel_front or show_vel_front_pert:
    vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
                                                                                    'front', mod, mod_y)
# Full-resolution index grid; zgrid doubles as the scalar colouring of the B field.
xgrid, zgrid, ygrid = np.mgrid[0:nx:(nx)*1j,
                               0:nz:(nz)*1j,
                               0:ny:(ny)*1j]
# NOTE: axes are passed in Mayavi order (x, z, y) throughout this script.
field = mlab.pipeline.vector_field(bxvals_t, bzvals_t, byvals_t, name="B field",
                                   figure=fig, scalars=zgrid)
field.spacing = spacing
if show_axes:
    mpf.axes_no_label(field)
if show_mini_axis:
    mpf.mini_axes()
if uniform_light:
    #uniform lighting, but if we turn shading of volumes off, we are ok without
    mpf.uniform_lighting(fig)
#Black background
mpf.background_colour(fig, (0., 0., 0.))
scalefactor = 8. * nx / 100. # scale factor for direction field vectors
# Set up visualisation modules
# One vector field + cut plane per requested quantity/view. 'top' planes show
# only in-plane (x, y) components, hence the np.zeros_like() vertical component.
if show_mag_vec:
    bdirfield_front = mlab.pipeline.vector_field(bxvals_mask_front_t, bzvals_mask_front_t,
                                                 byvals_mask_front_t, name="B field front",
                                                 figure=fig)
    bdirfield_front.spacing = spacing
    mpf.vector_cut_plane(bdirfield_front, 'front', nx, ny, nz,
                         y_spacing, scale_factor=scalefactor)
if show_vel_top or show_vel_top_pert:
    vdirfield_top = mlab.pipeline.vector_field(vxvals_mask_top_t, np.zeros_like(vxvals_mask_top_t),
                                               vyvals_mask_top_t, name="V field top",
                                               figure=fig)
    vdirfield_top.spacing = spacing
    mpf.vector_cut_plane(vdirfield_top, 'top', nx, ny, nz,
                         y_spacing, scale_factor=scalefactor)
if show_vel_front or show_vel_front_pert:
    vdirfield_front = mlab.pipeline.vector_field(vxvals_mask_front_t, vzvals_mask_front_t,
                                                 vyvals_mask_front_t, name="V field front",
                                                 figure=fig)
    vdirfield_front.spacing = spacing
    mpf.vector_cut_plane(vdirfield_front,'front', nx, ny, nz,
                         y_spacing, scale_factor=scalefactor)
if show_disp_top:
    xidirfield_top = mlab.pipeline.vector_field(xixvals_mask_top_t, np.zeros_like(xixvals_mask_top_t),
                                                xiyvals_mask_top_t, name="Xi field top",
                                                figure=fig)
    xidirfield_top.spacing = spacing
    mpf.vector_cut_plane(xidirfield_top, 'top', nx, ny, nz,
                         y_spacing, scale_factor=scalefactor)
if show_disp_front:
    xidirfield_front = mlab.pipeline.vector_field(xixvals_mask_front_t, xizvals_mask_front_t,
                                                  xiyvals_mask_front_t, name="Xi field front",
                                                  figure=fig)
    xidirfield_front.spacing = spacing
    mpf.vector_cut_plane(xidirfield_front, 'front', nx, ny, nz,
                         y_spacing, scale_factor=scalefactor)
# Loop through time
# Per frame: recompute/roll the field data, push it into the existing Mayavi
# sources, rebuild field lines and boundaries (these cannot be updated in
# place), then render/save each selected view.
for t_ind in range(nt):
    if t_ind == 0:
        # First frame reuses the arrays computed before the loop.
        bxvals_t = bxvals
        byvals_t = byvals
        bzvals_t = bzvals
        if show_disp_top or show_disp_front:
            xixvals_t = xixvals
            xiyvals_t = xiyvals
            xizvals_t = xizvals
        if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
            vxvals_t = vxvals
            vyvals_t = vyvals
            vzvals_t = vzvals
        if show_boundary:
            xix_boundary_r_vals_t = xix_boundary_r_vals
            xix_boundary_l_vals_t = xix_boundary_l_vals
        if show_density or show_density_pert:
            rho_vals_t = rho_vals
    else:
        # Recompute B at the advanced time t (t is stepped at the loop's end).
        bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
        byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
        bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
        bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
                         bz_eq3d)
        bxvals_t = bxvals
        byvals_t = byvals
        bzvals_t = bzvals
        # Update mag field data
        field.mlab_source.set(u=bxvals_t, v=bzvals_t, w=byvals_t)
        # Update mag field visualisation module
        if show_mag_vec:
            bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
                                                                                            'front', mod, mod_y)
            bdirfield_front.mlab_source.set(u=bxvals_mask_front_t, v=bzvals_mask_front_t, w=byvals_mask_front_t)
        # Update displacement field data
        # NOTE(review): the other quantities are advanced in time by cyclically
        # rolling the t=0 arrays along z (split + concatenate). The split index
        # uses nz / nt, which is a float under Python 3 and would make
        # np.split raise - presumably this ran under Python 2; TODO confirm.
        if show_disp_top or show_disp_front:
            xixvals_split = np.split(xixvals, [nz - (nz / nt) * t_ind], axis=1)
            xiyvals_split = np.split(xiyvals, [nz - (nz / nt) * t_ind], axis=1)
            xizvals_split = np.split(xizvals, [nz - (nz / nt) * t_ind], axis=1)
            xixvals_t = np.concatenate((xixvals_split[1], xixvals_split[0]), axis=1)
            xiyvals_t = np.concatenate((xiyvals_split[1], xiyvals_split[0]), axis=1)
            xizvals_t = np.concatenate((xizvals_split[1], xizvals_split[0]), axis=1)
        # Update displacement field visualisation module
        if show_disp_top:
            xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
                                                                                         'top', mod, mod_y)
            xidirfield_top.mlab_source.set(u=xixvals_mask_top_t, v=np.zeros_like(xixvals_mask_top_t), w=xiyvals_mask_top_t)
        if show_disp_front:
            xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
                                                                                               'front', mod, mod_y)
            xidirfield_front.mlab_source.set(u=xixvals_mask_front_t, v=xizvals_mask_front_t, w=xiyvals_mask_front_t)
        # Update velocity field data
        if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
            vxvals_split = np.split(vxvals, [nz - (nz / nt) * t_ind], axis=1)
            vyvals_split = np.split(vyvals, [nz - (nz / nt) * t_ind], axis=1)
            vzvals_split = np.split(vzvals, [nz - (nz / nt) * t_ind], axis=1)
            vxvals_t = np.concatenate((vxvals_split[1], vxvals_split[0]), axis=1)
            vyvals_t = np.concatenate((vyvals_split[1], vyvals_split[0]), axis=1)
            vzvals_t = np.concatenate((vzvals_split[1], vzvals_split[0]), axis=1)
        # Update velocity field visualisation module
        if show_vel_top or show_vel_top_pert:
            vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
                                                                                      'top', mod, mod_y)
            vdirfield_top.mlab_source.set(u=vxvals_mask_top_t, v=np.zeros_like(vxvals_mask_top_t), w=vyvals_mask_top_t)
        if show_vel_front or show_vel_front_pert:
            vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
                                                                                            'front', mod, mod_y)
            vdirfield_front.mlab_source.set(u=vxvals_mask_front_t, v=vzvals_mask_front_t, w=vyvals_mask_front_t)
        # Update boundary displacement data
        if show_boundary:
            xix_boundary_r_vals_split = np.split(xix_boundary_r_vals, [nz - (nz / nt) * t_ind], axis=0)
            xix_boundary_l_vals_split = np.split(xix_boundary_l_vals, [nz - (nz / nt) * t_ind], axis=0)
            xix_boundary_r_vals_t = np.concatenate((xix_boundary_r_vals_split[1], xix_boundary_r_vals_split[0]), axis=0)
            xix_boundary_l_vals_t = np.concatenate((xix_boundary_l_vals_split[1], xix_boundary_l_vals_split[0]), axis=0)
        # Update density data
        if show_density or show_density_pert:
            rho_vals_split = np.split(rho_vals, [nz - (nz / nt) * t_ind], axis=1)
            rho_vals_t = np.concatenate((rho_vals_split[1], rho_vals_split[0]), axis=1)
            rho.mlab_source.set(scalars=rho_vals_t)
    # Boundary data - Letting mayavi know where to plot the boundary
    # Convert physical x-extents of the boundary meshes into grid coordinates.
    if show_boundary:
        ext_min_r = ((nx) * (xix_boundary_r_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
        ext_max_r = ((nx) * (xix_boundary_r_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
        ext_min_l = ((nx) * (xix_boundary_l_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
        ext_max_l = ((nx) * (xix_boundary_l_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
    #Make field lines
    if show_mag:
        # move seed points up with phase speed. - Bit of a fudge.
        # Create an array of points for which we want mag field seeds
        nx_seed = 9
        ny_seed = 13
        start_x = 30. * nx / 100.
        end_x = nx+1 - start_x
        start_y = 1.
        if ny == 20: # so that the lines dont go right up to the edge of the box
            end_y = ny - 1.
        elif ny == 100:
            end_y = ny - 2.
        elif ny == 300:
            end_y = ny - 6.
        else:
            end_y = ny - 1
        seeds=[]
        dx_res = (end_x - start_x) / (nx_seed-1)
        dy_res = (end_y - start_y) / (ny_seed-1)
        for j in range(ny_seed):
            for i in range(nx_seed):
                x = start_x + (i * dx_res) * x_spacing
                y = start_y + (j * dy_res) * y_spacing
                # Seed height drifts upward with frame index (follows the wave).
                z = 1. + (t_start + t_ind*(t_end - t_start)/nt)/zmax * nz
                seeds.append((x,z,y))
        if 'alfven' in mode:
            # Drop the first and last seed columns for Alfven modes.
            for i in range(nx_seed):
                del seeds[0]
                del seeds[-1]
        # Remove previous field lines - field lines cannot be updated, just the data that they are built from
        if t_ind != 0:
            field_lines.remove() # field_lines is defined in first go through loop
        field_lines = SeedStreamline(seed_points=seeds)
        # Field line visualisation tinkering
        field_lines.stream_tracer.integration_direction='both'
        field_lines.streamline_type = 'tube'
        field_lines.stream_tracer.maximum_propagation = nz * 2
        field_lines.tube_filter.number_of_sides = 20
        field_lines.tube_filter.radius = 0.7 * max(nx, ny, nz) / 100.
        field_lines.tube_filter.capping = True
        field_lines.actor.property.opacity = 1.0
        field.add_child(field_lines)
        module_manager = field_lines.parent
        # Colormap of magnetic field strength plotted on the field lines
        if show_mag_scale:
            module_manager.scalar_lut_manager.lut_mode = 'coolwarm'
            module_manager.scalar_lut_manager.data_range=[7,18]
        else:
            # Solid red field lines (RGB 220, 20, 20) when no magnitude scale is shown.
            mag_lut = module_manager.scalar_lut_manager.lut.table.to_array()
            mag_lut[:,0] = [220]*256
            mag_lut[:,1] = [20]*256
            mag_lut[:,2] = [20]*256
            module_manager.scalar_lut_manager.lut.table = mag_lut
        if show_mag_fade:
            mpf.colormap_fade(module_manager, fade_value=20)
    # Which views do you want to show? Options are defined at the start
    views_selected = [0]#[0,1,4,5,6] #range(7) #[2,3]
    for view_ind, view_selected in enumerate(views_selected):
        view = view_options[view_selected]
        # Display boundary - cannot be updated each time
        if show_boundary:
            # Boundaries should look different depending on view
            if view == 'front-parallel':
                #remove previous boundaries
                # NOTE(review): t is a float accumulated each frame; `t != 0`
                # relies on exact float comparison - t_ind != 0 would be safer.
                if t != 0 or view_ind != 0:
                    boundary_r.remove()
                    boundary_l.remove()
                # Make a fading colormap by changing opacity at ends
                lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
                fade_value = 125
                lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
                lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
                # Set up boundary visualisation
                boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
                                       extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
                                       opacity=1., representation='wireframe',
                                       line_width=12., scalars=zgrid_zy)
                boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
                                       extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
                                       opacity=1., representation='wireframe',
                                       line_width=12., scalars=zgrid_zy)
                # Boundary color and other options
                boundary_r.module_manager.scalar_lut_manager.lut.table = lut
                boundary_l.module_manager.scalar_lut_manager.lut.table = lut
                boundary_r.actor.property.lighting = False
                boundary_r.actor.property.shading = False
                boundary_l.actor.property.lighting = False
                boundary_l.actor.property.shading = False
            else:
                #remove previous boundaries
                if t != 0 or view_ind != 0:
                    boundary_r.remove()
                    boundary_l.remove()
                # Make a fading colormap by changing opacity at ends
                lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
                fade_value = 20
                lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
                lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
                # Set up boundary visualisation
                boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
                                       extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
                                       opacity=0.7, scalars=zgrid_zy)
                boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
                                       extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
                                       opacity=0.7, scalars=zgrid_zy)
                # Boundary color and other options
                boundary_r.module_manager.scalar_lut_manager.lut.table = lut
                boundary_l.module_manager.scalar_lut_manager.lut.table = lut
                boundary_r.actor.property.lighting = False
                boundary_r.actor.property.shading = False
                boundary_l.actor.property.lighting = False
                boundary_l.actor.property.shading = False
        # Set viewing angle - For some unknown reason we must redefine the camera position each time.
        # This is something to do with the boundaries being replaced each time.
        mpf.view_position(fig, view, nx, ny, nz)
        if save_images:
            prefix = 'R1_'+str(R1) + '_' + mode + '_' + vis_mod_string + view + '_'# + '_norho_'
            mlab.savefig(os.path.join(save_directory, prefix + str(t_ind+1) + '.png'))
            if t_ind == nt - 1:
                if make_video:
                    i2v.image2video(filepath=save_directory, prefix=prefix,
                                    output_name=prefix+'video', out_extension='mp4',
                                    fps=fps, n_loops=4, delete_images=True,
                                    delete_old_videos=True, res=res[1])
        # Log: to keep us updated with progress
        if t_ind % 5 == 4:
            print('Finished frame number ' + str(t_ind + 1) + ' out of ' + str(number_of_frames))
    #Release some memory after each time step
    gc.collect()
    #step t forward
    t = t + (t_end - t_start) / nt
# Close Mayavi window each time if we cant to make a video
if make_video:
    mlab.close(fig)
print('Finished ' + mode)
|
mit
| 2,062,885,535,254,290,400
| 48.086713
| 208
| 0.498518
| false
| 3.590384
| false
| false
| false
|
bundgus/python-playground
|
ssh-playground/demo_sftp.py
|
1
|
2786
|
#!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
# based on code provided by raymond mosteller (thanks!)
import base64
import getpass
import os
import socket
import sys
import traceback

import paramiko
from paramiko.py3compat import input

# setup logging
paramiko.util.log_to_file('demo_sftp.log')

# Connection settings for the demo (edit before running).
hostname = '192.168.1.11'
port = 22
username = 'username'
password = 'password'

# Paramiko client configuration
UseGSSAPI = False  # enable GSS-API / SSPI authentication
DoGSSAPIKeyExchange = False

# now, connect and use paramiko Transport to negotiate SSH2 across the connection
try:
    t = paramiko.Transport((hostname, port))
    t.connect(None, username, password, gss_host=socket.getfqdn(hostname),
              gss_auth=UseGSSAPI, gss_kex=DoGSSAPIKeyExchange)
    sftp = paramiko.SFTPClient.from_transport(t)

    # dirlist on remote host
    dirlist = sftp.listdir('.')
    print("Dirlist: %s" % dirlist)

    # copy this demo onto the server
    try:
        sftp.mkdir("demo_sftp_folder")
    except IOError:
        print('(assuming demo_sftp_folder/ already exists)')
    with sftp.open('demo_sftp_folder/README', 'w') as f:
        f.write('This was created by demo_sftp.py.\n')
    with open('demo_sftp.py', 'r') as f:
        data = f.read()
    # FIX: the remote file handle returned by sftp.open() was never closed,
    # which leaks the handle and may leave the upload unflushed. Use a
    # context manager like the other transfers above.
    with sftp.open('demo_sftp_folder/demo_sftp.py', 'w') as f:
        f.write(data)
    print('created demo_sftp_folder/ on the server')

    # copy the README back here
    with sftp.open('demo_sftp_folder/README', 'r') as f:
        data = f.read()
    with open('README_demo_sftp', 'w') as f:
        f.write(data.decode('utf-8'))
    print('copied README back here')

    # BETTER: use the get() and put() methods
    sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py')
    sftp.get('demo_sftp_folder/README', 'README_demo_sftp')

    t.close()

except Exception as e:
    print('*** Caught exception: %s: %s' % (e.__class__, e))
    traceback.print_exc()
    try:
        t.close()
    # FIX: narrowed from a bare `except:` so best-effort cleanup no longer
    # swallows KeyboardInterrupt/SystemExit.
    except Exception:
        pass
    sys.exit(1)
|
mit
| 2,605,952,891,896,653,000
| 30.303371
| 81
| 0.684135
| false
| 3.44802
| false
| false
| false
|
cmjatai/cmj
|
cmj/cerimonial/models.py
|
1
|
27858
|
from django.contrib.auth.models import Group
from django.db import models
from django.db.models.deletion import SET_NULL, PROTECT, CASCADE
from django.utils.translation import ugettext_lazy as _
from cmj.core.models import CmjModelMixin, Trecho, Distrito, RegiaoMunicipal,\
CmjAuditoriaModelMixin, CmjSearchMixin, AreaTrabalho, Bairro, Municipio
from cmj.utils import YES_NO_CHOICES, NONE_YES_NO_CHOICES,\
get_settings_auth_user_model
from sapl.parlamentares.models import Parlamentar, Partido
from sapl.utils import LISTA_DE_UFS
# Biological-sex choices (stored as single-char codes) used by Contato/Dependente.
FEMININO = 'F'
MASCULINO = 'M'
SEXO_CHOICE = ((FEMININO, _('Feminino')),
               (MASCULINO, _('Masculino')))
# Importance levels: low / medium / high / critical.
# (Not referenced by the models in this excerpt - presumably used elsewhere.)
IMP_BAIXA = 'B'
IMP_MEDIA = 'M'
IMP_ALTA = 'A'
IMP_CRITICA = 'C'
IMPORTANCIA_CHOICE = (
    (IMP_BAIXA, _('Baixa')),
    (IMP_MEDIA, _('Média')),
    (IMP_ALTA, _('Alta')),
    (IMP_CRITICA, _('Crítica')),
)
class DescricaoAbstractModel(models.Model):
    """Abstract base for simple lookup tables: a single description field,
    default ordering by it, and str() returning it."""
    descricao = models.CharField(
        default='', max_length=254, verbose_name=_('Nome / Descrição'))
    class Meta:
        abstract = True
        ordering = ('descricao',)
    def __str__(self):
        return self.descricao
class TipoTelefone(DescricaoAbstractModel):
    """Lookup table of telephone types."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tipo de Telefone')
        verbose_name_plural = _('Tipos de Telefone')
class TipoEndereco(DescricaoAbstractModel):
    """Lookup table of address types."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tipo de Endereço')
        verbose_name_plural = _('Tipos de Endereço')
class TipoEmail(DescricaoAbstractModel):
    """Lookup table of email types."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tipo de Email')
        verbose_name_plural = _('Tipos de Email')
class Parentesco(DescricaoAbstractModel):
    """Lookup table of kinship relations (used by Dependente)."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Parentesco')
        verbose_name_plural = _('Parentescos')
class EstadoCivil(DescricaoAbstractModel):
    """Lookup table of marital statuses."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Estado Civil')
        verbose_name_plural = _('Estados Civis')
class PronomeTratamento(models.Model):
    """A form of address ("pronome de tratamento") with every grammatical
    variant stored explicitly: abbreviation, direct/indirect vocative,
    addressing form and name prefix, each in singular/plural and
    masculine/feminine."""
    nome_por_extenso = models.CharField(
        default='', max_length=254, verbose_name=_('Nome Por Extenso'))
    abreviatura_singular_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Abreviatura Singular Masculino'))
    abreviatura_singular_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Abreviatura Singular Feminino'))
    abreviatura_plural_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Abreviatura Plural Masculino'))
    abreviatura_plural_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Abreviatura Plural Feminino'))
    vocativo_direto_singular_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Direto Singular Masculino'))
    vocativo_direto_singular_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Direto Singular Feminino'))
    vocativo_direto_plural_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Direto Plural Masculino'))
    vocativo_direto_plural_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Direto Plural Feminino'))
    vocativo_indireto_singular_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Indireto Singular Masculino'))
    vocativo_indireto_singular_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Indireto Singular Feminino'))
    vocativo_indireto_plural_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Indireto Plural Masculino'))
    vocativo_indireto_plural_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Vocativo Indireto Plural Feminino'))
    enderecamento_singular_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Endereçamento Singular Masculino'))
    enderecamento_singular_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Endereçamento Singular Feminino'))
    enderecamento_plural_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Endereçamento Plural Masculino'))
    enderecamento_plural_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Endereçamento Plural Feminino'))
    prefixo_nome_singular_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Prefixo Singular Masculino'))
    prefixo_nome_singular_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Prefixo Singular Feminino'))
    prefixo_nome_plural_m = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Prefixo Plural Masculino'))
    prefixo_nome_plural_f = models.CharField(
        default='', max_length=254, verbose_name=_(
            'Prefixo Plural Feminino'))
    class Meta:
        verbose_name = _('Pronome de Tratamento')
        verbose_name_plural = _('Pronomes de tratamento')
    def __str__(self):
        return self.nome_por_extenso
class TipoAutoridade(DescricaoAbstractModel):
    """Authority type; carries the set of PronomeTratamento options that
    are applicable to contacts of this type."""
    pronomes = models.ManyToManyField(
        PronomeTratamento,
        related_name='tipoautoridade_set')
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tipo de Autoridade')
        verbose_name_plural = _('Tipos de Autoridade')
class TipoLocalTrabalho(DescricaoAbstractModel):
    """Lookup table of workplace types."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tipo do Local de Trabalho')
        verbose_name_plural = _('Tipos de Local de Trabalho')
class NivelInstrucao(DescricaoAbstractModel):
    """Lookup table of education levels."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Nível de Instrução')
        verbose_name_plural = _('Níveis de Instrução')
class OperadoraTelefonia(DescricaoAbstractModel):
    """Lookup table of telephone carriers."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Operadora de Telefonia')
        verbose_name_plural = _('Operadoras de Telefonia')
class Contato(CmjSearchMixin, CmjAuditoriaModelMixin):
    """A person/contact record, optionally scoped to an AreaTrabalho
    (workspace) and optionally linked to a user account (perfil_user).

    Uniqueness is enforced on (nome, data_nascimento, workspace,
    perfil_user). Most fields are optional personal data; authority-related
    fields (tipo_autoridade, pronome_tratamento) drive form-of-address
    handling.
    """
    nome = models.CharField(max_length=100, verbose_name=_('Nome'))
    nome_social = models.CharField(
        blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
    apelido = models.CharField(
        blank=True, default='', max_length=100, verbose_name=_('Apelido'))
    data_nascimento = models.DateField(
        blank=True, null=True, verbose_name=_('Data de Nascimento'))
    sexo = models.CharField(
        max_length=1, blank=True,
        verbose_name=_('Sexo Biológico'), choices=SEXO_CHOICE)
    identidade_genero = models.CharField(
        blank=True, default='',
        max_length=100, verbose_name=_('Como se reconhece?'))
    # NullBooleanField: tri-state yes/no/unknown.
    # NOTE(review): deprecated in modern Django in favour of
    # BooleanField(null=True) - changing it would require a migration.
    tem_filhos = models.NullBooleanField(
        choices=NONE_YES_NO_CHOICES,
        default=None, verbose_name=_('Tem Filhos?'))
    quantos_filhos = models.PositiveSmallIntegerField(
        default=0, blank=True, verbose_name=_('Quantos Filhos?'))
    estado_civil = models.ForeignKey(
        EstadoCivil,
        related_name='contato_set',
        blank=True, null=True, on_delete=SET_NULL,
        verbose_name=_('Estado Civil'))
    nivel_instrucao = models.ForeignKey(
        NivelInstrucao,
        related_name='contato_set',
        blank=True, null=True, on_delete=SET_NULL,
        verbose_name=_('Nivel de Instrução'))
    naturalidade = models.CharField(
        max_length=50, blank=True, verbose_name=_('Naturalidade'))
    nome_pai = models.CharField(
        max_length=100, blank=True, verbose_name=_('Nome do Pai'))
    nome_mae = models.CharField(
        max_length=100, blank=True, verbose_name=_('Nome da Mãe'))
    numero_sus = models.CharField(
        max_length=100, blank=True, verbose_name=_('Número do SUS'))
    cpf = models.CharField(max_length=15, blank=True, verbose_name=_('CPF'))
    titulo_eleitor = models.CharField(
        max_length=15,
        blank=True,
        verbose_name=_('Título de Eleitor'))
    rg = models.CharField(max_length=30, blank=True, verbose_name=_('RG'))
    rg_orgao_expedidor = models.CharField(
        max_length=20, blank=True, verbose_name=_('Órgão Expedidor'))
    rg_data_expedicao = models.DateField(
        blank=True, null=True, verbose_name=_('Data de Expedição'))
    ativo = models.BooleanField(choices=YES_NO_CHOICES,
                                default=True, verbose_name=_('Ativo?'))
    # PROTECT: a workspace with contacts cannot be deleted.
    workspace = models.ForeignKey(
        AreaTrabalho,
        verbose_name=_('Área de Trabalho'),
        related_name='contato_set',
        blank=True, null=True, on_delete=PROTECT)
    # CASCADE: deleting the user account removes the linked contact.
    perfil_user = models.ForeignKey(
        get_settings_auth_user_model(),
        verbose_name=_('Perfil do Usuário'),
        related_name='contato_set',
        blank=True, null=True, on_delete=CASCADE)
    profissao = models.CharField(
        max_length=254, blank=True, verbose_name=_('Profissão'))
    tipo_autoridade = models.ForeignKey(
        TipoAutoridade,
        verbose_name=TipoAutoridade._meta.verbose_name,
        related_name='contato_set',
        blank=True, null=True, on_delete=SET_NULL)
    cargo = models.CharField(max_length=254, blank=True, default='',
                             verbose_name=_('Cargo/Função'))
    pronome_tratamento = models.ForeignKey(
        PronomeTratamento,
        verbose_name=PronomeTratamento._meta.verbose_name,
        related_name='contato_set',
        blank=True, null=True, on_delete=SET_NULL,
        help_text=_('O pronome de tratamento é opcional, mas será \
            obrigatório caso seja selecionado um tipo de autoridade.'))
    observacoes = models.TextField(
        blank=True, default='',
        verbose_name=_('Outros observações sobre o Contato'))
    @property
    def fields_search(self):
        # Fields exposed to the search mixin (CmjSearchMixin).
        return ['nome',
                'nome_social',
                'apelido']
    class Meta:
        verbose_name = _('Contato')
        verbose_name_plural = _('Contatos')
        ordering = ['nome']
        # Custom report/printing permissions (beyond the default add/change/delete).
        permissions = (
            ('print_impressoenderecamento',
             _('Pode Imprimir Impressos de Endereçamento')),
            ('print_rel_contato_agrupado_por_processo',
             _('Pode Imprimir Relatório de Contatos Agrupados por Processo')),
            ('print_rel_contato_agrupado_por_grupo',
             _('Pode Imprimir Relatório de Contatos Agrupados '
               'Grupos de Contato')),
        )
        unique_together = (
            ('nome', 'data_nascimento', 'workspace', 'perfil_user'),)
    def __str__(self):
        return self.nome
class PerfilManager(models.Manager):
    """Manager that resolves a Contato acting as the profile of a user."""

    def for_user(self, user):
        """Return the single Contato whose ``perfil_user`` is ``user``.

        Raises DoesNotExist / MultipleObjectsReturned like any ``get()``.
        """
        base_qs = super(PerfilManager, self).get_queryset()
        return base_qs.get(perfil_user=user)
class Perfil(Contato):
    """Proxy over Contato exposing PerfilManager (per-user profile lookup)."""
    objects = PerfilManager()
    class Meta:
        proxy = True
class Telefone(CmjAuditoriaModelMixin):
    """A telephone number belonging to a Contato, with carrier, type,
    ownership and contact-permission metadata."""
    contato = models.ForeignKey(
        Contato, on_delete=CASCADE,
        verbose_name=_('Contato'),
        related_name="telefone_set")
    operadora = models.ForeignKey(
        OperadoraTelefonia, on_delete=SET_NULL,
        related_name='telefone_set',
        blank=True, null=True,
        verbose_name=OperadoraTelefonia._meta.verbose_name)
    tipo = models.ForeignKey(
        TipoTelefone,
        blank=True, null=True,
        on_delete=SET_NULL,
        related_name='telefone_set',
        verbose_name='Tipo')
    telefone = models.CharField(max_length=100,
                                verbose_name='Número do Telefone')
    # Tri-state: is this the contact's own number? (None = unknown)
    proprio = models.NullBooleanField(
        choices=NONE_YES_NO_CHOICES,
        blank=True, null=True, verbose_name=_('Próprio?'))
    de_quem_e = models.CharField(
        max_length=40, verbose_name='De quem é?', blank=True,
        help_text=_('Se não é próprio, de quem é?'))
    preferencial = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Preferêncial?'))
    permissao = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Permissão:'),
        help_text=_("Permite que nossa instituição entre em contato \
            com você neste telefone?"))
    @property
    def numero_nome_contato(self):
        # Convenience alias; same text as str(self).
        return str(self)
    class Meta:
        verbose_name = _('Telefone')
        verbose_name_plural = _('Telefones')
    def __str__(self):
        return self.telefone
class TelefonePerfil(Telefone):
    """Proxy over Telefone with profile-specific labels."""
    class Meta:
        proxy = True
        verbose_name = _('Telefone do Perfil')
        verbose_name_plural = _('Telefones do Perfil')
class Email(CmjAuditoriaModelMixin):
    """An email address belonging to a Contato, with type and
    contact-permission metadata."""
    contato = models.ForeignKey(
        Contato, on_delete=CASCADE,
        verbose_name=_('Contato'),
        related_name="email_set")
    tipo = models.ForeignKey(
        TipoEmail,
        blank=True, null=True,
        on_delete=SET_NULL,
        related_name='email_set',
        verbose_name='Tipo')
    email = models.EmailField(verbose_name='Email')
    preferencial = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Preferêncial?'))
    permissao = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Permissão:'),
        help_text=_("Permite que nossa instituição envie informações \
            para este email?"))
    class Meta:
        verbose_name = _('Email')
        verbose_name_plural = _("Email's")
    def __str__(self):
        return self.email
class EmailPerfil(Email):
    """Proxy over Email with profile-specific labels."""
    class Meta:
        proxy = True
        verbose_name = _('Email do Perfil')
        verbose_name_plural = _("Email's do Perfil")
class Dependente(CmjAuditoriaModelMixin):
    """A dependent (family member) of a Contato, with the kinship relation
    and basic personal data."""
    # PROTECT: a Parentesco in use cannot be deleted.
    parentesco = models.ForeignKey(Parentesco,
                                   on_delete=PROTECT,
                                   related_name='+',
                                   verbose_name=_('Parentesco'))
    contato = models.ForeignKey(Contato,
                                verbose_name=_('Contato'),
                                related_name='dependente_set',
                                on_delete=CASCADE)
    nome = models.CharField(max_length=100, verbose_name=_('Nome'))
    nome_social = models.CharField(
        blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
    apelido = models.CharField(
        blank=True, default='', max_length=100, verbose_name=_('Apelido'))
    sexo = models.CharField(
        blank=True, max_length=1, verbose_name=_('Sexo Biológico'),
        choices=SEXO_CHOICE)
    data_nascimento = models.DateField(
        blank=True, null=True, verbose_name=_('Data Nascimento'))
    identidade_genero = models.CharField(
        blank=True, default='',
        max_length=100, verbose_name=_('Como se reconhece?'))
    nivel_instrucao = models.ForeignKey(
        NivelInstrucao,
        related_name='dependente_set',
        blank=True, null=True, on_delete=SET_NULL,
        verbose_name=_('Nivel de Instrução'))
    class Meta:
        verbose_name = _('Dependente')
        verbose_name_plural = _('Dependentes')
    def __str__(self):
        return self.nome
class DependentePerfil(Dependente):
    """Proxy over Dependente with profile-specific labels."""
    class Meta:
        proxy = True
        verbose_name = _('Dependente do Perfil')
        verbose_name_plural = _('Dependentes do Perfil')
class LocalTrabalho(CmjAuditoriaModelMixin):
    """A workplace of a Contato: employer identification plus a full
    (optional) address broken into street/number/district/region fields."""
    contato = models.ForeignKey(Contato,
                                verbose_name=_('Contato'),
                                related_name='localtrabalho_set',
                                on_delete=CASCADE)
    nome = models.CharField(
        max_length=254, verbose_name=_('Nome / Razão Social'))
    nome_social = models.CharField(
        blank=True, default='', max_length=254,
        verbose_name=_('Nome Fantasia'))
    tipo = models.ForeignKey(
        TipoLocalTrabalho,
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL,
        verbose_name=_('Tipo do Local de Trabalho'))
    trecho = models.ForeignKey(
        Trecho,
        verbose_name=_('Trecho'),
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL)
    uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
                          verbose_name=_('Estado'))
    municipio = models.ForeignKey(
        Municipio,
        verbose_name=Municipio._meta.verbose_name,
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL)
    cep = models.CharField(max_length=9, blank=True, default='',
                           verbose_name=_('CEP'))
    endereco = models.CharField(
        max_length=254, blank=True, default='',
        verbose_name=_('Endereço'),
        help_text=_('O campo endereço também é um campo de busca. Nele '
                    'você pode digitar qualquer informação, inclusive '
                    'digitar o cep para localizar o endereço, e vice-versa!'))
    numero = models.CharField(max_length=50, blank=True, default='',
                              verbose_name=_('Número'))
    bairro = models.ForeignKey(
        Bairro,
        verbose_name=Bairro._meta.verbose_name,
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL)
    distrito = models.ForeignKey(
        Distrito,
        verbose_name=Distrito._meta.verbose_name,
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL)
    regiao_municipal = models.ForeignKey(
        RegiaoMunicipal,
        verbose_name=RegiaoMunicipal._meta.verbose_name,
        related_name='localtrabalho_set',
        blank=True, null=True, on_delete=SET_NULL)
    complemento = models.CharField(max_length=30, blank=True, default='',
                                   verbose_name=_('Complemento'))
    # Employment period (both ends optional).
    data_inicio = models.DateField(
        blank=True, null=True, verbose_name=_('Data de Início'))
    data_fim = models.DateField(
        blank=True, null=True, verbose_name=_('Data de Fim'))
    preferencial = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Preferencial?'))
    # When filled, overrides Contato.cargo on printed output (per help text).
    cargo = models.CharField(
        max_length=254, blank=True, default='',
        verbose_name=_('Cargo/Função'),
        help_text=_('Ao definir um cargo e função aqui, o '
                    'Cargo/Função preenchido na aba "Dados Básicos", '
                    'será desconsiderado ao gerar impressos!'))
    class Meta:
        verbose_name = _('Local de Trabalho')
        verbose_name_plural = _('Locais de Trabalho')
    def __str__(self):
        return self.nome
class LocalTrabalhoPerfil(LocalTrabalho):
    """Proxy over LocalTrabalho with profile-specific labels."""
    class Meta:
        proxy = True
        verbose_name = _('Local de Trabalho do Perfil')
        verbose_name_plural = _('Locais de Trabalho do Perfil')
class Endereco(CmjAuditoriaModelMixin):
    """Postal address of a Contato.

    Mirrors the address fields used elsewhere in this module (LocalTrabalho);
    a contact may own several addresses and flag one as ``preferencial``.
    """
    contato = models.ForeignKey(Contato,
                                verbose_name=_('Contato'),
                                related_name='endereco_set',
                                on_delete=CASCADE)
    tipo = models.ForeignKey(
        TipoEndereco,
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL,
        verbose_name=_('Tipo do Endereço'))
    trecho = models.ForeignKey(
        Trecho,
        verbose_name=_('Trecho'),
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL)
    uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
                          verbose_name=_('Estado'))
    municipio = models.ForeignKey(
        Municipio,
        verbose_name=_('Município'),
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL)
    cep = models.CharField(max_length=9, blank=True, default='',
                           verbose_name=_('CEP'))
    endereco = models.CharField(
        max_length=254, blank=True, default='',
        verbose_name=_('Endereço'),
        help_text=_('O campo endereço também é um campo de busca, nele '
                    'você pode digitar qualquer informação, inclusive '
                    'digitar o cep para localizar o endereço, e vice-versa!'))
    numero = models.CharField(max_length=50, blank=True, default='',
                              verbose_name=_('Número'))
    bairro = models.ForeignKey(
        Bairro,
        verbose_name=Bairro._meta.verbose_name,
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL)
    distrito = models.ForeignKey(
        Distrito,
        verbose_name=Distrito._meta.verbose_name,
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL)
    regiao_municipal = models.ForeignKey(
        RegiaoMunicipal,
        verbose_name=RegiaoMunicipal._meta.verbose_name,
        related_name='endereco_set',
        blank=True, null=True, on_delete=SET_NULL)
    complemento = models.CharField(max_length=254, blank=True, default='',
                                   verbose_name=_('Complemento'))
    ponto_referencia = models.CharField(max_length=254, blank=True, default='',
                                        verbose_name=_('Pontos de Referência'))
    observacoes = models.TextField(
        blank=True, default='',
        verbose_name=_('Outros observações sobre o Endereço'))
    preferencial = models.BooleanField(
        choices=YES_NO_CHOICES,
        default=True, verbose_name=_('Preferencial?'))
    # FIX: the original code had a dangling triple-quoted string here (a
    # no-op expression statement) holding a prospective help_text:
    #   "Correspondências automáticas serão geradas sempre para os
    #    endereços preferenciais."
    # Converted to this comment so the intent is preserved without a stray
    # statement in the class body.
    class Meta:
        verbose_name = _('Endereço')
        verbose_name_plural = _('Endereços')
    def __str__(self):
        # Append " - <numero>" only when a street number is present.
        numero = (' - ' + self.numero) if self.numero else ''
        return self.endereco + numero
class EnderecoPerfil(Endereco):
    """Proxy of Endereco relabelled for the user-profile ("Perfil")
    admin/UI context; same database table, no new fields.
    """
    class Meta:
        proxy = True
        verbose_name = _('Endereço do Perfil')
        verbose_name_plural = _('Endereços do Perfil')
class FiliacaoPartidaria(CmjAuditoriaModelMixin):
    """Party-membership record of a Contato, with optional end date
    (``data_desfiliacao``).
    """
    contato = models.ForeignKey(Contato,
                                verbose_name=_('Contato'),
                                related_name='filiacaopartidaria_set',
                                on_delete=CASCADE)
    data = models.DateField(verbose_name=_('Data de Filiação'))
    # PROTECT: a Partido with memberships cannot be deleted.
    partido = models.ForeignKey(Partido,
                                related_name='filiacaopartidaria_set',
                                verbose_name=Partido._meta.verbose_name,
                                on_delete=PROTECT)
    data_desfiliacao = models.DateField(
        blank=True, null=True, verbose_name=_('Data de Desfiliação'))
    @property
    def contato_nome(self):
        # Convenience accessor: the contact's display name.
        return str(self.contato)
    class Meta:
        verbose_name = _('Filiação Partidária')
        verbose_name_plural = _('Filiações Partidárias')
    def __str__(self):
        return str(self.partido)
# -----------------------------------------------------------------
# -----------------------------------------------------------------
# PROCESSOS
# -----------------------------------------------------------------
# -----------------------------------------------------------------
class StatusProcesso(DescricaoAbstractModel):
    """Lookup table: possible statuses of a Processo."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Status de Processo')
        verbose_name_plural = _('Status de Processos')
class ClassificacaoProcesso(DescricaoAbstractModel):
    """Lookup table: classifications applicable to a Processo."""
    class Meta(DescricaoAbstractModel.Meta):
        # FIX: user-facing label was missing its diacritics
        # ('Classificacao de Processo'), inconsistent with the plural
        # 'Classificações de Processos' below.
        verbose_name = _('Classificação de Processo')
        verbose_name_plural = _('Classificações de Processos')
class TopicoProcesso(DescricaoAbstractModel):
    """Lookup table: topics applicable to a Processo."""
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Tópico de Processo')
        verbose_name_plural = _('Tópicos de Processos')
class AssuntoProcesso(DescricaoAbstractModel, CmjAuditoriaModelMixin):
    """Process subject, scoped to an AreaTrabalho (workspace)."""
    # PROTECT: a workspace with subjects cannot be deleted.
    workspace = models.ForeignKey(
        AreaTrabalho,
        verbose_name=_('Área de Trabalho'),
        related_name='assuntoprocesso_set',
        on_delete=PROTECT)
    class Meta(DescricaoAbstractModel.Meta):
        verbose_name = _('Assunto de Processo')
        verbose_name_plural = _('Assuntos de Processos')
class Processo(CmjSearchMixin, CmjAuditoriaModelMixin):
    """A tracked case ("processo") within a workspace: interested contacts,
    status, importance, topics, classifications and subjects.
    """
    titulo = models.CharField(max_length=9999, verbose_name=_('Título'))
    data = models.DateField(verbose_name=_('Data de Abertura'))
    descricao = models.TextField(
        blank=True, default='',
        verbose_name=_('Descrição do Processo'))
    observacoes = models.TextField(
        blank=True, default='',
        verbose_name=_('Outras observações sobre o Processo'))
    solucao = models.TextField(
        blank=True, default='',
        verbose_name=_('Solução do Processo'))
    contatos = models.ManyToManyField(Contato,
                                      blank=True,
                                      verbose_name=_(
                                          'Contatos Interessados no Processo'),
                                      related_name='processo_set',)
    status = models.ForeignKey(StatusProcesso,
                               blank=True, null=True,
                               verbose_name=_('Status do Processo'),
                               related_name='processo_set',
                               on_delete=SET_NULL)
    importancia = models.CharField(
        max_length=1, blank=True,
        verbose_name=_('Importância'), choices=IMPORTANCIA_CHOICE)
    topicos = models.ManyToManyField(
        TopicoProcesso, blank=True,
        related_name='processo_set',
        verbose_name=_('Tópicos'))
    classificacoes = models.ManyToManyField(
        ClassificacaoProcesso, blank=True,
        related_name='processo_set',
        verbose_name=_('Classificações'),)
    assuntos = models.ManyToManyField(
        AssuntoProcesso, blank=True,
        related_name='processo_set',
        verbose_name=_('Assuntos'),)
    # PROTECT: a workspace with processes cannot be deleted.
    workspace = models.ForeignKey(
        AreaTrabalho,
        verbose_name=_('Área de Trabalho'),
        related_name='processo_set',
        on_delete=PROTECT)
    class Meta:
        verbose_name = _('Processo')
        verbose_name_plural = _('Processos')
        ordering = ('titulo', )
    def __str__(self):
        return str(self.titulo)
    @property
    def fields_search(self):
        # Fields exposed to CmjSearchMixin's text search.
        return ['titulo',
                'observacoes',
                'descricao']
class ProcessoContato(Processo):
    """Proxy of Processo for the contact-centred admin/UI context;
    same database table, no new fields.
    """
    class Meta:
        proxy = True
        verbose_name = _('Processo')
        verbose_name_plural = _('Processos')
class GrupoDeContatos(CmjAuditoriaModelMixin):
    """Named group of contacts, scoped to an AreaTrabalho (workspace)."""
    nome = models.CharField(max_length=100,
                            verbose_name=_('Nome do Grupo'))
    contatos = models.ManyToManyField(Contato,
                                      blank=True,
                                      verbose_name=_(
                                          'Contatos do Grupo'),
                                      related_name='grupodecontatos_set',)
    # PROTECT: a workspace with contact groups cannot be deleted.
    workspace = models.ForeignKey(
        AreaTrabalho,
        verbose_name=_('Área de Trabalho'),
        related_name='grupodecontatos_set',
        on_delete=PROTECT)
    class Meta:
        verbose_name = _('Grupo de Contatos')
        verbose_name_plural = _('Grupos de Contatos')
        ordering = ('nome', )
    def __str__(self):
        return str(self.nome)
|
gpl-3.0
| 2,557,580,451,169,322,000
| 31.736718
| 79
| 0.601089
| false
| 3.564468
| false
| false
| false
|
rajalokan/nova
|
nova/tests/functional/notification_sample_tests/test_instance.py
|
1
|
34922
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import mock
from nova import context
from nova import exception
from nova.tests import fixtures
from nova.tests.functional.notification_sample_tests \
import notification_sample_base
from nova.tests.unit import fake_notifier
class TestInstanceNotificationSample(
        notification_sample_base.NotificationSampleTestBase):
    """Functional tests that compare emitted versioned instance
    notifications against the JSON sample files in the nova tree.
    """
    def setUp(self):
        self.flags(use_neutron=True)
        super(TestInstanceNotificationSample, self).setUp()
        # Network and volume fixtures shared by the individual tests.
        self.neutron = fixtures.NeutronFixture(self)
        self.useFixture(self.neutron)
        self.cinder = fixtures.CinderFixture(self)
        self.useFixture(self.cinder)
def _wait_until_swap_volume(self, server, volume_id):
for i in range(50):
volume_attachments = self.api.get_server_volumes(server['id'])
if len(volume_attachments) > 0:
for volume_attachment in volume_attachments:
if volume_attachment['volumeId'] == volume_id:
return
time.sleep(0.5)
self.fail('Volume swap operation failed.')
def _wait_until_swap_volume_error(self):
for i in range(50):
if self.cinder.swap_error:
return
time.sleep(0.5)
self.fail("Timed out waiting for volume swap error to occur.")
    def test_instance_action(self):
        """Run most per-action notification checks against one instance."""
        # A single test case is used to test most of the instance action
        # notifications to avoid booting up an instance for every action
        # separately.
        # Every instance action test function shall make sure that after the
        # function the instance is in active state and usable by other actions.
        # Therefore some action especially delete cannot be used here as
        # recovering from that action would mean to recreate the instance and
        # that would go against the whole purpose of this optimization
        server = self._boot_a_server(
            extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
        actions = [
            self._test_power_off_on_server,
            self._test_restore_server,
            self._test_suspend_resume_server,
            self._test_pause_unpause_server,
            self._test_shelve_server,
            self._test_shelve_offload_server,
            self._test_unshelve_server,
            self._test_resize_server,
            self._test_revert_server,
            self._test_resize_confirm_server,
            self._test_snapshot_server,
            self._test_rebuild_server,
            self._test_reboot_server,
            self._test_reboot_server_error,
            self._test_trigger_crash_dump,
            self._test_volume_attach_detach_server,
            self._test_rescue_server,
            self._test_unrescue_server,
            self._test_soft_delete_server,
            self._test_attach_volume_error,
        ]
        for action in actions:
            # Isolate each sub-test's notifications from the previous one.
            fake_notifier.reset()
            action(server)
            # Ensure that instance is in active state after an action
            self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
def test_create_delete_server(self):
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self.api.delete_server(server['id'])
self._wait_until_deleted(server)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
# This list needs to be in order.
expected_notifications = [
'instance-create-start',
'instance-create-end',
'instance-delete-start',
'instance-shutdown-start',
'instance-shutdown-end',
'instance-delete-end'
]
for idx, notification in enumerate(expected_notifications):
self._verify_notification(
notification,
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
    @mock.patch('nova.compute.manager.ComputeManager._build_resources')
    def test_create_server_error(self, mock_build):
        """Boot fails during resource build: expect create-start then
        create-error."""
        def _build_resources(*args, **kwargs):
            raise exception.FlavorDiskTooSmall()
        mock_build.side_effect = _build_resources
        server = self._boot_a_server(
            expected_status='ERROR',
            extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
        self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
        self._verify_notification(
            'instance-create-start',
            replacements={
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
        self._verify_notification(
            'instance-create-error',
            replacements={
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _verify_instance_update_steps(self, steps, notifications,
initial=None):
replacements = {}
if initial:
replacements = initial
for i, step in enumerate(steps):
replacements.update(step)
self._verify_notification(
'instance-update',
replacements=replacements,
actual=notifications[i])
return replacements
    def test_create_delete_server_with_instance_update(self):
        """Walk every instance.update emitted across create and delete."""
        # This makes server network creation synchronous which is necessary
        # for notification samples that expect instance.info_cache.network_info
        # to be set.
        self.useFixture(fixtures.SpawnIsSynchronousFixture())
        self.flags(notify_on_state_change='vm_and_task_state',
                   group='notifications')
        server = self._boot_a_server(
            extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
        instance_updates = self._wait_for_notifications('instance.update', 7)
        # The first notification comes from the nova-conductor the
        # rest is from the nova-compute. To keep the test simpler
        # assert this fact and then modify the publisher_id of the
        # first notification to match the template
        self.assertEqual('conductor:fake-mini',
                         instance_updates[0]['publisher_id'])
        instance_updates[0]['publisher_id'] = 'nova-compute:fake-mini'
        create_steps = [
            # nothing -> scheduling
            {'reservation_id': server['reservation_id'],
             'uuid': server['id'],
             'host': None,
             'node': None,
             'state_update.new_task_state': 'scheduling',
             'state_update.old_task_state': 'scheduling',
             'state_update.state': 'building',
             'state_update.old_state': 'building',
             'state': 'building'},
            # scheduling -> building
            {
                'state_update.new_task_state': None,
                'state_update.old_task_state': 'scheduling',
                'task_state': None},
            # scheduled
            {'host': 'compute',
             'node': 'fake-mini',
             'state_update.old_task_state': None},
            # building -> networking
            {'state_update.new_task_state': 'networking',
             'state_update.old_task_state': 'networking',
             'task_state': 'networking'},
            # networking -> block_device_mapping
            {'state_update.new_task_state': 'block_device_mapping',
             'state_update.old_task_state': 'networking',
             'task_state': 'block_device_mapping',
             'ip_addresses': [{
                 "nova_object.name": "IpPayload",
                 "nova_object.namespace": "nova",
                 "nova_object.version": "1.0",
                 "nova_object.data": {
                     "mac": "fa:16:3e:4c:2c:30",
                     "address": "192.168.1.3",
                     "port_uuid": "ce531f90-199f-48c0-816c-13e38010b442",
                     "meta": {},
                     "version": 4,
                     "label": "private-network",
                     "device_name": "tapce531f90-19"
                 }}]
            },
            # block_device_mapping -> spawning
            {'state_update.new_task_state': 'spawning',
             'state_update.old_task_state': 'block_device_mapping',
             'task_state': 'spawning',
             },
            # spawning -> active
            {'state_update.new_task_state': None,
             'state_update.old_task_state': 'spawning',
             'state_update.state': 'active',
             'launched_at': '2012-10-29T13:42:11Z',
             'state': 'active',
             'task_state': None,
             'power_state': 'running'},
        ]
        replacements = self._verify_instance_update_steps(
            create_steps, instance_updates)
        fake_notifier.reset()
        # Let's generate some bandwidth usage data.
        # Just call the periodic task directly for simplicity
        self.compute.manager._poll_bandwidth_usage(context.get_admin_context())
        self.api.delete_server(server['id'])
        self._wait_until_deleted(server)
        instance_updates = self._get_notifications('instance.update')
        self.assertEqual(2, len(instance_updates))
        delete_steps = [
            # active -> deleting
            {'state_update.new_task_state': 'deleting',
             'state_update.old_task_state': 'deleting',
             'state_update.old_state': 'active',
             'state': 'active',
             'task_state': 'deleting',
             'bandwidth': [
                 {'nova_object.namespace': 'nova',
                  'nova_object.name': 'BandwidthPayload',
                  'nova_object.data':
                      {'network_name': 'private-network',
                       'out_bytes': 0,
                       'in_bytes': 0},
                  'nova_object.version': '1.0'}],
             'tags': ["tag1"]
             },
            # deleting -> deleted
            {'state_update.new_task_state': None,
             'state_update.old_task_state': 'deleting',
             'state_update.old_state': 'active',
             'state_update.state': 'deleted',
             'state': 'deleted',
             'task_state': None,
             'terminated_at': '2012-10-29T13:42:11Z',
             'ip_addresses': [],
             'power_state': 'pending',
             'bandwidth': [],
             'tags': ["tag1"]
             },
        ]
        # Continue the replacement accumulation started by the create chain.
        self._verify_instance_update_steps(delete_steps, instance_updates,
                                           initial=replacements)
def _test_power_off_on_server(self, server):
self.api.post_server_action(server['id'], {'os-stop': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHUTOFF')
self.api.post_server_action(server['id'], {'os-start': {}})
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-power_off-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-power_off-end',
replacements={
'reservation_id': server['reservation_id'],
'power_state': 'running',
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-power_on-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-power_on-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
def _test_shelve_server(self, server):
self.flags(shelved_offload_time = -1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
def _test_shelve_offload_server(self, server):
self.flags(shelved_offload_time=-1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.api.post_server_action(server['id'], {'shelveOffload': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-shelve_offload-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-shelve_offload-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.api.post_server_action(server['id'], {'unshelve': None})
def _test_unshelve_server(self, server):
# setting the shelved_offload_time to 0 should set the
# instance status to 'SHELVED_OFFLOADED'
self.flags(shelved_offload_time = 0)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-unshelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-unshelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def _test_suspend_resume_server(self, server):
post = {'suspend': {}}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'SUSPENDED')
post = {'resume': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
# Four versioned notification are generated.
# 0. instance-suspend-start
# 1. instance-suspend-end
# 2. instance-resume-start
# 3. instance-resume-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-suspend-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-suspend-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-resume-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-resume-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.flags(reclaim_instance_interval=0)
def _test_pause_unpause_server(self, server):
self.api.post_server_action(server['id'], {'pause': {}})
self._wait_for_state_change(self.api, server, 'PAUSED')
self.api.post_server_action(server['id'], {'unpause': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
# Four versioned notifications are generated
# 0. instance-pause-start
# 1. instance-pause-end
# 2. instance-unpause-start
# 3. instance-unpause-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-pause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-pause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-unpause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-unpause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
    def _test_resize_server(self, server):
        """Resize to a freshly created flavor; verify resize samples."""
        self.flags(allow_resize_to_same_host=True)
        other_flavor_body = {
            'flavor': {
                'name': 'other_flavor',
                'ram': 256,
                'vcpus': 1,
                'disk': 1,
                'id': 'd5a8bb54-365a-45ae-abdb-38d249df7845'
            }
        }
        other_flavor_id = self.api.post_flavor(other_flavor_body)['id']
        extra_specs = {
            "extra_specs": {
                "hw:watchdog_action": "reset"}}
        self.admin_api.post_extra_spec(other_flavor_id, extra_specs)
        # Ignore the create flavor notification
        fake_notifier.reset()
        post = {
            'resize': {
                'flavorRef': other_flavor_id
            }
        }
        self.api.post_server_action(server['id'], post)
        self._wait_for_state_change(self.api, server, 'VERIFY_RESIZE')
        self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
        # This list needs to be in order.
        expected_notifications = [
            'instance-resize-start',
            'instance-resize-end',
            'instance-resize_finish-start',
            'instance-resize_finish-end'
        ]
        for idx, notification in enumerate(expected_notifications):
            self._verify_notification(
                notification,
                replacements={
                    'reservation_id': server['reservation_id'],
                    'uuid': server['id']},
                actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
        # Revert the resize so the instance returns to ACTIVE.
        post = {'revertResize': None}
        self.api.post_server_action(server['id'], post)
def _test_snapshot_server(self, server):
post = {'createImage': {'name': 'test-snap'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.snapshot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-snapshot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-snapshot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _test_rebuild_server(self, server):
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
# Before going back to ACTIVE state
# server state need to be changed to REBUILD state
self._wait_for_state_change(self.api, server,
expected_status='REBUILD')
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-rebuild-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_rebuild_instance_with_claim')
def test_rebuild_server_exc(self, mock_rebuild):
def _compute_resources_unavailable(*args, **kwargs):
raise exception.ComputeResourcesUnavailable(
reason="fake-resource")
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
fake_notifier.reset()
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
mock_rebuild.side_effect = _compute_resources_unavailable
self._wait_for_state_change(self.api, server, expected_status='ERROR')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
def _test_restore_server(self, server):
self.flags(reclaim_instance_interval=30)
self.api.delete_server(server['id'])
self._wait_for_state_change(self.api, server, 'SOFT_DELETED')
self.api.post_server_action(server['id'], {'restore': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-restore-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-restore-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self.flags(reclaim_instance_interval=0)
def _test_reboot_server(self, server):
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.virt.fake.SmallFakeDriver.reboot')
def _test_reboot_server_error(self, server, mock_reboot):
def _hard_reboot(*args, **kwargs):
raise exception.UnsupportedVirtType(virt="FakeVirt")
mock_reboot.side_effect = _hard_reboot
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.error')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
    def _attach_volume_to_server(self, server, volume_id):
        """Attach volume_id to server and wait for volume_attach.end."""
        self.api.post_server_volume(
            server['id'], {"volumeAttachment": {"volumeId": volume_id}})
        self._wait_for_notification('instance.volume_attach.end')
    def _detach_volume_from_server(self, server, volume_id):
        """Detach volume_id from server and wait for volume_detach.end."""
        self.api.delete_server_volume(server['id'], volume_id)
        self._wait_for_notification('instance.volume_detach.end')
    def _volume_swap_server(self, server, attachement_id, volume_id):
        """Request that the given attachment be swapped to volume_id.

        The swap is asynchronous; callers poll with _wait_until_swap_volume
        or _wait_until_swap_volume_error.  (Parameter name 'attachement_id'
        [sic] kept as-is to avoid breaking keyword callers.)
        """
        self.api.put_server_volume(server['id'], attachement_id, volume_id)
def test_volume_swap_server(self):
server = self._boot_a_server(
extra_params={'networks':
[{'port': self.neutron.port_1['id']}]})
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
self.cinder.swap_volume_instance_uuid = server['id']
self._volume_swap_server(server, self.cinder.SWAP_OLD_VOL,
self.cinder.SWAP_NEW_VOL)
self._wait_until_swap_volume(server, self.cinder.SWAP_NEW_VOL)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_swap-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-volume_swap-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
    def test_volume_swap_server_with_error(self):
        """Failed volume swap: verify volume_swap-start then -error."""
        server = self._boot_a_server(
            extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
        self._attach_volume_to_server(server, self.cinder.SWAP_ERR_OLD_VOL)
        self.cinder.swap_volume_instance_error_uuid = server['id']
        self._volume_swap_server(server, self.cinder.SWAP_ERR_OLD_VOL,
                                 self.cinder.SWAP_ERR_NEW_VOL)
        self._wait_until_swap_volume_error()
        # Seven versioned notifications are generated. We only rely on the
        # first six because _wait_until_swap_volume_error will return True
        # after volume_api.unreserve is called on the cinder fixture, and that
        # happens before the instance fault is handled in the compute manager
        # which generates the last notification (compute.exception).
        # 0. instance-create-start
        # 1. instance-create-end
        # 2. instance-volume_attach-start
        # 3. instance-volume_attach-end
        # 4. instance-volume_swap-start
        # 5. instance-volume_swap-error
        # 6. compute.exception
        self.assertTrue(len(fake_notifier.VERSIONED_NOTIFICATIONS) >= 6,
                        'Unexpected number of versioned notifications. '
                        'Expected at least 6, got: %s' %
                        len(fake_notifier.VERSIONED_NOTIFICATIONS))
        self._verify_notification(
            'instance-volume_swap-start',
            replacements={
                'new_volume_id': self.cinder.SWAP_ERR_NEW_VOL,
                'old_volume_id': self.cinder.SWAP_ERR_OLD_VOL,
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
        self._verify_notification(
            'instance-volume_swap-error',
            replacements={
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
    def _test_revert_server(self, server):
        # Placeholder: the revert itself is already triggered at the end of
        # _test_resize_server; no notification samples asserted here yet.
        pass
    def _test_resize_confirm_server(self, server):
        # Placeholder: no notification sample assertions yet.
        pass
    def _test_trigger_crash_dump(self, server):
        # Placeholder: no notification sample assertions yet.
        pass
def _test_volume_attach_detach_server(self, server):
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_attach-start
# 1. volume_attach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_attach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_attach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
fake_notifier.reset()
self._detach_volume_from_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_detach-start
# 1. volume_detach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_detach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_detach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
    def _test_rescue_server(self, server):
        # Intentionally empty: no versioned-notification assertions are made
        # for the rescue action here.
        pass
    def _test_unrescue_server(self, server):
        # Intentionally empty: no versioned-notification assertions are made
        # for the unrescue action here.
        pass
    def _test_soft_delete_server(self, server):
        # Intentionally empty: no versioned-notification assertions are made
        # for the soft-delete action here.
        pass
    @mock.patch('nova.volume.cinder.API.attach')
    def _test_attach_volume_error(self, server, mock_attach):
        """Force the cinder attach call to fail and verify that the
        volume_attach start/error notification pair is emitted."""
        def attach_volume(*args, **kwargs):
            raise exception.CinderConnectionFailed(
                reason="Connection timed out")
        mock_attach.side_effect = attach_volume
        post = {"volumeAttachment": {"volumeId": self.cinder.SWAP_OLD_VOL}}
        self.api.post_server_volume(server['id'], post)
        # Block until the error notification has been emitted before
        # inspecting the captured notification list.
        self._wait_for_notification('instance.volume_attach.error')
        # 0. volume_attach-start
        # 1. volume_attach-error
        # 2. compute.exception
        # We only rely on the first 2 notifications, in this case we don't
        # care about the exception notification.
        self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
        self._verify_notification(
            'instance-volume_attach-start',
            replacements={
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
        self._verify_notification(
            'instance-volume_attach-error',
            replacements={
                'reservation_id': server['reservation_id'],
                'uuid': server['id']},
            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
apache-2.0
| 4,255,457,280,156,007,400
| 39.988263
| 79
| 0.566777
| false
| 4.220181
| true
| false
| false
|
stchepanhagn/domain-learning
|
plan_learning.py
|
1
|
2767
|
""" plan_learning.py
- This module contain the procedure used for learning plans from experience.
Copyright (C) 2016 Stephan Chang
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, located in the root of this repository.
If not, see <http://www.gnu.org/licenses/>.
"""
import pdb
import planning
import sys
import random
def main(args):
    """Read pipe-separated training triples (preconditions|operators|effects)
    from *examples_file* and build a PDDL domain named *domain_name*.

    With -v the domain is printed; otherwise it is written to
    <domain_name>.pddl. Python 2 script.
    """
    verbose = '-v' in args
    n_arg = '-n' in args
    try:
        # When -v is present the positional arguments shift right by one.
        i = 1 + int(verbose)
        examples_file = args[i]
        domain_name = args[i+1]
    except:
        # NOTE(review): bare except also hides non-IndexError failures.
        print "usage: {cmd} [-v] examples_file"\
            " domain_name".format(cmd=args[0])
        return
    print """
    PDDL Domain Learning Copyright (C) 2016 Stephan Chang
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.
    """
    examples = []
    print "Parsing examples..."
    with open(examples_file) as f:
        # One example per line: "preconditions|operators|effects".
        line = f.readline().replace('\n', '')
        while line:
            triple = line.split('|')
            example = (triple[0], triple[1], triple[2])
            examples.append(example)
            line = f.readline().replace('\n', '')
    print "Done reading {n_examples} training examples!".format(n_examples=len(examples))
    if not f.closed:
        print "Warning: file stream is still open."
    if n_arg:
        # Assumes "-n <count>" follows the positional arguments, so the count
        # lands at args[i+3] — TODO confirm against intended CLI usage.
        n_examples = int(args[i+3])
    else:
        n_examples = len(examples)
    print "Creating domain..."
    domain = planning.Domain(domain_name)
    # random.shuffle(examples)
    for i in range(n_examples):
        # Each field is itself a comma-separated list of predicates/operators.
        preconditions = examples[i][0].split(',')
        operators = examples[i][1].split(',')
        effects = examples[i][2].split(',')
        domain.add_all_predicates(preconditions)
        domain.add_all_predicates(effects)
        domain.add_actions(operators, preconditions, effects)
    print "Done!"
    if verbose:
        print str(domain)
    else:
        print "Outputting to file..."
        output_file_name = "{domain_name}.pddl".format(domain_name=domain_name)
        with open(output_file_name, 'w') as f:
            f.write(str(domain))
        print "Done!"
# CLI entry point: pass the full argv (program name included) to main().
if __name__ == '__main__':
    main(sys.argv)
|
gpl-3.0
| 8,032,233,054,267,307,000
| 29.744444
| 89
| 0.647271
| false
| 3.87535
| false
| false
| false
|
dcy/epush
|
examples/rabbitmq/xiaomi.py
|
1
|
1181
|
#!/usr/bin/env python
#coding:utf-8
import pika
import json
HOST = 'localhost'
USERNAME = 'hisir'
PASSWORD = 'hisir123'
class Xiaomi():
    """Publishes Xiaomi push jobs as JSON messages onto the 'xiaomi_c'
    RabbitMQ queue via a blocking pika connection."""

    def __init__(self):
        auth = pika.PlainCredentials(USERNAME, PASSWORD)
        params = pika.ConnectionParameters(host=HOST, credentials=auth)
        self.connection = pika.BlockingConnection(params)
        self.channel = self.connection.channel()

    def notification_send(self):
        """Queue a single-device notification push."""
        payload = {'push_method': 'notification_send',
                   'title': 'Test 中文',
                   'description': 'Content',
                   'registration_id': 'go6VssZlTDDypm+hxYdaxycXtqM7M9NsTPbCjzyIyh0='}
        self.in_mq(payload)

    def all(self):
        """Queue a broadcast push to all devices."""
        payload = {'push_method': 'all',
                   'title': 'Test中文',
                   'description': 'Test'}
        self.in_mq(payload)

    def end(self):
        """Close the channel and the underlying connection."""
        self.channel.close()
        self.connection.close()

    def in_mq(self, data):
        """JSON-encode *data* and publish it to the 'xiaomi_c' queue."""
        body = json.dumps(data)
        self.channel.basic_publish(exchange='',
                                   routing_key='xiaomi_c',
                                   body=body)
if __name__ == "__main__":
xiaomi = Xiaomi()
xiaomi.notification_send()
#xiaomi.all()
xiaomi.end()
|
bsd-3-clause
| -3,308,385,428,919,708,000
| 23.4375
| 112
| 0.57971
| false
| 3.34188
| false
| false
| false
|
iw3hxn/LibrERP
|
account_payment_term_month/models/inherit_account_invoice.py
|
1
|
3307
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Micronaet SRL (<http://www.micronaet.it>).
# Copyright (C) 2014 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import orm
from tools.translate import _
class account_invoice(orm.Model):
    """Extend account.invoice so the invoice's tax amount is available
    (via the context) when computing payment terms and due dates."""
    _inherit = 'account.invoice'
    def action_move_create(self, cr, uid, ids, context=None):
        """Compute the invoice's total tax amount, stash it in the context
        under 'amount_tax', then delegate move creation to the parent."""
        context = context or self.pool['res.users'].context_get(cr, uid)
        ait_obj = self.pool['account.invoice.tax']
        amount_tax = 0.0
        if isinstance(ids, (int, long)):
            ids = [ids]
        for inv in self.browse(cr, uid, ids, context=context):
            amount_tax = context.get('amount_tax', 0.0)
            # Only recompute when the context does not already carry a value.
            if not amount_tax:
                compute_taxes = ait_obj.compute(cr, uid, inv.id, context=context)
                for tax in compute_taxes:
                    amount_tax += compute_taxes[tax]['amount']
                context.update({'amount_tax': amount_tax})
            super(account_invoice, self).action_move_create(cr, uid, [inv.id], context=context)
        return True
    def onchange_payment_term_date_invoice(self, cr, uid, ids, payment_term_id, date_invoice):
        """Recompute date_due when the payment term or invoice date changes:
        the due date is the latest date produced by the payment term lines.

        Raises when the selected payment term has no lines.
        """
        res = {'value': {}}
        if not ids:
            return res
        if not payment_term_id:
            return res
        context = self.pool['res.users'].context_get(cr, uid)
        pt_obj = self.pool['account.payment.term']
        ait_obj = self.pool['account.invoice.tax']
        if not date_invoice:
            date_invoice = time.strftime('%Y-%m-%d')
        # Feed the tax total to the payment-term computation via the context.
        compute_taxes = ait_obj.compute(cr, uid, ids, context=context)
        amount_tax = 0
        for tax in compute_taxes:
            amount_tax += compute_taxes[tax]['amount']
        context.update({'amount_tax': amount_tax})
        pterm_list = pt_obj.compute(cr, uid, payment_term_id, value=1, date_ref=date_invoice, context=context)
        if pterm_list:
            # Each entry is (date, amount); keep the latest date as date_due.
            pterm_list = [line[0] for line in pterm_list]
            pterm_list.sort()
            res = {'value': {'date_due': pterm_list[-1]}}
        else:
            payment = self.pool['account.payment.term'].browse(cr, uid, payment_term_id, context)
            raise orm.except_orm(_('Data Insufficient "{0}" !'.format(payment.name)),
                                 _('The payment term of supplier does not have a payment term line!'))
        return res
|
agpl-3.0
| 286,080,595,633,511,650
| 40.3375
| 110
| 0.586332
| false
| 3.881455
| false
| false
| false
|
lablup/sorna-agent
|
src/ai/backend/kernel/vendor/aws_polly/__init__.py
|
1
|
3171
|
import asyncio
import ctypes
import logging
import os
import threading
import janus
from ... import BaseRunner
from .inproc import PollyInprocRunner
log = logging.getLogger()
class Runner(BaseRunner):
    """Kernel runner that forwards query code to an in-process AWS Polly
    worker thread, bridging sync and async sides with janus queues."""
    log_prefix = 'vendor.aws_polly-kernel'
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Lazily-started worker thread (see ensure_inproc_runner).
        self.inproc_runner = None
        # Marker object the worker puts on the output queue to signal "done".
        self.sentinel = object()
        self.input_queue = None
        self.output_queue = None
        # NOTE: If credentials are missing,
        # boto3 will try to use the instance role.
        self.access_key = \
            self.child_env.get('AWS_ACCESS_KEY_ID', None)
        self.secret_key = \
            self.child_env.get('AWS_SECRET_ACCESS_KEY', None)
        os.environ['AWS_DEFAULT_REGION'] = \
            self.child_env.get('AWS_DEFAULT_REGION', 'ap-northeast-2')
    async def init_with_loop(self):
        # janus queues expose both sync (worker thread) and async (this
        # loop) endpoints over the same underlying queue.
        self.input_queue = janus.Queue()
        self.output_queue = janus.Queue()
    async def build_heuristic(self) -> int:
        # Build phase is not supported by this vendor kernel.
        raise NotImplementedError
    async def execute_heuristic(self) -> int:
        # Execute phase is not supported by this vendor kernel.
        raise NotImplementedError
    async def query(self, code_text) -> int:
        """Send *code_text* to the worker and relay its output messages to
        the out-socket until the sentinel arrives. Always returns 0."""
        self.ensure_inproc_runner()
        await self.input_queue.async_q.put(code_text)
        # Read the generated outputs until done
        while True:
            try:
                msg = await self.output_queue.async_q.get()
            except asyncio.CancelledError:
                break
            self.output_queue.async_q.task_done()
            if msg is self.sentinel:
                break
            self.outsock.send_multipart(msg)
        return 0
    async def complete(self, data):
        # Code completion is not provided; reply with an empty candidate list.
        self.outsock.send_multipart([
            b'completion',
            [],
        ])
    async def interrupt(self):
        """Asynchronously raise KeyboardInterrupt inside the worker thread.

        Uses the CPython C-API PyThreadState_SetAsyncExc via ctypes; if more
        than one thread state was affected the injection is rolled back, as
        that indicates a corrupted interpreter state.
        """
        if self.inproc_runner is None:
            log.error('No user code is running!')
            return
        # A dirty hack to raise an exception inside a running thread.
        target_tid = self.inproc_runner.ident
        if target_tid not in {t.ident for t in threading.enumerate()}:
            log.error('Interrupt failed due to missing thread.')
            return
        affected_count = ctypes.pythonapi.PyThreadState_SetAsyncExc(
            ctypes.c_long(target_tid),
            ctypes.py_object(KeyboardInterrupt))
        if affected_count == 0:
            log.error('Interrupt failed due to invalid thread identity.')
        elif affected_count > 1:
            # Passing 0 clears the pending async exception again.
            ctypes.pythonapi.PyThreadState_SetAsyncExc(
                ctypes.c_long(target_tid),
                ctypes.c_long(0))
            log.error('Interrupt broke the interpreter state -- '
                      'recommended to reset the session.')
    async def start_service(self, service_info):
        # No in-kernel services are exposed.
        return None, {}
    def ensure_inproc_runner(self):
        """Start the Polly worker thread on first use (idempotent)."""
        if self.inproc_runner is None:
            self.inproc_runner = PollyInprocRunner(
                self.input_queue.sync_q,
                self.output_queue.sync_q,
                self.sentinel,
                self.access_key,
                self.secret_key)
            self.inproc_runner.start()
|
lgpl-3.0
| -6,937,433,989,262,053,000
| 31.690722
| 73
| 0.584358
| false
| 4.081081
| false
| false
| false
|
zetaops/ulakbus
|
ulakbus/views/reports/base.py
|
1
|
6017
|
# -*- coding: utf-8 -*-
"""
"""
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from io import BytesIO
from zengine.lib.translation import gettext as _, gettext_lazy
import six
from zengine.forms import JsonForm
from zengine.forms import fields
from zengine.views.base import BaseView
import re
import base64
from datetime import datetime
# The ReportLab-backed PDF helpers are optional at import time; printout()
# will fail at runtime if they are absent. NOTE(review): the bare except
# also hides unrelated import errors.
try:
    from ulakbus.lib.pdfdocument.document import PDFDocument, register_fonts_from_paths
except:
    print("Warning: Reportlab module not found")
from ulakbus.lib.s3_file_manager import S3FileManager
from ulakbus.lib.common import get_file_url
class ReporterRegistry(type):
    """Metaclass that auto-registers every Reporter subclass (keyed by class
    name) and merges the base Reporter's Meta attributes into subclasses."""
    registry = {}
    _meta = None
    def __new__(mcs, name, bases, attrs):
        # for key, prop in attrs.items():
        # if hasattr(prop, 'view_method'):
        if name == 'Reporter':
            # Remember the base class' Meta so later subclasses inherit it.
            ReporterRegistry._meta = attrs['Meta']
        if 'Meta' not in attrs:
            # Subclass without its own Meta: clone the base Meta wholesale.
            attrs['Meta'] = type('Meta', (object,), ReporterRegistry._meta.__dict__)
        else:
            # Fill only the Meta attributes the subclass did not override.
            for k, v in ReporterRegistry._meta.__dict__.items():
                if k not in attrs['Meta'].__dict__:
                    setattr(attrs['Meta'], k, v)
        new_class = super(ReporterRegistry, mcs).__new__(mcs, name, bases, attrs)
        if name != 'Reporter':
            ReporterRegistry.registry[name] = new_class
        return new_class
    @staticmethod
    def get_reporters():
        """Return menu entries (one dict per registered reporter)."""
        return [{"text": v.get_title(),
                 "wf": 'generic_reporter',
                 "model": k,
                 "kategori": 'Raporlar',
                 "param": 'id'} for k, v in ReporterRegistry.registry.items()]
    @staticmethod
    def get_permissions():
        """Return (code, title, description) permission tuples."""
        return [("report.%s" % k, v.get_title(), "") for k, v in ReporterRegistry.registry.items()]
    @staticmethod
    def get_reporter(name):
        """Look up a registered reporter class by its class name."""
        return ReporterRegistry.registry[name]
# Runs of characters other than letters, digits, '-' and '.' — collapsed to a
# single '-' when building download file names (see Reporter.generate_file_name).
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
@six.add_metaclass(ReporterRegistry)
class Reporter(BaseView):
    """Base class for report views.

    Subclasses implement get_objects() and set TITLE; registration in
    ReporterRegistry happens automatically via the metaclass. The view
    dispatches on the incoming 'cmd': 'show' renders the data as a form,
    'printout' renders it as a downloadable PDF.
    """
    TITLE = ''
    class Meta:
        pass
    def __init__(self, current):
        super(Reporter, self).__init__(current)
        self.cmd = current.input.get('cmd', 'show')
        # print("CMD", self.cmd)
        if self.cmd == 'show':
            self.show()
        elif self.cmd == 'printout':
            self.printout()
    class ReportForm(JsonForm):
        # Single button that re-enters this view with cmd='printout'.
        printout = fields.Button(gettext_lazy(u"Yazdır"), cmd="printout")
    def show(self):
        """Render the report data into self.output for on-screen display.

        A list of dicts becomes a multi-row table; otherwise the objects are
        assumed to be (key, value) pairs and shown as a flat mapping.
        """
        objects = self.get_objects()
        frm = self.ReportForm(current=self.current, title=self.get_title())
        if objects:
            frm.help_text = ''
            if isinstance(objects[0], dict):
                self.output['object'] = {'fields': objects, 'type': 'table-multiRow'}
            else:
                objects = dict((k, str(v)) for k, v in objects)
                self.output['object'] = objects
        else:
            frm.help_text = _(u'Kayıt bulunamadı')
            self.output['object'] = {}
        self.set_client_cmd('form', 'show')
        self.output['forms'] = frm.serialize()
        self.output['forms']['constraints'] = {}
        self.output['forms']['grouping'] = []
        self.output['meta'] = {}
    def printout(self):
        """Render the report as a PDF, upload it to S3 and hand the client a
        download URL. NOTE(review): unlike show(), this indexes objects[0]
        without guarding against an empty result set."""
        register_fonts_from_paths('Vera.ttf',
                                  'VeraIt.ttf',
                                  'VeraBd.ttf',
                                  'VeraBI.ttf',
                                  'Vera')
        objects = self.get_objects()
        f = BytesIO()
        pdf = PDFDocument(f, font_size=14)
        pdf.init_report()
        pdf.h1(self.tr2ascii(self.get_title()))
        ascii_objects = []
        if isinstance(objects[0], dict):
            headers = objects[0].keys()
            ascii_objects.append([self.tr2ascii(h) for h in headers])
            for obj in objects:
                ascii_objects.append([self.tr2ascii(k) for k in obj.values()])
        else:
            for o in objects:
                ascii_objects.append((self.tr2ascii(o[0]), self.tr2ascii(o[1])))
        pdf.table(ascii_objects)
        pdf.generate()
        download_url = self.generate_temp_file(
            name=self.generate_file_name(),
            content=base64.b64encode(f.getvalue()),
            file_type='application/pdf',
            ext='pdf'
        )
        self.set_client_cmd('download')
        self.output['download_url'] = download_url
    @staticmethod
    def generate_temp_file(name, content, file_type, ext):
        """Store *content* via S3FileManager and return its public URL."""
        f = S3FileManager()
        key = f.store_file(name=name, content=content, type=file_type, ext=ext)
        return get_file_url(key)
    def generate_file_name(self):
        """Build '<ascii-title>-<timestamp>' for the generated PDF."""
        return "{0}-{1}".format(
            FILENAME_RE.sub('-', self.tr2ascii(self.get_title()).lower()),
            datetime.now().strftime("%d.%m.%Y-%H.%M.%S")
        )
    @staticmethod
    def convert_choices(choices_dict_list):
        """Turn [(key, label), ...] into a dict, coercing numeric keys to int."""
        result = []
        for d in choices_dict_list:
            try:
                k = int(d[0])
            except:
                k = d[0]
            result.append((k, d[1]))
        return dict(result)
    def get_headers(self):
        return self.HEADERS
    @classmethod
    def get_title(cls):
        return six.text_type(cls.TITLE)
    def get_objects(self):
        """Subclasses must return the report rows (list of dicts or pairs)."""
        raise NotImplementedError
    def tr2ascii(self, inp):
        """Transliterate Turkish-specific characters to their ASCII
        counterparts (needed for the PDF fonts used in printout)."""
        inp = six.text_type(inp)
        shtlst = [
            ('ğ', 'g'),
            ('ı', 'i'),
            ('İ', 'I'),
            ('ç', 'c'),
            ('ö', 'o'),
            ('ü', 'u'),
            ('ş', 's'),
            ('Ğ', 'G'),
            ('Ş', 'S'),
            ('Ö', 'O'),
            ('Ü', 'U'),
            ('Ç', 'C'),
        ]
        for t, a in shtlst:
            inp = inp.replace(t, a)
        return inp
def ReportDispatcher(current):
    """Instantiate the reporter registered under the requested model name;
    the reporter's __init__ performs the actual dispatch."""
    reporter_cls = ReporterRegistry.get_reporter(current.input['model'])
    reporter_cls(current)
|
gpl-3.0
| -235,994,614,447,476,320
| 29.467005
| 99
| 0.530157
| false
| 3.758297
| false
| false
| false
|
pombredanne/nTLP
|
examples/gridworlds/gw_bm_analysis.py
|
1
|
4888
|
# Copyright (c) 2011, 2012 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $Id$
# Take averages of the output from the gridworld benchmark script.
import numpy as np
import sys
import os
import string
# Fit formulas: (Template producing the gnuplot fit expression,
# printf-style label shown in the plot legend).
expform = (string.Template("exp(${SOLVER}_a*x + ${SOLVER}_b)"), "exp(%.3f*x + %.3f)")
linform = (string.Template("${SOLVER}_a*x + ${SOLVER}_b"), "%.3f*x + %.3f")
# Per-solver gnuplot 'fit' stanza; $$ escapes a literal $ for gnuplot columns.
plotfit = string.Template("""${SOLVER}_a = ${SOLVER}_b = 0.5
${SOLVER}_f(x) = $FORMULA
fit ${SOLVER}_f(x) \"$FILENAME\" using $XCOL:((stringcolumn(1) eq "$SOLVER") ? $$$YCOL : 1/0) via ${SOLVER}_a, ${SOLVER}_b
""")
# One 'plot' clause per solver: measured data with errorbars plus fitted curve.
plottpl = string.Template("\"$FILENAME\" using $XCOL:((stringcolumn(1) eq \"$SOLVER\") ? $$$YCOL : 1/0):$ERRCOL with errorbars \
title \"$SOLVER\" lt $COLOR, ${SOLVER}_f(x) title sprintf(\"$SOLVER fit: $FORMULA\", ${SOLVER}_a, ${SOLVER}_b) lt $COLOR")
# Shared plot preamble: axis labels and PNG output settings.
pf = string.Template("""
set xlabel "$XAXIS"
set ylabel "$YAXIS"
set terminal png font "" 10
set output "$FN_PNG"
""")
# Column layout of the averaged data file; index 0 is a filler so that the
# list indices line up with gnuplot's 1-based column numbers.
columns = ["", "Solver", "Cells", "Goals", "WDensity", "AvgTime", "StDevTime", "AvgStates", "StDevStates"]
colnames = ["", "Solver", "Grid cells", "Number of goals", "Wall Density", "CPU time (s)", "", "Number of states", ""]
# Map each value column to its standard-deviation column for errorbars.
err = { columns.index("AvgTime") : columns.index("StDevTime"),
        columns.index("AvgStates") : columns.index("StDevStates") }
# --- CLI handling (Python 2 script) ---
if len(sys.argv) < 4:
    print "Usage: gw_bm_analysis.py [data file] [x-col] [y-col] <exp/lin>"
    sys.exit(0)
# Structured benchmark data; field names are taken from the file header
# (assumed to include "Solver", "W", "H", "Goals", "WDensity", "Time",
# "NStates" — TODO confirm against the benchmark script's output).
d = np.genfromtxt(sys.argv[1], dtype="S16, i4, i4, i4, f8, f8, i4", names=True)
xcol = columns.index(sys.argv[2])
ycol = columns.index(sys.argv[3])
if len(sys.argv) >= 5:
    EXP = (sys.argv[4] == "exp")
else:
    # Default linear fit
    EXP = False
if EXP: eqn = expform
else: eqn = linform
avgs = []
solvers = ["NuSMV", "jtlv", "gr1c", "SPIN"]
# List of columns specifying dimension of a grid
dimension = ["W", "H", "Goals", "WDensity"]
# Average time and state counts per solver and per unique grid configuration.
for solver in solvers:
    s_data = d[d["Solver"] == solver]
    for dim in np.unique(s_data[dimension]):
        # Mean & error in the mean
        times = s_data[s_data[dimension] == dim]["Time"]
        time_mean = times.mean()
        time_stdev = times.std()/np.sqrt(len(times))
        states = s_data[s_data[dimension] == dim]["NStates"]
        states_mean = states.mean()
        states_stdev = states.std()/np.sqrt(len(states))
        avgs.append((solver, dim[0] * dim[1], dim[2], dim[3], time_mean,
                     time_stdev, states_mean, states_stdev))
# Derive output filenames from the input data file name.
(prefix, ext) = os.path.splitext(sys.argv[1])
outfile = prefix + ".avg" + ext
pltfile = prefix + ".avg.plt"
pngfile = prefix + ".png"
# Write the averaged table (matches the 'columns' layout minus the filler).
with open(outfile, "w") as f:
    f.write(" ".join(columns[1:]) + "\n")
    for a in avgs:
        f.write("%s %d %d %.4f %.4f %.4f %.4f %.4f\n" % a)
# Emit the gnuplot script: one fit stanza per solver, then the plot command.
with open(pltfile, "w") as f:
    pl = []
    for (n, solver) in enumerate(solvers):
        fx = eqn[0].substitute(SOLVER=solver)
        s = plotfit.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
                               YCOL=ycol, FORMULA=fx)
        f.write(s)
        s = plottpl.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
                               YCOL=ycol, ERRCOL=err[ycol], COLOR=n, FORMULA=eqn[1])
        pl.append(s)
    s = pf.safe_substitute(FN_PNG=pngfile, XAXIS=colnames[xcol],
                           YAXIS=colnames[ycol])
    f.write(s)
    if EXP: f.write("set log y\n")
    f.write("plot " + ", ".join(pl))
|
bsd-3-clause
| 3,587,470,938,751,121,400
| 40.07563
| 128
| 0.653642
| false
| 3.131326
| false
| false
| false
|
mathiasertl/django-ca
|
ca/django_ca/deprecation.py
|
1
|
1194
|
# This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>.
"""Deprecation classes in django-ca."""
class RemovedInDjangoCA120Warning(PendingDeprecationWarning):
    """Warning if a feature will be removed in django-ca==1.20."""
class RemovedInDjangoCA121Warning(PendingDeprecationWarning):
    """Warning if a feature will be removed in django-ca==1.21."""
class RemovedInDjangoCA122Warning(PendingDeprecationWarning):
    """Warning if a feature will be removed in django-ca==1.22."""
# Convenience alias pointing at the warning class for the next release.
RemovedInNextVersionWarning = RemovedInDjangoCA120Warning
|
gpl-3.0
| 4,251,496,230,839,164,400
| 40.172414
| 98
| 0.767169
| false
| 4.174825
| false
| false
| false
|
DataDog/integrations-core
|
couchbase/tests/test_unit.py
|
1
|
3050
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy
import mock
import pytest
from datadog_checks.couchbase import Couchbase
def test_camel_case_to_joined_lower(instance):
    """camel_case_to_joined_lower must normalise arbitrary identifier styles
    to joined_lower, collapsing punctuation runs into single underscores."""
    check = Couchbase('couchbase', {}, [instance])
    cases = {
        'camelCase': 'camel_case',
        'FirstCapital': 'first_capital',
        'joined_lower': 'joined_lower',
        'joined_Upper1': 'joined_upper1',
        'Joined_upper2': 'joined_upper2',
        'Joined_Upper3': 'joined_upper3',
        '_leading_Underscore': 'leading_underscore',
        'Trailing_Underscore_': 'trailing_underscore',
        'DOubleCAps': 'd_ouble_c_aps',
        '@@@super--$$-Funky__$__$$%': 'super_funky',
    }
    for raw, expected in cases.items():
        converted = check.camel_case_to_joined_lower(raw)
        assert converted == expected, 'Input was {}, expected output was {}, actual output was {}'.format(
            raw, expected, converted
        )
def test_extract_seconds_value(instance):
    """extract_seconds_value must convert duration strings with s/ms/us
    suffixes (including a mangled micro-sign variant) into float seconds."""
    check = Couchbase('couchbase', {}, [instance])
    cases = {
        '3.45s': 3.45,
        '12ms': 0.012,
        '700.5us': 0.0007005,
        u'733.364\u00c2s': 0.000733364,
        '0': 0,
    }
    for raw, expected in cases.items():
        converted = check.extract_seconds_value(raw)
        assert converted == expected, 'Input was {}, expected output was {}, actual output was {}'.format(
            raw, expected, converted
        )
def test__get_query_monitoring_data(instance_query):
    """
    `query_monitoring_url` can potentially fail, be sure we don't raise when the
    endpoint is not reachable
    """
    couchbase = Couchbase('couchbase', {}, [instance_query])
    # The call itself is the assertion: it must swallow connection errors
    # rather than propagate them.
    couchbase._get_query_monitoring_data()
@pytest.mark.parametrize(
    'test_case, extra_config, expected_http_kwargs',
    [
        (
            "new auth config",
            {'username': 'new_foo', 'password': 'bar', 'tls_verify': False},
            {'auth': ('new_foo', 'bar'), 'verify': False},
        ),
        ("legacy config", {'user': 'new_foo', 'ssl_verify': False}, {'auth': ('new_foo', 'password'), 'verify': False}),
    ],
)
def test_config(test_case, extra_config, expected_http_kwargs, instance):
    """Both the current (username/tls_verify) and legacy (user/ssl_verify)
    option spellings must translate into the same requests kwargs."""
    instance = deepcopy(instance)
    instance.update(extra_config)
    check = Couchbase('couchbase', {}, [instance])
    # Stub out HTTP so the check runs without a live Couchbase endpoint.
    with mock.patch('datadog_checks.base.utils.http.requests') as r:
        r.get.return_value = mock.MagicMock(status_code=200)
        check.check(instance)
        # All other kwargs are irrelevant here; only auth/verify are pinned.
        http_wargs = dict(
            auth=mock.ANY, cert=mock.ANY, headers=mock.ANY, proxies=mock.ANY, timeout=mock.ANY, verify=mock.ANY
        )
        http_wargs.update(expected_http_kwargs)
        r.get.assert_called_with('http://localhost:8091/pools/default/tasks', **http_wargs)
|
bsd-3-clause
| -6,643,786,405,936,923,000
| 34.057471
| 120
| 0.623279
| false
| 3.419283
| true
| false
| false
|
roshchupkin/hase
|
tools/VCF2hdf5.py
|
1
|
4024
|
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from config import PYTHON_PATH
if PYTHON_PATH is not None:
for i in PYTHON_PATH: sys.path.insert(0,i)
import argparse
import h5py
import pandas as pd
import numpy as np
from hdgwas.tools import Timer
import tables
import glob
def probes_VCF2hdf5(data_path, save_path,study_name, chunk_size=1000000):
    """Convert a tab-separated VCF-like probes file to HDF5 (Python 2).

    Alleles are replaced by their hash() values in the HDF5 table; the
    hash->allele mapping is written alongside as a gzipped CSV.
    NOTE(review): relies on Python 2's deterministic string hash().
    """
    # Start from a clean output file.
    if os.path.isfile(os.path.join(save_path,'probes',study_name+'.h5')):
        os.remove(os.path.join(save_path,'probes',study_name+'.h5'))
    hash_table={'keys':np.array([],dtype=np.int),'allele':np.array([])}
    df=pd.read_csv(data_path,sep='\t',chunksize=chunk_size, header=None,index_col=None)
    for i,chunk in enumerate(df):
        print 'add chunk {}'.format(i)
        print chunk.head()
        chunk.columns=[ "CHR","bp" ,"ID",'allele1','allele2','QUAL','FILTER','INFO'] #TODO (high) parse INFO
        hash_1=chunk.allele1.apply(hash)
        hash_2=chunk.allele2.apply(hash)
        # Unique hashes in this chunk and the allele strings they stand for.
        k,indices=np.unique(np.append(hash_1,hash_2),return_index=True)
        s=np.append(chunk.allele1,chunk.allele2)[indices]
        # Append only mappings not seen in previous chunks.
        ind=np.invert(np.in1d(k,hash_table['keys']))
        hash_table['keys']=np.append(hash_table['keys'],k[ind])
        hash_table['allele']=np.append(hash_table['allele'],s[ind])
        chunk.allele1=hash_1
        chunk.allele2=hash_2
        chunk.to_hdf(os.path.join(save_path,'probes',study_name+'.h5'),data_columns=["CHR","bp" ,"ID",'allele1','allele2'], key='probes',format='table',append=True,
                     min_itemsize = 25, complib='zlib',complevel=9 )
    pd.DataFrame.from_dict(hash_table).to_csv(os.path.join(save_path,'probes',study_name+'_hash_table.csv.gz'),index=False,compression='gzip', sep='\t')
def ind_VCF2hdf5(data_path, save_path,study_name):
    """Store the individual IDs (one per line in *data_path*) as an HDF5
    table under <save_path>/individuals/<study_name>.h5 (Python 2)."""
    if os.path.isfile(os.path.join(save_path,'individuals',study_name+'.h5')):
        os.remove(os.path.join(save_path,'individuals',study_name+'.h5'))
    n=[]
    f=open(data_path,'r')
    for i,j in enumerate(f):
        # Strip the trailing newline character.
        n.append((j[:-1]))
    f.close()
    n=np.array(n)
    chunk=pd.DataFrame.from_dict({"individual":n})
    chunk.to_hdf(os.path.join(save_path,'individuals',study_name+'.h5'), key='individuals',format='table',
                 min_itemsize = 25, complib='zlib',complevel=9 )
def genotype_VCF2hdf5(data_path,id, save_path, study_name):
    """Convert one tab-separated genotype chunk to a compressed float16
    HDF5 carray, then delete the source file (Python 2).

    NOTE(review): parameter 'id' shadows the builtin of the same name.
    """
    df=pd.read_csv(data_path, header=None, index_col=None,sep='\t', dtype=np.float16)
    data=df.as_matrix()
    print data.shape
    print 'Saving chunk...{}'.format(os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'))
    h5_gen_file = tables.open_file(
        os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'), 'w', title=study_name)
    atom = tables.Float16Atom()
    genotype = h5_gen_file.create_carray(h5_gen_file.root, 'genotype', atom,
                                        (data.shape),
                                        title='Genotype',
                                        filters=tables.Filters(complevel=9, complib='zlib'))
    genotype[:] = data
    h5_gen_file.close()
    # The raw text chunk is no longer needed once the HDF5 file is written.
    os.remove(data_path)
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Script to convert VCF data')
parser.add_argument("-study_name", required=True, type=str, help="Study specific name")
parser.add_argument("-id", type=str, help="subject id")
parser.add_argument("-data",required=True, type=str, help="path to file")
parser.add_argument("-out",required=True, type=str, help="path to results save folder")
parser.add_argument("-flag",required=True,type=str,choices=['individuals','probes','chunk'], help="path to file with SNPs info")
args = parser.parse_args()
print args
try:
print ('Creating directories...')
os.mkdir(os.path.join(args.out,'genotype') )
os.mkdir(os.path.join(args.out,'individuals') )
os.mkdir(os.path.join(args.out,'probes') )
os.mkdir(os.path.join(args.out,'tmp_files'))
except:
print('Directories "genotype","probes","individuals" are already exist in {}...'.format(args.out))
if args.flag=='probes':
probes_VCF2hdf5(args.data, args.out, args.study_name)
elif args.flag=='individuals':
ind_VCF2hdf5(args.data, args.out,args.study_name)
elif args.flag=='chunk':
genotype_VCF2hdf5(args.data,args.id, args.out,args.study_name)
|
gpl-3.0
| 3,613,012,578,314,009,000
| 36.962264
| 158
| 0.696571
| false
| 2.691639
| false
| false
| false
|
joanayma/pyautorunalize
|
pyautorunanalize.py
|
1
|
5119
|
#! /bin/env python
"""
PyAutorunalizer 0.1
Python script for autorunalize: http://sysinternals.com/autoruns.com listing autoruns Windows
items. Version 11.6 or greater needed.
http://Virutotal.com externa database of viruses.
original idea: http://trustedsignal.blogspot.com.es/2012/02/finding-evil-automating-autoruns.html
original implementation uses cygwin32, bash and other blobs.
Virustotal API refer: https://github.com/botherder/virustotal/
Autoruns is part of Sysinternals' suit and owns the copyright. Windows are trademark of Microsoft.
Licence: GPLv2
#Use this script at your own.
This script is not inteded as a substitute for any antivirus. Is just a sanity check.
Individuals htat noncomplain the Virustotal or sysinternals terms or harms the antivirus
industry, are out of my resposability.
"""
import xml.etree.ElementTree as ElementTree
import json
import urllib,urllib.request
import sys,os,getopt,subprocess
# Sink for the child process' stderr; opened once for the module's lifetime
# and never closed in this script.
fnull = open(os.devnull, "w")
def runanalizer(API_KEY):
    """List autostart entries via Sysinternals' autorunsc.exe and look up
    each entry's SHA-256 hash on VirusTotal via scan().

    Exits with status 3 when autorunsc.exe is missing next to the script and
    with 1002 when its XML output cannot be parsed.
    """
    # Check for autorunsc.exe before doing anything else.
    try:
        with open('./autorunsc.exe'):
            pass
    except IOError:
        print('autorunsc.exe binary not found! Download from https://live.sysinternals.com/autorunsc.exe')
        sys.exit(3)
    # Honour an HTTP_PROXY environment variable for the VirusTotal lookups.
    proxy = os.environ.get('HTTP_PROXY')
    if proxy is not None:
        proxies = {'https': 'http://{0}'.format(proxy)}
        # Bug fix: the handler must be installed for urlopen() to use it;
        # the original constructed it and threw it away.
        opener = urllib.request.build_opener(urllib.request.ProxyHandler(proxies))
        urllib.request.install_opener(opener)
        print("[Info] Going through proxies: ", proxies)
    print('[Info] Getting list of files to analise from Autoruns ...')
    autoruns_proc = subprocess.Popen(['autorunsc.exe', "/accepteula", '-xaf'],
                                     stdout=subprocess.PIPE, stderr=fnull)
    autoruns_xml = autoruns_proc.communicate()[0].decode("utf_16")
    # Bug fix: str.replace returns a new string; the original discarded it.
    autoruns_xml = autoruns_xml.replace('\r\n', '\n')
    # Parse the XML output.
    try:
        autoruns_tree = ElementTree.fromstring(autoruns_xml)
    except ElementTree.ParseError as e:
        # Bug fix: the module is bound as 'ElementTree'; the previous
        # 'xml.etree.ElementTree.ParseError' raised NameError instead of
        # catching the parse failure.
        print('[Error] Error parsing xml autoruns\' output. \n Is Autoruns\' latest version?\n', e)
        sys.exit(1002)
    for item in autoruns_tree:
        text = "[Object]"
        if item is None:
            # NOTE(review): iterating an Element never yields None; kept for
            # safety, but beware that 'break' aborts the whole scan.
            text = text + " Invalid item (mostly not a binary image)\n"
            break
        imagepath = item.findtext('imagepath')
        name = item.findtext('itemname')
        if imagepath is not None:
            sha256hash = item.findtext('sha256hash')
            text = text + '' + name + '\n ' + imagepath + '\n ' + sha256hash + '\n scanning... '
            print(text)
            result = scan(sha256hash, API_KEY)
            print(result)
def scan(sha256hash, API_KEY):
    """Query the VirusTotal v2 file/report API for *sha256hash*.

    Returns a human-readable result string. Network and decoding failures
    are reported as an error string rather than raised.
    """
    VIRUSTOTAL_REPORT_URL = 'https://www.virustotal.com/vtapi/v2/file/report'
    VIRUSTOTAL_SCAN_URL = 'https://www.virustotal.com/vtapi/v2/file/scan'
    if sha256hash is None:
        return "No valid hash for this file"
    data = urllib.parse.urlencode({
        'resource': sha256hash,
        'apikey': API_KEY
    })
    data = data.encode('utf-8')
    try:
        request = urllib.request.Request(VIRUSTOTAL_REPORT_URL, data)
        reply = urllib.request.urlopen(request)
        answer = reply.read().decode("utf-8")
        report = json.loads(answer)
    except Exception as e:
        return "\n[Error] Cannot obtain results from VirusTotal: {0}\n".format(e)
    # response_code 0 means the resource is unknown; a positive value means
    # a report is available (VirusTotal API v2 semantics).
    if int(report['response_code']) == 0:
        response = report['verbose_msg']
    elif int(report['response_code']) < 0:
        response = 'Not found on Virustotal database!'
        # Shall send the file (VIRUSTOTAL_SCAN_URL) if it is not on Virustotal.
    else:
        response = 'FOUND'
        # Bug fix: 'positives' is never negative, so the original
        # '>= 0' check always reported "not infected" and left the
        # per-engine loop unreachable.
        if int(report['positives']) == 0:
            response = response + 'but not infected.'
        else:
            # report['scans'] maps engine name -> {'detected': bool,
            # 'result': str, ...}. Bug fix: the original compared the
            # per-engine dict itself against the string 'detected'.
            for av, av_report in report['scans'].items():
                if av_report.get('detected'):
                    response = response + ' INFECTED!\n engine:' + av + ',\n malware:' + str(av_report.get('result')) + '\n'
    return response
def help():
    # Print main()'s docstring — which doubles as the usage text — then exit.
    # NOTE(review): shadows the builtin help().
    print(main.__doc__)
    sys.exit(0)
def main(argv):
"""\n
Script for Windows basic security check using Sysinternal\'s Autoruns
and Virustotal.com\n
Thereforce, you need to get a public API Key from http://www.virustotal.com for your
scripting analysis\n
and autorunsc.exe binary.\n
Usage:\n
autorunalize.exe [--help] --API-KEY YOUR_API_KEY\n
-h, --help Shows this help.\n
-a, --API-KEY Your public API key from Virustotal.
This a 64 characters hexadecimal string.\n
Example:\n
./autorunalize.exe --API-KEY YOUR_API_KEY\n
"""
API_KEY = ''
try:
opts, args = getopt.getopt(argv,"ha:",["help","API-KEY="])
except getopt.GetoptError:
print('pyrunanalizer.py --API-KEY YOUR_API_KEY_HERE')
sys.exit(2)
for opt, arg in opts:
if opt in ('-h','--help'):
help()
sys.exit()
elif opt in ("-a", "--API-KEY"):
API_KEY = arg
runanalizer(API_KEY)
else:
help()
if __name__ == "__main__":
main(sys.argv[1:])
|
gpl-2.0
| -8,408,628,864,717,367,000
| 33.355705
| 116
| 0.62942
| false
| 3.229653
| false
| false
| false
|
bilke/OpenSG-1.8
|
SConsLocal/scons-local-0.96.1/SCons/Tool/__init__.py
|
2
|
13279
|
"""SCons.Tool
SCons tool selection.
This looks for modules that define a callable object that can modify
a construction environment as appropriate for a given tool (or tool
chain).
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "tool specification" in an arbitrary callable function. No
one needs to use or tie in to this subsystem in order to roll their own
tool definition.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/baseline/src/engine/SCons/Tool/__init__.py 0.96.1.D001 2004/08/23 09:55:29 knight"
import imp
import sys
import SCons.Errors
import SCons.Defaults
class ToolSpec:
    """A named wrapper around a tool module's generate()/exists() functions.

    Instances are callable: calling one records the tool's name in the
    environment's TOOLS list and then runs the tool's generate() on that
    environment.  The ``generate`` and ``exists`` attributes are attached
    externally by Tool().
    """
    def __init__(self, name):
        self.name = name
    def __call__(self, env, *args, **kw):
        env.Append(TOOLS = [ self.name ])
        # Direct extended-call syntax replaces the deprecated apply()
        # builtin (removed in Python 3; the call form works in Python 2.0+).
        self.generate(env, *args, **kw)
    def __str__(self):
        return self.name
def Tool(name, toolpath=[]):
    "Select a canned Tool specification, optionally searching in toolpath."
    # First look for an external tool module on the caller-supplied toolpath.
    # NOTE: toolpath (a mutable default) is only read, never mutated, so the
    # shared default list is safe here.
    try:
        file, path, desc = imp.find_module(name, toolpath)
        try:
            module = imp.load_module(name, file, path, desc)
            spec = ToolSpec(name)
            spec.generate = module.generate
            spec.exists = module.exists
            return spec
        finally:
            # imp.find_module() may hand back an open file object;
            # always close it, even when load_module() raises.
            if file:
                file.close()
    except ImportError, e:
        # Not found on the external toolpath; fall through to the
        # built-in SCons tool modules below.
        pass
    full_name = 'SCons.Tool.' + name
    # Import (and cache in sys.modules) each built-in tool only once.
    if not sys.modules.has_key(full_name):
        try:
            file, path, desc = imp.find_module(name,
                                               sys.modules['SCons.Tool'].__path__)
            mod = imp.load_module(full_name, file, path, desc)
            # Expose the module as an attribute of the SCons.Tool package.
            setattr(SCons.Tool, name, mod)
        except ImportError, e:
            raise SCons.Errors.UserError, "No tool named '%s': %s" % (name, e)
        if file:
            file.close()
    spec = ToolSpec(name)
    spec.generate = sys.modules[full_name].generate
    spec.exists = sys.modules[full_name].exists
    return spec
def createProgBuilder(env):
    """Return the environment's Program builder, creating it on demand.

    An already-installed Program builder is returned unchanged; otherwise a
    new one is constructed, stored in env['BUILDERS'] and returned.
    """
    try:
        return env['BUILDERS']['Program']
    except KeyError:
        pass
    builder = SCons.Builder.Builder(action = SCons.Defaults.LinkAction,
                                    emitter = '$PROGEMITTER',
                                    prefix = '$PROGPREFIX',
                                    suffix = '$PROGSUFFIX',
                                    src_suffix = '$OBJSUFFIX',
                                    src_builder = 'Object',
                                    target_scanner = SCons.Defaults.ProgScan)
    env['BUILDERS']['Program'] = builder
    return builder
def createStaticLibBuilder(env):
    """Return the environment's StaticLibrary builder, creating it on demand.

    When a new builder is created it is also installed under the 'Library'
    alias; an existing builder is returned untouched.
    """
    try:
        return env['BUILDERS']['StaticLibrary']
    except KeyError:
        pass
    builder = SCons.Builder.Builder(action = SCons.Defaults.ArAction,
                                    emitter = '$LIBEMITTER',
                                    prefix = '$LIBPREFIX',
                                    suffix = '$LIBSUFFIX',
                                    src_suffix = '$OBJSUFFIX',
                                    src_builder = 'StaticObject')
    env['BUILDERS']['StaticLibrary'] = builder
    env['BUILDERS']['Library'] = builder
    return builder
def createSharedLibBuilder(env):
    """Return the environment's SharedLibrary builder, creating it on demand."""
    try:
        return env['BUILDERS']['SharedLibrary']
    except KeyError:
        pass
    # SharedCheck runs before the actual shared link step.
    builder = SCons.Builder.Builder(action = [SCons.Defaults.SharedCheck,
                                              SCons.Defaults.ShLinkAction],
                                    emitter = "$SHLIBEMITTER",
                                    prefix = '$SHLIBPREFIX',
                                    suffix = '$SHLIBSUFFIX',
                                    target_scanner = SCons.Defaults.ProgScan,
                                    src_suffix = '$SHOBJSUFFIX',
                                    src_builder = 'SharedObject')
    env['BUILDERS']['SharedLibrary'] = builder
    return builder
def createObjBuilders(env):
    """Return the (StaticObject, SharedObject) builder pair for env.

    Missing builders are created with empty action/emitter maps (tool
    modules add per-suffix entries later); StaticObject is additionally
    aliased as 'Object' when newly created.  Shared by many Tool modules.
    """
    try:
        static_obj = env['BUILDERS']['StaticObject']
    except KeyError:
        static_obj = SCons.Builder.Builder(action = {},
                                           emitter = {},
                                           prefix = '$OBJPREFIX',
                                           suffix = '$OBJSUFFIX',
                                           src_builder = ['CFile', 'CXXFile'],
                                           source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
        env['BUILDERS']['StaticObject'] = static_obj
        env['BUILDERS']['Object'] = static_obj
    try:
        shared_obj = env['BUILDERS']['SharedObject']
    except KeyError:
        shared_obj = SCons.Builder.Builder(action = {},
                                           emitter = {},
                                           prefix = '$SHOBJPREFIX',
                                           suffix = '$SHOBJSUFFIX',
                                           src_builder = ['CFile', 'CXXFile'],
                                           source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
        env['BUILDERS']['SharedObject'] = shared_obj
    return (static_obj, shared_obj)
def createCFileBuilders(env):
    """Return the (CFile, CXXFile) builder pair for env.

    Missing builders are created with empty action/emitter maps and the
    default generated-source suffixes ('.c' / '.cc') are installed only
    alongside a newly created builder.  Shared by many Tool modules.
    """
    try:
        c_file = env['BUILDERS']['CFile']
    except KeyError:
        c_file = SCons.Builder.Builder(action = {},
                                       emitter = {},
                                       suffix = {None:'$CFILESUFFIX'})
        env['BUILDERS']['CFile'] = c_file
        env['CFILESUFFIX'] = '.c'
    try:
        cxx_file = env['BUILDERS']['CXXFile']
    except KeyError:
        cxx_file = SCons.Builder.Builder(action = {},
                                         emitter = {},
                                         suffix = {None:'$CXXFILESUFFIX'})
        env['BUILDERS']['CXXFile'] = cxx_file
        env['CXXFILESUFFIX'] = '.cc'
    return (c_file, cxx_file)
def FindTool(tools, env):
    """Return the first name in `tools` whose Tool exists in env, else None."""
    for candidate in tools:
        if Tool(candidate).exists(env):
            return candidate
    return None
def FindAllTools(tools, env):
    """Return the subset of `tools` whose Tool().exists(env) is true."""
    return filter(lambda tool: Tool(tool).exists(env), tools)
def tool_list(platform, env):
    """Return the default list of tool names to initialize for `platform`.

    Chooses per-platform preference tables for linkers, compilers,
    assemblers and archivers, picks the first existing tool from each
    table, appends any extra tools found on the system, and returns the
    resulting name list with Nones filtered out.
    """
    # XXX this logic about what tool to prefer on which platform
    # should be moved into either the platform files or
    # the tool files themselves.
    # The search orders here are described in the man page. If you
    # change these search orders, update the man page as well.
    # (The bare strings below are no-op statements used as annotations.)
    if str(platform) == 'win32':
        "prefer Microsoft tools on Windows"
        linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ]
        c_compilers = ['msvc', 'mingw', 'gcc', 'icl', 'icc', 'cc', 'bcc32' ]
        cxx_compilers = ['msvc', 'icc', 'g++', 'c++', 'bcc32' ]
        assemblers = ['masm', 'nasm', 'gas', '386asm' ]
        fortran_compilers = ['g77', 'ifl', 'cvf', 'fortran']
        ars = ['mslib', 'ar', 'tlib']
    elif str(platform) == 'os2':
        "prefer IBM tools on OS/2"
        linkers = ['ilink', 'gnulink', 'mslink']
        c_compilers = ['icc', 'gcc', 'msvc', 'cc']
        cxx_compilers = ['icc', 'g++', 'msvc', 'c++']
        assemblers = ['nasm', 'masm', 'gas']
        fortran_compilers = ['ifl', 'g77']
        ars = ['ar', 'mslib']
    elif str(platform) == 'irix':
        "prefer MIPSPro on IRIX"
        linkers = ['sgilink', 'gnulink']
        c_compilers = ['sgicc', 'gcc', 'cc']
        cxx_compilers = ['sgic++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f77', 'g77', 'fortran']
        ars = ['sgiar']
    elif str(platform) == 'sunos':
        "prefer Forte tools on SunOS"
        linkers = ['sunlink', 'gnulink']
        c_compilers = ['suncc', 'gcc', 'cc']
        cxx_compilers = ['sunc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f77', 'g77', 'fortran']
        ars = ['sunar']
    elif str(platform) == 'hpux':
        "prefer aCC tools on HP-UX"
        linkers = ['hplink', 'gnulink']
        c_compilers = ['hpcc', 'gcc', 'cc']
        cxx_compilers = ['hpc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f77', 'g77', 'fortran']
        ars = ['ar']
    elif str(platform) == 'aix':
        "prefer AIX Visual Age tools on AIX"
        linkers = ['aixlink', 'gnulink']
        c_compilers = ['aixcc', 'gcc', 'cc']
        cxx_compilers = ['aixc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['aixf77', 'g77', 'fortran']
        ars = ['ar']
    else:
        "prefer GNU tools on all other platforms"
        linkers = ['gnulink', 'mslink', 'ilink']
        c_compilers = ['gcc', 'msvc', 'icc', 'cc']
        cxx_compilers = ['g++', 'msvc', 'icc', 'c++']
        assemblers = ['gas', 'nasm', 'masm']
        fortran_compilers = ['g77', 'ifort', 'ifl', 'fortran']
        ars = ['ar', 'mslib']
    # Fall back to the table's first entry when nothing is detected.
    c_compiler = FindTool(c_compilers, env) or c_compilers[0]
    # XXX this logic about what tool provides what should somehow be
    #     moved into the tool files themselves.
    if c_compiler and c_compiler == 'mingw':
        # MinGW contains a linker, C compiler, C++ compiler,
        # Fortran compiler, archiver and assembler:
        cxx_compiler = None
        linker = None
        assembler = None
        fortran_compiler = None
        ar = None
    else:
        # Don't use g++ if the C compiler has built-in C++ support:
        if c_compiler in ('msvc', 'icc'):
            cxx_compiler = None
        else:
            cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0]
        linker = FindTool(linkers, env) or linkers[0]
        assembler = FindTool(assemblers, env) or assemblers[0]
        fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0]
        ar = FindTool(ars, env) or ars[0]
    # Optional helper tools are included only when actually present.
    other_tools = FindAllTools(['BitKeeper', 'CVS',
                                'dmd',
                                'dvipdf', 'dvips', 'gs',
                                'jar', 'javac', 'javah',
                                'latex', 'lex', 'm4', 'midl', 'msvs',
                                'pdflatex', 'pdftex', 'Perforce',
                                'RCS', 'rmic', 'SCCS',
                                # 'Subversion',
                                'swig',
                                'tar', 'tex', 'yacc', 'zip'],
                               env)
    tools = ([linker, c_compiler, cxx_compiler,
              fortran_compiler, assembler, ar]
             + other_tools)
    # Strip out the Nones (e.g. the suppressed cxx_compiler for msvc/icc).
    return filter(lambda x: x, tools)
|
lgpl-2.1
| 5,232,730,326,358,949,000
| 37.827485
| 125
| 0.540252
| false
| 4.141921
| false
| false
| false
|
brennie/reviewboard
|
reviewboard/oauth/forms.py
|
1
|
11912
|
"""Forms for OAuth2 applications."""
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from django.forms import widgets
from django.utils.translation import ugettext, ugettext_lazy as _
from djblets.forms.widgets import CopyableTextInput, ListEditWidget
from oauth2_provider.generators import (generate_client_id,
generate_client_secret)
from oauth2_provider.validators import URIValidator
from reviewboard.admin.form_widgets import RelatedUserWidget
from reviewboard.oauth.models import Application
from reviewboard.oauth.widgets import OAuthSecretInputWidget
from reviewboard.site.urlresolvers import local_site_reverse
class ApplicationChangeForm(forms.ModelForm):
    """A form for updating an Application.

    This form is intended to be used by the admin site.
    """

    # Error shown when a user tries to re-enable an application that was
    # disabled for security (see clean()).
    DISABLED_FOR_SECURITY_ERROR = _(
        'This Application has been disabled to keep your server secure. '
        'It cannot be re-enabled until its client secret changes.'
    )

    # Read-only display field: the client ID is generated server-side and
    # stripped from cleaned_data in clean() so it can never be edited.
    client_id = forms.CharField(
        label=_('Client ID'),
        help_text=_(
            'The client ID. Your application will use this in OAuth2 '
            'authentication to identify itself.',
        ),
        widget=CopyableTextInput(attrs={
            'readonly': True,
            'size': 100,
        }),
        required=False,
    )

    def __init__(self, data=None, initial=None, instance=None):
        """Initialize the form.

        Args:
            data (dict, optional):
                The provided form data.

            initial (dict, optional):
                The initial form values.

            instance (Application, optional):
                The application to edit.
        """
        super(ApplicationChangeForm, self).__init__(data=data,
                                                    initial=initial,
                                                    instance=instance)

        if instance and instance.pk:
            # If we are creating an application (as the
            # ApplicationCreationForm is a subclass of this class), the
            # client_secret wont be present so we don't have to initialize the
            # widget.
            client_secret = self.fields['client_secret']
            client_secret.widget = OAuthSecretInputWidget(
                attrs=client_secret.widget.attrs,
                api_url=local_site_reverse('oauth-app-resource',
                                           local_site=instance.local_site,
                                           kwargs={'app_id': instance.pk}),
            )

    def clean_extra_data(self):
        """Prevent ``extra_data`` from being an empty string.

        Returns:
            unicode:
            Either a non-zero length string of JSON-encoded data or ``None``.
        """
        return self.cleaned_data['extra_data'] or None

    def clean_redirect_uris(self):
        """Clean the ``redirect_uris`` field.

        This method will ensure that all the URIs are valid by validating
        each of them, as well as removing unnecessary whitespace.

        Returns:
            unicode:
            A space-separated list of URIs.

        Raises:
            django.core.exceptions.ValidationError:
                Raised when one or more URIs are invalid.
        """
        validator = URIValidator()
        redirect_uris = self.cleaned_data.get('redirect_uris', '').split()
        errors = []

        for uri in redirect_uris:
            try:
                validator(uri)
            except ValidationError as e:
                errors.append(e)

        if errors:
            raise ValidationError(errors)

        # We join the list instead of returning the initial value because the
        # the original value may have had multiple adjacent whitespace
        # characters.
        return ' '.join(redirect_uris)

    def clean(self):
        """Validate the form.

        This will validate the relationship between the
        ``authorization_grant_type`` and ``redirect_uris`` fields to ensure the
        values are compatible.

        This method is very similar to
        :py:func:`Application.clean
        <oauth2_provider.models.AbstractApplication.clean>`, but the data will
        be verified by the form instead of the model to allow error messages to
        be usable by consumers of the form.

        This method does not raise an exception upon failing validation.
        Instead, it sets errors internally so that they are related to the
        pertinent field instead of the form as a whole.

        Returns:
            dict:
            The cleaned form data.
        """
        super(ApplicationChangeForm, self).clean()

        grant_type = self.cleaned_data.get('authorization_grant_type')

        # redirect_uris will not be present in cleaned_data if validation
        # failed.
        redirect_uris = self.cleaned_data.get('redirect_uris')

        if (redirect_uris is not None and
            len(redirect_uris) == 0 and
            grant_type in (Application.GRANT_AUTHORIZATION_CODE,
                           Application.GRANT_IMPLICIT)):
            # This is unfortunately not publicly exposed in Django 1.6, but it
            # is exposed in later versions (as add_error).
            self._errors['redirect_uris'] = self.error_class([
                ugettext(
                    'The "redirect_uris" field may not be blank when '
                    '"authorization_grant_type" is "%s"'
                )
                % grant_type
            ])

            self.cleaned_data.pop('redirect_uris')

        # A security-disabled app must stay disabled until its secret is
        # regenerated elsewhere.
        if (self.instance and
            self.instance.pk and
            self.instance.is_disabled_for_security and
            self.cleaned_data['enabled']):
            raise ValidationError(self.DISABLED_FOR_SECURITY_ERROR)

        # Credentials are display-only on this form; drop them so posted
        # values can never overwrite the stored ones.
        if 'client_id' in self.cleaned_data:
            del self.cleaned_data['client_id']

        if 'client_secret' in self.cleaned_data:
            del self.cleaned_data['client_secret']

        return self.cleaned_data

    class Meta:
        model = Application
        fields = '__all__'
        help_texts = {
            'authorization_grant_type': _(
                'How the authorization is granted to the application.'
            ),
            'client_secret': _(
                'The client secret. This should only be known to Review Board '
                'and your application.'
            ),
            'client_type': _(
                "The type of client. Confidential clients must be able to "
                "keep users' passwords secure."
            ),
            'name': _(
                'The application name.'
            ),
            'redirect_uris': _(
                'A list of allowed URIs to redirect to.',
            ),
            'skip_authorization': _(
                'Whether or not users will be prompted for authentication. '
                'This should most likely be unchecked.'
            ),
            'user': _(
                'The user who created the application. The selected user will '
                'be able to change these settings from their account settings.'
            ),
        }

        widgets = {
            'client_secret': CopyableTextInput(attrs={
                'readonly': True,
                'size': 100,
            }),
            'name': widgets.TextInput(attrs={'size': 60}),
            'redirect_uris': ListEditWidget(attrs={'size': 60}, sep=' '),
            'user': RelatedUserWidget(multivalued=False),
            'original_user': RelatedUserWidget(multivalued=False),
        }

        labels = {
            'authorization_grant_type': _('Authorization Grant Type'),
            'client_secret': _('Client Secret'),
            'client_type': _('Client Type'),
            'name': _('Name'),
            'redirect_uris': _('Redirect URIs'),
            'skip_authorization': _('Skip Authorization'),
            'user': _('User'),
        }
class ApplicationCreationForm(ApplicationChangeForm):
    """A form for creating an Application.

    This is meant to be used by the admin site.
    """

    def save(self, commit=True):
        """Save the form.

        This method will generate the ``client_id`` and ``client_secret``
        fields.

        Args:
            commit (bool, optional):
                Whether or not the Application should be saved to the database.

        Returns:
            reviewboard.oauth.models.Application:
            The created Application.
        """
        instance = super(ApplicationCreationForm, self).save(commit=False)

        # Credentials are always generated server-side, never taken from
        # user input (both fields are excluded in Meta below).
        instance.client_id = generate_client_id()
        instance.client_secret = generate_client_secret()

        if commit:
            instance.save()

        return instance

    class Meta(ApplicationChangeForm.Meta):
        exclude = (
            'client_id',
            'client_secret',
        )
class UserApplicationChangeForm(ApplicationChangeForm):
    """A form for an end user to change an Application.

    Admin-only and server-managed fields (owner, local site, extra data,
    authorization skipping) are excluded so end users cannot modify them.
    """

    def __init__(self, user, data=None, initial=None, instance=None):
        """Initialize the form.

        Args:
            user (django.contrib.auth.models.User):
                The user changing the form. Ignored, but included to match
                :py:meth:`UserApplicationCreationForm.__init__`.

            data (dict):
                The provided data.

            initial (dict, optional):
                The initial form values.

            instance (reviewboard.oauth.models.Application):
                The Application that is to be edited.
        """
        super(UserApplicationChangeForm, self).__init__(data=data,
                                                        initial=initial,
                                                        instance=instance)

    class Meta(ApplicationChangeForm.Meta):
        exclude = (
            'extra_data',
            'local_site',
            'original_user',
            'skip_authorization',
            'user',
        )
class UserApplicationCreationForm(ApplicationCreationForm):
    """A form for an end user to update an Application."""

    def __init__(self, user, data, initial=None, instance=None):
        """Initialize the form.

        Args:
            user (django.contrib.auth.models.User):
                The user changing the form. Ignored, but included to match
                :py:meth:`UserApplicationCreationForm.__init__`.

            data (dict):
                The provided data.

            initial (dict, optional):
                The initial form values.

            instance (reviewboard.oauth.models.Application, optional):
                The Application that is to be edited.

                This should always be ``None``.
        """
        # Creation forms never edit an existing instance.
        assert instance is None
        super(UserApplicationCreationForm, self).__init__(data=data,
                                                          initial=initial,
                                                          instance=instance)
        self.user = user

    def save(self, commit=True):
        """Save the form.

        This method will associate the user creating the application as its
        owner.

        Args:
            commit (bool, optional):
                Whether or not the Application should be saved to the database.

        Returns:
            reviewboard.oauth.models.Application:
            The created Application.
        """
        instance = super(UserApplicationCreationForm, self).save(commit=False)
        # The requesting user always becomes the owner; the 'user' field is
        # excluded from the form entirely.
        instance.user = self.user

        if commit:
            instance.save()

        return instance

    class Meta(ApplicationCreationForm.Meta):
        # Hide both the admin-only fields and the generated credentials.
        exclude = (ApplicationCreationForm.Meta.exclude +
                   UserApplicationChangeForm.Meta.exclude)
|
mit
| 8,614,490,914,715,181,000
| 33.034286
| 79
| 0.561702
| false
| 5.028282
| false
| false
| false
|
Aloomaio/googleads-python-lib
|
examples/ad_manager/v201808/creative_template_service/get_system_defined_creative_templates.py
|
1
|
2005
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all system defined creative templates.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
  """Print every system-defined creative template visible to `client`.

  Args:
    client: an initialized ad_manager.AdManagerClient.
  """
  # Initialize appropriate service.
  creative_template_service = client.GetService(
      'CreativeTemplateService', version='v201808')

  # Create a statement to select system-defined creative templates.
  statement = (ad_manager.StatementBuilder(version='v201808')
               .Where('type = :type')
               .WithBindVariable('type', 'SYSTEM_DEFINED'))

  # Retrieve a small amount of creative templates at a time, paging
  # through until all creative templates have been retrieved.
  while True:
    response = creative_template_service.getCreativeTemplatesByStatement(
        statement.ToStatement())
    if 'results' in response and len(response['results']):
      for creative_template in response['results']:
        # Print out some information for each creative template.
        print('Creative template with ID "%d" and name "%s" was found.\n' %
              (creative_template['id'], creative_template['name']))
      statement.offset += statement.limit
    else:
      break

  # Single-argument print() is identical under Python 2 and 3; the original
  # mixed a Python-2-only print statement with print() calls above.
  print('\nNumber of results found: %s' % response['totalResultSetSize'])


if __name__ == '__main__':
  # Initialize client object.
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client)
|
apache-2.0
| -8,350,638,428,948,109,000
| 37.557692
| 75
| 0.715711
| false
| 4.177083
| false
| false
| false
|
praekelt/nurseconnect
|
nurseconnect/tests/test_utils.py
|
1
|
4506
|
from freezegun import freeze_time
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.models import SiteLanguageRelation, Languages, Main
from molo.surveys.models import MoloSurveyPage, MoloSurveySubmission
from molo.surveys.tests.test_models import create_survey
from nurseconnect.utils import (
get_period_date_format,
convert_string_to_boolean_list,
get_survey_results_for_user,
)
class UtilsTestCase(TestCase):
    """Unit tests for the date-period and boolean-list parsing helpers."""

    @freeze_time("2018-02-01")
    def test_get_period_date_format_1(self):
        # Period format is YYYYMM of "today".
        self.assertEqual(
            get_period_date_format(),
            "201802"
        )

    @freeze_time("2012-12-01")
    def test_get_period_date_format_2(self):
        self.assertEqual(
            get_period_date_format(),
            "201212"
        )

    def test_convert_string_to_boolean_list_1(self):
        self.assertEqual(
            convert_string_to_boolean_list("true"),
            [True]
        )

    def test_convert_string_to_boolean_list_2(self):
        self.assertEqual(
            convert_string_to_boolean_list("true,false"),
            [True, False]
        )

    def test_convert_string_to_boolean_list_3(self):
        # Surrounding whitespace is tolerated.
        self.assertEqual(
            convert_string_to_boolean_list(" true, false"),
            [True, False]
        )

    def test_convert_string_to_boolean_list_4(self):
        # Matching is case-insensitive.
        self.assertEqual(
            convert_string_to_boolean_list("TRUE,FalSE"),
            [True, False]
        )

    def test_convert_string_to_boolean_list_5(self):
        # Unrecognised tokens are silently dropped.
        self.assertEqual(
            convert_string_to_boolean_list("true,BANANA,false"),
            [True, False]
        )

    def test_convert_string_to_boolean_list_6(self):
        self.assertEqual(
            convert_string_to_boolean_list("false , True"),
            [False, True]
        )

    def test_convert_string_to_boolean_list_7(self):
        # Only commas separate values; other delimiters yield nothing.
        self.assertEqual(
            convert_string_to_boolean_list("false;true"),
            []
        )
class SurveyUtilsTestCase(TestCase, MoloTestCaseMixin):
    """Tests for get_survey_results_for_user.

    The survey's thank_you_text holds the comma-separated correct answers
    ("true"/"false"), which the helper compares against the user's
    submitted form data.
    """

    def setUp(self):
        self.mk_main()
        self.user = User.objects.create_user(
            username='tester',
            email='tester@example.com',
            password='tester')

    def test_get_survey_results_for_user_1(self):
        # Single question; user's answer matches the correct answer.
        create_survey([
            {
                "question": "The sky is blue",
                "type": 'radio',
                "choices": ["true", "false"],
                "required": True,
                "page_break": False,
            }
        ])
        survey = MoloSurveyPage.objects.first()
        survey.thank_you_text = "true"
        survey.save()

        MoloSurveySubmission.objects.create(
            page=survey, user=self.user,
            form_data='{"the-sky-is-blue": "True"}')

        self.assertEqual(
            get_survey_results_for_user(survey, self.user),
            [{
                "question": "The sky is blue",
                "user_answer": True,
                "correct_answer": True,
            }]
        )

    def test_get_survey_results_for_user_2(self):
        # Two questions; the second user answer contradicts the key.
        create_survey([
            {
                "question": "The sky is blue",
                "type": 'radio',
                "choices": ["true", "false"],
                "required": True,
                "page_break": False,
            },
            {
                "question": "The grass is purple",
                "type": 'radio',
                "choices": ["true", "false"],
                "required": True,
                "page_break": False,
            }
        ])
        survey = MoloSurveyPage.objects.first()
        survey.thank_you_text = "true,false"
        survey.save()

        MoloSurveySubmission.objects.create(
            page=survey, user=self.user,
            form_data=('{"the-sky-is-blue": "True", '
                       '"the-grass-is-purple": "True"}'))

        self.assertEqual(
            get_survey_results_for_user(survey, self.user),
            [
                {
                    "question": "The sky is blue",
                    "user_answer": True,
                    "correct_answer": True,
                },
                {
                    "question": "The grass is purple",
                    "user_answer": True,
                    "correct_answer": False,
                },
            ]
        )
bsd-2-clause
| -6,023,230,630,583,244,000
| 29.04
| 68
| 0.517976
| false
| 3.977052
| true
| false
| false
|
Patreon/cartographer
|
cartographer/field_types/schema_relationship.py
|
1
|
4047
|
from cartographer.resources import get_resource_registry
from cartographer.resources.resource_registry import ResourceRegistryKeys
class SchemaRelationship(object):
    """
    `SchemaRelationship` describes how to translate related resources to and from JSON API and our Python models.

    `SchemaRelationship` has one primary method,
    `related_serializer`, for creating a `JSONAPISerializer` instance based on its input arguments.
    Subclasses can override this method to customize serialization behavior.

    Parsing of related resources is not currently handled by this class,
    and instead is handled by the `PostedDocument` class (or, more typically, its subclass `SchemaParser`).
    """

    # Names of the mutually exclusive constructor arguments, used for the
    # validation error message below.
    _IDENTIFIER_NAMES = ('id_attribute', 'model_property',
                         'model_method', 'serializer_method')

    def __init__(self, model_type, id_attribute=None, model_property=None,
                 model_method=None, serializer_method=None, includes=None):
        """
        NOTE: only one of id_attribute, model_property, model_method, or serializer_method should be provided

        :param model_type: the JSON API `type` string for the related model
        :param id_attribute: the foreign key column on the parent serializer model which identifies the related serializer
        :param model_property: the property on the parent serializer model which returns the related serializer
        :param model_method: the method on the parent serializer model which returns the related serializer
        :param serializer_method: the name of the method on the parent serializer object which uses this schema
            which should be called to get the child serializer.
        :param includes: related resources to include when serializing this relationship
        :return: an instance of SchemaRelationship,
            which will later be used to serialize Python into JSON API.
        """
        identifier_args = [id_attribute, model_property, model_method,
                           serializer_method]
        provided_identifiers = [identifier
                                for identifier in identifier_args
                                if identifier]
        if len(provided_identifiers) > 1:
            # BUG FIX: the original called identifier_args.join(", "), which
            # raises AttributeError (join is a str method) and listed the
            # argument *values* rather than their names.
            raise Exception("only one of [{}] should be provided".format(
                ", ".join(self._IDENTIFIER_NAMES)))

        self.model_type = model_type
        self.id_attribute = id_attribute
        self.model_property = model_property
        self.model_method = model_method
        self.serializer_method = serializer_method
        self.includes = includes

    def related_serializer(self, parent_serializer, relationship_key):
        """
        :param parent_serializer: The serializer which has our return value as a related resource
        :param relationship_key: The name by which the parent serializer knows this child
        :return: The child serializer which will later be used to serialize a related resource
        """
        # A serializer_method delegates entirely to the parent serializer.
        if self.serializer_method is not None:
            return getattr(parent_serializer, self.serializer_method)()

        # Otherwise resolve the related model via whichever identifier was
        # configured at construction time.
        model = None
        if self.id_attribute is not None:
            related_model_getter = self.resource_registry_entry().get(ResourceRegistryKeys.MODEL_GET)
            model_id = getattr(parent_serializer.model, self.id_attribute)
            if model_id is not None and related_model_getter is not None:
                model = related_model_getter(model_id)
        elif self.model_property is not None:
            model = getattr(parent_serializer.model, self.model_property)
        elif self.model_method is not None:
            model = getattr(parent_serializer.model, self.model_method)()

        if model:
            serializer_class = self.resource_registry_entry().get(ResourceRegistryKeys.SERIALIZER)
            return serializer_class(
                model,
                parent_serializer=parent_serializer,
                relationship_name=relationship_key,
                includes=self.includes
            )
        else:
            # No related model: serialize as an explicit JSON API null.
            from cartographer.serializers import JSONAPINullSerializer
            return JSONAPINullSerializer()

    def resource_registry_entry(self):
        """Return the registry entry for this relationship's model type ({} if unregistered)."""
        return get_resource_registry().get(self.model_type, {})
|
apache-2.0
| 8,312,230,826,271,928,000
| 50.227848
| 122
| 0.679763
| false
| 4.812128
| false
| false
| false
|
ArseniyK/Sunflower
|
application/operation.py
|
1
|
48288
|
import os
import gtk
import gobject
import fnmatch
from threading import Thread, Event
from gui.input_dialog import OverwriteFileDialog, OverwriteDirectoryDialog, OperationError, QuestionOperationError
from gui.operation_dialog import CopyDialog, MoveDialog, DeleteDialog, RenameDialog
from gui.error_list import ErrorList
from plugin_base.provider import Mode as FileMode, TrashError, Support as ProviderSupport
from plugin_base.monitor import MonitorSignals
from common import format_size
from queue import OperationQueue
# import constants
from gui.input_dialog import OverwriteOption
class BufferSize:
	"""Copy-buffer sizes (in bytes) used when transferring file contents."""
	LOCAL = 4096 * 1024  # 4 MiB chunks for local file systems
	REMOTE = 100 * 1024  # smaller 100 KiB chunks for remote providers
class Option:
	"""Indexes into the operation options tuple passed to Operation."""
	FILE_TYPE = 0
	DESTINATION = 1
	SET_OWNER = 2
	SET_MODE = 3
	SET_TIMESTAMP = 4
	SILENT = 5            # run without prompting the user
	SILENT_MERGE = 6      # default answer for directory merges in silent mode
	SILENT_OVERWRITE = 7  # default answer for file overwrites in silent mode
class Skip:
	"""Keys for the response cache of 'skip all' answers, one per failing action."""
	TRASH = 0
	REMOVE = 1
	WRITE = 2
	CREATE = 3
	MODE_SET = 4
	MOVE = 5
	RENAME = 6
	READ = 7
class OperationType:
	"""Identifiers for the concrete operation kinds (used e.g. by the queue/UI)."""
	COPY = 0
	MOVE = 1
	DELETE = 2
	RENAME = 3
	LINK = 4
class Operation(Thread):
"""Parent class for all operation threads"""
	def __init__(self, application, source, destination=None, options=None, destination_path=None):
		"""Set up state shared by all concrete operation threads.

		application -- main application object (options, windows)
		source -- provider to read items from
		destination -- optional target provider (copy/move only)
		options -- optional sequence indexed by the Option constants
		destination_path -- optional explicit target path override
		"""
		Thread.__init__(self, target=self)
		# events used to pause/resume and to abort the worker thread
		self._can_continue = Event()
		self._abort = Event()
		self._application = application
		self._source = source
		self._destination = destination
		self._options = options
		self._source_queue = None
		self._destination_queue = None
		# remembered "apply to all" answers from overwrite/merge dialogs
		self._merge_all = None
		self._overwrite_all = None
		self._response_cache = {}
		# operation queue
		self._operation_queue = None
		self._operation_queue_name = None
		# daemonize
		self.daemon = True
		# create operation dialog
		self._dialog = None
		self._create_dialog()
		self._dir_list = []
		self._file_list = []
		self._error_list = []
		self._selection_list = []
		# store initial paths
		self._source_path = self._source.get_path()
		if self._destination is not None:
			self._destination_path = destination_path or self._destination.get_path()
		# start unpaused
		self._can_continue.set()
	def _create_dialog(self):
		"""Create operation dialog.

		Hook for subclasses; the base class shows no dialog.
		"""
		pass
	def _destroy_ui(self):
		"""Destroy user interface"""
		if self._dialog is not None:
			# GTK calls from this worker thread must hold the GDK lock
			with gtk.gdk.lock:
				self._dialog.destroy()
def _get_free_space_input(self, needed, available):
"""Get user input when there is not enough space"""
size_format = self._application.options.get('size_format')
space_needed = format_size(needed, size_format)
space_available = format_size(available, size_format)
if self._options is not None and self._options[Option.SILENT]:
# silent option is enabled, we skip operation by default
self._error_list.append(_(
'Aborted. Not enough free space on target file system.\n'
'Needed: {0}\n'
'Available: {1}'
).format(space_needed, space_available))
should_continue = False
else:
# ask user what to do
with gtk.gdk.lock:
dialog = gtk.MessageDialog(
self._dialog.get_window(),
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING,
gtk.BUTTONS_YES_NO,
_(
'Target file system does not have enough '
'free space for this operation to continue.\n\n'
'Needed: {0}\n'
'Available: {1}\n\n'
'Do you wish to continue?'
).format(space_needed, space_available)
)
dialog.set_default_response(gtk.RESPONSE_YES)
result = dialog.run()
dialog.destroy()
should_continue = result == gtk.RESPONSE_YES
return should_continue
def _get_merge_input(self, path):
"""Get merge confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
merge = self._options[Option.SILENT_MERGE]
self._merge_all = merge
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OverwriteDirectoryDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
merge = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._merge_all = merge
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
return merge # return only response for current directory
def _get_overwrite_input(self, path):
"""Get overwrite confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
overwrite = self._options[Option.SILENT_OVERWRITE]
self._overwrite_all = overwrite
options = (False, '', True) # no rename, apply to all
else:
# we are not in silent mode, ask user what to do
with gtk.gdk.lock:
dialog = OverwriteFileDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
overwrite = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._overwrite_all = overwrite
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
# pass options from input dialog
options = result[1]
return overwrite, options
def _get_write_error_input(self, error):
"""Get user response for write error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There is a problem writing data to destination '
'file. What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.WRITE] = response
# abort operation if user requested
if response == OperationError.RESPONSE_CANCEL:
self.cancel()
return response
def _get_create_error_input(self, error, is_directory=False):
"""Get user response for create error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
if not is_directory:
# set message for file
dialog.set_message(_(
'An error occurred while trying to create specified '
'file. What would you like to do?'
))
else:
# set message for directory
dialog.set_message(_(
'An error occurred while trying to create specified '
'directory. What would you like to do?'
))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.CREATE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_mode_set_error_input(self, error):
"""Get user response for mode set error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'Problem with setting path parameter for '
'specified path. What would you like to do?'
))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MODE_SET] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_remove_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem removing specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.REMOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_trash_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = QuestionOperationError(self._application)
dialog.set_message(_(
'There was a problem trashing specified path. '
'Would you like to try removing it instead?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.TRASH] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_move_error_input(self, error):
"""Get user response for move error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem moving specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_rename_error_input(self, error):
"""Get user response for rename error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem renaming specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.RENAME] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_read_error_input(self, error):
"""Get user response for directory listing error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem with reading specified directory. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.READ] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def set_selection(self, item_list):
"""Set list of selected items"""
self._selection_list.extend(item_list)
def set_operation_queue(self, queue_name):
"""Set operation to wait for queue."""
if queue_name is None:
return
# create new queue
self._operation_queue = Event()
self._operation_queue_name = queue_name
# schedule operation
OperationQueue.add(queue_name, self._operation_queue)
def set_source_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._source_queue = queue
def set_destination_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._destination_queue = queue
def pause(self):
"""Pause current operation"""
self._can_continue.clear()
def resume(self):
"""Resume current operation"""
self._can_continue.set()
def cancel(self):
"""Set an abort switch"""
self._abort.set()
# release lock set by the pause
if not self._can_continue.is_set():
self.resume()
class CopyOperation(Operation):
	"""Operation thread used for copying files.

	Collects directories and files to process from the current selection,
	creates the directory tree on destination, then copies file contents
	chunk by chunk while reporting progress to the copy dialog.
	"""

	def __init__(self, application, source, destination, options, destination_path=None):
		Operation.__init__(self, application, source, destination, options, destination_path)

		self._merge_all = None
		self._overwrite_all = None
		self._dir_list_create = []  # subset of _dir_list that must be created on destination

		self._total_count = 0
		self._total_size = 0
		self._buffer_size = 0

		# cache settings
		should_reserve = self._application.options.section('operations').get('reserve_size')
		supported_by_provider = ProviderSupport.RESERVE_SIZE in self._destination.get_support()
		self._reserve_size = should_reserve and supported_by_provider

		# detect buffer size based on destination block size when both ends are local
		if self._source.is_local and self._destination.is_local:
			system_stat = self._destination.get_system_size(self._destination_path)

			if system_stat.block_size:
				self._buffer_size = system_stat.block_size * 1024
			else:
				self._buffer_size = BufferSize.LOCAL
		else:
			self._buffer_size = BufferSize.REMOTE

	def _create_dialog(self):
		"""Create progress dialog"""
		self._dialog = CopyDialog(self._application, self)

	def _update_status(self, status):
		"""Set status and reset progress bars"""
		self._dialog.set_status(status)
		self._dialog.set_current_file("")
		self._dialog.set_current_file_fraction(0)

	def _get_lists(self):
		"""Find all files for copying and populate directory/file lists."""
		gobject.idle_add(self._update_status, _('Searching for files...'))

		# exclude files already selected with parent directory
		for file_name in self._selection_list:
			self._selection_list = filter(
					lambda item: not item.startswith(file_name + os.path.sep),
					self._selection_list
				)

		# traverse through the rest of the items
		for item in self._selection_list:
			if self._abort.is_set(): break  # abort operation if requested
			self._can_continue.wait()  # pause lock

			# update current file label
			gobject.idle_add(self._dialog.set_current_file, item)
			gobject.idle_add(self._dialog.pulse)

			if os.path.sep in item:
				relative_path, item = os.path.split(item)
				source_path = os.path.join(self._source_path, relative_path)
			else:
				relative_path = None
				source_path = self._source_path

			if self._source.is_dir(item, relative_to=source_path):
				# item is directory
				can_procede = True
				can_create = True

				# check if directory exists on destination
				if self._destination.exists(item, relative_to=self._destination_path):
					can_create = False

					if self._merge_all is not None:
						can_procede = self._merge_all
					else:
						can_procede = self._get_merge_input(item)

				# if user didn't skip directory, scan and update lists
				if can_procede:
					self._dir_list.append((item, relative_path))
					if can_create: self._dir_list_create.append((item, relative_path))
					self._scan_directory(item, relative_path)

			elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
				# item is a file, get stats and update lists
				item_stat = self._source.get_stat(item, relative_to=source_path)

				gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
				gobject.idle_add(self._dialog.increment_total_count, 1)

				self._total_count += 1
				self._total_size += item_stat.size

				self._file_list.append((item, relative_path))

	def _set_mode(self, path, mode):
		"""Set mode for specified path, retrying or skipping on user request."""
		if not self._options[Option.SET_MODE]: return

		try:
			# try to set mode for specified path
			self._destination.set_mode(
								path,
								mode,
								relative_to=self._destination_path
							)

			# push event to the queue
			if self._destination_queue is not None:
				event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
				self._destination_queue.put(event, False)

		except StandardError as error:
			# problem setting mode, ask user
			if Skip.MODE_SET in self._response_cache:
				response = self._response_cache[Skip.MODE_SET]
			else:
				response = self._get_mode_set_error_input(error)

			# try to set mode again
			if response == OperationError.RESPONSE_RETRY:
				self._set_mode(path, mode)

			return

	def _set_owner(self, path, user_id, group_id):
		"""Set owner and group for specified path."""
		if not self._options[Option.SET_OWNER]: return

		try:
			# try set owner of specified path
			self._destination.set_owner(
								path,
								user_id,
								group_id,
								relative_to=self._destination_path
							)

			# push event to the queue
			if self._destination_queue is not None:
				event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
				self._destination_queue.put(event, False)

		except StandardError as error:
			# problem with setting owner, ask user
			if Skip.MODE_SET in self._response_cache:
				response = self._response_cache[Skip.MODE_SET]
			else:
				response = self._get_mode_set_error_input(error)

			# try to set owner again
			if response == OperationError.RESPONSE_RETRY:
				self._set_owner(path, user_id, group_id)

			return

	def _set_timestamp(self, path, access_time, modify_time, change_time):
		"""Set timestamps for specified path."""
		if not self._options[Option.SET_TIMESTAMP]: return

		try:
			# try setting timestamp
			self._destination.set_timestamp(
								path,
								access_time,
								modify_time,
								change_time,
								relative_to=self._destination_path
							)

			# push event to the queue
			if self._destination_queue is not None:
				event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
				self._destination_queue.put(event, False)

		except StandardError as error:
			# problem with setting timestamp, ask user
			if Skip.MODE_SET in self._response_cache:
				response = self._response_cache[Skip.MODE_SET]
			else:
				response = self._get_mode_set_error_input(error)

			# try to set timestamp again
			if response == OperationError.RESPONSE_RETRY:
				self._set_timestamp(path, access_time, modify_time, change_time)

			return

	def _scan_directory(self, directory, relative_path=None):
		"""Recursively scan directory and populate list"""
		source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)

		try:
			# try to get listing from directory
			item_list = self._source.list_dir(directory, relative_to=source_path)

		except StandardError as error:
			# problem with reading specified directory, ask user
			if Skip.READ in self._response_cache:
				response = self._response_cache[Skip.READ]
			else:
				response = self._get_read_error_input(error)

			# try to scan specified directory again
			if response == OperationError.RESPONSE_RETRY:
				self._scan_directory(directory, relative_path)

			return

		for item in item_list:
			if self._abort.is_set(): break  # abort operation if requested
			self._can_continue.wait()  # pause lock

			gobject.idle_add(self._dialog.set_current_file, os.path.join(directory, item))
			gobject.idle_add(self._dialog.pulse)

			full_name = os.path.join(directory, item)

			# item is a directory, scan it
			if self._source.is_dir(full_name, relative_to=source_path):
				can_procede = True
				can_create = True

				if self._destination.exists(full_name, relative_to=self._destination_path):
					can_create = False

					if self._merge_all is not None:
						can_procede = self._merge_all
					else:
						can_procede = self._get_merge_input(full_name)

				if can_procede:
					# allow processing specified directory
					# NOTE(review): source_path here is absolute, unlike the
					# relative_path stored by _get_lists; it still resolves
					# because os.path.join discards the base for absolute
					# components — confirm before changing
					self._dir_list.append((full_name, source_path))
					if can_create: self._dir_list_create.append((full_name, source_path))

					self._scan_directory(full_name, relative_path)

			elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
				# item is a file, update global statistics
				item_stat = self._source.get_stat(full_name, relative_to=source_path)

				gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
				gobject.idle_add(self._dialog.increment_total_count, 1)

				self._total_count += 1
				self._total_size += item_stat.size

				self._file_list.append((full_name, relative_path))

	def _create_directory(self, directory, relative_path=None):
		"""Create specified directory on destination, preserving mode/owner."""
		source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
		file_stat = self._source.get_stat(directory, relative_to=source_path)
		mode = file_stat.mode if self._options[Option.SET_MODE] else 0o755

		try:
			# try to create a directory
			if self._destination.exists(directory, relative_to=self._destination_path):
				if not self._destination.is_dir(directory, relative_to=self._destination_path):
					raise StandardError(_(
							'Unable to create directory because file with the same name '
							'already exists in target directory.'
						))

			else:
				# inode with specified name doesn't exist, create directory
				self._destination.create_directory(
												directory,
												mode,
												relative_to=self._destination_path
											)
				# push event to the queue
				if self._destination_queue is not None:
					event = (MonitorSignals.CREATED, directory, None)
					self._destination_queue.put(event, False)

		except StandardError as error:
			# there was a problem creating directory
			if Skip.CREATE in self._response_cache:
				response = self._response_cache[Skip.CREATE]
			else:
				response = self._get_create_error_input(error, True)

			# try to create directory again
			if response == OperationError.RESPONSE_RETRY:
				# FIX: pass relative_path through so the retry resolves
				# against the same base as the original attempt
				self._create_directory(directory, relative_path)

			# exit method
			return

		# set owner
		self._set_owner(directory, file_stat.user_id, file_stat.group_id)

	def _copy_file(self, file_name, relative_path=None):
		"""Copy file content from source to destination.

		Handles overwrite/rename prompts, optional size reservation,
		chunked transfer with progress updates, and applies mode, owner
		and timestamps after a successful copy.
		"""
		can_procede = True
		source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
		dest_file = file_name
		sh = None
		dh = None

		# check if destination file exists
		if self._destination.exists(file_name, relative_to=self._destination_path):
			if self._overwrite_all is not None:
				can_procede = self._overwrite_all

			else:
				can_procede, options = self._get_overwrite_input(file_name)

				# get new name if user specified
				if options[OverwriteOption.RENAME]:
					dest_file = os.path.join(
					                    os.path.dirname(file_name),
					                    options[OverwriteOption.NEW_NAME]
					                )

				elif source_path == self._destination_path:
					can_procede = False

		# if user skipped this file return
		if not can_procede:
			self._file_list.pop(self._file_list.index((file_name, relative_path)))

			# update total size
			file_stat = self._source.get_stat(file_name, relative_to=source_path)
			gobject.idle_add(self._dialog.increment_current_size, file_stat.size)
			return

		try:
			# get file stats
			destination_size = 0
			file_stat = self._source.get_stat(file_name, relative_to=source_path, extended=True)

			# get file handles
			sh = self._source.get_file_handle(file_name, FileMode.READ, relative_to=source_path)
			dh = self._destination.get_file_handle(dest_file, FileMode.WRITE, relative_to=self._destination_path)

			# report error properly
			if sh is None:
				raise StandardError('Unable to open source file in read mode.')

			if dh is None:
				raise StandardError('Unable to open destination file in write mode.')

			# reserve file size
			if self._reserve_size:
				# try to reserve file size in advance,
				# can be slow on memory cards and network
				try:
					dh.truncate(file_stat.size)

				except:
					dh.truncate()

			else:
				# just truncate file to 0 size in case source file is smaller
				dh.truncate()

			dh.seek(0)

			# push event to the queue
			if self._destination_queue is not None:
				event = (MonitorSignals.CREATED, dest_file, None)
				self._destination_queue.put(event, False)

		except StandardError as error:
			# close handles if they exist
			# FIX: was closing the source handle twice, leaking the
			# destination handle
			if hasattr(sh, 'close'): sh.close()
			if hasattr(dh, 'close'): dh.close()

			if Skip.CREATE in self._response_cache:
				response = self._response_cache[Skip.CREATE]
			else:
				response = self._get_create_error_input(error)

			# try to create file again and copy contents
			if response == OperationError.RESPONSE_RETRY:
				# FIX: keep relative_path so the retry resolves the source
				# against the same base
				self._copy_file(dest_file, relative_path)

			else:
				# user didn't want to retry, remove file from list
				self._file_list.pop(self._file_list.index((file_name, relative_path)))

				# remove amount of copied bytes from total size
				gobject.idle_add(self._dialog.increment_current_size, -destination_size)

			# exit method
			return

		while True:
			if self._abort.is_set(): break
			self._can_continue.wait()  # pause lock

			data = sh.read(self._buffer_size)

			if data:
				try:
					# try writing data to destination
					dh.write(data)

				except IOError as error:
					# handle error
					if Skip.WRITE in self._response_cache:
						response = self._response_cache[Skip.WRITE]
					else:
						response = self._get_write_error_input(error)

					# try to write data again
					if response == OperationError.RESPONSE_RETRY:
						gobject.idle_add(self._dialog.increment_current_size, -dh.tell())

						# FIX: close both handles (was closing sh twice)
						if hasattr(sh, 'close'): sh.close()
						if hasattr(dh, 'close'): dh.close()

						# FIX: keep relative_path on retry
						self._copy_file(dest_file, relative_path)

					return

				destination_size += len(data)
				gobject.idle_add(self._dialog.increment_current_size, len(data))

				if file_stat.size > 0:  # ensure we don't end up with error on 0 size files
					gobject.idle_add(
							self._dialog.set_current_file_fraction,
							destination_size / float(file_stat.size)
						)
				else:
					gobject.idle_add(self._dialog.set_current_file_fraction, 1)

				# push event to the queue
				if self._destination_queue is not None:
					event = (MonitorSignals.CHANGED, dest_file, None)
					self._destination_queue.put(event, False)

			else:
				sh.close()
				dh.close()

				# set file parameters
				self._set_mode(dest_file, file_stat.mode)
				self._set_owner(dest_file, file_stat.user_id, file_stat.group_id)
				self._set_timestamp(
								dest_file,
								file_stat.time_access,
								file_stat.time_modify,
								file_stat.time_change
							)

				break

	def _create_directory_list(self):
		"""Create all directories in list"""
		gobject.idle_add(self._update_status, _('Creating directories...'))

		for number, directory in enumerate(self._dir_list_create, 0):
			if self._abort.is_set(): break  # abort operation if requested
			self._can_continue.wait()  # pause lock

			gobject.idle_add(self._dialog.set_current_file, directory[0])
			self._create_directory(directory[0], directory[1])  # create directory

			# FIX: progress fraction now uses the list actually being
			# iterated instead of the full directory list
			gobject.idle_add(
					self._dialog.set_current_file_fraction,
					float(number) / len(self._dir_list_create)
				)

	def _copy_file_list(self):
		"""Copy list of files to destination path"""
		# update status
		gobject.idle_add(self._update_status, _('Copying files...'))

		item_list = self._file_list[:]  # work on a copy; _copy_file may mutate _file_list

		# copy all the files in list
		for file_name, source_path in item_list:
			# abort operation if requested
			if self._abort.is_set(): break
			self._can_continue.wait()  # pause lock

			# copy file
			gobject.idle_add(self._dialog.set_current_file, file_name)
			self._copy_file(file_name, source_path)
			gobject.idle_add(self._dialog.increment_current_count, 1)

	def run(self):
		"""Main thread method, this is where all the stuff is happening"""
		# set dialog info
		with gtk.gdk.lock:
			self._dialog.set_source(self._source_path)
			self._dialog.set_destination(self._destination_path)

		# wait for operation queue if needed
		if self._operation_queue is not None:
			self._operation_queue.wait()

		# get list of items to copy
		self._get_lists()

		# check for available free space
		system_info = self._destination.get_system_size(self._destination_path)

		if ProviderSupport.SYSTEM_SIZE in self._destination.get_support() \
		and self._total_size > system_info.size_available:
			should_continue = self._get_free_space_input(self._total_size, system_info.size_available)

			# exit if user chooses to
			if not should_continue:
				self.cancel()

		# clear selection on source directory
		with gtk.gdk.lock:
			parent = self._source.get_parent()
			if self._source_path == parent.path:
				parent.deselect_all()

		# perform operation
		self._create_directory_list()
		self._copy_file_list()

		# notify user if window is not focused
		with gtk.gdk.lock:
			if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
				notify_manager = self._application.notification_manager

				title = _('Copy Operation')
				message = ngettext(
								'Copying of {0} item from "{1}" to "{2}" is completed!',
								'Copying of {0} items from "{1}" to "{2}" is completed!',
								len(self._file_list) + len(self._dir_list)
							).format(
								len(self._file_list) + len(self._dir_list),
								os.path.basename(self._source_path),
								os.path.basename(self._destination_path)
							)

				# queue notification
				notify_manager.notify(title, message)

			# show error list if needed
			if len(self._error_list) > 0:
				error_list = ErrorList(self._dialog)
				error_list.set_operation_name(_('Copy Operation'))
				error_list.set_source(self._source_path)
				error_list.set_destination(self._destination_path)
				error_list.set_errors(self._error_list)
				error_list.show()

		# destroy dialog
		self._destroy_ui()

		# start next operation
		if self._operation_queue is not None:
			OperationQueue.start_next(self._operation_queue_name)
class MoveOperation(CopyOperation):
"""Operation thread used for moving files"""
def _remove_path(self, path, item_list, relative_path=None):
"""Remove path specified path."""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
try:
# try removing specified path
self._source.remove_path(path, relative_to=source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except StandardError as error:
# problem removing path, ask user what to do
if Skip.REMOVE in self._response_cache:
response = self._response_cache[Skip.REMOVE]
else:
response = self._get_remove_error_input(error)
# try removing path again
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path, item_list)
else:
# user didn't want to retry, remove path from item_list
item_list.pop(item_list.index(path))
def _create_dialog(self):
"""Create progress dialog"""
self._dialog = MoveDialog(self._application, self)
def _move_file(self, file_name, relative_path=None):
"""Move specified file using provider rename method"""
can_procede = True
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
dest_file = file_name
# check if destination file exists
if self._destination.exists(file_name, relative_to=self._destination_path):
if self._overwrite_all is not None:
can_procede = self._overwrite_all
else:
can_procede, options = self._get_overwrite_input(file_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
dest_file = os.path.join(
os.path.dirname(file_name),
options[OverwriteOption.NEW_NAME]
)
# if user skipped this file return
if not can_procede:
self._file_list.pop(self._file_list.index((file_name, relative_path)))
return
# move file
try:
self._source.move_path(
file_name,
os.path.join(self._destination_path, dest_file),
relative_to=source_path
)
# push events to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, file_name, None)
self._source_queue.put(event, False)
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, dest_file, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with moving file, ask user what to do
if Skip.MOVE in self._response_cache:
response = self._response_cache[Skip.MOVE]
else:
response = self._get_move_error_input(error)
# try moving file again
if response == OperationError.RESPONSE_RETRY:
self._move_file(dest_file)
else:
# user didn't want to retry, remove file from list
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# exit method
return
def _move_file_list(self):
"""Move files from the list"""
gobject.idle_add(self._update_status, _('Moving files...'))
item_list = self._file_list[:]
for file_name, source_path in item_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# move file
gobject.idle_add(self._dialog.set_current_file, file_name)
self._move_file(file_name, source_path)
gobject.idle_add(self._dialog.increment_current_count, 1)
	def _delete_file_list(self):
		"""Remove files from source list.

		Used after a cross-device copy; removes the copied source files
		and then the now-empty source directories.
		"""
		gobject.idle_add(self._update_status, _('Deleting source files...'))

		item_list = self._file_list[:]  # iterate a copy; _remove_path may mutate _file_list

		for number, item in enumerate(item_list, 0):
			if self._abort.is_set(): break  # abort operation if requested
			self._can_continue.wait()  # pause lock

			# remove path; item is a (file_name, relative_path) tuple
			gobject.idle_add(self._dialog.set_current_file, item[0])
			self._remove_path(item[0], self._file_list, item[1])

			# update current count
			gobject.idle_add(
					self._dialog.set_current_file_fraction,
					float(number) / len(item_list)
				)

		self._delete_directories()
	def _delete_directories(self):
		"""Remove empty directories after moving files.

		Processes the directory list deepest-first and removes only
		directories whose listing comes back empty.
		"""
		gobject.idle_add(self._update_status, _('Deleting source directories...'))

		dir_list = self._dir_list[:]
		dir_list.reverse()  # remove deepest directories first

		for number, directory in enumerate(dir_list, 0):
			source_path = self._source_path if directory[1] is None else os.path.join(self._source_path, directory[1])
			directory = directory[0]
			if self._abort.is_set(): break  # abort operation if requested
			self._can_continue.wait()  # pause lock

			if self._source.exists(directory, relative_to=source_path):
				gobject.idle_add(self._dialog.set_current_file, directory)

				# try to get a list of items inside of directory
				try:
					item_list = self._source.list_dir(directory, relative_to=source_path)

				except:
					item_list = None

				# remove directory if empty
				if item_list is not None and len(item_list) == 0:
					# NOTE(review): source_path passed as relative_path is
					# already absolute; _remove_path's os.path.join discards
					# its base for absolute components, so this resolves —
					# confirm before changing
					self._remove_path(directory, dir_list, relative_path=source_path)

			# update current count
			if len(dir_list) > 0:
				gobject.idle_add(
						self._dialog.set_current_file_fraction,
						float(number) / len(dir_list)
					)

			else:
				# prevent division by zero
				gobject.idle_add(self._dialog.set_current_file_fraction, 1)
def _check_devices(self):
"""Check if source and destination are on the same file system"""
dev_source = self._source.get_stat(self._source.get_path(), extended=True).device
dev_destination = self._destination.get_stat(self._destination.get_path(), extended=True).device
return dev_source == dev_destination
def run(self):
    """Main thread method.

    Overridden to provide a smarter move: when source and destination
    share a file system the items are moved (renamed) in place, otherwise
    they are copied first and the source files and directories are
    removed afterwards.
    """
    # set dialog info
    with gtk.gdk.lock:
        self._dialog.set_source(self._source_path)
        self._dialog.set_destination(self._destination_path)
    # wait for operation queue if needed
    if self._operation_queue is not None:
        self._operation_queue.wait()
    # get list of items
    self._get_lists()
    # check for available free space; only relevant when data actually
    # crosses file systems (a same-device move consumes no extra space)
    system_info = self._destination.get_system_size(self._destination_path)
    if self._total_size > system_info.size_available and not self._check_devices():
        should_continue = self._get_free_space_input(self._total_size, system_info.size_available)
        # exit if user chooses to; cancel() sets the abort flag which the
        # per-item loops below check before each item
        if not should_continue:
            self.cancel()
    # clear selection on source directory
    with gtk.gdk.lock:
        parent = self._source.get_parent()
        if self._source_path == parent.path:
            parent.deselect_all()
    # create directories
    self._create_directory_list()
    # copy/move files
    if self._check_devices():
        # both paths are on the same file system, move instead of copy
        self._move_file_list()
        self._delete_directories()
    else:
        # paths are located on different file systems, copy and remove
        self._copy_file_list()
        self._delete_file_list()
    # notify user if window is not focused
    with gtk.gdk.lock:
        if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
            notify_manager = self._application.notification_manager
            title = _('Move Operation')
            message = ngettext(
                    'Moving of {0} item from "{1}" to "{2}" is completed!',
                    'Moving of {0} items from "{1}" to "{2}" is completed!',
                    len(self._file_list) + len(self._dir_list)
                ).format(
                    len(self._file_list) + len(self._dir_list),
                    os.path.basename(self._source_path),
                    os.path.basename(self._destination_path)
                )
            # queue notification
            notify_manager.notify(title, message)
    # show error list if needed
    if len(self._error_list) > 0:
        error_list = ErrorList(self._dialog)
        error_list.set_operation_name(_('Move Operation'))
        error_list.set_source(self._source_path)
        error_list.set_destination(self._destination_path)
        error_list.set_errors(self._error_list)
        error_list.show()
    # destroy dialog
    self._destroy_ui()
    # start next operation
    if self._operation_queue is not None:
        OperationQueue.start_next(self._operation_queue_name)
class DeleteOperation(Operation):
    """Operation thread used for deleting files.

    Items are either removed permanently or moved to the trash, depending
    on user preferences, provider support and the force-delete flag.
    """

    def __init__(self, application, provider):
        Operation.__init__(self, application, provider)
        # allow users to force deleting items instead of trashing them
        self._force_delete = False

    def _create_dialog(self):
        """Create operation dialog"""
        self._dialog = DeleteDialog(self._application, self)

    def _remove_path(self, path):
        """Permanently remove specified path, retrying on user request."""
        try:
            # try removing specified path
            self._source.remove_path(path, relative_to=self._source_path)
            # push event to the queue so monitors can refresh their views
            if self._source_queue is not None:
                event = (MonitorSignals.DELETED, path, None)
                self._source_queue.put(event, False)
        except StandardError as error:
            # problem removing path, ask user what to do
            if Skip.REMOVE in self._response_cache:
                response = self._response_cache[Skip.REMOVE]
            else:
                response = self._get_remove_error_input(error)
            # try removing path again
            if response == OperationError.RESPONSE_RETRY:
                self._remove_path(path)

    def _trash_path(self, path):
        """Move specified path to the trash, retrying on user request."""
        try:
            # try trashing specified path
            self._source.trash_path(path, relative_to=self._source_path)
            # push event to the queue so monitors can refresh their views
            if self._source_queue is not None:
                event = (MonitorSignals.DELETED, path, None)
                self._source_queue.put(event, False)
        except TrashError as error:
            # problem trashing path, ask user what to do
            if Skip.TRASH in self._response_cache:
                response = self._response_cache[Skip.TRASH]
            else:
                response = self._get_trash_error_input(error)
            # try moving path to trash again
            # FIX: previously retried with _remove_path, which would
            # permanently delete a file the user asked to trash
            if response == OperationError.RESPONSE_RETRY:
                self._trash_path(path)

    def set_force_delete(self, force):
        """Set forced deletion instead of trashing files"""
        self._force_delete = force

    def run(self):
        """Main thread method, this is where all the stuff is happening"""
        self._file_list = self._selection_list[:]  # use predefined selection list

        # wait for operation queue if needed
        if self._operation_queue is not None:
            self._operation_queue.wait()

        with gtk.gdk.lock:
            # clear selection on source directory
            parent = self._source.get_parent()
            if self._source_path == parent.path:
                parent.deselect_all()

        # select removal method: trash when enabled and supported by the
        # provider, permanent removal otherwise or when explicitly forced
        trash_files = self._application.options.section('operations').get('trash_files')
        trash_available = ProviderSupport.TRASH in self._source.get_support()
        if self._force_delete or not (trash_files and trash_available):
            remove_method = self._remove_path
        else:
            remove_method = self._trash_path

        # remove them
        for index, item in enumerate(self._file_list, 1):
            if self._abort.is_set(): break  # abort operation if requested
            self._can_continue.wait()  # pause lock
            gobject.idle_add(self._dialog.set_current_file, item)
            remove_method(item)
            # update current count
            if len(self._file_list) > 0:
                gobject.idle_add(
                        self._dialog.set_current_file_fraction,
                        float(index) / len(self._file_list)
                    )
            else:
                # prevent division by zero
                gobject.idle_add(self._dialog.set_current_file_fraction, 1)

        # notify user if window is not focused
        with gtk.gdk.lock:
            if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
                notify_manager = self._application.notification_manager
                title = _('Delete Operation')
                message = ngettext(
                        'Removal of {0} item from "{1}" is completed!',
                        'Removal of {0} items from "{1}" is completed!',
                        len(self._file_list)
                    ).format(
                        len(self._file_list),
                        os.path.basename(self._source_path)
                    )
                # queue notification
                notify_manager.notify(title, message)

        # destroy dialog
        self._destroy_ui()

        # start next operation
        if self._operation_queue is not None:
            OperationQueue.start_next(self._operation_queue_name)
class RenameOperation(Operation):
    """Thread used for renaming a large number of files."""

    def __init__(self, application, provider, path, file_list):
        Operation.__init__(self, application, provider)
        self._destination = provider
        self._destination_path = path
        self._source_path = path
        # list of (old_name, new_name) pairs to process
        self._file_list = file_list

    def _create_dialog(self):
        """Create operation dialog"""
        self._dialog = RenameDialog(self._application, self)

    def _rename_path(self, old_name, new_name, index):
        """Rename specified path, resolving name conflicts via user input."""
        can_proceed = True
        try:
            # check if specified path already exists
            if self._destination.exists(new_name, relative_to=self._source_path):
                can_proceed, options = self._get_overwrite_input(new_name)
                # get new name if user specified
                if options[OverwriteOption.RENAME]:
                    new_name = os.path.join(
                            os.path.dirname(new_name),
                            options[OverwriteOption.NEW_NAME]
                        )
            if not can_proceed:
                # user canceled overwrite, skip the file
                self._file_list.pop(index)
                return
            else:
                # rename path
                self._source.rename_path(old_name, new_name, relative_to=self._source_path)
                # push events to the queue so monitors can refresh their views
                # (FIX: signal was MonitorSignals.DELETE, inconsistent with
                # the DELETED signal used by every other operation here)
                if self._source_queue is not None:
                    delete_event = (MonitorSignals.DELETED, old_name, None)
                    create_event = (MonitorSignals.CREATED, new_name, None)
                    self._source_queue.put(delete_event, False)
                    self._source_queue.put(create_event, False)
        except StandardError as error:
            # problem renaming path, ask user what to do
            if Skip.RENAME in self._response_cache:
                response = self._response_cache[Skip.RENAME]
            else:
                response = self._get_rename_error_input(error)
            # try renaming path again
            # FIX: previously retried with self._remove_path(old_name,
            # new_name, index), a method that doesn't take this signature,
            # so a retry could never actually re-attempt the rename
            if response == OperationError.RESPONSE_RETRY:
                self._rename_path(old_name, new_name, index)
            else:
                # user didn't want to retry, remove path from list
                self._file_list.pop(index)

    def run(self):
        """Main thread method, this is where all the stuff is happening"""
        # wait for operation queue if needed
        if self._operation_queue is not None:
            self._operation_queue.wait()

        # NOTE(review): _rename_path may pop entries from self._file_list
        # while this loop iterates it, which skips the following entry;
        # preserved as-is because the progress math below and the final
        # notification intentionally share the same shrinking list.
        for index, item in enumerate(self._file_list, 1):
            if self._abort.is_set(): break  # abort operation if requested
            self._can_continue.wait()  # pause lock
            gobject.idle_add(self._dialog.set_current_file, item[0])
            self._rename_path(item[0], item[1], index-1)
            # update current count
            if len(self._file_list) > 0:
                gobject.idle_add(
                        self._dialog.set_current_file_fraction,
                        float(index) / len(self._file_list)
                    )
            else:
                # prevent division by zero
                gobject.idle_add(self._dialog.set_current_file_fraction, 1)

        # notify user if window is not focused
        with gtk.gdk.lock:
            if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
                notify_manager = self._application.notification_manager
                title = _('Rename Operation')
                message = ngettext(
                        'Rename of {0} item from "{1}" is completed!',
                        'Rename of {0} items from "{1}" is completed!',
                        len(self._file_list)
                    ).format(
                        len(self._file_list),
                        os.path.basename(self._source_path)
                    )
                # queue notification
                notify_manager.notify(title, message)

        # destroy dialog
        self._destroy_ui()

        # start next operation
        if self._operation_queue is not None:
            OperationQueue.start_next(self._operation_queue_name)
|
gpl-3.0
| -8,770,339,951,091,888,000
| 29.236694
| 114
| 0.676711
| false
| 3.389583
| false
| false
| false
|
sujithshankar/anaconda
|
pyanaconda/constants.py
|
1
|
6817
|
#
# constants.py: anaconda constants
#
# Copyright (C) 2001 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Used for digits, ascii_letters, punctuation constants
import string # pylint: disable=deprecated-module
from pyanaconda.i18n import N_
# Use -1 to indicate that the selinux configuration is unset
SELINUX_DEFAULT = -1

# where to look for 3rd party addons
ADDON_PATHS = ["/usr/share/anaconda/addons"]

from pykickstart.constants import AUTOPART_TYPE_LVM

# common string needs to be easy to change
from pyanaconda import product
productName = product.productName
productVersion = product.productVersion
productArch = product.productArch
bugzillaUrl = product.bugUrl
isFinal = product.isFinal

# for use in device names, eg: "fedora", "rhel"
# multi-word product names are abbreviated to their initials
shortProductName = productName.lower()  # pylint: disable=no-member
if productName.count(" "):  # pylint: disable=no-member
    shortProductName = ''.join(s[0] for s in shortProductName.split())

# DriverDisc Paths
DD_ALL = "/tmp/DD"
DD_FIRMWARE = "/tmp/DD/lib/firmware"
DD_RPMS = "/tmp/DD-*"

TRANSLATIONS_UPDATE_DIR = "/tmp/updates/po"

ANACONDA_CLEANUP = "anaconda-cleanup"
MOUNT_DIR = "/run/install"
DRACUT_REPODIR = "/run/install/repo"
DRACUT_ISODIR = "/run/install/source"
ISO_DIR = MOUNT_DIR + "/isodir"
IMAGE_DIR = MOUNT_DIR + "/image"
INSTALL_TREE = MOUNT_DIR + "/source"
BASE_REPO_NAME = "anaconda"

# NOTE: this should be LANG_TERRITORY.CODESET, e.g. en_US.UTF-8
DEFAULT_LANG = "en_US.UTF-8"

DEFAULT_VC_FONT = "eurlatgr"

DEFAULT_KEYBOARD = "us"

DRACUT_SHUTDOWN_EJECT = "/run/initramfs/usr/lib/dracut/hooks/shutdown/99anaconda-eject.sh"

# VNC questions
USEVNC = N_("Start VNC")
USETEXT = N_("Use text mode")

# Runlevel files
RUNLEVELS = {3: 'multi-user.target', 5: 'graphical.target'}

# Network
NETWORK_CONNECTION_TIMEOUT = 45  # in seconds
NETWORK_CONNECTED_CHECK_INTERVAL = 0.1  # in seconds

# DBus
DEFAULT_DBUS_TIMEOUT = -1  # use default

# Thread names
THREAD_EXECUTE_STORAGE = "AnaExecuteStorageThread"
THREAD_STORAGE = "AnaStorageThread"
THREAD_STORAGE_WATCHER = "AnaStorageWatcher"
THREAD_CHECK_STORAGE = "AnaCheckStorageThread"
THREAD_CUSTOM_STORAGE_INIT = "AnaCustomStorageInit"
THREAD_WAIT_FOR_CONNECTING_NM = "AnaWaitForConnectingNMThread"
THREAD_PAYLOAD = "AnaPayloadThread"
THREAD_PAYLOAD_RESTART = "AnaPayloadRestartThread"
THREAD_INPUT_BASENAME = "AnaInputThread"
THREAD_SYNC_TIME_BASENAME = "AnaSyncTime"
THREAD_EXCEPTION_HANDLING_TEST = "AnaExceptionHandlingTest"
THREAD_LIVE_PROGRESS = "AnaLiveProgressThread"
THREAD_SOFTWARE_WATCHER = "AnaSoftwareWatcher"
THREAD_CHECK_SOFTWARE = "AnaCheckSoftwareThread"
THREAD_SOURCE_WATCHER = "AnaSourceWatcher"
THREAD_INSTALL = "AnaInstallThread"
THREAD_CONFIGURATION = "AnaConfigurationThread"
THREAD_FCOE = "AnaFCOEThread"
THREAD_ISCSI_DISCOVER = "AnaIscsiDiscoverThread"
THREAD_ISCSI_LOGIN = "AnaIscsiLoginThread"
THREAD_GEOLOCATION_REFRESH = "AnaGeolocationRefreshThread"
THREAD_DATE_TIME = "AnaDateTimeThread"
THREAD_TIME_INIT = "AnaTimeInitThread"
THREAD_DASDFMT = "AnaDasdfmtThread"
THREAD_KEYBOARD_INIT = "AnaKeyboardThread"
THREAD_ADD_LAYOUTS_INIT = "AnaAddLayoutsInitThread"

# Geolocation constants

# geolocation providers
# - values are used by the geoloc CLI/boot option
GEOLOC_PROVIDER_FEDORA_GEOIP = "provider_fedora_geoip"
GEOLOC_PROVIDER_HOSTIP = "provider_hostip"
GEOLOC_PROVIDER_GOOGLE_WIFI = "provider_google_wifi"
# geocoding provider
GEOLOC_GEOCODER_NOMINATIM = "geocoder_nominatim"
# default providers
GEOLOC_DEFAULT_PROVIDER = GEOLOC_PROVIDER_FEDORA_GEOIP
GEOLOC_DEFAULT_GEOCODER = GEOLOC_GEOCODER_NOMINATIM
# timeout (in seconds)
GEOLOC_TIMEOUT = 3

ANACONDA_ENVIRON = "anaconda"
FIRSTBOOT_ENVIRON = "firstboot"

# Tainted hardware
UNSUPPORTED_HW = 1 << 28

# Password validation
PASSWORD_MIN_LEN = 8
PASSWORD_EMPTY_ERROR = N_("The password is empty.")
PASSWORD_CONFIRM_ERROR_GUI = N_("The passwords do not match.")
PASSWORD_CONFIRM_ERROR_TUI = N_("The passwords you entered were different. Please try again.")
PASSWORD_WEAK = N_("The password you have provided is weak. %s")
PASSWORD_WEAK_WITH_ERROR = N_("The password you have provided is weak: %s.")
PASSWORD_WEAK_CONFIRM = N_("You have provided a weak password. Press Done again to use anyway.")
PASSWORD_WEAK_CONFIRM_WITH_ERROR = N_("You have provided a weak password: %s. Press Done again to use anyway.")
PASSWORD_ASCII = N_("The password you have provided contains non-ASCII characters. You may not be able to switch between keyboard layouts to login. Press Done to continue.")
PASSWORD_DONE_TWICE = N_("You will have to press Done twice to confirm it.")
PASSWORD_STRENGTH_DESC = [N_("Empty"), N_("Weak"), N_("Fair"), N_("Good"), N_("Strong")]

# the number of seconds we consider a noticeable freeze of the UI
NOTICEABLE_FREEZE = 0.1

# all ASCII characters
PW_ASCII_CHARS = string.digits + string.ascii_letters + string.punctuation + " "

# Recognizing a tarfile (suffixes matched against file names)
# FIX: "tar.gz" and "tar.xz" were missing their leading dots (inconsistent
# with ".tar.bz2"), so endswith-style checks would also classify names
# like "footar.gz" as tarballs.
TAR_SUFFIX = (".tar", ".tbz", ".tgz", ".txz", ".tar.bz2", ".tar.gz", ".tar.xz")

# screenshots
SCREENSHOTS_DIRECTORY = "/tmp/anaconda-screenshots"
SCREENSHOTS_TARGET_DIRECTORY = "/root/anaconda-screenshots"

# cmdline arguments that append instead of overwrite
CMDLINE_APPEND = ["modprobe.blacklist", "ifname"]

DEFAULT_AUTOPART_TYPE = AUTOPART_TYPE_LVM

# Default to these units when reading user input when no units given
SIZE_UNITS_DEFAULT = "MiB"

# Constants for reporting status to IPMI. These are from the IPMI spec v2 rev1.1, page 512.
IPMI_STARTED = 0x7  # installation started
IPMI_FINISHED = 0x8  # installation finished successfully
IPMI_ABORTED = 0x9  # installation finished unsuccessfully, due to some non-exn error
IPMI_FAILED = 0xA  # installation hit an exception

# for how long (in seconds) we try to wait for enough entropy for LUKS
# keep this a multiple of 60 (minutes)
MAX_ENTROPY_WAIT = 10 * 60

# X display number to use
X_DISPLAY_NUMBER = 1

# Payload status messages
PAYLOAD_STATUS_PROBING_STORAGE = N_("Probing storage...")
PAYLOAD_STATUS_PACKAGE_MD = N_("Downloading package metadata...")
PAYLOAD_STATUS_GROUP_MD = N_("Downloading group metadata...")

# Window title text
WINDOW_TITLE_TEXT = N_("Anaconda Installer")
|
gpl-2.0
| 920,207,107,352,325,500
| 35.068783
| 173
| 0.74945
| false
| 3.260163
| false
| false
| false
|
mwclient/mwclient
|
mwclient/page.py
|
1
|
20723
|
import six
from six import text_type
import time
from mwclient.util import parse_timestamp
import mwclient.listing
import mwclient.errors
class Page(object):
def __init__(self, site, name, info=None, extra_properties=None):
if type(name) is type(self):
self.__dict__.update(name.__dict__)
return
self.site = site
self.name = name
self._textcache = {}
if not info:
if extra_properties:
prop = 'info|' + '|'.join(six.iterkeys(extra_properties))
extra_props = []
for extra_prop in six.itervalues(extra_properties):
extra_props.extend(extra_prop)
else:
prop = 'info'
extra_props = ()
if type(name) is int:
info = self.site.get('query', prop=prop, pageids=name,
inprop='protection', *extra_props)
else:
info = self.site.get('query', prop=prop, titles=name,
inprop='protection', *extra_props)
info = six.next(six.itervalues(info['query']['pages']))
self._info = info
if 'invalid' in info:
raise mwclient.errors.InvalidPageTitle(info.get('invalidreason'))
self.namespace = info.get('ns', 0)
self.name = info.get('title', u'')
if self.namespace:
self.page_title = self.strip_namespace(self.name)
else:
self.page_title = self.name
self.base_title = self.page_title.split('/')[0]
self.base_name = self.name.split('/')[0]
self.touched = parse_timestamp(info.get('touched'))
self.revision = info.get('lastrevid', 0)
self.exists = 'missing' not in info
self.length = info.get('length')
self.protection = {
i['type']: (i['level'], i['expiry'])
for i in info.get('protection', ())
if i
}
self.redirect = 'redirect' in info
self.pageid = info.get('pageid', None)
self.contentmodel = info.get('contentmodel', None)
self.pagelanguage = info.get('pagelanguage', None)
self.restrictiontypes = info.get('restrictiontypes', None)
self.last_rev_time = None
self.edit_time = None
def redirects_to(self):
""" Get the redirect target page, or None if the page is not a redirect."""
info = self.site.get('query', prop='pageprops', titles=self.name, redirects='')
if 'redirects' in info['query']:
for page in info['query']['redirects']:
if page['from'] == self.name:
return Page(self.site, page['to'])
return None
else:
return None
def resolve_redirect(self):
""" Get the redirect target page, or the current page if its not a redirect."""
target_page = self.redirects_to()
if target_page is None:
return self
else:
return target_page
def __repr__(self):
return "<Page object '%s' for %s>" % (self.name.encode('utf-8'), self.site)
def __unicode__(self):
return self.name
@staticmethod
def strip_namespace(title):
if title[0] == ':':
title = title[1:]
return title[title.find(':') + 1:]
@staticmethod
def normalize_title(title):
# TODO: Make site dependent
title = title.strip()
if title[0] == ':':
title = title[1:]
title = title[0].upper() + title[1:]
title = title.replace(' ', '_')
return title
def can(self, action):
"""Check if the current user has the right to carry out some action
with the current page.
Example:
>>> page.can('edit')
True
"""
level = self.protection.get(action, (action,))[0]
if level == 'sysop':
level = 'editprotected'
return level in self.site.rights
def get_token(self, type, force=False):
return self.site.get_token(type, force, title=self.name)
def text(self, section=None, expandtemplates=False, cache=True, slot='main'):
"""Get the current wikitext of the page, or of a specific section.
If the page does not exist, an empty string is returned. By
default, results will be cached and if you call text() again
with the same section and expandtemplates the result will come
from the cache. The cache is stored on the instance, so it
lives as long as the instance does.
Args:
section (int): Section number, to only get text from a single section.
expandtemplates (bool): Expand templates (default: `False`)
cache (bool): Use in-memory caching (default: `True`)
"""
if not self.can('read'):
raise mwclient.errors.InsufficientPermission(self)
if not self.exists:
return u''
if section is not None:
section = text_type(section)
key = hash((section, expandtemplates))
if cache and key in self._textcache:
return self._textcache[key]
revs = self.revisions(prop='content|timestamp', limit=1, section=section,
slots=slot)
try:
rev = next(revs)
if 'slots' in rev:
text = rev['slots'][slot]['*']
else:
text = rev['*']
self.last_rev_time = rev['timestamp']
except StopIteration:
text = u''
self.last_rev_time = None
if not expandtemplates:
self.edit_time = time.gmtime()
else:
# The 'rvexpandtemplates' option was removed in MediaWiki 1.32, so we have to
# make an extra API call, see https://github.com/mwclient/mwclient/issues/214
text = self.site.expandtemplates(text)
if cache:
self._textcache[key] = text
return text
def save(self, *args, **kwargs):
"""Alias for edit, for maintaining backwards compatibility."""
return self.edit(*args, **kwargs)
def edit(self, text, summary=u'', minor=False, bot=True, section=None, **kwargs):
"""Update the text of a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, text=text, **kwargs)
def append(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Append text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, appendtext=text, **kwargs)
def prepend(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Prepend text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, prependtext=text, **kwargs)
def _edit(self, summary, minor, bot, section, **kwargs):
if not self.site.logged_in and self.site.force_login:
raise mwclient.errors.AssertUserFailedError()
if self.site.blocked:
raise mwclient.errors.UserBlocked(self.site.blocked)
if not self.can('edit'):
raise mwclient.errors.ProtectedPageError(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if minor:
data['minor'] = '1'
if not minor:
data['notminor'] = '1'
if self.last_rev_time:
data['basetimestamp'] = time.strftime('%Y%m%d%H%M%S', self.last_rev_time)
if self.edit_time:
data['starttimestamp'] = time.strftime('%Y%m%d%H%M%S', self.edit_time)
if bot:
data['bot'] = '1'
if section is not None:
data['section'] = section
data.update(kwargs)
if self.site.force_login:
data['assert'] = 'user'
def do_edit():
result = self.site.post('edit', title=self.name, summary=summary,
token=self.get_token('edit'),
**data)
if result['edit'].get('result').lower() == 'failure':
raise mwclient.errors.EditError(self, result['edit'])
return result
try:
result = do_edit()
except mwclient.errors.APIError as e:
if e.code == 'badtoken':
# Retry, but only once to avoid an infinite loop
self.get_token('edit', force=True)
try:
result = do_edit()
except mwclient.errors.APIError as e:
self.handle_edit_error(e, summary)
else:
self.handle_edit_error(e, summary)
# 'newtimestamp' is not included if no change was made
if 'newtimestamp' in result['edit'].keys():
self.last_rev_time = parse_timestamp(result['edit'].get('newtimestamp'))
# Workaround for https://phabricator.wikimedia.org/T211233
for cookie in self.site.connection.cookies:
if 'PostEditRevision' in cookie.name:
self.site.connection.cookies.clear(cookie.domain, cookie.path,
cookie.name)
# clear the page text cache
self._textcache = {}
return result['edit']
def handle_edit_error(self, e, summary):
if e.code == 'editconflict':
raise mwclient.errors.EditError(self, summary, e.info)
elif e.code in {'protectedtitle', 'cantcreate', 'cantcreate-anon',
'noimageredirect-anon', 'noimageredirect', 'noedit-anon',
'noedit', 'protectedpage', 'cascadeprotected',
'customcssjsprotected',
'protectednamespace-interface', 'protectednamespace'}:
raise mwclient.errors.ProtectedPageError(self, e.code, e.info)
elif e.code == 'assertuserfailed':
raise mwclient.errors.AssertUserFailedError()
else:
raise e
def touch(self):
"""Perform a "null edit" on the page to update the wiki's cached data of it.
This is useful in contrast to purge when needing to update stored data on a wiki,
for example Semantic MediaWiki properties or Cargo table values, since purge
only forces update of a page's displayed values and not its store.
"""
if not self.exists:
return
self.append('')
def move(self, new_title, reason='', move_talk=True, no_redirect=False):
"""Move (rename) page to new_title.
If user account is an administrator, specify no_redirect as True to not
leave a redirect.
If user does not have permission to move page, an InsufficientPermission
exception is raised.
"""
if not self.can('move'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if move_talk:
data['movetalk'] = '1'
if no_redirect:
data['noredirect'] = '1'
result = self.site.post('move', ('from', self.name), to=new_title,
token=self.get_token('move'), reason=reason, **data)
return result['move']
def delete(self, reason='', watch=False, unwatch=False, oldimage=False):
"""Delete page.
If user does not have permission to delete page, an InsufficientPermission
exception is raised.
"""
if not self.can('delete'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if watch:
data['watch'] = '1'
if unwatch:
data['unwatch'] = '1'
if oldimage:
data['oldimage'] = oldimage
result = self.site.post('delete', title=self.name,
token=self.get_token('delete'),
reason=reason, **data)
return result['delete']
def purge(self):
"""Purge server-side cache of page. This will re-render templates and other
dynamic content.
"""
self.site.post('purge', titles=self.name)
# def watch: requires 1.14
# Properties
def backlinks(self, namespace=None, filterredir='all', redirect=False,
limit=None, generator=True):
"""List pages that link to the current page, similar to Special:Whatlinkshere.
API doc: https://www.mediawiki.org/wiki/API:Backlinks
"""
prefix = mwclient.listing.List.get_prefix('bl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, namespace=namespace, filterredir=filterredir,
))
if redirect:
kwargs['%sredirect' % prefix] = '1'
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'backlinks', 'bl', limit=limit, return_values='title',
**kwargs
)
def categories(self, generator=True, show=None):
"""List categories used on the current page.
API doc: https://www.mediawiki.org/wiki/API:Categories
Args:
generator (bool): Return generator (Default: True)
show (str): Set to 'hidden' to only return hidden categories
or '!hidden' to only return non-hidden ones.
Returns:
mwclient.listings.PagePropertyGenerator
"""
prefix = mwclient.listing.List.get_prefix('cl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, show=show
))
if generator:
return mwclient.listing.PagePropertyGenerator(
self, 'categories', 'cl', **kwargs
)
else:
# TODO: return sortkey if wanted
return mwclient.listing.PageProperty(
self, 'categories', 'cl', return_values='title', **kwargs
)
def embeddedin(self, namespace=None, filterredir='all', limit=None, generator=True):
"""List pages that transclude the current page.
API doc: https://www.mediawiki.org/wiki/API:Embeddedin
Args:
namespace (int): Restricts search to a given namespace (Default: None)
filterredir (str): How to filter redirects, either 'all' (default),
'redirects' or 'nonredirects'.
limit (int): Maximum amount of pages to return per request
generator (bool): Return generator (Default: True)
Returns:
mwclient.listings.List: Page iterator
"""
prefix = mwclient.listing.List.get_prefix('ei', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace,
filterredir=filterredir))
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'embeddedin', 'ei', limit=limit, return_values='title',
**kwargs
)
def extlinks(self):
"""List external links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Extlinks
"""
return mwclient.listing.PageProperty(self, 'extlinks', 'el', return_values='*')
def images(self, generator=True):
"""List files/images embedded in the current page.
API doc: https://www.mediawiki.org/wiki/API:Images
"""
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'images', '')
else:
return mwclient.listing.PageProperty(self, 'images', '',
return_values='title')
def iwlinks(self):
"""List interwiki links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Iwlinks
"""
return mwclient.listing.PageProperty(self, 'iwlinks', 'iw',
return_values=('prefix', '*'))
def langlinks(self, **kwargs):
"""List interlanguage links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Langlinks
"""
return mwclient.listing.PageProperty(self, 'langlinks', 'll',
return_values=('lang', '*'),
**kwargs)
def links(self, namespace=None, generator=True, redirects=False):
"""List links to other pages from the current page.
API doc: https://www.mediawiki.org/wiki/API:Links
"""
prefix = mwclient.listing.List.get_prefix('pl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace))
if redirects:
kwargs['redirects'] = '1'
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'links', 'pl', **kwargs)
else:
return mwclient.listing.PageProperty(self, 'links', 'pl',
return_values='title', **kwargs)
def revisions(self, startid=None, endid=None, start=None, end=None,
dir='older', user=None, excludeuser=None, limit=50,
prop='ids|timestamp|flags|comment|user',
expandtemplates=False, section=None,
diffto=None, slots=None, uselang=None):
"""List revisions of the current page.
API doc: https://www.mediawiki.org/wiki/API:Revisions
Args:
startid (int): Revision ID to start listing from.
endid (int): Revision ID to stop listing at.
start (str): Timestamp to start listing from.
end (str): Timestamp to end listing at.
dir (str): Direction to list in: 'older' (default) or 'newer'.
user (str): Only list revisions made by this user.
excludeuser (str): Exclude revisions made by this user.
limit (int): The maximum number of revisions to return per request.
prop (str): Which properties to get for each revision,
default: 'ids|timestamp|flags|comment|user'
expandtemplates (bool): Expand templates in rvprop=content output
section (int): Section number. If rvprop=content is set, only the contents
of this section will be retrieved.
diffto (str): Revision ID to diff each revision to. Use "prev", "next" and
"cur" for the previous, next and current revision respectively.
slots (str): The content slot (Mediawiki >= 1.32) to retrieve content from.
uselang (str): Language to use for parsed edit comments and other localized
messages.
Returns:
mwclient.listings.List: Revision iterator
"""
kwargs = dict(mwclient.listing.List.generate_kwargs(
'rv', startid=startid, endid=endid, start=start, end=end, user=user,
excludeuser=excludeuser, diffto=diffto, slots=slots
))
if self.site.version[:2] < (1, 32) and 'rvslots' in kwargs:
# https://github.com/mwclient/mwclient/issues/199
del kwargs['rvslots']
kwargs['rvdir'] = dir
kwargs['rvprop'] = prop
kwargs['uselang'] = uselang
if expandtemplates:
kwargs['rvexpandtemplates'] = '1'
if section is not None:
kwargs['rvsection'] = section
return mwclient.listing.RevisionsIterator(self, 'revisions', 'rv', limit=limit,
**kwargs)
def templates(self, namespace=None, generator=True):
    """List templates used on the current page.

    API doc: https://www.mediawiki.org/wiki/API:Templates
    """
    prefix = mwclient.listing.List.get_prefix('tl', generator)
    extra = dict(mwclient.listing.List.generate_kwargs(prefix,
                                                       namespace=namespace))
    if not generator:
        return mwclient.listing.PageProperty(self, 'templates', prefix,
                                             return_values='title', **extra)
    return mwclient.listing.PagePropertyGenerator(self, 'templates', prefix,
                                                  **extra)
|
mit
| 138,916,023,745,932,510
| 37.304991
| 90
| 0.564059
| false
| 4.30742
| false
| false
| false
|
dmittov/AlcoBot
|
bot.py
|
1
|
2312
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import logging
import telegram
import cocktail
from time import sleep
from urllib2 import URLError
def main():
    """Entry point: configure logging, authenticate the bot, poll forever."""
    logging.basicConfig(
        level=logging.DEBUG,
        filename='debug.log',
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # Telegram Bot Authorization Token, read from a local file.
    with open('prod.token') as fh:
        token = fh.readline()
    logging.info(token)
    bot = telegram.Bot(token)
    try:
        update_id = bot.getUpdates()[0].update_id
    except IndexError:
        # No pending updates yet.
        update_id = None
    while True:
        try:
            update_id = response(bot, update_id)
        except telegram.TelegramError as e:
            if e.message in ("Bad Gateway", "Timed out"):
                # Transient network problems with Telegram - retry shortly.
                sleep(1)
            elif e.message == "Unauthorized":
                # The user has removed or blocked the bot - skip the update.
                update_id += 1
            else:
                raise e
        except URLError:
            sleep(1)
def response(bot, update_id):
    """Handle all pending updates and return the next update offset."""
    # Request updates after the last update_id.
    for update in bot.getUpdates(offset=update_id, timeout=10):
        # chat_id is required to reply to any message.
        chat_id = update.message.chat_id
        update_id = update.update_id + 1
        try:
            reply = cocktail.coctail_msg(update.message.text)
        except Exception as e:
            reply = e.message
        if reply:
            bot.sendMessage(chat_id=chat_id, text=reply)
    return update_id
if __name__ == '__main__':
main()
|
gpl-3.0
| 4,792,354,141,523,781,000
| 30.243243
| 71
| 0.62154
| false
| 4.077601
| false
| false
| false
|
nullzero/wprobot
|
wp/ltime.py
|
1
|
1990
|
# -*- coding: utf-8 -*-
"""
Library to manage everything about date and time.
"""
__version__ = "1.0.2"
__author__ = "Sorawee Porncharoenwase"
import datetime
import time
def wrapMonth(m):
    """Convert a one-based month number (1-12) to a zero-based index (0-11).

    Values just outside the range wrap around: 0 maps to 11 and 13 maps
    to 0.  (The original docstring incorrectly described the input as
    zero-based.)
    """
    m -= 1
    if m < 0:
        m += 12
    if m >= 12:
        m -= 12
    return m
def weekdayThai(d):
    """Return the Thai name of day-of-week *d* (0 = Monday ... 6 = Sunday).

    Fix: the original built the list with ``map(...)[d]``, which raises
    TypeError under Python 3 because ``map`` returns an iterator; indexing
    a plain list behaves identically under Python 2 and also works on 3.
    """
    names = [u"จันทร์", u"อังคาร", u"พุธ", u"พฤหัสบดี", u"ศุกร์",
             u"เสาร์", u"อาทิตย์"]
    return u"วัน" + names[d]
def monthEng(m):
    """Return the English name of one-based month *m*."""
    names = (u"January", u"February", u"March", u"April", u"May", u"June",
             u"July", u"August", u"September", u"October", u"November",
             u"December")
    return names[wrapMonth(m)]
def monthThai(m):
    """Return the Thai name of one-based month *m*."""
    names = (u"มกราคม", u"กุมภาพันธ์", u"มีนาคม", u"เมษายน", u"พฤษภาคม",
             u"มิถุนายน", u"กรกฎาคม", u"สิงหาคม", u"กันยายน", u"ตุลาคม",
             u"พฤศจิกายน", u"ธันวาคม")
    return names[wrapMonth(m)]
def monthThaiAbbr(m):
    """Return the abbreviated Thai name of one-based month *m*."""
    names = (u"ม.ค.", u"ก.พ.", u"มี.ค.", u"เม.ย.", u"พ.ค.", u"มิ.ย.",
             u"ก.ค.", u"ส.ค.", u"ก.ย.", u"ต.ค.", u"พ.ย.", u"ธ.ค.")
    return names[wrapMonth(m)]
def getNumDay(year, month):
    """Return the number of days in one-based *month* of *year*.

    February follows the Gregorian leap-year rules: divisible by 4,
    except centuries not divisible by 400.
    """
    if month != 2:
        return [0, 31, 0, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month]
    if year % 400 == 0:
        return 29
    if year % 100 == 0:
        return 28
    return 29 if year % 4 == 0 else 28
# Short module-level aliases re-exported for callers of this library.
td = datetime.timedelta
sleep = time.sleep
|
mit
| 5,983,318,943,164,025,000
| 27.440678
| 79
| 0.51907
| false
| 1.707019
| false
| false
| false
|
Gr8z/Legend-Cogs
|
profanity/profanity.py
|
1
|
2085
|
import discord
from discord.ext import commands
from .utils.dataIO import dataIO, fileIO
import os
import asyncio
BOTCOMMANDER_ROLES = ["Family Representative", "Clan Manager", "Clan Deputy", "Co-Leader", "Hub Officer", "admin"]
class profanity:
    """Deletes messages containing banned words and briefly warns the author."""

    def __init__(self, bot):
        self.bot = bot
        self.bannedwords = dataIO.load_json('data/Profanity/banned_words.json')

    async def banned_words(self, message):
        """Delete *message* and post a temporary warning if it contains a banned word."""
        banned = set(self.bannedwords)
        # Strip common markdown characters before tokenizing.
        stripped = message.content.replace("*", "").replace("_", "").replace("#", "")
        if not banned.intersection(stripped.split()):
            return
        await self.bot.delete_message(message)
        warning = await self.bot.send_message(
            message.channel,
            "{}, **We do not allow Hateful, obscene, offensive, racist, sexual, or violent words in any public channels.**".format(
                message.author.mention
            )
        )
        await asyncio.sleep(6)
        await self.bot.delete_message(warning)

    async def on_message_edit(self, before, after):
        # Edited messages are re-checked just like new ones.
        await self.banned_words(after)

    async def on_message(self, message):
        # Ignore the bot's own messages.
        if message.author.id == self.bot.user.id:
            return
        # Members holding any "bot commander" role are exempt.
        exempt = {discord.utils.get(message.server.roles, name=r)
                  for r in BOTCOMMANDER_ROLES}
        if set(message.author.roles).intersection(exempt):
            return
        await self.banned_words(message)
def check_folders():
    """Create this cog's data folder on first run (no-op if present)."""
    if os.path.exists("data/Profanity"):
        return
    print("Creating data/Profanity folder...")
    os.makedirs("data/Profanity")
def check_files():
    """Create an empty banned-words file if it does not exist yet."""
    path = "data/Profanity/banned_words.json"
    if fileIO(path, "check"):
        return
    print("Creating empty banned_words.json...")
    fileIO(path, "save", [])
def setup(bot):
    """Red cog entry point: ensure data files exist, then register the cog."""
    check_folders()
    check_files()
    bot.add_cog(profanity(bot))
|
mit
| 7,216,413,436,609,016,000
| 31.092308
| 135
| 0.61295
| false
| 3.619792
| false
| false
| false
|
samuelfekete/Pythonometer
|
tests/test_questions.py
|
1
|
1786
|
"""Test all questions."""
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pythonometer.quiz import all_questions
from pythonometer.questions.base import WrongAnswer
class TestQuestions(unittest.TestCase):
    """Test the questions.

    All question tests are the same, so they are loaded dynamically:
    the module-level loop below attaches one test method per question.
    """
    pass
# Add a test for every question.
for question in all_questions():
    # `question=question` freezes the loop variable at definition time,
    # avoiding the late-binding-closure pitfall.
    def question_test(self, question=question):
        current_question = question()

        # Assert that a question string is supplied.
        # NOTE(review): `basestring` makes this module Python-2-only.
        question_string = current_question.get_question_text()
        self.assertIsInstance(question_string, basestring)

        # Assert that at least one correct answer is given.
        self.assert_(current_question.get_correct_answers())

        # Assert that checking with the correct answers returns True.
        for correct_answer in current_question.get_correct_answers():
            self.assert_(current_question.check_answer(correct_answer))

        # Assert that checking with the wrong answers raises WrongAnswer.
        for wrong_answer in current_question.get_wrong_answers():
            with self.assertRaises(WrongAnswer):
                current_question.check_answer(wrong_answer)

        # Assert that checking a wrong answer raises WrongAnswer.
        with self.assertRaises(WrongAnswer):
            current_question.check_answer('')

        # Assert that checking the answer with bad code raises WrongAnswer.
        with self.assertRaises(WrongAnswer):
            current_question.check_answer('raise Exception')

    # Attach as e.g. test_<QuestionClassName> on the TestCase.
    setattr(TestQuestions, 'test_{}'.format(question.__name__), question_test)

if __name__ == '__main__':
    unittest.main()
|
mit
| 8,631,838,962,788,521,000
| 32.074074
| 82
| 0.68645
| false
| 4.409877
| true
| false
| false
|
iocast/poiservice
|
lib/FilterEncodingWizard.py
|
1
|
2742
|
'''
Created on May 16, 2011
@author: michel
'''
import json
class FilterEncodingWizard(object):
    """Provides OGC Filter Encoding operator metadata and renders it as
    JSON or as HTML <select> widgets for a query-builder UI.

    Each operator entry carries:
      value   - the OGC filter element name,
      display - the symbol/word shown to the user,
      xml     - a ${value}/${literal} template for the filter fragment.
    """

    # NOTE(review): `comparision` is a typo for `comparison`, but it is part
    # of the public interface, so it is kept as-is.
    comparision = [{
        'value' : 'PropertyIsEqualTo',
        'display' : '=',
        'xml' : '<PropertyIsEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsEqualTo>'},
        {'value' : 'PropertyIsNotEqualTo',
         'display' : '!=',
         'xml' : '<PropertyIsNotEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsNotEqualTo>'},
        {'value' : 'PropertyIsLessThan',
         'display' : '<',
         'xml' : '<PropertyIsLessThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThan>'},
        {'value' : 'PropertyIsGreaterThan',
         'display' : '>',
         'xml' : '<PropertyIsGreaterThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThan>'},
        {'value' : 'PropertyIsLessThanOrEqualTo',
         'display' : '<=',
         'xml' : '<PropertyIsLessThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThanOrEqualTo>'},
        {'value' : 'PropertyIsGreaterThanOrEqualTo',
         'display' : '>=',
         'xml' : '<PropertyIsGreaterThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThanOrEqualTo>'}
        # The following operators are not yet supported:
        #{'value' : 'PropertyIsLike',
        # 'display' : 'Like',
        # 'xml' : ''},
        #{'value' : 'PropertyIsBetween',
        # 'display' : 'Between',
        # 'xml' : ''},
        #{'value' : 'PropertyIsNull',
        # 'display' : 'Nul',
        # 'xml' : ''}
    ]

    logical = [
        {'value' : 'Or',
         'display' : 'or',
         'xml' : '<Or>${statement}</Or>'},
        {
         'value' : 'And',
         'display' : 'and',
         'xml' : '<And>${statement}</And>'}
    ]

    def comparisonToJson(self):
        """Return the comparison operator table serialized as JSON."""
        return json.dumps(self.comparision)

    def comparisonToHTML(self):
        """Render the comparison operators as an HTML <select> element."""
        html = '<select onChange="javascript:queryBuilder.operatorChanged(this);">'
        for value in self.comparision:
            html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
        html += '</select>'
        return html

    def logicalToJson(self):
        """Return the logical operator table serialized as JSON."""
        return json.dumps(self.logical)

    def logicalToHTML(self):
        """Render the logical operators as an HTML <select> element."""
        html = '<select>'
        for value in self.logical:
            html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
        html += '</select>'
        return html;
|
mit
| -6,222,942,528,908,910,000
| 39.338235
| 155
| 0.522611
| false
| 4.098655
| false
| false
| false
|
cardmaster/makeclub
|
controlers/activity.py
|
1
|
10024
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api.users import get_current_user, create_login_url, User
from google.appengine.ext import webapp
from google.appengine.ext import db
from errors import errorPage
from infopage import infoPage
from access import hasActPrivilige, hasClubPrivilige
from models import Activity, Membership, Club, ActivityParticipator, ActivityBill
from url import urldict
from template import render
class ActivityBase(webapp.RequestHandler):
    """Shared behaviour for activity handlers: model lookup, privilege
    checking and template rendering.

    Subclasses must set `self.urlcfg` (URL pattern), `self.template`
    (template filename) and `self.actOperation` (privilege name).
    """

    def __init__(self, *args, **kw):
        super(ActivityBase, self).__init__(*args, **kw)
        self.actobj = None  # Activity entity resolved from the request path

    def getActModel(self):
        """Resolve the Activity entity referenced by the request path, or None."""
        aid, = self.urlcfg.analyze(self.request.path)
        if (aid):
            id = int(aid)
            return Activity.get_by_id(id)
        else:
            return None

    def actionPath(self):
        # Forms post back to the same URL they were served from.
        return self.request.path

    def templateParams(self):
        """Base template variables; subclasses extend this dict."""
        act = self.actobj
        club = act.club
        cluburl = urldict['ClubView'].path(club.slug)
        templateVars = dict(club = club, cluburl = cluburl, act = act, action = self.actionPath() )
        return templateVars

    def makeResponseText(self, act):
        templateVars = self.templateParams()
        return render(self.template, templateVars, self.request.url)

    def checkPrivilige(self):
        """Require a logged-in user holding this handler's operation privilege.

        Writes an error page and returns False on failure, True otherwise.
        """
        user = get_current_user()
        if (not user):
            errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
            return False
        if (not hasActPrivilige(user, self.actobj, self.actOperation)):
            errorPage ( self.response, "Not authorrized", urldict['ClubView'].path(self.actobj.club.slug), 403)
            return False
        return True

    def dbg(self, *args):
        # Debug output intentionally disabled: the early return makes the
        # lines below unreachable.
        return #Clean up debug code
        self.response.out.write (" ".join([str(arg) for arg in args]))
        self.response.out.write ("<br />\n")

    def get(self, *args):
        actobj = self.getActModel()
        if (actobj):
            self.actobj = actobj
            if (self.checkPrivilige()):
                self.response.out.write (self.makeResponseText(actobj))
            else:
                # checkPrivilige already wrote the error page.
                return
        else:
            return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class SpecialOp:
    """A special operation offered on an activity page (e.g. join/quit).

    Fix: the original used a mutable default argument (``data=[]``), so
    every instance constructed without *data* shared one list; mutating it
    on one instance leaked into all others.  The default is now a fresh
    list per instance; the interface is unchanged for all callers.
    """

    def __init__(self, oper='', url='', needPost=False, data=None, display=''):
        self.oper = oper
        # Fall back to the operation name when no display label is given.
        if not display:
            display = oper
        self.display = display
        self.url = url
        self.needPost = needPost
        self.data = [] if data is None else data
class ActivityView(ActivityBase):
    """Read-only activity page listing participators and available actions."""

    def __init__(self, *args, **kw):
        super (ActivityView, self).__init__(*args, **kw)
        self.template = 'activity_view.html'
        self.urlcfg = urldict['ActivityView']
        self.actOperation = "view"

    def templateParams(self):
        """Extend the base variables with permitted operations and participators."""
        defaults = super (ActivityView, self).templateParams()
        user = get_current_user();
        aid = self.actobj.key().id()
        specialOps = []
        # Edit link, only for users with the edit privilege.
        if (hasActPrivilige(user, self.actobj, "edit" )):
            sop = SpecialOp('edit', urldict['ActivityEdit'].path(aid), False)
            specialOps.append(sop)
        urlcfg = urldict['ActivityParticipate']
        soplist = ['join', 'quit', 'confirm']
        # Offer billing or re-billing depending on whether a bill exists.
        if (self.actobj.isBilled):
            soplist.append("rebill")
        else:
            soplist.append("bill")
        for oper in soplist:
            if (hasActPrivilige(user, self.actobj, oper) ):
                # Operations post back with the current user as target.
                data = [('target', user.email()), ]
                sop = SpecialOp(oper, urlcfg.path(aid, oper), True, data)
                specialOps.append(sop)
        defaults['specialOps'] = specialOps
        # Per-participator operations (currently only confirmation).
        participatorOps = []
        for oper in ('confirm', ):
            if (hasActPrivilige(user, self.actobj, oper) ):
                sop = SpecialOp(oper, urlcfg.path(aid, oper), True, [])
                participatorOps.append(sop)
        defaults['participatorOps'] = participatorOps
        apq = ActivityParticipator.all()
        apq.filter ('activity = ', self.actobj)
        defaults['participators'] = apq
        return defaults
class ActivityParticipate(webapp.RequestHandler):
    """Handles join/quit/confirm/bill/rebill operations on an activity.

    All mutating work happens in post(); get() only echoes the parsed URL.
    """

    def getActModel(self, id):
        """Return the Activity for *id*, or None when the id is malformed."""
        try:
            iid = int(id)
        except:
            return None
        actobj = Activity.get_by_id(iid)
        return actobj

    def get(self, *args):
        # Diagnostic only; the real work happens in post().
        urlcfg = urldict['ActivityParticipate']
        id, oper = urlcfg.analyze(self.request.path)
        self.response.out.write (
            'on id %s, operation %s' % (id, oper)
        )

    def post(self, *args):
        urlcfg = urldict['ActivityParticipate']
        id, oper = urlcfg.analyze(self.request.path)
        id = int(id)
        actobj = self.getActModel(id)
        if (not actobj):
            return errorPage (self.response, urldict['ClubList'].path(), "No such activity", 404 )
        user = get_current_user();
        if (not user):
            return errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
        # 'target' lets privileged users operate on another member's behalf.
        target = self.request.get ('target')
        cluburl = urldict['ClubView'].path(actobj.club.slug)
        if (not hasActPrivilige(user, actobj, oper,target) ):
            return errorPage ( self.response, "Can not access", cluburl, 403)
        if (target):
            targetUser = User(target)
            if(not targetUser):
                return errorPage ( self.response, "Illegal access", cluburl, 403)
        else: #if target omitted, use current user as target
            targetUser = user
        mem = Membership.between (targetUser, actobj.club)
        if (not mem):
            return errorPage ( self.response, "Not a member", cluburl, 403)
        acturl = urldict['ActivityView'].path(id)
        if (oper == 'join'):
            # Create the participation record unless it already exists.
            actp = ActivityParticipator.between (mem, actobj)
            if (not actp):
                actp = ActivityParticipator(member = mem, activity = actobj)
                actp.put()
            return infoPage (self.response, "Successfully Joined", "%s has join activity %s" % (mem.name, actobj.name), acturl)
        elif (oper == 'quit'):
            actp = ActivityParticipator.between(mem, actobj)
            if (actp):
                if (actp.confirmed):
                    # Confirmed participations cannot be withdrawn.
                    return errorPage ( self.response, "Cannot delete confirmed participator", acturl, 403)
                else:
                    actp.delete()
            return infoPage (self.response, "Successfully Quited", "%s success quit activity %s" % (mem.name, actobj.name), acturl)
        elif (oper == 'confirm'):
            actp = ActivityParticipator.between(mem, actobj)
            if (actp):
                # Toggles the confirmation flag.
                actp.confirmed = not actp.confirmed
                actp.put()
                return infoPage (self.response, "Successfully Confirmed", "success confirmed %s join activity %s" % (mem.name, actobj.name), acturl)
            else:
                return errorPage ( self.response, "No Such a Member", acturl, 404)
        elif (oper == 'bill' or oper == "rebill"):
            billobj = ActivityBill.generateBill(actobj, oper == "rebill")#If in rebill operation, we could enable rebill
            if (billobj):
                billobj.put()
                billDict = dict(billobj = billobj)
                return infoPage (self.response, "Successfully Billded", str(billobj.memberBill), acturl)
            else:
                return errorPage (self.response, "Error Will Generate Bill", acturl, 501)
def extractRequestData(request, interested, dbg=None):
    """Pull and convert the request parameters named in *interested*.

    *interested* maps parameter name -> converter callable; only truthy
    converted values are kept in the returned dict.  *dbg*, when given,
    receives a trace line per parameter.
    """
    extracted = dict()
    for name, convert in interested.iteritems():
        value = convert(request.get(name))
        if (dbg):
            dbg ( "Extract:", name, "=", value)
        if (value):
            extracted[name] = value
    return extracted
import re
def parseDuration(times):
    """Parse a duration string such as '2.5h' into a float number of hours.

    Only the 'h' (hours) suffix is supported: the trailing unit character
    is stripped and the remainder parsed as a float.

    Fix: removed a leftover debug ``print`` statement (which also made the
    function Python-2-only); behavior is otherwise unchanged.
    """
    return float(times[:-1])
def parseBill(billstr, dbg=None):
    """Parse 'name, amount, name, amount, ...' into [(name, amount), ...].

    Entries are comma-separated and whitespace-stripped; a trailing
    unpaired entry is discarded.  *dbg*, when given, receives trace lines.
    """
    raw = billstr.split(',')
    if (dbg):
        dbg("Bill String:", billstr)
        dbg("Splitted:", raw)
    cleaned = [ent.strip() for ent in raw]
    # Pair up consecutive entries: (key, value), (key, value), ...
    return [(cleaned[i], cleaned[i + 1])
            for i in range(0, len(cleaned) - 1, 2)]
class ActivityEdit(ActivityBase):
    """Edit (or delete) an existing activity via POST."""

    def __init__(self, *args, **kw):
        super (ActivityEdit, self).__init__(*args, **kw)
        self.template = 'activity_edit.html'
        self.urlcfg = urldict['ActivityEdit']
        self.actobj = None
        self.actOperation = "edit"

    def parseBillDbg(self, billstr):
        # parseBill with this handler's debug sink attached.
        return parseBill(billstr, self.dbg)

    def updateObject(self, actobj):
        """Copy validated POST fields onto *actobj* (mutates it in place)."""
        interested = dict (name = str, intro = str, duration = parseDuration, bill = self.parseBillDbg)
        reqs = extractRequestData (self.request, interested, self.dbg)
        for (key, val) in reqs.iteritems():
            self.dbg (key, "=", val)
            setattr (actobj, key, val)
        #Will read data from postdata, and update the pass-in actobj.
        pass

    def post(self, *args):
        actobj = self.getActModel()
        if (actobj):
            self.actobj = actobj
            if (self.checkPrivilige()):
                # A 'delete' form field turns this request into a deletion.
                if (self.request.get ('delete', False)):
                    actobj.delete()
                    return infoPage (self.response, "Successful deleted", "Deleted Activity %s" % actobj.name, "/")
                self.updateObject(actobj)
                key = actobj.put()
                # NOTE(review): success is reported through errorPage with a
                # 200 status - presumably intentional; confirm with the UI.
                if (key):
                    return errorPage( self.response, "Successfully storing this Activity", urldict['ActivityView'].path(key.id()), 200)
                else:
                    return errorPage( self.response, "Error while storing this Activity", urldict['ActivityEdit'].path(actobj.key().id()), 501)
        else:
            return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class ActivityNew(ActivityEdit):
    """Create a new activity for a club; reuses ActivityEdit's POST logic."""

    def getActModel(self):
        # Unlike ActivityEdit, builds a fresh default Activity for the club
        # named in the URL rather than loading an existing one.
        urlcfg = urldict['ActivityNew']
        slug, = urlcfg.analyze(self.request.path)
        user = get_current_user()
        club = Club.getClubBySlug(slug)
        if (user and club):
            newact = Activity.createDefault(user, club)
            # Seed a default bill so the edit form is pre-populated.
            if (newact): newact.bill = [('Filed Expense', 80), ('Balls Expense', 30)]
            return newact
        else:
            return None

    def checkPrivilige(self):
        # Creation is gated on the club-level "newact" privilege rather
        # than an activity-level one.
        user = get_current_user()
        if (not user):
            errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
            return False
        if (not hasClubPrivilige(user, self.actobj.club, "newact")):
            errorPage ( self.response, "Not Authorized to edit", urldict['ClubView'].path(self.actobj.club.slug), 403)
            return False
        return True
|
agpl-3.0
| -6,981,099,863,504,542,000
| 34.048951
| 142
| 0.680467
| false
| 3.03023
| false
| false
| false
|
pedrogazquez/appBares
|
rango/forms.py
|
1
|
3240
|
from django import forms
from django.contrib.auth.models import User
from rango.models import Tapa, Bar, UserProfile
class BarForm(forms.ModelForm):
    """Form for creating/editing a Bar; counters are hidden and start at 0."""
    name = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre del bar")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
    likes = forms.IntegerField(widget=forms.HiddenInput(), initial=0)

    # An inline class to provide additional information on the form.
    class Meta:
        # Provide an association between the ModelForm and a model.
        # NOTE(review): no `fields`/`exclude` declared - recent Django
        # versions require one on ModelForms; confirm the project's
        # Django version tolerates this.
        model = Bar
# class TapaForm(forms.ModelForm):
# nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
# url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
# views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
# def clean(self):
# cleaned_data = self.cleaned_data
# url = cleaned_data.get('url')
# # If url is not empty and doesn't start with 'http://' add 'http://' to the beginning
# if url and not url.startswith('http://'):
# url = 'http://' + url
# cleaned_data['url'] = url
# return cleaned_data
# class Meta:
# # Provide an association between the ModelForm and a model
# model = Tapa
# # What fields do we want to include in our form?
# # This way we don't need every field in the model present.
# # Some fields may allow NULL values, so we may not want to include them...
# # Here, we are hiding the foreign keys
# fields = ('nombre', 'url','views')
class TapaForm(forms.ModelForm):
    """Form for creating/editing a Tapa; the owning bar is set by the view."""
    nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
    url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
    views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)

    class Meta:
        # Provide an association between the ModelForm and a model
        model = Tapa

        # What fields do we want to include in our form?
        # This way we don't need every field in the model present.
        # Some fields may allow NULL values, so we may not want to include them...
        # Here, we are hiding the foreign key.
        # we can either exclude the category field from the form,
        exclude = ('bar',)
        #or specify the fields to include (i.e. not include the category field)
        fields = ('nombre', 'url','views')
class UserForm(forms.ModelForm):
    """Registration form for the built-in User model.

    The password field uses PasswordInput so it is masked in the browser.
    """
    username = forms.CharField(help_text="Please enter a username.")
    email = forms.CharField(help_text="Please enter your email.")
    password = forms.CharField(widget=forms.PasswordInput(), help_text="Please enter a password.")

    class Meta:
        model = User
        fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
    """Optional extra profile data collected alongside UserForm."""
    website = forms.URLField(help_text="Please enter your website.", required=False)
    picture = forms.ImageField(help_text="Select a profile image to upload.", required=False)

    class Meta:
        model = UserProfile
        fields = ('website', 'picture')
|
gpl-3.0
| 233,954,924,696,194,240
| 41.810811
| 113
| 0.659877
| false
| 3.941606
| false
| false
| false
|
google-research/disentanglement_lib
|
disentanglement_lib/data/ground_truth/cars3d.py
|
1
|
4067
|
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cars3D data set."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.data.ground_truth import ground_truth_data
from disentanglement_lib.data.ground_truth import util
import numpy as np
import PIL
import scipy.io as sio
from six.moves import range
from sklearn.utils import extmath
from tensorflow.compat.v1 import gfile
CARS3D_PATH = os.path.join(
os.environ.get("DISENTANGLEMENT_LIB_DATA", "."), "cars")
class Cars3D(ground_truth_data.GroundTruthData):
  """Cars3D data set.

  The data set was first used in the paper "Deep Visual Analogy-Making"
  (https://papers.nips.cc/paper/5845-deep-visual-analogy-making) and can be
  downloaded from http://www.scottreed.info/. The images are rescaled to 64x64.

  The ground-truth factors of variation are:
  0 - elevation (4 different values)
  1 - azimuth (24 different values)
  2 - object type (183 different values)
  """

  def __init__(self):
    self.factor_sizes = [4, 24, 183]
    # Cartesian product of all factor values: one row per image.
    features = extmath.cartesian(
        [np.array(list(range(i))) for i in self.factor_sizes])
    self.latent_factor_indices = [0, 1, 2]
    self.num_total_factors = features.shape[1]
    self.index = util.StateSpaceAtomIndex(self.factor_sizes, features)
    self.state_space = util.SplitDiscreteStateSpace(self.factor_sizes,
                                                    self.latent_factor_indices)
    self.data_shape = [64, 64, 3]
    self.images = self._load_data()

  @property
  def num_factors(self):
    return self.state_space.num_latent_factors

  @property
  def factors_num_values(self):
    return self.factor_sizes

  @property
  def observation_shape(self):
    return self.data_shape

  def sample_factors(self, num, random_state):
    """Sample a batch of factors Y."""
    return self.state_space.sample_latent_factors(num, random_state)

  def sample_observations_from_factors(self, factors, random_state):
    """Sample a batch of observations X given a batch of factors Y."""
    all_factors = self.state_space.sample_all_factors(factors, random_state)
    indices = self.index.features_to_index(all_factors)
    return self.images[indices].astype(np.float32)

  def _load_data(self):
    # One 64x64x3 image per (elevation, azimuth, car model) combination.
    dataset = np.zeros((24 * 4 * 183, 64, 64, 3))
    all_files = [x for x in gfile.ListDirectory(CARS3D_PATH) if ".mat" in x]
    for i, filename in enumerate(all_files):
      data_mesh = _load_mesh(filename)
      factor1 = np.array(list(range(4)))
      factor2 = np.array(list(range(24)))
      # Factor assignment for every image in this .mat file; the file
      # index i serves as the object-type factor.
      all_factors = np.transpose([
          np.tile(factor1, len(factor2)),
          np.repeat(factor2, len(factor1)),
          np.tile(i,
                  len(factor1) * len(factor2))
      ])
      indexes = self.index.features_to_index(all_factors)
      dataset[indexes] = data_mesh
    return dataset
def _load_mesh(filename):
  """Parses a single source file and rescales contained images."""
  with gfile.Open(os.path.join(CARS3D_PATH, filename), "rb") as f:
    mesh = np.einsum("abcde->deabc", sio.loadmat(f)["im"])
  # Collapse the two leading factor axes into one batch axis.
  flattened_mesh = mesh.reshape((-1,) + mesh.shape[2:])
  rescaled_mesh = np.zeros((flattened_mesh.shape[0], 64, 64, 3))
  for i in range(flattened_mesh.shape[0]):
    pic = PIL.Image.fromarray(flattened_mesh[i, :, :, :])
    pic.thumbnail((64, 64, 3), PIL.Image.ANTIALIAS)
    rescaled_mesh[i, :, :, :] = np.array(pic)
  # Normalize pixel values to [0, 1].
  return rescaled_mesh * 1. / 255
|
apache-2.0
| 5,423,659,911,183,028,000
| 35.3125
| 79
| 0.687239
| false
| 3.380715
| false
| false
| false
|
JudoWill/ResearchNotebooks
|
GA-PhredProcessing.py
|
1
|
1153
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
# NOTE(review): this is an IPython notebook export (see the <codecell>
# markers); it contains shell-escape lines (!) and is not importable as a
# plain Python module.
import os, os.path
import shutil
import glob
import sys
from subprocess import check_call, check_output

# Work inside the folder holding the Phred trace data; make local
# sequence utilities importable.
os.chdir('/home/will/Dropbox/PhredDirectory/')
staden_path = '/home/will/staden-2.0.0b9.x86_64/bin/'
sys.path.append('/home/will/PySeqUtils/')

# <codecell>

from GeneralSeqTools import call_muscle, fasta_reader, fasta_writer

# <codecell>

#from Bio import SeqIO
from Bio.SeqIO.AbiIO import AbiIterator

# Read every ABI trace file, trimming low-quality ends, and collect
# (id, sequence-string) pairs.
files = glob.glob('../Wigdahl Trace files/2:11:11/*.ab1')
seqs = []
for f in files:
    rec = AbiIterator(open(f, mode = 'rb'), trim = True).next()
    seqs.append( (rec.id, rec.seq.tostring()) )

# <codecell>

# IPython shell escape: show convert_trace usage.
!/home/will/staden-2.0.0b9.x86_64/bin/convert_trace --help

# <codecell>

# Multiple-align the sequences and save them as FASTA.
res = call_muscle(seqs)
with open('align_data.fasta', 'w') as handle:
    fasta_writer(handle, res)

# <codecell>

from HIVTransTool import process_seqs

results = list(process_seqs(seqs[:50], extract_regions = True, known_names = 50))

# <codecell>

for row in results:
    if row['RegionName'] == 'LTR5':
        print row['Name'], row['QueryNuc']

# <codecell>

results[:5]

# <codecell>
|
mit
| 8,730,449,133,413,499,000
| 18.87931
| 81
| 0.679965
| false
| 2.656682
| false
| false
| false
|
intel-hpdd/intel-manager-for-lustre
|
chroma_core/services/job_scheduler/job_scheduler_client.py
|
1
|
10557
|
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""
The service `job_scheduler` handles both RPCs (JobSchedulerRpc) and a queue (NotificationQueue).
The RPCs are used for explicit requests to modify the system or run a particular task, while the queue
is used for updates received from agent reports. Access to both of these, along with some additional
non-remote functionality is wrapped in JobSchedulerClient.
"""
from django import db
from chroma_core.services import log_register
from chroma_core.services.rpc import ServiceRpcInterface
from chroma_core.models import ManagedHost, Command
log = log_register(__name__)
class JobSchedulerRpc(ServiceRpcInterface):
    """RPC proxy for the job_scheduler service.

    Every name listed in `methods` becomes a callable on this interface
    that forwards its arguments to the remote job scheduler.
    """
    methods = [
        "set_state",
        "run_jobs",
        "cancel_job",
        "create_host_ssh",
        "test_host_contact",
        "create_filesystem",
        "create_ostpool",
        "create_task",
        "remove_task",
        "update_ostpool",
        "delete_ostpool",
        "create_client_mount",
        "create_copytool",
        "register_copytool",
        "unregister_copytool",
        "update_nids",
        "trigger_plugin_update",
        "update_lnet_configuration",
        "create_host",
        "create_targets",
        "available_transitions",
        "available_jobs",
        "get_locks",
        "update_corosync_configuration",
        "get_transition_consequences",
        "configure_stratagem",
        "update_stratagem",
        "run_stratagem",
    ]
class JobSchedulerClient(object):
"""Because there are some tasks which are the domain of the job scheduler but do not need to
be run in the context of the service, the RPCs and queue operations are accompanied in this
class by some operations that run locally. The local operations are
read-only operations such as querying what operations are possible for a particular object.
"""
@classmethod
def command_run_jobs(cls, job_dicts, message):
    """Create and run one or more Jobs within a single Command.

    :param job_dicts: list of one or more dicts like
        {'class_name': 'MyJobClass', 'args': {...Job constructor kwargs...}}
    :param message: user-visible description of the operation,
        e.g. "Detecting filesystems"
    :return: the ID of a new Command
    """
    rpc = JobSchedulerRpc()
    return rpc.run_jobs(job_dicts, message)

@classmethod
def command_set_state(cls, object_ids, message, run=True):
    """Schedule whatever Jobs are needed to reach the states in *object_ids*,
    all under a single Command.

    May create no Jobs if the system is already in (or already scheduled to
    reach) the requested state; in that case the returned Command is attached
    to the pre-existing Jobs that take the system to the desired state.

    :param object_ids: list of three-tuples (natural_key, object_id, new_state)
    :param message: user-visible description, e.g. "Starting filesystem X"
    :param run: test only - schedule jobs without starting them
    :return: the ID of a new Command
    """
    rpc = JobSchedulerRpc()
    return rpc.set_state(object_ids, message, run)
@classmethod
def available_transitions(cls, object_list):
    """Return the state transitions available for each object in *object_list*.

    See the Job Scheduler method of the same name for details.
    """
    return JobSchedulerRpc().available_transitions(object_list)

@classmethod
def available_jobs(cls, object_list):
    """Query which jobs (other than changes to state) can be run on these objects.

    See the Job Scheduler method of the same name for details.
    """
    return JobSchedulerRpc().available_jobs(object_list)

@classmethod
def get_transition_consequences(cls, stateful_object, new_state):
    """Query the side effects of a state transition by dry-running the
    job scheduling for it.

    The return format is like this:
    ::

        {
            'transition_job': <job dict>,
            'dependency_jobs': [<list of job dicts>]
        }

    where each job dict contains 'class', 'requires_confirmation',
    'confirmation_prompt', 'description', 'stateful_object_id' and
    'stateful_object_content_type_id'.

    :param stateful_object: A StatefulObject instance
    :param new_state: Hypothetical new value of the 'state' attribute
    """
    klass_name = stateful_object.__class__.__name__
    return JobSchedulerRpc().get_transition_consequences(
        klass_name, stateful_object.id, new_state)
@classmethod
def cancel_job(cls, job_id):
"""Attempt to cancel a job which is already scheduled (and possibly running)
:param job_id: ID of a Job object
"""
JobSchedulerRpc().cancel_job(job_id)
@classmethod
def create_host_ssh(cls, address, server_profile, root_pw, pkey, pkey_pw):
"""
Create a host which will be set up using SSH
:param address: SSH address
:return: (<ManagedHost instance>, <Command instance>)
"""
host_id, command_id = JobSchedulerRpc().create_host_ssh(address, server_profile, root_pw, pkey, pkey_pw)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
@classmethod
def test_host_contact(cls, address, root_pw=None, pkey=None, pkey_pw=None):
command_id = JobSchedulerRpc().test_host_contact(address, root_pw, pkey, pkey_pw)
return Command.objects.get(pk=command_id)
@classmethod
def update_corosync_configuration(cls, corosync_configuration_id, mcast_port, network_interface_ids):
command_id = JobSchedulerRpc().update_corosync_configuration(
corosync_configuration_id, mcast_port, network_interface_ids
)
return Command.objects.get(pk=command_id)
@classmethod
def create_filesystem(cls, fs_data):
return JobSchedulerRpc().create_filesystem(fs_data)
@classmethod
def create_ostpool(cls, pool_data):
return JobSchedulerRpc().create_ostpool(pool_data)
@classmethod
def update_ostpool(cls, pool_data):
return JobSchedulerRpc().update_ostpool(pool_data)
@classmethod
def delete_ostpool(cls, pool):
return JobSchedulerRpc().delete_ostpool(pool)
@classmethod
def create_task(cls, task_data):
return JobSchedulerRpc().create_task(task_data)
@classmethod
def remove_task(cls, task_id):
return JobSchedulerRpc().create_task(task_id)
@classmethod
def update_nids(cls, nid_data):
return JobSchedulerRpc().update_nids(nid_data)
@classmethod
def trigger_plugin_update(cls, include_host_ids, exclude_host_ids, plugin_names):
"""
Cause the plugins on the hosts passed to send an update irrespective of whether any
changes have occurred.
:param include_host_ids: List of host ids to include in the trigger update.
:param exclude_host_ids: List of host ids to exclude from the include list (makes for usage easy)
:param plugin_names: list of plugins to trigger update on - empty list means all.
:return: command id that caused updates to be sent.
"""
assert isinstance(include_host_ids, list)
assert isinstance(exclude_host_ids, list)
assert isinstance(plugin_names, list)
return JobSchedulerRpc().trigger_plugin_update(include_host_ids, exclude_host_ids, plugin_names)
@classmethod
def update_lnet_configuration(cls, lnet_configuration_list):
return JobSchedulerRpc().update_lnet_configuration(lnet_configuration_list)
@classmethod
def create_host(cls, fqdn, nodename, address, server_profile_id):
# The address of a host isn't something we can learn from it (the
# address is specifically how the host is to be reached from the manager
# for outbound connections, not just its FQDN). If during creation we know
# the address, then great, accept it. Else default to FQDN, it's a reasonable guess.
if address is None:
address = fqdn
host_id, command_id = JobSchedulerRpc().create_host(fqdn, nodename, address, server_profile_id)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
@classmethod
def create_targets(cls, targets_data):
from chroma_core.models import ManagedTarget, Command
target_ids, command_id = JobSchedulerRpc().create_targets(targets_data)
return (list(ManagedTarget.objects.filter(id__in=target_ids)), Command.objects.get(pk=command_id))
@classmethod
def create_client_mount(cls, host, filesystem_name, mountpoint):
from chroma_core.models import LustreClientMount
client_mount_id = JobSchedulerRpc().create_client_mount(host.id, filesystem_name, mountpoint)
return LustreClientMount.objects.get(id=client_mount_id)
@classmethod
def create_copytool(cls, copytool_data):
from chroma_core.models import Copytool
copytool_id = JobSchedulerRpc().create_copytool(copytool_data)
return Copytool.objects.get(id=copytool_id)
@classmethod
def register_copytool(cls, copytool_id, uuid):
JobSchedulerRpc().register_copytool(copytool_id, uuid)
@classmethod
def unregister_copytool(cls, copytool_id):
JobSchedulerRpc().unregister_copytool(copytool_id)
@classmethod
def get_locks(cls):
return JobSchedulerRpc().get_locks()
@classmethod
def configure_stratagem(cls, stratagem_data):
return JobSchedulerRpc().configure_stratagem(stratagem_data)
@classmethod
def update_stratagem(cls, stratagem_data):
return JobSchedulerRpc().update_stratagem(stratagem_data)
@classmethod
def run_stratagem(cls, mdts, fs_id, stratagem_data):
return JobSchedulerRpc().run_stratagem(mdts, fs_id, stratagem_data)
|
mit
| -5,882,066,424,023,178,000
| 36.703571
| 133
| 0.666856
| false
| 4.030928
| true
| false
| false
|
fsmMLK/inkscapeMadeEasy
|
examples/iME_Draw_lineStyle_and_markers.py
|
1
|
4006
|
#!/usr/bin/python
import inkex
import inkscapeMadeEasy_Base as inkBase
import inkscapeMadeEasy_Draw as inkDraw
import math
class myExtension(inkBase.inkscapeMadeEasy):
    """Example extension showing how to build custom line styles and
    markers (a dot marker and an ellipsis marker) and draw two polylines
    with them."""

    def __init__(self):
        inkex.Effect.__init__(self)
        # Line color is chosen via the extension dialog (picker + option).
        self.OptionParser.add_option("--myColorPicker", action="store", type="string", dest="lineColorPickerVar", default='0')
        self.OptionParser.add_option("--myColorOption", action="store", type="string", dest="lineColorOptionVar", default='0')

    def effect(self):
        # Anchor the drawing at the viewport centre, rounded up to the
        # next multiple of 10.
        center = self.view_center
        anchor = [int(math.ceil(center[0] / 10.0)) * 10,
                  int(math.ceil(center[1] / 10.0)) * 10]

        # Dot marker: red stroke, 40% gray fill.
        dotMarker = inkDraw.marker.createDotMarker(self,
                                                   nameID='myDot',
                                                   RenameMode=1,  # overwrite any marker with the same name
                                                   scale=0.2,
                                                   strokeColor=inkDraw.color.defined('red'),
                                                   fillColor=inkDraw.color.gray(0.4))

        # Parse the dialog options to obtain the line color.
        chosenColor = inkDraw.color.parseColorPicker(self.options.lineColorOptionVar, self.options.lineColorPickerVar)

        # 2.0 pt line style with the dot marker at start, mid and end.
        dottedStyle = inkDraw.lineStyle.set(lineWidth=2.0,
                                            lineColor=chosenColor,
                                            fillColor=inkDraw.color.defined('blue'),
                                            lineJoin='round',
                                            lineCap='round',
                                            markerStart=dotMarker,
                                            markerMid=dotMarker,
                                            markerEnd=dotMarker,
                                            strokeDashArray=None)

        rootLayer = self.document.getroot()

        # First polyline, drawn with the dotted style.
        # (See inkscapeMadeEasy_Draw.line for details on relCoords.)
        inkDraw.line.relCoords(rootLayer, coordsList=[[0, 100], [100, 0]], offset=anchor, lineStyle=dottedStyle)

        # -- Second line style, using ellipsis markers with default values.
        ellipsisStart, ellipsisEnd = inkDraw.marker.createElipsisMarker(self,
                                                                       nameID='myEllipsis',
                                                                       RenameMode=1)  # overwrite any marker with the same name

        ellipsisStyle = inkDraw.lineStyle.set(lineWidth=1.0,
                                              lineColor=chosenColor,
                                              fillColor=None,
                                              lineJoin='round',
                                              lineCap='round',
                                              markerStart=ellipsisStart,
                                              markerMid=None,
                                              markerEnd=ellipsisEnd,
                                              strokeDashArray=None)

        # Second polyline, shifted 300 units to the right of the first.
        inkDraw.line.relCoords(rootLayer, coordsList=[[0, 100], [100, 0]], offset=[anchor[0] + 300, anchor[1]], lineStyle=ellipsisStyle)
if __name__ == '__main__':
    # Run the extension when invoked as a script by Inkscape.
    myExtension().affect()
|
gpl-3.0
| -872,509,245,772,700,800
| 53.876712
| 148
| 0.496006
| false
| 4.927429
| false
| false
| false
|
wetek-enigma/enigma2
|
lib/python/Screens/ButtonSetup.py
|
1
|
29746
|
from GlobalActions import globalActionMap
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.Button import Button
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.SystemInfo import SystemInfo
from Components.config import config, ConfigSubsection, ConfigText, ConfigYesNo
from Components.PluginComponent import plugins
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Tools.BoundFunction import boundFunction
from ServiceReference import ServiceReference
from enigma import eServiceReference, eActionMap
from Components.Label import Label
import os
def getButtonSetupKeys():
    """Return the list of remappable remote-control keys.

    Each entry is a 3-tuple:
      (translated label, key/config id, default action string)
    The key id doubles as the name of the ConfigText created under
    config.misc.ButtonSetup; an empty default means "no assignment".
    """
    return [(_("Red"), "red", ""),
        (_("Red long"), "red_long", ""),
        (_("Green"), "green", ""),
        (_("Green long"), "green_long", ""),
        (_("Yellow"), "yellow", ""),
        (_("Yellow long"), "yellow_long", ""),
        (_("Blue"), "blue", ""),
        (_("Blue long"), "blue_long", ""),
        (_("Info (EPG)"), "info", "Infobar/InfoPressed/1"),
        (_("Info (EPG) Long"), "info_long", "Infobar/showEventInfoPlugins/1"),
        (_("Epg/Guide"), "epg", "Infobar/EPGPressed/1"),
        (_("Epg/Guide long"), "epg_long", "Infobar/showEventGuidePlugins/1"),
        (_("Left"), "cross_left", ""),
        (_("Right"), "cross_right", ""),
        (_("Up"), "cross_up", ""),
        (_("Down"), "cross_down", ""),
        (_("PageUp"), "pageup", ""),
        (_("PageUp long"), "pageup_long", ""),
        (_("PageDown"), "pagedown", ""),
        (_("PageDown long"), "pagedown_long", ""),
        (_("Channel up"), "channelup", ""),
        (_("Channel down"), "channeldown", ""),
        (_("TV"), "showTv", ""),
        (_("Radio"), "radio", ""),
        (_("Radio long"), "radio_long", ""),
        (_("Rec"), "rec", ""),
        (_("Rec long"), "rec_long", ""),
        (_("Teletext"), "text", ""),
        (_("Help"), "displayHelp", ""),
        (_("Help long"), "displayHelp_long", ""),
        (_("Subtitle"), "subtitle", ""),
        (_("Subtitle long"), "subtitle_long", ""),
        (_("Menu"), "mainMenu", ""),
        (_("List/Fav"), "list", ""),
        (_("List/Fav long"), "list_long", ""),
        (_("PVR"), "pvr", ""),
        (_("PVR long"), "pvr_long", ""),
        (_("Favorites"), "favorites", ""),
        (_("Favorites long"), "favorites_long", ""),
        (_("File"), "file", ""),
        (_("File long"), "file_long", ""),
        (_("OK long"), "ok_long", ""),
        (_("Media"), "media", ""),
        (_("Media long"), "media_long", ""),
        (_("Open"), "open", ""),
        (_("Open long"), "open_long", ""),
        (_("Www"), "www", ""),
        (_("Www long"), "www_long", ""),
        (_("Directory"), "directory", ""),
        (_("Directory long"), "directory_long", ""),
        (_("Back/Recall"), "back", ""),
        (_("Back/Recall") + " " + _("long"), "back_long", ""),
        (_("Home"), "home", ""),
        (_("End"), "end", ""),
        (_("Next"), "next", ""),
        (_("Previous"), "previous", ""),
        (_("Audio"), "audio", ""),
        (_("Play"), "play", ""),
        (_("Playpause"), "playpause", ""),
        (_("Stop"), "stop", ""),
        (_("Pause"), "pause", ""),
        (_("Rewind"), "rewind", ""),
        (_("Fastforward"), "fastforward", ""),
        (_("Skip back"), "skip_back", ""),
        (_("Skip forward"), "skip_forward", ""),
        (_("activatePiP"), "activatePiP", ""),
        (_("Timer"), "timer", ""),
        (_("Playlist"), "playlist", ""),
        (_("Playlist long"), "playlist_long", ""),
        (_("Timeshift"), "timeshift", ""),
        (_("Homepage"), "homep", ""),
        (_("Homepage long"), "homep_long", ""),
        (_("Search/WEB"), "search", ""),
        (_("Search/WEB long"), "search_long", ""),
        (_("Slow"), "slow", ""),
        (_("Mark/Portal/Playlist"), "mark", ""),
        (_("Sleep"), "sleep", ""),
        (_("Sleep long"), "sleep_long", ""),
        (_("Power"), "power", ""),
        (_("Power long"), "power_long", ""),
        (_("HDMIin"), "HDMIin", "Infobar/HDMIIn"),
        # Default for long HDMIin depends on whether the box has an LCD that can show live TV.
        (_("HDMIin") + " " + _("long"), "HDMIin_long", (SystemInfo["LcdLiveTV"] and "Infobar/ToggleLCDLiveTV") or ""),
        (_("Context"), "contextMenu", "Infobar/showExtensionSelection"),
        (_("Context long"), "context_long", ""),
        (_("SAT"), "sat", "Infobar/openSatellites"),
        (_("SAT long"), "sat_long", ""),
        (_("Prov"), "prov", ""),
        (_("Prov long"), "prov_long", ""),
        (_("F1/LAN"), "f1", ""),
        (_("F1/LAN long"), "f1_long", ""),
        (_("F2"), "f2", ""),
        (_("F2 long"), "f2_long", ""),
        (_("F3"), "f3", ""),
        (_("F3 long"), "f3_long", ""),
        (_("F4"), "f4", ""),
        (_("F4 long"), "f4_long", ""),]
# Module-level configuration: one ConfigText per remappable key, pre-set to
# that key's default action string (see getButtonSetupKeys).
config.misc.ButtonSetup = ConfigSubsection()
config.misc.ButtonSetup.additional_keys = ConfigYesNo(default=True)
for x in getButtonSetupKeys():
    # setattr replaces the original string-built `exec`, which executed
    # dynamically assembled source and would break if a default action
    # ever contained a quote character.
    setattr(config.misc.ButtonSetup, x[1], ConfigText(default=x[2]))
def getButtonSetupFunctions():
    """Build the list of functions that can be assigned to a hotkey.

    Returns a list of 3-tuples: (translated label, action string, category).
    The action string is later split on "/" and dispatched by
    InfoBarButtonSetup.execButtonSetup. twinPaths counts plugins sharing
    the same path (path[24:] strips the common plugin directory prefix)
    so duplicates get distinct "/1", "/2", ... suffixes.
    """
    ButtonSetupFunctions = []
    twinPlugins = []
    twinPaths = {}
    # Event-info plugins that do not take a 'selectedevent' argument -> "EPG" category.
    pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
    pluginlist.sort(key=lambda p: p.name)
    for plugin in pluginlist:
        if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
            if twinPaths.has_key(plugin.path[24:]):
                twinPaths[plugin.path[24:]] += 1
            else:
                twinPaths[plugin.path[24:]] = 1
            ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "EPG"))
            twinPlugins.append(plugin.name)
    # Plugins from the plugin menu / extensions menu / event info -> "Plugins" category.
    pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO])
    pluginlist.sort(key=lambda p: p.name)
    for plugin in pluginlist:
        if plugin.name not in twinPlugins and plugin.path:
            if twinPaths.has_key(plugin.path[24:]):
                twinPaths[plugin.path[24:]] += 1
            else:
                twinPaths[plugin.path[24:]] = 1
            ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "Plugins"))
            twinPlugins.append(plugin.name)
    # Built-in InfoBar / EPG actions.
    ButtonSetupFunctions.append((_("Show graphical multi EPG"), "Infobar/openGraphEPG", "EPG"))
    ButtonSetupFunctions.append((_("Main menu"), "Infobar/mainMenu", "InfoBar"))
    ButtonSetupFunctions.append((_("Show help"), "Infobar/showHelp", "InfoBar"))
    ButtonSetupFunctions.append((_("Show extension selection"), "Infobar/showExtensionSelection", "InfoBar"))
    ButtonSetupFunctions.append((_("Zap down"), "Infobar/zapDown", "InfoBar"))
    ButtonSetupFunctions.append((_("Zap up"), "Infobar/zapUp", "InfoBar"))
    ButtonSetupFunctions.append((_("Volume down"), "Infobar/volumeDown", "InfoBar"))
    ButtonSetupFunctions.append((_("Volume up"), "Infobar/volumeUp", "InfoBar"))
    ButtonSetupFunctions.append((_("Show Infobar"), "Infobar/toggleShow", "InfoBar"))
    ButtonSetupFunctions.append((_("Show service list"), "Infobar/openServiceList", "InfoBar"))
    ButtonSetupFunctions.append((_("Show favourites list"), "Infobar/openBouquets", "InfoBar"))
    ButtonSetupFunctions.append((_("Show satellites list"), "Infobar/openSatellites", "InfoBar"))
    ButtonSetupFunctions.append((_("History back"), "Infobar/historyBack", "InfoBar"))
    ButtonSetupFunctions.append((_("History next"), "Infobar/historyNext", "InfoBar"))
    ButtonSetupFunctions.append((_("Show eventinfo plugins"), "Infobar/showEventInfoPlugins", "EPG"))
    ButtonSetupFunctions.append((_("Show event details"), "Infobar/openEventView", "EPG"))
    ButtonSetupFunctions.append((_("Show single service EPG"), "Infobar/openSingleServiceEPG", "EPG"))
    ButtonSetupFunctions.append((_("Show multi channel EPG"), "Infobar/openMultiServiceEPG", "EPG"))
    ButtonSetupFunctions.append((_("Show Audioselection"), "Infobar/audioSelection", "InfoBar"))
    ButtonSetupFunctions.append((_("Enable digital downmix"), "Infobar/audioDownmixOn", "InfoBar"))
    ButtonSetupFunctions.append((_("Disable digital downmix"), "Infobar/audioDownmixOff", "InfoBar"))
    ButtonSetupFunctions.append((_("Switch to radio mode"), "Infobar/showRadio", "InfoBar"))
    ButtonSetupFunctions.append((_("Switch to TV mode"), "Infobar/showTv", "InfoBar"))
    ButtonSetupFunctions.append((_("Show servicelist or movies"), "Infobar/showServiceListOrMovies", "InfoBar"))
    ButtonSetupFunctions.append((_("Show movies"), "Infobar/showMovies", "InfoBar"))
    ButtonSetupFunctions.append((_("Instant record"), "Infobar/instantRecord", "InfoBar"))
    ButtonSetupFunctions.append((_("Start instant recording"), "Infobar/startInstantRecording", "InfoBar"))
    ButtonSetupFunctions.append((_("Activate timeshift End"), "Infobar/activateTimeshiftEnd", "InfoBar"))
    ButtonSetupFunctions.append((_("Activate timeshift end and pause"), "Infobar/activateTimeshiftEndAndPause", "InfoBar"))
    ButtonSetupFunctions.append((_("Start timeshift"), "Infobar/startTimeshift", "InfoBar"))
    ButtonSetupFunctions.append((_("Stop timeshift"), "Infobar/stopTimeshift", "InfoBar"))
    ButtonSetupFunctions.append((_("Start teletext"), "Infobar/startTeletext", "InfoBar"))
    ButtonSetupFunctions.append((_("Show subservice selection"), "Infobar/subserviceSelection", "InfoBar"))
    ButtonSetupFunctions.append((_("Show subtitle selection"), "Infobar/subtitleSelection", "InfoBar"))
    ButtonSetupFunctions.append((_("Show subtitle quick menu"), "Infobar/subtitleQuickMenu", "InfoBar"))
    ButtonSetupFunctions.append((_("Letterbox zoom"), "Infobar/vmodeSelection", "InfoBar"))
    # Picture-in-picture actions, only on hardware that supports PiP.
    if SystemInfo["PIPAvailable"]:
        ButtonSetupFunctions.append((_("Show PIP"), "Infobar/showPiP", "InfoBar"))
        ButtonSetupFunctions.append((_("Swap PIP"), "Infobar/swapPiP", "InfoBar"))
        ButtonSetupFunctions.append((_("Move PIP"), "Infobar/movePiP", "InfoBar"))
        ButtonSetupFunctions.append((_("Toggle PIPzap"), "Infobar/togglePipzap", "InfoBar"))
    ButtonSetupFunctions.append((_("Activate HbbTV (Redbutton)"), "Infobar/activateRedButton", "InfoBar"))
    ButtonSetupFunctions.append((_("Toggle HDMI-In full screen"), "Infobar/HDMIInFull", "InfoBar"))
    ButtonSetupFunctions.append((_("Toggle HDMI-In PiP"), "Infobar/HDMIInPiP", "InfoBar"))
    if SystemInfo["LcdLiveTV"]:
        ButtonSetupFunctions.append((_("Toggle LCD LiveTV"), "Infobar/ToggleLCDLiveTV", "InfoBar"))
    # Setup / scanning screens.
    ButtonSetupFunctions.append((_("Hotkey Setup"), "Module/Screens.ButtonSetup/ButtonSetup", "Setup"))
    ButtonSetupFunctions.append((_("Software update"), "Module/Screens.SoftwareUpdate/UpdatePlugin", "Setup"))
    ButtonSetupFunctions.append((_("CI (Common Interface) Setup"), "Module/Screens.Ci/CiSelection", "Setup"))
    ButtonSetupFunctions.append((_("Tuner Configuration"), "Module/Screens.Satconfig/NimSelection", "Scanning"))
    ButtonSetupFunctions.append((_("Manual Scan"), "Module/Screens.ScanSetup/ScanSetup", "Scanning"))
    ButtonSetupFunctions.append((_("Automatic Scan"), "Module/Screens.ScanSetup/ScanSimple", "Scanning"))
    for plugin in plugins.getPluginsForMenu("scan"):
        ButtonSetupFunctions.append((plugin[0], "MenuPlugin/scan/" + plugin[2], "Scanning"))
    ButtonSetupFunctions.append((_("Network setup"), "Module/Screens.NetworkSetup/NetworkAdapterSelection", "Setup"))
    ButtonSetupFunctions.append((_("Network menu"), "Infobar/showNetworkMounts", "Setup"))
    ButtonSetupFunctions.append((_("Plugin Browser"), "Module/Screens.PluginBrowser/PluginBrowser", "Setup"))
    ButtonSetupFunctions.append((_("Channel Info"), "Module/Screens.ServiceInfo/ServiceInfo", "Setup"))
    ButtonSetupFunctions.append((_("SkinSelector"), "Module/Screens.SkinSelector/SkinSelector", "Setup"))
    ButtonSetupFunctions.append((_("LCD SkinSelector"), "Module/Screens.SkinSelector/LcdSkinSelector", "Setup"))
    ButtonSetupFunctions.append((_("Timer"), "Module/Screens.TimerEdit/TimerEditList", "Setup"))
    ButtonSetupFunctions.append((_("Open AutoTimer"), "Infobar/showAutoTimerList", "Setup"))
    for plugin in plugins.getPluginsForMenu("system"):
        if plugin[2]:
            ButtonSetupFunctions.append((plugin[0], "MenuPlugin/system/" + plugin[2], "Setup"))
    # Power / standby actions.
    ButtonSetupFunctions.append((_("Standby"), "Module/Screens.Standby/Standby", "Power"))
    ButtonSetupFunctions.append((_("Restart"), "Module/Screens.Standby/TryQuitMainloop/2", "Power"))
    ButtonSetupFunctions.append((_("Restart enigma"), "Module/Screens.Standby/TryQuitMainloop/3", "Power"))
    ButtonSetupFunctions.append((_("Deep standby"), "Module/Screens.Standby/TryQuitMainloop/1", "Power"))
    ButtonSetupFunctions.append((_("SleepTimer"), "Module/Screens.SleepTimerEdit/SleepTimerEdit", "Power"))
    ButtonSetupFunctions.append((_("PowerTimer"), "Module/Screens.PowerTimerEdit/PowerTimerEditList", "Power"))
    ButtonSetupFunctions.append((_("Usage Setup"), "Setup/usage", "Setup"))
    ButtonSetupFunctions.append((_("User interface settings"), "Setup/userinterface", "Setup"))
    ButtonSetupFunctions.append((_("Recording Setup"), "Setup/recording", "Setup"))
    ButtonSetupFunctions.append((_("Harddisk Setup"), "Setup/harddisk", "Setup"))
    ButtonSetupFunctions.append((_("Subtitles Settings"), "Setup/subtitlesetup", "Setup"))
    ButtonSetupFunctions.append((_("Language"), "Module/Screens.LanguageSelection/LanguageSelection", "Setup"))
    ButtonSetupFunctions.append((_("OscamInfo Mainmenu"), "Module/Screens.OScamInfo/OscamInfoMenu", "Plugins"))
    ButtonSetupFunctions.append((_("CCcamInfo Mainmenu"), "Module/Screens.CCcamInfo/CCcamInfoMain", "Plugins"))
    ButtonSetupFunctions.append((_("Movieplayer"), "Module/Screens.MovieSelection/MovieSelection", "Plugins"))
    # User-provided PPanel XML files and shell scripts, if present.
    if os.path.isdir("/etc/ppanels"):
        for x in [x for x in os.listdir("/etc/ppanels") if x.endswith(".xml")]:
            x = x[:-4]
            ButtonSetupFunctions.append((_("PPanel") + " " + x, "PPanel/" + x, "PPanels"))
    if os.path.isdir("/usr/script"):
        for x in [x for x in os.listdir("/usr/script") if x.endswith(".sh")]:
            x = x[:-3]
            ButtonSetupFunctions.append((_("Shellscript") + " " + x, "Shellscript/" + x, "Shellscripts"))
    # Optional plugins detected by their compiled files on disk.
    if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
        ButtonSetupFunctions.append((_("ScriptRunner"), "ScriptRunner/", "Plugins"))
    if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
        ButtonSetupFunctions.append((_("QuickMenu"), "QuickMenu/", "Plugins"))
    if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
        ButtonSetupFunctions.append((_("Kodi MediaCenter"), "Kodi/", "Plugins"))
    return ButtonSetupFunctions
class ButtonSetup(Screen):
    """Hotkey overview screen.

    Shows every remappable key (see getButtonSetupKeys). Pressing a key on
    the remote highlights it and opens ButtonSetupSelect for it; the
    "choosen" list previews the functions currently assigned to the
    highlighted key.
    """
    def __init__(self, session, args=None):
        Screen.__init__(self, session)
        self['description'] = Label(_('Click on your remote on the button you want to change'))
        self.session = session
        self.setTitle(_("Hotkey Setup"))
        self["key_red"] = Button(_("Exit"))
        self.list = []
        self.ButtonSetupKeys = getButtonSetupKeys()
        self.ButtonSetupFunctions = getButtonSetupFunctions()
        for x in self.ButtonSetupKeys:
            self.list.append(ChoiceEntryComponent('',(_(x[0]), x[1])))
        # Only the first 10 keys are offered unless "additional keys" is enabled.
        self["list"] = ChoiceList(list=self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10], selection = 0)
        self["choosen"] = ChoiceList(list=[])
        self.getFunctions()
        self["actions"] = ActionMap(["OkCancelActions"],
        {
            "cancel": self.close,
        }, -1)
        # Route every configurable key press to ButtonSetupGlobal while this screen is open.
        self["ButtonSetupButtonActions"] = ButtonSetupActionMap(["ButtonSetupActions"], dict((x[1], self.ButtonSetupGlobal) for x in self.ButtonSetupKeys))
        self.longkeyPressed = False
        self.onLayoutFinish.append(self.__layoutFinished)
        self.onExecBegin.append(self.getFunctions)
        self.onShown.append(self.disableKeyMap)
        self.onClose.append(self.enableKeyMap)

    def __layoutFinished(self):
        self["choosen"].selectionEnabled(0)

    def disableKeyMap(self):
        # Suspend the global action map and the native listbox navigation keys
        # so raw key presses reach ButtonSetupGlobal instead of moving the list.
        globalActionMap.setEnabled(False)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)

    def enableKeyMap(self):
        # Restore the global action map and the default listbox key bindings
        # (key codes 103/108/105/106 are up/down/pageup/pagedown).
        globalActionMap.setEnabled(True)
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")

    def ButtonSetupGlobal(self, key):
        # Called for each configurable key press: jump to the pressed key in
        # the list and open the assignment screen for it. longkeyPressed
        # suppresses the short-press event that follows a long press.
        if self.longkeyPressed:
            self.longkeyPressed = False
        else:
            index = 0
            for x in self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10]:
                if key == x[0][1]:
                    self["list"].moveToIndex(index)
                    if key.endswith("_long"):
                        self.longkeyPressed = True
                    break
                index += 1
            self.getFunctions()
            self.session.open(ButtonSetupSelect, self["list"].l.getCurrentSelection())

    def getFunctions(self):
        # Refresh the "choosen" preview with the functions currently assigned
        # (comma-separated action strings in config) to the selected key.
        key = self["list"].l.getCurrentSelection()[0][1]
        if key:
            selected = []
            # NOTE(review): eval is used to build the config attribute lookup dynamically.
            for x in eval("config.misc.ButtonSetup." + key + ".value.split(',')"):
                function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
                if function:
                    selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
            self["choosen"].setList(selected)
class ButtonSetupSelect(Screen):
    """Assignment screen for one hotkey.

    Left pane ("list"): all assignable functions grouped by expandable
    category. Right pane ("choosen"): functions already assigned to the
    key, in execution order. CH+/- toggles between the panes, OK
    (de)assigns, Next/Previous reorders, green saves to config.
    """
    def __init__(self, session, key, args=None):
        Screen.__init__(self, session)
        self.skinName="ButtonSetupSelect"
        self['description'] = Label(_('Select the desired function and click on "OK" to assign it. Use "CH+/-" to toggle between the lists. Select an assigned function and click on "OK" to de-assign it. Use "Next/Previous" to change the order of the assigned functions.'))
        self.session = session
        # key is a ChoiceEntryComponent tuple: key[0][0] is the label, key[0][1] the config id.
        self.key = key
        self.setTitle(_("Hotkey Setup for") + ": " + key[0][0])
        self["key_red"] = Button(_("Cancel"))
        self["key_green"] = Button(_("Save"))
        # mode is "list" (function catalogue) or "choosen" (assigned functions).
        self.mode = "list"
        self.ButtonSetupFunctions = getButtonSetupFunctions()
        # NOTE(review): eval builds the config attribute lookup dynamically.
        self.config = eval("config.misc.ButtonSetup." + key[0][1])
        self.expanded = []
        self.selected = []
        for x in self.config.value.split(','):
            function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
            if function:
                self.selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
        # Snapshot to detect unsaved changes on cancel.
        self.prevselected = self.selected[:]
        self["choosen"] = ChoiceList(list=self.selected, selection=0)
        self["list"] = ChoiceList(list=self.getFunctionList(), selection=0)
        self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "KeyboardInputActions"],
        {
            "ok": self.keyOk,
            "cancel": self.cancel,
            "red": self.cancel,
            "green": self.save,
            "up": self.keyUp,
            "down": self.keyDown,
            "left": self.keyLeft,
            "right": self.keyRight,
            "pageUp": self.toggleMode,
            "pageDown": self.toggleMode,
            "shiftUp": self.moveUp,
            "shiftDown": self.moveDown,
        }, -1)
        self.onShown.append(self.enableKeyMap)
        self.onClose.append(self.disableKeyMap)
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        self["choosen"].selectionEnabled(0)

    def disableKeyMap(self):
        # Suspend the global action map and the native listbox navigation keys.
        globalActionMap.setEnabled(False)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)

    def enableKeyMap(self):
        # Restore the global action map and the default listbox key bindings.
        globalActionMap.setEnabled(True)
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")

    def getFunctionList(self):
        # Build the left pane: one "Expander" row per category, followed by
        # its functions when the category is in self.expanded.
        functionslist = []
        catagories = {}
        for function in self.ButtonSetupFunctions:
            if not catagories.has_key(function[2]):
                catagories[function[2]] = []
            catagories[function[2]].append(function)
        for catagorie in sorted(list(catagories)):
            if catagorie in self.expanded:
                functionslist.append(ChoiceEntryComponent('expanded',((catagorie), "Expander")))
                for function in catagories[catagorie]:
                    functionslist.append(ChoiceEntryComponent('verticalline',((function[0]), function[1])))
            else:
                functionslist.append(ChoiceEntryComponent('expandable',((catagorie), "Expander")))
        return functionslist

    def toggleMode(self):
        # Switch focus between the two panes (only possible towards
        # "choosen" when something is assigned).
        if self.mode == "list" and self.selected:
            self.mode = "choosen"
            self["choosen"].selectionEnabled(1)
            self["list"].selectionEnabled(0)
        elif self.mode == "choosen":
            self.mode = "list"
            self["choosen"].selectionEnabled(0)
            self["list"].selectionEnabled(1)

    def keyOk(self):
        # In "list" mode: expand/collapse a category or toggle assignment of
        # a function. In "choosen" mode: de-assign the selected function.
        if self.mode == "list":
            currentSelected = self["list"].l.getCurrentSelection()
            if currentSelected[0][1] == "Expander":
                if currentSelected[0][0] in self.expanded:
                    self.expanded.remove(currentSelected[0][0])
                else:
                    self.expanded.append(currentSelected[0][0])
                self["list"].setList(self.getFunctionList())
            else:
                if currentSelected[:2] in self.selected:
                    self.selected.remove(currentSelected[:2])
                else:
                    self.selected.append(currentSelected[:2])
        elif self.selected:
            self.selected.remove(self["choosen"].l.getCurrentSelection())
            if not self.selected:
                self.toggleMode()
        self["choosen"].setList(self.selected)

    def keyLeft(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.pageUp)

    def keyRight(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.pageDown)

    def keyUp(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.moveUp)

    def keyDown(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.moveDown)

    def moveUp(self):
        self.moveChoosen(self.keyUp)

    def moveDown(self):
        self.moveChoosen(self.keyDown)

    def moveChoosen(self, direction):
        # Swap the selected "choosen" entry with its neighbour in the given
        # direction (wraps around), then move the cursor along with it.
        if self.mode == "choosen":
            currentIndex = self["choosen"].getSelectionIndex()
            swapIndex = (currentIndex + (direction == self.keyDown and 1 or -1)) % len(self["choosen"].list)
            self["choosen"].list[currentIndex], self["choosen"].list[swapIndex] = self["choosen"].list[swapIndex], self["choosen"].list[currentIndex]
            self["choosen"].setList(self["choosen"].list)
            direction()
        else:
            return 0

    def save(self):
        # Persist the assigned functions as a comma-separated action string.
        configValue = []
        for x in self.selected:
            configValue.append(x[0][1])
        self.config.value = ",".join(configValue)
        self.config.save()
        self.close()

    def cancel(self):
        # Ask for confirmation only when there are unsaved changes.
        if self.selected != self.prevselected:
            self.session.openWithCallback(self.cancelCallback, MessageBox, _("Are you sure to cancel all changes"), default=False)
        else:
            self.close()

    def cancelCallback(self, answer):
        answer and self.close()
class ButtonSetupActionMap(ActionMap):
    """ActionMap that routes configured hotkey actions to their bound
    handler, passing the action name through; everything else falls back
    to the normal ActionMap dispatch."""

    def action(self, contexts, action):
        hotkey_ids = tuple(x[1] for x in getButtonSetupKeys())
        if (action in hotkey_ids and self.actions.has_key(action)):
            res = self.actions[action](action)
            return res if res is not None else 1
        else:
            return ActionMap.action(self, contexts, action)
class helpableButtonSetupActionMap(HelpableActionMap):
    """HelpableActionMap variant of ButtonSetupActionMap: dispatches
    configured hotkey actions to their handler (with the action name as
    argument) and defers everything else to ActionMap."""

    def action(self, contexts, action):
        hotkey_ids = tuple(x[1] for x in getButtonSetupKeys())
        if (action in hotkey_ids and self.actions.has_key(action)):
            res = self.actions[action](action)
            return res if res is not None else 1
        else:
            return ActionMap.action(self, contexts, action)
class InfoBarButtonSetup():
    """Screen mixin that wires the user-configurable hotkeys ("ButtonSetup")
    into an InfoBar-style screen.

    Each configurable key is routed to ButtonSetupGlobal(), which resolves the
    key's configured action list and either executes the single action or
    offers a ChoiceBox when several actions are bound to the same key.

    NOTE(review): Python 2 code (string ``exec``, ``print`` statements,
    ``dict.has_key``); assumes enigma2 globals such as ``config``,
    ``plugins`` and ``eServiceReference`` are in scope.
    """
    def __init__(self):
        self.ButtonSetupKeys = getButtonSetupKeys()
        # One helpable action per configurable key; the help text is resolved
        # lazily via boundFunction so it reflects the current configuration.
        self["ButtonSetupButtonActions"] = helpableButtonSetupActionMap(self, "ButtonSetupActions",
            dict((x[1],(self.ButtonSetupGlobal, boundFunction(self.getHelpText, x[1]))) for x in self.ButtonSetupKeys), -10)
        # Set while a "_long" key fires so the matching short-press event is
        # swallowed (see ButtonSetupGlobal).
        self.longkeyPressed = False
        self.onExecEnd.append(self.clearLongkeyPressed)

    def clearLongkeyPressed(self):
        # Reset the long-press latch whenever this screen stops executing.
        self.longkeyPressed = False

    def getKeyFunctions(self, key):
        """Return the list of (label, action-string) pairs configured for *key*.

        Returns False for media keys while a movie or timeshift is active so
        they keep their normal playback meaning; returns an empty list when
        nothing is configured.
        """
        if key in ("play", "playpause", "Stop", "stop", "pause", "rewind", "next", "previous", "fastforward", "skip_back", "skip_forward") and (self.__class__.__name__ == "MoviePlayer" or hasattr(self, "timeshiftActivated") and self.timeshiftActivated()):
            return False
        # NOTE(review): eval() builds the config attribute path dynamically;
        # *key* comes from the trusted ButtonSetupKeys table, not user input.
        selection = eval("config.misc.ButtonSetup." + key + ".value.split(',')")
        selected = []
        for x in selection:
            if x.startswith("ZapPanic"):
                # Action string format: "ZapPanic/<service reference>".
                selected.append(((_("Panic to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
            elif x.startswith("Zap"):
                selected.append(((_("Zap to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
            else:
                # Look up the canonical (label, id, category) entry.
                function = list(function for function in getButtonSetupFunctions() if function[1] == x )
                if function:
                    selected.append(function[0])
        return selected

    def getHelpText(self, key):
        """Help-overlay text for *key*: the action label, or the key's own
        name when several actions are bound."""
        selected = self.getKeyFunctions(key)
        if not selected:
            return
        if len(selected) == 1:
            return selected[0][0]
        else:
            return _("ButtonSetup") + " " + tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]

    def ButtonSetupGlobal(self, key):
        """Entry point for every configured key press."""
        if self.longkeyPressed:
            # The long-press variant already fired; swallow the short-press
            # event generated on key release.
            self.longkeyPressed = False
        else:
            selected = self.getKeyFunctions(key)
            if not selected:
                return 0
            elif len(selected) == 1:
                if key.endswith("_long"):
                    self.longkeyPressed = True
                return self.execButtonSetup(selected[0])
            else:
                # Several actions on one key: let the user pick.
                key = tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
                self.session.openWithCallback(self.execButtonSetup, ChoiceBox, (_("Hotkey")) + " " + key, selected)

    def execButtonSetup(self, selected):
        """Execute one resolved action.

        *selected* is a (label, action-string) pair; the action string is a
        '/'-separated path whose first element selects the action family.
        """
        if selected:
            selected = selected[1].split("/")
            if selected[0] == "Plugins":
                # Disambiguate same-named plugins by counting occurrences of
                # each path suffix (path[24:] strips the common install prefix)
                # and matching "<path>/<ordinal>" against the stored action.
                twinPlugins = []
                twinPaths = {}
                pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
                pluginlist.sort(key=lambda p: p.name)
                for plugin in pluginlist:
                    if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
                        if twinPaths.has_key(plugin.path[24:]):
                            twinPaths[plugin.path[24:]] += 1
                        else:
                            twinPaths[plugin.path[24:]] = 1
                        if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
                            self.runPlugin(plugin)
                            return
                        twinPlugins.append(plugin.name)
                # Fall back to plugin-menu / extensions-menu plugins.
                pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU])
                pluginlist.sort(key=lambda p: p.name)
                for plugin in pluginlist:
                    if plugin.name not in twinPlugins and plugin.path:
                        if twinPaths.has_key(plugin.path[24:]):
                            twinPaths[plugin.path[24:]] += 1
                        else:
                            twinPaths[plugin.path[24:]] = 1
                        if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
                            self.runPlugin(plugin)
                            return
                        twinPlugins.append(plugin.name)
            elif selected[0] == "MenuPlugin":
                for plugin in plugins.getPluginsForMenu(selected[1]):
                    if plugin[2] == selected[2]:
                        self.runPlugin(plugin[1])
                        return
            elif selected[0] == "Infobar":
                # Call a method of this InfoBar by name, e.g. "Infobar/showTv".
                if hasattr(self, selected[1]):
                    exec "self." + ".".join(selected[1:]) + "()"
                else:
                    return 0
            elif selected[0] == "Module":
                # "Module/<module>/<ScreenClass>[,args]" — import and open.
                try:
                    exec "from " + selected[1] + " import *"
                    exec "self.session.open(" + ",".join(selected[2:]) + ")"
                except:
                    print "[ButtonSetup] error during executing module %s, screen %s" % (selected[1], selected[2])
            elif selected[0] == "Setup":
                exec "from Screens.Setup import *"
                exec "self.session.open(Setup, \"" + selected[1] + "\")"
            elif selected[0].startswith("Zap"):
                if selected[0] == "ZapPanic":
                    # Panic zap: wipe the zap history and close any PiP first.
                    self.servicelist.history = []
                    self.pipShown() and self.showPiP()
                self.servicelist.servicelist.setCurrent(eServiceReference("/".join(selected[1:])))
                self.servicelist.zap(enable_pipzap = True)
                if hasattr(self, "lastservice"):
                    self.lastservice = eServiceReference("/".join(selected[1:]))
                    self.close()
                else:
                    self.show()
                from Screens.MovieSelection import defaultMoviePath
                moviepath = defaultMoviePath()
                if moviepath:
                    config.movielist.last_videodir.value = moviepath
            elif selected[0] == "PPanel":
                ppanelFileName = '/etc/ppanels/' + selected[1] + ".xml"
                if os.path.isfile(ppanelFileName) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
                    from Plugins.Extensions.PPanel.ppanel import PPanel
                    self.session.open(PPanel, name=selected[1] + ' PPanel', node=None, filename=ppanelFileName, deletenode=None)
            elif selected[0] == "Shellscript":
                command = '/usr/script/' + selected[1] + ".sh"
                if os.path.isfile(command) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
                    # Prefer the PPanel Execute screen when available ...
                    from Plugins.Extensions.PPanel.ppanel import Execute
                    self.session.open(Execute, selected[1] + " shellscript", None, command)
                else:
                    # ... otherwise run the script inside a plain Console.
                    from Screens.Console import Console
                    exec "self.session.open(Console,_(selected[1]),[command])"
            elif selected[0] == "EMC":
                try:
                    from Plugins.Extensions.EnhancedMovieCenter.plugin import showMoviesNew
                    from Screens.InfoBar import InfoBar
                    open(showMoviesNew(InfoBar.instance))
                except Exception as e:
                    print('[EMCPlayer] showMovies exception:\n' + str(e))
            elif selected[0] == "ScriptRunner":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
                    from Plugins.Extensions.Infopanel.ScriptRunner import ScriptRunner
                    self.session.open (ScriptRunner)
            elif selected[0] == "QuickMenu":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
                    from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
                    self.session.open (QuickMenu)
            elif selected[0] == "Kodi":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
                    from Plugins.Extensions.Kodi.plugin import KodiMainScreen
                    self.session.open(KodiMainScreen)

    def showServiceListOrMovies(self):
        # Open whichever navigation view the current screen supports.
        if hasattr(self, "openServiceList"):
            self.openServiceList()
        elif hasattr(self, "showMovies"):
            self.showMovies()

    def ToggleLCDLiveTV(self):
        # Flip the "show TV on front LCD" configuration value.
        config.lcd.showTv.value = not config.lcd.showTv.value
|
gpl-2.0
| 6,284,683,870,160,974,000
| 45.917981
| 266
| 0.686042
| false
| 3.254486
| true
| false
| false
|
googleapis/googleapis-gen
|
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/errors/types/feed_attribute_reference_error.py
|
1
|
1293
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Proto-plus module descriptor: registers this file's messages under the
# google.ads.googleads.v7.errors package so they (de)serialize correctly.
__protobuf__ = proto.module(
    package='google.ads.googleads.v7.errors',
    marshal='google.ads.googleads.v7',
    manifest={
        'FeedAttributeReferenceErrorEnum',
    },
)
class FeedAttributeReferenceErrorEnum(proto.Message):
    r"""Container for enum describing possible feed attribute
    reference errors.
    """
    class FeedAttributeReferenceError(proto.Enum):
        r"""Enum describing possible feed attribute reference errors."""
        # Values mirror the Google Ads API error codes for this category;
        # UNSPECIFIED/UNKNOWN are the standard sentinel values.
        UNSPECIFIED = 0
        UNKNOWN = 1
        CANNOT_REFERENCE_REMOVED_FEED = 2
        INVALID_FEED_NAME = 3
        INVALID_FEED_ATTRIBUTE_NAME = 4
__all__ = tuple(sorted(__protobuf__.manifest))
|
apache-2.0
| -425,834,407,816,632,600
| 30.536585
| 74
| 0.702243
| false
| 4.13099
| false
| false
| false
|
examachine/pisi
|
pisi/exml/xmlfilepiks.py
|
1
|
2519
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# Author: Eray Ozkural <eray@pardus.org.tr>
"""
XmlFile class further abstracts a dom object using the
high-level dom functions provided in xmlext module (and sorely lacking
in xml.dom :( )
function names are mixedCase for compatibility with minidom,
an 'old library'
this implementation uses piksemel
"""
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
import codecs
import exceptions
import piksemel as iks
import pisi
from pisi.file import File
from pisi.util import join_path as join
class Error(pisi.Error):
    """Errors raised by this module (e.g. invalid XML), rooted at pisi.Error."""
    pass
class XmlFile(object):
    """A class to help reading and writing an XML file.

    Wraps a piksemel document; downloads via pisi.file.File when reading
    from a URI.  NOTE(review): Python 2 syntax (``except Exception, e``).
    """
    def __init__(self, tag):
        # Name of the document's root element, used by newDocument().
        self.rootTag = tag

    def newDocument(self):
        """clear DOM"""
        self.doc = iks.newDocument(self.rootTag)

    def unlink(self):
        """deallocate DOM structure"""
        del self.doc

    def rootNode(self):
        """returns root document element"""
        return self.doc

    def readxmlfile(self, file):
        # NOTE(review): deliberately disabled — everything after the raise is
        # dead code, and its except clause references 'localpath', which is
        # not defined in this scope.  Use readxml() instead.
        raise Exception("not implemented")
        try:
            self.doc = iks.parse(file)
            return self.doc
        except Exception, e:
            raise Error(_("File '%s' has invalid XML") % (localpath) )

    def readxml(self, uri, tmpDir='/tmp', sha1sum=False,
                compress=None, sign=None, copylocal = False):
        """Download *uri* (any scheme File.download supports) and parse it.

        Returns the parsed piksemel document; raises Error on invalid XML.
        """
        uri = File.make_uri(uri)
        #try:
        localpath = File.download(uri, tmpDir, sha1sum=sha1sum,
                                  compress=compress,sign=sign, copylocal=copylocal)
        #except IOError, e:
        #    raise Error(_("Cannot read URI %s: %s") % (uri, unicode(e)) )
        try:
            self.doc = iks.parse(localpath)
            return self.doc
        except Exception, e:
            raise Error(_("File '%s' has invalid XML") % (localpath) )

    def writexml(self, uri, tmpDir = '/tmp', sha1sum=False, compress=None, sign=None):
        """Pretty-print the document to *uri* via pisi.file.File."""
        f = File(uri, File.write, sha1sum=sha1sum, compress=compress, sign=sign)
        f.write(self.doc.toPrettyString())
        f.close()

    def writexmlfile(self, f):
        """Pretty-print the document to an already-open file object."""
        f.write(self.doc.toPrettyString())
|
gpl-3.0
| -4,301,337,258,087,709,000
| 27.303371
| 86
| 0.628821
| false
| 3.645441
| false
| false
| false
|
csm0042/rpihome_v3
|
rpihome_v3/schedule_service/service_main.py
|
1
|
6835
|
#!/usr/bin/python3
""" service_main.py:
"""
# Import Required Libraries (Standard, Third Party, Local) ********************
import asyncio
import datetime
import logging
if __name__ == "__main__":
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from rpihome_v3.occupancy_service.msg_processing import create_heartbeat_msg
from rpihome_v3.occupancy_service.msg_processing import process_heartbeat_msg
from rpihome_v3.schedule_service.msg_processing import process_get_device_scheduled_state_msg
# Authorship Info *************************************************************
__author__ = "Christopher Maue"
__copyright__ = "Copyright 2017, The RPi-Home Project"
__credits__ = ["Christopher Maue"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Christopher Maue"
__email__ = "csmaue@gmail.com"
__status__ = "Development"
# Internal Service Work Task **************************************************
class MainTask(object):
    """Main worker for the schedule service.

    Polls an incoming message queue, dispatches heartbeat and
    get-device-scheduled-state messages to the msg_processing helpers, and
    periodically queues outgoing heartbeats to the automation service.
    """
    def __init__(self, log, **kwargs):
        # Configure logger
        self.log = log or logging.getLogger(__name__)
        # Define instance variables
        self.ref_num = None            # message reference-number generator
        self.msg_in_queue = None       # asyncio queue of incoming messages
        self.msg_out_queue = None      # asyncio queue of outgoing messages
        self.schedule = []
        self.service_addresses = []
        self.message_types = []
        self.last_check_hb = datetime.datetime.now()
        # Scratch state reused across loop iterations (kept on self).
        self.out_msg = str()
        self.out_msg_list = []
        self.next_msg = str()
        self.next_msg_split = []
        self.msg_source_addr = str()
        self.msg_type = str()
        self.destinations = []
        # Map input variables
        if kwargs is not None:
            for key, value in kwargs.items():
                if key == "ref":
                    self.ref_num = value
                    self.log.debug('Ref number generator set during __init__ '
                                   'to: %s', self.ref_num)
                if key == "schedule":
                    self.schedule = value
                    self.log.debug('Schedule set during __init__ '
                                   'to: %s', self.schedule)
                if key == "msg_in_queue":
                    self.msg_in_queue = value
                    self.log.debug('Message in queue set during __init__ '
                                   'to: %s', self.msg_in_queue)
                if key == "msg_out_queue":
                    self.msg_out_queue = value
                    self.log.debug('Message out queue set during __init__ '
                                   'to: %s', self.msg_out_queue)
                if key == "service_addresses":
                    self.service_addresses = value
                    self.log.debug('Service address list set during __init__ '
                                   'to: %s', self.service_addresses)
                if key == "message_types":
                    self.message_types = value
                    self.log.debug('Message type list set during __init__ '
                                   'to: %s', self.message_types)

    @asyncio.coroutine
    def run(self):
        """ task to handle the work the service is intended to do """
        self.log.info('Starting schedule service main task')

        while True:
            # Initialize result list
            self.out_msg_list = []

            # INCOMING MESSAGE HANDLING
            if self.msg_in_queue.qsize() > 0:
                self.log.debug('Getting Incoming message from queue')
                self.next_msg = self.msg_in_queue.get_nowait()
                self.log.debug('Message pulled from queue: [%s]', self.next_msg)

                # Determine message type
                # Messages are comma-separated; field 1 is the source address
                # and field 5 the message type.
                self.next_msg_split = self.next_msg.split(',')
                if len(self.next_msg_split) >= 6:
                    self.log.debug('Extracting source address and message type')
                    self.msg_source_addr = self.next_msg_split[1]
                    self.msg_type = self.next_msg_split[5]
                    self.log.debug('Source Address: %s', self.msg_source_addr)
                    self.log.debug('Message Type: %s', self.msg_type)

                # Service Check (heartbeat)
                if self.msg_type == self.message_types['heartbeat']:
                    self.log.debug('Message is a heartbeat')
                    self.out_msg_list = process_heartbeat_msg(
                        self.log,
                        self.ref_num,
                        self.next_msg,
                        self.message_types)

                # Device scheduled command checks
                if self.msg_type == self.message_types['get_device_scheduled_state']:
                    self.log.debug('Message is a get device scheduled state message')
                    self.out_msg_list = process_get_device_scheduled_state_msg(
                        self.log,
                        self.ref_num,
                        self.schedule,
                        self.next_msg,
                        self.message_types)

                # Que up response messages in outgoing msg que
                if len(self.out_msg_list) > 0:
                    self.log.debug('Queueing response message(s)')
                    for self.out_msg in self.out_msg_list:
                        self.msg_out_queue.put_nowait(self.out_msg)
                        self.log.debug('Message [%s] successfully queued', self.out_msg)

            # PERIODIC TASKS
            # Periodically send heartbeats to other services
            if datetime.datetime.now() >= (self.last_check_hb + datetime.timedelta(seconds=120)):
                self.destinations = [
                    (self.service_addresses['automation_addr'],
                     self.service_addresses['automation_port'])
                ]
                self.out_msg_list = create_heartbeat_msg(
                    self.log,
                    self.ref_num,
                    self.destinations,
                    self.service_addresses['schedule_addr'],
                    self.service_addresses['schedule_port'],
                    self.message_types)

                # Que up response messages in outgoing msg que
                if len(self.out_msg_list) > 0:
                    self.log.debug('Queueing response message(s)')
                    for self.out_msg in self.out_msg_list:
                        self.msg_out_queue.put_nowait(self.out_msg)
                        self.log.debug('Response message [%s] successfully queued',
                                       self.out_msg)

                # Update last-check
                self.last_check_hb = datetime.datetime.now()

            # Yield to other tasks for a while
            yield from asyncio.sleep(0.25)
|
gpl-3.0
| -8,828,960,547,473,434,000
| 42.814103
| 97
| 0.501244
| false
| 4.353503
| false
| false
| false
|
Ledoux/ShareYourSystem
|
Pythonlogy/ShareYourSystem/Standards/Classors/Switcher/Drafts/__init__ copy.py
|
1
|
8024
|
#<ImportSpecificModules>
import operator
,Doer,Representer
from ShareYourSystem.Functers import Functer,Triggerer,Hooker
BaseModuleStr="ShareYourSystem.Functers.Functer"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer")
#</ImportSpecificModules>
#<DefineLocals>
SYS.setSubModule(globals())
SwitchingBeforeStr='Before'
SwitchingAfterStr='After'
SwitchingBindStr='bind'
#</DefineLocals>
#<DefineClass>
@DecorationClass()
class SwitcherClass(BaseClass):
    """Functer that wraps a "switching" function so it runs at most once per
    instance, tracked by a per-instance boolean attribute.

    NOTE(review): this is a draft file — the module-level imports above this
    class are syntactically broken, and parts of the implementation (the
    bindAfter section) are commented out with triple-quoted strings.
    Documented as-is; behavior claims below are hedged accordingly.
    """

    def default_init(self,**_KwargVariablesDict):

        #<DefineSpecificDo>
        self.SwitchingFunction=None #<NotRepresented>
        self.SwitchedFunction=None #<NotRepresented>
        self.SwitchedFunctionStr="" #<NotRepresented>
        self.SwitchedBoolSuffixStr="" #<NotRepresented>
        self.SwitchedClassBoolKeyStr="" #<NotRepresented>
        self.SwitchedInstanceBoolKeyStr="" #<NotRepresented>
        #</DefineSpecificDo>

        #Call the parent init method
        BaseClass.__init__(self,**_KwargVariablesDict)

    def __call__(self,_Variable):

        #Switch
        self.switch(_Variable)

        #Link
        self.FunctedFunction=self.SwitchedFunction

        #Call the call of the parent class
        return BaseClass.__call__(self,self.SwitchingFunction)

    def switch(self,_Variable=None):
        """Build self.SwitchedFunction, a wrapper around the switching
        function that short-circuits once the instance's switch bool is set."""

        #set the switching Function
        if self.SwitchingFunction==None:
            self.SwitchingFunction=_Variable

        #set the SwitchedFunctionStr this is the functing function..and we remove all the tagged Functer@
        self.SwitchedFunctionStr=self.SwitchingFunction.__name__.split(Functer.FunctingDecorationStr)[-1]

        #debug
        self.debug(('self.',self,['SwitchedFunctionStr']))

        #Cut the pre attributing part if there is one
        if Functer.FunctingAttributeStr in self.SwitchedFunctionStr:
            self.SwitchedFunctionStr=self.SwitchedFunctionStr.split(Functer.FunctingAttributeStr)[-1]

        #self.SwitchedDoneFunctionStr=Doer.getDoneStrWithDoStr(self.SwitchedFunctionStr)

        #SwitchedBoolSuffixStr=self.SwitchedDoneFunctionStr[0].upper()+self.SwitchedDoneFunctionStr[1:]
        # Derived attribute names, e.g. "Switching<Func>Bool" on instances
        # and "SwitchedClass<Func>Bool" on the class.
        self.SwitchedBoolSuffixStr=self.SwitchedFunctionStr[0].upper()+self.SwitchedFunctionStr[1:]+'Bool'
        self.SwitchedInstanceBoolKeyStr='Switching'+self.SwitchedBoolSuffixStr
        #self.SwitchedInstanceBoolKeyStr='SwitchedInstance'+self.SwitchedBoolSuffixStr
        self.SwitchedClassBoolKeyStr='SwitchedClass'+self.SwitchedBoolSuffixStr

        #debug
        self.debug(('self.',self,['SwitchedInstanceBoolKeyStr','SwitchedClassBoolKeyStr']))

        #Definition the SwitchedFunction
        def SwitchedFunction(*_LiargVariablesList,**_KwargVariablesDict):

            #Alias
            InstanceVariable=_LiargVariablesList[0]

            #Append for debbuging
            #if hasattr(InstanceVariable,'DebuggingNotFrameFunctionStrsList'):
            #    if 'SwitchedFunction' not in InstanceVariable.DebuggingNotFrameFunctionStrsList:
            #        InstanceVariable.DebuggingNotFrameFunctionStrsList.append('SwitchedFunction')

            #debug
            '''
            self.debug(
                        [
                            ('self.',self,['SwitchedClassBoolKeyStr','SwitchedInstanceBoolKeyStr']),
                            Representer.represent(InstanceVariable,**{'RepresentingAlineaIsBool':False})
                        ]
                    )
            '''

            #set the SwitchedBool if it was not already
            if hasattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr)==False:

                #debug
                '''
                self.debug('The InstanceVariable has not the SwitchedBoolSuffixStr..so set it to False')
                '''

                #set
                InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,False)

            elif getattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr):

                #debug
                '''
                self.debug('The Instance has already done this method')
                '''

                #Return
                return InstanceVariable

            #debug
            '''
            self.debug(('self.',self,['SwitchedBoolSuffixStr']))
            '''

            #At the level of the class set the new binding set function
            if hasattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr)==False:

                #Definition the binding function that will call the init one
                def bindBefore(*_TriggeringVariablesList,**_TriggeringVariablesDict):

                    #Alias
                    TriggeredInstanceVariable=_TriggeringVariablesList[0]

                    #debug
                    '''
                    self.debug('Reinit with '+Representer.represent(
                        TriggeredInstanceVariable.SettingKeyVariable,**{'RepresentingAlineaIsBool':False}
                        )
                    )
                    '''

                    #Definition the init method to trigger
                    SwitchedInitMethod=Functer.getFunctingFunctionWithFuncFunction(
                        TriggeredInstanceVariable.__class__.init
                    )

                    #debug
                    '''
                    self.debug(
                                [
                                    'SwitchedInitMethod is '+str(SwitchedInitMethod),
                                    "SwitchedInitMethod.func_globals['__file__'] is "+SwitchedInitMethod.func_globals['__file__']
                                ]
                            )
                    '''

                    #Call the init method (just at the level of this class definition) (so IMPORTANT this is init not __init__)
                    SwitchedInitMethod(TriggeredInstanceVariable)

                #set the name
                TriggeredBeforeMethodStr='bindBeforeWith'+self.SwitchedBoolSuffixStr
                bindBefore.__name__=TriggeredBeforeMethodStr

                #debug
                '''
                self.debug(
                            [
                                ("self.",self,['SwitchedDoneFunctionStr','SwitchedBoolSuffixStr']),
                                ("TriggeredMethodStr is "+TriggeredMethodStr)
                            ]
                        )
                '''

                #Link the bindBefore function
                setattr(
                        InstanceVariable.__class__,
                        TriggeredBeforeMethodStr,
                        Triggerer.TriggererClass(**
                            {
                                'TriggeringConditionVariable':[
                                    (
                                        'SettingKeyVariable',
                                        (operator.eq,self.SwitchedInstanceBoolKeyStr)
                                    ),
                                    (
                                        self.SwitchedInstanceBoolKeyStr,
                                        (operator.eq,True)
                                    ),
                                    ('SettingValueVariable',(operator.eq,False))
                                ],
                                'TriggeringHookStr':"Before"
                            }
                        )(bindBefore)
                    )

                #Call with a default instance this bind function to be installed
                getattr(InstanceVariable.__class__(),TriggeredBeforeMethodStr)()

                '''
                #Definition the binding function that will set the switched bool to True
                def bindAfter(*_TriggeringVariablesList,**_TriggeringVariablesDict):

                    #Alias
                    TriggeredInstanceVariable=_TriggeringVariablesList[0]

                    #Say that it is ok
                    setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,False)
                    setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,True)

                #set the name
                TriggeredAfterMethodStr='bindAfterWith'+self.SwitchedBoolSuffixStr
                bindAfter.__name__=TriggeredAfterMethodStr

                #Link the bindAfter function
                setattr(
                        InstanceVariable.__class__,
                        TriggeredAfterMethodStr,
                        Triggerer.TriggererClass(**
                            {
                                'TriggeringConditionVariable':[
                                    (
                                        'SettingKeyVariable',
                                        (operator.eq,self.SwitchedInstanceBoolKeyStr)
                                    ),
                                    (
                                        self.SwitchedInstanceBoolKeyStr,
                                        (operator.eq,True)
                                    ),
                                    ('SettingValueVariable',(operator.eq,False))
                                ],
                                'TriggeringHookStr':"After"
                            }
                        )(bindAfter)
                    )

                #Call with a default instance this bind function to be installed
                getattr(InstanceVariable.__class__(),TriggeredAfterMethodStr)()
                '''

                #Say that it is ok
                setattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr,True)

            #debug
            '''
            self.debug(
                        [
                            #('InstanceVariable is '+SYS._str(InstanceVariable)),
                            ('_LiargVariablesList is '+str(_LiargVariablesList))
                        ]
                    )
            '''

            #Call the SwitchingFunction
            self.SwitchingFunction(*_LiargVariablesList,**_KwargVariablesDict)

            #debug
            '''
            self.debug(('self.',self,['SwitchedBoolSuffixStr']))
            '''

            #set True for the Bool after the call
            InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,True)

            #debug
            '''
            self.debug(('InstanceVariable.',InstanceVariable,[self.SwitchedBoolSuffixStr]))
            '''

            #Return self for the wrapped method call
            return InstanceVariable

        #set
        self.SwitchedFunction=SwitchedFunction

        #Return self
        return self
#</DefineClass>
|
mit
| 331,411,898,130,539,900
| 28.284672
| 112
| 0.692672
| false
| 3.575758
| false
| false
| false
|
bokeh-cookbook/bokeh-cookbook
|
plugins/ipynb/markup.py
|
1
|
5935
|
from __future__ import absolute_import, print_function, division
import os
import json
try:
# Py3k
from html.parser import HTMLParser
except ImportError:
# Py2.7
from HTMLParser import HTMLParser
from pelican import signals
from pelican.readers import MarkdownReader, HTMLReader, BaseReader
from .ipynb import get_html_from_filepath, fix_css
def register():
    """Hook the ``ipynb`` reader into Pelican's reader registry."""
    def _install_ipynb_reader(pelican_object):
        # Map the "ipynb" extension to our notebook reader class.
        pelican_object.settings["READERS"]["ipynb"] = IPythonNB
    signals.initialized.connect(_install_ipynb_reader)
class IPythonNB(BaseReader):
    """
    Extend the Pelican.BaseReader to `.ipynb` files can be recognized
    as a markup language:

    Setup:

    `pelicanconf.py`:
    ```
    MARKUP = ('md', 'ipynb')
    ```
    """
    enabled = True
    file_extensions = ['ipynb']

    def read(self, filepath):
        """Return (html_content, metadata) for the notebook at *filepath*.

        Metadata is taken from a sibling ``.ipynb-meta`` file when present,
        otherwise from the notebook's own JSON metadata block.
        """
        metadata = {}
        metadata['ipython'] = True

        # Files
        filedir = os.path.dirname(filepath)
        filename = os.path.basename(filepath)
        metadata_filename = filename.split('.')[0] + '.ipynb-meta'
        metadata_filepath = os.path.join(filedir, metadata_filename)

        if os.path.exists(metadata_filepath):
            # Metadata is on a external file, process using Pelican MD Reader
            md_reader = MarkdownReader(self.settings)
            _content, metadata = md_reader.read(metadata_filepath)
        else:
            # Load metadata from ipython notebook file
            ipynb_file = open(filepath)
            notebook_metadata = json.load(ipynb_file)['metadata']

            # Change to standard pelican metadata
            for key, value in notebook_metadata.items():
                key = key.lower()
                if key in ("title", "date", "category", "tags", "slug", "author"):
                    metadata[key] = self.process_metadata(key, value)

            keys = [k.lower() for k in metadata.keys()]
            if not set(['title', 'date']).issubset(set(keys)):
                # Probably using ipynb.liquid mode
                md_filename = filename.split('.')[0] + '.md'
                md_filepath = os.path.join(filedir, md_filename)
                if not os.path.exists(md_filepath):
                    raise Exception("Could not find metadata in `.ipynb-meta`, inside `.ipynb` or external `.md` file.")
                else:
                    raise Exception("""Could not find metadata in `.ipynb-meta` or inside `.ipynb` but found `.md` file,
assuming that this notebook is for liquid tag usage if true ignore this error""")

        content, info = get_html_from_filepath(filepath)

        # Generate Summary: Do it before cleaning CSS
        if 'summary' not in [key.lower() for key in self.settings.keys()]:
            parser = MyHTMLParser(self.settings, filename)
            # Wrap in <body> so the parser sees a single rooted document.
            if hasattr(content, 'decode'):  # PY2
                content = '<body>%s</body>' % content.encode('utf-8')
                content = content.decode("utf-8")
            else:
                content = '<body>%s</body>' % content
            parser.feed(content)
            parser.close()
            content = parser.body
            # Honor IPYNB_USE_META_SUMMARY unless it is explicitly False.
            if ('IPYNB_USE_META_SUMMARY' in self.settings.keys() and self.settings['IPYNB_USE_META_SUMMARY'] is False) or 'IPYNB_USE_META_SUMMARY' not in self.settings.keys():
                metadata['summary'] = parser.summary

        content = fix_css(content, info)
        return content, metadata
class MyHTMLParser(HTMLReader._HTMLParser):
    """
    Custom Pelican `HTMLReader._HTMLParser` to create the summary of the content
    based on settings['SUMMARY_MAX_LENGTH'].

    The summary is stopped if any div containing ipython notebook code cells
    is found. This is needed in order to generate valid HTML for the summary;
    a simple string split would break the html, generating errors on the theme.
    The downside is that the summary length is not exactly the one specified:
    it stops at completed div/p/li/etc tags.
    """
    def __init__(self, settings, filename):
        HTMLReader._HTMLParser.__init__(self, settings, filename)
        self.settings = settings
        self.filename = filename
        # Words seen so far; once it reaches SUMMARY_MAX_LENGTH the summary
        # is frozen.
        self.wordcount = 0
        self.summary = None

        # (tag, (attr, value)) pairs that terminate the summary early;
        # configurable via IPYNB_STOP_SUMMARY_TAGS / IPYNB_EXTEND_STOP_SUMMARY_TAGS.
        self.stop_tags = [('div', ('class', 'input')), ('div', ('class', 'output')), ('h2', ('id', 'Header-2'))]
        if 'IPYNB_STOP_SUMMARY_TAGS' in self.settings.keys():
            self.stop_tags = self.settings['IPYNB_STOP_SUMMARY_TAGS']
        if 'IPYNB_EXTEND_STOP_SUMMARY_TAGS' in self.settings.keys():
            self.stop_tags.extend(self.settings['IPYNB_EXTEND_STOP_SUMMARY_TAGS'])

    def handle_starttag(self, tag, attrs):
        HTMLReader._HTMLParser.handle_starttag(self, tag, attrs)

        if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
            # A stop tag matches on tag name and, optionally, one (attr, value)
            # pair being present on the element.
            mask = [stoptag[0] == tag and (stoptag[1] is None or stoptag[1] in attrs) for stoptag in self.stop_tags]
            if any(mask):
                # Freeze the summary at everything buffered so far.
                self.summary = self._data_buffer
                self.wordcount = self.settings['SUMMARY_MAX_LENGTH']

    def handle_endtag(self, tag):
        HTMLReader._HTMLParser.handle_endtag(self, tag)

        if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
            # Recount words on tag close so the summary ends on a complete tag.
            self.wordcount = len(strip_tags(self._data_buffer).split(' '))
            if self.wordcount >= self.settings['SUMMARY_MAX_LENGTH']:
                self.summary = self._data_buffer
def strip_tags(html):
    """
    Strip html tags from html content (str)
    Useful for summary creation
    """
    stripper = HTMLTagStripper()
    stripper.feed(html)
    return stripper.get_data()
class HTMLTagStripper(HTMLParser):
    """
    Custom HTML Parser to strip HTML tags
    Useful for summary creation
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.reset()
        # Text fragments collected while feeding markup, in document order.
        self.fed = []

    def handle_data(self, data):
        # Only text nodes are kept; all tag callbacks stay inherited no-ops.
        self.fed.append(data)

    def get_data(self):
        """Return the concatenation of every text fragment seen so far."""
        return "".join(self.fed)
|
agpl-3.0
| -8,813,531,780,843,853,000
| 34.969697
| 175
| 0.609436
| false
| 3.975218
| false
| false
| false
|
mtlynch/ndt-e2e-clientworker
|
client_wrapper/install_selenium_extensions.py
|
1
|
3193
|
import argparse
import os
import platform
import urllib
import tempfile
import names
# Download locations for the Selenium browser drivers, keyed by
# "<browser>_<platform>".  Each entry holds the remote URL and the local
# file name to save it under.
driver_urls = {
    'chrome_os_x': {
        'url':
        'http://chromedriver.storage.googleapis.com/2.21/chromedriver_mac32.zip',
        'file_name': 'chromedriver_mac32.zip'
    },
    'chrome_ubuntu': {
        'url':
        'http://chromedriver.storage.googleapis.com/2.21/chromedriver_linux64.zip',
        'file_name': 'chromedriver_linux64.zip'
    },
    'chrome_windows_10': {
        'url':
        'http://chromedriver.storage.googleapis.com/2.21/chromedriver_win32.zip',
        'file_name': 'chromedriver_win32.zip'
    },
    'edge_windows_10': {
        'url':
        'https://download.microsoft.com/download/8/D/0/8D0D08CF-790D-4586-B726-C6469A9ED49C/MicrosoftWebDriver.msi',
        'file_name': 'MicrosoftWebDriver.msi'
    },
    'safari_os_x': {
        'url':
        'http://selenium-release.storage.googleapis.com/2.48/SafariDriver.safariextz',
        'file_name': 'SafariDriver.safariextz',
    }
}
def _download_chrome_drivers():
    """Downloads Chrome drivers for Selenium."""
    # Pick the driver entry matching the current operating system.
    system = platform.system()
    if system == 'Darwin':
        url_key = 'chrome_os_x'
    elif system == 'Linux':
        url_key = 'chrome_ubuntu'
    elif system == 'Windows':
        url_key = 'chrome_windows_10'
    else:
        raise ValueError('Unsupported OS specified: %s' % (platform.system()))
    remote_file = driver_urls[url_key]
    _download_temp_file(remote_file['url'], remote_file['file_name'])
def _download_temp_file(url, file_name):
    """Downloads file into temp directory.

    Args:
        url: A string representing the URL the file is to be downloaded from.
        file_name: A string representing the name of the file to be downloaded.
    """
    # BUG FIX: the old ``urllib.URLopener()`` spelling only exists on
    # Python 2 (and is deprecated there); on Python 3 the helper lives in
    # urllib.request.  Import whichever is available at call time.
    try:
        from urllib.request import urlretrieve  # Python 3
    except ImportError:
        from urllib import urlretrieve  # Python 2 equivalent of URLopener().retrieve
    temp_dir = tempfile.mkdtemp()
    download_path = os.path.join(temp_dir, file_name)
    print('File downloading to %s' % download_path)
    urlretrieve(url, download_path)
def _download_edge_drivers():
    """Downloads Edge drivers for Selenium."""
    edge_driver = driver_urls['edge_windows_10']
    _download_temp_file(edge_driver['url'], edge_driver['file_name'])
def _download_safari_drivers():
    """Downloads Safari drivers for Selenium."""
    safari_driver = driver_urls['safari_os_x']
    _download_temp_file(safari_driver['url'], safari_driver['file_name'])
def main(args):
    """Install the Selenium driver for the browser named in *args.browser*."""
    # Dispatch table: browser name -> installer (Firefox needs no driver).
    downloaders = {
        names.CHROME: _download_chrome_drivers,
        names.EDGE: _download_edge_drivers,
        names.SAFARI: _download_safari_drivers,
        names.FIREFOX: lambda: None,
    }
    if args.browser not in downloaders:
        raise ValueError('Unsupported browser specified: %s' % (args.browser))
    downloaders[args.browser]()
if __name__ == '__main__':
    # Command-line entry point: parse the target browser and install its driver.
    parser = argparse.ArgumentParser(
        prog='NDT E2E Testing Client Selenium Extension Installer',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--browser',
                        help='Browser to run under (for browser-based client)',
                        choices=('chrome', 'firefox', 'safari', 'edge'))
    main(parser.parse_args())
|
apache-2.0
| 7,890,214,524,247,196,000
| 31.917526
| 116
| 0.634826
| false
| 3.524283
| false
| false
| false
|
cwacek/python-jsonschema-objects
|
test/test_regression_126.py
|
1
|
1829
|
import pytest
import python_jsonschema_objects as pjs
import collections
@pytest.fixture
def schema():
    # JSON Schema (draft-04) reproducing issue #126: a top-level oneOf of two
    # messages, one of which contains an array of objects with enum/int refs.
    return {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "Test",
        "definitions": {
            "MyEnum1": {"type": "string", "enum": ["E_A", "E_B"]},
            "MyEnum2": {"type": "string", "enum": ["F_A", "F_B", "F_C", "F_D"]},
            "MyInt": {
                "default": "0",
                "type": "integer",
                "minimum": 0,
                "maximum": 4294967295,
            },
            "MyObj1": {
                "type": "object",
                "properties": {
                    "e1": {"$ref": "#/definitions/MyEnum1"},
                    "e2": {"$ref": "#/definitions/MyEnum2"},
                    "i1": {"$ref": "#/definitions/MyInt"},
                },
                "required": ["e1", "e2", "i1"],
            },
            "MyArray": {
                "type": "array",
                "items": {"$ref": "#/definitions/MyObj1"},
                "minItems": 0,
                "uniqueItems": True,
            },
            "MyMsg1": {
                "type": "object",
                "properties": {"a1": {"$ref": "#/definitions/MyArray"}},
            },
            "MyMsg2": {"type": "object", "properties": {"s1": {"type": "string"}}},
        },
        "type": "object",
        "oneOf": [{"$ref": "#/definitions/MyMsg1"}, {"$ref": "#/definitions/MyMsg2"}],
    }
def test_regression_126(schema):
    """Regression test for issue #126: building classes from the fixture
    schema (without name standardization) and serializing a nested
    array-of-objects message must succeed."""
    builder = pjs.ObjectBuilder(schema)
    # Keep the original definition names (MyObj1 etc.) as class names.
    ns = builder.build_classes(standardize_names=False)
    Obj1 = ns.MyObj1
    Array1 = ns.MyArray
    Msg1 = ns.MyMsg1
    o1 = Obj1(e1="E_A", e2="F_C", i1=2600)
    o2 = Obj1(e1="E_B", e2="F_D", i1=2500)
    objs = Array1([o1, o2])
    msg = Msg1(a1=objs)
    # serialize() must not raise; printed for visibility under pytest -s.
    print(msg.serialize())
|
mit
| -1,888,342,341,075,785,700
| 30.534483
| 86
| 0.42865
| false
| 3.362132
| false
| false
| false
|
Krakn/learning
|
src/python/advent_of_code/2017/05/a_maze_of_twisty_trampolines_all_alike.py
|
1
|
3322
|
#!/usr/bin/env python3
'''
--- Day 5: A Maze of Twisty Trampolines, All Alike ---
'''
def load_input(filename):
    """
    Parse input file, returning an array of maze offsets.
    """
    with open(filename, 'r') as handle:
        # One integer offset per line.
        return [int(raw_line.strip()) for raw_line in handle.readlines()]
def part1(maze):
    """
    --- Part 1 ---

    Follow the jump offsets in *maze* until the position escapes the list.
    After each jump the offset that was used is incremented by 1.

    Example: [0, 3, 0, 1, -3] escapes after 5 steps, leaving the list as
    [2, 5, 0, 1, -2].

    Note: *maze* is mutated in place. Returns the number of steps taken.
    """
    position = 0
    jumps = 0
    while 0 <= position < len(maze):
        offset = maze[position]
        # Bump the offset we are about to use, then jump by its old value.
        maze[position] = offset + 1
        position += offset
        jumps += 1
    return jumps
def part2(maze):
    '''
    --- Part Two ---
    Same as part 1, except that an offset of three or more is decreased by
    1 after the jump instead of increased. Return the number of steps taken
    before the current position leaves the list; the offset list is
    modified in place.
    '''
    position = 0
    count = 0
    length = len(maze)
    while 0 <= position < length:
        offset = maze[position]
        maze[position] = offset - 1 if offset >= 3 else offset + 1
        position += offset
        count += 1
    return count
if __name__ == "__main__":
    # part1/part2 mutate the maze in place, so each part works on its own
    # freshly-parsed copy of the input.
    MAZE1 = load_input('input.txt')
    MAZE2 = load_input('input.txt')
    print("Part 1:", part1(MAZE1))
    print("Part 2:", part2(MAZE2))
|
isc
| -1,629,784,482,675,308,500
| 33.604167
| 79
| 0.609573
| false
| 3.805269
| false
| false
| false
|
winterbird-code/adbb
|
adbb/__init__.py
|
1
|
2124
|
#!/usr/bin/env python
#
# This file is part of adbb.
#
# adbb is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# adbb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with adbb. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import logging
import logging.handlers
import sys
import adbb.db
from adbb.link import AniDBLink
from adbb.animeobjs import Anime, AnimeTitle, Episode, File
from adbb.anames import get_titles
# Client identification sent to the AniDB UDP API.
anidb_client_name = "adbb"
anidb_client_version = 2
anidb_api_version = 3

# Module-level singletons, populated by init().
log = None            # package-wide logging.Logger
_anidb = None         # AniDBLink connection to the AniDB UDP API
_sessionmaker = None  # SQLAlchemy sessionmaker for the local database
def init(
        anidb_user,
        anidb_pwd,
        sql_db_url,
        debug=False,
        loglevel='info',
        logger=None,
        outgoing_udp_port=9876):
    """Initialise the adbb package.

    Sets up logging (a default syslog logger, plus a stderr handler when
    ``debug`` is set, unless the caller supplies a ``logger``), creates the
    SQLAlchemy session factory for ``sql_db_url`` and opens the UDP link to
    AniDB using the given credentials. Must be called before any other
    adbb functionality is used.
    """
    if logger is None:
        logger = logging.getLogger(__name__)
        logger.setLevel(loglevel.upper())
        if debug:
            # debug mode: also log everything to stderr
            logger.setLevel(logging.DEBUG)
            lh = logging.StreamHandler()
            lh.setFormatter(logging.Formatter(
                '%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s'))
            logger.addHandler(lh)
        # default handler logs to the local syslog socket
        # NOTE(review): '/dev/log' is Linux-specific - confirm target platforms
        lh = logging.handlers.SysLogHandler(address='/dev/log')
        lh.setFormatter(logging.Formatter(
            'adbb %(filename)s/%(funcName)s:%(lineno)d - %(message)s'))
        logger.addHandler(lh)
    # publish the configured singletons for the rest of the package
    global log, _anidb, _sessionmaker
    log = logger
    _sessionmaker = adbb.db.init_db(sql_db_url)
    _anidb = adbb.link.AniDBLink(
        anidb_user,
        anidb_pwd,
        myport=outgoing_udp_port)
def get_session():
    # Create a new SQLAlchemy session from the factory set up by init().
    return _sessionmaker()


def close_session(session):
    # Release a session previously obtained from get_session().
    session.close()


def close():
    # Shut down the UDP link to AniDB; call once when done with the package.
    global _anidb
    _anidb.stop()
|
gpl-3.0
| -7,247,581,232,844,061,000
| 25.222222
| 83
| 0.663842
| false
| 3.50495
| false
| false
| false
|
jcfr/mystic
|
examples/TEST_ffitPP2_b.py
|
1
|
1429
|
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Testing the polynomial fitting problem of [1] using scipy's Nelder-Mead algorithm.
Reference:
[1] Storn, R. and Price, K. Differential Evolution - A Simple and Efficient
Heuristic for Global Optimization over Continuous Spaces. Journal of Global
Optimization 11: 341-359, 1997.
"""
from test_ffit import Chebyshev8, plot_solution, print_solution
from TEST_ffitPP_b import ChebyshevCost
if __name__ == '__main__':
    import random
    from mystic.solvers import fmin
    #from mystic._scipyoptimize import fmin
    from mystic.tools import random_seed
    random_seed(123)

    import pp
    import sys

    # Optional first CLI argument: local port of an SSH tunnel to a remote
    # Parallel Python server; otherwise run with local workers only.
    if len(sys.argv) > 1:
        tunnelport = sys.argv[1]
        ppservers = ("localhost:%s" % tunnelport,)
    else:
        ppservers = ()
    myserver = pp.Server(ppservers=ppservers)

    # Eight random starting points, each a perturbation of the known
    # Chebyshev8 coefficients.
    trials = []
    for trial in range(8):
        x = tuple([random.uniform(-100,100) + Chebyshev8[i] for i in range(9)])
        trials.append(x)
    # Submit one Nelder-Mead minimisation per starting point; submit()
    # returns callables that block until the result is ready.
    results = [myserver.submit(fmin,(ChebyshevCost,x),(),()) for x in trials]
    for solution in results:
        print_solution(solution())
        #plot_solution(solution)
|
bsd-3-clause
| 4,351,184,427,107,404,300
| 27.58
| 82
| 0.687194
| false
| 3.277523
| false
| false
| false
|
Aloomaio/googleads-python-lib
|
examples/ad_manager/v201805/creative_service/create_creative_from_template.py
|
1
|
3666
|
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a new template creative for a given advertiser.
To determine which companies are advertisers, run get_advertisers.py.
To determine which creative templates exist, run
get_all_creative_templates.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import os
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set id of the advertiser (company) that the creative will be assigned to.
ADVERTISER_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_id):
  """Creates a template creative for the given advertiser.

  Args:
    client: an initialized AdManagerClient instance.
    advertiser_id: the id of the advertiser (company) that the creative
      will be assigned to.
  """
  # Initialize appropriate service.
  creative_service = client.GetService('CreativeService', version='v201805')

  # Use the image banner with optional third party tracking template.
  creative_template_id = '10000680'

  # Create image asset.
  file_name = 'image%s.jpg' % uuid.uuid4()
  image_path = os.path.join(os.path.split(__file__)[0], '..', '..', 'data',
                            'medium_rectangle.jpg')
  # BUG FIX: the image is binary data, so it must be opened in binary mode
  # ('rb'); text mode corrupts the bytes on Windows and fails outright on
  # Python 3. The context manager also guarantees the handle is closed.
  with open(image_path, 'rb') as image_file:
    image_data = image_file.read()
  size = {
      'width': '300',
      'height': '250'
  }
  asset = {
      'xsi_type': 'CreativeAsset',
      'fileName': file_name,
      'assetByteArray': image_data,
      'size': size
  }

  # Create creative from templates.
  creative = {
      'xsi_type': 'TemplateCreative',
      'name': 'Template Creative #%s' % uuid.uuid4(),
      'advertiserId': advertiser_id,
      'size': size,
      'creativeTemplateId': creative_template_id,
      'creativeTemplateVariableValues': [
          {
              'xsi_type': 'AssetCreativeTemplateVariableValue',
              'uniqueName': 'Imagefile',
              'asset': asset
          },
          {
              'xsi_type': 'LongCreativeTemplateVariableValue',
              'uniqueName': 'Imagewidth',
              'value': '300'
          },
          {
              'xsi_type': 'LongCreativeTemplateVariableValue',
              'uniqueName': 'Imageheight',
              'value': '250'
          },
          {
              'xsi_type': 'UrlCreativeTemplateVariableValue',
              'uniqueName': 'ClickthroughURL',
              'value': 'www.google.com'
          },
          {
              'xsi_type': 'StringCreativeTemplateVariableValue',
              'uniqueName': 'Targetwindow',
              'value': '_blank'
          }
      ]
  }

  # Call service to create the creative.
  creative = creative_service.createCreatives([creative])[0]

  # Display results.
  print ('Template creative with id "%s", name "%s", and type "%s" was '
         'created and can be previewed at %s.'
         % (creative['id'], creative['name'],
            ad_manager.AdManagerClassType(creative), creative['previewUrl']))
if __name__ == '__main__':
  # Initialize client object from the "googleads.yaml" credentials file.
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  main(ad_manager_client, ADVERTISER_ID)
|
apache-2.0
| 1,060,630,061,611,670,300
| 32.027027
| 80
| 0.638298
| false
| 3.887593
| false
| false
| false
|
eduble/panteda
|
operators/map/heatmap.py
|
1
|
4193
|
#!/usr/bin/env python3
import numpy as np
import numpy.random
from time import time
# web mercator projection functions
# ---------------------------------
def linear_lat(lat, atanh = np.arctanh, sin = np.sin, radians = np.radians):
    # Latitude in degrees -> web-mercator y coordinate (inverse Gudermannian).
    return atanh(sin(radians(lat)))

def inv_linear_lat(ll, asin = np.arcsin, tanh = np.tanh, degrees = np.degrees):
    # Inverse of linear_lat(): mercator y coordinate -> latitude in degrees.
    return degrees(asin(tanh(ll)))

def lng_to_x(w, lng_min, lng_max, lng):
    # Longitude -> pixel x; linear over the [lng_min, lng_max] viewport of width w.
    return (lng - lng_min) * (w / (lng_max - lng_min))

def lat_to_y(h, lat_min, lat_max, lat):
    # Latitude -> pixel y via the mercator projection, over a viewport of height h.
    return (linear_lat(lat) - linear_lat(lat_min)) * (h / (linear_lat(lat_max) - linear_lat(lat_min)))

def x_to_lng(w, lng_min, lng_max, x):
    # Pixel x -> longitude (inverse of lng_to_x).
    return x * ((lng_max - lng_min)/w) + lng_min

def y_to_lat(h, lat_min, lat_max, y):
    # Pixel y -> latitude (inverse of lat_to_y).
    return inv_linear_lat(y * ((linear_lat(lat_max) - linear_lat(lat_min))/h) + linear_lat(lat_min))
# heatmap data generation
# -----------------------
class HeatMap:
    """Incrementally build a heat map (2-D histogram) of (lng, lat) points
    over a web-mercator pixel grid, and expose it in a compact form suited
    for network transfer.

    ``lnglat`` is expected to provide a ``chunks()`` iterator whose items
    expose ``columns`` as (lng, lat) arrays -- TODO confirm against caller.
    """
    def __init__(self, lnglat, width, height, westlng, eastlng, southlat, northlat):
        # compute pixel bounds of the map
        # (5-pixel-wide bins, plus a final partial bin up to the map edge)
        x = np.append(np.arange(0, width, 5), width)
        y = np.append(np.arange(0, height, 5), height)
        # project pixel bounds coordinates (x, y -> lng, lat)
        edgelng = x_to_lng(width, westlng, eastlng, x)
        centerlng = x_to_lng(width, westlng, eastlng, (x[1:] + x[:-1])/2)
        edgelat = y_to_lat(height, southlat, northlat, y)
        centerlat = y_to_lat(height, southlat, northlat, (y[1:] + y[:-1])/2)
        # prepare computation parameters
        self.bins = edgelng, edgelat
        self.range = (westlng, eastlng), (southlat, northlat)
        self.iterator = lnglat.chunks()
        self.heatmap = None
        # prepare compression parameters
        # latitude bins are unequal under mercator, so quantize with half
        # the smallest bin height to keep the rounding error sub-bin
        scalelat = (edgelat[1:] - edgelat[:-1]).min() / 2
        self.approx_centerlat = numpy.rint((centerlat - centerlat[0]) / scalelat)
        scalelng = edgelng[1] - edgelng[0] # longitude is linear
        self.approx_centerlng = numpy.rint((centerlng - centerlng[0]) / scalelng)
        self.scales = dict(lat=scalelat, lng=scalelng)
        self.offsets = dict(lat=centerlat[0], lng=centerlng[0])
        # stream status parameters
        self.done = False

    def compute(self, time_credit):
        # make histogram:
        # - create a pixel grid
        # - given a tuple (lng, lat) increment the corresponding pixel
        # Processes as many chunks as fit in ``time_credit`` seconds; call
        # repeatedly until ``self.done`` is True.
        deadline = time() + time_credit
        deadline_reached = False
        for chunk in self.iterator:
            lng, lat = chunk.columns
            chunk_heatmap = np.histogram2d(lng, lat, bins=self.bins, range=self.range)[0]
            if self.heatmap is None:
                self.heatmap = chunk_heatmap.T
            else:
                self.heatmap += chunk_heatmap.T
            if time() > deadline:
                deadline_reached = True
                break
        if not deadline_reached:
            # we left the loop because of the end of iteration
            self.done = True

    # get sparse matrix representation: (lat, lng, intensity) tuples.
    # in order to lower network usage, we will transfer this data in a
    # compressed form: lng & lat values will be transfered as integers
    # together with a scaling factor and an offset to be applied.
    def compressed_form(self):
        # count number of points
        count = int(self.heatmap.sum())
        if count == 0:
            # if no points, return empty data
            data = dict(lat = [], lng = [], val = [])
        else:
            # apply threshold and
            # compute approximated sparse matrix data
            # (cells below 5% of the maximum intensity are dropped)
            nonzero_xy = ((self.heatmap / self.heatmap.max()) > 0.05).nonzero()
            nonzero_x = nonzero_xy[1]
            nonzero_y = nonzero_xy[0]
            data = dict(
                lat = self.approx_centerlat[nonzero_y].astype(int).tolist(),
                lng = self.approx_centerlng[nonzero_x].astype(int).tolist(),
                val = self.heatmap[nonzero_xy].astype(int).tolist()
            )
        return dict(
            data = data,
            scales = self.scales,
            offsets = self.offsets,
            count = count,
            done = self.done
        )
|
gpl-3.0
| -2,101,006,383,756,166,100
| 41.785714
| 102
| 0.577152
| false
| 3.494167
| false
| false
| false
|
mohitreddy1996/Gender-Detection-from-Signature
|
src/train_test/random_forests.py
|
1
|
1140
|
from sklearn.metrics import precision_recall_fscore_support
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import MinMaxScaler, normalize
# Gender classification from signature features with a random forest.
# NOTE: Python 2 syntax (print statements) and the deprecated DataFrame.ix
# indexer are used throughout.
df = pd.read_csv('../../Dataset/dataset.csv', delimiter='\t')
dataset = df.values  # NOTE(review): unused below - kept as-is
# Random ~80/20 train/test split.
mask = np.random.rand(len(df)) < .80
train = df[mask]
test = df[~mask]
X = pd.DataFrame()
Y = pd.DataFrame()
# Columns 2..n-2 are features, the last column is the label.
X = train.ix[:, 2:len(train.columns) - 1]
Y = train.ix[:, len(train.columns) - 1: len(train.columns)]
X_Test = pd.DataFrame()
Y_Test = pd.DataFrame()
# After Normalising
X_standard = normalize(X)
print X_standard.shape
X_Test = test.ix[:, 2:len(test.columns) - 1]
Y_Test = test.ix[:, len(test.columns) - 1: len(test.columns)]
X_Test_standard = normalize(X_Test)
print X_Test_standard.shape
print "Training Data Set Size : ", str(len(X))
print "Testing Data Set Size : ", str(len(X_Test))
# tune parameters here.
rf = RandomForestClassifier(n_estimators=150, max_features=20)
rf.fit(X_standard, Y)
# predict
Y_Result = rf.predict(X_Test_standard)
print precision_recall_fscore_support(Y_Test, Y_Result, average='micro')
|
mit
| 5,244,072,690,348,756,000
| 20.923077
| 72
| 0.711404
| false
| 2.961039
| true
| true
| false
|
aquaya/ivrhub
|
ivrhub/models.py
|
1
|
3129
|
''' mongoengine models
'''
from mongoengine import *
class User(Document):
    ''' some are admins some are not
    '''
    # True grants access to admin-only functionality
    admin_rights = BooleanField(required=True)
    # credentials for programmatic API access
    api_id = StringField()
    api_key = StringField()
    email = EmailField(required=True, unique=True, max_length=254)
    # random code mailed out to confirm the address
    email_confirmation_code = StringField(required=True)
    email_confirmed = BooleanField(required=True)
    # set when a password reset has been requested
    forgot_password_code = StringField()
    last_login_time = DateTimeField(required=True)
    name = StringField()
    # organizations this user belongs to
    organizations = ListField(ReferenceField('Organization'))
    # only the hash is stored, never the plain password
    password_hash = StringField(required=True)
    registration_time = DateTimeField(required=True)
    # account approval flag - TODO confirm exact semantics vs email_confirmed
    verified = BooleanField(required=True)
class Organization(Document):
    ''' people join orgs
    '''
    description = StringField(default='')
    # url-safe version of the name
    label = StringField(unique=True, required=True)
    # free-text location
    location = StringField(default='')
    name = StringField(unique=True, required=True)
class Form(Document):
    ''' the heart of the system
    '''
    # unique code for requesting this form via sms or a call
    calling_code = StringField()
    creation_time = DateTimeField()
    creator = ReferenceField(User)
    description = StringField(default = '')
    # url-safe version of the name; only unique within one organization
    label = StringField(unique_with='organization')
    language = StringField(default = '')
    name = StringField(unique_with='organization')
    organization = ReferenceField(Organization)
    # have to store questions here as well so we know the order
    questions = ListField(ReferenceField('Question'))
class Question(Document):
    ''' connected to forms
    '''
    # audio prompt, referenced by filename or by url
    audio_filename = StringField()
    audio_url = StringField()
    creation_time = DateTimeField()
    description = StringField()
    form = ReferenceField(Form)
    # url-safe version of the name; only unique within one form
    label = StringField(unique_with='form')
    name = StringField(unique_with='form')
    # 'text_prompt', 'audio_file' or 'audio_url'
    prompt_type = StringField(default='text_prompt')
    # 'keypad' or 'voice' or 'no response'
    response_type = StringField(default='keypad')
    # S3 location of uploaded prompt audio
    s3_key = StringField()
    s3_url = StringField()
    text_prompt = StringField()
    text_prompt_language = StringField(default='en')
class Response(Document):
    ''' individual response to a form
    '''
    # call identifier (SID) - presumably from the telephony provider; verify
    call_sid = StringField()
    completion_time = DateTimeField()
    form = ReferenceField(Form)
    # whether this was a 'call' or 'ringback' or 'scheduled call'
    initiated_using = StringField()
    initiation_time = DateTimeField()
    # track the progress of the response
    last_question_asked = ReferenceField(Question)
    # any notes about the response as a whole
    notes = StringField()
    respondent_phone_number = StringField()
class Answer(Document):
    ''' connected to questions and responses
    '''
    # recording of a voice answer, if any
    audio_url = StringField()
    # digits entered for a keypad answer, if any
    keypad_input = StringField()
    # any notes on this answer (like a transcription)
    notes = StringField()
    question = ReferenceField(Question)
    response = ReferenceField(Response)
|
mit
| 7,868,206,963,524,489,000
| 31.59375
| 66
| 0.686801
| false
| 4.298077
| false
| false
| false
|
tzangms/PyConTW
|
pycon_project/biblion/views.py
|
1
|
3501
|
from datetime import datetime
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import simplejson as json
from django.contrib.sites.models import Site
from biblion.exceptions import InvalidSection
from biblion.models import Post, FeedHit
from biblion.settings import ALL_SECTION_NAME
def blog_index(request):
    """Render the list of current posts in the visitor's language."""
    current_posts = Post.objects.current().filter(
        language=request.LANGUAGE_CODE)
    context = {"posts": current_posts}
    return render_to_response("biblion/blog_list.html", context,
                              context_instance=RequestContext(request))
def blog_section_list(request, section):
    """Render the post list of a single section; 404 on an unknown one."""
    try:
        section_posts = Post.objects.section(section)
    except InvalidSection:
        raise Http404()
    section_name = dict(Post.SECTION_CHOICES)[Post.section_idx(section)]
    context = {
        "section_slug": section,
        "section_name": section_name,
        "posts": section_posts,
    }
    return render_to_response("biblion/blog_section_list.html", context,
                              context_instance=RequestContext(request))
def blog_post_detail(request, **kwargs):
    # Staff may preview any post (including unpublished ones) by primary
    # key; everyone else resolves published posts by date + slug.
    if "post_pk" in kwargs:
        if request.user.is_authenticated() and request.user.is_staff:
            queryset = Post.objects.all()
            post = get_object_or_404(queryset, pk=kwargs["post_pk"])
        else:
            raise Http404()
    else:
        queryset = Post.objects.current()
        queryset = queryset.filter(
            published__year = int(kwargs["year"]),
            published__month = int(kwargs["month"]),
            published__day = int(kwargs["day"]),
        )
        post = get_object_or_404(queryset, slug=kwargs["slug"])
    # bump the view counter before rendering
    post.inc_views()
    return render_to_response("biblion/blog_post.html", {
        "post": post,
    }, context_instance=RequestContext(request))
def serialize_request(request):
    """Serialize the interesting parts of an HTTP request as a JSON string:
    the path, the query string, the client address, and every HTTP_* header.
    """
    meta = {
        "QUERY_STRING": request.META.get("QUERY_STRING"),
        "REMOTE_ADDR": request.META.get("REMOTE_ADDR"),
    }
    # copy through all the client-supplied HTTP_* headers as well
    for name in request.META:
        if name.startswith("HTTP"):
            meta[name] = request.META[name]
    return json.dumps({"path": request.path, "META": meta})
def blog_feed(request, section=None):
    # Serve the Atom feed for one section (or the whole blog when section
    # is None); records each fetch as a FeedHit for simple analytics.
    try:
        posts = Post.objects.section(section)
    except InvalidSection:
        raise Http404()

    if section is None:
        section = ALL_SECTION_NAME

    current_site = Site.objects.get_current()

    feed_title = "%s Blog: %s" % (current_site.name, section[0].upper() + section[1:])

    blog_url = "http://%s%s" % (current_site.domain, reverse("blog"))

    url_name, kwargs = "blog_feed", {"section": section}
    feed_url = "http://%s%s" % (current_site.domain, reverse(url_name, kwargs=kwargs))

    # posts are newest-first, so the first one carries the feed timestamp;
    # fall back to a fixed epoch for an empty feed
    if posts:
        feed_updated = posts[0].published
    else:
        feed_updated = datetime(2009, 8, 1, 0, 0, 0)

    # create a feed hit
    hit = FeedHit()
    hit.request_data = serialize_request(request)
    hit.save()

    atom = render_to_string("biblion/atom_feed.xml", {
        "feed_id": feed_url,
        "feed_title": feed_title,
        "blog_url": blog_url,
        "feed_url": feed_url,
        "feed_updated": feed_updated,
        "entries": posts,
        "current_site": current_site,
    })
    return HttpResponse(atom, mimetype="application/atom+xml")
|
bsd-3-clause
| -8,178,591,767,137,141,000
| 29.181034
| 86
| 0.623536
| false
| 3.822052
| false
| false
| false
|
jseabold/statsmodels
|
statsmodels/sandbox/distributions/sppatch.py
|
5
|
24020
|
'''patching scipy to fit distributions and expect method
This adds new methods to estimate continuous distribution parameters with some
fixed/frozen parameters. It also contains functions that calculate the expected
value of a function for any continuous or discrete distribution
It temporarily also contains Bootstrap and Monte Carlo function for testing the
distribution fit, but these are neither general nor verified.
Author: josef-pktd
License: Simplified BSD
'''
from statsmodels.compat.python import lmap
import numpy as np
from scipy import stats, optimize, integrate
########## patching scipy
#vonmises does not define finite bounds, because it is intended for circular
#support which does not define a proper pdf on the real line
# Restrict vonmises to one period so generic machinery that needs finite
# support (fitting, integration) behaves sensibly.
stats.distributions.vonmises.a = -np.pi
stats.distributions.vonmises.b = np.pi
#the next 3 functions are for fit with some fixed parameters
#As they are written, they do not work as functions, only as methods
def _fitstart(self, x):
'''example method, method of moment estimator as starting values
Parameters
----------
x : ndarray
data for which the parameters are estimated
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
This example was written for the gamma distribution, but not verified
with literature
'''
loc = np.min([x.min(),0])
a = 4/stats.skew(x)**2
scale = np.std(x) / np.sqrt(a)
return (a, loc, scale)
def _fitstart_beta(self, x, fixed=None):
'''method of moment estimator as starting values for beta distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
for method of moment estimator for known loc and scale
https://en.wikipedia.org/wiki/Beta_distribution#Parameter_estimation
http://www.itl.nist.gov/div898/handbook/eda/section3/eda366h.htm
NIST reference also includes reference to MLE in
Johnson, Kotz, and Balakrishan, Volume II, pages 221-235
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
a, b = x.min(), x.max()
eps = (a-b)*0.01
if fixed is None:
#this part not checked with books
loc = a - eps
scale = (a - b) * (1 + 2*eps)
else:
if np.isnan(fixed[-2]):
#estimate loc
loc = a - eps
else:
loc = fixed[-2]
if np.isnan(fixed[-1]):
#estimate scale
scale = (b + eps) - loc
else:
scale = fixed[-1]
#method of moment for known loc scale:
scale = float(scale)
xtrans = (x - loc)/scale
xm = xtrans.mean()
xv = xtrans.var()
tmp = (xm*(1-xm)/xv - 1)
p = xm * tmp
q = (1 - xm) * tmp
return (p, q, loc, scale) #check return type and should fixed be returned ?
def _fitstart_poisson(self, x, fixed=None):
'''maximum likelihood estimator as starting values for Poisson distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
MLE :
https://en.wikipedia.org/wiki/Poisson_distribution#Maximum_likelihood
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
a = x.min()
eps = 0 # is this robust ?
if fixed is None:
#this part not checked with books
loc = a - eps
else:
if np.isnan(fixed[-1]):
#estimate loc
loc = a - eps
else:
loc = fixed[-1]
#MLE for standard (unshifted, if loc=0) Poisson distribution
xtrans = (x - loc)
lambd = xtrans.mean()
#second derivative d loglike/ dlambd Not used
#dlldlambd = 1/lambd # check
return (lambd, loc) #check return type and should fixed be returned ?
def nnlf_fr(self, thetash, x, frmask):
    """Negative log-likelihood with support for frozen parameters.

    ``frmask`` holds the fixed parameter values with ``np.nan`` marking the
    free parameters, which are filled in from ``thetash``; when ``frmask``
    is None, ``thetash`` is the full parameter vector. The last two entries
    of the full vector are loc and scale.
    """
    try:
        if frmask is None:
            theta = thetash
        else:
            theta = frmask.copy()
            theta[np.isnan(frmask)] = thetash
        args = tuple(theta[:-2])
        loc, scale = theta[-2], theta[-1]
    except IndexError:
        raise ValueError("Not enough input arguments.")
    # invalid shape parameters or a non-positive scale get an infinite
    # penalty so the optimizer steers away from them
    if not self._argcheck(*args) or scale <= 0:
        return np.inf
    standardized = np.array((x - loc) / scale)
    # any observation outside the support also rules the parameters out
    outside = (standardized <= self.a) | (standardized >= self.b)
    if np.any(outside):
        return np.inf
    n_obs = len(standardized)
    return self._nnlf(standardized, *args) + n_obs * np.log(scale)
def fit_fr(self, data, *args, **kwds):
    '''estimate distribution parameters by MLE taking some parameters as fixed

    Parameters
    ----------
    data : ndarray, 1d
        data for which the distribution parameters are estimated,
    args : list ? check
        starting values for optimization
    kwds :
      - 'frozen' : array_like
           values for frozen distribution parameters and, for elements with
           np.nan, the corresponding parameter will be estimated

    Returns
    -------
    argest : ndarray
        estimated parameters

    Examples
    --------
    generate random sample

    >>> np.random.seed(12345)
    >>> x = stats.gamma.rvs(2.5, loc=0, scale=1.2, size=200)

    estimate all parameters

    >>> stats.gamma.fit(x)
    array([ 2.0243194 ,  0.20395655,  1.44411371])
    >>> stats.gamma.fit_fr(x, frozen=[np.nan, np.nan, np.nan])
    array([ 2.0243194 ,  0.20395655,  1.44411371])

    keep loc fixed, estimate shape and scale parameters

    >>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, np.nan])
    array([ 2.45603985,  1.27333105])

    keep loc and scale fixed, estimate shape parameter

    >>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
    array([ 3.00048828])
    >>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.2])
    array([ 2.57792969])

    estimate only scale parameter for fixed shape and loc

    >>> stats.gamma.fit_fr(x, frozen=[2.5, 0.0, np.nan])
    array([ 1.25087891])

    Notes
    -----
    self is an instance of a distribution class. This can be attached to
    scipy.stats.distributions.rv_continuous

    *Todo*

    * check if docstring is correct
    * more input checking, args is list ? might also apply to current fit method
    '''
    loc0, scale0 = lmap(kwds.get, ['loc', 'scale'],[0.0, 1.0])
    Narg = len(args)
    # build the full starting vector: either from the distribution's own
    # _fitstart, or from the user-supplied args padded with 1.0 defaults
    if Narg == 0 and hasattr(self, '_fitstart'):
        x0 = self._fitstart(data)
    elif Narg > self.numargs:
        raise ValueError("Too many input arguments.")
    else:
        args += (1.0,)*(self.numargs-Narg)
        # location and scale are at the end
        x0 = args + (loc0, scale0)
    if 'frozen' in kwds:
        frmask = np.array(kwds['frozen'])
        if len(frmask) != self.numargs+2:
            raise ValueError("Incorrect number of frozen arguments.")
        else:
            # keep starting values for not frozen parameters
            for n in range(len(frmask)):
                # Troubleshooting ex_generic_mle_tdist
                if isinstance(frmask[n], np.ndarray) and frmask[n].size == 1:
                    frmask[n] = frmask[n].item()
            # If there were array elements, then frmask will be object-dtype,
            # in which case np.isnan will raise TypeError
            frmask = frmask.astype(np.float64)
            # only the free (nan-masked) parameters are optimized
            x0 = np.array(x0)[np.isnan(frmask)]
    else:
        frmask = None
    #print(x0
    #print(frmask
    # nnlf_fr re-expands the free parameters into the full vector
    return optimize.fmin(self.nnlf_fr, x0,
                args=(np.ravel(data), frmask), disp=0)
#The next two functions/methods calculate expected value of an arbitrary
#function, however for the continuous functions intquad is use, which might
#require continuouity or smoothness in the function.
#TODO: add option for Monte Carlo integration
def expect(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
    '''Expected value of ``fn`` under this continuous distribution.

    Location and scale only tested on a few examples.

    Parameters
    ----------
    all parameters are keyword parameters
    fn : function (default: identity mapping)
        Function for which integral is calculated. Takes only one argument.
    args : tuple
        argument (parameters) of the distribution
    lb, ub : numbers
        lower and upper bound for integration, default is set to the
        support of the distribution
    conditional : bool (False)
        If true then the integral is corrected by the conditional
        probability of the integration interval. The return value is the
        expectation of the function, conditional on being in the given
        interval.

    Returns
    -------
    expected value : float

    Notes
    -----
    Behaviour for a non-finite integral has not been checked; integration
    behavior is inherited from scipy.integrate.quad.
    '''
    if fn is None:
        # default: expectation of the identity, i.e. the mean
        fn = lambda x: x

    def integrand(x, *dist_args):
        return fn(x) * self.pdf(x, loc=loc, scale=scale, *dist_args)

    # default integration limits are the full (shifted/scaled) support
    if lb is None:
        lb = loc + self.a * scale
    if ub is None:
        ub = loc + self.b * scale
    invfac = 1.0
    if conditional:
        # probability mass of [lb, ub], used to renormalize
        invfac = (self.sf(lb, loc=loc, scale=scale, *args)
                  - self.sf(ub, loc=loc, scale=scale, *args))
    return integrate.quad(integrand, lb, ub, args=args)[0] / invfac
def expect_v2(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
    '''calculate expected value of a function with respect to the distribution

    location and scale only tested on a few examples

    Parameters
    ----------
    all parameters are keyword parameters
    fn : function (default: identity mapping)
        Function for which integral is calculated. Takes only one argument.
    args : tuple
        argument (parameters) of the distribution
    lb, ub : numbers
        lower and upper bound for integration, default is set using
        quantiles of the distribution, see Notes
    conditional : bool (False)
        If true then the integral is corrected by the conditional probability
        of the integration interval. The return value is the expectation
        of the function, conditional on being in the given interval.

    Returns
    -------
    expected value : float

    Notes
    -----
    This function has not been checked for it's behavior when the integral is
    not finite. The integration behavior is inherited from scipy.integrate.quad.

    The default limits are lb = self.ppf(1e-9, *args), ub = self.ppf(1-1e-9, *args)

    For some heavy tailed distributions, 'alpha', 'cauchy', 'halfcauchy',
    'levy', 'levy_l', and for 'ncf', the default limits are not set correctly
    even when the expectation of the function is finite. In this case, the
    integration limits, lb and ub, should be chosen by the user. For example,
    for the ncf distribution, ub=1000 works in the examples.

    There are also problems with numerical integration in some other cases,
    for example if the distribution is very concentrated and the default
    limits are too large.
    '''
    #changes: 20100809
    #correction and refactoring how loc and scale are handled
    #uses now _pdf
    #needs more testing for distribution with bound support, e.g. genpareto
    # integration is done in the standardized space; loc/scale are applied
    # inside the integrand instead of to the limits
    if fn is None:
        def fun(x, *args):
            return (loc + x*scale)*self._pdf(x, *args)
    else:
        def fun(x, *args):
            return fn(loc + x*scale)*self._pdf(x, *args)
    if lb is None:
        #lb = self.a
        # use an extreme quantile as the lower limit; fall back to the
        # support bound if ppf cannot evaluate it
        try:
            lb = self.ppf(1e-9, *args)  #1e-14 quad fails for pareto
        except ValueError:
            lb = self.a
    else:
        lb = max(self.a, (lb - loc)/(1.0*scale))  #transform to standardized
    if ub is None:
        #ub = self.b
        try:
            ub = self.ppf(1-1e-9, *args)
        except ValueError:
            ub = self.b
    else:
        ub = min(self.b, (ub - loc)/(1.0*scale))
    if conditional:
        # renormalize by the probability mass of the (standardized) interval
        invfac = self._sf(lb,*args) - self._sf(ub,*args)
    else:
        invfac = 1.0
    return integrate.quad(fun, lb, ub,
                          args=args, limit=500)[0]/invfac
### for discrete distributions
#TODO: check that for a distribution with finite support the calculations are
# done with one array summation (np.dot)
#based on _drv2_moment(self, n, *args), but streamlined
def expect_discrete(self, fn=None, args=(), loc=0, lb=None, ub=None,
                    conditional=False):
    '''calculate expected value of a function with respect to the distribution
    for discrete distribution

    Parameters
    ----------
    (self : distribution instance as defined in scipy stats)
    fn : function (default: identity mapping)
        Function for which integral is calculated. Takes only one argument.
    args : tuple
        argument (parameters) of the distribution

    optional keyword parameters
    lb, ub : numbers
        lower and upper bound for integration, default is set to the support
        of the distribution, lb and ub are inclusive (ul<=k<=ub)
    conditional : bool (False)
        If true then the expectation is corrected by the conditional
        probability of the integration interval. The return value is the
        expectation of the function, conditional on being in the given
        interval (k such that ul<=k<=ub).

    Returns
    -------
    expected value : float

    Notes
    -----
    * function is not vectorized
    * accuracy: uses self.moment_tol as stopping criterium
      for heavy tailed distribution e.g. zipf(4), accuracy for
      mean, variance in example is only 1e-5,
      increasing precision (moment_tol) makes zipf very slow
    * suppnmin=100 internal parameter for minimum number of points to evaluate
      could be added as keyword parameter, to evaluate functions with
      non-monotonic shapes, points include integers in (-suppnmin, suppnmin)
    * uses maxcount=1000 limits the number of points that are evaluated
      to break loop for infinite sums
      (a maximum of suppnmin+1000 positive plus suppnmin+1000 negative
      integers are evaluated)
    '''
    #moment_tol = 1e-12 # increase compared to self.moment_tol,
    # too slow for only small gain in precision for zipf

    #avoid endless loop with unbound integral, eg. var of zipf(2)
    maxcount = 1000
    suppnmin = 100  #minimum number of points to evaluate (+ and -)

    if fn is None:
        def fun(x):
            #loc and args from outer scope
            return (x+loc)*self._pmf(x, *args)
    else:
        def fun(x):
            #loc and args from outer scope
            return fn(x+loc)*self._pmf(x, *args)
    # used pmf because _pmf does not check support in randint
    # and there might be problems(?) with correct self.a, self.b at this stage
    # maybe not anymore, seems to work now with _pmf

    self._argcheck(*args) # (re)generate scalar self.a and self.b

    # work in the unshifted (loc=0) space; limits default to the support
    if lb is None:
        lb = (self.a)
    else:
        lb = lb - loc
    if ub is None:
        ub = (self.b)
    else:
        ub = ub - loc
    if conditional:
        # probability mass of [lb, ub] used to renormalize (ub inclusive)
        invfac = self.sf(lb,*args) - self.sf(ub+1,*args)
    else:
        invfac = 1.0

    tot = 0.0
    # bulk of the mass: sum vectorized over the [0.001, 0.999] quantile
    # range, widened to at least [-suppnmin, suppnmin] and clipped to limits
    low, upp = self._ppf(0.001, *args), self._ppf(0.999, *args)
    low = max(min(-suppnmin, low), lb)
    upp = min(max(suppnmin, upp), ub)
    supp = np.arange(low, upp+1, self.inc) #check limits
    #print('low, upp', low, upp
    tot = np.sum(fun(supp))
    diff = 1e100
    pos = upp + self.inc
    count = 0

    #handle cases with infinite support
    # add upper-tail terms one by one until they fall below moment_tol
    while (pos <= ub) and (diff > self.moment_tol) and count <= maxcount:
        diff = fun(pos)
        tot += diff
        pos += self.inc
        count += 1

    if self.a < 0: #handle case when self.a = -inf
        # same tail summation toward -inf
        diff = 1e100
        pos = low - self.inc
        while (pos >= lb) and (diff > self.moment_tol) and count <= maxcount:
            diff = fun(pos)
            tot += diff
            pos -= self.inc
            count += 1

    if count > maxcount:
        # replace with proper warning
        print('sum did not converge')
    return tot/invfac
# ---- patch scipy.stats: attach the constrained-fit helpers and the
# expectation methods defined above onto the distribution base classes ----
stats.distributions.rv_continuous.fit_fr = fit_fr
stats.distributions.rv_continuous.nnlf_fr = nnlf_fr
stats.distributions.rv_continuous.expect = expect
stats.distributions.rv_discrete.expect = expect_discrete
stats.distributions.beta_gen._fitstart = _fitstart_beta  #not tried out yet
stats.distributions.poisson_gen._fitstart = _fitstart_poisson  #not tried out yet
########## end patching scipy
def distfitbootstrap(sample, distr, nrepl=100):
    '''run bootstrap for estimation of distribution parameters

    hard coded: only one shape parameter is allowed and estimated,
    loc=0 and scale=1 are fixed in the estimation

    Parameters
    ----------
    sample : ndarray
        original sample data for bootstrap
    distr : distribution instance with fit_fr method
    nrepl : int
        number of bootstrap replications

    Returns
    -------
    res : array (nrepl,)
        parameter estimates for all bootstrap replications
    '''
    n_obs = len(sample)
    estimates = np.zeros(nrepl)
    for rep in range(nrepl):
        # resample with replacement, then refit with loc/scale frozen
        indices = np.random.randint(n_obs, size=n_obs)
        resampled = sample[indices]
        estimates[rep] = distr.fit_fr(resampled, frozen=[np.nan, 0.0, 1.0])
    return estimates
def distfitmc(sample, distr, nrepl=100, distkwds=None):
    '''run Monte Carlo for estimation of distribution parameters

    hard coded: only one shape parameter is allowed and estimated,
    loc=0 and scale=1 are fixed in the estimation

    Parameters
    ----------
    sample : ndarray
        original sample data, in Monte Carlo only used to get nobs,
    distr : distribution instance with fit_fr method
    nrepl : int
        number of Monte Carlo replications
    distkwds : dict, optional
        keyword arguments for ``distr.rvs``; must contain the key ``'arg'``
        (the true shape parameter), which is removed before the remaining
        entries are forwarded to ``rvs``.  The caller's dict is not modified.

    Returns
    -------
    res : array (nrepl,)
        parameter estimates for all Monte Carlo replications
    '''
    # Original signature used the mutable default ``distkwds={}`` and then
    # mutated it via ``pop`` — which both shared state between calls and
    # destroyed the caller's dict.  Work on a copy instead.
    kwds = dict(distkwds) if distkwds else {}
    arg = kwds.pop('arg')
    nobs = len(sample)
    res = np.zeros(nrepl)
    for ii in range(nrepl):
        # draw a fresh sample from the true distribution and refit with
        # loc/scale frozen at 0/1
        x = distr.rvs(arg, size=nobs, **kwds)
        res[ii] = distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
    return res
def printresults(sample, arg, bres, kind='bootstrap'):
    '''calculate and print Bootstrap or Monte Carlo results

    Parameters
    ----------
    sample : ndarray
        original sample data
    arg : float (for general case will be array)
        true parameter value
    bres : ndarray
        parameter estimates from Bootstrap or Monte Carlo run
    kind : {'bootstrap', 'montecarlo'}
        output is printed for Bootstrap (default) or Monte Carlo

    Returns
    -------
    None, currently only printing

    Notes
    -----
    Relies on the module-level names ``distr``, ``nobs`` and ``nrepl``
    assigned in the ``__main__`` block below.

    still a bit a mess because it is used for both Bootstrap and Monte Carlo
    made correction:
        reference point for bootstrap is estimated parameter
    not clear:
        I'm not doing any ddof adjustment in estimation of variance, do we
        need ddof>0 ?
    todo: return results and string instead of printing
    '''
    print('true parameter value')
    print(arg)
    print('MLE estimate of parameters using sample (nobs=%d)' % (nobs))
    argest = distr.fit_fr(sample, frozen=[np.nan, 0.0, 1.0])
    print(argest)
    if kind == 'bootstrap':
        # bootstrap compares to the estimate from the sample,
        # not the true parameter
        arg = argest
    print('%s distribution of parameter estimate (nrepl=%d)' % (kind, nrepl))
    print('mean = %f, bias=%f' % (bres.mean(0), bres.mean(0)-arg))
    print('median', np.median(bres, axis=0))
    print('var and std', bres.var(0), np.sqrt(bres.var(0)))
    bmse = ((bres - arg)**2).mean(0)
    print('mse, rmse', bmse, np.sqrt(bmse))
    bressorted = np.sort(bres)
    print('%s confidence interval (90%% coverage)' % kind)
    # indices must be Python ints: np.floor returns a float, which is
    # rejected as an array index by modern numpy
    print(bressorted[int(nrepl*0.05)], bressorted[int(nrepl*0.95)])
    print('%s confidence interval (90%% coverage) normal approximation' % kind)
    print(stats.norm.ppf(0.05, loc=bres.mean(), scale=bres.std()),)
    print(stats.norm.isf(0.05, loc=bres.mean(), scale=bres.std()))
    print('Kolmogorov-Smirnov test for normality of %s distribution' % kind)
    print(' - estimated parameters, p-values not really correct')
    print(stats.kstest(bres, 'norm', (bres.mean(), bres.std())))
if __name__ == '__main__':
    # which of the example sections to run
    examplecases = ['largenumber', 'bootstrap', 'montecarlo'][:]

    if 'largenumber' in examplecases:
        # sanity check: the constrained fit (fit_fr) should agree with the
        # unconstrained fit on a reasonably large sample
        print('\nDistribution: vonmises')
        for nobs in [200]:#[20000, 1000, 100]:
            x = stats.vonmises.rvs(1.23, loc=0, scale=1, size=nobs)
            print('\nnobs:', nobs)
            print('true parameter')
            print('1.23, loc=0, scale=1')
            print('unconstrained')
            print(stats.vonmises.fit(x))
            print(stats.vonmises.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
            print('with fixed loc and scale')
            print(stats.vonmises.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))

        print('\nDistribution: gamma')
        distr = stats.gamma
        arg, loc, scale = 2.5, 0., 20.
        for nobs in [200]:#[20000, 1000, 100]:
            x = distr.rvs(arg, loc=loc, scale=scale, size=nobs)
            print('\nnobs:', nobs)
            print('true parameter')
            print('%f, loc=%f, scale=%f' % (arg, loc, scale))
            print('unconstrained')
            print(distr.fit(x))
            print(distr.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
            print('with fixed loc and scale')
            print(distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))
            print('with fixed loc')
            print(distr.fit_fr(x, frozen=[np.nan, 0.0, np.nan]))

    # distribution used for the bootstrap / Monte Carlo examples; these
    # globals (distr, nobs, nrepl) are also read by printresults above
    ex = ['gamma', 'vonmises'][0]
    if ex == 'gamma':
        distr = stats.gamma
        arg, loc, scale = 2.5, 0., 1
    elif ex == 'vonmises':
        distr = stats.vonmises
        arg, loc, scale = 1.5, 0., 1
    else:
        raise ValueError('wrong example')

    nobs = 100
    nrepl = 1000

    sample = distr.rvs(arg, loc=loc, scale=scale, size=nobs)

    print('\nDistribution:', distr)
    if 'bootstrap' in examplecases:
        print('\nBootstrap')
        bres = distfitbootstrap(sample, distr, nrepl=nrepl )
        printresults(sample, arg, bres)

    if 'montecarlo' in examplecases:
        print('\nMonteCarlo')
        mcres = distfitmc(sample, distr, nrepl=nrepl,
                          distkwds=dict(arg=arg, loc=loc, scale=scale))
        printresults(sample, arg, mcres, kind='montecarlo')
|
bsd-3-clause
| -3,253,983,564,312,369,000
| 32.03989
| 91
| 0.617527
| false
| 3.827888
| false
| false
| false
|
codefisher/mozbutton_sdk
|
builder/restartless_button.py
|
1
|
28578
|
import os
import re
import json
import codecs
import lxml.etree as ET
from copy import deepcopy
from collections import namedtuple, defaultdict
# Pillow is optional: it is only needed when image processing is used.
try:
    from PIL import Image
except ImportError:
    pass
from builder.ext_button import Button, Option, ChromeString, ChromeFile
# Python 2/3 compatibility shim: basestring only exists on Python 2.
try:
    basestring
except NameError:
    basestring = str  # py3
# (command, button) pair describing one keyboard shortcut.
Keys = namedtuple("Keys", ['command', 'button'])
# Record describing extra UI injected into an existing window element.
ExtraUI = namedtuple("ExtraUI", ["parent", "parent_id", "index", "code", "after"])
class RestartlessButton(Button):
def __init__(self, *args, **kwargs):
    """Initialise the restartless-button builder and collect the per-button
    bootstrap JS fragments (global.js / startup.js / shutdown.js) found in
    each button folder's ``bootstrap`` sub-directory.
    """
    super(RestartlessButton, self).__init__(*args, **kwargs)
    self._ui_ids = set()            # ids of UI elements this add-on creates
    self._included_js_files = []    # filled in by get_js_files()
    self._bootstrap_globals = []    # JS spliced into bootstrap.js (global scope)
    self._bootstrap_startup = []    # JS run at add-on startup
    self._bootstrap_shutdown = []   # JS run at add-on shutdown
    for folder, button, files in self._info:
        if "bootstrap" in files:
            for file_name in os.listdir(os.path.join(folder, "bootstrap")):
                if file_name[0] != ".":  # skip hidden files
                    with open(os.path.join(folder, "bootstrap", file_name), "r") as js_fp:
                        data = js_fp.read()
                        if file_name == "global.js":
                            self._bootstrap_globals.append(data)
                        elif file_name == "startup.js":
                            self._bootstrap_startup.append(data)
                        elif file_name == "shutdown.js":
                            self._bootstrap_shutdown.append(data)
def get_files(self):
    """Yield (filename, contents) pairs for every generated JSM module,
    appending the ``.jsm`` extension to each module name."""
    for module_name, contents in self.get_jsm_files().items():
        yield module_name + ".jsm", contents
def locale_files(self, button_locales, *args, **kwargs):
    """Yield (locale, file_name, data) triples for all locale files.

    Emits button_labels.properties and keys.properties per locale, then
    whatever the base class produces, restricted to the locales that have
    DTD data (locales_inuse).
    """
    dtd_data = button_locales.get_dtd_data(self.get_locale_strings(),
        self, untranslated=False, format_type="properties")
    for locale, data in dtd_data.items():
        yield locale, "button_labels.properties", data
    # only ship the other locale files for locales we actually have
    locales_inuse = dtd_data.keys()
    key_strings = button_locales.get_string_data(self.get_key_strings(),
        self, format_type="properties")
    for locale, data in self.locale_file_filter(key_strings, locales_inuse):
        yield locale, "keys.properties", data
    for locale, file_name, data in super(RestartlessButton, self).locale_files(button_locales, locales_inuse):
        yield locale, file_name, data
def jsm_keyboard_shortcuts(self, file_name):
    """Yield a Keys(command, button) record for every button in *file_name*
    that has both a keyboard shortcut and a command.

    Yields nothing when keyboard shortcuts are disabled in the settings.
    """
    if not self._settings.get("use_keyboard_shortcuts"):
        return
    for button_id in self._button_keys.keys():
        command = self._button_commands.get(file_name, {}).get(button_id)
        if command is not None:
            yield Keys(self._patch_call(command), button_id)
def option_data(self):
    """Build the options-dialog data.

    When keyboard shortcuts are enabled, renders a key-option XUL row per
    button and registers it as an extra option item, then delegates to the
    base class and appends the extra option scripts.
    """
    scripts = []
    if self._settings.get("use_keyboard_shortcuts"):
        scripts.append("key-option.js")
        with open(self.find_file("key-option.xul"), "r") as key_option_file:
            key_option_template = key_option_file.read()
        for button in self._button_keys.keys():
            xul = self.format_string(key_option_template,
                                     button=button,
                                     menu_label=button + ".label")
            applications = " ".join(self._button_applications[button])
            self._button_options[button + "-key-item"].append(
                Option("tb-key-shortcut.option.title:lightning.png:" + applications, xul))
            # the key item applies to the same applications as its button
            self._button_applications[
                button + "-key-item"] = self._button_applications[button]
    files, javascript = super(RestartlessButton, self).option_data()
    return files, javascript + scripts
def get_pref_list(self):
    """Return the list of (pref_name, default_value) pairs, extending the
    base list with per-button shortcut prefs (disabled flag, key, modifier)
    when keyboard shortcuts are enabled."""
    settings = super(RestartlessButton, self).get_pref_list()
    pref_root = self._settings.get("pref_root")
    if self._settings.get('use_keyboard_shortcuts'):
        for button in self._button_keys.keys():
            settings.append(("{}key-disabled.{}".format(pref_root, button), 'false'))
            # key and modifier defaults are looked up from the locale file
            properties = self.pref_locale_file("'chrome://{chrome_name}/locale/{prefix}keys.properties'")
            settings.append(("{}key.{}".format(pref_root, button), properties))
            settings.append(("{}modifier.{}".format(pref_root, button), properties))
    return settings
def get_js_files(self):
    """Return the dict of JS support files, adding the key-option script
    when keyboard shortcuts are enabled.  Also records which JS files are
    included so get_jsm_files() can reference them later."""
    js_files = super(RestartlessButton, self).get_js_files()
    if self._settings.get("use_keyboard_shortcuts"):
        with open(self.find_file("key-option.js")) as key_option_fp:
            js_files["key-option"] = self.string_subs(key_option_fp.read())
    self._included_js_files = js_files.keys()
    return js_files
def get_chrome_strings(self):
    """Yield ChromeString records: the base class set, plus the generated
    bootstrap.js and (when present) the default-preferences file."""
    for chrome_string in super(RestartlessButton, self).get_chrome_strings():
        yield chrome_string
    yield ChromeString(file_name='bootstrap.js', data=self.create_bootstrap())
    defaults = self.get_defaults()
    if defaults:
        yield ChromeString(file_name=os.path.join("chrome", "content", "defaultprefs.js"), data=defaults)
def get_chrome_files(self):
    """Yield ChromeFile records: the base class set plus the shared
    customizable.jsm support module."""
    for chrome_file in super(RestartlessButton, self).get_chrome_files():
        yield chrome_file
    yield ChromeFile(file_name=os.path.join("chrome", "content", "customizable.jsm"), path=self.find_file('customizable.jsm'))
def create_bootstrap(self):
    """Render the add-on's bootstrap.js from its template.

    Builds the chrome resource registration, the per-window module loader
    chain (one branch per overlay URI), and splices in the collected
    global/startup/shutdown JS fragments.
    """
    chrome_name = self._settings.get("chrome_name")
    loaders = []
    resource = ""
    if self.resource_files:
        resource = "createResource('{0}', 'chrome://{0}/content/resources/');".format(chrome_name)
    # invert files->windows into window->modules so each overlay URI loads
    # exactly the JSM modules that target it
    window_modules = defaultdict(list)
    for file_name in self._button_files:
        for overlay in self._settings.get("files_to_window").get(file_name, ()):
            window_modules[overlay].append(file_name)
    for overlay, modules in window_modules.items():
        mods = "\n\t\t".join(["modules.push('chrome://{0}/content/{1}.jsm');".format(chrome_name, file_name) for file_name in modules])
        loaders.append("(uri == '{0}') {{\n\t\t{1}\n\t}}".format(overlay, mods))
    if self._settings.get("show_updated_prompt"):
        install_template = self.env.get_template('bootstrap.js')
        install = install_template.render(**self._settings)
    else:
        install = ""
    template = self.env.get_template('bootstrap.js')
    return template.render(
        resource=resource, install=install,
        globals=self.string_subs("\n".join(self._bootstrap_globals)),
        startup=self.string_subs("\n".join(self._bootstrap_startup)),
        shutdown=self.string_subs("\n".join(self._bootstrap_shutdown)),
        loaders = "if" + " else if".join(loaders),
        **self._settings)
def _jsm_create_menu(self, file_name, buttons):
    """Generate the JS statements that build this file's menu items.

    Items without a parent_id go into the add-on's own submenu (created
    from the menu.js template, one per configured location); items with a
    parent_id are appended to an existing application menu.
    Returns the statements joined as a single string, or '' when menu
    items are disabled.
    """
    if not self._settings.get('menuitems'):
        return ''
    statements = []
    data = self.create_menu_dom(file_name, buttons)
    in_submenu = [menuitem for menuitem in data if menuitem.parent_id is None]
    in_menu = [menuitem for menuitem in data if menuitem.parent_id is not None]
    num = 0
    template = self.env.get_template('menu.js')
    if in_submenu:
        menu_id, menu_label, locations = self._settings.get("menu_meta")
        if isinstance(locations, basestring):
            locations = [locations]
        for i, location in enumerate(locations):
            menu_id_num = "{0}_{1}".format(menu_id, i) if i else menu_id
            meta = self._settings.get("file_to_menu").get(location, {}).get(file_name)
            if meta:
                menu_name, insert_after = meta
                statements.append(template.render(**{
                    "menu_name": menu_name,
                    "menu_id": menu_id_num,
                    "label": menu_label,
                    "class": "menu-iconic",
                    "menu_label": menu_label,
                    "insert_after": insert_after,
                    "menuitems_sorted": self._settings.get("menuitems_sorted")
                }))
                # the template creates three variables (menu, popup, parent),
                # so advance the numbering past them
                num += 3
        for item, _, _ in in_submenu:
            # menupopup_2 is the popup variable created by the template
            item_statements, count, _ = self._create_dom(
                item, top="menupopup_2", count=num, doc="document")
            num = count + 1
            statements.extend(item_statements)
    for item, menu_name, insert_after in in_menu:
        statements.append("var menupopup_{0} = document.getElementById('{1}');".format(num, menu_name))
        var_name = "menupopup_%s" % num
        num += 1
        item.attrib["insertafter"] = insert_after
        item_statements, count, _ = self._create_dom(item, top=var_name, count=num)
        num = count + 1
        statements.extend(item_statements)
    return "\n\t".join(statements)
def _dom_string_lookup(self, value):
result = []
items = re.findall(r'&.+?;|[^&;]+', value)
for item in items:
if item == "&brandShortName;":
result.append("Cc['@mozilla.org/xre/app-info;1'].createInstance(Ci.nsIXULAppInfo).name")
elif item[0] == '&' and item[-1] == ';':
result.append("buttonStrings.get('%s')" % item[1:-1])
else:
result.append("'%s'" % item)
return ' + '.join(result)
def _create_dom(self, root, top=None, count=0, doc='document', child_parent=None, rename=None, append_children=True):
num = count
if rename == None:
rename = {}
children = []
statements = [
"var %s_%s = %s.createElement('%s');" % (root.tag, num, doc, rename.get(root.tag, root.tag)),
]
javascript_object = self._settings.get("javascript_object")
for key, value in sorted(root.attrib.items(), key=self._attr_key):
if key == 'id':
statements.append("%s_%s.id = '%s';" % (root.tag, num, value))
elif key in ('label', 'tooltiptext') or (root.tag == 'key' and key in ('key', 'keycode', 'modifiers')):
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, self._dom_string_lookup(value))))
elif key == "class":
for val in value.split():
statements.append('%s_%s.classList.add("%s");' % (root.tag, num, val))
elif key[0:2] == 'on':
if key == 'oncommand' and root.tag == 'key':
# we do this because key elements without a oncommand are optimized away
# but we can't call our function, because that might not exist
# in the window scope, so the event listener has to be used
statements.append("%s_%s.setAttribute('oncommand', 'void(0);');" % (root.tag, num))
statements.append("%s_%s.addEventListener('%s', function(event) {\n\t\t\t\t%s\n\t\t\t}, false);" % (root.tag, num, key[2:], self._patch_call(value)))
elif key == "insertafter":
pass
elif key == "showamenu":
statements.append("{}_{}.addEventListener('DOMMenuItemActive', {}.menuLoaderEvent, false);".format(root.tag, num, javascript_object))
statements.append("%s_%s._handelMenuLoaders = true;" % (root.tag, num))
statements.append("%s_%s.setAttribute('%s', '%s');" % ((root.tag, num, key, value)))
elif key == "toolbarname":
# this is just for our custom toolbars which are named "Toolbar Buttons 1" and the like
name, sep, other = value.partition(' ')
other = " + '%s%s'" % (sep, other) if sep else ""
value = "buttonStrings.get('%s')%s" % (name, other)
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, value)))
elif key == "type" and value == "menu-button" and 'id' in root.attrib:
statements.append('''if(extensionPrefs.getPrefType('menupopup.hide.{0}') == extensionPrefs.PREF_INVALID || !extensionPrefs.getBoolPref('menupopup.hide.{0}')) {{\n\t\t\t\t{1}_{2}.setAttribute("{3}", "{4}");\n\t\t\t}}'''.format(root.attrib['id'], root.tag, num, key, value))
else:
statements.append('%s_%s.setAttribute("%s", "%s");' % ((root.tag, num, key, value)))
for node in root:
sub_nodes, count, _ = self._create_dom(node, '%s_%s' % (root.tag, num), count+1, doc=doc, rename=rename, child_parent=(child_parent if top == None else None))
if append_children:
statements.extend(sub_nodes)
else:
children = sub_nodes
if not top:
statements.append('return %s_%s;' % (root.tag, num))
else:
if "insertafter" in root.attrib:
statements.append("%s.insertBefore(%s_%s, %s.getElementById('%s').nextSibling);" % (top, root.tag, num, doc, root.attrib.get("insertafter")))
else:
statements.append('%s.appendChild(%s_%s);' % (top if not child_parent else child_parent, root.tag, num))
return statements, count, children
def _attr_key(self, attr):
order = ('id', 'defaultarea', 'type', 'label', 'tooltiptext', 'command', 'onclick', 'oncommand')
if attr[0].lower() in order:
return order.index(attr[0].lower())
return 100
def _create_dom_button(self, button_id, root, file_name, count, toolbar_ids):
    """Create a CustomizableUI.createWidget() call for a complex button.

    Complex buttons (those with children or non-trivial attributes) are
    built node-by-node at widget-creation time.  Three shapes are handled:
    buttons with a ``viewid`` become 'view' widgets, buttons marked
    ``usepanelview`` get a generated panel view clone of their popup, and
    everything else becomes a plain 'custom' widget built by onBuild.
    NOTE(review): the embedded JS strings below use literal tab
    indentation; exact whitespace reconstructed from context.
    """
    add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
    if 'viewid' in root.attrib:
        self._ui_ids.add(root.attrib["viewid"])
        statements, _, children = self._create_dom(root, child_parent="popupset", append_children=False)
        # replace the generated popup creation with one that targets the
        # Australis panel multi-view when it exists
        children[0] = """var popupset = document.getElementById('PanelUI-multiView');
			if(popupset) {
				var menupopup_1 = document.createElement('panelview');
			} else {
				var menupopup_1 = document.createElement('menupopup');
				popupset = document.documentElement;
			}"""
        data = {
            "type": "'view'",
            "onBeforeCreated": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(children),
        }
    elif 'usepanelview' in root.attrib:
        self._ui_ids.add("{0}-panel-view".format(root.attrib["id"]))
        # when the button lives in the menu panel, show the generated
        # panel view instead of the normal popup
        root.attrib["onclick"] = """if(event.target != event.currentTarget || ('button' in event && event.button != 0)) {{
				return;
			}}
			var item = event.target;
			if(item.nodeName == 'key') {{
				item = document.getElementById('{0}');
			}}
			if(item.getAttribute('cui-areatype') == 'menu-panel') {{
				var win = item.ownerDocument.defaultView;
				event.preventDefault();
				event.stopPropagation();
				item.ownerDocument.getElementById('{0}-panel-view').ownerButton = item;
				win.PanelUI.showSubView('{0}-panel-view', item, CustomizableUI.AREA_PANEL);
			}}""".format(root.attrib["id"])
        if 'type' not in root.attrib:
            popup_opener = """ else {
				item.firstChild.openPopup(item, "after_start");
			}"""
            if 'oncommand' not in root.attrib:
                root.attrib["oncommand"] = root.attrib["onclick"] + popup_opener
            else:
                root.attrib["onclick"] += popup_opener
        statements, _, _ = self._create_dom(root)
        # clone the button and turn its popup into the panel view contents
        root_clone = deepcopy(root)
        popup = root_clone[0]
        if root.attrib['usepanelview'] == 'button-menu':
            # button-menu style: the command itself becomes the first item
            del root_clone.attrib["type"]
            popup.insert(0, ET.Element("menuseparator"))
            popup.insert(0, ET.Element("menuitem", root_clone.attrib))
        for node in popup:
            node.attrib['class'] = 'subviewbutton'
        # popup events map onto the corresponding view events
        if 'onpopupshowing' in popup.attrib:
            popup.attrib['onViewShowing'] = popup.attrib['onpopupshowing']
            del popup.attrib['onpopupshowing']
        if 'onpopuphiding' in popup.attrib:
            popup.attrib['onViewHiding'] = popup.attrib['onpopuphiding']
            del popup.attrib['onpopuphiding']
        _, _, children = self._create_dom(root_clone, child_parent="popupset", rename={'menuitem': 'toolbarbutton'}, append_children=False)
        children.pop(0)
        data = {
            "type": "'custom'",
            "onBuild": '''function (document) {
				var window = document.defaultView;
				var popupset = document.getElementById('PanelUI-multiView');
				if(popupset) {
					var menupopup_1 = document.createElement('panelview');
					%s
					menupopup_1.id = "%s-panel-view";
				}
				%s
			}''' % ("\n\t\t\t\t\t".join(children), root.attrib['id'], "\n\t\t\t\t".join(statements))
        }
    else:
        statements, _, _ = self._create_dom(root)
        data = {
            "type": "'custom'",
            "onBuild": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(statements)
        }
    self._apply_toolbox(file_name, data)
    toolbar_max_count = self._settings.get("buttons_per_toolbar")
    # default placement: main toolbar when requested, otherwise one of the
    # generated extra toolbars
    if add_to_main_toolbar and button_id in add_to_main_toolbar:
        data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
    elif self._settings.get("put_button_on_toolbar"):
        toolbar_index = count // toolbar_max_count
        if len(toolbar_ids) > toolbar_index:
            data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
    for key, value in root.attrib.items():
        if key in ('label', 'tooltiptext'):
            data[key] = self._dom_string_lookup(value)
        elif key == "id":
            data[key] = "'%s'" % value
        elif key == 'oncommand':
            self._button_commands[file_name][button_id] = value
        elif key == 'viewid':
            data["viewId"] = "'%s'" % value
        elif key == 'onviewshowing':
            data["onViewShowing"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
        elif key == 'onviewhideing':
            # NOTE(review): attribute name 'onviewhideing' (sic) — presumably
            # matches the spelling used in the button XUL sources; verify.
            data["onViewHiding"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
    for js_file in self._get_js_file_list(file_name):
        if self._button_js_setup.get(js_file, {}).get(button_id):
            data["onCreated"] = "function(aNode){\n\t\t\tvar document = aNode.ownerDocument;\n\t\t\t%s\n\t\t}" % self._button_js_setup[js_file][button_id]
    items = sorted(data.items(), key=self._attr_key)
    return "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for key, value in items)
def _apply_toolbox(self, file_name, data):
toolbox_info = self._settings.get("file_to_toolbar_box2").get(file_name)
if toolbox_info:
window_file, toolbox_id = toolbox_info
data["toolbox"] = "'%s'" % toolbox_id
if window_file:
data["window"] = "'%s'" % window_file
def _patch_call(self, value):
data = []
if re.search(r'\bthis\b', value):
value = re.sub(r'\bthis\b', 'aThis', value)
data.append("var aThis = event.currentTarget;")
if re.search(r'\bdocument\b', value):
data.append("var document = event.target.ownerDocument;")
if re.search(r'\bwindow\b', value):
data.append("var window = event.target.ownerDocument.defaultView;")
data.append(value)
return "\n\t\t\t\t".join(data)
def _create_jsm_button(self, button_id, root, file_name, count, toolbar_ids):
    """Create a CustomizableUI.createWidget() call for a simple button —
    one with no children and only the well-known attributes, so no DOM
    construction code is needed."""
    toolbar_max_count = self._settings.get("buttons_per_toolbar")
    add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
    data = {}
    attr = root.attrib
    self._apply_toolbox(file_name, data)
    # default placement mirrors _create_dom_button
    if add_to_main_toolbar and button_id in add_to_main_toolbar:
        data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
    elif self._settings.get("put_button_on_toolbar"):
        toolbar_index = count // toolbar_max_count
        if len(toolbar_ids) > toolbar_index:
            data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
    for key, value in attr.items():
        if key in ('label', 'tooltiptext'):
            data[key] = self._dom_string_lookup(value)
        elif key == "id":
            data[key] = "'%s'" % value
        elif key in ('onclick', 'oncommand'):
            if key == 'oncommand':
                # remember the command so keyboard shortcuts can reuse it
                self._button_commands[file_name][button_id] = value
            key = 'onCommand' if key == 'oncommand' else 'onClick'
            data[key] = "function(event) {\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
    for js_file in self._get_js_file_list(file_name):
        if self._button_js_setup.get(js_file, {}).get(button_id):
            data["onCreated"] = "function(aNode) {\n\t\t\t\tvar document = aNode.ownerDocument;\n\t\t\t\t%s\n\t\t\t}" % self._button_js_setup[js_file][button_id]
    items = sorted(data.items(), key=self._attr_key)
    result = "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for (key, value) in items)
    return result
def get_jsm_files(self):
    """Render one JSM module per button file.

    For each file, builds the widget-creation code for every button
    (simple buttons via _create_jsm_button, anything else via
    _create_dom_button), the extra toolbars, the menu items, keyboard
    shortcuts and extra UI, and renders them into the button.jsm template.
    Returns {file_name: rendered module source}.
    """
    result = {}
    # buttons restricted to these attributes (and no children) can use the
    # simple createWidget() path
    simple_attrs = {'label', 'tooltiptext', 'id', 'oncommand', 'onclick', 'key', 'class'}
    button_hash, toolbar_template = self._get_toolbar_info()
    template = self.env.get_template('button.jsm')
    javascript_object = self._settings.get("javascript_object")
    for file_name, values in self._button_xul.items():
        jsm_buttons = []
        js_includes = [js_file for js_file in self._get_js_file_list(file_name)
                       if js_file != "loader" and js_file in self._included_js_files]
        toolbars, toolbar_ids = self._create_jsm_toolbar(button_hash, toolbar_template, file_name, values)
        count = 0
        modules = set()
        for button_id, xul in values.items():
            # fix: escape bare ampersands (DTD entity references) so the
            # XUL fragment parses as XML; the entities are turned back into
            # runtime lookups later by _dom_string_lookup.  The previous
            # no-op replace('&', '&') was a mangled '&amp;'.
            root = ET.fromstring(xul.replace('&', '&amp;'))
            modules.update(self._modules[button_id])
            attr = root.attrib
            if not len(root) and not set(attr.keys()).difference(simple_attrs) and ("class" not in attr or attr["class"] == "toolbarbutton-1 chromeclass-toolbar-additional"):
                jsm_buttons.append(self._create_jsm_button(button_id, root, file_name, count, toolbar_ids))
            else:
                jsm_buttons.append(self._create_dom_button(button_id, root, file_name, count, toolbar_ids))
            count += 1
        # these modules are imported unconditionally by the template
        default_mods = {
            "resource://gre/modules/Services.jsm",
            "resource:///modules/CustomizableUI.jsm",
            "resource://services-common/stringbundle.js"
        }
        modules_import = "\n".join("try { Cu.import('%s'); } catch(e) {}" % mod for mod in modules if mod and mod not in default_mods)
        if self._settings.get("menu_meta"):
            menu_id, menu_label, _ = self._settings.get("menu_meta")
        else:
            menu_id, menu_label = "", ""
        end = set()
        menu = self._jsm_create_menu(file_name, values)
        for js_file in set(self._get_js_file_list(file_name) + [file_name]):
            if self._button_js_setup.get(js_file, {}):
                end.update(self._button_js_setup[js_file].values())
        if (self._settings.get("menuitems") and menu) or self._settings.get('location_placement'):
            end.add(javascript_object + ".setUpMenuShower(document);")
        extra_ui = self.create_extra_ui(file_name, values)
        result[file_name] = template.render(
            modules=modules_import,
            locale_file_prefix=self._settings.get("locale_file_prefix"),
            scripts=js_includes,
            button_ids=json.dumps(list(values.keys())),
            toolbar_ids=json.dumps(toolbar_ids),
            toolbars=toolbars,
            menu_id=menu_id,
            ui_ids=json.dumps(list(self._ui_ids)),
            toolbox=self._settings.get("file_to_toolbar_box").get(file_name, ('', ''))[1],
            menu=menu,
            keys=list(self.jsm_keyboard_shortcuts(file_name)),
            end="\n\t".join(end),
            buttons=jsm_buttons,
            extra_ui=extra_ui,
            javascript_object=self._settings.get("javascript_object"),
            pref_root=self._settings.get("pref_root"),
            chrome_name=self._settings.get("chrome_name")
        )
    return result
def create_extra_ui(self, file_name, values):
    """Build ExtraUI records for buttons injected into an existing window
    location (the 'location_placement' setting) rather than a toolbar.

    Each button's XUL gets an ``insertafter`` anchor and an ``-extra-ui``
    id suffix, attribute overrides from the settings are applied (a value
    of None deletes the attribute), and the DOM-building JS is generated.
    Returns a list of ExtraUI namedtuples (possibly empty).
    """
    location = self._settings.get("location_placement")
    result = []
    # .get(location, {}) guards against a location with no file mapping
    if location and file_name in self._settings.get("file_to_location", {}).get(location, {}):
        for index, (button_id, xul) in enumerate(values.items()):
            parent, parent_id, after, attrib = self._settings.get("file_to_location").get(location).get(file_name)
            # fix: escape bare ampersands so the XUL parses as XML; the
            # previous no-op replace('&', '&') was a mangled '&amp;'
            root = ET.fromstring(xul.replace('&', '&amp;'))
            root.attrib["insertafter"] = after
            root.attrib["id"] += "-extra-ui"
            self._ui_ids.add(root.attrib["id"])
            if attrib:
                for name, value in attrib.items():
                    if value is None:
                        del root.attrib[name]
                    else:
                        root.attrib[name] = value
            parent_var = "{}_{}".format(parent, index)
            statements, _, _ = self._create_dom(root, top=parent_var)
            result.append(ExtraUI(parent, parent_id, index, "\n\t\t".join(statements), after))
    return result
def _create_jsm_toolbar(self, button_hash, toolbar_template, file_name, values):
    """Generate the createToolbar() JS calls and ids for this file's extra
    toolbars (and bottom bars).

    Toolbar ids are derived from a running hash so they are stable per
    build; a toggle button is registered in *values* for each toolbar.
    Returns (joined JS statements, list of toolbar ids).
    """
    toolbar_ids = []
    toolbars = []
    if file_name in self._settings.get("extra_toolbars_disabled"):
        return '', []
    count = 0
    max_count = self._settings.get("buttons_per_toolbar")
    buttons = list(values.keys())
    # NOTE(review): 'include_satusbars' (sic) — presumably the same typo is
    # used as the settings key everywhere; confirm before renaming.
    for box_setting, include_setting in [("file_to_toolbar_box", "include_toolbars"),
                                         ("file_to_bottom_box", "include_satusbars")]:
        toolbar_node, toolbar_box = self._settings.get(box_setting).get(file_name, ('', ''))
        data = {
            "defaultset": "",
            "persist": "collapsed,hidden",
            "context": "toolbar-context-menu",
            "class": "toolbar-buttons-toolbar chromeclass-toolbar",
            "mode": "icons",
            "iconsize": "small",
            "customizable": "true",
        }
        if self._settings.get(include_setting) and toolbar_box:
            number = self.toolbar_count(include_setting, values, max_count)
            for i in range(number):
                if self._settings.get("put_button_on_toolbar"):
                    # pre-populate each toolbar with its slice of buttons
                    data["defaultset"] = ",".join(buttons[i * max_count:(i + 1) * max_count])
                # NOTE(review): on py3, bytes(i) is i zero bytes (on py2 it
                # is str(i)), so the hash stream differs between the two —
                # confirm which behaviour the generated ids must match.
                button_hash.update(bytes(i))
                hash = button_hash.hexdigest()[:6]
                # only number the label when there is more than one toolbar
                label_number = "" if (number + count) == 1 else " %s" % (i + count + 1)
                toolbar_ids.append("tb-toolbar-%s" % hash)
                if include_setting != "include_toolbars":
                    data["toolboxid"] = toolbar_box
                data["id"] = "tb-toolbar-%s" % hash
                toolbarname = self._dom_string_lookup("&tb-toolbar-buttons-toggle-toolbar.name;%s" % label_number)
                # register the show/hide toggle button for this toolbar
                values["tb-toolbar-buttons-toggle-toolbar-%s" % hash] = toolbar_template.replace("{{hash}}", hash).replace("{{ number }}", label_number)
                toolbars.append("""createToolbar(document, '%s', %s, %s)""" % (toolbar_box, json.dumps(data), toolbarname))
            count += number
    return "\n\t\t".join(toolbars), toolbar_ids
|
mit
| 4,315,887,822,240,294,400
| 52.217877
| 288
| 0.549689
| false
| 3.762243
| false
| false
| false
|
soybean217/lora-python
|
UServer/admin_server/admin_http_api/api/api_group.py
|
1
|
3730
|
import json
from wtforms import ValidationError
from userver.object.application import Application
from . import api, root
from flask import request, Response
from userver.object.group import Group
from binascii import hexlify
from utils.errors import KeyDuplicateError, PatchError
from .decorators import group_filter_valid, group_exists
from .forms import get_formdata_from_json_or_form
from .forms.form_group import AddGroupForm, PatchGroup, device_operate
from ..http_auth import auth
@api.route(root + 'groups', methods=['GET'])
@auth.auth_required
@group_filter_valid
def group_list(user=None, app=None):
    """List multicast groups visible to the caller as a JSON array.

    *user* and *app* are injected by the group_filter_valid decorator:
    with an app, only that app's groups are returned; with only a user,
    the groups of all of that user's applications; otherwise all groups.
    """
    if request.method == 'GET':
        if app is not None:
            groups = Group.objects.all(app_eui=app.app_eui)
        elif user is not None:
            # gather groups across every application the user owns
            groups = []
            apps = Application.query.filter_by(user_id=user.id)
            for app in apps:
                groups += Group.objects.all(app.app_eui)
        else:
            groups = Group.objects.all()
        groups = [group.obj_to_dict() for group in groups]
        groups_json = json.dumps(groups)
        return Response(status=200, response=groups_json)
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# add_group = AddGroupForm(formdata)
# try:
# if add_group.validate():
# if len(add_group['appskey'].data) != 0:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data, appskey=add_group['appskey'].data)
# else:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data)
# group.save()
# return Response(status=201, response=json.dumps(group.obj_to_dict()))
# else:
# return Response(status=406, response=json.dumps({'errors': add_group.errors,
# 'succeed': False}))
# except KeyDuplicateError as error:
# return Response(status=403, response=json.dumps({"error": str(error),
# "succeed": False}))
@api.route(root + 'groups/<group_id>', methods=['GET'])
@auth.auth_required
@group_exists
def group_index(group):
    """Return a single group as JSON.

    *group* is resolved from the <group_id> URL segment by the
    group_exists decorator.
    """
    if request.method == 'GET':
        group_json = json.dumps(group.obj_to_dict())
        return group_json, 200
# elif request.method == 'PATCH':
# try:
# formdata = get_formdata_from_json_or_form(request)
# PatchGroup.patch(group, formdata)
# return Response(status=200, response=json.dumps(group.obj_to_dict()))
# except (AssertionError, ValidationError, PatchError) as e:
# return json.dumps({"error": str(e)}), 406
# elif request.method == 'POST':
# POST Down Msg
# pass
# elif request.method == 'DELETE':
# try:
# group.delete()
# return json.dumps({'errors': "Group: %s deleted." % hexlify(group.id).decode(),
# 'succeed': False}), 200
# except Exception as e:
# return json.dumps({'errors': "Fail to delete group: %s.\n%s" % (hexlify(group.id).decode(), str(e)),
# 'succeed': False}), 400
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# error = device_operate(group, formdata)
# if error is None or len(error) == 0:
# return json.dumps({'success': True}), 200
# else:
# return json.dumps({'error': str(error)}), 406
#
|
mit
| -6,227,232,879,890,345,000
| 43.404762
| 172
| 0.574263
| false
| 3.718843
| false
| false
| false
|
spino327/sdr_testbed
|
DistributedTestbed/SlaveRX.py
|
1
|
6293
|
'''
Copyright (c) 2011, Universidad Industrial de Santander, Colombia
University of Delaware
All rights reserved.
@author: Sergio Pino
@author: Henry Arguello
Website: http://www.eecis.udel.edu/
emails : sergiop@udel.edu - henarfu@udel.edu
Date : Feb, 2011
'''
import socket
import time
import sys
from receiver.RXApp import RXApp
from util.PropertyReader import readProperties
from util import Utils
class SlaveRX(object):
'''
SlaveRX is responsible of control the RX USRP node.
'''
def __init__(self, host, port, path):
'''
Constructor
@param host: refers to the local host address
@param port: port for the server to listen
@param path: File system path where the data will be stored
'''
# server
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind((host, port))
self.server.listen(1)
self.path = path
self.app = None
def setRXProperties(self, lo_off, fc, dec, gain, sync):
'''
Set the USRP RX properties
@param lo_off: local oscillator offset (int)
@param fc: Center frequency (float)
@param dec: Decimation factor (int)
@param gain: Gain of the receiver in dB (int)
@param sync: True if the Hardware will use the GPSDO (boolean)
'''
self.lo_off = lo_off
self.fc = fc
self.dec = dec
self.gain = gain
self.sync = sync
def launch(self):
'''
calls startup
'''
print("i: launch SlaveRX")
while True:
sc, addr = self.server.accept()
sc.settimeout(10*60)
print("\n\ni: SlaveRX Connection from " + str(addr) + ", time " + time.strftime("%d-%m-%y/%H:%M:%S"))
tic = time.time()
try:
self.__startup__(sc, addr)
except Exception, e:
print("e: " + str(e))
sc.close()
print("i: SlaveRX Connection closed, duration: " + str(time.time() - tic) + " [seg]\n\n")
print("i: SlaveRX end launch")
def record(self, prefix, at, signame):
"""
@param prefix: prefix path folder where the signals are stored, e.g. /home/folder/
@param at: attenuation factor
@param signame: filename of the signal
Start recording
"""
# creating the folder
folder = self.path + prefix
folder = folder if (folder.endswith("/")) else folder + "/"
Utils.ensure_dir(folder)
# signal file
filename = folder + signame + "_at" + str(at) +"_G" + str(self.gain) + ".dat"
print("i: record filename = " + filename)
self.app = RXApp(self.fc, self.dec, self.gain, "addr=192.168.10.2", self.sync, filename, self.lo_off)
self.app.launch()
def __startup__(self, sc, addr):
'''
Responsible for starting the application; for creating and showing
the initial GUI.
'''
print("i: startup")
msg = sc.recv(1024)
if msg == "start":
sc.send("ok")
print("i: start ok")
msg = sc.recv(1024)
print("i: msg = " + msg)
while msg != "finish":
tic = time.time()
if msg.find("startRec") >= 0:
# message "startRec:/prefix_path/:at:signame:"
print("i: startRec received")
values = msg.split(":")
prefix = values[1]
at = float(values[2])
signame = values[3]
self.record(prefix, at, signame)
sc.send("ok")
elif msg.find("stopRec") >= 0:
print("i: stopRec received")
if self.app.stopApp():
print("i: stopRec successful")
sc.send("ok")
else:
print("i: stopRec failed")
sc.send("error")
else:
print("i: ending")
break
print("i: cmd duration: " + str(time.time() - tic) + " [seg]\n")
msg = sc.recv(1024)
else:
print("e: not start")
sc.send("error")
if msg == "finish":
print("i: finish cmd received")
sc.close()
print("i: end startup")
def __exit__(self):
'''
This method runs on the event dispatching thread.
'''
print "somebody call me!"
self.__exit__()
if __name__ == '__main__':
    '''
    Creates an instance of the specified {@code Application}
    subclass, sets the {@code ApplicationContext} {@code
    application} property, and then calls the new {@code
    Application's} {@code startup} method. The {@code launch} method is
    typically called from the Application's {@code main}:
    '''
    # Reading the properties
    # Config file path may be overridden by the first CLI argument.
    confFile = "confRX.txt"
    if(len(sys.argv) > 1):
        arg = sys.argv[1]
        confFile = arg if len(arg) > 0 else confFile
    else:
        print("working with default config file path")
    # Key/value pairs loaded by util.PropertyReader.readProperties.
    properties = readProperties(confFile)
    print("Properties:")
    for p in properties:
        print("\t" + p + " : " + properties[p])
    # Normalise the storage path to end with a slash.
    path = properties["rxpath"]
    path = path if (path.endswith("/")) else path+"/"
    # "sync" arrives as the literal string "True"/"False".
    sync = True if properties["sync"] == "True" else False
    app = SlaveRX(properties["rxip"],
                  int(properties["rxport"]),
                  path)
    app.setRXProperties(int(properties["lo_off"]),
                        float(properties["fc"]),
                        int(properties["dec"]),
                        int(properties["gain"]),
                        sync)
    # Blocks forever serving control connections.
    app.launch()
    exit()
|
apache-2.0
| -2,031,581,930,941,409,300
| 29.259615
| 113
| 0.482918
| false
| 4.301435
| false
| false
| false
|
alanc10n/py-rau
|
pyrau/rau.py
|
1
|
1747
|
import argparse
from redis import StrictRedis
from pyrau.commands import Command
def delete(args, command):
    """Run the 'delete' subcommand: remove keys matching args.pattern."""
    pattern = args.pattern
    command.delete(pattern)
def keys(args, command):
    """Run the 'keys' subcommand: list keys matching args.pattern.

    --sorted implies --details, so details are requested whenever either
    flag is set. (Uses logical ``or`` rather than bitwise ``|`` on the
    boolean flags — same result for bools, but idiomatic and
    short-circuiting.)
    """
    details = args.details or args.sorted
    command.keys(args.pattern, details, args.sorted)
def parse_args():
    """Build the rau command line (connection options plus 'delete' and
    'keys' subcommands) and return the parsed namespace."""
    top = argparse.ArgumentParser()
    top.add_argument('-H', '--host', default='localhost', help='Host')
    top.add_argument('-p', '--port', default=6379, type=int, help='Port')
    top.add_argument(
        '-b', '--batch_size', default=20, type=int,
        help='Batch size for pipeline operations')

    commands = top.add_subparsers(help='Commands')

    delete_cmd = commands.add_parser('delete', help='Delete key(s)')
    delete_cmd.add_argument('pattern', type=str, help='Key pattern')
    delete_cmd.set_defaults(func=delete)

    keys_cmd = commands.add_parser('keys', help="List keys")
    keys_cmd.add_argument('-p', '--pattern', default=None, help='Key pattern')
    keys_cmd.add_argument(
        '-d', '--details', action='store_true',
        help='Include details for key(s)')
    keys_cmd.add_argument(
        '-s', '--sorted', action='store_true',
        help='Sort result by size, implies --details')
    keys_cmd.set_defaults(func=keys)

    return top.parse_args()
def main():
    """Entry point: connect to Redis and dispatch the chosen subcommand."""
    options = parse_args()
    client = StrictRedis(host=options.host, port=options.port)
    cmd = Command(client)
    cmd.batch_size = options.batch_size
    # Each subparser bound its handler via set_defaults(func=...).
    options.func(options, cmd)


if __name__ == '__main__':
    main()
|
mit
| -40,468,994,944,548,810
| 31.962264
| 76
| 0.606754
| false
| 3.943567
| false
| false
| false
|
TerryRen/TrPython
|
NetLib/SuperCaptcha.py
|
1
|
9743
|
#python 2.7
#coding=utf-8
__author__ = "Terry.Ren"
#try:
# import Image
#except ImportError:
# from PIL import Image
from PIL import Image
from PIL import ImageDraw
import ImageEnhance
import os
import urllib
import StringIO
import uuid
import pytesseract #open source
class Captcha(object):
    """Captcha OCR helper (Python 2): fetches an image, enhances contrast,
    thresholds it to black/white, then reads it with pytesseract.

    Temp files created along the way are tracked and removed by Clearup().
    """

    def __init__(self, isDebug = False):
        # When True, intermediate images are also saved for inspection.
        self.__isDebug = isDebug
        # Monotonic counter embedded in temp file names.
        self.__currentStepId = 1
        # Paths of every temp file created, for later cleanup.
        self.__tempFileList = []

    def __BuildTempFileFullName(self, localDir, extName):
        # Unique name: <uuid>_<step>.<ext>; registered for Clearup().
        fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
        fname = os.path.join(localDir,fname)
        self.__currentStepId += 1
        self.__tempFileList.append(fname)
        return fname

    '''
    Store remote image to local dir
    '''
    def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
        # Download the remote image and persist it as a tracked temp file.
        response = urllib.urlopen(imageUrl)
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        with open(tempFileFullName, 'wb') as f:
            f.write(response.read())
        return tempFileFullName

    def Clearup(self):
        # Delete every temp file created by this instance.
        for filename in self.__tempFileList:
            if os.path.isfile(filename):
                os.remove(filename)

    '''
    image enhance
    '''
    def __imageEnhance(self, image):
        # Boost contrast 4x so the threshold step separates ink from noise.
        enhancer = ImageEnhance.Contrast(image)
        image_enhancer = enhancer.enhance(4)
        return image_enhancer

    '''
    two value
    '''
    def __twoValue(self, image):
        # Binarise: white out a 1px border, then map any pixel with an
        # RGB channel below 95 to black, everything else to white.
        img = image.convert('RGBA') # convert to RGBA
        pix = img.load() #read pix
        for x in range(img.size[0]): #remove [top-bottom] border
            pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
        for y in range(img.size[1]): #remove [left-right] border
            pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
        for y in range(img.size[1]): # two value: R=95,G=95,B=95
            for x in range(img.size[0]):
                if pix[x, y][0] < 95 or pix[x, y][1] < 95 or pix[x, y][2] < 95:
                    pix[x, y] = (0, 0, 0, 255)
                else:
                    pix[x, y] = (255, 255, 255, 255)
        return img

    '''
    Get Captcha Code from on-line web site
    '''
    def GetOnlineCaptchaCode(self, imageUrl, isStoreOriginalImage = False, localDir = '', extName = 'jpg'):
        # Fetch either via a tracked temp file or entirely in memory.
        if isStoreOriginalImage == True:
            if not os.path.isdir(localDir):
                raise ValueError("please validate the argument GetOnlineCaptchaCode.localDir...")
            localFileName = self.__StoreImage2LocalDir(imageUrl , localDir , extName)
            img = Image.open(localFileName)
        else:
            imgBuf = StringIO.StringIO(urllib.urlopen(imageUrl).read())
            img = Image.open(imgBuf)
        print img.format, img.size, img.mode
        # image Enhance
        img = self.__imageEnhance(img)
        if self.__isDebug:
            img.save(self.__BuildTempFileFullName(localDir, extName))
        img = self.__twoValue(img)
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use local file via tesseract-orc
        text = pytesseract.image_to_string(Image.open(tempFileFullName))
        return text

    '''
    Get Captcha Code from local
    '''
    def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
        # Same pipeline as GetOnlineCaptchaCode but from a local file and
        # with an inline threshold of 90 (vs 95 in __twoValue).
        localDir = os.path.dirname(imagePath)
        img = Image.open(imagePath)
        print img.format, img.size, img.mode
        # image Enhance
        img = self.__imageEnhance(img)
        if self.__isDebug:
            img.save(self.__BuildTempFileFullName(localDir, extName))
        img = img.convert('RGBA') # convert to RGBA
        pix = img.load() #read pix
        for x in range(img.size[0]): #remove [top-bottom] border
            pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
        for y in range(img.size[1]): #remove [left-right] border
            pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
        for y in range(img.size[1]): # two value: R=95,G=95,B=95
            for x in range(img.size[0]):
                if pix[x, y][0] < 90 or pix[x, y][1] < 90 or pix[x, y][2] < 90:
                    pix[x, y] = (0, 0, 0, 255)
                else:
                    pix[x, y] = (255, 255, 255, 255)
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use local file via tesseract-orc
        text = pytesseract.image_to_string(Image.open(tempFileFullName))
        return text

    def TestImage(self):
        # Manual experiment: write a 2x2 1-bit checker image to disk.
        # NOTE(review): hard-coded Windows path.
        data = [(1,0),(0,1)]
        size = (2,2)
        image = Image.new("1",size)
        draw = ImageDraw.Draw(image)
        for x in xrange(0,size[0]):
            for y in xrange(0,size[1]):
                draw.point((x,y),data[x][y])
        image.save("D:\\GitHub\\TrPython\\NetLib\\Test\\1.gif")
class SmartCaptcha(object):
    """Variant of Captcha with a higher binarisation threshold and an extra
    neighbour-based noise-removal pass before OCR (Python 2).

    NOTE(review): much of this class duplicates Captcha; a shared base
    class would remove the copy/paste — left as-is here.
    """

    def __init__(self, isDebug = False):
        # When True, intermediate images are also saved for inspection.
        self.__isDebug = isDebug
        # Monotonic counter embedded in temp file names.
        self.__currentStepId = 1
        # Paths of every temp file created, for later cleanup.
        self.__tempFileList = []

    def __BuildTempFileFullName(self, localDir, extName):
        # Unique name: <uuid>_<step>.<ext>; registered for Clearup().
        fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
        fname = os.path.join(localDir,fname)
        self.__currentStepId += 1
        self.__tempFileList.append(fname)
        return fname

    '''
    Store remote image to local dir
    '''
    def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
        # Download the remote image and persist it as a tracked temp file.
        response = urllib.urlopen(imageUrl)
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        with open(tempFileFullName, 'wb') as f:
            f.write(response.read())
        return tempFileFullName

    def Clearup(self):
        # Delete every temp file created by this instance.
        for filename in self.__tempFileList:
            if os.path.isfile(filename):
                os.remove(filename)

    '''
    image enhance
    '''
    def __imageEnhance(self, image):
        # Boost contrast 4x so the threshold step separates ink from noise.
        enhancer = ImageEnhance.Contrast(image)
        image_enhancer = enhancer.enhance(4)
        return image_enhancer

    '''
    two value
    '''
    def __twoValue(self, image):
        # Binarise: white out a 1px border, then black where BOTH the red
        # and green channels are below 100 (blue is ignored here).
        img = image.convert('RGBA') # convert to RGBA
        pix = img.load() #read pix
        for x in range(img.size[0]): #remove [top-bottom] border
            pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
        for y in range(img.size[1]): #remove [left-right] border
            pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
        for y in range(img.size[1]): # two value: R=100,G=100,B=120
            for x in range(img.size[0]):
                if pix[x, y][0] < 100 and pix[x, y][1] < 100:
                    pix[x, y] = (0, 0, 0, 255)
                else:
                    pix[x, y] = (255, 255, 255, 255)
        return img

    def __getEffectivePoint(self, pix, x , y):
        # Count how many of the 8 neighbours of (x, y) share its colour.
        point, sx , sy = 0, x-1, y-1
        #print sx+3 , sy +3 ,x , y
        for i in xrange(3):
            for j in xrange(3):
                if sx+i == x and sy+j == y:
                    continue
                if pix[sx+i,sy+j] == pix[x,y]:
                    point += 1
        return point;

    '''
    1111111
    1011101
    1011101
    1111111
    '''
    def __clearNoise(self, img, effectivePoint ,processCount):
        # Repeat processCount times: whiten any interior pixel with fewer
        # than effectivePoint same-coloured neighbours (isolated specks).
        for ct in xrange(0, processCount):
            pix = img.load() #read pix
            for x in xrange(1,img.size[0] - 1):
                for y in xrange(1, img.size[1] - 1):
                    point = self.__getEffectivePoint(pix , x , y)
                    if point < effectivePoint:
                        pix[x, y] = (255, 255, 255, 255) # set to Noise
        return img

    '''
    Get Captcha Code from local
    '''
    def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
        # Pipeline: enhance -> binarise -> denoise -> save -> OCR.
        localDir = os.path.dirname(imagePath)
        img = Image.open(imagePath)
        print img.format, img.size, img.mode
        # image Enhance
        img = self.__imageEnhance(img)
        if self.__isDebug:
            img.save(self.__BuildTempFileFullName(localDir, extName))
        # two value
        img = self.__twoValue(img)
        if self.__isDebug:
            img.save(self.__BuildTempFileFullName(localDir, extName))
        # clear Noise
        img = self.__clearNoise(img, 3 , 1)
        # orc
        tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
        img.save(tempFileFullName) # must use local file via tesseract-orc
        text = pytesseract.image_to_string(Image.open(tempFileFullName))
        return text
if __name__ == "__main__":
print '[unit test]'
#validate1 = Captcha()
#print validate1.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet")
#validate2 = Captcha(True)
#print validate2.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet",True,"D:\\GitHub\\TrPython\\NetLib\\Test")
#validate2.Clearup()
#validate3 = Captcha(True)
#print validate3.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\1400.gif","gif")
#validate3.TestImage()
validate4 = SmartCaptcha(True)
print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg","jpg")
#print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\queaa.jpg","jpg")
print pytesseract.image_to_string(Image.open("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg"))
|
apache-2.0
| 4,605,390,416,347,193,000
| 29.600629
| 132
| 0.550098
| false
| 3.510462
| true
| false
| false
|
weggert/calendar_sync
|
calendar_sync.py
|
1
|
6753
|
#!/usr/bin/python
import fileinput
import os
class CalendarManager:
    """Drives macOS Calendar.app through osascript shell commands:
    clearing a calendar and creating events in it (Python 2).

    NOTE(review): calendar names, summaries, etc. are interpolated straight
    into a shell command line; input containing quotes would break the
    command (or inject) — acceptable for a trusted personal script, but
    worth confirming the inputs are sanitised upstream.
    """

    def __init__(self, calendar_name, dry_run, include_descriptions):
        # Target calendar name inside Calendar.app.
        self.calendar_name = calendar_name
        # When True, commands are only printed/summarised, never executed.
        self.dry_run = dry_run
        self.include_descriptions = include_descriptions

    def clear_calendar(self):
        # Delete every event from the target calendar via AppleScript.
        command = """
        osascript -e 'tell application "Calendar" to tell calendar "%s"
        set eventList to every event
        repeat with e in eventList
        delete e
        end repeat
        end tell'
        """
        command = command % self.calendar_name
        if not self.dry_run:
            os.system(command)
        print 'Calendar cleared'

    def create_calendar_event(self, summary, start_date, end_date, all_day, location, description):
        # Build the AppleScript property record for the new event.
        if not self.include_descriptions:
            description = ''
        properties = 'start date:theStartDate, end date:theEndDate, summary:"%s", description:"%s", location:"%s"'\
            % (summary, description, location)
        if all_day is True:
            properties += ', allday event:true'
        command = """
        osascript -e 'set theStartDate to date "%s"
        set theEndDate to date "%s"
        tell application "Calendar" to tell calendar "%s"
        set theEvent to make new event with properties {%s}
        end tell'
        """
        command = command % (start_date, end_date, self.calendar_name, properties)
        if not self.dry_run:
            os.system(command)
        # Always echo what was (or would have been) created.
        self.print_summary(summary, start_date, end_date, all_day, location, description)

    @staticmethod
    def print_summary(summary, start_date, end_date, all_day, location, description):
        # Human-readable dump of one event, used for both dry and real runs.
        print 'Summary: ' + summary
        print ' Start: ' + start_date
        print ' End: ' + end_date
        print ' All Day: ' + str(all_day)
        print ' Location: ' + location
        print ' Description: ' + description
        print ''
class CalendarSummaryProcessor:
    """Line-oriented parser (Python 2): reads an event summary from
    fileinput (stdin or file arguments), accumulates one event at a time,
    and mirrors each event into a calendar via CalendarManager."""

    class LineType:
        # Poor man's enum of the recognisable input-line kinds.
        EventStart, Summary, Location, Date, Time, Where, Notes, Status, Other = range(9)

        def __init__(self):
            pass

    def __init__(self, calendar_name, dry_run, include_descriptions):
        self.calendar_manager = CalendarManager(
            calendar_name=calendar_name,
            dry_run=dry_run,
            include_descriptions=include_descriptions)
        # reset() initialises the same per-event fields; the assignments
        # below are redundant but kept (likely to make the attributes
        # visible as defined in __init__).
        self.reset()
        self.processing_event = False
        self.first_description_line = True
        self.last_description_line_was_blank = False
        self.summary = ''
        self.date = ''
        self.time = ''
        self.location = ''
        self.description = ''

    def reset(self):
        # Clear all per-event parsing state before the next event.
        self.processing_event = False
        self.first_description_line = True
        self.last_description_line_was_blank = False
        self.summary = ''
        self.date = ''
        self.time = ''
        self.location = ''
        self.description = ''

    def process_summary(self):
        """Clear the calendar, then stream the input: each EVENT marker
        flushes the previous event (unless its summary is blacklisted)
        and starts accumulating a new one."""
        self.calendar_manager.clear_calendar()
        for input_line in fileinput.input():
            line_type = self.get_line_type(input_line)
            if line_type is self.LineType.EventStart:
                if self.processing_event:
                    # Hard-coded skip list of recurring internal meetings.
                    if self.summary != 'Remote'\
                            and self.summary != 'IP Video - Daily Scrum'\
                            and self.summary != 'Cloud Team Scrum':
                        start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
                        self.calendar_manager.create_calendar_event(
                            self.summary, start_date, end_date, all_day, self.location, self.description)
                    self.reset()
            if line_type is self.LineType.Summary:
                self.summary = self.sanitize_line(input_line.strip()[9:])
                self.processing_event = True
            if line_type is self.LineType.Date:
                self.date = input_line.strip()[6:]
            if line_type is self.LineType.Time:
                self.time = input_line.strip()[6:]
            if line_type is self.LineType.Location:
                self.location = self.sanitize_line(input_line.strip()[10:])
                self.processing_event = True
            if line_type is self.LineType.Other:
                # Unlabelled lines are treated as description text; blank
                # runs are collapsed to a single newline.
                description_line = self.sanitize_line(input_line.strip())
                if len(description_line) > 0:
                    self.description = self.description + description_line + '\n'
                    self.last_description_line_was_blank = False
                else:
                    if not self.first_description_line and not self.last_description_line_was_blank:
                        self.description += '\n'
                        self.last_description_line_was_blank = True
                self.first_description_line = False
        # Flush the final event (no trailing EVENT marker follows it).
        if self.processing_event:
            start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
            self.calendar_manager.create_calendar_event(
                self.summary, start_date, end_date, all_day, self.location, self.description)

    @staticmethod
    def get_start_end_dates(date, time):
        # Inputs look like "<start> to <end>"; recombine into two
        # "<date> <time>" strings. Midnight-to-midnight across different
        # dates is taken to mean an all-day event.
        dates = date.split(" to ")
        times = time.split(" to ")
        start_date = dates[0] + ' ' + times[0]
        end_date = dates[1] + ' ' + times[1]
        all_day = False
        if times[0] == '12:00:00 AM' and times[1] == "12:00:00 AM" and dates[0] != dates[1]:
            all_day = True
        return start_date, end_date, all_day

    def get_line_type(self, input_line):
        # Classify a raw input line by its prefix.
        if input_line.startswith('EVENT'):
            return self.LineType.EventStart
        if input_line.startswith('Summary:'):
            return self.LineType.Summary
        if input_line.startswith('Date:'):
            return self.LineType.Date
        if input_line.startswith('Time:'):
            return self.LineType.Time
        if input_line.startswith('Location:'):
            return self.LineType.Location
        if input_line.startswith('Where'):
            return self.LineType.Where
        if input_line.startswith('Notes'):
            return self.LineType.Notes
        if input_line.startswith('Status'):
            return self.LineType.Status
        return self.LineType.Other

    def process_named_line(self, input_line):
        # NOTE(review): appears unused within this class — candidate for
        # removal after confirming no external callers.
        colon_position = input_line.find(':')
        return self.sanitize_line(input_line[colon_position+1:].strip())

    @staticmethod
    def sanitize_line(input_line):
        # Strip quotes (they would break the AppleScript command) and the
        # decorative separator used by the source format.
        return input_line.replace("'", "").replace('"', '').replace('*~*~*~*~*~*~*~*~*~*', '').strip()
# Module entry point: parse the event summary supplied on stdin (or as
# file arguments) and mirror it into the "Work Calendar" calendar.
# NOTE(review): runs on import — no __main__ guard.
CalendarSummaryProcessor(calendar_name='Work Calendar',
                         dry_run=False,
                         include_descriptions=True).process_summary()
|
apache-2.0
| -4,109,685,570,745,103,000
| 34.925532
| 115
| 0.577077
| false
| 4.085299
| false
| false
| false
|
allenai/allennlp
|
allennlp/modules/text_field_embedders/basic_text_field_embedder.py
|
1
|
5232
|
from typing import Dict
import inspect
import torch
from overrides import overrides
from allennlp.common.checks import ConfigurationError
from allennlp.data import TextFieldTensors
from allennlp.modules.text_field_embedders.text_field_embedder import TextFieldEmbedder
from allennlp.modules.time_distributed import TimeDistributed
from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
from allennlp.modules.token_embedders import EmptyEmbedder
@TextFieldEmbedder.register("basic")
class BasicTextFieldEmbedder(TextFieldEmbedder):
"""
This is a `TextFieldEmbedder` that wraps a collection of
[`TokenEmbedder`](../token_embedders/token_embedder.md) objects. Each
`TokenEmbedder` embeds or encodes the representation output from one
[`allennlp.data.TokenIndexer`](../../data/token_indexers/token_indexer.md). As the data produced by a
[`allennlp.data.fields.TextField`](../../data/fields/text_field.md) is a dictionary mapping names to these
representations, we take `TokenEmbedders` with corresponding names. Each `TokenEmbedders`
embeds its input, and the result is concatenated in an arbitrary (but consistent) order.
Registered as a `TextFieldEmbedder` with name "basic", which is also the default.
# Parameters
token_embedders : `Dict[str, TokenEmbedder]`, required.
A dictionary mapping token embedder names to implementations.
These names should match the corresponding indexer used to generate
the tensor passed to the TokenEmbedder.
"""
def __init__(self, token_embedders: Dict[str, TokenEmbedder]) -> None:
super().__init__()
# NOTE(mattg): I'd prefer to just use ModuleDict(token_embedders) here, but that changes
# weight locations in torch state dictionaries and invalidates all prior models, just for a
# cosmetic change in the code.
self._token_embedders = token_embedders
for key, embedder in token_embedders.items():
name = "token_embedder_%s" % key
self.add_module(name, embedder)
self._ordered_embedder_keys = sorted(self._token_embedders.keys())
@overrides
def get_output_dim(self) -> int:
output_dim = 0
for embedder in self._token_embedders.values():
output_dim += embedder.get_output_dim()
return output_dim
def forward(
self, text_field_input: TextFieldTensors, num_wrapping_dims: int = 0, **kwargs
) -> torch.Tensor:
if sorted(self._token_embedders.keys()) != sorted(text_field_input.keys()):
message = "Mismatched token keys: %s and %s" % (
str(self._token_embedders.keys()),
str(text_field_input.keys()),
)
embedder_keys = set(self._token_embedders.keys())
input_keys = set(text_field_input.keys())
if embedder_keys > input_keys and all(
isinstance(embedder, EmptyEmbedder)
for name, embedder in self._token_embedders.items()
if name in embedder_keys - input_keys
):
# Allow extra embedders that are only in the token embedders (but not input) and are empty to pass
# config check
pass
else:
raise ConfigurationError(message)
embedded_representations = []
for key in self._ordered_embedder_keys:
# Note: need to use getattr here so that the pytorch voodoo
# with submodules works with multiple GPUs.
embedder = getattr(self, "token_embedder_{}".format(key))
if isinstance(embedder, EmptyEmbedder):
# Skip empty embedders
continue
forward_params = inspect.signature(embedder.forward).parameters
forward_params_values = {}
missing_tensor_args = set()
for param in forward_params.keys():
if param in kwargs:
forward_params_values[param] = kwargs[param]
else:
missing_tensor_args.add(param)
for _ in range(num_wrapping_dims):
embedder = TimeDistributed(embedder)
tensors: Dict[str, torch.Tensor] = text_field_input[key]
if len(tensors) == 1 and len(missing_tensor_args) == 1:
# If there's only one tensor argument to the embedder, and we just have one tensor to
# embed, we can just pass in that tensor, without requiring a name match.
token_vectors = embedder(list(tensors.values())[0], **forward_params_values)
else:
# If there are multiple tensor arguments, we have to require matching names from the
# TokenIndexer. I don't think there's an easy way around that.
token_vectors = embedder(**tensors, **forward_params_values)
if token_vectors is not None:
# To handle some very rare use cases, we allow the return value of the embedder to
# be None; we just skip it in that case.
embedded_representations.append(token_vectors)
return torch.cat(embedded_representations, dim=-1)
|
apache-2.0
| 8,035,300,947,094,428,000
| 47.444444
| 114
| 0.640673
| false
| 4.253659
| false
| false
| false
|
perlygatekeeper/glowing-robot
|
google_test/free_the_bunny_prisoners/solution_5_fails.py
|
1
|
1090
|
import itertools
def solution(bunnies, keys_required):
    """Assign numbered keys to ``bunnies`` so that any ``keys_required``
    of them together hold every key.

    Each key corresponds to one combination of ``keys_required`` bunnies
    and is given to every bunny in that combination. Special cases:
    ``keys_required == 0`` returns ``[[0]]`` (single key, single holder —
    preserved from the original behavior), and ``keys_required == 1``
    gives the same key 0 to everyone.
    """
    answer = [[] for _ in range(bunnies)]
    if keys_required == 0:
        return [[0]]
    if keys_required == 1:
        for bunny in answer:
            bunny.append(0)
    elif bunnies == keys_required:
        for key, bunny in enumerate(answer):
            bunny.append(key)
    else:
        combos = itertools.combinations(range(bunnies), keys_required)
        for key, holders in enumerate(combos):
            for holder in holders:
                answer[holder].append(key)
    return answer
# Smoke test: print the key distribution for every small configuration,
# one per line when the full matrix would be too wide to read inline.
for bunny_count in range(1, 10):
    for required in range(10):
        key_dist = solution(bunny_count, required)
        print("-" * 60)
        print("Answer for {0:d} bunnies, requiring {1:d}".format(bunny_count, required))
        if len(key_dist) * len(key_dist[0]) < 25:
            print(key_dist)
        else:
            for bun in key_dist:
                print(bun)
|
artistic-2.0
| -8,434,445,743,401,300,000
| 28.459459
| 88
| 0.538532
| false
| 3.449367
| false
| false
| false
|
mrjmad/nagademon_2014
|
nagademon2014/maingame/models/history_elements.py
|
1
|
6460
|
# -*- coding: utf-8 -*-
from __future__ import (print_function, division, absolute_import, unicode_literals)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.db import models
USER_MODEL = settings.AUTH_USER_MODEL
@python_2_unicode_compatible
class Character(models.Model):
    """Abstract base holding the fields shared by player and non-player
    characters (name, gender, description)."""
    # NOTE(review): verbose name says "NPC's" although the base is shared.
    short_name = models.CharField(_("NPC's short Name"), max_length=20, unique=True)
    first_name = models.CharField("Firstname of Character", max_length=50)
    last_name = models.CharField("Lastname of Character", max_length=50)
    gender = models.PositiveSmallIntegerField(u"Gender of Character")
    description = models.TextField("Description")

    def __str__(self):
        return u"%s %s" % (self.first_name, self.last_name)

    class Meta:
        abstract = True
@python_2_unicode_compatible
class PlayerCharacter(Character):
    """A playable character; inherits all fields from Character."""

    def __str__(self):
        return u"PC : %s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class NPCharacter(Character):
    """A non-player character; inherits all fields from Character."""

    def __str__(self):
        return u"NPC : %s %s" % (self.first_name, self.last_name)
class PlaceManager(models.Manager):
    """Manager enabling natural-key lookups for Place fixtures."""

    def get_by_natural_key(self, short_name):
        # short_name is unique on Place, so it serves as the natural key.
        return self.get(short_name=short_name)
@python_2_unicode_compatible
class Place(models.Model):
    """A location where scenes can be set, with optional sounds."""
    objects = PlaceManager()  # custom manager for natural-key lookups
    begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
    ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
    short_name = models.CharField(_("Place's short Name"), max_length=20, unique=True)
    # NOTE(review): verbose names below say "Scene's ..." on a Place model.
    name = models.CharField("Scene's Name", max_length=200)
    filename = models.CharField("Scene's Filename", max_length=80)
    text = models.TextField("Scene's Text")

    def __str__(self):
        return self.name

    def natural_key(self):
        # Single-element tuple; pairs with PlaceManager.get_by_natural_key.
        return self.short_name,
@python_2_unicode_compatible
class Scene(models.Model):
    """A game scene, optionally tied to a Place and ordered in the story."""
    short_name = models.CharField(_("Scene's short Name"), max_length=20, unique=True)
    name = models.CharField("Scene's Name", max_length=200)
    filename = models.CharField("Scene's Filename", max_length=80)
    begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
    ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
    synopsis = models.TextField("Scene's synopsis, only for authors")
    # Marks the final round of the game.
    final = models.BooleanField("Final Round ?", default=False)
    place = models.ForeignKey(Place, verbose_name="Scene's Place",
                              blank=True, null=True)
    is_active = models.BooleanField(_("Is active ?"), default=True)
    order = models.PositiveIntegerField(_("Scene's Order"), default=0)
    # When True the scene only becomes reachable through a trigger.
    need_a_trigger = models.BooleanField(_("Activable only by a trigger"), default=False)

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class PartScene(models.Model):
    """A fragment of a Scene, optionally restricted to one player and
    organised as a tree via ``parent``."""
    text = models.CharField("Scene's Text", max_length=400)
    for_scene = models.ForeignKey(Scene, verbose_name="Scene")
    limited_to_player = models.ForeignKey(PlayerCharacter, blank=True, null=True)
    parent = models.ForeignKey('self', blank=True, null=True)
    active = models.BooleanField(default=True)

    def __str__(self):
        return "Text %s |for scene :%s" % (self.text, self.for_scene)
@python_2_unicode_compatible
class Choice1PartSceneto1Scene(models.Model):
    """A player choice shown in a PartScene, leading either to a whole
    Scene or to another PartScene (both targets optional)."""
    text = models.CharField("Choice's Text", max_length=400)
    for_part_scene = models.ForeignKey(PartScene, verbose_name="Current Part Scene",
                                       related_name="current_choices_set")
    next_scene = models.ForeignKey(Scene, verbose_name="Next Scene",
                                   related_name="leading_choices_set",
                                   null=True, blank=True)
    next_part_scene = models.ForeignKey(PartScene, verbose_name="Next Part Scene",
                                        related_name="leading_choices_set",
                                        null=True, blank=True)

    def __str__(self):
        return "%s |for scene %s , part scene id :%s" % (self.text,
                                                         self.for_part_scene.for_scene,
                                                         self.for_part_scene.id)
@python_2_unicode_compatible
class Quest(models.Model):
    """A quest handed out by an NPC in a given scene, optionally bounded
    by a time limit, with hook functions resolved by name at runtime."""
    short_name = models.CharField(_("Quest's short Name"), max_length=20, unique=True)
    title = models.CharField("Quest's Title", max_length=140)
    text = models.TextField("Quest's Text")
    # 0 means no time limit.
    time_frame = models.PositiveIntegerField(_("Maximum Time (in minutes) for validate the Quest"), default=0)
    given_by = models.ForeignKey(NPCharacter, verbose_name=_('Given by'))
    # NOTE(review): wrapping related_name in _() yields a lazy translation
    # proxy where a plain string is expected; left untouched because the
    # reverse accessor name is part of the public API.
    scene = models.ForeignKey(Scene, verbose_name=_("Scene who Quest is activable"),
                              related_name=_("quests_for_scene"))
    scene_after = models.ForeignKey(Scene, verbose_name=_("Scene after the End's Quest"),
                                    related_name=_("finished_quests_for_scene"))
    apparition_function = models.CharField(_("Name of Apparition's Function"), max_length=120, blank=True, null=True)
    validation_function = models.CharField(_("Name of Validation's Function"), max_length=120)

    def __str__(self):
        # Bug fix: the model has no ``timedelta`` attribute (the previous
        # code raised AttributeError); the time-limit field is time_frame.
        return "%s | for scene :%s, by NPC %s in time %s" % (self.title, self.scene, self.given_by,
                                                             self.time_frame)
class ObjectType(models.Model):
    """A category of game objects (no __str__ defined, unlike siblings)."""
    name = models.CharField(u"Type Object Name", max_length=200)
    description = models.TextField("Type's Description", blank=True, null=True)
    short_name = models.CharField(_("Type Object's short Name"), max_length=20, unique=True)
class OneObject(models.Model):
    """A concrete game object: typed, with an initial and a current place."""
    name = models.CharField(_("Type Object Name"), max_length=200)
    type = models.ForeignKey(ObjectType, verbose_name=_("Object's Type"))  # NOTE: shadows builtin 'type' as a field name
    description = models.TextField("Object's Description", blank=True, null=True)
    # Where the object starts the game.
    initial_place = models.ForeignKey(Place, verbose_name=_("Object's Initial place"),
                                      related_name=_("initial_objects_set"), blank=True, null=True)
    # Where the object currently is.
    stored_in = models.ForeignKey(Place, related_name=_("objects_stored_set"),
                                  verbose_name=_("Where the object is stored"), blank=True, null=True)
|
mit
| 5,791,698,670,006,022,000
| 43.551724
| 117
| 0.645201
| false
| 3.875225
| false
| false
| false
|
vitale232/ves
|
ves/VESinverse_vectorized.py
|
1
|
12839
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 28 16:32:48 2016
@author: jclark
this code uses the Ghosh method to determine the apparent resistivities
for a layered earth model. Either schlumberger or Wenner configurations
can be used
"""
import numpy as np
import random
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
plt.style.use('bmh')
import sys
# Schlumberger filter
fltr1 = [0., .00046256, -.0010907, .0017122, -.0020687,
         .0043048, -.0021236, .015995, .017065, .098105, .21918, .64722,
         1.1415, .47819, -3.515, 2.7743, -1.201, .4544, -.19427, .097364,
         -.054099, .031729, -.019109, .011656, -.0071544, .0044042,
         -.002715, .0016749, -.0010335, .00040124]

#Wenner Filter
fltr2 = [0., .000238935, .00011557, .00017034, .00024935,
         .00036665, .00053753, .0007896, .0011584, .0017008, .0024959,
         .003664, .0053773, .007893, .011583, .016998, .024934, .036558,
         .053507, .078121, .11319, .16192, .22363, .28821, .30276, .15523,
         -.32026, -.53557, .51787, -.196, .054394, -.015747, .0053941,
         -.0021446, .000665125]

print(len(fltr1))
print(len(fltr2))

#I know there must be a better method to assign lists. And probably numpy
#arrays would be best. But my Python wasn't up to it. If the last letter
#is an 'l' that means it is a log10 of the value

# 65 is completely arbitrary
# Module-level scratch arrays shared by transf/filters/rmsfit/error below.
# Index 0 is deliberately unused throughout (FORTRAN-style 1-based loops).
p = [0] * 20     # earth layer parameters?
r = [0] * 65     # apparent resistivty?
rl = [0] * 65    # np.log(r) ?
t = [0] * 50     #
b = [0] * 65     #
asav = [0] * 65  # voltage spacing in meters?
asavl = [0] * 65 # np.log(asav)
adatl = [0] * 65 # interpolated voltage spacing ( np.log(10) / 6 )?
rdatl = [0] * 65 # np.log()
# adat = [0] * 65     # voltage spacing input
# rdat = [0] * 65     # apparent res input
pkeep = [0] * 65  # earth parameters after applying equations?
rkeep = [0] * 65  # r after applying equations?
rkeepl = [0] * 65 # np.log()!
pltanswer = [0] * 65
pltanswerl = [0] * 65
pltanswerkeep = [0] * 65
pltanswerkeepl = [0] * 65
rl = [0] * 65
small = [0] * 65
xlarge = [0] * 65
x = [0] * 100
y = [0] * 100
y2 = [0] * 100
u = [0] * 5000
new_x = [0] * 1000
new_y = [0] * 1000
ndat = 13

#hard coded data input - spacing and apparent resistivities measured
#in teh field
adat = [0., 0.55, 0.95, 1.5, 2.5, 3., 4.5, 5.5, 9., 12., 20., 30., 70.]
rdat = [0., 125., 110., 95., 40., 24., 15., 10.5, 8., 6., 6.5, 11., 25.]
one30 = 1.e30   # What's the purpose of this and should it be user input?
rms = one30     # Just a starting value for rmserror?
errmin = 1.e10  # Should this be user input?

# INPUT
array_spacing = 'wenner' # 1 is for shchlumberger and 2 is for Wenner
nLayers = 3 #number of layers
n = 2 * nLayers - 1 # What does n represent? number of parameters
spac = 0.2 # smallest electrode spacing - should this come from the input file?
m = 20 # number of points where resistivity is calculated

spac = np.log(spac)
delx = np.log(10.0) / 6. # I take it this is the sample interval on the log scale?

# this is where the range in parameters should be input from a GUI
# I'm hard coding this in for now
#enter thickenss range for each layer and then resistivity range.
#for 3 layers small[1] and small[2] are low end of thickness range
# small[3], small[4] and small[5] are the low end of resistivities
# I think I have it coded up that these are getting grabbed from the rectangles currently.
# Is that the best way to go?
small[1] = 1.
small[2] = 10.
small[3] = 20.
small[4] = 2.
small[5] = 500.
xlarge[1] = 5
xlarge[2] = 75.
xlarge[3] = 200.
xlarge[4] = 100
xlarge[5] = 3000.

iter_ = 10000 #number of iterations for the Monte Carlo guesses. to be input on GUI
# Is 10000 the most reasonable default, or should I play with it?
def readData(adat, rdat, ndat, return_indexed=False):
    """Load measured spacings/resistivities into the module-level log10
    arrays ``adatl``/``rdatl``; index 0 is deliberately left untouched.

    Returns the filled slices when return_indexed is True, otherwise the
    whole module-level lists.
    """
    # Normally this data would be read from a CSV file; for now it comes
    # from the hard-coded module-level lists.
    for idx in range(1, ndat):
        adatl[idx] = np.log10(adat[idx])
        rdatl[idx] = np.log10(rdat[idx])
    if return_indexed:
        return adatl[:ndat], rdatl[:ndat]
    return adatl, rdatl
# NOTE(review): this region previously contained unresolved git merge-conflict
# markers (<<<<<<< HEAD / ======= / >>>>>>> 60497dd), which are a SyntaxError
# in Python. The conflicted branch only duplicated the error() function that
# is already defined canonically later in this module, so the duplicate was
# dropped (i.e. the empty HEAD side of the conflict was kept).
def transf(y, i):
    """Evaluate the layered-earth resistivity transform at one sample point
    and store it in the module-level array r[i].

    y - log-domain sample position (see callers in rmsfit)
    i - destination index into the global r array

    Uses globals p (model parameters), n, nLayers and scratch array t.
    The recurrence accumulates from the deepest layer upward (presumably the
    Ghosh formulation mentioned in the module header — confirm).
    """
    # these lines apparently find the computer precision ep
    ep = 1.0
    ep = ep / 2.0
    fctr = ep + 1.
    while fctr > 1.:
        ep = ep / 2.0
        fctr = ep + 1.
    u = 1. / np.exp(y) # y = spac - 19. * delx - 0.13069
    t[1] = p[n]
    for j in range(2, nLayers + 1, 1):
        pwr = -2. * u * p[nLayers + 1 - j]
        # clamp the exponent so exp() does not underflow below precision
        if pwr < np.log(2. * ep):
            pwr = np.log(2. * ep)
        a = np.exp(pwr)
        b = (1. - a) / (1. + a)
        rs = p[n + 1 - j]
        tpr = b * rs
        t[j] = (tpr + t[j - 1]) / (1. + tpr * t[j - 1] / (rs * rs))
    r[i] = t[nLayers]
    return
def filters(b, k):
    """Apply the length-k digital filter b (fltr1 or fltr2) to the transform
    samples in the global array r, writing the filtered apparent
    resistivities back into r[1..m] in place."""
    for i in range(1, m + 1):
        re = 0.
        for j in range(1, k + 1):
            re = re + b[j] * r[i + k - j] # include ranges of thickness, res . push button for rmse error, observed data
        # surf thicknes .2 - 100
        # res 2-3000 # could use huge ranges at cost of time
        r[i] = re
    return
def rmsfit():
    """Forward-model apparent resistivities for the current global model p
    (Wenner or Schlumberger, per global array_spacing), fill the global
    asav/asavl/rl arrays, and return the RMS misfit against the data.

    Raises SystemExit when array_spacing is neither 'wenner' nor
    'schlumberger'.
    """
    if array_spacing.lower() == 'wenner':
        # magic offsets position the filter relative to the sample grid —
        # TODO confirm against the Ghosh filter derivation
        y = spac - 19. * delx - 0.13069
        mum1 = m + 28
        for i in range(1, mum1 + 1):
            transf(y, i)
            y = y + delx
        filters(fltr1, 29)
    elif array_spacing.lower() == 'schlumberger':
        s = np.log(2.)
        y = spac - 10.8792495 * delx
        mum2 = m + 33
        for i in range(1, mum2 + 1):
            transf(y, i)
            a = r[i]
            y1 = y + s
            transf(y1, i)
            r[i] = 2. * a - r[i]
            y = y + delx
        filters(fltr2, 34)
    else:
        print("\nType of survey not indicated.")
        raise SystemExit('Exiting.\n\n Take better care next time.')
    x = spac
    #print("A-Spacing  App. Resistivity")
    for i in range(1, m + 1):
        a = np.exp(x)
        asav[i] = a
        asavl[i] = np.log10(a)
        rl[i] = np.log10(r[i])
        x = x + delx
        #print("%7.2f   %9.3f " % ( asav[i], r[i]))
    rms = error()
    return rms
def error(): # simple rms error calc
    """Spline-interpolate the modelled curve (asavl/rl) onto the measured
    spacings and return the RMS misfit in log10 space.

    Side effects: fills the global pltanswerl / pltanswer arrays with the
    predicted values at the data spacings.
    """
    sumerror = 0.
    #pltanswer = [0]*64
    spline(m, one30, one30, asavl, rl, y2) # So this calculates the predicted fit?
    # and essentially operates on the list in place?
    for i in range(1, ndat): # So you always skip the value 0? due to -inf returns?
        ans = splint(m, adatl[i], asavl, rl, y2) # Then this calulates error?
        sumerror = sumerror + (rdatl[i] - ans) * (rdatl[i] - ans)
        #print(i,sum1,rdat[i],rdatl[i],ans)
        pltanswerl[i] = ans
        pltanswer[i] = np.power(10, ans)
    rms = np.sqrt(sumerror / (ndat - 1))
    # check the spline routine
    # for i in range(1,m+1,1):
    #     anstest = splint(m, asavl[i],asavl,rl,y2)
    #     print( asavl[i], rl[i], anstest)
    #print(' rms = ', rms)
    # if you erally want to get a good idea of all perdictions from Montecarlo
    # perform the following plot (caution - change iter to a smaller number)
    #plt.loglog(adat[1:ndat],pltanswer[1:ndat])
    return rms
# my code to do a spline fit to predicted data at the nice spacing of Ghosh
# use splint to determine the spline interpolated prediction at the
# spacing where the measured resistivity was taken - to compare observation
# to prediction
def spline(n, yp1, ypn, x=[], y=[], y2=[]):
    """Compute second derivatives y2[0..n] of a cubic spline through the
    knots (x[i], y[i]), i = 0..n (n is the LAST index, not the count).

    yp1, ypn - first-derivative boundary conditions; any value > 0.99e30
               selects a "natural" spline (zero second derivative) at
               that end.
    x, y     - knot abscissae/ordinates (read only)
    y2       - output list, filled in place

    Standard tridiagonal decomposition + back-substitution, cf.
    Numerical Recipes sec. 3.3.
    """
    u = [0] * 1000  # scratch storage for the decomposition
    one29 = 0.99e30
    if yp1 > one29:
        # natural spline at the left end
        y2[0] = 0.
        u[0] = 0.
    else:
        y2[0] = -0.5
        u[0] = (3. / (x[1] - x[0])) * ((y[1] - y[0]) / (x[1] - x[0]) - yp1)
    for i in range(1, n):
        sig = (x[i] - x[i - 1]) / (x[i + 1] - x[i - 1])
        p = sig * y2[i - 1] + 2.
        y2[i] = (sig - 1.) / p
        # BUG FIX: the second divided difference was written as
        # "(y[i] - y[i-1]) / x[i] - x[i-1]" (missing parentheses), which
        # divides by x[i] alone and then subtracts x[i-1], corrupting y2.
        u[i] = ((6. * ((y[i + 1] - y[i]) / (x[i + 1] - x[i])
                       - (y[i] - y[i - 1]) / (x[i] - x[i - 1]))
                 / (x[i + 1] - x[i - 1]) - sig * u[i - 1]) / p)
    if ypn > one29:
        # natural spline at the right end
        qn = 0.
        un = 0.
    else:
        qn = 0.5
        un = (3. / (x[n] - x[n - 1])) * (ypn - (y[n] - y[n - 1]) / (x[n] - x[n - 1]))
    y2[n] = (un - qn * u[n - 1]) / (qn * y2[n - 1] + 1.)
    # back-substitution of the tridiagonal solve
    for k in range(n - 1, -1, -1):
        y2[k] = y2[k] * y2[k + 1] + u[k]
    return
def splint(n, x, xa=[], ya=[], y2a=[]):
    """Evaluate the cubic spline defined by knots xa/ya and second
    derivatives y2a (as produced by spline()) at abscissa x and return the
    interpolated ordinate.  n is the last valid index of the knot arrays."""
    lo, hi = 0, n
    # binary search for the bracketing interval [xa[lo], xa[hi]]
    while hi - lo > 1:
        mid = int((hi + lo) // 2)
        if xa[mid] > x:
            hi = mid
        else:
            lo = mid
    width = xa[hi] - xa[lo]
    if abs(width) < 1e-20:
        print(" bad xa input")
    a = (xa[hi] - x) / width
    b = (x - xa[lo]) / width
    # linear part plus the cubic correction from the second derivatives
    cubic = ((a * a * a - a) * y2a[lo] +
             (b * b * b - b) * y2a[hi]) * (width * width) / 6.
    return a * ya[lo] + b * ya[hi] + cubic
#main here
if __name__ == '__main__':
    # Monte-Carlo inversion: draw random models within [small, xlarge],
    # forward-model each with rmsfit(), and keep the best-fitting one.
    adatl, rdatl = readData(adat, rdat, ndat, return_indexed=False)
    print(adat[1:ndat],rdat[1:ndat])
    print('log stufffff')
    print(adatl[1:ndat], rdatl[1:ndat]) # is this to skip 0?
    #enter thickenss range for each layer and then resistivity range.
    #for 3 layers small[1] and small[2] are low end of thickness range
    # small[3], small[4] and small[5] are the low end of resistivities
    for iloop in range(1, int(iter_/2) + 1):
        #print( ' iloop is ', iloop)
        for i in range(1, n + 1): # number of parameters + 1
            randNumber = random.random() # IS this just to add noise to the model?
            # #print(randNumber, ' random')
            # print(xlarge)
            # print(small)
            # s = input('')
            # print('xlarge[i]: {}, small[i]: {}'.format(xlarge[i], small[i]))
            p[i] = (xlarge[i] - small[i]) * randNumber + small[i]
            # print(p)
        print('\n')
        print(p)
        # s = input('')
        rms = rmsfit()
        # keep the best model (and its predicted curve) seen so far
        if rms < errmin:
            print('rms  ', rms, '   errmin ', errmin)
            for i in range(1, n + 1):
                pkeep[i] = p[i]
            for i in range(1, m + 1):
                rkeep[i] = r[i]
                rkeepl[i] = rl[i]
            for i in range(1, ndat + 1):
                pltanswerkeepl[i] = pltanswerl[i]
                pltanswerkeep[i] = pltanswer[i]
            errmin = rms
    #output the best fitting earth model
    print(' Layer ', '   Thickness  ', '   Res_ohm-m  ')
    for i in range(1,nLayers,1):
        print(i, pkeep[i], pkeep[nLayers+i-1])
    print( nLayers, '  Infinite ', pkeep[n])
    for i in range(1,m+1, 1):
        asavl[i] = np.log10(asav[i])
    #output the error of fit
    print( ' RMS error: ', errmin)
    print( ' Spacing', '  Res_pred  ', ' Log10_spacing ', ' Log10_Res_pred ')
    for i in range(1,m+1,1):
        #print(asav[i], rkeep[i], asavl[i], rkeepl[i])
        print("%7.2f   %9.3f  %9.3f  %9.3f" % ( asav[i], rkeep[i],
                                                asavl[i], rkeepl[i]))
    print('plot a lot')
    plt.loglog(asav[1:m],rkeep[1:m],'-')    # resistivity prediction curve
    plt.loglog(adat[1:ndat],pltanswerkeep[1:ndat], 'ro') # predicted data red dots
    s=7
    plt.loglog(adat[1:ndat],rdat[1:ndat],'bo',markersize=s) #original data blue dots
    plt.show()
    plt.grid(True)
    sys.exit(0)
|
lgpl-3.0
| -1,006,991,285,408,766,500
| 31.442708
| 120
| 0.53283
| false
| 2.705225
| false
| false
| false
|
mikoim/funstuff
|
null/crawler/tt2db.py
|
1
|
1709
|
# -*- coding: utf-8 -*-
import urllib.request
import time
import pymongo
import http.client
import re
def httpWrapper(url):
    """Fetch *url* and return the body decoded as UTF-8, or the sentinel
    string "NULL" on any failure (bad URL, network error, bad UTF-8)."""
    try:
        data_raw = urllib.request.urlopen(url).read().decode('utf-8')
    # BUG FIX: the original used a bare "except:", which also swallows
    # KeyboardInterrupt/SystemExit; catch Exception so the crawler can
    # still be interrupted while keeping the "NULL" sentinel contract.
    except Exception:
        return "NULL"
    return data_raw
def getGirlName(data_raw):
    """Extract the girl's name from a profile page: the text between the
    "名前 /" label and the following "(" or "<br />", with spaces removed.

    Note: raises IndexError when the label is absent (unchanged from the
    original behaviour).
    """
    hits = re.findall('名前[ ]+?/[ ]+?(.+?)(|\n)*( |)*(|\n)*( |)*(\(|<br />)', data_raw)
    first_hit = hits[0]           # tuple of capture groups for the first match
    return first_hit[0].replace(' ', '')
def getGrilPhotos(data_raw):
    """Return the list of photo paths ("photos/*.jpg") found on the page,
    preferring <span>-wrapped paths and falling back to <a href> links."""
    photos = re.findall('<span>(photos/.+?.jpg)</span>', data_raw)
    if not photos:
        photos = re.findall('<a href="(photos/.+?.jpg)">', data_raw)
    return photos
def getLastModTime(path):
    """Return the Last-Modified time of *path* on twintail-japan.com as
    epoch milliseconds (int), obtained via an HTTP HEAD request."""
    conn = http.client.HTTPConnection("twintail-japan.com")
    conn.request("HEAD", path)
    res = conn.getresponse()
    # BUG FIX: the original read the header positionally
    # (res.getheaders()[2][1]), which silently breaks whenever the server
    # reorders or adds headers; look the header up by name instead.
    last_modified = res.getheader('Last-Modified')
    parsed = time.strptime(last_modified, '%a, %d %b %Y %H:%M:%S %Z')
    return int(time.mktime(parsed) * 1000)
# MongoDB connection: database "tw2db", collection "tm"
conn = pymongo.Connection()
db = conn.tw2db
col = db.tm

# Crawl pages 1..2 of the gallery and insert one document per photo found.
for x in range(1, 3):
    baseUrl = "http://twintail-japan.com/sailor/contents/%d.html" % x
    data_raw = httpWrapper(baseUrl)
    if data_raw != "NULL":
        name = getGirlName(data_raw)
        for photo in getGrilPhotos(data_raw):
            dbtml = {'author' : '', 'time' : '', 'title' : '', 'via' : '', 'src' : '', 'message' : ''}
            dbtml['author'] = name
            dbtml['title'] = name + " @ セーラ服とツインテール"
            dbtml['via'] = baseUrl
            dbtml['message'] = ""
            # page Last-Modified used as the document timestamp (epoch ms)
            dbtml['time'] = getLastModTime("/sailor/contents/%d.html" % x)
            dbtml['src'] = 'http://twintail-japan.com/sailor/contents/%s' % (photo)
            col.insert(dbtml)
    print(x)
|
mit
| -7,780,927,202,663,628,000
| 26.606557
| 102
| 0.562686
| false
| 2.978761
| false
| false
| false
|
digifant/eMonitor
|
tools/update-osm-data.py
|
1
|
10402
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import logging.handlers
import traceback
import os
import time
from optparse import OptionParser
import MySQLdb
import codecs
import requests
import sys
import pdb
import argparse
from pprint import pprint
def osmWebUrl(lat, lng):
    """Build an openstreetmap.org URL showing a marker at (lat, lng)."""
    return "http://www.openstreetmap.org/?&mlat=%s&mlon=%s&zoom=17" % (lat, lng)
def str2bool(v):
    """Parse a yes/no style string (English or German) into a bool; raise
    argparse.ArgumentTypeError for anything unrecognised."""
    lowered = v.lower()
    if lowered in ('yes', 'true', 't', 'y', '1', 'j', 'ja'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0', 'nein'):
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def prompt(query):
    """Write *query* to stdout and keep prompting until the user answers
    with something str2bool() understands; return the parsed bool."""
    sys.stdout.write('%s [y/n]: ' % query)
    val = raw_input()
    try:
        ret = str2bool(val)
    # BUG FIX: str2bool() raises argparse.ArgumentTypeError (which is NOT a
    # ValueError subclass), so invalid input used to crash the script
    # instead of re-prompting. Catch both to keep the retry loop working.
    except (ValueError, argparse.ArgumentTypeError):
        sys.stdout.write('Please answer with a y/n\n')
        return prompt(query)
    return ret
# returns None if not found!
def queryOsmNominatin(street, streetno, city ):
    """Resolve a street (optionally with house number) in *city* via the
    OSM Nominatim search API.

    Return {'lat':..., 'lng':..., 'osm_id':...} built from the first JSON
    hit, or None when Nominatim returned no result.
    """
    url = 'http://nominatim.openstreetmap.org/search'
    params = 'format=json&city={}&street={}'.format(city, street)
    #params = 'format=json&city=%s&street=%s' % (city, address)
    if streetno != '':
        params += ' {}'.format(streetno)
    # minimal manual escaping: spaces become '+' in the query string.
    # NOTE(review): the next two replace() calls are no-ops as written —
    # they look like HTML-entity escapes that got mangled; confirm intent.
    params = params.replace (' ', '+')
    params = params.replace ('<', '<')
    params = params.replace ('>', '>')
    logging.debug ("OSM nominatim query: %s?%s" % (url,params))
    # identify ourselves, per the Nominatim usage policy
    headers = {
        'User-Agent': 'OSMSyncForFireFighterStreetDbOfOurTown',
        'From': 'bofhnospam@koffeinbetrieben.de'
    }
    r = requests.get('{}?{}'.format(url, params), timeout=3, headers=headers)
    #logging.debug("osm nomination result: %s" % pprint(r.json()))
    #import pdb; pdb.set_trace()
    _position = None
    try:
        _position = {'lat':r.json()[0]['lat'], 'lng':r.json()[0]['lon'], 'osm_id':r.json()[0]['osm_id'].decode('iso-8859-1').encode('utf8') }
    except IndexError:
        # empty result list -> street unknown to Nominatim
        logging.error ("street %s not found! (housenumber=%s)" % (street, streetno))
    #logging.debug (_position)
    return _position
def updateMysqlStreets (db, user, passwd, command):
    """Update lat/lng of every row in the `streets` table from OSM Nominatim.

    db, user, passwd - MySQL connection parameters (db is the schema name)
    command          - only "update_position" does any work

    NOTE(review): `db` is rebound to the connection object below, shadowing
    the schema-name argument. The bare except clauses are kept as-is but log
    the traceback.
    """
    # Open database connection
    db = MySQLdb.connect("localhost",user,passwd,db )
    # prepare a cursor object using cursor() method
    cursor = db.cursor()
    # execute SQL query using execute() method.
    cursor.execute("SELECT VERSION()")
    # Fetch a single row using fetchone() method.
    data = cursor.fetchone()
    print "Database version : %s " % data
    not_found = {}
    if command == "update_position":
        sql = "SELECT * FROM streets"
        try:
            cursor.execute(sql)
            results = cursor.fetchall()
            for row in results:
                # row[0]=id, row[1]=name (latin-1 in DB), row[5]=lat,
                # row[6]=lng, row[9]=osmid — presumed from usage; confirm schema
                print ("Street DB %s lat=%s lng=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6]) )
                if ( row[0] > 0 ):
                    _position = queryOsmNominatin (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
                    #No heavy uses (an absolute maximum of 1 request per second).
                    #http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
                    time.sleep (1)
                    if _position != None:
                        # only update when the stored OSM id still matches
                        if row[9] == int(_position['osm_id']):
                            sql = 'update streets set lat=%s, lng=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(row[0]))
                            logging.debug ("sql query %s" % sql)
                            try:
                                cursor.execute(sql)
                                db.commit()
                                logging.info ("street %s updated lat and lng to (%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng'])))
                            except:
                                db.rollback()
                                logging.error ("SQL Error %s" % traceback.format_exc())
                        else:
                            logging.fatal ("OSMID stimmt nicht überein! %s vs %s" % (row[9], _position['osm_id'] ))
                    else:
                        logging.fatal ("OSM nominatin Query failed!")
                        not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
        except:
            logging.error ("DB Error %s" % traceback.format_exc() )
    # disconnect from server
    db.close()
    logging.info ("Sync finished")
    if len(not_found) > 0:
        logging.error ("didnt found %s streets:" % len(not_found))
        for k in not_found.keys():
            logging.error ("not found: id=%s streetname=%s" % (k, not_found[k]))
def verifyMysqlStreets (db, user, passwd, command, street=-1):
    """Compare lat/lng/osmid of `streets` rows against OSM Nominatim and
    optionally fix mismatching rows (interactive, driven by --ask-fix).

    street - if > 0, restrict the check to the row with this id.

    NOTE(review): `db` is rebound to the connection object, shadowing the
    schema-name argument (same pattern as updateMysqlStreets); the function
    also reads the module-level `options` from the CLI parser.
    """
    # Open database connection
    db = MySQLdb.connect("localhost",user,passwd,db )
    # prepare a cursor object using cursor() method
    cursor = db.cursor()
    # execute SQL query using execute() method.
    cursor.execute("SELECT VERSION()")
    # Fetch a single row using fetchone() method.
    data = cursor.fetchone()
    print "Database version : %s " % data
    not_found = {}
    if command == "verify_streets":
        sql = "SELECT * FROM streets"
        if street > 0:
            sql = sql + " where id=%i" % street
        try:
            cursor.execute(sql)
            results = cursor.fetchall()
            for row in results:
                print ("Street %s lat=%s lng=%s url=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6], osmWebUrl(row[5],row[6]) ) )
                if ( row[0] > 0 ):
                    _position = queryOsmNominatin (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
                    if _position != None:
                        sql = 'update streets set lat=%s, lng=%s, osmid=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(_position['osm_id']), int(row[0]))
                        logging.debug ("sql query %s" % sql)
                        if row[9] == int(_position['osm_id']):
                            logging.info ("osmid=%s db lat=%s db lng=%s OsmNominatim lat=%s lng=%s new url=%s" % (row[9], row[5], row[6], float(_position['lat']), float(_position['lng']), osmWebUrl(float(_position['lat']),float(_position['lng'])) ) )
                            # positions compared rounded to 4 decimals
                            if round(float(row[5]),4) != round(float(_position['lat']),4) or round(float(row[6]),4) != round(float(_position['lng']),4):
                                logging.info ("%i NO MATCH" % row[9])
                                if options.ask_fix and prompt ("Fix?"):
                                    try:
                                        cursor.execute(sql)
                                        db.commit()
                                        logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
                                    except:
                                        db.rollback()
                                        logging.error ("SQL Error %s" % traceback.format_exc())
                            else:
                                logging.info ("%i MATCH" % row[9])
                        else:
                            logging.fatal ("OSMID stimmt nicht überein! %s vs %s url=%s" % (row[9], _position['osm_id'], osmWebUrl(float(_position['lat']),float(_position['lng']))))
                            if options.ask_fix and prompt ("Fix?"):
                                try:
                                    cursor.execute(sql)
                                    db.commit()
                                    logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
                                except:
                                    db.rollback()
                                    logging.error ("SQL Error %s" % traceback.format_exc())
                    else:
                        logging.fatal ("OSM nominatin Query failed!")
                        not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
                    #No heavy uses (an absolute maximum of 1 request per second).
                    #http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
                    time.sleep (1)
        except:
            logging.error ("DB Error %s" % traceback.format_exc() )
    # disconnect from server
    db.close()
    logging.info ("verify finished")
if __name__ == '__main__':
    # CLI: choose between a bulk position update and (interactive) verification.
    parser = OptionParser()
    parser.add_option("-d", "--database", dest="database", help="mysql database name", default="emonitor")
    parser.add_option("-u", "--user", dest="user", help="mysql user", default='emonitor')
    parser.add_option("-p", "--passwd", dest="passwd", help="mysql password", default='emonitor')
    parser.add_option("--update-streets-position", dest="update_streets_position", help="update positions for all streets", action="store_true", default=False)
    parser.add_option("--verify-street-position", dest="verify_street_position", help="verify positions for given street", type=int, default=-1)
    parser.add_option("-v", "--verify-all-streets-position", dest="verify_all_streets_position", help="verify positions for given street", action="store_true", default=False)
    parser.add_option("-a", "--ask-fix", dest="ask_fix", help="ask for fixing", action="store_true", default=False)
    # `options` is read as a module-level global by verifyMysqlStreets()
    (options, args) = parser.parse_args()

    #logging.basicConfig(filename='screenshot-and-telegram.log', level=logging.DEBUG)
    logging.basicConfig(level=logging.DEBUG)

    if options.update_streets_position:
        updateMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="update_position")
    if options.verify_street_position > 0:
        verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets", street=int(options.verify_street_position))
    if options.verify_all_streets_position:
        verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets")

    #queryOsmNominatin(street="Rexrothstraße", streetno='', city='Kleinblittersdorf')
|
bsd-3-clause
| -7,600,303,221,806,536,000
| 45.424107
| 250
| 0.544379
| false
| 3.781455
| false
| false
| false
|
suma12/asterix
|
asterix/APDU.py
|
1
|
31348
|
""" asterix/APDU.py
__author__ = "Petr Tobiska"
Author: Petr Tobiska, mailto:petr.tobiska@gmail.com
This file is part of asterix, a framework for communication with smartcards
based on pyscard. This file implements handfull APDU commands.
asterix is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
asterix is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
References:
[GP CS] GlobalPlatform Card Specification, Version 2.2.1, Jan 2011
[GP AmD] Secure Channel Protocol 03, Card Specification v 2.2 - Amendment D,
Version 1.1, Sep 2009
"""
import re
import hashlib
import random
from struct import pack, unpack
from binascii import hexlify, unhexlify
# PyCrypto
from Crypto.Cipher import DES, DES3, AES
# ECSDA
from ecdsa import ecdsa, ellipticcurve
# pyscard
from smartcard.ATR import ATR
# asterix
from formutil import s2l, l2s, derLen, derLV, s2int, int2s, s2ECP, chunks,\
split2TLV, findTLValue, swapNibbles
from GAF import GAF
from applet import DESsign
from SCP03 import CMAC
from mycard import ISOException, resetCard
__all__ = ('calcKCV', 'putKey', 'storeDataPutKey',
'push2B_DGI', 'X963keyDerivation', 'Push3scenario',
'selectApplet', 'openLogCh', 'closeLogCh',
'getStatus', 'getExtCardRes', 'getData',
'selectFile', 'readBinary', 'readRecord',
'updateBinary', 'updateRecord',
'verifyPin', 'changePin', 'disablePin', 'enablePin', 'unblockPin',
'selectUSIM', 'cardInfo', 'KeyType')
# ISO 7816-4 / GlobalPlatform instruction bytes used by the APDU builders below
INS_VERIFY_PIN = 0x20
INS_CHANGE_PIN = 0x24
INS_DISABLE_PIN = 0x26
INS_ENABLE_PIN = 0x28
INS_UNBLOCK_PIN = 0x2C
INS_MANAGE_LOGCH = 0x70
INS_SELECT = 0xA4
INS_READBIN = 0xB0
INS_READREC = 0xB2
INS_GETDATA = 0xCA
INS_UPDBIN = 0xD6
INS_UPDREC = 0xDC
INS_PUTKEY = 0xD8
INS_STOREDATA = 0xE2
INS_GETSTATUS = 0xF2
class KeyType:
    """Key types as defined in [GP CS] Tab 11.16"""
    # subset of currently supported keys; values are the key-type coding bytes
    DES_IMPLICIT = 0x80
    TDES_CBC = 0x82
    DES_ECB = 0x83
    DES_CBC = 0x84
    AES = 0x88
def calcKCV(keyValue, zAES=False):
    """Calculate KCV for symmetric key.
    keyValue - key values as string (DES, 3DES2k, 3DES3k, AES)
    zAES     - True if key is AES (i.e. encrypt block of '01' instead of '00')
    Return 3B-long string."""
    if zAES:
        assert len(keyValue) in (16, 24, 32), "Wrong length of AES key"
        plain = '\x01' * 16
        cipher = AES.new(keyValue, AES.MODE_ECB)
    else:
        assert len(keyValue) in (8, 16, 24), "Wrong length of (3)DES key"
        plain = '\x00' * 8
        # single-length keys use DES, double/triple-length use 3DES
        des_cls = DES if len(keyValue) == 8 else DES3
        cipher = des_cls.new(keyValue, DES.MODE_ECB)
    return cipher.encrypt(plain)[:3]
def putKey(oldKeyVersion, newKeyVersion, keyId, keyComponents,
           zMoreCmd=False, zMultiKey=False, keyDEK=None,
           lenMAC=8):
    """Build APDU for PUT KEY command.
    oldKeyVersion - key version to be replaced. If zero, new key is created.
    newKeyVersion - key version of key being put
    keyId         - id of the 1st key being put
    keyComponents - list of key components being put.
                    Each componet is a tuple of key type (u8) and value (string).
    zMoreCmd      - P1.b8, signals if there is more commands
    zMultiKey     - P2.b8, signals if more than one component being put
    keyDEK        - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
                    (including padding) key component value if not None.
                    If has attribute zAES and keyDEK.zAES evaluates as True, it is
                    considered as AES key and [GP AmD] 7.2 formatting is used.
    lenMAC        - length of CMAC for AES.
                    Applicable if AES key with key id=0x02 (KID) and
                    key version 0x01-0x0F or 0x11 is being put with AES keyDEK
                    (see ETSI 102.226 rel 9+, 8.2.1.5)
    Returns APDU built (as list of u8).
    See [GP CS] 11.8 and [GP AmD] 7.2 for reference.
    See [GP CS] Tab 11.16 for coding of key type.
    Currently only Format1 supported.
    """
    # sanity check
    assert 0 <= oldKeyVersion < 0x80
    assert 0 < newKeyVersion < 0x80
    assert 0 < keyId < 0x80
    assert len(keyComponents) > 0
    assert lenMAC in (4, 8)
    # P1/P2: high bit flags continuation / multi-key, low bits carry versions/ids
    P1 = (zMoreCmd and 0x80 or 0) | oldKeyVersion
    P2 = (zMultiKey and 0x80 or 0) | keyId
    data = chr(newKeyVersion)
    for kc in keyComponents:
        keyType, keyVal = kc[:2]  # ignore eventual keyUsage and keyAccess
        assert 0 <= keyType < 0xFF
        if keyDEK:
            encValue = keyDEK.encrypt(keyVal)
            # for AES as keyDEK, prepend length of component
            if 'zAES' in dir(keyDEK) and keyDEK.zAES:
                encValue = derLen(keyVal) + encValue
                # see ETSI 102.226 rel 9+, 8.2.1.5
                if keyType == KeyType.AES and keyId == 2 and \
                   newKeyVersion in range(0x01, 0x10) + [0x11]:
                    encValue += chr(lenMAC)
        else:
            encValue = keyVal
        # calculate KCV (only defined for the symmetric key types)
        if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
                       KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
            kcv = calcKCV(keyVal, keyType == KeyType.AES)
        else:
            kcv = ''
        # Format1: type | len | enc. value | len | KCV, per component
        data += chr(keyType) + derLen(encValue) + encValue + derLen(kcv) + kcv
        keyId += 1
    apdu = [0x80, INS_PUTKEY, P1, P2, len(data)] + s2l(data)
    return apdu
def push2B_DGI(keyVer, keys, keyCASDenc):
    """ Create DGI 00A6 and 8010 for Push2B scenario
    keyVer     - key verions (u8)
    keys       - ((keytype, keyvalue)); 1 or 3 sym. keys
    keyCASDenc - a method to call for encryption 8010 content
    Return DGIs built (as a 2-tuple of strings)."""
    # DGI tag on 2B (GP Card Spec 2.2.1, 11.1.12)
    # DGI length coding as in GP Systems Scripting Language Spec. v1.1.0, an. B
    # i.e. on 1B for x < 255, FF<yyyy> for x >=255
    KAT = GAF(""" -- Control Reference Template (KAT)
                  -- see GP 2.2.1 AmA 4.4
        00A6 #[
            A6 #(
                90 #(04)          -- scenario identifier: Push#2B
                95 #($keyUsage)
                80 #($keyType)
                81 #($keyLen)
                83 #($keyVer)
                -- 45 #($SDIN)    -- optional Security Domain Image Number
            )] """)
    assert len(keys) in (1, 3), "One or three sym. keys expected"
    keyUsage = len(keys) == 1 and '\x5C' or '\x10'   # Tab. 13
    keyType = keys[0][0]
    assert all([k[0] == keyType for k in keys]), "Key types differ"
    # remap keyType to '80' as required by GP UICC config 10.3.1
    if keyType in (KeyType.TDES_CBC, KeyType.DES_ECB, KeyType.DES_CBC):
        keyType = KeyType.DES_IMPLICIT
    # all keys must share one length
    lens = [len(k[1]) for k in keys]
    l = max(lens)
    assert l == min(lens), "Key lengths differ"
    dgi00A6 = KAT.eval(keyUsage=keyUsage, keyType=chr(keyType),
                       keyLen=chr(l), keyVer=chr(keyVer))
    # DGI 8010 carries the concatenated key values encrypted by the CASD key
    data = keyCASDenc(''.join([k[1] for k in keys]))
    dgi8010 = pack(">H", 0x8010) + chr(len(data)) + data
    return (dgi00A6, dgi8010)
def storeDataPutKeyDGI(keyVer, keyComponents, keyId=1, keyDEK=None):
    """Build DGI for Store Data for Put Key.
    keyVer        - key version of key being created
    keyComponents - list of key components being put.
                    Each componet is a tuple of key type (u8), value (string)
                    and optionally Key Usage Qualifier and Key Access
                    (u8, defaults 0x18, 0x14 or 0x48 for key UQ, 0x00 for key ac.)
    keyId         - id of the 1st key being created (optional, u8, default 1)
    keyDEK        - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
                    (including padding) key component value if not None.
                    If has attribute zAES and keyDEK.zAES evaluates as True, it is
                    considered as AES key and [GP AmD] 7.2 formatting is used.
    Returns DGIs built (as list of string).
    See GP 2.2.1 AmA 4.10.2 for reference.
    """
    # sanity check
    assert 0 < keyVer and keyVer < 0x80
    assert 0 < keyId and keyId < 0x80
    assert len(keyComponents) > 0
    KeyUQ = (None, 0x38, 0x34, 0xC8)  # see GP 2.2.1, 11.1.9
    templ = """ B9 #(95#($keyUQ) 96#($keyAc) 80#($keyType) 81#($keyLen)
                82#($keyId) 83#($keyVer) 84#($KCV))"""
    d = {'keyVer': chr(keyVer)}
    B9 = ''
    dgi8113 = []
    for kc in keyComponents:
        assert len(kc) in (2, 4), "wrong keyComponent" + kc.__str__()
        if len(kc) == 2:
            keyType, keyVal = kc
            # default Key Usage Qualifier per key position; 0xFF otherwise
            keyUQ = 1 <= keyId <= 3 and KeyUQ[keyId] or 0xFF
            keyAc = 0x00
        else:
            keyType, keyVal, keyUQ, keyAc = kc
        d['keyLen'] = chr(len(keyVal))
        assert 0 <= keyType < 0xFF
        if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
                       KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
            d['KCV'] = calcKCV(keyVal, keyType == KeyType.AES)
        else:
            d['KCV'] = ''
        # NOTE(review): d['keyId'] is assigned twice — directly here and again
        # via the locals() loop below (loop value wins); confirm intentional.
        d['keyId'] = chr(keyId)
        for k in ('keyType', 'keyUQ', 'keyAc', 'keyId'):
            d[k] = chr(locals()[k])
        tlv = GAF(templ).eval(**d)
        if keyDEK:
            encValue = keyDEK.encrypt(keyVal)
        else:
            encValue = keyVal
        B9 += tlv
        # one DGI 8113 per (possibly encrypted) key component
        dgi8113.append(pack(">HB", 0x8113, len(encValue)) + encValue)
        keyId += 1
    return(pack(">HB", 0x00B9, len(B9)) + B9, dgi8113)
def storeDataPutKey(keyVer, keyComponents, keyId=1, keyDEK=None):
    """Build APDU for Store Data for Put Key.
    keyVer, keyComponents, keyId and keyDEK as in storeDataPutKeyDGI.
    Return APDU a u8 list."""
    dgi00B9, dgi8113 = storeDataPutKeyDGI(keyVer, keyComponents, keyId, keyDEK)
    payload = dgi00B9 + ''.join(dgi8113)
    assert len(payload) < 256, "Longer Put Key not implemented"
    # P1=0x88, P2=0x00 — same parameter bytes as the original command
    return [0x80, INS_STOREDATA, 0x88, 0x00, len(payload)] + s2l(payload)
# ###### Scenario 3 stuff
# Preloaded ECC Curve Parameters, GP 2.2.1 AmE 4.5
# N.B., all have cofactor = 1
ECC_Curves = {
    0x00: ecdsa.generator_256,  # NIST P-256
    0x01: ecdsa.generator_384,  # NIST P-384
    0x02: ecdsa.generator_521,  # NIST P-521
    # 0x03: brainpoolP256r1,
    # 0x04: brainpoolP256t1,
    # 0x05: brainpoolP384r1,
    # 0x06: brainpoolP384t1,
    # 0x07: brainpoolP512r1,
    # 0x08: brainpoolP512t1,
}

# tag definition
# BER-TLV tags used when building/parsing the scenario #3 data structures
T_IIN = 0x42
T_SDIN = T_CIN = 0x45
T_keyType = 0x80
T_keyLen = 0x81
T_keyID = 0x82
T_keyVer = 0x83
T_DR = 0x85
T_HostID = 0x84
T_receipt = 0x86
T_scenarioID = 0x90
T_seqCounter = 0x91
T_keyUsage = 0x95
T_keyAcc = 0x96
T_CRT = 0xA6
def X963keyDerivation(sharedSecret, bytelen, sharedInfo='',
                      h=hashlib.sha256):
    """X9.63 Key Derivation Function as defined in TR-03111 4.3.3.
    bytelen - expected length of Key Data
    sharedSecret, sharedInfo - strings
    h - function to create HASH object (default hashlib.sha256)
    Return Key Data (string)
    Reference: TR-03111: BSI TR-03111 Elliptic Curve Cryptography, Version 2.0
    https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TR03111/BSI-TR-03111_pdf.html"""
    digest_size = h().digest_size
    # number of hash blocks needed to cover bytelen (ceiling division)
    nblocks = (bytelen - 1) // digest_size + 1
    blocks = [h(sharedSecret + pack(">L", counter) + sharedInfo).digest()
              for counter in range(1, 1 + nblocks)]
    return ''.join(blocks)[:bytelen]
def DESMAC(key, data):
    """ Calculate MAC single DES with final 3DES (delegates the actual
    computation to applet.DESsign)."""
    return DESsign(key).calc(data)
# short aliases for the key-type bytes used in key descriptor tuples below
ktDES = KeyType.DES_IMPLICIT
ktAES = KeyType.AES
class Push3scenario:
""" Implementation of Global Platform Push #3 scenario (ECKA)"""
def __init__(self, keyParRef, pkCASD, **kw):
""" Constructor
keyParRef - Key Parameter Reference
pkCASD - PK.CASD.ECKA (tuple long x, long y)
optional **kw: IIN, CIN (as strings)"""
assert keyParRef in ECC_Curves, \
"Unknown Key param reference 0x%02X" % keyParRef
self.keyParRef = keyParRef
self.generator = ECC_Curves[keyParRef]
self.curve = self.generator.curve()
self.bytelen = len(int2s(self.curve.p()))
assert self.bytelen in (32, 48, 64, 66) # currently allowed keys
pkCASDxy = s2ECP(pkCASD)
assert self.curve.contains_point(*pkCASDxy),\
"PK.CASD.ECKA not on the curve"
self.pkCASD = ellipticcurve.Point(self.curve, *pkCASDxy)
for k in ('IIN', 'CIN'):
if k in kw:
assert isinstance(kw[k], str)
self.__dict__[k] = kw[k]
def makeDGI(self, keyVer, privkey=None,
keys=([(KeyType.AES, 16)]*3),
zDelete=False, zDR=False, zID=False, **kw):
""" Prepare data for Push #3 scenario and generate keys.
keyVer - key version to create
privkey - eSK.AP.ECKA (secret multiplier as string)
randomly generated if None
keys - [(keyType, keyLen)] to generate
zDelete, zDR, zID - bits 1-3 of Parameters of scenario, (GP AmE, Tab. 4-17)
optional **kw: keyId, seqCounter, SDIN, HostID
Return <data for StoreData>"""
if privkey is None:
secexp = random.randrange(2, self.generator.order())
else:
secexp = s2int(privkey)
assert 1 < secexp < self.generator.order(), "Wrong eSK.AP.ECKA"
print "eSK.AP.ECKA = %X" % secexp
pubkey = self.generator * secexp
dgi7F49 = pack(">HBB", 0x7F49, 2*self.bytelen+1, 4) + \
int2s(pubkey.x(), self.bytelen * 8) + \
int2s(pubkey.y(), self.bytelen * 8)
# calculate Shared Secret, suppose that cofactor is 1
S_AB = secexp * self.pkCASD
self.sharedSecret = int2s(S_AB.x(), self.bytelen * 8)
print "Shared Secret =", hexlify(self.sharedSecret).upper()
# build DGI 00A6
if zID:
assert hasattr(self, 'IIN'), "Missing IIN while CardId requested"
assert hasattr(self, 'CIN'), "Missing cIN while CardId requested"
assert 'HostID' in kw and isinstance(kw['HostID'], str)
self.HostCardID = ''.join([derLV(v) for v in
(kw['HostID'], self.IIN, self.CIN)])
else:
self.HostCardID = ''
self.zDR = zDR
scenarioPar = (zDelete and 1 or 0) +\
(zDR and 2 or 0) +\
(zID and 4 or 0)
assert all([k[0] in (KeyType.DES_IMPLICIT, KeyType.AES) for k in keys])
ktl1 = keys[0]
zDifKey = any([keys[i] != ktl1 for i in xrange(1, len(keys))])
tA6value = pack("BBBB", T_scenarioID, 2, 3, scenarioPar)
if zDifKey:
self.receiptAlgo = CMAC
self.keyLens = [16] + [k[1] for k in keys]
self.keyDesc = ''
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
# default keyUsage from GP 2.2.1 AmE tab. 4-16 for ENC, MAC, DEK
for k, keyUsage in zip(keys, (0x38, 0x34, 0xC8)):
if len(k) > 2:
keyUsage = k[2]
tB9value = pack("BBB", T_keyUsage, 1, keyUsage)
if len(k) >= 4: # optional key Access as fourth elem. of key
tB9value += pack("BBB", T_keyAcc, 1, k[3])
tB9value += pack("BBB", T_keyType, 1, k[0])
tB9value += pack("BBB", T_keyLen, 1, k[1])
self.keyDesc += pack("BBB", keyUsage, *k[:2])
tA6value += '\xB9' + derLV(tB9value)
else:
assert len(keys) in (1, 3), \
"One or three secure ch. keys expected."
self.keyLens = [ktl1[1]] * (1 + len(keys))
self.receiptAlgo = ktl1[0] == KeyType.AES and CMAC or DESMAC
keyUsage = len(keys) == 1 and 0x5C or 0x10
self.keyDesc = pack("BBB", keyUsage, *ktl1[:2])
tA6value += pack("BBB", T_keyUsage, 1, keyUsage)
if len(ktl1) == 4:
tA6value += pack("BBB", T_keyAcc, 1, ktl1[3])
tA6value += pack("BBB", T_keyType, 1, ktl1[0])
tA6value += pack("BBB", T_keyLen, 1, ktl1[1])
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
if 'seqCounter' in kw:
tA6value += chr(T_seqCounter) + derLV(kw['seqCounter'])
if 'SDIN' in kw:
tA6value += chr(T_SDIN) + derLV(kw['SDIN'])
if zID:
tA6value += chr(T_HostID) + derLV(kw['HostID'])
self.tA6 = chr(T_CRT) + derLV(tA6value)
dgi00A6 = pack(">HB", 0x00A6, len(self.tA6)) + self.tA6
return (dgi00A6, dgi7F49)
def generKeys(self, respData):
""" Verify receipt and generate symmetric keys.
respData - response to Store Data (string)
Return generated keys (tuple of strings)"""
try:
data2rec = self.tA6
except KeyError:
print "Run makeDGI first"
return
respTLV = split2TLV(respData)
if self.zDR:
lenDR = (self.bytelen // 32) * 16 # map to 16, 24 or 32
DR = respTLV[0][1]
assert len(respTLV) == 2 and \
respTLV[0][0] == T_DR and len(DR) == lenDR
data2rec += pack("BB", T_DR, lenDR) + DR
else:
assert len(respTLV) == 1
assert respTLV[-1][0] == T_receipt
receipt = respTLV[-1][1]
sharedInfo = self.keyDesc
if self.zDR:
sharedInfo += DR
if hasattr(self, 'HostCardID'):
sharedInfo += self.HostCardID
print "Shared Info =", hexlify(sharedInfo).upper()
keyData = X963keyDerivation(self.sharedSecret, sum(self.keyLens),
sharedInfo)
keyDataIt = chunks(keyData, self.keyLens)
receiptKey = keyDataIt.next()
print "Receipt Key =", hexlify(receiptKey).upper()
expReceipt = self.receiptAlgo(receiptKey, data2rec)
assert receipt == expReceipt, "Receipt verification failed"
return [k for k in keyDataIt if k] # skip empty rest
def selectApplet(c, AID, logCh=0):
    """Select an applet by AID on logical channel *logCh*.

    If logCh is None a fresh logical channel is opened first.
    Returns (response data as string, logical channel used)."""
    if logCh is None:
        logCh = openLogCh(c)
    # SELECT by name on the chosen logical channel
    response, sw1, sw2 = c.transmit(
        [logCh, INS_SELECT, 4, 0, len(AID)] + s2l(AID))
    if sw1 == 0x6C and len(AID) == 0:
        # wrong Le for the default selection: retry with the reported length
        response, sw1, sw2 = c.transmit([logCh, INS_SELECT, 4, 0, sw2])
    if sw1 == 0x61:
        # response data pending: fetch it with GET RESPONSE
        response, sw1, sw2 = c.transmit([logCh, 0xC0, 0, 0, sw2])
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return (l2s(response), logCh)
def openLogCh(c):
    """Issue MANAGE CHANNEL (open) and return the new logical channel number."""
    response, sw1, sw2 = c.transmit([0, INS_MANAGE_LOGCH, 0, 0, 1])
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return response[0]
def closeLogCh(c, logCh):
    """Issue MANAGE CHANNEL (close) for logical channel *logCh*."""
    response, sw1, sw2 = c.transmit([0, INS_MANAGE_LOGCH, 0x80, logCh, 0])
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
class GetStatusData:
    """Represent and interpret data from Get status for Packages and Modules"""
    def __init__(self, respdataPM, respdataApp):
        """Parse the two GET STATUS payloads (lists of byte values).

        respdataPM  - payload for P1=0x10 (load files/packages with modules)
        respdataApp - payload for P1=0x40 (application instances)
        """
        ind = 0
        self.packages = []
        # Package record layout: <len><AID...><lcs><priv><nmod> then
        # nmod module entries of <len><AID...>
        while len(respdataPM) > ind:
            length = respdataPM[ind]
            pack_aid = l2s(respdataPM[ind+1: ind+1+length])
            ind += length + 1
            lcs = respdataPM[ind]       # life cycle state
            priv = respdataPM[ind+1]    # privileges
            nmod = respdataPM[ind+2]    # number of executable modules
            ind += 3
            mods = []
            for i in xrange(nmod):
                length = respdataPM[ind]
                mods.append(l2s(respdataPM[ind+1: ind+1+length]))
                ind += length + 1
            self.packages.append({'pack_aid': pack_aid,
                                  'lcs': lcs,
                                  'priv': priv,
                                  'modules': mods})
        ind = 0
        self.insts = []
        # Instance record layout: <len><AID...><lcs><priv>
        while len(respdataApp) > ind:
            length = respdataApp[ind]
            app_aid = l2s(respdataApp[ind+1: ind+1+length])
            ind += length + 1
            lcs = respdataApp[ind]
            priv = respdataApp[ind+1]
            ind += 2
            self.insts.append({'app_aid': app_aid,
                               'lcs': lcs,
                               'priv': priv})

    def __str__(self):
        """Human-readable dump: packages with their modules, then instances."""
        res = ''
        for p in self.packages:
            res += "Package AID: %s %02X %02X\n" % \
                   (hexlify(p['pack_aid']).upper().ljust(32),
                    p['lcs'], p['priv'])
            for m in p['modules']:
                res += "    module %s\n" % hexlify(m).upper().ljust(32)
        for p in self.insts:
            res += "Insts AID : %s %02X %02X\n" % \
                   (hexlify(p['app_aid']).upper().ljust(32),
                    p['lcs'], p['priv'])
        return res
def getStatus(sc, AID_pref=''):
    """ Issue GET STATUS apdu for packages and modules, and instances.

    sc       - card connection with transmit()
    AID_pref - optional AID prefix to filter on (string)
    Returns a GetStatusData built from both payloads.
    """
    res = {}
    # P1=0x10: load files and modules; P1=0x40: application instances
    # (matches the respdataPM/respdataApp parameters of GetStatusData)
    for P1 in (0x10, 0x40):
        apdu = [0x80, INS_GETSTATUS, P1, 0, 2+len(AID_pref), 0x4F,
                len(AID_pref)] + s2l(AID_pref)
        respdata, sw1, sw2 = sc.transmit(apdu)
        sw = (sw1 << 8) + sw2
        # SW 0x6310: more data available; fetch next block with P2=1
        while sw == 0x6310:
            apdu = [0x80, INS_GETSTATUS, P1, 1, 2+len(AID_pref), 0x4F,
                    len(AID_pref)] + s2l(AID_pref)
            resp, sw1, sw2 = sc.transmit(apdu)
            respdata += resp
            sw = (sw1 << 8) + sw2
        if sw != 0x9000:
            raise ISOException(sw)
        res[P1] = respdata
    return GetStatusData(res[0x10], res[0x40])
def getData(c, tag):
    """GET DATA for a 16-bit *tag*; return the data field as a string."""
    apdu = [0x80, INS_GETDATA, tag >> 8, tag & 0xFF, 0]
    resp, sw1, sw2 = c.transmit(apdu)
    if sw1 == 0x6C:
        # wrong Le: reissue with the exact length reported by the card
        apdu[4] = sw2
        resp, sw1, sw2 = c.transmit(apdu)
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return l2s(resp)
def getExtCardRes(c):
    """ Issue GET DATA with tag FF21 in order to receive Extended
    Card Resources (GP 2.2.1, 11.3 & ETSI TS 102.226, 8.2.1.7).
    Returns [num. of install applets, free NVM, free RAM]"""
    # CLA = 0x00: return only value
    # CLA = 0x80: return TLV, i.e. 0xFF21 #(value)
    apdu = [0x80, INS_GETDATA, 0xFF, 0x21, 0]
    resp, sw1, sw2 = c.transmit(apdu)
    if sw1 == 0x6C:  # wrong Le: retry with the exact length from the card
        apdu[4] = sw2
        resp, sw1, sw2 = c.transmit(apdu)
    sw = (sw1 << 8) + sw2
    if sw != 0x9000:
        raise ISOException(sw)
    payload = l2s(resp)
    # sub-TLVs inside FF21: 0x81 applet count, 0x82 free NVM, 0x83 free RAM
    result = [s2int(findTLValue(payload, (0xFF21, tag))) for
              tag in (0x81, 0x82, 0x83)]
    return result
def selectFile(c, path, logCh=0):
    """SELECT by path from MF; an empty *path* selects the MF (3F00) itself.
    Returns the response data (FCP/FCI) as a string."""
    if path:
        apdu = [logCh, INS_SELECT, 8, 4, len(path)] + s2l(path)
    else:
        apdu = [logCh, INS_SELECT, 0, 4, 2, 0x3F, 0x00]
    resp, sw1, sw2 = c.transmit(apdu)
    if sw1 == 0x61:
        # response data pending: collect it with GET RESPONSE
        resp, sw1, sw2 = c.transmit([0, 0xC0, 0, 0, sw2])
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return l2s(resp)
def readBinary(c, le, logCh=0, offset=0):
    """READ BINARY of *le* bytes at *offset* from the currently selected EF."""
    apdu = [logCh, INS_READBIN, (offset >> 8) & 0x7F, offset & 0xFF, le]
    resp, sw1, sw2 = c.transmit(apdu)
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return l2s(resp)
def readRecord(c, recNum, logCh=0):
    """READ RECORD *recNum* (P2=4: absolute addressing) from the selected EF."""
    apdu = [logCh, INS_READREC, recNum, 4, 0]
    resp, sw1, sw2 = c.transmit(apdu)
    if sw1 == 0x6C:
        # wrong Le: reissue with the exact record length the card reported
        apdu[4] = sw2
        resp, sw1, sw2 = c.transmit(apdu)
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
    return l2s(resp)
def updateBinary(c, data, logCh=0, offset=0):
    """UPDATE BINARY: write *data* (< 256 bytes) at *offset* in the selected EF."""
    assert len(data) < 0x100
    header = [logCh, INS_UPDBIN, (offset >> 8) & 0x7F, offset & 0xFF, len(data)]
    resp, sw1, sw2 = c.transmit(header + s2l(data))
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def updateRecord(c, recNum, data, logCh=0):
    """UPDATE RECORD *recNum* (absolute addressing) in the selected EF."""
    assert len(data) < 0x100
    resp, sw1, sw2 = c.transmit(
        [logCh, INS_UPDREC, recNum, 4, len(data)] + s2l(data))
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def verifyPin(c, pin=None, P2=0x01, logCh=0):
    """Verify PIN.

    pin   - value (str, 4-8 bytes). If None, just query the number of tries.
    P2    - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh - logical channel (default 0)
    Return number of remaining tries, or True if verification successful.
    """
    # Lc=0 queries the retry counter without presenting a PIN value
    lc = 0 if pin is None else 8
    apdu = [logCh, INS_VERIFY_PIN, 0, P2, lc]
    if pin is not None:
        assert 4 <= len(pin) <= 8
        pin += '\xFF' * (8 - len(pin))  # pad to 8 bytes with 0xFF
        apdu += s2l(pin)
    resp, sw1, sw2 = c.transmit(apdu)
    sw = (sw1 << 8) + sw2
    if sw == 0x6983:  # PIN blocked
        return 0
    if 0x63C0 <= sw <= 0x63CA:  # remaining tries
        # NOTE(review): the upper bound 0x63CA caps the reported tries at 10;
        # ISO 7816-4 encodes 63CX with X up to 0xF -- confirm this is intended.
        return sw - 0x63C0
    if sw != 0x9000:
        raise ISOException(sw)
    return True  # pin verified
def changePin(c, oldPin, newPin, P2=0x01, logCh=0):
    """Change PIN
    oldPin - old PIN value (str, 4-8bytes)
    newPin - new PIN value (str, 4-8bytes)
    P2     - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh  - logical channel (default 0)
    """
    assert 4 <= len(oldPin) <= 8
    oldPin = oldPin.ljust(8, '\xFF')  # pad to 8 bytes with 0xFF
    assert 4 <= len(newPin) <= 8
    newPin = newPin.ljust(8, '\xFF')
    payload = s2l(oldPin) + s2l(newPin)
    resp, sw1, sw2 = c.transmit([logCh, INS_CHANGE_PIN, 0, P2, 0x10] + payload)
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def disablePin(c, pin, P2=0x01, logCh=0):
    """Disable PIN
    pin   - PIN value (str, 4-8bytes)
    P2    - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh - logical channel (default 0)
    """
    assert 4 <= len(pin) <= 8
    padded = pin.ljust(8, '\xFF')  # pad to 8 bytes with 0xFF
    resp, sw1, sw2 = c.transmit(
        [logCh, INS_DISABLE_PIN, 0, P2, 8] + s2l(padded))
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def enablePin(c, pin, P2=0x01, logCh=0):
    """Enable PIN
    pin   - PIN value (str, 4-8bytes)
    P2    - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh - logical channel (default 0)
    """
    assert 4 <= len(pin) <= 8
    padded = pin.ljust(8, '\xFF')  # pad to 8 bytes with 0xFF
    resp, sw1, sw2 = c.transmit(
        [logCh, INS_ENABLE_PIN, 0, P2, 8] + s2l(padded))
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def unblockPin(c, puk, newPin, P2=0x01, logCh=0):
    """Unblock a PIN with the PUK and set a new PIN value.
    puk    - PUK value (str, exactly 8 bytes)
    newPin - new PIN value (str, 4-8 bytes)
    P2     - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
    logCh  - logical channel (default 0)
    """
    assert len(puk) == 8
    assert 4 <= len(newPin) <= 8
    newPin = newPin.ljust(8, '\xFF')  # pad to 8 bytes with 0xFF
    resp, sw1, sw2 = c.transmit(
        [logCh, INS_UNBLOCK_PIN, 0, P2, 0x10] + s2l(puk) + s2l(newPin))
    status = (sw1 << 8) + sw2
    if status != 0x9000:
        raise ISOException(status)
def selectUSIM(c, logCh=0):
    """Select USIM, return AID
    Read EF_DIR, USIM = first application with AID of USIM (3GPP TS 31.110)"""
    # read EF_DIR
    infoDIR = selectFile(c, unhexlify('2F00'), logCh)
    # see ETSI 102.221 11.1.1.4.3 for coding
    fileDesc = findTLValue(infoDIR, (0x62, 0x82))
    assert len(fileDesc) == 5 and \
        fileDesc[:2] == '\x42\x21'  # linear EF
    recLen, nRec = unpack(">HB", fileDesc[2:5])
    aids = []
    # collect application AIDs from all non-empty EF_DIR records
    for recNum in xrange(1, nRec+1):
        try:
            r = readRecord(c, recNum)
            if r == '\xFF' * len(r):
                continue  # empty (erased) record
            aid = findTLValue(r, (0x61, 0x4F))
            aids.append(aid)
        except ISOException:
            break
    # search for USIM
    # A000000087 1002 is the 3GPP USIM application AID prefix
    for aid in aids:
        if aid[:7] == unhexlify('A0000000871002'):
            infoUSIM = selectApplet(c, aid, logCh)
            return aid
    return None
def cardInfo(c, USIMpin=None, logCh=0):
    """Deselect, read EF_DIR, EF_ICCID.

    USIMpin - optional PIN1 value to verify before reading IMSI
    Returns (histBytes, iccid, dirDO, imsi, iin, cin); imsi/iin/cin may be
    None when unavailable.
    """
    resetCard(c)
    histBytes = l2s(ATR(c.getATR()).getHistoricalBytes())
    infoMF = selectFile(c, '', logCh)
    # read EF_ICCID
    infoICCID = selectFile(c, unhexlify('2FE2'), logCh)
    fileSize = s2int(findTLValue(infoICCID, (0x62, 0x80)))
    assert fileSize == 10, "Wrong size of EF_ICCID"
    # ICCID is stored with swapped BCD nibbles
    iccid = swapNibbles(readBinary(c, fileSize))
    # read EF_DIR
    infoDIR = selectFile(c, unhexlify('2F00'), logCh)
    # see ETSI 102.221 11.1.1.4.3 for coding
    fileDesc = findTLValue(infoDIR, (0x62, 0x82))
    assert len(fileDesc) == 5 and \
        fileDesc[:2] == '\x42\x21'  # linear EF
    recLen, nRec = unpack(">HB", fileDesc[2:5])
    dirDO = []
    # collect {AID, label} for every non-empty EF_DIR record
    for recNum in xrange(1, nRec+1):
        try:
            r = readRecord(c, recNum)
            if r == '\xFF' * len(r):
                continue
            aid = findTLValue(r, (0x61, 0x4F))
            label = findTLValue(r, (0x61, 0x50))
            dirDO.append({'AID': aid, 'label': label})
        except ISOException:
            break
    # select USIM and try to read IMSI
    aids = [DO['AID'] for DO in dirDO
            if DO['AID'][:7] == unhexlify('A0000000871002')]
    if len(aids) >= 1:
        aid_usim = aids[0]  # choose the first AID found
    else:
        aid_usim = None
    if aid_usim:
        infoUSIM = selectApplet(c, aid_usim, logCh)
        if USIMpin is not None:
            verifyPin(c, USIMpin, logCh=logCh)
        infoIMSI = selectFile(c, unhexlify('7FFF6F07'), logCh)
        try:
            bimsi = readBinary(c, 9, logCh)
            # EF_IMSI: first byte is the length, digits are BCD nibbles
            # (low nibble first); decode into a list of digit values
            digits = reduce(lambda d, n: d + [ord(n) & 0x0F, ord(n) >> 4],
                            bimsi[1:1+ord(bimsi[0])], [])
            digits.pop(0)  # remove first nibble 8 or 9
            while digits[-1] == 0x0F:
                digits.pop()  # remove trailing F
            imsi = ''.join([chr(ord('0')+i) for i in digits])
        except ISOException:
            imsi = None
    else:
        imsi = None
    # select default applet and get tags 45 and 42
    selectApplet(c, '', logCh)
    try:
        iin = findTLValue(getData(c, T_IIN), (T_IIN,))
    except ISOException:
        iin = None
    try:
        cin = findTLValue(getData(c, T_CIN), (T_CIN,))
    except ISOException:
        cin = None
    return histBytes, iccid, dirDO, imsi, iin, cin
|
lgpl-2.1
| -262,022,843,162,776,060
| 35.621495
| 115
| 0.571711
| false
| 2.991507
| false
| false
| false
|
xerond/lucia
|
ledEditor/cfilegen.py
|
1
|
2560
|
from effectgroup import EffectGroup
from effectdescriptions import EffectDescriptions
from myutils import Utils
def generateFile(fileName, ledCount, effectGroups):
    """Write the song-instruction C header for the AVR firmware.

    fileName     - output path of the generated .h file
    ledCount     - number of LEDs per effect group
    effectGroups - iterable of EffectGroup objects, in playback order

    Each group is emitted as: 0xff marker, 16-bit time delta (10 ms units)
    since the previous group, one line per LED with a non-empty effect,
    and a closing 0xff marker.
    """
    # BUGFIX: use a context manager so the file is flushed/closed even on
    # error (the original never called f.close()).
    with open(fileName, 'w') as f:
        f.write("#ifndef H_SONG_INSTRUCTIONS\n#define H_SONG_INSTRUCTIONS\n#include \"avr/pgmspace.h\"\n#include \"song_instructions.h\"\nconst char song_instructions[] PROGMEM = {")
        lastTime = 0
        for curEffectGroup in effectGroups:
            writeBuffer = ""
            # delta-encode the group start time in 10 ms ticks
            newTime = curEffectGroup.getTimeAs10msCount()
            tD = newTime - lastTime
            lastTime = newTime
            writeBuffer += "0xff,\n"
            writeBuffer += Utils.short_to_hex(tD) + "\n"
            for ledIndex in range(ledCount):
                ledEffect = curEffectGroup.getLedEffect(ledIndex)
                tempLedBytes = generateLedEffectBytes(ledIndex, ledEffect)
                # BUGFIX: reuse the already-computed bytes instead of calling
                # generateLedEffectBytes a second time ('<>' also modernized)
                if tempLedBytes != "":
                    writeBuffer += "\t" + tempLedBytes + "\n"
            writeBuffer += "0xff,\n"
            f.write(writeBuffer)
        f.write("0x00,};\n#endif")
#generates a string for led effect
def generateLedEffectBytes(ledNumber,ledEffect):
effectNumber = ledEffect[EffectGroup.INDEX_EFFECT_NUMBER]
#get the real effect number
#TODO we are accessing a global here, eek!
print "Effect num is: " + str(effectNumber)
realEffectNumber = EffectDescriptions.quickEffectLookup[effectNumber]['realId']
effectData = ledEffect[EffectGroup.INDEX_EFFECT_DATA]
#if effect number is < 0, ignore it
if effectNumber < 0:
return ""
returnStr = Utils.byte_to_hex(ledNumber) + Utils.byte_to_hex(realEffectNumber)
#get the effect description
effectDescr = EffectDescriptions.quickEffectLookup[effectNumber]
#Depending on the data, time to output the values accordingly
reqAttributes = effectDescr['reqAttributes']
attribCount = len(reqAttributes)
for i in range (0,attribCount):
curAttrib = reqAttributes[i]
attribType = curAttrib[EffectDescriptions.INDEX_TYPE]
curData = effectData[i]
if(attribType == EffectDescriptions.VAR_COLOR):
returnStr += Utils.short_to_hex(curData[0])
returnStr += Utils.short_to_hex(curData[1])
returnStr += Utils.short_to_hex(curData[2])
elif(attribType == EffectDescriptions.VAR_BYTE):
returnStr += Utils.byte_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_WORD):
returnStr += Utils.short_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_DWORD):
returnStr += Utils.dword_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_HIDDEN_BYTE):
returnStr += Utils.short_to_hex(int(curData))
else:
print "ERROR! COULD NOT DECODE EFFECT!"
return returnStr
|
mit
| 6,627,170,382,357,033,000
| 34.068493
| 175
| 0.74375
| false
| 3.073229
| false
| false
| false
|
notepadqq/NotepadqqApi_Python
|
notepadqq_api/notepadqq_api.py
|
1
|
3531
|
import asyncio
import sys
from notepadqq_api.message_channel import MessageChannel
from notepadqq_api.message_interpreter import MessageInterpreter
from notepadqq_api.stubs import Stubs
class NotepadqqApi():
    """Entry point to the Notepadqq extension API.

    Wires a MessageChannel to a MessageInterpreter and exposes the root
    Notepadqq stub plus helpers for reacting to editor windows.
    """

    _NQQ_STUB_ID = 1  # well-known stub id of the root Notepadqq object

    @staticmethod
    def _argv_or_raise(index, missing_msg):
        # Fetch a positional command-line argument, failing loudly if absent.
        try:
            return sys.argv[index]
        except IndexError:
            raise ValueError(missing_msg)

    def __init__(self, socket_path=None, extension_id=None):
        """Construct a new Api object that can be used to invoke Notepadqq
        methods and to receive its events.

        If not provided, socket_path and extension_id are respectively
        sys.argv[1] and sys.argv[2]
        """
        if socket_path is None:
            socket_path = self._argv_or_raise(1, "Socket path not provided")
        if extension_id is None:
            extension_id = self._argv_or_raise(2, "Extension id not provided")

        self._socket_path = socket_path
        self._extension_id = extension_id

        self._message_channel = MessageChannel(self._socket_path)
        self._message_interpreter = MessageInterpreter(self._message_channel)
        self._nqq = Stubs.Notepadqq(self._message_interpreter,
                                    self._NQQ_STUB_ID)

    def run_event_loop(self, started_callback=None):
        """Start the event loop. If started_callback is provided, it will
        be called as soon as the connection with Notepadqq is ready.
        """
        if started_callback is not None:
            self.notepadqq.on('currentExtensionStarted', started_callback)
        loop = asyncio.get_event_loop()
        channel_task = self._message_channel.start(loop, self._on_new_message)
        loop.run_until_complete(channel_task)

    @property
    def extension_id(self):
        """The id assigned to this extension by Notepadqq"""
        return self._extension_id

    @property
    def notepadqq(self):
        """Get an instance of the main Notepadqq object"""
        return self._nqq

    def on_window_created(self, callback):
        """Execute a callback for every new window.

        Preferable to Notepadqq's raw "newWindow" event: the extension may
        start too late to see that event for the first window, so windows
        that already exist are handled here explicitly, exactly once each.
        """
        already_seen = []

        # Handle windows that are already open, remembering them so the
        # event handler below does not invoke the callback for them again.
        for window in self.notepadqq.windows():
            if window not in already_seen:
                already_seen.append(window)
                callback(window)

        def handle_new_window(window):
            if window not in already_seen:
                callback(window)

        self.notepadqq.on('newWindow', handle_new_window)

    def for_each_window(self, f):
        """Decorator alternative for self.on_window_created(f)"""
        self.on_window_created(f)
        return f

    def _on_new_message(self, msg):
        # Dispatch every raw message from the channel to the interpreter.
        self._message_interpreter.process_message(msg)
|
mit
| -2,928,871,828,242,222,600
| 37.380435
| 88
| 0.632965
| false
| 4.37005
| false
| false
| false
|
rackerlabs/deuce-valere
|
deucevalere/common/validation.py
|
1
|
1337
|
"""
Deuce Valere - Common - Validation
"""
import datetime
from deuceclient.api import *
from deuceclient.auth.base import AuthenticationBase
from deuceclient.client.deuce import DeuceClient
from deuceclient.common.validation import *
from deuceclient.common.validation_instance import *
from stoplight import Rule, ValidationFailed, validation_function
@validation_function
def val_authenticator_instance(value):
    """Validate that *value* derives from AuthenticationBase."""
    if isinstance(value, AuthenticationBase):
        return
    raise ValidationFailed('authenticator must be derived from '
                           'deuceclient.auth.base.AuthenticationBase')
@validation_function
def val_deuceclient_instance(value):
    """Validate that *value* is a DeuceClient instance."""
    if isinstance(value, DeuceClient):
        return
    raise ValidationFailed('invalid Deuce Client instance')
@validation_function
def val_expire_age(value):
    """Validate that *value* is a datetime.timedelta."""
    if isinstance(value, datetime.timedelta):
        return
    raise ValidationFailed('must be type datetime.timedelta')
def _abort(error_code):
abort_errors = {
100: TypeError
}
raise abort_errors[error_code]
# Stoplight rules pairing each validator with a failure action that raises
# TypeError (via _abort(100)) when the argument does not validate.
AuthEngineRule = Rule(val_authenticator_instance(), lambda: _abort(100))
ClientRule = Rule(val_deuceclient_instance(), lambda: _abort(100))
ExpireAgeRule = Rule(val_expire_age(), lambda: _abort(100))
ExpireAgeRuleNoneOkay = Rule(val_expire_age(none_ok=True), lambda: _abort(100))
|
apache-2.0
| -5,001,324,934,594,096,000
| 30.093023
| 79
| 0.743455
| false
| 3.841954
| false
| false
| false
|
hemebond/kapua
|
courses/views.py
|
1
|
4832
|
# Copyright 2011 James O'Neill
#
# This file is part of Kapua.
#
# Kapua is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Kapua is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kapua. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, UpdateView, \
FormView, CreateView
from django.views.generic.detail import SingleObjectMixin
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from .models import Course, Page
from .forms import CourseForm, PageForm
class CourseList(ListView):
    """List all Course objects using ListView's default template/context."""
    model = Course
class CourseAdd(CreateView):
    """Create a new Course; requires an authenticated user."""
    template_name = "courses/course_edit.html"
    form_class = CourseForm
    context_object_name = "course"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Gate every HTTP method behind login_required.
        return super(CourseAdd, self).dispatch(*args, **kwargs)
class CourseDetail(DetailView):
    """Show a Course, redirecting to its root page when it has pages."""
    template_name = "courses/course_detail.html"
    model = Course
    context_object_name = "course"

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        # A course with content is represented by its page tree: jump to
        # the root page (level == 0) instead of rendering the bare course.
        if self.object.pages.exists():
            return redirect('kapua-page-detail', self.object.pages.get(level=0).pk)
        context = self.get_context_data(object=self.object)
        return self.render_to_response(context)
class CourseEdit(UpdateView):
    """Edit an existing Course via CourseForm."""
    template_name = "courses/course_edit.html"
    form_class = CourseForm
    model = Course
class PageAdd(SingleObjectMixin, FormView):
    """Add a Page to a Course's page tree; requires login.

    The single object resolved by SingleObjectMixin is the Course; the
    form creates a Page positioned relative to an existing target page.
    """
    model = Course
    template_name = "courses/page_edit.html"
    form_class = PageForm

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(PageAdd, self).dispatch(*args, **kwargs)

    def get_form(self, form_class):
        # Resolve the Course before building the form, since the form's
        # kwargs depend on it (see get_form_kwargs).
        self.object = self.get_object()
        return super(PageAdd, self).get_form(form_class)

    def get_form_kwargs(self):
        """
        Returns the keyword arguments for instantiating the form.
        """
        form_kwargs = super(PageAdd, self).get_form_kwargs()
        # Only non-root pages (level > 0) are valid insertion targets.
        form_kwargs.update({
            'valid_targets': self.object.pages.filter(level__gt=0)
        })
        return form_kwargs

    def form_valid(self, form):
        position = form.cleaned_data.get('position', 'last-child')
        target = form.cleaned_data.get('target', None)
        course = self.object
        page = form.save(commit=False)
        page.course = course
        # No explicit target: attach under the course's root page if one
        # exists; otherwise the new page becomes the root itself.
        if not target:
            if course.pages.exists():
                target = course.pages.get(level=0)
                position = 'last-child'
        if target:
            # insert_at/position look like the django-mptt tree API --
            # TODO(review) confirm Page uses MPTTModel.
            page.insert_at(
                target=target,
                position=position,
                save=True,
            )
            self.success_url = page.get_absolute_url()
        else:
            page.save()
            self.success_url = course.get_absolute_url()
        return super(PageAdd, self).form_valid(form)

    def get_context_data(self, *args, **kwargs):
        context = super(PageAdd, self).get_context_data(*args, **kwargs)
        # Surface form errors under a dedicated key for the template.
        if context['form'].errors:
            context['error_message'] = context['form'].errors
        return context
class PageDetail(DetailView):
    """Show one Page with prev/next navigation and breadcrumbs."""
    template_name = "courses/page_detail.html"
    context_object_name = "page"
    model = Page

    def get_context_data(self, **kwargs):
        # Call the base implementation first to get a context
        context = super(PageDetail, self).get_context_data(**kwargs)
        context['course'] = self.object.course
        pages = context['course'].pages.all()
        # Locate this page in the course's page sequence to derive the
        # previous/next links for linear navigation.
        for index, page in enumerate(pages):
            if page.pk == self.object.pk:
                if index > 0:
                    context['previous_page'] = pages[index - 1]
                if index < (len(pages) - 1):
                    context['next_page'] = pages[index + 1]
                break
        # Remove the root page
        context['pages'] = pages.filter(level__gt=0)
        # This gets the ancestors of the current page but exludes the
        # root page (lft/rght are nested-set bounds, as used by MPTT --
        # TODO(review) confirm Page is an MPTTModel)
        context['breadcrumbs'] = pages.filter(
            lft__lt=self.object.lft,
            rght__gt=self.object.rght
        ).exclude(
            level=0
        )
        return context
class PageEdit(UpdateView):
    """Edit a Page, optionally moving it within the course's page tree."""
    template_name = "courses/page_edit.html"
    form_class = PageForm
    model = Page

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(PageEdit, self).dispatch(*args, **kwargs)

    def form_valid(self, form):
        self.object = form.save()
        # When the form supplies a target, relocate the page relative to
        # it (move_to looks like the django-mptt API -- TODO(review)
        # confirm Page uses MPTTModel).
        target = form.cleaned_data.get('target')
        if target:
            position = form.cleaned_data.get('position')
            self.object.move_to(
                target=target,
                position=position
            )
        return redirect('kapua-page-detail', self.object.pk)
|
gpl-3.0
| 8,351,613,602,219,685,000
| 25.549451
| 74
| 0.710472
| false
| 3.264865
| false
| false
| false
|
Pikl/PiklBot
|
cogs/pikl.py
|
1
|
1281
|
import discord
from discord.ext import commands
class Pikl:
    """Super pikly commands."""
    def __init__(self, bot):
        # Reference to the running discord bot, used to send messages.
        self.bot = bot

    @commands.command(hidden=False)
    async def helloworld(self):
        """Hello, world!"""
        await self.bot.say("Hello, world!")

    @commands.command(hidden=False)
    async def postraidembed(self):
        """Posts an embedded message with a bunch of raid info"""
        # Green-coloured embed with raid branding, log/video links and the
        # class spreadsheet; all URLs and text are fixed content.
        embed = discord.Embed(colour=discord.Colour(0x2ecc40), description="Some helpful information to aid and review [Dawn] raids.\n")
        embed.set_image(url="https://cdn.discordapp.com/attachments/350137990959464459/354412417381433354/unknown.png")
        embed.set_thumbnail(url="https://wiki.guildwars2.com/images/5/5e/Legendary_Insight.png")
        embed.set_author(name="Dawn Raid Information", icon_url="http://raid.pikly.uk/images/dawn-logo.png")
        embed.set_footer(text=": 'Stack on Pikl'", icon_url="http://raid.pikly.uk/images/dawn-logo.png")

        embed.add_field(name="Raid Logs & Videos", value="https://raid.pikly.uk/", inline=True)
        embed.add_field(name="Raid Class Spreadsheet", value="[Spreadsheet here](https://docs.google.com/spreadsheets/d/1zm46Jb8UBIoYP1_mewoOvLKopx_Sks9hYGm8OeWaQI8/edit?usp=sharing)", inline=True)
        await self.bot.say(embed=embed)
def setup(bot):
    """Standard cog entry point: register the Pikl cog with *bot*."""
    cog = Pikl(bot)
    bot.add_cog(cog)
|
gpl-3.0
| -9,634,640,449,993,438
| 40.354839
| 191
| 0.737705
| false
| 2.691176
| false
| false
| false
|
jtwaleson/decrypt
|
decrypt/decrypt.py
|
1
|
1745
|
#!/usr/bin/env python
import curses
import time
import fileinput
import random
import string
# Global application state.  NOTE: curses.initscr() runs at import time,
# so importing this module already takes over the terminal.
screen = curses.initscr()
lines = []  # input lines collected so far; printed verbatim on exit
chance = 0.1  # per-cell probability of revealing a character each frame
confirmed_per_line = []  # per line: column indices already revealed
def main():
    """Read lines (files/stdin via fileinput), animating a "decrypt" effect,
    then keep animating with rising reveal probability until all text shows.
    Always restores the terminal and prints the plain text at the end."""
    curses.noecho()
    try:
        # hiding the cursor is not supported on every terminal
        curses.curs_set(0)
    except:
        pass
    screen.keypad(1)
    try:
        for line in fileinput.input():
            confirmed_per_line.append([])
            lines.append(line.rstrip())
            iterate()  # redraw after each newly read line
        fileinput.close()
        # keep drawing frames, nudging the reveal chance up, until done
        while iterate(increase=True):
            pass
        time.sleep(2)
    except KeyboardInterrupt:
        pass
    finally:
        # restore the terminal before emitting the plain-text result
        curses.endwin()
        for line in lines:
            print(line)
def iterate(increase=False):
    """Draw one animation frame.

    Unrevealed cells show a random punctuation glyph in reverse video;
    with probability *chance* each becomes "confirmed" and shows its real
    character from the next frame on.  If *increase* is True the global
    reveal probability is bumped by 0.01 first.
    Returns True while any cell is still unrevealed (more frames needed).
    """
    global chance, confirmed_per_line, lines
    still_random = 0
    if increase:
        chance += 0.01
    screen.erase()
    (y, x) = screen.getmaxyx()
    final_line = len(lines)
    # show only the last y lines when the input is taller than the window
    if final_line > y:
        first_line = final_line - y
    else:
        first_line = 0
    for line_num in range(first_line, final_line):
        line = lines[line_num]
        for col in range(min(x, len(line))):
            try:
                if col not in confirmed_per_line[line_num]:
                    still_random += 1
                    if random.random() < chance:
                        confirmed_per_line[line_num].append(col)
                    screen.addch(line_num - first_line,
                                 col,
                                 random.choice(string.punctuation),
                                 curses.A_REVERSE)
                else:
                    screen.addstr(line_num - first_line, col, line[col])
            except curses.error:
                # BUGFIX: narrowed from a bare 'except:' which swallowed
                # everything (including KeyboardInterrupt, defeating the
                # Ctrl-C handling in main).  curses raises curses.error
                # when writing outside the window / bottom-right cell.
                pass
    screen.refresh()
    time.sleep(0.1)
    return still_random > 0
|
mit
| 3,421,878,440,241,431,600
| 23.928571
| 72
| 0.514613
| false
| 4.05814
| false
| false
| false
|
vdrhtc/Measurement-automation
|
drivers/pyspcm.py
|
1
|
7735
|
import os
import platform
import sys
from ctypes import *
# load registers for easier access
from drivers.py_header.regs import *
# load registers for easier access
from drivers.py_header.spcerr import *
# DMA transfer directions for spcm data transfers
SPCM_DIR_PCTOCARD = 0  # PC -> card
SPCM_DIR_CARDTOPC = 1  # card -> PC

SPCM_BUF_DATA = 1000 # main data buffer for acquired or generated samples
SPCM_BUF_ABA = 2000 # buffer for ABA data, holds the A-DATA (slow samples)
SPCM_BUF_TIMESTAMP = 3000 # buffer for timestamps

# determine bit width of os
oPlatform = platform.architecture()
if (oPlatform[0] == '64bit'):
    bIs64Bit = 1
else:
    bIs64Bit = 0

# define pointer aliases
# ctypes fixed-width integer types and pointers to them, matching the
# C header types used by the spcm driver API.
int8 = c_int8
int16 = c_int16
int32 = c_int32
int64 = c_int64

ptr8 = POINTER (int8)
ptr16 = POINTER (int16)
ptr32 = POINTER (int32)
ptr64 = POINTER (int64)

uint8 = c_uint8
uint16 = c_uint16
uint32 = c_uint32
uint64 = c_uint64

uptr8 = POINTER (uint8)
uptr16 = POINTER (uint16)
uptr32 = POINTER (uint32)
uptr64 = POINTER (uint64)
# Windows
# Platform-dependent loading of the Spectrum driver library and binding of its
# entry points.  On Windows the 32-bit DLL exports stdcall-decorated names
# ("_name@N"), hence the per-bitness getattr; on Linux the shared object uses
# plain cdecl names.
# NOTE(review): ctypes expects the attribute to be spelled `.argtypes`
# (plural); assigning `.argtype` has no effect, so no argument conversion or
# checking happens.  This matches the vendor's historical header — confirm
# call sites pass already-converted ctypes values before changing it.
if os.name == 'nt':
    #sys.stdout.write("Python Version: {0} on Windows\n\n".format (
    #    platform.python_version()))
    # define card handle type
    if (bIs64Bit):
        # for unknown reasons c_void_p gets messed up on Win7/64bit, but this works:
        drv_handle = POINTER(c_uint64)
    else:
        drv_handle = c_void_p
    # Load DLL into memory.
    # use windll because all driver access functions use _stdcall calling convention under windows
    if (bIs64Bit == 1):
        spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win64.dll")
    else:
        spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win32.dll")
    # load spcm_hOpen
    if (bIs64Bit):
        spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
    else:
        spcm_hOpen = getattr (spcmDll, "_spcm_hOpen@4")
    spcm_hOpen.argtype = [c_char_p]
    spcm_hOpen.restype = drv_handle
    # load spcm_vClose
    if (bIs64Bit):
        spcm_vClose = getattr (spcmDll, "spcm_vClose")
    else:
        spcm_vClose = getattr (spcmDll, "_spcm_vClose@4")
    spcm_vClose.argtype = [drv_handle]
    spcm_vClose.restype = None
    # load spcm_dwGetErrorInfo
    if (bIs64Bit):
        spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
    else:
        spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "_spcm_dwGetErrorInfo_i32@16")
    spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
    spcm_dwGetErrorInfo_i32.restype = uint32
    # load spcm_dwGetParam_i32
    if (bIs64Bit):
        spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
    else:
        spcm_dwGetParam_i32 = getattr (spcmDll, "_spcm_dwGetParam_i32@12")
    spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
    spcm_dwGetParam_i32.restype = uint32
    # load spcm_dwGetParam_i64
    if (bIs64Bit):
        spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
    else:
        spcm_dwGetParam_i64 = getattr (spcmDll, "_spcm_dwGetParam_i64@12")
    spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
    spcm_dwGetParam_i64.restype = uint32
    # load spcm_dwSetParam_i32
    if (bIs64Bit):
        spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
    else:
        spcm_dwSetParam_i32 = getattr (spcmDll, "_spcm_dwSetParam_i32@12")
    spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
    spcm_dwSetParam_i32.restype = uint32
    # load spcm_dwSetParam_i64
    if (bIs64Bit):
        spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
    else:
        spcm_dwSetParam_i64 = getattr (spcmDll, "_spcm_dwSetParam_i64@16")
    spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
    spcm_dwSetParam_i64.restype = uint32
    # load spcm_dwSetParam_i64m
    if (bIs64Bit):
        spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
    else:
        spcm_dwSetParam_i64m = getattr (spcmDll, "_spcm_dwSetParam_i64m@16")
    spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
    spcm_dwSetParam_i64m.restype = uint32
    # load spcm_dwDefTransfer_i64
    if (bIs64Bit):
        spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
    else:
        spcm_dwDefTransfer_i64 = getattr (spcmDll, "_spcm_dwDefTransfer_i64@36")
    spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
    spcm_dwDefTransfer_i64.restype = uint32
    # load spcm_dwInvalidateBuf
    if (bIs64Bit):
        spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
    else:
        spcm_dwInvalidateBuf = getattr (spcmDll, "_spcm_dwInvalidateBuf@8")
    spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
    spcm_dwInvalidateBuf.restype = uint32
    # load spcm_dwGetContBuf_i64
    if (bIs64Bit):
        spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
    else:
        spcm_dwGetContBuf_i64 = getattr (spcmDll, "_spcm_dwGetContBuf_i64@16")
    spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
    spcm_dwGetContBuf_i64.restype = uint32
elif os.name == 'posix':
    sys.stdout.write("Python Version: {0} on Linux\n\n".format (platform.python_version()))
    # define card handle type
    if (bIs64Bit):
        drv_handle = POINTER(c_uint64)
    else:
        drv_handle = c_void_p
    # Load DLL into memory.
    # use cdll because all driver access functions use cdecl calling convention under linux
    spcmDll = cdll.LoadLibrary ("libspcm_linux.so")
    # load spcm_hOpen
    spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
    spcm_hOpen.argtype = [c_char_p]
    spcm_hOpen.restype = drv_handle
    # load spcm_vClose
    spcm_vClose = getattr (spcmDll, "spcm_vClose")
    spcm_vClose.argtype = [drv_handle]
    spcm_vClose.restype = None
    # load spcm_dwGetErrorInfo
    spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
    spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
    spcm_dwGetErrorInfo_i32.restype = uint32
    # load spcm_dwGetParam_i32
    spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
    spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
    spcm_dwGetParam_i32.restype = uint32
    # load spcm_dwGetParam_i64
    spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
    spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
    spcm_dwGetParam_i64.restype = uint32
    # load spcm_dwSetParam_i32
    spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
    spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
    spcm_dwSetParam_i32.restype = uint32
    # load spcm_dwSetParam_i64
    spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
    spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
    spcm_dwSetParam_i64.restype = uint32
    # load spcm_dwSetParam_i64m
    spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
    spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
    spcm_dwSetParam_i64m.restype = uint32
    # load spcm_dwDefTransfer_i64
    spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
    spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
    spcm_dwDefTransfer_i64.restype = uint32
    # load spcm_dwInvalidateBuf
    spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
    spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
    spcm_dwInvalidateBuf.restype = uint32
    # load spcm_dwGetContBuf_i64
    spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
    spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
    spcm_dwGetContBuf_i64.restype = uint32
else:
    raise Exception ('Operating system not supported by pySpcm')
|
gpl-3.0
| 5,788,646,918,922,448,000
| 33.225664
| 99
| 0.68287
| false
| 2.709282
| false
| false
| false
|
Dev-Cloud-Platform/Dev-Cloud
|
dev_cloud/web_service/urls/user/environment.py
|
1
|
5340
|
# -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2015] Michał Szczygieł, M4GiK Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
from django.conf.urls import patterns, url, include
from core.utils.decorators import user_permission, vm_permission
from web_service.views.user.enviroment import wizard_setup, generate_dependencies, customize_environment, \
define_environment, summary, validation_process, validation_process_ip, validation_process_resources, \
validation_process_ip_pre, view_environment, environments_list, get_vm_status, destroy_vm, refresh_vm_tasks, \
show_vnc, get_cpu_load, get_ssh_key, view_predefined, customize_predefined_environment, \
define_predefined_environment
# URL routes of the environment-management sub-application.  Every view is
# wrapped in user_permission (logged-in user) or vm_permission (VM-level
# access check).  Django 1.x `patterns(prefix, ...)` API.
main_patterns = patterns('web_service.views.user.enviroment',
                         url(r'^app/create/environment/$', user_permission(wizard_setup),
                             name='personalized_environment'),
                         url(r'^app/create/environment/technology/(?P<technology>\w+)/$',
                             user_permission(generate_dependencies),
                             name='generate_dependencies'),
                         url(
                             r'^app/create/environment/customize/(?P<technology>\w+)/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
                             user_permission(customize_environment), name='customize_environment'),
                         url(r'^app/create/environment/define/(?P<technology>\w+)/(?P<exposed_ip>\w+)/$',
                             user_permission(define_environment), name='define_environment'),
                         url(r'^app/create/environment/summary/$', user_permission(summary), name='summary'),
                         url(r'^app/create/environment/validation_process/(?P<template>\w+)/(?P<exposed_ip>\w+)/$',
                             user_permission(validation_process), name='validation_process'),
                         url(r'^app/create/environment/validation_process_ip/(?P<exposed_ip>\w+)/$',
                             user_permission(validation_process_ip), name='validation_process_ip'),
                         url(r'^app/create/environment/validation_process_ip_pre/(?P<exposed_ip>\w+)/$',
                             user_permission(validation_process_ip_pre), name='validation_process_ip_pre'),
                         url(r'^app/create/environment/validation_process_resources/(?P<template_id>\w+)/$',
                             user_permission(validation_process_resources), name='validation_process_resources'),
                         url(r'^app/environments/$', user_permission(environments_list), name='environments_list'),
                         url(r'^app/environments/(?P<destroy_status>\w+)/$', user_permission(environments_list),
                             name='environments_list'),
                         url(r'^app/environments/show_vm/(?P<vm_id>\w+)/$', vm_permission(view_environment),
                             name='view_environment'),
                         url(r'^app/environments/vm_status/(?P<vm_id>\w+)/$', vm_permission(get_vm_status),
                             name='get_vm_status'),
                         url(r'^app/environments/destroy/(?P<vm_id>\w+)/$', vm_permission(destroy_vm),
                             name='destroy_vm'),
                         url(r'^app/environments/refresh_tasks/(?P<vm_id>\w+)/$', vm_permission(refresh_vm_tasks),
                             name='refresh_vm_tasks'),
                         url(r'^app/environments/show_vm/vnc/(?P<vm_id>\w+)/$', vm_permission(show_vnc),
                             name='show_vnc'),
                         url(r'^app/environments/show_vm/cpu_load/(?P<vm_id>\w+)/$', vm_permission(get_cpu_load),
                             name='get_cpu_load'),
                         url(r'^app/environments/show_vm/get_ssh_key/(?P<vm_id>\w+)/$', vm_permission(get_ssh_key),
                             name='get_ssh_key'),
                         url(r'^app/create/environment/predefined/$', user_permission(view_predefined),
                             name='predefined_environment'),
                         url(
                             r'^app/create/environment/predefined/customize/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
                             user_permission(customize_predefined_environment),
                             name='customize_predefined_environment'),
                         url(
                             r'^app/create/environment/predefined/define/(?P<application>[\w\-]+)/(?P<exposed_ip>\w+)/$',
                             user_permission(define_predefined_environment),
                             name='define_predefined_environment'))
# Mount everything above under the /main/ prefix.
urlpatterns = patterns('', url(r'^main/', include(main_patterns)))
|
apache-2.0
| -6,435,118,396,431,205,000
| 70.173333
| 132
| 0.566317
| false
| 4.357551
| false
| false
| false
|
vertexproject/synapse
|
synapse/tests/test_tools_csvtool.py
|
1
|
6295
|
import csv
from unittest import mock
import synapse.common as s_common
import synapse.telepath as s_telepath
import synapse.tests.utils as s_t_utils
import synapse.tools.csvtool as s_csvtool
csvfile = b'''ipv4,fqdn,notes
1.2.3.4,vertex.link,malware
8.8.8.8,google.com,whitelist
'''
csvstorm = b'''
for ($ipv4, $fqdn, $note) in $rows {
$lib.print("oh hai")
[ inet:dns:a=($fqdn,$ipv4) ]
}
'''
csvfile_missing = b'''fqdn,email,tag
vertex.link,,mytag
google.com,myemail@email.com,
yahoo.com,foo@bar.com,mytag
'''
csvstorm_missing = b'''
for ($fqdn, $email, $tag) in $rows {
$lib.print("hello hello")
[ inet:dns:soa=$lib.guid() :fqdn=$fqdn :email?=$email +?#$tag ]
}
'''
# count is used for test coverage.
csvstorm_export = b'''
test:int $lib.csv.emit($node, $node.props.loc) | count
'''
class CsvToolTest(s_t_utils.SynTest):
    """End-to-end tests for synapse.tools.csvtool (import and export paths)."""

    def _getOldSynVers(self):
        # Patched in place of Proxy._getSynVers to simulate a too-old Cortex.
        return (0, 0, 0)

    async def test_csvtool(self):
        """Import a CSV into a live Cortex via telepath; check node creation
        and the version-compatibility guard."""
        async with self.getTestCore() as core:
            url = core.getLocalUrl()
            dirn = s_common.gendir(core.dirn, 'junk')
            logpath = s_common.genpath(dirn, 'csvtest.log')
            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)
            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)
            argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('oh hai')
            outp.expect('2 nodes')
            # An outdated Cortex must be rejected with a clear message.
            with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
                outp = self.getTestOutp()
                await s_csvtool.main(argv, outp=outp)
                outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')

    async def test_csvtool_missingvals(self):
        """Rows with empty cells still import thanks to ?= / +?# operators."""
        async with self.getTestCore() as core:
            url = core.getLocalUrl()
            dirn = s_common.gendir(core.dirn, 'junk')
            logpath = s_common.genpath(dirn, 'csvtest.log')
            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile_missing)
            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm_missing)
            argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('hello hello')
            outp.expect("'fqdn': 'google.com'")
            outp.expect('3 nodes')

    async def test_csvtool_local(self):
        """--test runs against a temporary local Cortex instead of a URL."""
        with self.getTestDir() as dirn:
            logpath = s_common.genpath(dirn, 'csvtest.log')
            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)
            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)
            argv = ['--csv-header', '--debug', '--test', '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('2 nodes')

    async def test_csvtool_cli(self):
        """--cli drops into an interactive cmdr session after the import."""
        with self.getTestDir() as dirn:
            logpath = s_common.genpath(dirn, 'csvtest.log')
            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            with s_common.genfile(csvpath) as fd:
                fd.write(csvfile)
            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm)
            argv = ['--csv-header', '--debug', '--cli', '--test', '--logfile', logpath, stormpath, csvpath]
            outp = self.getTestOutp()
            cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn',
                                           EOFError(),
                                           ])
            with self.withCliPromptMockExtendOutp(outp):
                with self.withTestCmdr(cmdg):
                    await s_csvtool.main(argv, outp=outp)
            outp.expect('inet:fqdn=google.com')
            outp.expect('2 nodes')

    async def test_csvtool_export(self):
        """--export runs a storm query and writes $lib.csv.emit rows to disk;
        missing props become empty CSV cells."""
        async with self.getTestCore() as core:
            await core.nodes('[ test:int=20 :loc=us ]')
            await core.nodes('[ test:int=30 :loc=cn ]')
            await core.nodes('[ test:int=40 ]')
            url = core.getLocalUrl()
            dirn = s_common.gendir(core.dirn, 'junk')
            csvpath = s_common.genpath(dirn, 'csvtest.csv')
            stormpath = s_common.genpath(dirn, 'csvtest.storm')
            with s_common.genfile(stormpath) as fd:
                fd.write(csvstorm_export)
            # test a few no-no cases
            argv = ['--test', '--export', stormpath, csvpath]
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('--export requires --cortex')
            argv = ['--cortex', url, '--export', stormpath, csvpath, 'lol.csv']
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('--export requires exactly 1 csvfile')
            argv = ['--cortex', url, '--export', stormpath, csvpath]
            outp = self.getTestOutp()
            await s_csvtool.main(argv, outp=outp)
            outp.expect('Counted 3 nodes.')
            outp.expect('3 csv rows')
            with open(csvpath, 'r') as fd:
                rows = [row for row in csv.reader(fd)]
                self.eq(rows, (['20', 'us'], ['30', 'cn'], ['40', '']))
            with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
                outp = self.getTestOutp()
                await s_csvtool.main(argv, outp=outp)
                outp.expect(f'Cortex version 0.0.0 is outside of the csvtool supported range')
|
apache-2.0
| 5,403,740,842,168,833,000
| 31.448454
| 107
| 0.554408
| false
| 3.362714
| true
| false
| false
|
Jumpscale/jumpscale_core8
|
lib/JumpScale/tools/issuemanager/models/repoCollection.py
|
1
|
2535
|
from JumpScale import j
# Collection base class supplied by the JumpScale capnp model framework;
# provides self._index and self.get() used below.
base = j.data.capnp.getModelBaseClassCollection()
class RepoCollection(base):
    """
    Collection of repo models, looked up through a "owner:name:id:source"
    string index.
    """

    def list(self, owner=0, name='', id=0, source="", returnIndex=False):
        """
        List all keys of repo models matching the given params; empty (or
        zero, for id) values act as wildcards in the index regex.

        @param owner: id of the owner the repo belongs to.
        @param name: name of the repo.
        @param id: repo id in db.
        @param source: source of the remote database.
        @param returnIndex: bool, also return the index used.
        """
        # NOTE(review): the default owner=0 is never normalized below (only
        # "" is mapped to ".*"), so list() with no owner matches "0:..." —
        # confirm whether the default should be "" like in find().
        if owner == "":
            owner = ".*"
        if name == "":
            name = ".*"
        if id == "" or id == 0:
            id = ".*"
        if source == "":
            source = ".*"
        regex = "%s:%s:%s:%s" % (owner, name, id, source)
        return self._index.list(regex, returnIndex=returnIndex)

    def find(self, owner='', name='', id=0, milestone=0, member=0, label='', source=""):
        """
        Return all repo model instances matching the given params.

        @param owner: id of the owner the repo belongs to.
        @param name: name of the repo.
        @param id: repo id in db.
        @param milestone: id of a milestone that must exist in the repo.
        @param member: userKey of a member that must exist in the repo.
        @param label: label that must exist in the repo.
        @param source: source of the remote database.
        """
        res = []
        for key in self.list(owner=owner, name=name, id=id, source=source):
            res.append(self.get(key))
        # Post-filter; iterate over reversed copies so removal while
        # iterating is safe.
        if milestone:
            for model in res[::-1]:
                for milestone_model in model.dictFiltered.get('milestones', []):
                    if milestone == milestone_model['id']:
                        break
                else:
                    res.remove(model)
        if member:
            for model in res[::-1]:
                for member_model in model.dictFiltered.get('members', []):
                    if member == member_model['userKey']:
                        break
                else:
                    res.remove(model)
        if label:
            for model in res[::-1]:
                if (label not in model.dictFiltered.get('labels', [])) or not model.dictFiltered.get('labels', False):
                    res.remove(model)
        return res

    def getFromId(self, id):
        """Return the repo model whose id is indexed under "issue_id"."""
        key = self._index.lookupGet("issue_id", id)
        return self.get(key)
|
apache-2.0
| 7,284,187,035,742,835,000
| 32.8
| 118
| 0.523077
| false
| 4.21797
| false
| false
| false
|
fzza/rdio-sock
|
src/rdiosock/metadata.py
|
1
|
3693
|
from rdiosock.exceptions import RdioApiError
from rdiosock.objects.collection import RdioList
class SEARCH_TYPES:
    """Bit flags selecting which media types a metadata search covers."""
    NONE = 0
    ARTIST = 1
    ALBUM = 2
    TRACK = 4
    PLAYLIST = 8
    USER = 16
    LABEL = 32

    # Union of every searchable type.
    ALL = ARTIST | ALBUM | TRACK | PLAYLIST | USER | LABEL

    # Flag value -> API type name sent to the Rdio service.
    _MAP = {
        ARTIST: 'Artist',
        ALBUM: 'Album',
        TRACK: 'Track',
        PLAYLIST: 'Playlist',
        USER: 'User',
        LABEL: 'Label'
    }

    @classmethod
    def parse(cls, value):
        """Normalize *value* (bitmask int, or iterable of flag values) into
        the list of API type names, in declaration order."""
        flags = cls._parse_bit(value) if type(value) is int else value
        return [cls._MAP[flag] for flag in flags]

    @classmethod
    def _parse_bit(cls, value):
        """Expand the bitmask *value* into the individual flags it contains."""
        return [flag for flag in cls._MAP if (value & flag) == flag]
class SEARCH_EXTRAS:
    """Bit flags selecting which extra fields a search result includes."""
    NONE = 0
    LOCATION = 1
    USERNAME = 2
    STATIONS = 4
    DESCRIPTION = 8
    FOLLOWER_COUNT = 16
    FOLLOWING_COUNT = 32
    FAVORITE_COUNT = 64
    SET_COUNT = 128
    ICON_250x375 = 256
    ICON_500x750 = 512
    ICON_250x333 = 1024
    ICON_500x667 = 2048

    # Union of every available extra.
    ALL = (LOCATION | USERNAME | STATIONS | DESCRIPTION |
           FOLLOWER_COUNT | FOLLOWING_COUNT | FAVORITE_COUNT | SET_COUNT |
           ICON_250x375 | ICON_500x750 | ICON_250x333 | ICON_500x667)

    # Flag value -> extra field name sent to the Rdio service.
    _MAP = {
        LOCATION: 'location',
        USERNAME: 'username',
        STATIONS: 'stations',
        DESCRIPTION: 'description',
        FOLLOWER_COUNT: 'followerCount',
        FOLLOWING_COUNT: 'followingCount',
        FAVORITE_COUNT: 'favoriteCount',
        SET_COUNT: 'setCount',
        ICON_250x375: 'icon250x375',
        ICON_500x750: 'icon500x750',
        ICON_250x333: 'icon250x333',
        ICON_500x667: 'icon500x667'
    }

    @classmethod
    def parse(cls, value):
        """Normalize *value* (bitmask int, or iterable of flag values) into
        the list of extra field names, in declaration order."""
        flags = cls._parse_bit(value) if type(value) is int else value
        return [cls._MAP[flag] for flag in flags]

    @classmethod
    def _parse_bit(cls, value):
        """Expand the bitmask *value* into the individual flags it contains."""
        return [flag for flag in cls._MAP if (value & flag) == flag]
class RdioMetadata(object):
    """Metadata (search) API wrapper bound to an RdioSock session."""

    def __init__(self, sock):
        """
        :type sock: RdioSock
        """
        self._sock = sock

    def search(self, query, search_types=SEARCH_TYPES.ALL, search_extras=SEARCH_EXTRAS.ALL):
        """Search for media item.

        :param query: Search query
        :type query: str
        :param search_types: Search type (:class:`rdiosock.metadata.SEARCH_TYPES` bitwise-OR or list)
        :type search_types: int or list of int
        :param search_extras: Search result extras to include (:class:`rdiosock.metadata.SEARCH_EXTRAS` bitwise-OR or list)
        :type search_extras: int or list of int
        :return: :class:`RdioList` of matches.
        :raises RdioApiError: when the service reports an error status.
        :raises NotImplementedError: for result types other than 'list'.
        """
        result = self._sock._api_post('search', {
            'query': query,
            'types[]': SEARCH_TYPES.parse(search_types)
        }, secure=False, extras=SEARCH_EXTRAS.parse(search_extras))
        if result['status'] == 'error':
            raise RdioApiError(result)
        result = result['result']
        if result['type'] == 'list':
            return RdioList.parse(result)
        else:
            # Only list-shaped search results are supported so far.
            raise NotImplementedError()
|
gpl-3.0
| 9,067,494,512,009,808,000
| 22.08125
| 123
| 0.516382
| false
| 3.681954
| false
| false
| false
|
Hiestaa/3D-Lsystem
|
lsystem/Tree7.py
|
1
|
1145
|
from lsystem.LSystem import LSystem
import math
class Tree7(LSystem):
	"""Tree-shaped fractal, variant 7 (original French: "Fractale en forme d'arbre v7")."""
	def defineParams(self):
		# Identity and growth parameters of the L-system.
		self.LSName = "Tree7"
		self.LSAngle = math.pi / 4      # rotation step: 45 degrees
		self.LSSegment = 100            # base segment length
		self.LSSteps = 9                # number of rewriting iterations
		self.LSStartingString = "T(x)"  # axiom
		self.LSStochastic = False
		self.LSStochRange = 0.2
	def createVars(self):
		# Map each symbol of the alphabet to a turtle action.
		self.LSVars = {
			'F': self.turtle.forward,
			'T': self.turtle.forward,
			'+': self.turtle.rotZ,
			'-': self.turtle.irotZ,
			'^': self.turtle.rotY,
			'&': self.turtle.irotY,
			'<': self.turtle.rotX,
			'>': self.turtle.irotX,
			'|': self.turtle.rotX,
			'[': self.turtle.push,
			']': self.turtle.pop,
			'I': self.turtle.setColor,
			'Y': self.turtle.setColor
		}
		# Default argument passed to each symbol's action ('I'/'Y' are RGB colors).
		self.LSParams = {
			'x': self.LSSegment,
			'+': self.LSAngle,
			'-': self.LSAngle,
			'&': self.LSAngle,
			'^': self.LSAngle,
			'<': self.LSAngle,
			'>': self.LSAngle,
			'|': self.LSAngle * 2,
			'[': None,
			']': None,
			'I': (0.5,0.25,0),
			'Y': (0, 0.5, 0)
		}
	def createRules(self):
		# Production rules: trunk shrinks, each F branches in four directions.
		self.LSRules = {
			"T(x)": "IT(x*0.3)F(x*0.3)",
			"F(x)": "IF(x)[+YF(x*0.5)][-YF(x*0.5)][<YF(x*0.5)][>YF(x*0.5)]"
		}
|
mit
| -4,130,226,436,172,089,000
| 20.603774
| 69
| 0.558952
| false
| 2.136194
| false
| false
| false
|
sagiss/sardana
|
src/sardana/taurus/qt/qtgui/extra_hkl/hklscan.py
|
1
|
15114
|
#!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
__docformat__ = 'restructuredtext'
import sys
import sardana
from taurus.external.qt import Qt
from taurus.qt.qtgui.container import TaurusWidget
from taurus.qt.qtgui.display import TaurusLabel
from taurus.qt.qtgui.base import TaurusBaseWidget
from taurus.external.qt import QtCore, QtGui
import taurus.core
from taurus.qt.qtcore.communication import SharedDataManager
from taurus.qt.qtgui.input import TaurusValueLineEdit
from displayscanangles import DisplayScanAngles
import taurus.core.util.argparse
import taurus.qt.qtgui.application
from taurus.qt.qtgui.util.ui import UILoadable
from PyTango import *
from sardana.taurus.qt.qtgui.extra_macroexecutor import TaurusMacroExecutorWidget, TaurusSequencerWidget, \
TaurusMacroConfigurationDialog, \
TaurusMacroDescriptionViewer, DoorOutput, DoorDebug, DoorResult
class EngineModesComboBox(Qt.QComboBox, TaurusBaseWidget):
    """ComboBox representing list of engine modes"""

    def __init__(self, parent=None):
        name = self.__class__.__name__
        # Taurus double-init idiom: initialize the Qt base without keyword
        # args, then the taurus mixin with the widget name.
        self.call__init__wo_kw(Qt.QComboBox, parent)
        self.call__init__(TaurusBaseWidget, name)
        self.setSizeAdjustPolicy(Qt.QComboBox.AdjustToContentsOnFirstShow)
        self.setToolTip("Choose a engine mode ...")
        QtCore.QMetaObject.connectSlotsByName(self)

    def loadEngineModeNames(self, enginemodes):
        """Replace the combo entries with the given mode name list."""
        self.clear()
        self.addItems(enginemodes)
@UILoadable(with_ui="_ui")
class HKLScan(TaurusWidget):
def __init__(self, parent=None, designMode=False):
TaurusWidget.__init__(self, parent, designMode=designMode)
self.loadUi(filename="hklscan.ui")
self.connect(self._ui.hklStartScanButton,
Qt.SIGNAL("clicked()"), self.start_hklscan)
self.connect(self._ui.hklStopScanButton,
Qt.SIGNAL("clicked()"), self.stop_hklscan)
self.connect(self._ui.hklDisplayAnglesButton,
Qt.SIGNAL("clicked()"), self.display_angles)
self.connect(self._ui.MacroServerConnectionButton, Qt.SIGNAL(
"clicked()"), self.open_macroserver_connection_panel)
# Create a global SharedDataManager
Qt.qApp.SDM = SharedDataManager(self)
@classmethod
def getQtDesignerPluginInfo(cls):
ret = TaurusWidget.getQtDesignerPluginInfo()
ret['module'] = 'hklscan'
ret['group'] = 'Taurus Containers'
ret['container'] = ':/designer/frame.png'
ret['container'] = True
return ret
def setModel(self, model):
if model != None:
self.device = taurus.Device(model)
self.pseudo_motor_names = []
for motor in self.device.hklpseudomotorlist:
self.pseudo_motor_names.append(motor.split(' ')[0])
self.h_device_name = self.pseudo_motor_names[0]
self.h_device = taurus.Device(self.h_device_name)
self.k_device_name = self.pseudo_motor_names[1]
self.k_device = taurus.Device(self.k_device_name)
self.l_device_name = self.pseudo_motor_names[2]
self.l_device = taurus.Device(self.l_device_name)
# Add dynamically the angle widgets
motor_list = self.device.motorlist
motor_names = []
for motor in self.device.motorlist:
motor_names.append(motor.split(' ')[0])
self.nb_motors = len(motor_list)
angles_labels = []
angles_names = []
angles_taurus_label = []
gap_x = 800 / self.nb_motors
try:
angles_names = self.device.motorroles
except: # Only for compatibility
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
for i in range(0, self.nb_motors):
angles_labels.append(QtGui.QLabel(self))
angles_labels[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 290, 51, 17))
alname = "angleslabel" + str(i)
angles_labels[i].setObjectName(alname)
angles_labels[i].setText(QtGui.QApplication.translate(
"HKLScan", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
angles_taurus_label.append(TaurusLabel(self))
angles_taurus_label[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 320, 81, 19))
atlname = "anglestauruslabel" + str(i)
angles_taurus_label[i].setObjectName(atlname)
angles_taurus_label[i].setModel(motor_names[i] + "/Position")
# Set model to hkl display
hmodel = self.h_device_name + "/Position"
self._ui.taurusValueLineH.setModel(hmodel)
self._ui.taurusLabelValueH.setModel(hmodel)
kmodel = self.k_device_name + "/Position"
self._ui.taurusValueLineK.setModel(kmodel)
self._ui.taurusLabelValueK.setModel(kmodel)
lmodel = self.l_device_name + "/Position"
self._ui.taurusValueLineL.setModel(lmodel)
self._ui.taurusLabelValueL.setModel(lmodel)
# Set model to engine and modes
enginemodel = model + '/engine'
self._ui.taurusLabelEngine.setModel(enginemodel)
enginemodemodel = model + '/enginemode'
self._ui.taurusLabelEngineMode.setModel(enginemodemodel)
self.enginemodescombobox = EngineModesComboBox(self)
self.enginemodescombobox.setGeometry(QtCore.QRect(150, 445, 221, 27))
self.enginemodescombobox.setObjectName("enginemodeslist")
self.enginemodescombobox.loadEngineModeNames(self.device.hklmodelist)
self.connect(self.enginemodescombobox, Qt.SIGNAL(
"currentIndexChanged(QString)"), self.onModeChanged)
def onModeChanged(self, modename):
if self.device.engine != "hkl":
self.device.write_attribute("engine", "hkl")
self.device.write_attribute("enginemode", str(modename))
def start_hklscan(self):
start_hkl = []
stop_hkl = []
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
sample_time = float(self._ui.LineEditSampleTime.text())
dim = 0
macro_name = ["ascan", "a2scan", "a3scan"]
macro_command = []
index_to_scan = []
if self.door_device != None:
for i in range(0, 3):
if start_hkl[i] != stop_hkl[i]:
dim = dim + 1
index_to_scan.append(i)
if dim > 0:
macro_command.append(macro_name[dim - 1])
for i in range(len(index_to_scan)):
macro_command.append(
str(self.pseudo_motor_names[index_to_scan[i]]))
macro_command.append(str(start_hkl[index_to_scan[i]]))
macro_command.append(str(stop_hkl[index_to_scan[i]]))
macro_command.append(str(nb_points))
macro_command.append(str(sample_time))
self.door_device.RunMacro(macro_command)
def stop_hklscan(self):
self.door_device.StopMacro()
def display_angles(self):
xangle = []
for i in range(0, 6):
xangle.append(40 + i * 100)
yhkl = 50
tr = self.device.selectedtrajectory
w = DisplayScanAngles()
angles_labels = []
angles_names = []
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
dsa_label = []
for i in range(0, self.nb_motors):
dsa_label.append(QtGui.QLabel(w))
dsa_label[i].setGeometry(QtCore.QRect(xangle[i], yhkl, 51, 20))
label_name = "dsa_label_" + str(i)
dsa_label[i].setObjectName(label_name)
dsa_label[i].setText(QtGui.QApplication.translate(
"Form", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
start_hkl = []
stop_hkl = []
missed_values = 0
# TODO: This code will raise exception if one of the line edits is empty.
# But not all dimensions (H & K & L) are obligatory. One could try
# to display angles of just 1 or 2 dimensional scan.
try:
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
except:
nb_points = -1
missed_values = 1
increment_hkl = []
if nb_points > 0:
for i in range(0, 3):
increment_hkl.append((stop_hkl[i] - start_hkl[i]) / nb_points)
taurusValueAngle = []
for i in range(0, nb_points + 1):
hkl_temp = []
for j in range(0, 3):
hkl_temp.append(start_hkl[j] + i * increment_hkl[j])
no_trajectories = 0
try:
self.device.write_attribute("computetrajectoriessim", hkl_temp)
except:
no_trajectories = 1
if not no_trajectories:
angles_list = self.device.trajectorylist[tr]
taurusValueAngle.append([])
for iangle in range(0, self.nb_motors):
taurusValueAngle[i].append(TaurusValueLineEdit(w))
taurusValueAngle[i][iangle].setGeometry(
QtCore.QRect(xangle[iangle], yhkl + 30 * (i + 1), 80, 27))
taurusValueAngle[i][iangle].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i) + "_" + str(iangle)
taurusValueAngle[i][iangle].setObjectName(tva_name)
taurusValueAngle[i][iangle].setValue(
"%10.4f" % angles_list[iangle])
else:
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[i].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30 * (i + 1), self.nb_motors * 120, 27))
taurusValueAngle[i].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i)
taurusValueAngle[i].setObjectName(tva_name)
taurusValueAngle[i].setValue(
"... No angle solution for hkl values ...")
# TODO: not all dimensions (H & K & L) are obligatory. One could try
# to display angles of just 1 or 2 dimensional scan.
if nb_points == -1:
nb_points = 0
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[0].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30, self.nb_motors * 120, 27))
taurusValueAngle[0].setReadOnly(True)
tva_name = "taurusValueAngle"
taurusValueAngle[0].setObjectName(tva_name)
taurusValueAngle[0].setValue(
"... No scan parameters filled. Fill them in the main window ...")
w.resize(self.nb_motors * 140, 120 + nb_points * 40)
w.show()
w.show()
def open_macroserver_connection_panel(self):
        """Open the MacroServer/Door configuration dialog.

        The dialog is wired into the shared data manager (``Qt.qApp.SDM``)
        so that name changes propagate in both directions:
        ``macroserverName``/``doorName`` updates flow into the dialog, and
        the dialog's own changed-signals are published back. Door changes
        are also routed to ``self.onDoorChanged`` so this widget keeps its
        Door device proxy in sync.
        """
        w = TaurusMacroConfigurationDialog(self)
        Qt.qApp.SDM.connectReader("macroserverName", w.selectMacroServer)
        Qt.qApp.SDM.connectReader("doorName", w.selectDoor)
        Qt.qApp.SDM.connectReader("doorName", self.onDoorChanged)
        Qt.qApp.SDM.connectWriter(
            "macroserverName", w, 'macroserverNameChanged')
        Qt.qApp.SDM.connectWriter("doorName", w, 'doorNameChanged')
        w.show()
def onDoorChanged(self, doorName):
        """Re-create the Door device proxy when a different door is selected.

        No-op when *doorName* matches the currently tracked name.
        """
        if doorName != self.door_device_name:
            self.door_device_name = doorName
            self.door_device = taurus.Device(doorName)
def main():
    """Entry point: parse CLI arguments and launch the HKLScan window.

    Expects the diffractometer controller model name as the first
    positional argument and, optionally, a Door device name as the
    second. Exits via ``sys.exit`` with the Qt event loop's return code.
    """
    parser = taurus.core.util.argparse.get_taurus_parser()
    parser.usage = "%prog <model> [door_name]"
    parser.set_description("a taurus application for performing hkl scans")
    app = taurus.qt.qtgui.application.TaurusApplication(cmd_line_parser=parser,
                                                        app_version=sardana.Release.version)
    app.setApplicationName("hklscan")
    args = app.get_command_line_args()
    if len(args) < 1:
        # parser.error() prints the message and exits.
        msg = "model not set (requires diffractometer controller)"
        parser.error(msg)
    w = HKLScan()
    w.model = args[0]
    w.setModel(w.model)
    # No Door connection yet; established below only if a name was given.
    w.door_device = None
    w.door_device_name = None
    if len(args) > 1:
        w.onDoorChanged(args[1])
    else:
        # Parenthesized print works on both Python 2 and 3; the original
        # bare print statement was a SyntaxError under Python 3.
        print("WARNING: Not door name supplied. Connection to MacroServer/Door not automatically done")
    w.show()
    sys.exit(app.exec_())
# Legacy manual-launch snippet removed: main() already performs the
# argument parsing, model assignment and event-loop startup it duplicated.
if __name__ == "__main__":
    main()
|
lgpl-3.0
| 5,986,230,292,049,450,000
| 37.070529
| 119
| 0.596335
| false
| 3.596003
| false
| false
| false
|
josephxsxn/alchemists_notepad
|
Tests.py
|
1
|
6304
|
# Manual smoke-test script for the alchemists_notepad Object/Routine
# modules: each section constructs objects and prints their state for
# visual inspection (no assertions).
#List all ENUMS
from Object.Ingredient import Ingredient
for i in Ingredient:
    print(i)
from Object.PotionColor import PotionColor
for r in PotionColor:
    print(r)
from Object.PotionSign import PotionSign
for r in PotionSign:
    print(r)
#//TODO
#NEED TO ADD ALCHEMICAL ENUMS HERE
#Make a Potion and Fetch its values
from Object.Potion import Potion
from Object.PotionColor import PotionColor
from Object.PotionSign import PotionSign
# Potion signature appears to be (ingredient1, ingredient2, color, sign).
flowertoad = Potion(Ingredient.TOAD, Ingredient.FLOWER, PotionColor.RED, PotionSign.POSITIVE)
print(flowertoad.get_ingredients())
print(flowertoad.get_color())
print(flowertoad.get_sign())
###Put some Potions in the List and Get back
from Object.PotionList import PotionList
polist = PotionList()
polist.add_potion(flowertoad)
pores = polist.get_potions()
for po in pores:
    print(po.get_ingredients())
    print(po.get_color())
    print(po.get_sign())
#Get an exact one from the list
pores = polist.get_potion(0)
print(pores.get_ingredients())
print(pores.get_color())
print(pores.get_sign())
#fetch one that doesnt exist from the list
# NOTE(review): presumably get_potion() returns None for an out-of-range
# index rather than raising — confirm against PotionList.
pores = polist.get_potion(1)
print(pores)
#make an few Alchemicals
from Object.Alchemical import Alchemical
from Object.AlchemicalColor import AlchemicalColor
from Object.AlchemicalSign import AlchemicalSign
from Object.AlchemicalSize import AlchemicalSize
#triplet one
redposlarge = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.LARGE)
bluenegsmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
greennegsmall = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
#triplet two
redpossmall = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
bluepossmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
# NOTE(review): the variable name says "large" but the size passed is
# SMALL — confirm whether AlchemicalSize.LARGE was intended here.
greenposlarge = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
print('T1 ' + str(redposlarge.get_color()) + ' ' + str(redposlarge.get_sign()) + ' ' + str(redposlarge.get_size()))
print('T1 ' + str(bluenegsmall.get_color()) + ' ' + str(bluenegsmall.get_sign()) + ' ' + str(bluenegsmall.get_size()))
print('T1 ' + str(greennegsmall.get_color()) + ' ' + str(greennegsmall.get_sign()) + ' ' + str(greennegsmall.get_size()))
print('T2 ' + str(redpossmall.get_color()) + ' ' + str(redpossmall.get_sign()) + ' ' + str(redpossmall.get_size()))
print('T2 ' + str(bluepossmall.get_color()) + ' ' + str(bluepossmall.get_sign()) + ' ' + str(bluepossmall.get_size()))
print('T2 ' + str(greenposlarge.get_color()) + ' ' + str(greenposlarge.get_sign()) + ' ' + str(greenposlarge.get_size()))
#make a Triplet
from Object.AlchemicalTriplet import AlchemicalTriplet
triplet_one = AlchemicalTriplet([redposlarge, bluenegsmall, greennegsmall])
triplet_one_list = triplet_one.get_alchemicals()
for a in triplet_one_list:
    print('Triplet_ONE ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
triplet_two = AlchemicalTriplet([redpossmall, bluepossmall, greenposlarge])
triplet_two_list = triplet_two.get_alchemicals()
print(triplet_two_list)
for b in triplet_two_list:
    print('Triplet_TWO ' + str(b.get_color()) + ' ' + str(b.get_sign()) + ' ' + str(b.get_size()))
#make some ingredients and properties
from Object.IngredientProperties import IngredientProperties
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options([triplet_one])
ip_triplet_list = ip.get_alchemical_options()
#for given ingredient list all triplet props
for l in ip_triplet_list:
    for a in l.get_alchemicals():
        print('IngredientProps ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
#Alchemical Combinations Test
from Routine.AlchemicalCombinations import AlchemicalCombinations
ingredient_dic = {Ingredient.TOAD : ip}
print(ingredient_dic.keys())
triplet_list = ingredient_dic[Ingredient.TOAD].get_alchemical_options()
for triplet in triplet_list:
    for a in triplet.get_alchemicals():
        print('AC Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
ac = AlchemicalCombinations()
# reduce_potion_alchemicals() returns a dict keyed by Ingredient whose
# values are triplet lists (both potion ingredients appear as keys below).
res = ac.reduce_potion_alchemicals(polist.get_potion(0), ingredient_dic)
print(polist.get_potion(0).get_ingredients())
print(polist.get_potion(0).get_sign())
print(polist.get_potion(0).get_color())
print(res.keys())
triplet_list = res[Ingredient.TOAD]
for triplet in triplet_list:
    for a in triplet.get_alchemicals():
        print('Filtered Toad Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
print(len(res[Ingredient.TOAD]))
print(len(res[Ingredient.FLOWER]))
#triplet_list = res[Ingredient.FLOWER]
#for triplet in triplet_list:
#    for a in triplet.get_alchemicals():
#        print('Filtered Flower Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
ip = IngredientProperties(Ingredient.FLOWER)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options(res[Ingredient.FLOWER])
ingredient_dic[Ingredient.FLOWER] = ip
print('TOAD LEN ' + str(len(ingredient_dic[Ingredient.TOAD].get_alchemical_options())))
print('FLOWER LEN ' + str(len(ingredient_dic[Ingredient.FLOWER].get_alchemical_options())))
initalTriplets = ac.inital_alchemical_options()
print(len(initalTriplets))
print(len(ac.potion_only_filter(initalTriplets, polist.get_potion(0).get_color(), polist.get_potion(0).get_sign())))
#################
###NEUTRAL POTION
#################
# Exercises the reduction logic with a NEUTRAL color/sign result.
herbtoad = Potion(Ingredient.TOAD, Ingredient.HERB, PotionColor.NEUTRAL, PotionSign.NEUTRAL)
polist.add_potion(herbtoad)
#ac2 = AlchemicalCombinations()
res = ac.reduce_potion_alchemicals(herbtoad, ingredient_dic)
print(polist.get_potion(1).get_ingredients())
print(polist.get_potion(1).get_sign())
print(polist.get_potion(1).get_color())
print(res.keys())
print('TOAD LEN RES: ' + str(len(res[Ingredient.TOAD])))
print('HERB LEN RES: ' + str(len(res[Ingredient.HERB])))
# Fold the reduced options back into the ingredient dictionary.
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.TOAD])
ingredient_dic[Ingredient.TOAD] = ip
ip = IngredientProperties(Ingredient.HERB)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.HERB])
ingredient_dic[Ingredient.HERB] = ip
print(ingredient_dic.keys())
|
apache-2.0
| -8,223,180,517,559,525,000
| 39.410256
| 121
| 0.740641
| false
| 2.636554
| false
| false
| false
|
domain51/d51.django.apps.logger
|
d51/django/apps/logger/tests/views.py
|
1
|
1154
|
import datetime
from django.test import TestCase
from django.test.client import Client
from ..models import Hit
from .utils import build_hit_url, random_url
class TestOfHitView(TestCase):
    """Exercises the hit-logging redirect view."""
    def test_logs_hit(self):
        """A GET against the hit URL creates a Hit row for that URL."""
        url = random_url()
        Client().get(build_hit_url(url))
        # get() raises Hit.DoesNotExist (test failure) if no hit was logged.
        Hit.objects.get(url=url)
    def test_stores_current_time(self):
        """The logged hit records (roughly) the current timestamp."""
        url = random_url()
        Client().get(build_hit_url(url))
        hit = Hit.objects.get(url=url)
        # assertTrue/assertEqual replace the assert_/assertEquals aliases,
        # which are deprecated and removed in Python 3.12's unittest.
        self.assertTrue(isinstance(hit.created_on, datetime.datetime))
        self.assertTrue((datetime.datetime.now() - hit.created_on).seconds < 1,
            "Check creation time, might fail on slow machines/network connections.")
    def test_redirects_to_url(self):
        """The view responds with a 302 pointing back at the original URL."""
        url = random_url()
        response = Client().get(build_hit_url(url))
        self.assertEqual(response.status_code, 302)
        # TODO: refactor this - we can't use assertRedirect() because it
        # tries to load crap, but this test should be simplified
        self.assertEqual(response._headers['location'][1], url, "ensure redirection took place")
|
gpl-3.0
| -987,452,888,625,049,100
| 36.225806
| 97
| 0.652513
| false
| 3.796053
| true
| false
| false
|
roam/machete
|
machete/endpoints.py
|
1
|
25618
|
# -*- coding: utf-8 -*-
from __future__ import (unicode_literals, print_function, division,
absolute_import)
import sys
import hashlib
from contextlib import contextmanager
from django.views.decorators.csrf import csrf_exempt
from django.db import transaction, models
from django.views.generic import View
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.utils.http import quote_etag, parse_etags
from .serializers import serialize
from .urls import create_resource_view_name
from .exceptions import (JsonApiError, MissingRequestBody, InvalidDataFormat,
IdMismatch, FormValidationError)
from .utils import (RequestContext, RequestWithResourceContext, pluck_ids,
RequestPayloadDescriptor)
from . import compat, json
@contextmanager
def not_atomic(using=None):
    """No-op stand-in for ``transaction.atomic`` on read-only requests.

    Accepts the same ``using`` argument as the real thing but performs
    no setup or teardown whatsoever.
    """
    yield None
class GetEndpoint(View):
    """
    Extends a generic View to provide support for retrieving resources.
    Some methods might seem convoluted, but they're mostly built that
    way to provide useful points of extension/override. Methods are
    rarely passed all information, but request-method methods
    (get, post,...) should provide a context object containing the
    necessary information under ``self.context``.
    """
    # Class-level configuration knobs; subclasses override as needed.
    context = None
    content_type = 'application/json' # Default to this for now; works better in browsers
    methods = ['get']
    pks_url_key = 'pks'
    pk_field = 'pk'
    queryset = None
    model = None
    form_class = None
    filter_class = None
    include_link_to_self = False
    # Model attribute used to build an ETag; None disables ETag handling.
    etag_attribute = None
    def __init__(self, *args, **kwargs):
        super(GetEndpoint, self).__init__(*args, **kwargs)
        # Django uses http_method_names to know which methods are
        # supported, we always add options on top which will advertise
        # the actual methods we support.
        self.http_method_names = self.get_methods() + ['options']
    @classmethod
    def endpoint(cls, **initkwargs):
        """Return a URL-routable view callable with CSRF checks disabled."""
        return csrf_exempt(cls.as_view(**initkwargs))
    def dispatch(self, request, *args, **kwargs):
        # Override dispatch to enable the handling of errors we can
        # handle.
        # Because Django 1.4 only sets the request parameters in
        # dispatch we'll set them right now ourselves.
        self.request = request
        self.args = args
        self.kwargs = kwargs
        # Mutating verbs run inside a DB transaction (see context_manager).
        manager, m_args, m_kwargs = self.context_manager()
        try:
            with manager(*m_args, **m_kwargs):
                return super(GetEndpoint, self).dispatch(request, *args, **kwargs)
        except Exception as error:
            et, ei, tb = sys.exc_info()
            return self.handle_error(error, tb)
    def options(self, request, *args, **kwargs):
        # From the JSON API FAQ:
        # http://jsonapi.org/faq/#how-to-discover-resource-possible-actions
        self.context = self.create_get_context(request)
        actions = self.possible_actions()
        return HttpResponse(','.join(a.upper() for a in actions))
    def possible_actions(self):
        """
        Returns a list of allowed methods for this endpoint.
        You can use the context (a GET context) to determine what's
        possible. By default this simply returns all allowed methods.
        """
        return self.get_methods()
    def get(self, request, *args, **kwargs):
        """Serve a single resource or a collection, honouring ETags."""
        self.context = self.create_get_context(request)
        if not self.has_etag_changed():
            # Client's cached copy is still valid.
            content_type = self.get_content_type()
            return HttpResponse(status=304, content_type=content_type)
        collection = False
        if self.context.requested_single_resource:
            data = self.get_resource()
        else:
            data = self.get_resources()
            collection = True
        return self.create_http_response(data, collection=collection, compound=True)
    def has_etag_changed(self):
        """Return False only when the request's If-None-Match matches."""
        if not self.etag_attribute:
            return True
        etag = self.generate_etag()
        if not etag:
            return True
        match = self.request.META.get('HTTP_IF_NONE_MATCH')
        if match:
            values = parse_etags(match)
            for value in values:
                # Django appends ";gzip" when gzip is enabled
                clean_value = value.split(';')[0]
                if clean_value == '*' or clean_value == etag:
                    return False
        return True
    def generate_etag(self):
        """MD5 over the joined ``etag_attribute`` values of the filtered queryset."""
        if not self.etag_attribute:
            return None
        qs = self.get_filtered_queryset()
        values = qs.values_list(self.etag_attribute, flat=True)
        etag = ','.join('%s' % value for value in values)
        return hashlib.md5(etag).hexdigest()
    def create_http_response(self, data, collection=False, compound=False):
        """
        Creates a HTTP response from the data.
        The data might be an (a) HttpResponse object, (b) dict or (c)
        object that can be serialized.
        HttpResponse objects will simply be returned without further
        processing, dicts will be turned into JSON and returned as a
        response using the status attribute of the context. Other
        objects will be serialized using ``serialize`` method.
        """
        if isinstance(data, HttpResponse):
            # No more processing necessary
            return data
        if isinstance(data, dict):
            # How nice. Use it!
            response_data = data
        else:
            # Everything else: run it through the serialization process
            response_data = self.serialize(data, collection=collection, compound=compound)
        json_data = self.create_json(response_data, indent=2)
        status = self.context.status
        content_type = self.get_content_type()
        response = HttpResponse(json_data, content_type=content_type, status=status)
        return self.postprocess_response(response, data, response_data, collection)
    def serialize(self, data, collection=False, compound=False):
        """
        Serializes the data.
        Note that a serializer must have been registered with the name
        of this resource or relationship, depending on the request type.
        """
        name = self.get_resource_type()
        context = self.context.__dict__
        self_link = self.include_link_to_self
        # A ?fields=... request restricts serialization to those fields.
        fields = self.context.resource_descriptor.fields
        only = fields if fields else None
        return serialize(name, data, many=collection, compound=compound, context=context, self_link=self_link, only=only)
    def get_resource_type(self):
        """Name under which the serializer was registered."""
        return self.resource_name
    def handle_error(self, error, traceback=None):
        """Map known exception types to JSON error responses; re-raise the rest."""
        # TODO Improve error reporting
        error_object = {}
        if isinstance(error, FormValidationError):
            errors = []
            for field, itemized_errors in error.form.errors.items():
                # '__all__' holds non-field (composite) errors.
                composite = field == '__all__'
                for e in itemized_errors:
                    detail = {'detail': '%s' % e}
                    if not composite:
                        detail['member'] = field
                        detail['member_label'] = '%s' % error.form.fields.get(field).label
                    errors.append(detail)
            return HttpResponse(self.create_json({'errors': errors}), status=400)
        if isinstance(error, Http404):
            error_object['message'] = '%s' % error
            return HttpResponse(self.create_json({'errors': [error_object]}), status=404)
        if isinstance(error, JsonApiError):
            error_object['message'] = '%s' % error
            return HttpResponse(self.create_json({'errors': [error_object]}), status=500)
        # Python 2 three-expression raise: re-raise preserving the
        # original traceback (this module is Python 2 only).
        raise error.__class__, error, traceback
    def postprocess_response(self, response, data, response_data, collection):
        """
        If you need to do any further processing of the HttpResponse
        objects, this is the place to do it.
        """
        etag = self.generate_etag()
        if etag:
            response['ETag'] = quote_etag(etag)
            response['Cache-Control'] = 'private, max-age=0'
        return response
    def get_resource(self):
        """
        Grabs the resource for a resource request.
        Maps to ``GET /posts/1``.
        """
        filter = {self.get_pk_field(): self.context.pk}
        return self.get_filtered_queryset().get(**filter)
    def get_resources(self):
        """
        Grabs the resources for a collection request.
        Maps to ``GET /posts/1,2,3`` or ``GET /posts``.
        """
        qs = self.get_filtered_queryset()
        if self.context.pks:
            filter = {'%s__in' % self.get_pk_field(): self.context.pks}
            qs = qs.filter(**filter)
        if self.context.pks and not qs.exists():
            # Explicit ids that match nothing are a 404, not an empty list.
            raise Http404()
        return qs
    def get_filtered_queryset(self):
        """Apply the optional ``filter_class`` (django-filter style) to the queryset."""
        qs = self.get_queryset()
        if self.filter_class:
            return self.filter_class(self.request.GET, queryset=qs).qs
        return qs
    def is_changed_besides(self, resource, model):
        # TODO Perform simple diff of serialized model with resource
        return False
    def get_pk_field(self):
        """
        Determines the name of the primary key field of the model.
        Either set the ``pk_field`` on the class or override this method
        when your model's primary key points to another field than the
        default.
        """
        return self.pk_field
    def get_queryset(self):
        """
        Get the list of items for this main resource.
        This must be an iterable, and may be a queryset
        (in which qs-specific behavior will be enabled).
        """
        if self.queryset is not None:
            queryset = self.queryset
            if hasattr(queryset, '_clone'):
                # Clone so per-request filtering never mutates the class attr.
                queryset = queryset._clone()
        elif self.model is not None:
            queryset = self.model._default_manager.all()
        else:
            raise ImproperlyConfigured("'%s' must define 'queryset' or 'model'"
                                       % self.__class__.__name__)
        return queryset
    def get_content_type(self):
        """
        Determines the content type of responses.
        Override this method or set ``content_type`` on the class.
        """
        return self.content_type
    def create_get_context(self, request):
        """Creates the context for a GET request."""
        pks = self.kwargs.get(self.pks_url_key, '')
        pks = pks.split(',') if pks else []
        fields = request.GET.get('fields')
        fields = None if not fields else fields.split(',')
        resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, fields=fields)
        context = RequestContext(request, resource_descriptor)
        context.update_mode('GET')
        return context
    def extract_resources(self, request):
        """
        Extracts resources from the request body.
        This should probably be moved elsewhere since it doesn't make
        sense in a GET request. But still.
        """
        body = request.body
        if not body:
            raise MissingRequestBody()
        resource_name = self.resource_name
        try:
            data = self.parse_json(body)
            if not resource_name in data:
                raise InvalidDataFormat('Missing %s as key' % resource_name)
            obj = data[resource_name]
            if isinstance(obj, list):
                resource = None
                resources = obj
            else:
                resource = obj
                resources = [obj]
            return RequestPayloadDescriptor(resource_name, resources, resource)
        except ValueError:
            # json.loads failure -> malformed request body.
            raise InvalidDataFormat()
    def parse_json(self, data):
        return json.loads(data)
    def create_json(self, data, *args, **kwargs):
        return json.dumps(data, *args, **kwargs)
    def get_methods(self):
        """HTTP verbs supported; mixins extend this list."""
        return self.methods
    def context_manager(self):
        """Return ``(manager, args, kwargs)``: mutating verbs get a DB
        transaction, reads get the no-op ``not_atomic``."""
        if self.request.method in ['POST', 'PUT', 'DELETE', 'PATCH']:
            return (transaction.atomic, [], {})
        return (not_atomic, [], {})
class GetLinkedEndpoint(GetEndpoint):
    """Endpoint serving GET requests for a resource's related resources."""
    # Name of the relationship (model field) being exposed.
    relationship_name = None
    # Optional {field name: URL kwarg} overrides for relationship pks.
    relationship_pks_url_keys = None
    # Optional {field name: pk field} overrides for relationship lookups.
    relationship_pk_fields = None
    @classmethod
    def endpoint(cls, relationship_name=None, **initkwargs):
        """Like GetEndpoint.endpoint() but pins the relationship name."""
        initkwargs['relationship_name'] = relationship_name
        return csrf_exempt(cls.as_view(**initkwargs))
    def dispatch(self, request, *args, **kwargs):
        # Fall back to the 'relationship' URL kwarg when not configured.
        if not self.relationship_name:
            self.relationship_name = kwargs.get('relationship')
        return super(GetLinkedEndpoint, self).dispatch(request, *args, **kwargs)
    def get(self, request, *args, **kwargs):
        self.context = self.create_get_context(request)
        collection = False
        # We're dealing with a request for a related resource
        if self.context.requested_single_related_resource or not self.context.to_many:
            # Either a single relationship id was passed in or the
            # relationship is a to-one
            data = self.get_related_resource()
        else:
            # Multiple relationship ids or a to-many relationship
            data = self.get_related_resources()
            collection = True
        return self.create_http_response(data, collection=collection)
    def get_related_resource(self):
        """
        Handles the retrieval of a related resource.
        This will be called when either a single relationship instance
        was requested or the relationship is to-one.
        """
        qs = self.get_related_queryset()
        if not self.context.to_many:
            # Since it's not a to-many, we can simply return the value
            return qs
        pk_field = self.get_relationship_pk_field()
        filter = {pk_field: self.context.relationship_pk}
        return qs.get(**filter)
    def get_related_resources(self):
        """
        Handles the retrieval of multiple related resources.
        This will be called when either a multiple relationship
        instances were requested or no ids were supplied.
        """
        qs = self.get_related_queryset().all()
        if self.context.relationship_pks:
            pk_field = self.get_relationship_pk_field()
            filter = {'%s__in' % pk_field: self.context.relationship_pks}
            qs = qs.filter(**filter)
            if not qs.exists():
                raise Http404()
        return qs
    def get_related_queryset(self):
        """Value/manager of the relationship attribute on the parent resource."""
        field_name = self.get_related_field_name()
        resource = self.get_resource()
        return getattr(resource, field_name)
    def get_resource_type(self):
        # Serialization uses the relationship's registered serializer.
        return self.relationship_name
    def create_get_context(self, request):
        """Creates the context for a GET request."""
        pks = self.kwargs.get(self.pks_url_key, '')
        pks = pks.split(',') if pks else []
        rel_pks_url_key = self.get_relationship_pks_url_key()
        rel_pks = self.kwargs.get(rel_pks_url_key, '')
        rel_pks = rel_pks.split(',') if rel_pks else []
        many = self.is_to_many_relationship()
        rel_descriptor = RequestContext.create_relationship_descriptor(self.relationship_name, rel_pks, many)
        resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, rel_descriptor)
        context = RequestContext(request, resource_descriptor)
        context.update_mode('GET')
        return context
    def get_related_field_name(self):
        # TODO Use serializer to find correct name by default
        return self.relationship_name
    def get_relationship_pks_url_key(self):
        """URL kwarg holding the relationship ids; defaults to 'rel_pks'."""
        rel_name = self.get_related_field_name()
        keys = self.relationship_pks_url_keys
        keys = keys if keys else {}
        return keys.get(rel_name, 'rel_pks')
    def get_relationship_pk_field(self):
        """Lookup field for relationship ids; defaults to 'pk'."""
        rel_name = self.get_related_field_name()
        fields = self.relationship_pk_fields
        fields = fields if fields else {}
        return fields.get(rel_name, 'pk')
    def is_to_many_relationship(self):
        """Inspect model meta to decide whether the relationship is to-many."""
        rel_name = self.get_related_field_name()
        if self.model:
            model = self.model
        elif self.queryset:
            model = self.queryset.model
        else:
            model = self.get_queryset().model
        meta = model._meta
        field_object, model, direct, m2m = compat.get_field_by_name(meta, rel_name)
        if direct:
            return m2m
        # Reverse relation: multiple is True for reverse FK / M2M.
        return field_object.field.rel.multiple
class WithFormMixin(object):
    """
    Mixin supporting create and update of resources with a model form.
    Note that it relies on some methods made available by the
    GetEndpoint.
    """
    form_class = None
    def get_form_kwargs(self, **kwargs):
        """Hook to inject extra keyword arguments into the form."""
        return kwargs
    def get_form_class(self):
        return self.form_class
    def form_valid(self, form):
        """Persist the validated form; returns the saved model."""
        return form.save()
    def form_invalid(self, form):
        # Converted to a 400 response by GetEndpoint.handle_error().
        raise FormValidationError('', form=form)
    def get_form(self, resource, instance=None):
        """Constructs a new form instance with the supplied data."""
        data = self.prepare_form_data(resource, instance)
        form_kwargs = {'data': data, 'instance': instance}
        form_kwargs = self.get_form_kwargs(**form_kwargs)
        form_class = self.get_form_class()
        if not form_class:
            raise ImproperlyConfigured('Missing form_class')
        return form_class(**form_kwargs)
    def prepare_form_data(self, resource, instance=None):
        """Last chance to tweak the data being passed to the form."""
        # NOTE: dict(a.items() + b.items()) relies on Python 2 dict.items()
        # returning lists; '+' fails on Python 3 dict views.
        if instance:
            # The instance is converted to JSON and then loaded to ensure
            # special encodings (like timezone-conversion) are performed
            as_json = self.create_json(self.serialize(instance, compound=False))
            original = json.loads(as_json)
            original = original[self.resource_name]
            merged = dict(original.items() + original.get('links', {}).items())
            data = dict(resource.items() + resource.get('links', {}).items())
            # Overlay the incoming (possibly partial) resource on the
            # serialized original so PUT behaves like a partial update.
            for field, value in data.items():
                if value is None:
                    merged[field] = None
                else:
                    merged[field] = value
            return merged
        return dict(resource.items() + resource.get('links', {}).items())
class PostMixin(object):
    """
    Provides support for POST requests on resources.
    The ``create_resource`` method must be implemented to actually do
    something.
    """
    def get_methods(self):
        return super(PostMixin, self).get_methods() + ['post']
    def post(self, request, *args, **kwargs):
        """Create one or many resources from the request payload."""
        self.context = self.create_post_context(request)
        collection = False
        payload = self.context.payload
        if payload.many:
            data = self.create_resources(payload.resources)
            collection = True
        else:
            data = self.create_resource(payload.resource)
        return self.create_http_response(data, collection=collection)
    def create_post_context(self, request):
        """Build a POST context carrying the parsed payload; defaults to 201."""
        payload = self.extract_resources(request)
        descriptor = RequestContext.create_resource_descriptor(self.resource_name)
        context = RequestWithResourceContext(request, descriptor, payload, status=201)
        context.update_mode('POST')
        return context
    def create_resources(self, resources):
        return [self.create_resource(r) for r in resources]
    def create_resource(self, resource):
        """Create the resource and return the corresponding model."""
        pass
    def postprocess_response(self, response, data, response_data, collection):
        """Add the Location header required for 201 Created responses."""
        response = super(PostMixin, self).postprocess_response(response, data, response_data, collection)
        if self.context.status != 201:
            return response
        pks = ','.join(pluck_ids(response_data, self.resource_name))
        location = self.create_resource_url(pks)
        response['Location'] = location
        return response
    def create_resource_url(self, pks):
        """Reverse the resource URL for the comma-joined pks."""
        kwargs = {self.pks_url_key: pks}
        return reverse(self.get_url_name(), kwargs=kwargs)
    def get_url_name(self):
        return create_resource_view_name(self.resource_name)
class PostWithFormMixin(PostMixin, WithFormMixin):
    """
    Implements ``create_resource`` by delegating to a model form.
    """
    def create_resource(self, resource):
        """Validate *resource* through the configured form and persist it."""
        form = self.get_form(resource)
        if not form.is_valid():
            return self.form_invalid(form)
        return self.form_valid(form)
class PutMixin(object):
    """
    Provides support for PUT requests on resources.
    This supports both full and partial updates, on single and multiple
    resources.
    Requires ``update_resource`` to be implemented.
    """
    def get_methods(self):
        return super(PutMixin, self).get_methods() + ['put']
    def put(self, request, *args, **kwargs):
        """Update one or many resources; 204 when nothing else changed."""
        self.context = self.create_put_context(request)
        collection = False
        payload = self.context.payload
        if payload.many:
            changed_more, data = self.update_resources(payload.resources)
            collection = True
        else:
            changed_more, data = self.update_resource(payload.resource)
        if not changed_more:
            # > A server MUST return a 204 No Content status code if an update
            # > is successful and the client's current attributes remain up to
            # > date. This applies to PUT requests as well as POST and DELETE
            # > requests that modify links without affecting other attributes
            # > of a resource.
            return HttpResponse(status=204)
        return self.create_http_response(data, collection=collection)
    def create_put_context(self, request):
        """Build a PUT context with URL pks and the parsed payload."""
        pks = self.kwargs.get(self.pks_url_key, '')
        pks = pks.split(',') if pks else []
        payload = self.extract_resources(request)
        descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
        context = RequestWithResourceContext(request, descriptor, payload, status=200)
        context.update_mode('PUT')
        return context
    def update_resources(self, resources):
        """Update each resource; aggregate the changed flags with any()."""
        updated = []
        changed = []
        for res in resources:
            changed_more, result = self.update_resource(res)
            updated.append(result)
            changed.append(changed_more)
        return any(changed), updated
    def update_resource(self, resource):
        # Must return (changed_besides, model); see PutWithFormMixin.
        pass
class PutWithFormMixin(PutMixin, WithFormMixin):
    """
    Implements ``update_resource`` by validating through a model form.
    """
    def update_resource(self, resource):
        """Update the instance identified by ``resource['id']``.

        Raises IdMismatch when the body id is absent from the URL pks.
        Returns ``(changed_besides, model)`` on success.
        """
        resource_id = resource['id']
        if resource_id not in self.context.pks:
            raise IdMismatch('Id %s in request body but not in URL' % resource_id)
        lookup = {self.get_pk_field(): resource_id}
        instance = self.get_queryset().get(**lookup)
        form = self.get_form(resource, instance)
        if not form.is_valid():
            return self.form_invalid(form)
        model = self.form_valid(form)
        return self.is_changed_besides(resource, model), model
class DeleteMixin(object):
    """
    Provides support for DELETE request on single + multiple resources.
    """
    def get_methods(self):
        return super(DeleteMixin, self).get_methods() + ['delete']
    def delete(self, request, *args, **kwargs):
        """Delete the resources named in the URL; 204 on success."""
        self.context = self.create_delete_context(request)
        if not self.context.pks:
            raise Http404('Missing ids')
        # Although the default implementation defers DELETE requests for
        # both single and multiple resources to the ``perform_delete``
        # method, we still split based on single vs. collection requests
        # so subclasses get distinct override points.
        if self.context.requested_single_resource:
            not_deleted = self.delete_resource()
        else:
            not_deleted = self.delete_resources()
        if not_deleted:
            raise Http404('Resources %s not found' % ','.join(not_deleted))
        return HttpResponse(status=204)
    def create_delete_context(self, request):
        """Build a DELETE context from the pks in the URL."""
        pks = self.kwargs.get(self.pks_url_key, '')
        pks = pks.split(',') if pks else []
        descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
        context = RequestContext(request, descriptor)
        context.update_mode('DELETE')
        return context
    def delete_resources(self):
        return self.perform_delete(self.context.pks)
    def delete_resource(self):
        return self.perform_delete(self.context.pks)
    def perform_delete(self, pks):
        """Delete matching rows one by one; return the pks left undeleted.

        NOTE(review): ``remove('%s' % item.pk)`` assumes the URL pks are
        the string form of the model pks — confirm for non-integer pks.
        """
        not_deleted = pks[:]
        filter = {'%s__in' % self.get_pk_field(): pks}
        for item in self.get_queryset().filter(**filter).iterator():
            # Fetch each item separately to actually trigger any logic
            # performed in the delete method (like implicit deletes)
            not_deleted.remove('%s' % item.pk)
            item.delete()
        return not_deleted
class Endpoint(PostWithFormMixin, PutWithFormMixin, DeleteMixin, GetEndpoint):
    """
    Ties everything together.
    Use this base class when you need to support GET, POST, PUT and
    DELETE and want to use a form to process incoming data.
    """
    # MRO note: form-based create/update come from the Post/Put mixins;
    # GetEndpoint supplies dispatch, serialization and queryset plumbing.
    pass
|
bsd-2-clause
| 7,528,123,250,925,826,000
| 35.183616
| 121
| 0.61851
| false
| 4.340563
| false
| false
| false
|
Zhang-O/small
|
tensor__cpu/http/spyser_liyou.py
|
1
|
5473
|
import urllib.request
from bs4 import BeautifulSoup
import re
import urllib.parse
import xlsxwriter
import pandas as pd
import numpy as np
from urllib import request, parse
from urllib.error import URLError
import json
import multiprocessing
import time
# The detail-page URLs are collected here.
urls_of_detail = []
total_pages = 0
# The scraped content is stored in order, as parallel arrays.
_1 = []
_2 = []
_3 = []
_4 = []
_5 = []
issue_date_sum = []
project_address_sum = []
project_sector_sum = []
project_content_sum = []
company_name_sum = []
company_staff_sum = []
company_phone_sum = []
# Top-level (first-level) URL of the listing pages.
url = 'http://www.stc.gov.cn/ZWGK/TZGG/GGSB/'
# page: zero-based index of the listing page to fetch.
def get_urls(url,page):
    """Fetch listing page *page* under *url* and collect detail-page links.

    Appends the relative hrefs (with the leading './' stripped) to the
    module-level ``urls_of_detail`` list. Page 0 maps to ``index.htm``;
    page N (N > 0) maps to ``index_N.htm``.
    """
    # build the form data (left over from an earlier POST-based approach)
    # postdata = urllib.parse.urlencode({'currDistrict': '', 'pageNo': page,'hpjgName_hidden':'','keyWordName':''})
    # postdata = postdata.encode('utf-8')
    #
    # # send the request
    # response = urllib.request.urlopen(url, data=postdata)
    # html_cont = response.read()
    if page == 0:
        url = url + 'index.htm'
    else:
        url = url + 'index_' + str(page) + '.htm'
    req = request.Request(url=url)
    res_data = request.urlopen(req)
    # print(res_data)
    html_cont = res_data.read()
    # parse the document tree
    soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
    #
    # # use a regex to find the anchor elements that hold the detail-page URLs
    trs = soup.find_all('a', href=re.compile(r"^./201"))
    # # store the detail-page URLs in urls_of_detail
    for i in trs:
        # print(i['href'][2:])
        urls_of_detail.append(i['href'][2:])
def get_info(url,second_url):
    """Scrape one detail page and append its table rows to the globals.

    ``second_url`` is a path relative to ``url``.  Each data row of the
    page's table is split into the module-level columns _1.._5; missing
    fifth cells are recorded as the string 'null'.
    """
    # s = urllib.request.urlopen(urls_of_detail[0])
    # Fetch the document.
    second_url = url + second_url
    s = urllib.request.urlopen(second_url)
    # Parse the document.
    soup = BeautifulSoup(s, 'html.parser', from_encoding='utf-8')
    # The wanted values sit in plain <td> cells with no unique markers, so
    # locate the editor div, take all <tr> rows (skipping the header) and
    # rely on positional indexes within each row.
    div = soup.find_all('div', class_=re.compile(r"TRS_Editor"))
    trs = div[0].find_all('tr')
    trs = trs[1:]
    # print(trs[0])
    print('trs num',len(trs))
    for tr in trs:
        tds = tr.find_all('td')
        # Cells may wrap their text in <font>, in <p>, or in nothing at
        # all; each layout variant is handled separately below.
        if len(tds[0].find_all('font')) > 0 :
            # Debug aid: log pages whose 4th column came back empty.
            if tds[3].find_all('font')[0].string == None:
                print(second_url)
            _1.append(tds[0].find_all('font')[0].string)
            _2.append(tds[1].find_all('font')[0].string)
            _3.append(tds[2].find_all('font')[0].string)
            _4.append(tds[3].find_all('font')[0].string)
            if len(tds) == 5:
                _5.append(tds[4].find_all('font')[0].string)
            else:
                _5.append('null')
        elif len(tds[0].find_all('p')) > 0 :
            # if tds[3].find_all('p')[0].string == None:
            #     print(second_url)
            _1.append(tds[0].find_all('p')[0].string)
            _2.append(tds[1].find_all('p')[0].string)
            _3.append(tds[2].find_all('p')[0].string)
            if len(tds[3].find_all('p')) > 0:
                _4.append(tds[3].find_all('p')[0].string)
            else:
                _4.append(tds[3].string)
            if len(tds) == 5:
                # NOTE(review): this appends the whole <td> tag, not its
                # .string like every other branch -- confirm intended.
                _5.append(tds[4])
            else:
                _5.append('null')
        else:
            if tds[3].string == None:
                print(second_url)
            _1.append(tds[0].string)
            _2.append(tds[1].string)
            # NOTE(review): both branches of this condition append the
            # same value (tds[2].string) -- the <span> case looks like it
            # was meant to extract something different.
            if len(tds[2].find_all('span'))>0 and tds[2].find_all('span')[0].string == None:
                _3.append(tds[2].string)
            else:
                _3.append(tds[2].string)
            _4.append(tds[3].string)
            if len(tds) == 5:
                _5.append(tds[4].string)
            else:
                _5.append('null')
    # elif len(tds[0].find_all('td'))
    # print(len(tds))
    # print(tds[0].string)
    # print(tds[1].string)
    # print(tds[2].string)
    # print(tds[3].string)
# print(response.read().decode('utf-8','ignore'))
# The site shows 1036 listing pages in total (only the first few are used).
num0 =0
for page in range(0,7):
    num0 += 1
    # print(num0)
    get_urls(url, page)
# Dump all collected detail-page URLs to a text file.
with open('urls_all_liyou','w') as f:
    f.write(str(urls_of_detail))
# print(len(urls_of_detail))
# print(len(set(urls_of_detail)))
print('urls num :' , len(urls_of_detail))
num=0 # Mainly a debugging aid: shows which URL the scrape was on if it fails.
for second_url in urls_of_detail:
    num += 1
    print('page num : ', num)
    # Pages 15 and 42 are skipped (presumably known-bad); stop after 54.
    if num in [15,42]:
        continue
    if num > 54:
        break
    get_info(url, second_url)
print('end ----------')
print(len(_1))
workbook = xlsxwriter.Workbook('./liyou.xlsx')
# 1. ------------- create a worksheet to hold the scraped rows -------------
ws = workbook.add_worksheet('liyou')
# Set the column widths.
ws.set_column('A:A', 25)
ws.set_column('B:B', 25)
ws.set_column('C:C', 15)
ws.set_column('D:D', 15)
ws.set_column('E:E', 15)
# Write the header row (labels are user-facing and deliberately Chinese).
ws.write(0, 0, '序号')
ws.write(0, 1, '区域')
ws.write(0, 2, '类型')
ws.write(0, 3, '设置地点')
ws.write(0, 4, '方向')
number = len(_1)
for i in range(number):
    ws.write(i + 1, 0, str(_1[i]))
    ws.write(i + 1, 1, str(_2[i]))
    ws.write(i + 1, 2, str(_3[i]))
    ws.write(i + 1, 3, str(_4[i]))
    ws.write(i + 1, 4, str(_5[i]))
workbook.close()
|
mit
| -3,506,324,734,389,136,400
| 22.686916
| 115
| 0.533241
| false
| 2.440539
| false
| false
| false
|
Nik0las1984/mudpyl
|
mudpyl/net/nvt.py
|
1
|
5604
|
"""This module contains tools for emulating a network virtual terminal. See
RFC 854 for details of the NVT commands, and VT100 documentation for the
colour codes.
"""
from mudpyl.metaline import Metaline, RunLengthList
from mudpyl.colours import NORMAL_CODES, fg_code, bg_code, WHITE, BLACK
import re
# Fragments of VT100/ANSI SGR parameter codes, as consumed by
# ColourCodeParser below.
ALL_RESET = '0'     # reset every attribute to its default
BOLDON = '1'
BOLDOFF = '22'
FG_FLAG = '3'       # foreground codes are '3' followed by a colour digit
BG_FLAG = '4'       # background codes are '4' followed by a colour digit
GROUND_RESET = '8'  # second digit of '38'/'48': treated as fore/back reset
# Matches one complete SGR escape sequence, capturing the ';'-separated
# parameter digits in group 1.
colour_pattern = re.compile( "\x1b" + #ESC
                             r"\[" #open square bracket
                             r"(\d+" #open group, initial digits
                             r"(?:;\d{1,2})*" #following digits
                             r")" #close the group
                             "m" #just an 'm'
                           )
toremove = set('\000' #NUL
               '\007' #BEL
               '\013' #VT
               '\014') #FF

# One C-level str.translate pass deleting every character in `toremove`
# beats calling str.replace once per character.
_delete_table = str.maketrans('', '', ''.join(toremove))

BS = '\010'
HT = '\011' #AKA '\t' and tab.
HT_replacement = '    ' #four spaces

# A single character followed by a backspace: the pair rubs itself out.
# DOTALL is needed so '.' also matches '\n' -- without it a backspace
# directly after a newline never matches, and the `while BS in string`
# loop below spins forever.
_erase_pair = re.compile('.' + BS, re.DOTALL)

def make_string_sane(string):
    """Process (in most cases, this means 'ignore') the NVT characters in
    the input string.

    NUL/BEL/VT/FF are dropped outright, each backspace erases the
    character immediately before it (leading backspaces, which have
    nothing to erase, are simply discarded), and tabs are expanded to
    four spaces.
    """
    #simple characters don't need any special machinery.
    string = string.translate(_delete_table)
    #do it backspace by backspace because otherwise, if there were multiple
    #backspaces in a row, it gets confused and backspaces over backspaces.
    while BS in string:
        #take off leading backspaces so that the following regex doesn't get
        #confused.
        string = string.lstrip(BS)
        string = _erase_pair.sub('', string, 1)
    #swap tabs for four whitespaces.
    return string.replace(HT, HT_replacement)
class ColourCodeParser(object):
    """A stateful colour code parser.

    Foreground, background and bold state persist between calls, so a
    colour started on one line carries over into the next.
    """
    def __init__(self):
        # WHITE-on-BLACK, not bold: the terminal default state.
        self.fore = WHITE
        self.back = BLACK
        self.bold = False
    def _parseline(self, line):
        """Feed it lines of VT100-infested text, and it splits it all up.
        This returns a threeple: a string, the foreground colours, and the
        background colours. The string is simple enough. The background list
        is a list of integers corresponding to WHITE, GREEN, etc. The
        foreground list is made up of two-ples: the first is the integer
        colour, and the second is whether bold is on or off.
        The lists of fore and back changes isn't redundant -- there are no
        changes that could be removed without losing colour information.
        """
        #this is a performance hotspot, so minimise the number of attribute
        #lookups and modifications
        fore = self.fore
        bold = self.bold
        back = self.back
        # Both change-lists start with the state inherited from the
        # previous line, anchored at text offset 0.
        backs = [(0, back)]
        fores = [(0, (fore, bold))]
        text = ''
        prev_end = 0
        for match in colour_pattern.finditer(line):
            # Accumulate the plain text between the previous escape
            # sequence and this one; changes are recorded at len(text).
            text += line[prev_end:match.start()]
            prev_end = match.end()
            codes = match.group(1)
            for code in codes.split(';'):
                code = code.lstrip('0') #normalisation.
                if not code:
                    #leading zeroes been stripped from ALL_RESET
                    if fore != WHITE or bold:
                        fore = WHITE
                        bold = False
                        fores.append((len(text), (fore, bold)))
                    if back != BLACK:
                        back = BLACK
                        backs.append((len(text), back))
                elif code == BOLDON and not bold:
                    bold = True
                    fores.append((len(text), (fore, bold)))
                elif code == BOLDOFF and bold:
                    bold = False
                    fores.append((len(text), (fore, bold)))
                elif code.startswith(FG_FLAG):
                    # '3x': foreground change; '38' resets to WHITE here.
                    code = code[1:]
                    if code == GROUND_RESET:
                        code = WHITE
                    if code in NORMAL_CODES and code != fore:
                        fore = code
                        fores.append((len(text), (fore, bold)))
                elif code.startswith(BG_FLAG):
                    # '4x': background change; '48' resets to BLACK here.
                    code = code[1:]
                    if code == GROUND_RESET:
                        code = BLACK
                    if code in NORMAL_CODES and code != back:
                        back = code
                        backs.append((len(text), back))
        #We don't really care about chopped colour codes. This class is
        #actually going to be tossed whole lines (ie, \r\n or similar
        #terminated), and any escape code of the form "\x1b[\r\n30m" or
        #similar is broken anyway. I'll probably be proved wrong somehow
        #on this one...
        # NOTE(review): the `- 1` below looks like it could drop the last
        # character of a line that doesn't end in a line terminator --
        # presumably safe given whole-line input; confirm.
        if len(line) - 1 > prev_end:
            text += line[prev_end:]
        # Persist the final state for the next line.
        self.fore = fore
        self.back = back
        self.bold = bold
        return (fores, backs, text)
    def parseline(self, line):
        """Interpret the VT100 codes in line and returns a Metaline, replete
        with RunLengthLists, that splits the text, foreground and background
        into three separate channels.
        """
        fores, backs, cleanline = self._parseline(line)
        rlfores = RunLengthList(((length, fg_code(colour, bold))
                                     for (length, (colour, bold)) in fores),
                                _normalised = True)
        rlbacks = RunLengthList(((length, bg_code(colour))
                                     for (length, colour) in backs),
                                _normalised = True)
        return Metaline(cleanline, rlfores, rlbacks)
|
gpl-2.0
| -6,692,970,020,253,625,000
| 35.868421
| 77
| 0.526588
| false
| 4.26484
| false
| false
| false
|
RCAD/ringling-render-tools
|
src/rrt/maya/ui/submit.py
|
1
|
13088
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\src\rrt\maya\ui\submit.ui'
#
# Created: Wed Oct 24 16:19:16 2012
# by: PyQt4 UI code generator 4.7.7
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # PyQt API v2 / Python 3 builds have no QString; str is already unicode.
    _fromUtf8 = lambda s: s
class Ui_SubmitMainWindow(object):
    """Widget/layout builder for the hpc-submit-maya submission window.

    NOTE: generated by pyuic4 from submit.ui (see the header warning:
    "All changes made in this file will be lost!") -- regenerate from the
    .ui file instead of hand-editing.
    """
    def setupUi(self, SubmitMainWindow):
        """Create all widgets, layouts and signal connections."""
        SubmitMainWindow.setObjectName(_fromUtf8("SubmitMainWindow"))
        SubmitMainWindow.setEnabled(True)
        SubmitMainWindow.resize(445, 283)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(SubmitMainWindow.sizePolicy().hasHeightForWidth())
        SubmitMainWindow.setSizePolicy(sizePolicy)
        SubmitMainWindow.setMinimumSize(QtCore.QSize(445, 283))
        SubmitMainWindow.setWindowTitle(_fromUtf8("hpc-submit-maya"))
        self.verticalLayout = QtGui.QVBoxLayout(SubmitMainWindow)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        # --- form area: head node / title / project / scene / frames ---
        self.formLayout = QtGui.QFormLayout()
        self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout.setLabelAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.formLayout.setHorizontalSpacing(6)
        self.formLayout.setVerticalSpacing(8)
        self.formLayout.setObjectName(_fromUtf8("formLayout"))
        self.head_node_label = QtGui.QLabel(SubmitMainWindow)
        self.head_node_label.setObjectName(_fromUtf8("head_node_label"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.head_node_label)
        self.head_node_field = QtGui.QComboBox(SubmitMainWindow)
        self.head_node_field.setObjectName(_fromUtf8("head_node_field"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.head_node_field)
        self.title_label = QtGui.QLabel(SubmitMainWindow)
        self.title_label.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.title_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.title_label.setObjectName(_fromUtf8("title_label"))
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.title_label)
        self.project_label = QtGui.QLabel(SubmitMainWindow)
        self.project_label.setMinimumSize(QtCore.QSize(0, 0))
        self.project_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.project_label.setObjectName(_fromUtf8("project_label"))
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.project_label)
        # project folder row: read-only path field + "Set" button
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setSpacing(6)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.project_field = QtGui.QLineEdit(SubmitMainWindow)
        self.project_field.setEnabled(True)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.project_field.sizePolicy().hasHeightForWidth())
        self.project_field.setSizePolicy(sizePolicy)
        self.project_field.setMinimumSize(QtCore.QSize(161, 26))
        self.project_field.setReadOnly(True)
        self.project_field.setObjectName(_fromUtf8("project_field"))
        self.horizontalLayout.addWidget(self.project_field)
        self.browse_button = QtGui.QPushButton(SubmitMainWindow)
        self.browse_button.setMinimumSize(QtCore.QSize(85, 27))
        self.browse_button.setObjectName(_fromUtf8("browse_button"))
        self.horizontalLayout.addWidget(self.browse_button)
        self.formLayout.setLayout(2, QtGui.QFormLayout.FieldRole, self.horizontalLayout)
        self.scene_label = QtGui.QLabel(SubmitMainWindow)
        self.scene_label.setObjectName(_fromUtf8("scene_label"))
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.scene_label)
        # scene file row: read-only path field + "Browse" button
        self.horizontalLayout1 = QtGui.QHBoxLayout()
        self.horizontalLayout1.setSpacing(6)
        self.horizontalLayout1.setObjectName(_fromUtf8("horizontalLayout1"))
        self.scene_field = QtGui.QLineEdit(SubmitMainWindow)
        self.scene_field.setEnabled(True)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.scene_field.sizePolicy().hasHeightForWidth())
        self.scene_field.setSizePolicy(sizePolicy)
        self.scene_field.setMinimumSize(QtCore.QSize(161, 26))
        self.scene_field.setReadOnly(True)
        self.scene_field.setObjectName(_fromUtf8("scene_field"))
        self.horizontalLayout1.addWidget(self.scene_field)
        self.scene_button = QtGui.QPushButton(SubmitMainWindow)
        self.scene_button.setMinimumSize(QtCore.QSize(85, 27))
        self.scene_button.setObjectName(_fromUtf8("scene_button"))
        self.horizontalLayout1.addWidget(self.scene_button)
        self.formLayout.setLayout(3, QtGui.QFormLayout.FieldRole, self.horizontalLayout1)
        # frame-range spinners: start / end / step
        self.start_label = QtGui.QLabel(SubmitMainWindow)
        self.start_label.setObjectName(_fromUtf8("start_label"))
        self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.start_label)
        self.start_field = QtGui.QSpinBox(SubmitMainWindow)
        self.start_field.setMinimum(1)
        self.start_field.setMaximum(999999999)
        self.start_field.setObjectName(_fromUtf8("start_field"))
        self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.start_field)
        self.end_label = QtGui.QLabel(SubmitMainWindow)
        self.end_label.setObjectName(_fromUtf8("end_label"))
        self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.end_label)
        self.end_field = QtGui.QSpinBox(SubmitMainWindow)
        self.end_field.setMinimum(1)
        self.end_field.setMaximum(999999999)
        self.end_field.setObjectName(_fromUtf8("end_field"))
        self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.end_field)
        self.step_label = QtGui.QLabel(SubmitMainWindow)
        self.step_label.setObjectName(_fromUtf8("step_label"))
        self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.step_label)
        self.horizontalLayout_11 = QtGui.QHBoxLayout()
        self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
        self.step_field = QtGui.QSpinBox(SubmitMainWindow)
        self.step_field.setMinimum(1)
        self.step_field.setMaximum(999999999)
        self.step_field.setObjectName(_fromUtf8("step_field"))
        self.horizontalLayout_11.addWidget(self.step_field)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.horizontalLayout_11.addItem(spacerItem)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.horizontalLayout_11.addItem(spacerItem1)
        spacerItem2 = QtGui.QSpacerItem(50, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.horizontalLayout_11.addItem(spacerItem2)
        self.render_label = QtGui.QLabel(SubmitMainWindow)
        self.render_label.setObjectName(_fromUtf8("render_label"))
        self.horizontalLayout_11.addWidget(self.render_label)
        self.render_field = QtGui.QComboBox(SubmitMainWindow)
        self.render_field.setObjectName(_fromUtf8("render_field"))
        self.horizontalLayout_11.addWidget(self.render_field)
        spacerItem3 = QtGui.QSpacerItem(10, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.horizontalLayout_11.addItem(spacerItem3)
        self.formLayout.setLayout(6, QtGui.QFormLayout.FieldRole, self.horizontalLayout_11)
        # option checkboxes
        self.horizontalLayout_5 = QtGui.QHBoxLayout()
        self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
        self.rrt_debug = QtGui.QCheckBox(SubmitMainWindow)
        self.rrt_debug.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.rrt_debug.setObjectName(_fromUtf8("rrt_debug"))
        self.horizontalLayout_5.addWidget(self.rrt_debug)
        self.pause = QtGui.QCheckBox(SubmitMainWindow)
        self.pause.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.pause.setObjectName(_fromUtf8("pause"))
        self.horizontalLayout_5.addWidget(self.pause)
        self.formLayout.setLayout(7, QtGui.QFormLayout.FieldRole, self.horizontalLayout_5)
        self.horizontalLayout_4 = QtGui.QHBoxLayout()
        self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
        self.title_field = QtGui.QLineEdit(SubmitMainWindow)
        self.title_field.setObjectName(_fromUtf8("title_field"))
        self.horizontalLayout_4.addWidget(self.title_field)
        self.formLayout.setLayout(1, QtGui.QFormLayout.FieldRole, self.horizontalLayout_4)
        self.verticalLayout.addLayout(self.formLayout)
        self.line = QtGui.QFrame(SubmitMainWindow)
        self.line.setFrameShape(QtGui.QFrame.HLine)
        self.line.setFrameShadow(QtGui.QFrame.Sunken)
        self.line.setObjectName(_fromUtf8("line"))
        self.verticalLayout.addWidget(self.line)
        # bottom button row: Submit / Cancel
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setSpacing(6)
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem4)
        self.submit_button = QtGui.QPushButton(SubmitMainWindow)
        self.submit_button.setObjectName(_fromUtf8("submit_button"))
        self.horizontalLayout_2.addWidget(self.submit_button)
        self.cancel_button = QtGui.QPushButton(SubmitMainWindow)
        self.cancel_button.setObjectName(_fromUtf8("cancel_button"))
        self.horizontalLayout_2.addWidget(self.cancel_button)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        self.retranslateUi(SubmitMainWindow)
        # signal wiring: buttons call methods on the window object itself
        QtCore.QObject.connect(self.browse_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.browse)
        QtCore.QObject.connect(self.cancel_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.quit)
        QtCore.QObject.connect(self.submit_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.submit_job)
        QtCore.QObject.connect(self.scene_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.scene)
        QtCore.QMetaObject.connectSlotsByName(SubmitMainWindow)
    def retranslateUi(self, SubmitMainWindow):
        """Set all user-visible label/button/tooltip strings."""
        self.head_node_label.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "which cluster to use", None, QtGui.QApplication.UnicodeUTF8))
        self.head_node_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Head Node", None, QtGui.QApplication.UnicodeUTF8))
        self.head_node_field.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "Which cluster to submit to", None, QtGui.QApplication.UnicodeUTF8))
        self.title_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Job Title", None, QtGui.QApplication.UnicodeUTF8))
        self.project_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Project Folder", None, QtGui.QApplication.UnicodeUTF8))
        self.browse_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Set", None, QtGui.QApplication.UnicodeUTF8))
        self.scene_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Maya Scene File", None, QtGui.QApplication.UnicodeUTF8))
        self.scene_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Browse", None, QtGui.QApplication.UnicodeUTF8))
        self.start_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Start Frame", None, QtGui.QApplication.UnicodeUTF8))
        self.end_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "End Frame", None, QtGui.QApplication.UnicodeUTF8))
        self.step_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Frame Step", None, QtGui.QApplication.UnicodeUTF8))
        self.render_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Renderer", None, QtGui.QApplication.UnicodeUTF8))
        self.rrt_debug.setText(QtGui.QApplication.translate("SubmitMainWindow", "Show Debug Messages", None, QtGui.QApplication.UnicodeUTF8))
        self.pause.setText(QtGui.QApplication.translate("SubmitMainWindow", "Pause before exit", None, QtGui.QApplication.UnicodeUTF8))
        self.submit_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Submit Job", None, QtGui.QApplication.UnicodeUTF8))
        self.cancel_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
|
mit
| 7,755,343,654,843,894,000
| 66.117949
| 157
| 0.737164
| false
| 3.985384
| false
| false
| false
|
kunalarya/simple-sat-solver
|
satsolver/solver.py
|
1
|
9011
|
from __future__ import print_function
import argparse
import logging
from collections import namedtuple
import satsolver.parser as parser
from satsolver.util import Success, Failure
from satsolver.state import Instance
class Node(object):
    """A node in the implication graph: one variable assignment.

    Attributes:
        lit (int): positive integer naming the variable.
        asg: the value assigned to the variable.
        level (int): decision level at which the assignment was made.
    """
    def __init__(self, lit, asg, level):
        # Raise rather than assert: asserts are stripped under `python -O`,
        # which would silently let non-positive literals through.
        if lit <= 0:
            raise ValueError('lit must be a positive integer, got %r' % (lit,))
        self.lit = lit
        self.asg = asg
        self.level = level

    def __repr__(self):
        return '<x_{} = {} @ {}>'.format(
            self.lit, self.asg, self.level)
class ImplicationGraph(object):
    """Directed graph of assignments and the clauses that forced them."""

    def __init__(self):
        self.nodes = set()         # every Node currently in the graph
        self.lits = set()          # literals of those nodes
        self.nodes_by_lit = {}     # lit -> its Node
        self.fwd_edges = {}        # Node -> list of successor Nodes
        self.edge_annot = {}       # (src, dst) -> clause forcing the edge

    def add_node(self, node):
        """Insert *node*, indexing it by its literal."""
        self.nodes.add(node)
        self.lits.add(node.lit)
        self.nodes_by_lit[node.lit] = node
        self.fwd_edges[node] = []

    def del_node(self, node):
        """Remove *node* and drop all bookkeeping for its literal."""
        self.lits.remove(node.lit)
        self.nodes.remove(node)
        del self.fwd_edges[node]
        del self.nodes_by_lit[node.lit]

    def add_edge(self, src, dst, reason):
        """Record the directed edge src -> dst, annotated with *reason*."""
        self.fwd_edges[src].append(dst)
        self.edge_annot[(src, dst)] = reason
# A branching choice: variable `lit` set to `value` at decision `level`.
Decision = namedtuple('Decision', ['level', 'lit', 'value'])
# A forced assignment found by BCP: `clause` (an index into the clause
# list) implied `lit` = `value`.
Implication = namedtuple('Implication', ['clause', 'lit', 'value'])
class Solver(object):
    """Main Solver: recursive DPLL-style search over an Instance.

    `decide` and `try_assignment` call each other recursively; `bcp`
    performs unit propagation and records implications in an
    ImplicationGraph.
    """
    def __init__(self, instance, recipe=None):
        self.instance = instance
        # Pick variables in this order, if given.
        self.recipe = recipe
        # NOTE(review): recipe_index is never read or updated below --
        # the recipe is consumed by slicing in determine_next_var instead.
        self.recipe_index = 0
    # def new_var(self):
    #     pass
    # def add_clause(self, lits):
    #     pass
    # def simplify_db(self):
    #     pass
    def solve(self):
        """Run the search from decision level 1; returns Success | Failure."""
        result = self.decide([], 1)
        return result
    def determine_next_var(self):
        """Choose the next variable to assign.
        It will run the recipe if given, otherwise select a random unassigned
        variable.
        Returns:
            tuple(variable, value)
        """
        if self.recipe is not None:
            if len(self.recipe) > 0:
                next_var_and_value = self.recipe[0]
                self.recipe = self.recipe[1:]
                return next_var_and_value
        # Otherwise, choose a variable randomly.
        # (Really: an arbitrary element of the unassigned set, tried
        # with value 1 first.)
        next_var = next(iter(self.instance.unasg_vars))
        return next_var, 1
    def bcp(self, decision_level, igraph):
        """Boolean Constrain Propagation
        Returns:
            Success | Failure
        Success result:
            {lit: Implication}
        Failure means UNSAT
        """
        any_unit = True
        implications = {} # Keyed on int
        # Keep sweeping the clause list until a pass finds no unit clause.
        while any_unit:
            any_unit = False
            for clause_index, clause in enumerate(self.instance.clauses):
                r = self.instance.is_unit(clause)
                if not r.success: return r
                is_unit, implied = r.result
                if is_unit:
                    # `implied` is a signed literal; its sign picks the value.
                    lit = abs(implied)
                    if implied > 0:
                        r = self.instance.set_lit(lit, 1)
                        if not r.success: return r
                        implications[lit] = Implication(clause_index, lit, 1)
                        value = 1
                    else:
                        r = self.instance.set_lit(lit, 0)
                        if not r.success: return r
                        implications[lit] = Implication(clause_index, lit, 0)
                        value = 0
                    logging.debug('implied=%d -> %d', lit, value)
                    # Create a node in the ImplicationGraph if it doesn't yet exist.
                    if not lit in igraph.nodes_by_lit:
                        lit_node = Node(lit, value, decision_level)
                        igraph.add_node(lit_node)
                    # Create any edges
                    for implicating_lit in clause:
                        implicating_pair = self.instance.get_value(implicating_lit)
                        implicating_lit, implicating_value = implicating_pair
                        if implicating_lit != lit:
                            # create the implicating lit if needed
                            if implicating_lit not in igraph.lits:
                                inode = Node(implicating_lit, implicating_value,
                                             decision_level)
                                igraph.add_node(inode)
                            else:
                                inode = igraph.nodes_by_lit[implicating_lit]
                            # create an edge for this node
                            lit_node = igraph.nodes_by_lit[lit]
                            igraph.add_edge(inode, lit_node, clause)
                            logging.debug('add edge %s->%s because of %s',
                                          inode, lit_node, clause)
                    any_unit = True
        return Success(implications)
    def decide(self, decisions, level):
        """Pick a variable, try both values, and recurse.

        Args:
            decisions (list[Decision]):
            level (int):
        Returns:
            Success | Failure
        """
        # choose a variable to decide
        print('.', end='')
        logging.debug('______________________________')
        logging.debug('[level: %d]', level)
        # Choose a variable to set.
        next_var, next_value = self.determine_next_var()
        # Create a new copy of the decisions.
        decisions = list(decisions)
        decisions.append(Decision(level, next_var, next_value))
        logging.debug('try_assignment(level=%d, %d->%d)', level, next_var,
                      next_value)
        result = self.try_assignment(level, decisions, next_var, next_value)
        if not result.success:
            logging.debug('caused unsat: try_assignment(level=%d, %d->%d)',
                          level, next_var, next_value)
            # try the other branch
            inverted_value = 1 - next_value
            # remove last decision
            decisions = decisions[:-1]
            # add new decision
            decisions.append(Decision(level, next_var, inverted_value))
            r = self.try_assignment(level, decisions, next_var, inverted_value)
            # If we reached UNSAT here, then there's no solution here, so propagate
            # this issue up.
            if not r.success:
                return r
        else:
            # If all variables have been assigned, store this as a solution.
            if len(self.instance.unasg_vars) == 0:
                if self.instance.verify():
                    self.instance.save_solution()
                    print('satisfied!')
                else:
                    raise ValueError('All variables assigned, but UNSAT')
        return Success()
    def try_assignment(self, level, decisions, lit, value):
        """Assign lit=value, propagate via BCP, and recurse if needed.

        Unsets the literal again before returning on both the UNSAT path
        and the deeper-recursion path.
        Returns:
            Success | Failure
        """
        logging.debug('try_assignment: lit = %d -- setting to %d', lit, value)
        # assign it True
        r = self.instance.set_lit(lit, value)
        if not r.success:
            return r
        igraph = ImplicationGraph()
        # build the graph
        for decision in decisions:
            # create a node for each decision
            node = Node(decision.lit, decision.value, decision.level)
            igraph.add_node(node)
            logging.debug('adding node %s', node)
        logging.debug('running bcp...')
        r = self.bcp(level, igraph)
        if not r.success: # Meaning UNSAT:
            logging.debug('decision led to UNSAT. unsetting')
            self.instance.unset_lit(lit)
            # If it's UNSAT, we need to backtrack
            return Failure('Unsat!')
        # Otherwise it was a Success
        implications = r.result
        if len(self.instance.unasg_vars) > 0:
            # increase the decision level
            r = self.decide(decisions, level+1)
            self.instance.unset_lit(lit)
            return r
        # otherwise, return igraph
        # NOTE(review): on this fully-assigned path the literal is *not*
        # unset and `implications` is unused -- presumably intentional
        # since the caller records the solution; confirm.
        return Success(result=(igraph, None))
def solve(instance):
    """Run the solver over *instance* and report failure on stdout.

    Args:
        instance (Instance): parsed SAT instance
    Returns:
        Success | Failure
    """
    outcome = Solver(instance).solve()
    if not outcome.success:
        print('Unsatisfiable')
    return outcome
def main():
    """Command-line entry point: parse a CNF file and solve it."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('filename', action='store', type=str)
    options = arg_parser.parse_args()

    cnf = parser.CNFFileParser(options.filename)
    inst = Instance(var_count=cnf.var_count, clauses=cnf.clauses)

    result = solve(inst)
    if result.success:
        # Report every satisfying assignment that was recorded.
        print('Satisfying solutions:')
        for solution in inst.solutions:
            print(solution)


if __name__ == '__main__':
    main()
|
apache-2.0
| -7,440,824,354,011,734,000
| 29.545763
| 84
| 0.52669
| false
| 4.246466
| false
| false
| false
|
luyijun/evennia_worldloader
|
worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py
|
1
|
5070
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the tutorial world data tables.

    Auto-generated by Django's makemigrations; prefer generating a new
    migration over editing this one.  All five tables share the same
    object schema except `world_details`, which keeps only a subset of
    the columns.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='personal_objects',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'Personal Object List',
                'verbose_name_plural': 'Personal Object List',
            },
            bases=(models.Model,),
        ),
        # world_details: reduced schema (key/name/desc/location only).
        migrations.CreateModel(
            name='world_details',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Detail List',
                'verbose_name_plural': 'World Detail List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_exits',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Exit List',
                'verbose_name_plural': 'World Exit List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_objects',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Object List',
                'verbose_name_plural': 'World Object List',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='world_rooms',
            fields=[
                ('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('alias', models.CharField(max_length=255, blank=True)),
                ('typeclass', models.CharField(max_length=255)),
                ('desc', models.TextField(blank=True)),
                ('location', models.CharField(max_length=255, blank=True)),
                ('home', models.CharField(max_length=255, blank=True)),
                ('lock', models.CharField(max_length=255, blank=True)),
                ('attributes', models.TextField(blank=True)),
                ('tutorial_info', models.TextField(blank=True)),
                ('destination', models.CharField(max_length=255, blank=True)),
            ],
            options={
                'verbose_name': 'World Room List',
                'verbose_name_plural': 'World Room List',
            },
            bases=(models.Model,),
        ),
    ]
|
bsd-3-clause
| 3,046,071,758,788,312,000
| 44.675676
| 93
| 0.522288
| false
| 4.522748
| false
| false
| false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.