code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
#
# openssh-ldap-pubkey documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 13 12:47:40 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# NOTE(review): sys/os/shlex are part of the sphinx-quickstart template and
# are unused unless the commented sys.path tweak below is enabled.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'openssh-ldap-pubkey'
copyright = u'2015-2020, Kouhei Maeda'
author = u'Kouhei Maeda'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): recent Sphinx releases warn on language = None and expect
# 'en' instead — confirm before upgrading Sphinx.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# NOTE(review): sphinx_rtd_theme is a third-party package and must be
# installed (and, on older Sphinx, listed in extensions) for builds to work.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'openssh-ldap-pubkeydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'openssh-ldap-pubkey.tex', u'openssh-ldap-pubkey Documentation',
     u'Kouhei Maeda', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'sshauthzkeyscmd-ldap', u'openssh-ldap-pubkey Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'openssh-ldap-pubkey', u'openssh-ldap-pubkey Documentation',
     author, 'openssh-ldap-pubkey', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
mkouhei/openssh-ldap-pubkey
|
docs/source/conf.py
|
Python
|
gpl-3.0
| 9,288
|
# created based on C. Warrick's page here:
# https://chriswarrick.com/blog/2014/09/15/python-apps-the-right-way-entry_points-and-scrip ts/
import sys
import os
MODEL_NAME = 0  # index of the model's display name in each registry entry
MODEL_RUN = 1   # index of the model's runner callable in each registry entry


def entry_point(args, models):
    """Show the available models and run the one the user picks.

    :param args: command-line arguments; currently unused but kept so the
        entry-point signature stays stable.
    :param models: mapping of menu number -> (name, runner) sequence,
        indexed via MODEL_NAME / MODEL_RUN.
    """
    print("In this module, the models available to run are: ")
    for num, model in models.items():
        print(str(num) + ". " + model[MODEL_NAME])
    choice = int(input("Enter model number to run it: "))
    # Robustness fix: the original raised KeyError on an unknown number;
    # report the bad choice instead of crashing.
    if choice not in models:
        print("No model with number " + str(choice))
        return
    models[choice][MODEL_RUN]()
|
gcallah/Indra
|
indraV1/indra/entry_point.py
|
Python
|
gpl-3.0
| 469
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2012 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" A dialog for batch adding/removal of tags """
import gtk
from GTG import _
from GTG.gtk.browser import GnomeConfig
from GTG.tools.tags import parse_tag_list
class ModifyTagsDialog:
    """ Dialog for batch adding/removal of tags """

    def __init__(self, tag_completion, req):
        # req is the GTG requester used to look tasks up by id.
        self.req = req
        self.tasks = []
        self._init_dialog()
        self.tag_entry.set_completion(tag_completion)
        # Remember values from last time
        self.last_tag_entry = _("NewTag")
        self.last_apply_to_subtasks = False

    def _init_dialog(self):
        """ Init .glade file """
        builder = gtk.Builder()
        builder.add_from_file(GnomeConfig.MODIFYTAGS_GLADE_FILE)
        builder.connect_signals({
            "on_modifytags_confirm":
            self.on_confirm,
            # NOTE(review): this lambda returns the bound method dialog.hide
            # without calling it, so the cancel handler appears to be a no-op;
            # dialog.hide() was probably intended — confirm against the glade
            # signal wiring before changing.
            "on_modifytags_cancel":
            lambda dialog: dialog.hide,
        })
        self.tag_entry = builder.get_object("tag_entry")
        self.apply_to_subtasks = builder.get_object("apply_to_subtasks")
        self.dialog = builder.get_object("modifytags_dialog")

    def modify_tags(self, tasks):
        """ Show and run dialog for selected tasks """
        if len(tasks) == 0:
            return
        self.tasks = tasks
        # Pre-fill the entry and checkbox with the values used last time.
        self.tag_entry.set_text(self.last_tag_entry)
        self.tag_entry.grab_focus()
        self.apply_to_subtasks.set_active(self.last_apply_to_subtasks)
        # run() blocks until the dialog is closed; on_confirm fires via the
        # signal connected in _init_dialog.
        self.dialog.run()
        self.dialog.hide()
        self.tasks = []

    def on_confirm(self, widget):
        """ Apply changes """
        tags = parse_tag_list(self.tag_entry.get_text())
        # If the checkbox is checked, find all subtasks
        if self.apply_to_subtasks.get_active():
            # NOTE(review): self.tasks is appended to while being iterated,
            # so newly added subtasks are themselves scanned for subtasks —
            # this looks intentional (transitive closure) but verify.
            for task_id in self.tasks:
                task = self.req.get_task(task_id)
                # FIXME: Python does not reinitialize the default value of its
                # parameter, therefore it must be done manually. This function
                # should be refactored, as it is marked as deprecated.
                for subtask in task.get_self_and_all_subtasks(tasks=[]):
                    subtask_id = subtask.get_id()
                    if subtask_id not in self.tasks:
                        self.tasks.append(subtask_id)
        # Apply every add/remove tag operation to each selected task.
        for task_id in self.tasks:
            task = self.req.get_task(task_id)
            for tag, is_positive in tags:
                if is_positive:
                    task.add_tag(tag)
                else:
                    task.remove_tag(tag)
            task.sync()
        # Remember the last actions
        self.last_tag_entry = self.tag_entry.get_text()
        self.last_apply_to_subtasks = self.apply_to_subtasks.get_active()
|
parinporecha/backend_gtgonline
|
GTG/gtk/browser/modifytags_dialog.py
|
Python
|
gpl-3.0
| 3,656
|
import os
import argparse
import datetime
import yaml
import api.src.common.initial_environment_config
from ..models.inception import create_model
from ..data_processing.data_generator import DataGenerator
from ..common.config import TrainingConfig, DataConfig, Config
from ..common.utils import print_info, ensure_dir
from keras.callbacks import ModelCheckpoint, CSVLogger, TensorBoard, LearningRateScheduler, EarlyStopping
# Timestamp used as the run directory name under PATHS['MODELS'].
RUNNING_TIME = datetime.datetime.now().strftime("%H_%M_%d_%m_%y")


def train(num_epochs, batch_size, input_size, num_workers):
    """Train the inception model, saving weights/history/config per run.

    num_workers is accepted but currently unused in this body —
    NOTE(review): confirm whether it was meant to be passed to
    fit_generator.
    """
    if not Config.NO_SAVE:
        ensure_dir(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME))
    model = create_model()
    model.summary()
    # Callbacks all write under the run directory; skipped entirely in
    # NO_SAVE mode.
    callbacks = [
        ModelCheckpoint(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'weights.h5'), save_best_only=True, monitor=TrainingConfig.callbacks_monitor),
        CSVLogger(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'history.csv')),
        LearningRateScheduler(TrainingConfig.schedule),
        EarlyStopping(patience=12)
    ] if not Config.NO_SAVE else []
    if not Config.NO_SAVE:
        # Record a free-text changelog entry and the full config snapshot
        # alongside the run artifacts for reproducibility.
        introduced_change = input("What new was introduced?: ")
        with open(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'change.txt'), 'w') as f:
            f.write(introduced_change)
        with open(os.path.join(TrainingConfig.PATHS['MODELS'], RUNNING_TIME, 'config.yml'), 'w') as f:
            yaml.dump(list([TrainingConfig.get_config(), Config.get_config(), DataConfig.get_config()]), f, default_flow_style=False)
    optimizer = TrainingConfig.optimizer
    data_generator_train = DataGenerator(DataConfig.PATHS['TRAINING_PROCESSED_DATA'], batch_size, input_size, False)
    data_generator_valid = DataGenerator(DataConfig.PATHS['VALID_PROCESSED_DATA'], batch_size, input_size, True)
    model.compile(TrainingConfig.available_optimizers[optimizer], TrainingConfig.loss, metrics=TrainingConfig.metrics)
    # NOTE(review): samples_per_epoch/nb_epoch/nb_val_samples are Keras 1
    # style arguments — fit_generator was renamed/changed in Keras 2; confirm
    # the pinned keras version before touching.
    model.fit_generator(data_generator_train, samples_per_epoch=data_generator_train.samples_per_epoch, nb_epoch=num_epochs,
                        validation_data=data_generator_valid, nb_val_samples=data_generator_valid.samples_per_epoch,
                        callbacks=callbacks)
def main(args):
    """Run training with the parsed CLI arguments."""
    print_info("Training")
    train(args.num_epochs, args.batch_size, args.input_size, args.num_workers)
    print_info("Finished")


if __name__ == '__main__':
    # CLI: all arguments default to the values declared in the config classes.
    argparser = argparse.ArgumentParser(description='Script performing training')
    argparser.add_argument('--num_epochs', default=TrainingConfig.NB_EPOCHS, type=int, help='Number of training epochs')
    argparser.add_argument('--num_workers', type=int, default=TrainingConfig.NUM_WORKERS, help='Number of workers during training')
    argparser.add_argument('--batch_size', type=int, default=TrainingConfig.BATCH_SIZE, help='Batch size')
    argparser.add_argument('--input_size', type=int, default=Config.IMAGE_SIZE, help='Image size to input')
    arguments = argparser.parse_args()
    main(arguments)
|
kacper1095/asl-translator
|
api/src/scripts/train_inception.py
|
Python
|
gpl-3.0
| 3,051
|
from gh.base import Command
class UserFollowCommand(Command):
    """gh command: follow a user, or list who you follow."""
    name = 'user.follow'
    usage = '%prog [options] user.follow <login>'
    summary = 'Follow a user'
    subcommands = {}

    def run(self, options, args):
        opts, args = self.parser.parse_args(args)
        status = self.SUCCESS
        self.login()
        if opts.help:
            # NOTE(review): there is no return after self.help(), so the
            # command body still executes afterwards — confirm whether
            # Command.help() exits on its own in gh.base.
            self.help()
        if not args:
            # No login given: list the users the authenticated user follows.
            for u in self.gh.iter_following():
                print(u.login)
        else:
            # Follow the given login; report FAILURE if the API call refuses.
            if not self.gh.follow(args[0]):
                status = self.FAILURE
        return status


# Instantiating registers the command with the CLI framework.
UserFollowCommand()
|
sigmavirus24/github-cli
|
gh/commands/user/follow.py
|
Python
|
gpl-3.0
| 615
|
# -*- coding: utf-8 -*-
# -*- Channel Xdede -*-
# -*- Created for Alfa-addon -*-
# -*- By the Alfa Develop Group -*-
from builtins import range
import sys

# Py2/Py3 compatibility shim: alias the removed Py2 builtins on Python 3.
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int

import re

from channelselector import get_thumb
from core import httptools
from core import jsontools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from channels import filtertools
from channels import autoplay
from platformcode import config, logger

#unify = config.get_setting('unify')

# Site language codes -> labels used by filtertools/autoplay.
IDIOMAS = {'1':'CAST', '2':'LAT', '3':'VOSE', '4':'VO'}
list_language = list(IDIOMAS.values())
# Qualities and servers offered to the autoplay/filter subsystems.
list_quality = ['Oficial', '1080p', '720p', '480p', '360p']
list_servers = ['fembed', 'vidcloud','clipwatching', 'gamovideo']
# Primary site and the mirror that hosts the official players.
host = 'https://movidy.co/'
host2 = 'https://wmovies.co/'
def mainlist(item):
    """Build the channel's root menu (movies, series, anime, collections,
    search and channel settings)."""
    logger.info()
    autoplay.init(item.channel, list_servers, list_quality)

    menu = [
        Item(channel=item.channel, title='Peliculas', action='sub_menu', type='peliculas',
             thumbnail=get_thumb('movies', auto=True)),
        Item(channel=item.channel, title='Series', action='sub_menu', type='series',
             thumbnail=get_thumb('tvshows', auto=True)),
        Item(channel=item.channel, title='Animes', action='sub_menu', type='animes',
             thumbnail=get_thumb('anime', auto=True)),
        Item(channel=item.channel, title='Colecciones', action='list_collections',
             url=host + 'listas', thumbnail=get_thumb('colections', auto=True)),
        Item(channel=item.channel, title='Buscar...', action="search",
             url=host + 'search?go=', thumbnail=get_thumb("search", auto=True)),
        Item(channel=item.channel,
             title="Configurar Canal...",
             text_color="turquoise",
             action="settingCanal",
             thumbnail=get_thumb('setting_0.png'),
             url='',
             fanart=get_thumb('setting_0.png')),
    ]

    autoplay.show_option(item.channel, menu)
    return menu
def sub_menu(item):
    """Build the per-type submenu (estrenos, novedades, filters) for
    peliculas/series/animes."""
    logger.info()
    itemlist = []
    # Movies use an 'estrenos' listing; series/anime use 'novedades' for
    # new episodes.
    url_estreno = host + item.type + '/novedades'
    if item.type == 'peliculas':
        url_estreno = host + item.type + '/estrenos'
        itemlist.append(Item(channel=item.channel, title='Estrenos', url=url_estreno, action='list_all',
                             thumbnail=get_thumb('estrenos', auto=True), type=item.type))
    else:
        itemlist.append(Item(channel=item.channel, title='Nuevos Capitulos', url=url_estreno, action='list_all',
                             thumbnail=get_thumb('new episodes', auto=True), type=item.type))
    itemlist.append(Item(channel=item.channel, title='Novedades', url=host+item.type, action='list_all',
                         thumbnail=get_thumb('newest', auto=True), type=item.type))
    itemlist.append(Item(channel=item.channel, title='Actualizadas', url=host+'/actualizado/'+item.type,
                         action='list_all', thumbnail=get_thumb('updated', auto=True), type=item.type))
    itemlist.append(Item(channel=item.channel, title='Mejor Valoradas', url=host+item.type+'/mejor-valoradas',
                         action='list_all', thumbnail=get_thumb('more voted', auto=True), type=item.type))
    # The two filter entries are resolved by section() based on the title.
    itemlist.append(Item(channel=item.channel, title='Genero', action='section',
                         thumbnail=get_thumb('genres', auto=True), type=item.type))
    itemlist.append(Item(channel=item.channel, title='Por Año', action='section',
                         thumbnail=get_thumb('year', auto=True), type=item.type))
    return itemlist
def settingCanal(item):
    """Open this channel's settings dialog and refresh the listing."""
    from platformcode import platformtools
    platformtools.show_channel_settings()
    platformtools.itemlist_refresh()
    return
def get_source(url, referer=None):
    """Download *url* and return its HTML with whitespace noise removed.

    :param referer: optional Referer header value.
    """
    logger.info()
    if referer is None:
        data = httptools.downloadpage(url).data
    else:
        data = httptools.downloadpage(url, headers={'Referer':referer}).data
    # Collapse newlines/tabs/<br>/multi-spaces so the scraping regexes can
    # work on a single line.
    # NOTE(review): one alternative in this pattern looks like a literal
    # space (possibly a mangled &nbsp;/non-breaking space) — verify against
    # the upstream repo before editing this regex.
    data = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data)
    return data
def section(item):
    """Build the 'Genero' (genre) or 'Por Año' (year) filter submenu.

    Which menu is built depends on item.title; both link to list_all with a
    site filter URL.
    """
    logger.info()
    itemlist = []
    if 'Genero' in item.title:
        # Bug fix: this was a set literal, so the genre menu order was
        # non-deterministic between runs; a tuple keeps the declared order.
        # NOTE(review): 'Wester' looks like a typo for 'Western', but it is
        # the literal filter value sent to the site — left untouched.
        list_genre = ("Acción", "Animación", "Bélica", "Ciencia ficción",
                      "Comedia", "Crimen", "Drama", "Familia", "Misterio",
                      "Música", "Romance", "Suspense", "Terror", "Wester")
        for name in list_genre:
            url = '%s%s/filtro/%s,/,' % (host, item.type, name)
            itemlist.append(Item(channel=item.channel, url=url, title=name,
                                 action='list_all', type=item.type))
    else:
        # Offer the last 20 years (next year included); fall back to a
        # fixed year if the clock is unavailable.
        try:
            import datetime
            now = datetime.datetime.now()
            c_year = now.year + 1
        except:
            c_year = 2020
        l_year = c_year - 19
        year_list = list(range(l_year, c_year))
        for year in year_list:
            year = str(year)
            url = '%s%s/filtro/,/%s,' % (host, item.type, year)
            itemlist.append(Item(channel=item.channel, title=year, url=url,
                                 action="list_all", type=item.type))
        # Newest year first, plus a free-form year entry.
        itemlist.reverse()
        itemlist.append(Item(channel=item.channel, title='Introduzca otro año...', url='',
                             action="year_cus", type=item.type))
    return itemlist
def year_cus(item):
    """Ask the user for a 4-digit year and list titles filtered by it.

    Returns None (no listing) when the dialog is cancelled or the input is
    not exactly 4 digits.
    """
    from platformcode import platformtools
    heading = 'Introduzca Año (4 digitos)'
    year = platformtools.dialog_numeric(0, heading, default="")
    item.url = '%s%s/filtro/,/%s,' % (host, item.type, year)
    item.action = "list_all"
    if year and len(year) == 4:
        return list_all(item)
def list_all(item):
    """Scrape a listing page and return movie/series Items, with pagination.

    Movies go straight to findvideos; series go to seasons unless the URL
    already pins a season x episode.
    """
    logger.info()
    itemlist = []
    data = get_source(item.url)
    # One match per card: url, extra info blob, thumbnail, title.
    patron = '<article class="Cards.*?href="([^"]+)"(.*?)<img.*?'
    patron += 'data-echo="([^"]+)" alt="([^"]+)"'
    matches = re.compile(patron, re.DOTALL).findall(data)
    for scrapedurl, scrapedinfo, scrapedthumbnail, scrapedtitle in matches:
        title = scrapedtitle
        # Strip a trailing "(...)" (usually the year) for the content title.
        scrapedtitle = re.sub(r' \((.*?)\)$', '', scrapedtitle)
        thumbnail = scrapedthumbnail.strip()
        url = scrapedurl
        tmdb_id = scrapertools.find_single_match(url, r'/\w(\d+)-')
        # Swap the sized poster for the original-resolution one.
        thumbnail = re.sub(r'p/w\d+', 'p/original', thumbnail)
        # if item.type == 'search':
        #     s_title = scrapertools.find_single_match(url, host+'(\w+)')
        #     if not unify:
        #         title += ' [COLOR grey][I](%s)[/I][/COLOR]' % s_title.capitalize()[:-1]
        new_item = Item(channel=item.channel,
                        title=title,
                        url=url,
                        thumbnail=thumbnail,
                        infoLabels={'tmdb_id':tmdb_id})
        if item.type == 'peliculas' or 'peliculas' in url:
            new_item.action = 'findvideos'
            new_item.contentTitle = scrapedtitle
            # type doubles as a sort key for search results (movies last).
            new_item.type = 1
            calidad_baja = scrapertools.find_single_match(scrapedinfo, r'>(\w+\s\w+)</div>$')
            if calidad_baja:
                new_item.quality = 'Baja'
        else:
            new_item.action = 'seasons'
            new_item.contentSerieName = scrapedtitle
            new_item.type = 0
            # A trailing "3x12"-style suffix means the card links a single
            # episode: go straight to findvideos.
            sesxep = scrapertools.find_single_match(url, r'/(\d+x\d+)$')
            if sesxep:
                new_item.title += ' '+sesxep
                new_item.action = 'findvideos'
        itemlist.append(new_item)
    tmdb.set_infoLabels(itemlist, seekTmdb=True)
    if item.type == 'search':
        itemlist.sort(key=lambda i: (i.type, i.title))
    # Paginación
    url_next_page = scrapertools.find_single_match(data,'<a href="([^"]+)" up-target="body">Pagina s')
    if url_next_page:
        itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='list_all', text_color='gold'))
    return itemlist
def list_collections(item):
    """Scrape the user-made collection lists ('listas') page, paginated."""
    logger.info()
    itemlist = []
    data = get_source(item.url)
    patron = '<article>(.*?)href="([^"]+)".*?<h2>([^<]+)</h2><p>([^<]+)</p>'
    matches = re.compile(patron, re.DOTALL).findall(data)
    for thumb, url, title, plot in matches:
        thumbnail = scrapertools.find_single_match(thumb, '<img src="([^"]+)">')
        if thumbnail:
            thumb = re.sub('p/w\d+', 'p/original', thumbnail)
        else:
            # Placeholder image for collections without artwork.
            thumb = 'https://i.imgur.com/P4g4aW2.png'
        itemlist.append(Item(channel=item.channel, action='list_all', title=title, url=url, thumbnail=thumb, plot=plot))
    url_next_page = scrapertools.find_single_match(data, '<link rel="next" href="([^"]+)"')
    if url_next_page:
        itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='list_collections'))
    return itemlist
def seasons(item):
    """List the seasons of a series plus the add-to-videolibrary entry."""
    logger.info()
    itemlist = []
    data = get_source(item.url)
    patron = "activeSeason\(this,'temporada-(\d+)'"
    matches = re.compile(patron, re.DOTALL).findall(data)
    infoLabels = item.infoLabels
    for season in matches:
        # NOTE(review): the same infoLabels dict object is reused and
        # mutated for every Item — this only works if Item copies it;
        # verify core.item.Item semantics before refactoring.
        infoLabels['season']=season
        title = 'Temporada %s' % season
        itemlist.append(Item(channel=item.channel, title=title, url=item.url, action='episodesxseasons',
                             infoLabels=infoLabels))
    tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)
    if config.get_videolibrary_support() and len(itemlist) > 0:
        itemlist.append(
            Item(channel=item.channel, title='[COLOR yellow]Añadir esta serie a la videoteca[/COLOR]', url=item.url,
                 action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName))
    return itemlist
def episodios(item):
    """Return every episode of the series, gathered season by season."""
    logger.info()
    full_list = []
    for season_item in seasons(item):
        full_list.extend(episodesxseasons(season_item))
    return full_list
def episodesxseasons(item):
    """List the episodes of one season (item.infoLabels['season'])."""
    logger.info()
    itemlist = []
    infoLabels = item.infoLabels
    data=get_source(item.url)
    # Narrow the page to this season's block before matching episodes.
    pat = '<div class="season temporada-%s(.*?)</a></li></div>' % item.infoLabels['season']
    data = scrapertools.find_single_match(data, pat)
    patron= '<li ><a href="([^"]+)"(.*?)'
    patron += r'<h2>([^>]+)</h2>.*?<span>\d+ - (\d+)</span>'
    matches = re.compile(patron, re.DOTALL).findall(data)
    for url, sthumbnail, scrapedtitle, ep in matches:
        thumbnail = scrapertools.find_single_match(sthumbnail, 'data-echo="([^"]+)"')
        thumb = re.sub(r'p/w\d+', 'p/original', thumbnail)
        # NOTE(review): infoLabels is the same dict object for every Item —
        # relies on Item copying it (same pattern as seasons()).
        infoLabels['episode'] = ep
        title = '%sx%s - %s' % (infoLabels['season'], infoLabels['episode'], scrapedtitle)
        itemlist.append(Item(channel=item.channel, title=title, url=url, action='findvideos',
                             infoLabels=infoLabels, thumbnail=thumb))
    tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)
    return itemlist
def findvideos(item):
    """Collect playable links: official players from the wmovies mirror
    first, then (optionally) user-submitted links, sorted and filtered."""
    logger.info()
    itemlist = []
    itemlist2 = []
    headers = {'Referer': item.url}
    # 0 means "show user links" (channel setting).
    users_links = config.get_setting('show_users_links', 'movidy')
    # Map site server labels to the names servertools knows.
    server_l = {'waaw': 'netu', 'powvldeo': 'powvideo', 'beta': 'directo'
                }
    data = get_source(item.url)
    # Official players live in an iframe on the wmovies mirror; its id is
    # posted to the /cpt endpoint to get the player list.
    ref_ = scrapertools.find_single_match(data, r'<iframe src="(https://wmovies.co/ifr/\w\d+)"')
    s_id = scrapertools.find_single_match(ref_, 'ifr/(\w\d+)')
    if s_id:
        import requests
        url = host2+"cpt"
        header = {'Referer': ref_}
        session = requests.Session()
        page = session.post(url, data={'type': '1', 'id': s_id}, headers=header).json()
        if page.get('status', 0) == 200:
            data2 = page['data']
            data2 = re.sub(r'\n|\r|\t| |<br>|\s{2,}', "", data2)
            # OD_<n> groups links by language code n (see IDIOMAS).
            patron = r'<div class="OD_(\d+)(.*?)</div>'
            patron1 = r"onclick=\"go_to_player\('([^']+)'.*?<span>([^<]+)"
            matches = re.compile(patron, re.DOTALL).findall(data2)
            for language, info in matches:
                lang = IDIOMAS.get(language, 'VO')
                matches1 = re.compile(patron1, re.DOTALL).findall(info)
                for url, serv in matches1:
                    if 'google' in serv.lower():
                        continue
                    url = host2+url
                    quality = 'Oficial'
                    title = '%s [%s]' % (serv.capitalize(), lang)
                    serv = server_l.get(serv.lower(), serv.lower())
                    # Large negative sort key so official links rank first.
                    p_quality = -2000
                    itemlist.append(Item(channel=item.channel, title=title, url=url, action='play', language=lang,
                                         quality=quality, headers=header, infoLabels=item.infoLabels,
                                         p_lang=language, p_quality=p_quality, server=serv))
    if users_links == 0:
        # User-submitted links section of the page.
        data = scrapertools.find_single_match(data, '<div class="linksUsers">(.*?)</html>')
        patron = '<li><a href="([^"]+)".*?<img.*?>([^<]+)<b>([^<]+)<.*?src="([^"]+)"'
        matches = re.compile(patron, re.DOTALL).findall(data)
        for url, server, quality, language in matches:
            if '/sc_' in url:
                continue
            if url != '':
                try:
                    # Site obfuscates server names with '1' for 'l'.
                    server = server.split(".")[0].replace('1', 'l')
                except:
                    continue
                # _id = scrapertools.find_single_match(url, r'link/\w+(.*)')
                server = server_l.get(server.lower(), server)
                # if not url.startswith(host):
                #     url = url % _id
                language = scrapertools.find_single_match(language, r'/(\d+)\.png')
                lang = IDIOMAS.get(language, 'VO')
                title = '%s [%s] [%s]' % (server.capitalize(), lang, quality)
                try:
                    # Negative so higher resolutions sort first.
                    p_quality = -int(quality.replace('p', ''))
                except:
                    p_quality = 0
                itemlist2.append(Item(channel=item.channel, title=title, url=url, action='play', language=lang,
                                      quality=quality, server=server.lower(), headers=headers, infoLabels=item.infoLabels,
                                      p_lang=language, p_quality=p_quality, _type="user"))
    itemlist.extend(itemlist2)
    itemlist.sort(key=lambda i: (i.p_lang, i.p_quality, i.server))
    # if not itemlist:
    #     itemlist.append(Item(channel=item.channel, folder=False, text_color='tomato',
    #                          title='[I] Aún no hay enlaces disponibles [/I]'))
    #     return itemlist
    itemlist = filtertools.get_links(itemlist, item, list_language)
    # Requerido para AutoPlay
    autoplay.start(itemlist, item)
    return itemlist
def play(item):
    """Resolve a wmovies link into a final playable URL.

    User links redirect through a 'Go_V2' page; official links go through
    the get-player endpoint (following one redirect, or scraping the
    'file' URL out of the player page).
    NOTE(review): nesting reconstructed from a whitespace-stripped dump —
    verify against upstream before restructuring.
    """
    logger.info()
    itemlist = []
    # Non-wmovies URLs are already final: hand the item back untouched.
    if not item.url.startswith(host2):
        return item
    url = item.url
    header = item.headers
    item.server = ''
    if item._type == 'user':
        req = httptools.downloadpage(url, headers=header).data
        url = scrapertools.find_single_match(req, 'Go_V2" href="([^"]+)"')
    else:
        if 'procesador-servidores' in url:
            req = httptools.downloadpage(url, headers=header).json
            data = req.get("data")
            if req.get('total'):
                data = req.get("data").get("data")
            if 'beta' in item.title.lower():
                item.server = 'directo'
            url = "%sget-player/%s" % (host2, data)
        req = httptools.downloadpage(url, headers=header, follow_redirects=False)
        location = req.headers.get('location', None)
        if location:
            url = location
        else:
            new_data = req.data.replace("'", '"')
            url = scrapertools.find_single_match(new_data, 'file": "([^"]+)"')
    if url:
        item.url = url.replace("isthebest.rest", ".com")
        itemlist.append(item.clone())
    itemlist = servertools.get_servers_itemlist(itemlist)
    return itemlist
def search(item, texto):
    """Channel search entry point: append the query to item.url and list
    the results via list_all; empty query or scrape failure yields []."""
    logger.info()
    query = texto.replace(" ", "+")
    item.url = item.url + query
    item.type = 'search'
    if not query:
        return []
    try:
        return list_all(item)
    except:
        # Log the full exception info and fail soft with an empty listing.
        import sys
        for line in sys.exc_info():
            logger.error("{0}".format(line))
        return []
def newest(categoria):
    """Global 'novedades' hook: return the newest movie items for the given
    category, dropping the pagination entry; [] on any failure."""
    logger.info()
    itemlist = []
    item = Item()
    try:
        if categoria in ['peliculas']:
            item.url = host + 'peliculas'
        elif categoria == 'infantiles':
            item.url = host + 'peliculas/filtro/Animación,/,'
        elif categoria == 'terror':
            item.url = host + 'peliculas/filtro/Terror,/,'
        item.type='peliculas'
        itemlist = list_all(item)
        # Strip the "next page" pseudo-item appended by list_all.
        if itemlist[-1].title == 'Siguiente >>':
            itemlist.pop()
    except:
        # Best-effort: log the exception and return nothing.
        import sys
        for line in sys.exc_info():
            logger.error("{0}".format(line))
        return []
    return itemlist
|
alfa-addon/addon
|
plugin.video.alfa/channels/movidy.py
|
Python
|
gpl-3.0
| 17,665
|
# -*- coding: utf-8 -*-
from django.db import models
from tasks.models import Link
from tasks.constants import LINK_STATUSES
class CrawlerRecord(models.Model):
    """ Crawler visit record for Link """
    # The crawled link this record belongs to.
    link = models.ForeignKey(Link)
    # When the crawler visited the link.
    visited_at = models.DateTimeField()
    # Raw HTTP response code (e.g. 200, 404).
    http_status = models.PositiveSmallIntegerField()
    # Crawl outcome; one of LINK_STATUSES.
    status = models.CharField(max_length=32, choices=LINK_STATUSES)

    def __unicode__(self):
        # Bug fix: %d was fed self.status (a CharField string), which raises
        # TypeError; the numeric http_status is what belongs there.
        return u'HTTP %d: %s' % (self.http_status, self.get_status_display())
|
alexanderad/Super-Cow-Task-Force
|
crawler/models.py
|
Python
|
gpl-3.0
| 499
|
#!/usr/bin/python3
from abc import ABCMeta, abstractmethod
class Generator(metaclass=ABCMeta):
    """Abstract parent class for all UML parsers/generators.

    Bug fix: the original set ``__metaclass__ = ABCMeta`` (Python 2 syntax),
    which is silently ignored on Python 3 (the file's shebang is
    /usr/bin/python3), so @abstractmethod was never enforced and the class
    could be instantiated. The metaclass keyword restores the intended
    abstract behavior.
    """

    @abstractmethod
    def generate(self):
        """Produce output for the parsed diagram; subclasses must override."""
        pass
|
Rihorama/dia2code
|
src/dia2code/parents/generator.py
|
Python
|
gpl-3.0
| 227
|
# NOTE: DO NOT UPDATE THIS FILE - Create a new views_common.py and place any views there if you need it. then reference
# your new one in the urls.py file (which should be edited by you)
import os
# import imp
import logging
import mimetypes
import importlib.util
from django.conf import settings
from django.template import Context, Template, Origin
from django.http import HttpResponse, FileResponse
from django.views.decorators.csrf import csrf_protect
from django.shortcuts import redirect
log = logging.getLogger("docroot.views")
# This view is called from DocrootFallbackMiddleware.process_response
# when a 404 is raised and we are not working with a template (we want to look for a static file in the docroot)
def static(request):
    """Serve a static file from docroot/files, or return None if absent.

    Called from DocrootFallbackMiddleware.process_response when a 404 is
    raised and the request is not for a template.
    """
    base_dir = getattr(settings, "BASE_DIR", "")
    log.debug("base: " + base_dir)
    rel_path = request.path_info
    if rel_path.startswith("/"):
        rel_path = rel_path[1:]
    log.debug("path: " + rel_path)
    target = os.path.join(base_dir, "docroot/files", rel_path)
    log.debug("file: " + target)
    if not os.path.isfile(target):
        # No matching file: let the 404 stand.
        return None
    log.debug("found static file: " + target)
    log.debug("downloading...")
    return FileResponse(open(target, 'rb'),
                        content_type=mimetypes.guess_type(rel_path)[0])
# This view is called from DocrootFallbackMiddleware.process_response
# when a 404 is raised, which often means CsrfViewMiddleware.process_view
# has not been called even if CsrfViewMiddleware is installed. So we need
# to use @csrf_protect, in case the template needs {% csrf_token %}.
# However, we can't just wrap this view; if no matching page exists,
# or a redirect is required for authentication, the 404 needs to be returned
# without any CSRF checks. Therefore, we only
# CSRF protect the internal implementation (render page).
def page(request):
    """
    We are going to try to look for a template a couple of ways based on the request; if nothing is found we return None
    and things proceed like never called. If found we return the response it should return instead
    """
    # NOTE(review): the original comment here was truncated ("for perfonmance
    # reasons and because we want to have our webserver ...") -- intent unclear.
    base=getattr(settings, "BASE_DIR", "")
    log.debug("base: " + base)
    path = request.path_info
    if path.startswith("/"):
        # Strip the leading slash so os.path.join treats it as relative.
        path = path[1:]
    file=os.path.join(base, "docroot/files", path)
    log.debug("file: " + file)
    # url: candidate template file on disk; module_name: name used when
    # importing the optional companion data module; template_name: name
    # recorded on the Template object.
    url = file
    module_name = path
    template_name = path
    template = None
    # if the url ends in .html then try to load a corresponding template from the docroot/files directory
    if url.endswith(".html"):
        # our url will request .html but we want to look for a .dt file (required for template processing):
        # swap the trailing "html" for "dt" (the dot is kept by the -4 slice).
        url = url[:-4]
        url += "dt"
        template_name = template_name[:-4]
        template_name += "dt"
        if os.path.isfile(url):
            log.debug("found file: " + url)
        else:
            url = None
    elif url.endswith('/'):
        # Directory-style request: look for an index.dt inside it.
        url += "index.dt"
        if os.path.isfile(url):
            log.debug("found file: " + url)
            module_name += "index.html"
            template_name += "index.dt"
        else:
            url = None
    else:
        # Extensionless request: try "<path>.dt" directly.
        url += ".dt"
        if os.path.isfile(url):
            log.debug("found file: " + url)
            module_name += ".html"
            template_name += ".dt"
        else:
            url = None
    if url:
        log.debug("opening file: " + url)
        fp = open(url)
        log.debug("loading template...")
        template = Template(fp.read(), Origin(url), template_name)
        log.debug("closing file")
        fp.close()
    if template:
        log.debug("attempting to load context and render the template...")
        return render_page(request, template, module_name)
    else:
        # Nothing matched: middleware proceeds as if never called.
        return None
@csrf_protect
def render_page(request, template, module_name):
    """
    Internal interface to the dev page view.

    Renders *template*, optionally building its context from a companion
    "<template>.data.py" file next to the template.  The data module may
    supply either a ``get_context(request)`` callable or a ``context``
    attribute; otherwise an empty context is used.
    """
    context = {}
    log.debug("template name: " + template.name)
    log.debug("module_name: " + module_name)
    datafile_name = template.origin.name
    # strip off the trailing "dt" and try "data.py" instead, so
    # "foo.dt" becomes "foo.data.py"
    if datafile_name.endswith('dt'):
        datafile_name = datafile_name[0:len(datafile_name) - 2]
        datafile_name += 'data.py'
        log.debug("datafilename: " + datafile_name)
    # else:
    #     datafile_name += '.data.py'
    # try to load a data file if it is there in order to get the context
    # all data files should support get_context() or a context property
    try:
        log.debug("attempting to load data_file...")
        spec = importlib.util.spec_from_file_location(module_name, datafile_name)
        data = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(data)
        # datafile = imp.load_source(module_name, datafile_name)
        # note changing datafile below to data
    except Exception:
        # Missing or broken data file: fall back to an empty context.
        data = None
    if data:
        try:
            initmethod = getattr(data, 'get_context')
        except AttributeError:
            initmethod = None
        if initmethod:
            # Callable form wins over the plain attribute.
            context = initmethod(request)
        else:
            try:
                context = getattr(data, 'context')
            except AttributeError:
                context = {}
    log.debug("context string: " + str(context))
    response = HttpResponse(template.render(Context(context)))
    return response
|
sstacha/uweb-vagrant
|
files/docroot/docroot_views.py
|
Python
|
gpl-3.0
| 5,398
|
from __future__ import absolute_import
import re
from collections import namedtuple
from ..exceptions import LocationParseError
from ..packages import six
url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
# We only want to normalize urls with an HTTP(S) scheme.
# urllib3 infers URLs without a scheme (None) to be http.
NORMALIZABLE_SCHEMES = ("http", "https", None)
# Almost all of these patterns were derived from the
# 'rfc3986' module: https://github.com/python-hyper/rfc3986
PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
URI_RE = re.compile(
r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
r"(?://([^/?#]*))?"
r"([^?#]*)"
r"(?:\?([^#]*))?"
r"(?:#(.*))?$",
re.UNICODE | re.DOTALL,
)
IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
HEX_PAT = "[0-9A-Fa-f]{1,4}"
LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
_variations = [
# 6( h16 ":" ) ls32
"(?:%(hex)s:){6}%(ls32)s",
# "::" 5( h16 ":" ) ls32
"::(?:%(hex)s:){5}%(ls32)s",
# [ h16 ] "::" 4( h16 ":" ) ls32
"(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
# [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
"(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
# [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
"(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
# [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
"(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
# [ *4( h16 ":" ) h16 ] "::" ls32
"(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
# [ *5( h16 ":" ) h16 ] "::" h16
"(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
# [ *6( h16 ":" ) h16 ] "::"
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
TARGET_RE = re.compile(r"^(/[^?]*)(?:\?([^#]+))?(?:#(.*))?$")
IPV4_RE = re.compile("^" + IPV4_PAT + "$")
IPV6_RE = re.compile("^" + IPV6_PAT + "$")
IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
SUBAUTHORITY_PAT = (u"^(?:(.*)@)?" u"(%s|%s|%s)" u"(?::([0-9]{0,5}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
)
SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
ZONE_ID_CHARS = set(
"ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" "0123456789._!-"
)
USERINFO_CHARS = ZONE_ID_CHARS | set("$&'()*+,;=:")
PATH_CHARS = USERINFO_CHARS | {"@", "/"}
QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
class Url(namedtuple("Url", url_attrs)):
    """
    Data structure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`. Both the scheme and host are normalized as they are
    both case-insensitive according to RFC 3986.
    """

    __slots__ = ()

    def __new__(
        cls,
        scheme=None,
        auth=None,
        host=None,
        port=None,
        path=None,
        query=None,
        fragment=None,
    ):
        # Normalize on construction: non-empty paths are rooted with '/',
        # schemes are lower-cased.
        if path and not path.startswith("/"):
            path = "/" + path
        if scheme is not None:
            scheme = scheme.lower()
        return super(Url, cls).__new__(
            cls, scheme, auth, host, port, path, query, fragment
        )

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or "/"
        if self.query is not None:
            uri += "?" + self.query
        return uri

    @property
    def netloc(self):
        """Network location including host and port"""
        if self.port:
            return "%s:%d" % (self.host, self.port)
        return self.host

    @property
    def url(self):
        """
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ... '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        """
        scheme, auth, host, port, path, query, fragment = self
        url = u""
        # We use "is not None" we want things to happen with empty strings (or 0 port)
        if scheme is not None:
            url += scheme + u"://"
        if auth is not None:
            url += auth + u"@"
        if host is not None:
            url += host
        if port is not None:
            url += u":" + str(port)
        if path is not None:
            url += path
        if query is not None:
            url += u"?" + query
        if fragment is not None:
            url += u"#" + fragment
        return url

    def __str__(self):
        return self.url
def split_first(s, delims):
    """
    .. deprecated:: 1.25

    Split *s* at the earliest occurrence of any delimiter in *delims*.

    Returns a 3-tuple ``(head, tail, delim)`` where *delim* is the matched
    delimiter.  If no delimiter is found, the first part is the full input
    string: ``(s, "", None)``.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    hits = [(s.find(d), d) for d in delims]
    hits = [(pos, d) for pos, d in hits if pos >= 0]
    if not hits:
        return s, "", None
    # min() with a key returns the first minimal element, which matches the
    # original "earliest position wins, ties broken by delims order" rule.
    pos, delim = min(hits, key=lambda hit: hit[0])
    return s[:pos], s[pos + 1 :], delim
def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
    """Percent-encodes a URI component without reapplying
    onto an already percent-encoded component.

    :param component: the component string (``None`` is returned unchanged)
    :param allowed_chars: set of characters that must not be encoded
    :param encoding: codec used to decode the resulting byte sequence
    """
    if component is None:
        return component
    component = six.ensure_text(component)
    # Try to see if the component we're encoding is already percent-encoded
    # so we can skip all '%' characters but still encode all others.
    percent_encodings = PERCENT_RE.findall(component)
    # Normalize existing percent-encoded bytes to uppercase hex.
    for enc in percent_encodings:
        if not enc.isupper():
            component = component.replace(enc, enc.upper())
    uri_bytes = component.encode("utf-8", "surrogatepass")
    # Fully percent-encoded only if every '%' belongs to a valid %XX escape.
    is_percent_encoded = len(percent_encodings) == uri_bytes.count(b"%")
    encoded_component = bytearray()
    for i in range(0, len(uri_bytes)):
        # Will return a single character bytestring on both Python 2 & 3
        byte = uri_bytes[i : i + 1]
        byte_ord = ord(byte)
        # Pass through '%' of existing escapes and any allowed ASCII char.
        if (is_percent_encoded and byte == b"%") or (
            byte_ord < 128 and byte.decode() in allowed_chars
        ):
            encoded_component.extend(byte)
            continue
        # Everything else becomes an uppercase, zero-padded %XX escape.
        encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
    return encoded_component.decode(encoding)
def _remove_path_dot_segments(path):
# See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
segments = path.split("/") # Turn the path into a list of segments
output = [] # Initialize the variable to use to store output
for segment in segments:
# '.' is the current directory, so ignore it, it is superfluous
if segment == ".":
continue
# Anything other than '..', should be appended to the output
elif segment != "..":
output.append(segment)
# In this case segment == '..', if we can, we should pop the last
# element
elif output:
output.pop()
# If the path starts with '/' and the output is empty or the first string
# is non-empty
if path.startswith("/") and (not output or output[0]):
output.insert(0, "")
# If the path starts with '/.' or '/..' ensure we add one more empty
# string to add a trailing '/'
if path.endswith(("/.", "/..")):
output.append("")
return "/".join(output)
def _normalize_host(host, scheme):
    """Lower-case and normalize *host* for normalizable (http/https/None) schemes.

    IPv6 addresses get their zone id re-encoded; registered names are
    IDNA-encoded label by label; IPv4 literals pass through lower-cased.
    """
    if host:
        if isinstance(host, six.binary_type):
            host = six.ensure_str(host)
        if scheme in NORMALIZABLE_SCHEMES:
            is_ipv6 = IPV6_ADDRZ_RE.match(host)
            if is_ipv6:
                match = ZONE_ID_RE.search(host)
                if match:
                    start, end = match.span(1)
                    zone_id = host[start:end]
                    # Strip the '%25' (percent-encoded '%') or bare '%'
                    # delimiter before re-encoding the zone id itself.
                    if zone_id.startswith("%25") and zone_id != "%25":
                        zone_id = zone_id[3:]
                    else:
                        zone_id = zone_id[1:]
                    zone_id = "%" + _encode_invalid_chars(zone_id, ZONE_ID_CHARS)
                    return host[:start].lower() + zone_id + host[end:]
                else:
                    return host.lower()
            elif not IPV4_RE.match(host):
                # Registered name: IDNA-encode each dot-separated label.
                return six.ensure_str(
                    b".".join([_idna_encode(label) for label in host.split(".")])
                )
    return host
def _idna_encode(name):
    """Encode a single host label to ASCII bytes, via IDNA for non-ASCII names.

    :raises LocationParseError: when the optional 'idna' package is missing
        or the label is not valid IDNA.
    """
    if name and any([ord(x) > 128 for x in name]):
        try:
            # Optional third-party dependency, imported lazily.
            import idna
        except ImportError:
            six.raise_from(
                LocationParseError("Unable to parse URL without the 'idna' module"),
                None,
            )
        try:
            return idna.encode(name.lower(), strict=True, std3_rules=True)
        except idna.IDNAError:
            six.raise_from(
                LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
            )
    # Pure-ASCII labels are simply lower-cased and byte-encoded.
    return name.lower().encode("ascii")
def _encode_target(target):
    """Percent-encodes a request target so that there are no invalid characters

    Splits the target into path, query and fragment via TARGET_RE, encodes
    each component with its allowed character set, then reassembles them.
    Non-origin-form targets (not starting with '/') pass through untouched.
    """
    if not target.startswith("/"):
        return target

    path, query, fragment = TARGET_RE.match(target).groups()
    target = _encode_invalid_chars(path, PATH_CHARS)
    query = _encode_invalid_chars(query, QUERY_CHARS)
    fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
    if query is not None:
        target += "?" + query
    if fragment is not None:
        # Bug fix: the original appended the target to itself here
        # ("#" + target) instead of appending the encoded fragment.
        target += "#" + fragment
    return target
def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.
    This parser is RFC 3986 compliant.

    The parser logic and helper functions are based heavily on
    work done in the ``rfc3986`` module.

    :param str url: URL to parse into a :class:`.Url` namedtuple.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    """
    if not url:
        # Empty
        return Url()
    source_url = url
    # Prepend '//' so schemeless inputs like 'google.com:80' parse as an
    # authority instead of 'google.com' being mistaken for a scheme.
    if not SCHEME_RE.search(url):
        url = "//" + url
    try:
        scheme, authority, path, query, fragment = URI_RE.match(url).groups()
        # Only normalize components for http(s)/schemeless URLs.
        normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
        if scheme:
            scheme = scheme.lower()
        if authority:
            auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
            if auth and normalize_uri:
                auth = _encode_invalid_chars(auth, USERINFO_CHARS)
            # 'host.com:' (blank port) is equivalent to no port at all.
            if port == "":
                port = None
        else:
            auth, host, port = None, None, None
        if port is not None:
            port = int(port)
            if not (0 <= port <= 65535):
                raise LocationParseError(url)
        host = _normalize_host(host, scheme)
        if normalize_uri and path:
            path = _remove_path_dot_segments(path)
            path = _encode_invalid_chars(path, PATH_CHARS)
        if normalize_uri and query:
            query = _encode_invalid_chars(query, QUERY_CHARS)
        if normalize_uri and fragment:
            fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
    except (ValueError, AttributeError):
        # Regex non-matches surface as AttributeError (None.groups()), bad
        # ports as ValueError; both are reported as LocationParseError.
        return six.raise_from(LocationParseError(source_url), None)
    # For the sake of backwards compatibility we put empty
    # string values for path if there are any defined values
    # beyond the path in the URL.
    # TODO: Remove this when we break backwards compatibility.
    if not path:
        if query is not None or fragment is not None:
            path = ""
        else:
            path = None
    # Ensure that each part of the URL is a `str` for
    # backwards compatibility.
    if isinstance(url, six.text_type):
        ensure_func = six.ensure_text
    else:
        ensure_func = six.ensure_str

    def ensure_type(x):
        return x if x is None else ensure_func(x)

    return Url(
        scheme=ensure_type(scheme),
        auth=ensure_type(auth),
        host=ensure_type(host),
        port=port,
        path=ensure_type(path),
        query=ensure_type(query),
        fragment=ensure_type(fragment),
    )
def get_host(url):
    """
    Deprecated. Use :func:`parse_url` instead.
    """
    parsed = parse_url(url)
    scheme = parsed.scheme if parsed.scheme else "http"
    return scheme, parsed.hostname, parsed.port
|
KnoxMakers/KM-Laser
|
extensions/km_deps/urllib3/util/url.py
|
Python
|
gpl-3.0
| 14,192
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Autor: Flores Facundo
# Año: 2016
# Licencia: GNU/GPL V3 http://www.gnu.org/copyleft/gpl.html
# Estado: Produccion
from traitsui.api import Handler
class MaskHandler(Handler):
    """ Controller (TraitsUI Handler) for the ModelView: Mask.

    Each object_*_changed hook fires when the corresponding trait changes
    on the edited object and reports the new state to stdout.
    (Python 2 source: uses print statements.)
    """
    def object_use_masking_changed(self, info):
        # Fired when the `use_masking` trait toggles.
        if info.object.use_masking:
            print "Using Mask"
        else:
            print "Not using Mask"

    def object_use_zero_mask_changed(self, info):
        # Fired when the `use_zero_mask` trait toggles.
        if info.object.use_zero_mask:
            print "Using Zero Mask"
        else:
            print "Not using Zero Mask"

    def object_use_cuttop_changed(self, info):
        # Fired when the `use_cuttop` trait toggles.
        if info.object.use_cuttop:
            print "Using Cuttop"
        else:
            print "Not using Cuttop"
|
FacundoGFlores/golsoftv2
|
src/controllers/mask.py
|
Python
|
gpl-3.0
| 792
|
##
## Biskit, a toolkit for the manipulation of macromolecular structures
## Copyright (C) 2004-2012 Raik Gruenberg & Johan Leckner
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You find a copy of the GNU General Public License in the file
## license.txt along with this program; if not, write to the Free
## Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
##
##
##
## last $Author$
## last $Date$
## $Revision$
"""
Parallizes calculation and plotting of ensemble trajectory RMSDs
"""
from Biskit.PVM.hosts import cpus_all, nice_dic, nodes_all
import Biskit.tools as T
from Biskit.PVM import TrackingJobMaster
class QualMaster(TrackingJobMaster):
    """PVM job master distributing RMSD calculation/plotting of ensemble
    trajectories to QualSlave workers."""

    def __init__(self, trajFiles, n_hosts=20, **kw):
        """
        @param trajFiles: list of trajectory files
        @type trajFiles: [ str ]
        @param n_hosts: number of hosts to use
        @type n_hosts: int
        """
        # Job dictionary: one absolute trajectory path per task id.
        dat = {}
        i = 0
        for f in trajFiles:
            dat[i] = T.absfile( f )
            i += 1
        niceness = nice_dic
        hosts = nodes_all[ :n_hosts ]
        slave_script = T.projectRoot() +'/Biskit/QualSlave.py'
        # Chunk size 1: each slave handles a single trajectory at a time.
        TrackingJobMaster.__init__(self, dat, 1, hosts, niceness,
                                   slave_script, **kw)

    def getInitParameters(self, slave_tid):
        """
        hand over parameters to slave once.

        @param slave_tid: slave task id
        @type slave_tid: int

        @return: dictionary with init parameters
        @rtype: {param:value}
        """
        return {'verbose':self.verbose}

    def done(self):
        # Shut down the PVM master once all jobs have completed.
        self.exit()
#############
## TESTING
#############
import Biskit.test as BT
class Test(BT.BiskitTest):
    """Test case for QualMaster; requires a running PVM environment."""
    TAGS = [ BT.PVM ]

    def test_QualMaster(self):
        """QualMaster test"""
        import os.path
        ## a minimal list of trajectories
        traj_list = [ T.testRoot() + '/lig_pcr_00/traj.dat' ]
        self.master = QualMaster( traj_list,
                                  show_output=self.local,
                                  verbose=self.local )
        ## run and wait for result
        self.r = self.master.calculateResult()
        #master.start()
        # The slave writes an EPS plot next to the first result file.
        self.f_plot = '%s/rms_traj.eps'%os.path.dirname( self.r[0] )
        if self.local:
            print 'A RMSD plot is writen to: %s'% self.f_plot

    def cleanUp(self):
        # Best-effort removal of the generated plot; ignore all failures.
        try:
            T.tryRemove( self.f_plot )
        except:
            pass
# Run the Biskit test suite for this module when executed directly.
if __name__ == '__main__':
    BT.localTest()
|
ostrokach/biskit
|
Biskit/QualMaster.py
|
Python
|
gpl-3.0
| 2,978
|
#!/usr/bin/env python
#
# Copyright 2009 Claudio Pisa (claudio dot pisa at uniroma2 dot it)
#
# This file is part of SVEF (SVC Streaming Evaluation Framework).
#
# SVEF is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SVEF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SVEF. If not, see <http://www.gnu.org/licenses/>.
#
import sys
from nalulib import *
import os
# Show usage and bail out unless both command-line arguments were supplied.
if(len(sys.argv) < 3):
    print """
Compute the delay (i.e. the difference between sending and receiving
times) for each NAL unit and the average.
Usage: %s <received trace file> <fps>
<received trace file>: JSVM BitstreamExtractor trace file with a
further column containing the frame number for each NAL unit. This
trace may be obtained from the receiver module.
<fps>: frames per second
""" % os.path.basename(sys.argv[0])
    sys.exit(1)

# Command-line arguments: trace file path and playback frame rate.
receivedtracefilename = sys.argv[1]
fps = int(sys.argv[2]) * 1.0
# Duration of one frame in milliseconds.
oneframetime = 1000.0 / fps

# Parse the trace file, keeping only SliceData NAL units.
receivedtracefile = open(receivedtracefilename)
receivednalulist = []
for line in receivedtracefile:
    nalu = NALU(line)
    if nalu.packettype == "SliceData":
        receivednalulist.append(nalu)
receivedtracefile.close()
receivednalulist.sort()

# Reference timestamp: arrival time of the first (sorted) NAL unit.
t0 = receivednalulist[0].timestamp
print "%10s%10s%13s%13s%10s%10s%10s%10s" % ("ID","Frame #","Timestamp", "Expected TS", "Delay","Length","TID","QID")
for nalu in receivednalulist:
    # Frame N is expected t0 + N * frame_duration after playback start;
    # delay is actual minus expected arrival time.
    expectedarrivaltime = int(t0 + nalu.frame_number * oneframetime)
    nalu.delay = nalu.timestamp - expectedarrivaltime
    print "%#010x%10d%13d%13d%10d%10d%10x%10x" % (nalu.id, nalu.frame_number, nalu.timestamp, expectedarrivaltime, nalu.delay, nalu.length, nalu.tid, nalu.qid)
delays = [nalu.delay for nalu in receivednalulist]
print "Average delay: %f milliseconds" % (1.0 * sum(delays)/len(delays),)
|
netgroup/svef
|
computedelay.py
|
Python
|
gpl-3.0
| 2,220
|
import handler
import server
import service
|
alxark/scmt
|
server/scmt/api/__init__.py
|
Python
|
gpl-3.0
| 43
|
"""
This module implements the selection sort algorithim
"""
def selection_sort(arr, key=lambda x: x):
    """
    Perform a selection sort on a given list. Return the sorted list.

    The sort is in place; the input list itself is returned (except for an
    empty input, where a fresh empty list is returned, preserving the
    original behaviour).

    Example:

    >>> selection_sort([1, 5, 7, 2, 3, 4, 1])
    [1, 1, 2, 3, 4, 5, 7]
    >>> selection_sort(['a', 'd', 'b', 'c'])
    ['a', 'b', 'c', 'd']

    :param arr: The list to sort
    :type arr: list
    :param key: Function extracting the comparison key from each element
    :type key: function
    :returns: Sorted list
    """
    if not arr:  # no point in sorting an empty list (idiomatic truthiness test)
        return []
    # Repeatedly select the largest remaining element and swap it into the
    # last unsorted slot, shrinking the unsorted prefix from the right.
    for max_index in range(len(arr) - 1, 0, -1):
        # First index of the largest element within the unsorted prefix.
        max_pos = arr.index(max(arr[0:max_index + 1], key=key), 0, max_index + 1)
        arr[max_index], arr[max_pos] = arr[max_pos], arr[max_index]  # swap
    return arr
def _sort(*args, **kwargs):
    # Generic entry point used by the package's sort-dispatch machinery;
    # simply delegates to selection_sort.
    return selection_sort(*args, **kwargs)

# Human-readable algorithm name exposed alongside _sort.
_sort_name = "Selection sort"
# Run the embedded doctest examples when executed directly.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
|
jcotton42/libuseful
|
sorts/selectionsort.py
|
Python
|
gpl-3.0
| 1,042
|
from os import path, listdir
from git import Repo, Git
from lense_devtools.common import DevToolsCommon
class DevToolsGitRepo(DevToolsCommon):
    """
    Helper class for retrieving a lense project repository.

    Clones the remote on first use, checks out the configured branch and
    pulls remote changes.  The ``updated`` / ``cloned`` flags record what
    happened during ``setup()``.
    """
    def __init__(self, project, attrs, automode=False):
        """
        :param project: The project name
        :type project: str
        :param attrs: Project attributes (git-local / git-remote / git-branch)
        :type attrs: dict
        :param automode: Avoid interactive prompts when True
        :type automode: bool
        """
        super(DevToolsGitRepo, self).__init__()

        # Auto mode (avoid prompts)
        self.automode = automode

        # Project name
        self.name = project

        # Local / remote / branch. The local checkout defaults to
        # <workspace>/src/<project>/<project> unless 'git-local' overrides it.
        self.local = self.mkdir('{0}/{1}/{2}'.format(self.workspace, attrs.get('git-local', 'src/{0}'.format(project)), project))
        self.remote = attrs.get('git-remote')
        self.branch = attrs.get('git-branch')

        # Repo / Git objects (populated by _clone / _refresh)
        self._repo = None
        self._git = None

        # Has the repo been updated / cloned during this run
        self.updated = False
        self.cloned = False

    def _exists(self):
        """
        Check if the local repo exists (path exists and is not empty).
        """
        if not path.isdir(self.local):
            return False
        if not listdir(self.local):
            return False
        return True

    def _get_current_branch(self):
        """
        Get the name of the checked out local branch.
        """
        return str(self._repo.active_branch)

    def _checkout(self, branch):
        """
        Checkout the requested branch (no-op if already checked out).
        """
        current_branch = self._get_current_branch()

        # Target branch is already checked out
        if current_branch == branch:
            return True

        # Checkout the branch
        self._git.checkout(branch)
        return self.feedback.success('Switched to branch: {0}'.format(branch))

    def _clone(self):
        """
        Clone the remote repository unless a local checkout already exists.
        """
        if not self._exists():
            Repo.clone_from(self.remote, self.local)
            self.feedback.success('Cloned repository')
            self.feedback.info('Remote: {0}'.format(self.remote))
            self.feedback.info('Local: {0}'.format(self.local))

            # Store the Repo/Git objects
            self._git = Git(self.local)
            self._repo = Repo(self.local)

            # Checkout the requested branch
            self._checkout(self.branch)
            self.cloned = True

        # Local repo already exists
        else:
            self.feedback.info('Local repository found: {0}'.format(self.local))

    def _refresh(self):
        """
        Refresh the repository objects and fetch remote changes.
        """
        self._git = Git(self.local)
        self._repo = Repo(self.local)

        # Fetch remotes
        self._repo.remotes.origin.fetch()
        self.feedback.info('Fetched changes from remote')

    def _get_local_commit(self):
        """
        Get the latest commit from the local branch (None if not found).
        """
        for o in self._repo.refs:
            if o.name == self.branch:
                return o.commit

    def _get_remote_commit(self):
        """
        Get the latest commit from the remote branch (None if not found).
        """
        for o in self._repo.remotes.origin.refs:
            if o.remote_head == self.branch:
                return o.commit

    def _pull(self):
        """
        Pull changes from the remote repository into the local branch.
        """
        # If the repo has just been cloned, it is already current
        if self.cloned:
            self.feedback.info('Newly cloned repo, skipped pull')
            return True

        # Refresh repo objects
        self._refresh()

        # Checkout the branch
        self._checkout(self.branch)

        # Remote / local commits
        remote_commit = self._get_remote_commit()
        local_commit = self._get_local_commit()

        # Show the local/remote commit info
        self.feedback.info('Local <{0}> is on commit: {1}'.format(self.local, local_commit))
        self.feedback.info('Remote <{0}> is on commit: {1}'.format(self.remote, remote_commit))

        # If local is up to date, nothing to pull
        if remote_commit == local_commit:
            return self.feedback.info('Local matches remote, everything up to date'.format(local_commit, remote_commit))

        # Update the local branch
        origin = self._repo.remotes.origin
        origin.pull()

        # Refresh the branches
        self._refresh()

        # Updated success
        self.feedback.success('Local branch updated -> {0}'.format(self._get_local_commit()))
        self.updated = True

    def setup(self):
        """
        Construct information about the repository: ensure a local clone
        exists, then bring it up to date with the remote.
        """
        # Make sure the repo exists locally
        self._clone()

        # Pull any changes from the remote
        self._pull()
|
djtaylor/lense-devtools
|
usr/lib/python2.7/dist-packages/lense_devtools/gitrepo.py
|
Python
|
gpl-3.0
| 4,898
|
import datetime
from django.utils.timezone import utc
from django.db import models
from django.db.models import Q
#from django.contrib.gis.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from cms.models import CMSPlugin, Page
from django_extensions.db.fields import *
from django.core.urlresolvers import reverse
from django.db.models.signals import post_save
from lib.fields import extra
# logging
import logging
log = logging.getLogger(__name__)
__all__ = ('ShortcutCollection', 'Shortcut')
class BaseModel(models.Model):
    """Abstract base model adding a UUID plus created/updated timestamps."""
    uuid = UUIDField()
    created = CreationDateTimeField()
    updated = ModificationDateTimeField()

    class Meta:
        abstract = True
class ShortcutCollection(BaseModel):
    """A named collection grouping Shortcut entries."""
    name = models.CharField(max_length=256, null=True, blank=False)
    title = models.CharField(max_length=256, null=True, blank=True)

    class Meta:
        app_label = 'shortcutter'
        verbose_name = _('Collection')
        verbose_name_plural = _('Collections')
        ordering = ('name', )

    def __unicode__(self):
        return "%s" % self.name

    def save(self, *args, **kwargs):
        # Fix: dropped the unused local ``obj = self``; the override is
        # kept as an explicit extension hook.
        super(ShortcutCollection, self).save(*args, **kwargs)
class Shortcut(BaseModel):
    """A single shortcut: either an external URL or a CMS page link."""
    name = models.CharField(max_length=256, null=True, blank=True)
    #description = models.TextField(null=True, blank=True)
    description = extra.MarkdownTextField(blank=True, null=True)
    key = models.CharField(max_length=256, null=True, blank=True)
    # Manual ordering weight within the collection (lower comes first).
    position = models.PositiveIntegerField(default=0)
    url = models.URLField(_("link"), blank=True, null=True)
    page_link = models.ForeignKey(Page, verbose_name=_("page"), blank=True, null=True, help_text=_("A link to a page has priority over a text link."))
    collection = models.ForeignKey('ShortcutCollection', null=True, blank=True, on_delete=models.SET_NULL, related_name='shortcuts')

    # meta
    class Meta:
        app_label = 'shortcutter'
        verbose_name = _('Shortcut')
        verbose_name_plural = _('Shortcuts')
        ordering = ('position', 'name', )

    def __unicode__(self):
        return "%s" % self.name

    def get_link(self):
        """Return the target URL: the CMS page wins over the plain url."""
        if self.page_link:
            link = self.page_link.get_absolute_url()
        elif self.url:
            link = self.url
        else:
            link = None
        return link

    def save(self, *args, **kwargs):
        # Fix: dropped the unused local ``obj = self``; the override is
        # kept as an explicit extension hook.
        super(Shortcut, self).save(*args, **kwargs)
""""""
class ShortcutPlugin(CMSPlugin):
    """django-cms plugin instance rendering one ShortcutCollection."""
    collection = models.ForeignKey('ShortcutCollection', related_name='plugins')

    class Meta:
        app_label = 'shortcutter'

    def __unicode__(self):
        return "%s" % self.collection.name
|
hzlf/openbroadcast
|
website/apps/shortcutter/models.py
|
Python
|
gpl-3.0
| 2,869
|
"""
Roleme stuff.
"""
from curious import Role
from curious.commands import Context, Plugin, command, condition
from jokusoramame.plugins.roles.roleme_shared import RolemeResult, RolemeShared
class Roleme(Plugin):
    """
    Commands for the roleme portion of the bot.
    """
    def __init__(self, client):
        super().__init__(client)
        # Shared implementation used by the roleme command family.
        self.impl = RolemeShared(client)

    @command()
    async def roleme(self, ctx: Context, *, role: Role = None):
        """
        Adds a role to your list of roles.
        """
        roles = await self.impl.get_all_roleme_roles(ctx.guild)
        # behaviour a, assign a role
        if role is not None:
            result = await self.impl.apply_roleme_role(role, ctx.author)
            if result is RolemeResult.ERR_NOT_ASSIGNABLE:
                return await ctx.channel.messages.send(":x: This role is not self-assignable.")
            return await ctx.channel.messages.send(":heavy_check_mark: Assigned you this role.")
        else:
            # behaviour b, no role given: list the assignable roles instead
            if not roles:
                return await ctx.channel.messages.send(":pencil: There are no roles you can assign "
                                                       "yourself in this server currently.")
            fmts = []
            for role in sorted(roles, key=lambda r: r.name):
                fmts.append(" - {}".format(role.name))
            role_list = '\n'.join(fmts)
            return await ctx.channel.messages.send(f":pencil: **Roles you can give yourself:**"
                                                   f"\n\n{role_list}")

    @roleme.subcommand()
    @condition(lambda ctx: ctx.author.guild_permissions.manage_roles)
    async def add(self, ctx: Context, *, role: Role = None):
        """
        Adds a role as a roleme role.
        """
        await self.impl.add_roleme_role(role, is_colourme=False)
        await ctx.channel.messages.send(f":heavy_check_mark: Added {role.name} as a roleme role.")

    @roleme.subcommand()
    @condition(lambda ctx: ctx.author.guild_permissions.manage_roles)
    async def remove(self, ctx: Context, *, role: Role = None):
        """
        Removes a role as a roleme role.
        """
        await self.impl.remove_roleme_role(role)
        await ctx.channel.messages.send(f":heavy_check_mark: Removed {role.name} as a roleme role.")

    @roleme.subcommand()
    async def unroleme(self, ctx: Context, *, role: Role):
        """
        Removes a role from you.
        """
        result = await self.impl.unapply_roleme_role(role, ctx.author)
        # Consistency fix: compare against the enum class, as `roleme` does,
        # rather than looking the member up on the returned instance.
        if result is RolemeResult.ERR_NOT_ASSIGNABLE:
            return await ctx.channel.messages.send(":x: This role is not self-assignable.")
        await ctx.channel.messages.send(":heavy_check_mark: Removed you from this role.")

    @command(name="unroleme")
    async def unroleme_toplevel(self, ctx: Context, *, role: Role):
        # Top-level alias so `unroleme` also works outside the roleme group.
        return await self.unroleme(ctx, role=role)
|
SunDwarf/Jokusoramame
|
jokusoramame/plugins/roles/roleme.py
|
Python
|
gpl-3.0
| 2,924
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-20 16:05
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.contrib.auth.hashers import make_password
def add_anonymous_profile(apps, schema_editor):
    """Ensure anonymous user has profile.

    Creates (if missing) the anonymous user with an unusable password,
    places it exclusively in the 'Guests' group, and guarantees a matching
    accounts.Profile row exists.
    """
    # Historical model classes as of this migration, not the current ones.
    User = apps.get_model('auth', 'User')
    Group = apps.get_model('auth', 'Group')
    Profile = apps.get_model('accounts', 'Profile')
    anon_user = User.objects.get_or_create(
        username=settings.ANONYMOUS_USER_NAME,
        defaults={
            'is_active': False,
            # make_password(None) yields an unusable password hash.
            'password': make_password(None),
            'email': 'noreply@weblate.org',
        }
    )[0]
    guest_group = Group.objects.get_or_create(name='Guests')[0]
    # The anonymous user belongs to the Guests group only.
    anon_user.groups.clear()
    anon_user.groups.add(guest_group)
    Profile.objects.get_or_create(user=anon_user)
class Migration(migrations.Migration):
    """Data migration: give the anonymous user a Guests group and a profile."""

    dependencies = [
        ('accounts', '0016_add-api-keys'),
        ('auth', '0001_initial'),
    ]
    operations = [
        # Forward-only data migration (no reverse function supplied).
        migrations.RunPython(add_anonymous_profile),
    ]
|
lem9/weblate
|
weblate/accounts/migrations/0017_anonymous_profile.py
|
Python
|
gpl-3.0
| 1,121
|
# -*- coding: utf-8 -*-
"""
Define the form for django's admin
"""
from django.contrib import admin
from baobab.utils.admin import ModelAdmin
from baobab.translate.models import Lang, Event, EventLog
from baobab.translate.admininline import EventDataInline, EventLogDataInline
from baobab.translate.adminfilter import (EventCompleteFilter,
EventLogCompleteFilter)
class EventAdmin(ModelAdmin):
    """Read-mostly admin for Event, showing per-language completeness."""
    fields = ['title_en', ]
    readonly_fields = ['title_en', ]
    list_display = ['title_en', 'complete_event', 'complete_log']
    # NOTE: this queryset is evaluated when the admin module is imported.
    list_display_dynamic = {
        'val_name': Lang.objects.exclude(iso='en').values_list('iso', 'name'),
        'filter': 'lang__iso',
        'sub_obj': 'eventdatas',
    }
    inlines = [EventDataInline, ]
    list_filter = [EventCompleteFilter, ]
    search_fields = ['eventdatas__title']

    def title_en(self, obj):
        """English title of the event (shown in the changelist)."""
        return obj.eventdatas.get(lang__iso='en').title
    title_en.short_description = 'Title'

    def complete_event(self, obj):
        """Bold 'True' when the event is translated into every language."""
        if obj.eventdatas.count() == Lang.objects.count():
            # Fixed: was '<b>True<b>' — the closing tag was never closed.
            return '<b>True</b>'
        return 'False'
    complete_event.allow_tags = True

    def complete_log(self, obj):
        """Bold 'True' when every log of the event is fully translated."""
        for eventlog in obj.eventlogs.all():
            if eventlog.eventlogdatas.count() != Lang.objects.count():
                return 'False'
        # Fixed: was '<b>True<b>' — unclosed bold tag.
        return '<b>True</b>'
    complete_log.allow_tags = True

    # XXX for safety: translators may edit, never add or delete events.
    def has_delete_permission(self, request, obj=None):
        return False

    def has_add_permission(self, request, obj=None):
        return False

    def has_change_permission(self, request, obj=None):
        # Members of the 'translate' group may always edit translations.
        if request.user.groups.filter(name='translate').exists():
            return True
        return super(EventAdmin, self).has_change_permission(request, obj=obj)
class EventLogAdmin(ModelAdmin):
    """Read-mostly admin for EventLog, showing per-language completeness."""
    fields = ['comment_en', ]
    readonly_fields = ['comment_en', ]
    list_display = ['title_en', 'comment_en', 'complete']
    # NOTE: this queryset is evaluated when the admin module is imported.
    list_display_dynamic = {
        'val_name': Lang.objects.exclude(iso='en').values_list('iso', 'name'),
        'filter': 'lang__iso',
        'sub_obj': 'eventlogdatas',
    }
    inlines = [EventLogDataInline, ]
    list_filter = [EventLogCompleteFilter, ]
    search_fields = ['event__eventdatas__title']

    def title_en(self, obj):
        """English title of the parent event."""
        return obj.event.eventdatas.get(lang__iso='en').title
    title_en.short_description = 'Title'

    def comment_en(self, obj):
        """English comment of this log entry."""
        return obj.eventlogdatas.get(lang__iso='en').comment
    comment_en.short_description = 'Comment'

    def complete(self, obj):
        """Bold 'True' when the log is translated into every language."""
        if obj.eventlogdatas.count() == Lang.objects.count():
            # Fixed: was '<b>True<b>' — the closing tag was never closed.
            return '<b>True</b>'
        return 'False'
    complete.allow_tags = True

    # XXX for safety: translators may edit, never add or delete logs.
    def has_delete_permission(self, request, obj=None):
        return False

    def has_add_permission(self, request, obj=None):
        return False

    def has_change_permission(self, request, obj=None):
        # Members of the 'translate' group may always edit translations.
        if request.user.groups.filter(name='translate').exists():
            return True
        return super(EventLogAdmin, self).has_change_permission(request,
                                                                obj=obj)
# Expose both admin forms on the default admin site.
admin.site.register(Event, EventAdmin)
admin.site.register(EventLog, EventLogAdmin)
|
Gandi/baobab
|
baobab/translate/admin.py
|
Python
|
gpl-3.0
| 3,302
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os

# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the project root dir, which is the parent dir of this conf.py's
# working directory (docs/), i.e. the repository checkout.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)

# Imported for its __version__; relies on the sys.path tweak above.
import pyformula
# -- General configuration ---------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'PyFormula'
copyright = u'2014, Stefan Bakker'

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
# NOTE(review): both version and release carry the full version string;
# version is documented as "short X.Y" but is not truncated here.
version = pyformula.__version__
# The full version, including alpha/beta/rc tags.
release = pyformula.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False

# -- Options for HTML output -------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'pyformuladoc'
# -- Options for LaTeX output ------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
    ('index', 'pyformula.tex',
     u'PyFormula Documentation',
     u'Stefan Bakker', 'manual'),
]

# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'pyformula',
     u'PyFormula Documentation',
     [u'Stefan Bakker'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): the description below is still the sphinx-quickstart
# placeholder text.
texinfo_documents = [
    ('index', 'pyformula',
     u'PyFormula Documentation',
     u'Stefan Bakker',
     'pyformula',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
RipCazza/pyformula
|
docs/conf.py
|
Python
|
gpl-3.0
| 8,417
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 Daniel Estevez <daniel@destevez.net>
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
import argparse
import functools
import itertools
import os
import shlex
import yaml
from gnuradio import gr, zeromq
import pmt
from ..components import datasinks
from ..components import datasources
from ..components import deframers
from ..components import demodulators
from ..components import transports
from ..satyaml import yamlfiles
from .. import pdu_add_meta
def set_options(cl, *args, **kwargs):
    """
    Return a subclass of *cl* whose constructor has some arguments fixed.

    This is intended to generate GNU Radio blocks with some options set
    by deriving from blocks that allow for options.

    Args:
        cl: the base class to derive from (class)
        *args: positional arguments pre-bound to cl.__init__
        **kwargs: keyword arguments pre-bound to cl.__init__
    """
    # partialmethod acts as a descriptor, so assigning it as __init__
    # forwards self plus the pre-bound arguments to the base constructor.
    fixed_init = functools.partialmethod(cl.__init__, *args, **kwargs)

    class C(cl):
        __init__ = fixed_init

    return C
def try_add_options(x, parser):
    """
    Call ``x.add_options(parser)`` if *x* exposes an ``add_options`` attribute.

    Objects without that attribute are silently ignored.

    Args:
        x: an object
        parser: an argparser ArgumentParser
    """
    # Guard clause: objects that take no options are simply skipped.
    if not hasattr(x, 'add_options'):
        return
    x.add_options(parser)
def filter_translate_dict(d, key_translation):
    """
    Return a copy of *d* keeping only translated keys.

    Entries whose key appears in *key_translation* are kept, with the key
    replaced by its translation; all other entries are dropped.

    Args:
        d: a dictionary to filter and translate
        key_translation: a dictionary of key translations
    """
    result = {}
    for key, value in d.items():
        if key in key_translation:
            result[key_translation[key]] = value
    return result
class gr_satellites_flowgraph(gr.hier_block2):
    """
    gr-satellites decoder flowgraph

    Uses a YAML file with a satellite description to create a
    hierarchical flowgraph for that satellite. There are two modes of
    operation. If this is called from GRC, then only demodulation and
    deframing is done, getting all messages to the 'out' output port. If
    this is not called from GRC, then messages are routed to data sinks
    as appropriate.

    Args:
        file: filename of the YAML file to load (string)
        name: satellite name to search in all YAML files (string)
        norad: NORAD ID to search in all YAML files (int)
        samp_rate: sample rate (float)
        iq: use IQ or real input (bool)
        grc_block: whether this is called from GRC (bool)
        options: options from argparser
        config: configuration file from configparser
        pdu_in: use PDU input instead of samples (bool)
        dump_path: Path to dump internal signals to files (str)

    Note that exactly one of file, name and norad should be specified
    """
    def __init__(self, file=None, name=None, norad=None,
                 samp_rate=None, iq=False, grc_block=False,
                 options=None, config=None, pdu_in=False,
                 dump_path=None):
        # No stream ports when fed PDUs; otherwise one complex or float input.
        gr.hier_block2.__init__(
            self,
            'gr_satellites_flowgraph',
            gr.io_signature(0, 0, 0)
            if pdu_in else
            gr.io_signature(1, 1,
                            gr.sizeof_gr_complex
                            if iq else gr.sizeof_float),
            gr.io_signature(0, 0, 0))
        self.samp_rate = samp_rate
        self.iq = iq
        self.grc_block = grc_block
        self.dump_path = dump_path
        self.config = config
        # Load up options, similarly to option block
        if type(options) is str:
            p = argparse.ArgumentParser(prog=self.__class__.__name__,
                                        conflict_handler='resolve')
            gr_satellites_flowgraph.add_options(p, file, name, norad)
            options = p.parse_args(shlex.split(options))
        self.options = options
        if pdu_in:
            self.message_port_register_hier_in('in')
        elif samp_rate is None:
            raise ValueError('samp_rate not specified')
        self.satyaml = satyaml = self.open_satyaml(file, name, norad)
        if grc_block:
            self.message_port_register_hier_out('out')
        else:
            self._datasinks = dict()
            self._additional_datasinks = list()
            # In hexdump mode telemetry datasinks are not created.
            do_telemetry = not (self.options is not None
                                and self.options.hexdump)
            for key, info in satyaml['data'].items():
                is_telemetry = ('telemetry' in info
                                or info == 'unknown')
                if not is_telemetry or do_telemetry:
                    self._init_datasink(key, info)
            self._init_additional_datasinks()
            self._transports = dict()
            if 'transports' in satyaml:
                for key, info in satyaml['transports'].items():
                    self._init_transport(key, info)
        if pdu_in:
            for sink in itertools.chain(
                    self._datasinks.values(), self._additional_datasinks):
                self.msg_connect((self, 'in'), (sink, 'in'))
        else:
            self._demodulators = dict()
            self._deframers = dict()
            self._taggers = dict()
            for key, transmitter in satyaml['transmitters'].items():
                self._init_demodulator_deframer(key, transmitter)

    def _init_datasink(self, key, info):
        """Initialize a datasink

        Initializes a datasink according to a SatYAML entry

        Args:
            key: the name of the datasink entry in SatYAML
            info: the body of the datasink entry in SatYAML
        """
        if 'decoder' in info:
            ds = getattr(datasinks, info['decoder'])
            try:
                datasink = ds(options=self.options)
            except TypeError:  # raised if ds doesn't have an options parameter
                datasink = ds()
        elif 'telemetry' in info:
            datasink = datasinks.telemetry_parser(info['telemetry'],
                                                  options=self.options)
        elif 'files' in info:
            datasink = datasinks.file_receiver(info['files'],
                                               options=self.options)
        elif 'image' in info:
            datasink = datasinks.file_receiver(
                info['image'], options=self.options, display=True)
        else:
            # Fallback when the entry names no known sink type.
            datasink = datasinks.hexdump_sink()
        self._datasinks[key] = datasink

    def _init_additional_datasinks(self):
        """Initialize additional datasinks

        Creates all the datasinks that are not explicitly indicated
        in the SatYAML (telemetry submit, KISS output, etc.)
        """
        if self.options is not None and self.options.kiss_out:
            self._additional_datasinks.append(
                datasinks.kiss_file_sink(self.options.kiss_out,
                                         bool(self.options.kiss_append),
                                         options=self.options))
        if self.options is not None and self.options.kiss_server:
            self._additional_datasinks.append(
                datasinks.kiss_server_sink(self.options.kiss_server_address,
                                           self.options.kiss_server))
        if self.options is not None and self.options.zmq_pub:
            self._additional_datasinks.append(
                zeromq.pub_msg_sink(self.options.zmq_pub))
        # The GR_SATELLITES_SUBMIT_TLM environment variable takes precedence
        # over the configuration to choose whether to enable telemetry
        # submission
        tlm_env = os.environ.get('GR_SATELLITES_SUBMIT_TLM')
        if tlm_env is not None:
            tlm_submit = bool(int(tlm_env))
        else:
            tlm_submit = self.config.getboolean('Groundstation', 'submit_tlm')
        if tlm_submit:
            self._additional_datasinks.extend(
                self.get_telemetry_submitters(self.satyaml, self.config,
                                              self.options))
        if self.options is not None and self.options.hexdump:
            self._additional_datasinks.append(datasinks.hexdump_sink())

    def _init_transport(self, key, info):
        """Initialize a transport

        Initializes a transport according to a SatYAML entry and connects
        it to the appropriate datasink

        Args:
            key: the name of the transport entry in SatYAML
            info: the body of the transport entry in SatYAML
        """
        transport = self.get_transport(info['protocol'])()
        self._transports[key] = transport
        # NOTE(review): unlike the sibling methods, this accesses
        # self.options.hexdump without a None check — confirm options is
        # always set when transports are configured.
        if not self.options.hexdump:
            for data in info['data']:
                self.msg_connect(
                    (transport, 'out'), (self._datasinks[data], 'in'))

    def _init_demodulator_deframer(self, key, transmitter):
        """Initialize a demodulator and deframer

        Creates a demodulator and deframer according to a SatYAML
        entry and connects the deframer to the data and transports

        Args:
            key: name of the transmitter entry in the SatYAML
            transmitter: transmitter entry in the SatYAML
        """
        baudrate = transmitter['baudrate']
        # Identity mapping: these SatYAML keys pass through unrenamed.
        demod_options = ['deviation', 'af_carrier']
        demod_options = {k: k for k in demod_options}
        demodulator_additional_options = filter_translate_dict(transmitter,
                                                               demod_options)
        demodulator = self.get_demodulator(transmitter['modulation'])(
            baudrate=baudrate, samp_rate=self.samp_rate, iq=self.iq,
            dump_path=self.dump_path, options=self.options,
            **demodulator_additional_options)
        # SatYAML key -> deframer constructor keyword argument.
        deframe_options = {
            'frame size': 'frame_size',
            'precoding': 'precoding',
            'RS basis': 'rs_basis',
            'RS interleaving': 'rs_interleaving',
            'convolutional': 'convolutional',
            'scrambler': 'scrambler',
        }
        deframer_additional_options = filter_translate_dict(transmitter,
                                                            deframe_options)
        deframer = self.get_deframer(transmitter['framing'])(
            options=self.options, **deframer_additional_options)
        self.connect(self, demodulator, deframer)
        self._demodulators[key] = demodulator
        self._deframers[key] = deframer
        self._connect_transmitter_to_data(key, transmitter, deframer)

    def _connect_transmitter_to_data(self, key, transmitter, deframer):
        """Connect a deframer to the datasinks and transports

        Connects a deframer to the datasinks and transports indicated in
        the SatYAML file

        Args:
            key: name of the transmitter entry in the SatYAML
            transmitter: the transmitter entry in SatYAML
            deframer: the deframer to connect
        """
        # Add a tagger that marks each PDU with the transmitter it came from
        meta = pmt.make_dict()
        meta = pmt.dict_add(meta, pmt.intern('transmitter'),
                            pmt.intern(key))
        tagger = pdu_add_meta(meta)
        self._taggers[key] = tagger
        self.msg_connect((deframer, 'out'), (tagger, 'in'))
        if self.grc_block:
            # If we are a GRC block we have no datasinks
            # so we connect directly to our output
            self.msg_connect((tagger, 'out'), (self, 'out'))
            return
        for s in self._additional_datasinks:
            self.msg_connect((tagger, 'out'), (s, 'in'))
        for data in transmitter.get('data', []):
            if data in self._datasinks:
                # The datasink may not exist if it's a telemetry parser
                # and we're running in hexdump mode
                self.msg_connect(
                    (tagger, 'out'), (self._datasinks[data], 'in'))
        for transport in transmitter.get('transports', []):
            self.msg_connect(
                (tagger, 'out'), (self._transports[transport], 'in'))
        if 'additional_data' in transmitter:
            for k, v in transmitter['additional_data'].items():
                # Add a tagger for each extra deframer output port
                tagger = pdu_add_meta(meta)
                self._taggers[(key, k)] = tagger
                self.msg_connect((deframer, k), (tagger, 'in'))
                self.msg_connect((tagger, 'out'), (self._datasinks[v], 'in'))

    def get_telemetry_submitters(self, satyaml, config, options):
        """
        Returns a list of block instances of telemetry submitters

        The telemetry submitters are those appropriate for this satellite

        Args:
            satyaml: satellite YAML file, as returned by self.open_satyaml
            config: configuration file from configparser
            options: options from argparser
        """
        norad = satyaml['norad']
        # SatNOGS submission is always included.
        submitters = [
            datasinks.telemetry_submit(
                'SatNOGS', norad=norad, config=config, options=options)]
        for server in satyaml.get('telemetry_servers', []):
            port = None
            url = None
            # 'HIT <port>' and 'SIDS <url>' entries carry an extra argument.
            if server.startswith('HIT '):
                port = server.split()[1]
                server = 'HIT'
            elif server.startswith('SIDS '):
                url = server.split()[1]
                server = 'SIDS'
            submitters.append(datasinks.telemetry_submit(
                server, norad=norad, port=port, url=url, config=config,
                options=options))
        return submitters

    def get_demodulator(self, modulation):
        """Return the demodulator class registered for *modulation*."""
        return self._demodulator_hooks[modulation]

    def get_deframer(self, framing):
        """Return the deframer class registered for *framing*."""
        return self._deframer_hooks[framing]

    def get_transport(self, protocol):
        """Return the transport class registered for *protocol*."""
        return self._transport_hooks[protocol]

    @staticmethod
    def open_satyaml(file, name, norad):
        # Exactly one selector must be given; look the YAML up accordingly.
        if sum([x is not None for x in [file, name, norad]]) != 1:
            raise ValueError(
                'exactly one of file, name and norad needs to be specified')
        if file is not None:
            satyaml = yamlfiles.get_yamldata(file)
        elif name is not None:
            satyaml = yamlfiles.search_name(name)
        else:
            satyaml = yamlfiles.search_norad(norad)
        return satyaml

    @classmethod
    def add_options(cls, parser, file=None, name=None, norad=None):
        """Add the satellite-specific command line options to *parser*."""
        satyaml = cls.open_satyaml(file, name, norad)
        demod_options = parser.add_argument_group('demodulation')
        deframe_options = parser.add_argument_group('deframing')
        data_options = parser.add_argument_group('data sink')
        for info in satyaml['data'].values():
            if 'decoder' in info:
                try_add_options(getattr(datasinks, info['decoder']),
                                data_options)
            if 'telemetry' in info:
                try_add_options(datasinks.telemetry_parser, data_options)
            if 'files' in info or 'image' in info:
                try_add_options(datasinks.file_receiver, data_options)
        for transmitter in satyaml['transmitters'].values():
            try_add_options(cls._demodulator_hooks[transmitter['modulation']],
                            demod_options)
            try_add_options(cls._deframer_hooks[transmitter['framing']],
                            deframe_options)

    # Default parameters: SatYAML string -> block class lookup tables.
    _demodulator_hooks = {
        'AFSK': demodulators.afsk_demodulator,
        'FSK': demodulators.fsk_demodulator,
        'BPSK': demodulators.bpsk_demodulator,
        'BPSK Manchester': set_options(demodulators.bpsk_demodulator,
                                       manchester=True),
        'DBPSK': set_options(demodulators.bpsk_demodulator,
                             differential=True),
        'DBPSK Manchester': set_options(demodulators.bpsk_demodulator,
                                        differential=True, manchester=True),
        'FSK subaudio': set_options(demodulators.fsk_demodulator,
                                    subaudio=True),
    }
    _deframer_hooks = {
        'AX.25': set_options(deframers.ax25_deframer, g3ruh_scrambler=False),
        'AX.25 G3RUH': set_options(deframers.ax25_deframer,
                                   g3ruh_scrambler=True),
        'AX100 ASM+Golay': set_options(deframers.ax100_deframer, mode='ASM'),
        'AX100 Reed Solomon': set_options(deframers.ax100_deframer,
                                          mode='RS'),
        '3CAT-1': deframers.sat_3cat_1_deframer,
        'Astrocast FX.25 NRZ-I': set_options(deframers.astrocast_fx25_deframer,
                                             nrzi=True),
        'Astrocast FX.25 NRZ': set_options(deframers.astrocast_fx25_deframer,
                                           nrzi=False),
        'AO-40 FEC': deframers.ao40_fec_deframer,
        'AO-40 FEC short': set_options(deframers.ao40_fec_deframer,
                                       short_frames=True),
        'AO-40 FEC CRC-16-ARC': set_options(deframers.ao40_fec_deframer,
                                            crc=True),
        'AO-40 FEC CRC-16-ARC short': set_options(deframers.ao40_fec_deframer,
                                                  short_frames=True,
                                                  crc=True),
        'AO-40 uncoded': deframers.ao40_uncoded_deframer,
        'TT-64': deframers.tt64_deframer,
        'ESEO': deframers.eseo_deframer,
        'Lucky-7': deframers.lucky7_deframer,
        'Reaktor Hello World': deframers.reaktor_hello_world_deframer,
        'S-NET': deframers.snet_deframer,
        'SALSAT': set_options(deframers.snet_deframer, buggy_crc=False),
        'Swiatowid': deframers.swiatowid_deframer,
        'NuSat': deframers.nusat_deframer,
        'K2SAT': deframers.k2sat_deframer,
        'CCSDS Reed-Solomon': deframers.ccsds_rs_deframer,
        'CCSDS Concatenated': deframers.ccsds_concatenated_deframer,
        'CCSDS Uncoded': set_options(deframers.ccsds_rs_deframer, rs_en=False),
        'LilacSat-1': deframers.lilacsat_1_deframer,
        'AAUSAT-4': deframers.aausat4_deframer,
        'NGHam': set_options(deframers.ngham_deframer, decode_rs=True),
        'NGHam no Reed Solomon': set_options(deframers.ngham_deframer,
                                             decode_rs=False),
        'SMOG-P RA': deframers.smogp_ra_deframer,
        'SMOG-1 RA': set_options(deframers.smogp_ra_deframer,
                                 new_protocol=True),
        'SMOG-P Signalling': deframers.smogp_signalling_deframer,
        'SMOG-1 Signalling': set_options(deframers.smogp_signalling_deframer,
                                         new_protocol=True),
        'OPS-SAT': deframers.ops_sat_deframer,
        'U482C': deframers.u482c_deframer,
        'UA01': deframers.ua01_deframer,
        'Mobitex': deframers.mobitex_deframer,
        'Mobitex-NX': set_options(deframers.mobitex_deframer, nx=True),
        'FOSSASAT': deframers.fossasat_deframer,
        'AISTECHSAT-2': deframers.aistechsat_2_deframer,
        'AALTO-1': deframers.aalto1_deframer,
        'Grizu-263A': deframers.grizu263a_deframer,
        'IDEASSat': deframers.ideassat_deframer,
        'YUSAT': deframers.yusat_deframer,
        'AX5043': deframers.ax5043_deframer,
        'USP': deframers.usp_deframer,
        'DIY-1': deframers.diy1_deframer,
        'BINAR-1': deframers.binar1_deframer,
        'Endurosat': deframers.endurosat_deframer,
    }
    _transport_hooks = {
        'KISS': transports.kiss_transport,
        'KISS no control byte': set_options(transports.kiss_transport,
                                            control_byte=False),
        'KISS KS-1Q': set_options(transports.kiss_transport,
                                  control_byte=False, header_remove_bytes=3),
    }
|
daniestevez/gr-satellites
|
python/core/gr_satellites_flowgraph.py
|
Python
|
gpl-3.0
| 19,752
|
'''
* Created by Zhenia Syryanyy (Yevgen Syryanyy)
* e-mail: yuginboy@gmail.com
* License: this code is under GPL license
* Last modified: 2017-09-19
'''
import numpy as np
import re
import periodictable as pd
from libs.dir_and_file_operations import createUniqFile, createUniqFile_from_idx
from libs.inputheader import writeHeaderToInpFile
from feff.libs.dir_and_file_operations import create_out_data_folder, get_folder_name, runningScriptDir
import os
class Cell():
    """Plain container for per-atom coordinates and bookkeeping lists."""

    def __init__(self):
        # Cartesian coordinates, one entry per atom.
        self.x, self.y, self.z = [], [], []
        # Element tag, index and distance lists, parallel to x/y/z.
        self.tagName = []
        self.atomIndex = []
        self.distance = []
class CellModel():
    """Model of a cell with a lattice constant and a dominant element tag."""

    def __init__(self, numOfAtoms=1):
        # numOfAtoms is accepted for interface compatibility but is not
        # stored or used here.
        self.latCons = 1
        self.majorElemTag = 'Mn'
        self.structure = {}
class Strategy():
    """Placeholder for a future cell-generation strategy."""
class CellGenerator():
    """Builds a cell description from a GULP input file."""

    def __init__(self):
        self.numOfAtoms = 512
        # Default GULP input shipped with the project.
        self.path_to_GULP_input_file = os.path.join(get_folder_name(runningScriptDir), 'data', 'src',
                                                    'GaMnAs_ideal.gin')

    def load_cell_from_GULP_input_file(self):
        """Scan the GULP input file and tokenize the atom lines.

        Fixes: the line counter was used before assignment (NameError on
        the first iteration) and the file handle was never closed.
        """
        pattern = 'fractional'  # NOTE(review): currently unused — confirm intent
        i = 0  # was missing: 'i = i + 1' raised NameError on the first line
        with open(self.path_to_GULP_input_file, 'r') as file:
            for line in file:
                i = i + 1
                if i > 29:
                    # Split each atom line into alphabetic and numeric tokens.
                    # NOTE(review): tegTmp is computed but not yet consumed —
                    # the parser looks unfinished.
                    tegTmp = re.findall('[a-zA-Z]+|\d+', line)
# Module is a library; running it directly only prints a notice.
if __name__ == '__main__':
    print('-> you run ', __file__, ' file in a main mode')
|
yuginboy/from_GULP_to_FEFF
|
feff/libs/cell_generator.py
|
Python
|
gpl-3.0
| 1,453
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, os, json
from frappe import _
from frappe.utils import get_timestamp
from frappe.utils import cint, today, formatdate
import frappe.defaults
from frappe.cache_manager import clear_defaults_cache
from frappe.model.document import Document
from frappe.contacts.address_and_contact import load_address_and_contact
from frappe.utils.nestedset import NestedSet
class Company(NestedSet):
nsm_parent_field = 'parent_company'
	def onload(self):
		"""Populate the form view with linked addresses/contacts and a
		flag telling whether submitted transactions already exist."""
		load_address_and_contact(self, "company")
		self.get("__onload")["transactions_exist"] = self.check_if_transactions_exist()
def check_if_transactions_exist(self):
exists = False
for doctype in ["Sales Invoice", "Delivery Note", "Sales Order", "Quotation",
"Purchase Invoice", "Purchase Receipt", "Purchase Order", "Supplier Quotation"]:
if frappe.db.sql("""select name from `tab%s` where company=%s and docstatus=1
limit 1""" % (doctype, "%s"), self.name):
exists = True
break
return exists
	def validate(self):
		"""Run all company-level validations before saving."""
		self.validate_abbr()
		self.validate_default_accounts()
		self.validate_currency()
		self.validate_coa_input()
		self.validate_perpetual_inventory()
		self.check_country_change()
		self.set_chart_of_accounts()
	def validate_abbr(self):
		"""Derive the abbreviation from the company name when missing and
		ensure it is non-empty and unique across companies."""
		if not self.abbr:
			# Default: initials of the company name, upper-cased.
			self.abbr = ''.join([c[0] for c in self.company_name.split()]).upper()
		self.abbr = self.abbr.strip()
		# if self.get('__islocal') and len(self.abbr) > 5:
		# frappe.throw(_("Abbreviation cannot have more than 5 characters"))
		if not self.abbr.strip():
			frappe.throw(_("Abbreviation is mandatory"))
		if frappe.db.sql("select abbr from tabCompany where name!=%s and abbr=%s", (self.name, self.abbr)):
			frappe.throw(_("Abbreviation already used for another company"))
	def create_default_tax_template(self):
		"""Create the default sales tax template for this company's country."""
		# Local import: avoids pulling setup-wizard code at module load time.
		from erpnext.setup.setup_wizard.operations.taxes_setup import create_sales_tax
		create_sales_tax({
			'country': self.country,
			'company_name': self.name
		})
	def validate_default_accounts(self):
		"""Ensure every configured default account actually belongs to this company."""
		for field in ["default_bank_account", "default_cash_account",
			"default_receivable_account", "default_payable_account",
			"default_expense_account", "default_income_account",
			"stock_received_but_not_billed", "stock_adjustment_account",
			"expenses_included_in_valuation", "default_payroll_payable_account"]:
			if self.get(field):
				for_company = frappe.db.get_value("Account", self.get(field), "company")
				if for_company != self.name:
					frappe.throw(_("Account {0} does not belong to company: {1}")
						.format(self.get(field), self.name))
	def validate_currency(self):
		"""Forbid changing the default currency once transactions exist."""
		if self.is_new():
			return
		self.previous_default_currency = frappe.get_cached_value('Company', self.name, "default_currency")
		if self.default_currency and self.previous_default_currency and \
			self.default_currency != self.previous_default_currency and \
			self.check_if_transactions_exist():
			frappe.throw(_("Cannot change company's default currency, because there are existing transactions. Transactions must be cancelled to change the default currency."))
	def on_update(self):
		"""Create default master data (accounts, warehouses, cost centers,
		departments, tax templates) for a freshly saved company."""
		NestedSet.on_update(self)
		# No account exists yet for this company -> first save: bootstrap.
		if not frappe.db.sql("""select name from tabAccount
			where company=%s and docstatus<2 limit 1""", self.name):
			if not frappe.local.flags.ignore_chart_of_accounts:
				frappe.flags.country_change = True
				self.create_default_accounts()
				self.create_default_warehouses()
		if frappe.flags.country_change:
			install_country_fixtures(self.name)
			self.create_default_tax_template()
		if not frappe.db.get_value("Department", {"company": self.name}):
			from erpnext.setup.setup_wizard.operations.install_fixtures import install_post_company_fixtures
			install_post_company_fixtures(frappe._dict({'company_name': self.name}))
		if not frappe.db.get_value("Cost Center", {"is_group": 0, "company": self.name}):
			self.create_default_cost_center()
		if not frappe.local.flags.ignore_chart_of_accounts:
			self.set_default_accounts()
			if self.default_cash_account:
				self.set_mode_of_payment_account()
		if self.default_currency:
			frappe.db.set_value("Currency", self.default_currency, "enabled", 1)
		# Keep the per-request perpetual-inventory cache in sync.
		if hasattr(frappe.local, 'enable_perpetual_inventory') and \
			self.name in frappe.local.enable_perpetual_inventory:
			frappe.local.enable_perpetual_inventory[self.name] = self.enable_perpetual_inventory
		frappe.clear_cache()
def create_default_warehouses(self):
for wh_detail in [
{"warehouse_name": _("All Warehouses"), "is_group": 1},
{"warehouse_name": _("Stores"), "is_group": 0},
{"warehouse_name": _("Work In Progress"), "is_group": 0},
{"warehouse_name": _("Finished Goods"), "is_group": 0}]:
if not frappe.db.exists("Warehouse", "{0} - {1}".format(wh_detail["warehouse_name"], self.abbr)):
warehouse = frappe.get_doc({
"doctype":"Warehouse",
"warehouse_name": wh_detail["warehouse_name"],
"is_group": wh_detail["is_group"],
"company": self.name,
"parent_warehouse": "{0} - {1}".format(_("All Warehouses"), self.abbr) \
if not wh_detail["is_group"] else ""
})
warehouse.flags.ignore_permissions = True
warehouse.flags.ignore_mandatory = True
warehouse.insert()
	def create_default_accounts(self):
		"""Build the chart of accounts and seed default receivable/payable accounts."""
		from erpnext.accounts.doctype.account.chart_of_accounts.chart_of_accounts import create_charts
		# Skip root-company validation while charts are being created.
		frappe.local.flags.ignore_root_company_validation = True
		create_charts(self.name, self.chart_of_accounts, self.existing_company)
		# Use the first non-group Receivable/Payable leaf as the company default.
		frappe.db.set(self, "default_receivable_account", frappe.db.get_value("Account",
			{"company": self.name, "account_type": "Receivable", "is_group": 0}))
		frappe.db.set(self, "default_payable_account", frappe.db.get_value("Account",
			{"company": self.name, "account_type": "Payable", "is_group": 0}))
def validate_coa_input(self):
if self.create_chart_of_accounts_based_on == "Existing Company":
self.chart_of_accounts = None
if not self.existing_company:
frappe.throw(_("Please select Existing Company for creating Chart of Accounts"))
else:
self.existing_company = None
self.create_chart_of_accounts_based_on = "Standard Template"
if not self.chart_of_accounts:
self.chart_of_accounts = "Standard"
def validate_perpetual_inventory(self):
if not self.get("__islocal"):
if cint(self.enable_perpetual_inventory) == 1 and not self.default_inventory_account:
frappe.msgprint(_("Set default inventory account for perpetual inventory"),
alert=True, indicator='orange')
def check_country_change(self):
frappe.flags.country_change = False
if not self.get('__islocal') and \
self.country != frappe.get_cached_value('Company', self.name, 'country'):
frappe.flags.country_change = True
def set_chart_of_accounts(self):
''' If parent company is set, chart of accounts will be based on that company '''
if self.parent_company:
self.create_chart_of_accounts_based_on = "Existing Company"
self.existing_company = self.parent_company
def set_default_accounts(self):
self._set_default_account("default_cash_account", "Cash")
self._set_default_account("default_bank_account", "Bank")
self._set_default_account("round_off_account", "Round Off")
self._set_default_account("accumulated_depreciation_account", "Accumulated Depreciation")
self._set_default_account("depreciation_expense_account", "Depreciation")
self._set_default_account("capital_work_in_progress_account", "Capital Work in Progress")
self._set_default_account("asset_received_but_not_billed", "Asset Received But Not Billed")
self._set_default_account("expenses_included_in_asset_valuation", "Expenses Included In Asset Valuation")
if self.enable_perpetual_inventory:
self._set_default_account("stock_received_but_not_billed", "Stock Received But Not Billed")
self._set_default_account("default_inventory_account", "Stock")
self._set_default_account("stock_adjustment_account", "Stock Adjustment")
self._set_default_account("expenses_included_in_valuation", "Expenses Included In Valuation")
self._set_default_account("default_expense_account", "Cost of Goods Sold")
if not self.default_income_account:
income_account = frappe.db.get_value("Account",
{"account_name": _("Sales"), "company": self.name, "is_group": 0})
if not income_account:
income_account = frappe.db.get_value("Account",
{"account_name": _("Sales Account"), "company": self.name})
self.db_set("default_income_account", income_account)
if not self.default_payable_account:
self.db_set("default_payable_account", self.default_payable_account)
if not self.default_payroll_payable_account:
payroll_payable_account = frappe.db.get_value("Account",
{"account_name": _("Payroll Payable"), "company": self.name, "is_group": 0})
self.db_set("default_payroll_payable_account", payroll_payable_account)
if not self.default_employee_advance_account:
employe_advance_account = frappe.db.get_value("Account",
{"account_name": _("Employee Advances"), "company": self.name, "is_group": 0})
self.db_set("default_employee_advance_account", employe_advance_account)
if not self.write_off_account:
write_off_acct = frappe.db.get_value("Account",
{"account_name": _("Write Off"), "company": self.name, "is_group": 0})
self.db_set("write_off_account", write_off_acct)
if not self.exchange_gain_loss_account:
exchange_gain_loss_acct = frappe.db.get_value("Account",
{"account_name": _("Exchange Gain/Loss"), "company": self.name, "is_group": 0})
self.db_set("exchange_gain_loss_account", exchange_gain_loss_acct)
if not self.disposal_account:
disposal_acct = frappe.db.get_value("Account",
{"account_name": _("Gain/Loss on Asset Disposal"), "company": self.name, "is_group": 0})
self.db_set("disposal_account", disposal_acct)
def _set_default_account(self, fieldname, account_type):
if self.get(fieldname):
return
account = frappe.db.get_value("Account", {"account_type": account_type,
"is_group": 0, "company": self.name})
if account:
self.db_set(fieldname, account)
def set_mode_of_payment_account(self):
cash = frappe.db.get_value('Mode of Payment', {'type': 'Cash'}, 'name')
if cash and self.default_cash_account \
and not frappe.db.get_value('Mode of Payment Account', {'company': self.name, 'parent': cash}):
mode_of_payment = frappe.get_doc('Mode of Payment', cash)
mode_of_payment.append('accounts', {
'company': self.name,
'default_account': self.default_cash_account
})
mode_of_payment.save(ignore_permissions=True)
	def create_default_cost_center(self):
		"""Create the root cost center (named after the company) plus a 'Main'
		leaf, and store 'Main' as the default for postings and depreciation."""
		cc_list = [
			{
				'cost_center_name': self.name,
				'company':self.name,
				'is_group': 1,
				'parent_cost_center':None
			},
			{
				'cost_center_name':_('Main'),
				'company':self.name,
				'is_group':0,
				'parent_cost_center':self.name + ' - ' + self.abbr
			},
		]
		for cc in cc_list:
			cc.update({"doctype": "Cost Center"})
			cc_doc = frappe.get_doc(cc)
			cc_doc.flags.ignore_permissions = True
			# The root cost center legitimately has no parent, which is
			# otherwise a mandatory field.
			if cc.get("cost_center_name") == self.name:
				cc_doc.flags.ignore_mandatory = True
			cc_doc.insert()
		frappe.db.set(self, "cost_center", _("Main") + " - " + self.abbr)
		frappe.db.set(self, "round_off_cost_center", _("Main") + " - " + self.abbr)
		frappe.db.set(self, "depreciation_cost_center", _("Main") + " - " + self.abbr)
	def after_rename(self, olddn, newdn, merge=False):
		"""Keep company_name and users' saved 'Company' defaults in sync after a rename."""
		frappe.db.set(self, "company_name", newdn)
		# Rewrite every stored default that still points at the old name.
		frappe.db.sql("""update `tabDefaultValue` set defvalue=%s
			where defkey='Company' and defvalue=%s""", (newdn, olddn))
		clear_defaults_cache()
def abbreviate(self):
self.abbr = ''.join([c[0].upper() for c in self.company_name.split()])
def on_trash(self):
"""
Trash accounts and cost centers for this company if no gl entry exists
"""
NestedSet.validate_if_child_exists(self)
frappe.utils.nestedset.update_nsm(self)
rec = frappe.db.sql("SELECT name from `tabGL Entry` where company = %s", self.name)
if not rec:
frappe.db.sql("""delete from `tabBudget Account`
where exists(select name from tabBudget
where name=`tabBudget Account`.parent and company = %s)""", self.name)
for doctype in ["Account", "Cost Center", "Budget", "Party Account"]:
frappe.db.sql("delete from `tab{0}` where company = %s".format(doctype), self.name)
if not frappe.db.get_value("Stock Ledger Entry", {"company": self.name}):
frappe.db.sql("""delete from `tabWarehouse` where company=%s""", self.name)
frappe.defaults.clear_default("company", value=self.name)
for doctype in ["Mode of Payment Account", "Item Default"]:
frappe.db.sql("delete from `tab{0}` where company = %s".format(doctype), self.name)
# clear default accounts, warehouses from item
warehouses = frappe.db.sql_list("select name from tabWarehouse where company=%s", self.name)
if warehouses:
frappe.db.sql("""delete from `tabItem Reorder` where warehouse in (%s)"""
% ', '.join(['%s']*len(warehouses)), tuple(warehouses))
# reset default company
frappe.db.sql("""update `tabSingles` set value=""
where doctype='Global Defaults' and field='default_company'
and value=%s""", self.name)
# reset default company
frappe.db.sql("""update `tabSingles` set value=""
where doctype='Chart of Accounts Importer' and field='company'
and value=%s""", self.name)
# delete BOMs
boms = frappe.db.sql_list("select name from tabBOM where company=%s", self.name)
if boms:
frappe.db.sql("delete from tabBOM where company=%s", self.name)
for dt in ("BOM Operation", "BOM Item", "BOM Scrap Item", "BOM Explosion Item"):
frappe.db.sql("delete from `tab%s` where parent in (%s)"""
% (dt, ', '.join(['%s']*len(boms))), tuple(boms))
frappe.db.sql("delete from tabEmployee where company=%s", self.name)
frappe.db.sql("delete from tabDepartment where company=%s", self.name)
frappe.db.sql("delete from `tabTax Withholding Account` where company=%s", self.name)
frappe.db.sql("delete from `tabSales Taxes and Charges Template` where company=%s", self.name)
frappe.db.sql("delete from `tabPurchase Taxes and Charges Template` where company=%s", self.name)
@frappe.whitelist()
def enqueue_replace_abbr(company, old, new):
	"""Queue a background job that swaps the company abbreviation everywhere."""
	frappe.enqueue(
		'erpnext.setup.doctype.company.company.replace_abbr',
		company=company, old=old, new=new)
@frappe.whitelist()
def replace_abbr(company, old, new):
	"""Change a company's abbreviation and rename every "<name> - <abbr>"
	record (warehouses, accounts, cost centers, ...) to the new suffix.
	"""
	new = new.strip()
	if not new:
		frappe.throw(_("Abbr can not be blank or space"))
	frappe.only_for("System Manager")
	frappe.db.set_value("Company", company, "abbr", new)
	# Both helpers close over `old`, `new` and the loop variable `dt` below.
	def _rename_record(doc):
		# Split "<base> - <abbr>" from the right; rename only when the suffix
		# matches the old abbreviation, or when there is no suffix at all.
		parts = doc[0].rsplit(" - ", 1)
		if len(parts) == 1 or parts[1].lower() == old.lower():
			frappe.rename_doc(dt, doc[0], parts[0] + " - " + new, force=True)
	def _rename_records(dt):
		# rename is expensive so let's be economical with memory usage
		doc = (d for d in frappe.db.sql("select name from `tab%s` where company=%s" % (dt, '%s'), company))
		for d in doc:
			_rename_record(d)
	for dt in ["Warehouse", "Account", "Cost Center", "Department",
			"Sales Taxes and Charges Template", "Purchase Taxes and Charges Template"]:
		_rename_records(dt)
		frappe.db.commit()
def get_name_with_abbr(name, company):
	"""Return ``name`` suffixed with the company abbreviation, adding it if missing."""
	company_abbr = frappe.get_cached_value('Company', company, "abbr")
	parts = name.split(" - ")
	already_suffixed = parts[-1].lower() == company_abbr.lower()
	if not already_suffixed:
		parts.append(company_abbr)
	return " - ".join(parts)
def install_country_fixtures(company):
	"""Run the regional setup hook for the company's country, if one exists."""
	company_doc = frappe.get_doc("Company", company)
	country = frappe.scrub(company_doc.country)
	path = frappe.get_app_path('erpnext', 'regional', country)
	if os.path.exists(path.encode("utf-8")):
		setup = frappe.get_attr("erpnext.regional.{0}.setup.setup".format(country))
		setup(company_doc, False)
def update_company_current_month_sales(company):
	"""Store the company's current-month total of submitted Sales Invoices
	in total_monthly_sales (0 when there are none).
	"""
	current_month_year = formatdate(today(), "MM-yyyy")
	# company is interpolated via frappe.db.escape, which returns a quoted,
	# escaped literal, so the format() below is safe.
	results = frappe.db.sql('''
		SELECT
			SUM(base_grand_total) AS total,
			DATE_FORMAT(`posting_date`, '%m-%Y') AS month_year
		FROM
			`tabSales Invoice`
		WHERE
			DATE_FORMAT(`posting_date`, '%m-%Y') = '{current_month_year}'
			AND docstatus = 1
			AND company = {company}
		GROUP BY
			month_year
	'''.format(current_month_year=current_month_year, company=frappe.db.escape(company)),
	as_dict = True)
	monthly_total = results[0]['total'] if len(results) > 0 else 0
	frappe.db.set_value("Company", company, "total_monthly_sales", monthly_total)
def update_company_monthly_sales(company):
	'''Cache past year monthly sales of every company based on sales invoices'''
	from frappe.utils.goal import get_monthly_results
	import json
	filter_str = "company = {0} and status != 'Draft' and docstatus=1".format(frappe.db.escape(company))
	history = get_monthly_results(
		"Sales Invoice", "base_grand_total", "posting_date", filter_str, "sum")
	frappe.db.set_value("Company", company, "sales_monthly_history", json.dumps(history))
def update_transactions_annual_history(company, commit=False):
	"""Recompute and persist the company's annual transaction-count history."""
	history = get_all_transactions_annual_history(company)
	frappe.db.set_value("Company", company, "transactions_annual_history", json.dumps(history))
	if commit:
		frappe.db.commit()
def cache_companies_monthly_sales_history():
	"""Refresh cached sales and transaction history for every company, then commit."""
	for row in frappe.get_list("Company"):
		update_company_monthly_sales(row['name'])
		update_transactions_annual_history(row['name'])
	frappe.db.commit()
@frappe.whitelist()
def get_children(doctype, parent=None, company=None, is_root=False):
	"""Return child companies of ``parent`` for the tree view.

	``company`` and ``is_root`` are accepted for tree-view API compatibility
	but are not used by the query. The virtual root "All Companies" maps to
	records with an empty parent_company.
	"""
	# FIX: identity comparison with None (PEP 8), not ==.
	if parent is None or parent == "All Companies":
		parent = ""

	# parent is interpolated via frappe.db.escape (quoted + escaped), safe here.
	return frappe.db.sql("""
		select
			name as value,
			is_group as expandable
		from
			`tab{doctype}` comp
		where
			ifnull(parent_company, "")={parent}
		""".format(
			doctype = doctype,
			parent=frappe.db.escape(parent)
		), as_dict=1)
@frappe.whitelist()
def add_node():
	"""Create a company record from the tree-view form arguments."""
	from frappe.desk.treeview import make_tree_args
	args = make_tree_args(**frappe.form_dict)
	# "All Companies" is a virtual root, not an actual parent record.
	if args.parent_company == 'All Companies':
		args.parent_company = None
	frappe.get_doc(args).insert()
def get_all_transactions_annual_history(company):
	"""Return {unix_timestamp: count} of this company's transactions per day
	over the last year, across sales documents, issues and projects.
	"""
	out = {}
	# Normalise each doctype's date column to `transaction_date`, union them,
	# then count documents per day for the trailing 12 months.
	items = frappe.db.sql('''
		select transaction_date, count(*) as count

		from (
			select name, transaction_date, company
			from `tabQuotation`

			UNION ALL

			select name, transaction_date, company
			from `tabSales Order`

			UNION ALL

			select name, posting_date as transaction_date, company
			from `tabDelivery Note`

			UNION ALL

			select name, posting_date as transaction_date, company
			from `tabSales Invoice`

			UNION ALL

			select name, creation as transaction_date, company
			from `tabIssue`

			UNION ALL

			select name, creation as transaction_date, company
			from `tabProject`
		) t

		where
			company=%s
			and
			transaction_date > date_sub(curdate(), interval 1 year)

		group by
			transaction_date
		''', (company), as_dict=True)

	for d in items:
		timestamp = get_timestamp(d["transaction_date"])
		out.update({ timestamp: d["count"] })

	return out
def get_timeline_data(doctype, name):
	'''returns timeline data based on linked records in dashboard'''
	history = frappe.get_cached_value('Company', name, "transactions_annual_history")

	# Treat anything that is not parseable JSON-object text as a missing cache.
	date_to_value_dict = None
	try:
		if history and '{' in history:
			date_to_value_dict = json.loads(history)
	except ValueError:
		date_to_value_dict = None

	if date_to_value_dict is not None:
		return date_to_value_dict

	# Cache empty or corrupt: rebuild it and read it back.
	update_transactions_annual_history(name, True)
	history = frappe.get_cached_value('Company', name, "transactions_annual_history")
	if history and '{' in history:
		return json.loads(history)
	return {}
|
Zlash65/erpnext
|
erpnext/setup/doctype/company/company.py
|
Python
|
gpl-3.0
| 19,798
|
# -*- coding: UTF-8 -*-
# Copyright (C) 2005-2011 J. David Ibáñez <jdavid.ibp@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from unittest import TestCase, main
# Import from itools
from itools.web import Cookie, SetCookieDataType
from itools.web.headers import ContentType, ContentDisposition, CookieDataType
from itools.web.headers import read_token, read_quoted_string, read_parameter
from itools.web.headers import read_parameters
class ParsingTestCase(TestCase):
    """Test basic parsing functions.
    """
    def test_token(self):
        raw = 'Hello World'
        self.assertEqual(read_token(raw), ('Hello', ' World'))

    def test_quoted_string(self):
        raw = '"Hello World"'
        self.assertEqual(read_quoted_string(raw), ('Hello World', ''))

    def test_parameter(self):
        raw = 'Part_Number="Rocket_Launcher_0001"'
        expected = (('part_number', 'Rocket_Launcher_0001'), '')
        self.assertEqual(read_parameter(raw), expected)

    def test_parameters(self):
        raw = '; Version="1"; Path="/acme"'
        expected = ({'version': '1', 'path': '/acme'}, '')
        self.assertEqual(read_parameters(raw), expected)
class StandardHeadersTestCase(TestCase):
    """Decoding of standard Content-Type / Content-Disposition headers."""

    def test_content_type(self):
        raw = 'text/html; charset=UTF-8'
        expected = ('text/html', {'charset': 'UTF-8'})
        self.assertEqual(ContentType.decode(raw), expected)

    def test_content_disposition(self):
        raw = ('attachment; filename=genome.jpeg;'
               'modification-date="Wed, 12 Feb 1997 16:29:51 -0500"')
        expected = ('attachment',
                    {'filename': 'genome.jpeg',
                     'modification-date': 'Wed, 12 Feb 1997 16:29:51 -0500'})
        self.assertEqual(ContentDisposition.decode(raw), expected)
class CookieTestCase(TestCase):
    """Round-trip and edge-case tests for the Cookie header codec."""

    #def test_set_cookie_decode_encode_decode(self):
    #    a = 'Customer="WILE_E_COYOTE"; Path="/acme"'
    #    b = SetCookieDataType.decode(a)
    #    c = SetCookieDataType.encode(b)
    #    d = SetCookieDataType.decode(c)
    #    self.assertEqual(b, d)

    def test_cookie_decode_encode_decode(self):
        # decode -> encode -> decode must be stable (idempotent round trip).
        a = 'Customer="WILE_E_COYOTE"; $Path="/acme"'
        b = CookieDataType.decode(a)
        c = CookieDataType.encode(b)
        d = CookieDataType.decode(c)
        self.assertEqual(b, d)

    #######################################################################
    # Netscape Cookies
    # http://wp.netscape.com/newsref/std/cookie_spec.html
    #######################################################################
    #def test_example1(self):
    #    # Client requests a document, and receives in the response:
    #    a = ('CUSTOMER=WILE_E_COYOTE; path=/; expires=Wednesday,'
    #         ' 09-Nov-99 23:12:40 GMT')
    #    b = {'customer': Cookie('customer', 'WILE_E_COYOTE', path='/',
    #            expires='Wednesday, 09-Nov-99 23:12:40 GMT')}
    #    self.assertEqual(SetCookieDataType.decode(a), b)
    #    # When client requests a URL in path "/" on this server, it sends:
    #    a = 'CUSTOMER=WILE_E_COYOTE'
    #    b = {'customer': Cookie('customer', 'WILE_E_COYOTE')}
    #    self.assertEqual(CookieDataType.decode(a), b)
    #    # Client requests a document, and receives in the response:
    #    a = 'PART_NUMBER=ROCKET_LAUNCHER_0001; path=/'
    #    b = {'part_number': Cookie('customer', 'ROCKET_LAUNCHER_0001', path='/')}
    #    self.assertEqual(SetCookieDataType.decode(a), b)
    #    # When client requests a URL in path "/" on this server, it sends:
    #    a = 'CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001'
    #    b = {'customer': Cookie('customer', 'WILE_E_COYOTE'),
    #         'part_number': Cookie('part_number', 'ROCKET_LAUNCHER_0001')}
    #    self.assertEqual(CookieDataType.decode(a), b)
    #    # Client receives:
    #    a = 'SHIPPING=FEDEX; path=/foo'
    #    b = {'shipping': Cookie('shipping', 'FEDEX', path='/foo')}
    #    self.assertEqual(SetCookieDataType.decode(a), b)
    #    # When client requests a URL in path "/foo" on this server, it sends:
    #    a = 'CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001; SHIPPING=FEDEX'
    #    b = {'customer': Cookie('customer', 'WILE_E_COYOTE'),
    #         'part_number': Cookie('part_number', 'ROCKET_LAUNCHER_0001'),
    #         'shipping': Cookie('shipping', 'FEDEX')}
    #    self.assertEqual(CookieDataType.decode(a), b)

    #######################################################################
    # Netscape Cookies (old style)
    #######################################################################
    def test_google(self):
        # A real-world cookie header with repeated names and encoded values.
        cookie = '__utma=148580960.1549592533.1131137049.1200608996.1200962259.202; __qca=1193853942-44919481-52504193; __utmz=148580960.1196124914.184.2.utmccn=(organic)|utmcsr=google|utmctr=lorum+generator|utmcmd=organic; __qcb=689621141; __utmc=148580960; T3CK=TANT%3D1%7CTANO%3D0; __utma=148580960.1549592533.1131137049.1140634832.1140725853.67'
        # NOTE(review): '__utma' appears twice in this dict literal; the second
        # entry silently overwrites the first, so only six cookies are asserted.
        expected = {
            '__utma': Cookie('__utma', '148580960.1549592533.1131137049.1200608996.1200962259.202'),
            '__qca': Cookie('__qca', '1193853942-44919481-52504193'),
            '__utmz': Cookie('__utmz', '148580960.1196124914.184.2.utmccn=(organic)|utmcsr=google|utmctr=lorum+generator|utmcmd=organic'),
            '__qcb': Cookie('__qcb', '689621141'),
            '__utmc': Cookie('__utmc', '148580960'),
            't3ck': Cookie('t3ck', 'TANT%3D1%7CTANO%3D0'),
            '__utma': Cookie('__utma', '148580960.1549592533.1131137049.1140634832.1140725853.67')}
        self.assertEqual(CookieDataType.decode(cookie), expected)

    #######################################################################
    # Common cases
    #######################################################################
    def test_last_is_empty(self):
        # A trailing '=' with no value decodes to the empty string.
        cookie = 'areYourCookiesEnabled='
        expected = {'areyourcookiesenabled': Cookie('areyourcookiesenabled', '')}
        self.assertEqual(CookieDataType.decode(cookie), expected)

    def test_ends_with_semicolon(self):
        cookie = 'language="en";'
        expected = {'language': Cookie('language', 'en')}
        self.assertEqual(CookieDataType.decode(cookie), expected)

    def test_garbage(self):
        # Tokens without '=' are skipped; valid pairs around them survive.
        cookie = 'a=1; toto; b=2'
        expected = {'a': Cookie('a', '1'), 'b': Cookie('b', '2')}
        self.assertEqual(CookieDataType.decode(cookie), expected)
#class MyRootView(BaseView):
# access = True
# def GET(self, resource, context):
# return 'hello world'
#
#
#class MyRoot(RootResource):
# default_view_name = 'view'
# view = MyRootView()
#
#
#class ServerTestCase(TestCase):
#
# def test00_simple(self):
# root = MyRoot()
# server = WebServer(root)
# server.start()
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    main()
|
bepatient-fr/itools
|
test/test_web.py
|
Python
|
gpl-3.0
| 7,427
|
#!/usr/bin/env python3
#
# This file is part of the Malboxes project.
#
# Malboxes - Vagrant box builder and config generator for malware analysis
# https://github.com/gosecure/malboxes
#
# Olivier Bilodeau <obilodeau@gosecure.ca>
# Copyright (C) 2016 GoSecure Inc.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
import glob
import json
import os
import re
import sys
import unittest
from jinja2 import Environment, FileSystemLoader
from malboxes.malboxes import load_config
class PackerTemplateTestCase(unittest.TestCase):
    """Render every packer template profile and verify it yields valid JSON."""

    def setUp(self):
        # autoescape off: these templates generate JSON, not HTML.
        self.env = Environment(loader=FileSystemLoader('malboxes/templates/'),
                               autoescape=False)

    def test_packer_template_rendering(self):
        for profile in glob.glob("malboxes/templates/*.json"):
            print("Processing file {}".format(profile))

            # Derive the profile name (filename without .json) for load_config.
            profile_name = os.path.basename(profile)
            config = load_config(open('malboxes/config-example.js', 'r'),
                                 re.match('(.*).json$', profile_name).group(1))

            try:
                template = self.env.get_template(os.path.basename(profile_name))
                profile_json = template.render(config) # pylint: disable=no-member
                print("Properly formatted Jinja2 template")
            except Exception:
                print("Badly formatted Jinja2 template! Failing test...")
                # BUG FIX: was `raise()`, which raises "exceptions must derive
                # from BaseException" instead of re-raising the original error.
                raise

            # test if json is valid
            try:
                json.loads(profile_json)
                print("Properly formatted JSON file")
            except Exception:
                print("Badly formatted JSON file! Failing test...")
                raise
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
GoSecure/malboxes
|
tests/test_packer_templates.py
|
Python
|
gpl-3.0
| 2,226
|
#!/usr/bin/env python
# -*- coding:utf8 -*-
# Copyright (C) 2011 Maxim Kovalev
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# @version $Id: sputnik.py 351 2012-07-27 05:09:23Z maxim.kovalev $
# maxim.kovalev@2012.auditory.ru
import pygame
import sys
import math
import argparse
import random
# Gravitational parameter (GM) used by the two-body formulas below.
mu = 200
# Exponent of the distance in the gravity law: force = mu / dst**gravity_factor.
gravity_factor = 2
class Sputnik:
    """A point satellite orbiting a fixed gravity source, with keyboard engine.

    Tracks position, velocity and engine thrust, keeps a trail of past
    positions, and renders itself plus its osculating-orbit ellipse on a
    pygame surface.

    NOTE(review): render() uses a tuple-parameter lambda (Python 2-only
    syntax) and relies on map() returning a list; this class targets Python 2.
    """
    def __init__(self, start_point, g_point, v, planet_r, color = (255, 0, 255), boldness = 5):
        # Current position and velocity components, kept as floats.
        self.point = float(start_point[0]), float(start_point[1])
        self.vx, self.vy = float(v[0]), float(v[1])
        # Fixed point that exerts gravity on this sputnik.
        self.g_point = g_point
        # Engine thrust direction set by process_event (-1/0/1 per axis).
        self.enginex = self.enginey = 0.0
        self.speed = 0.0
        # Trail of past positions; `future` is allocated but unused here.
        self.past = []
        self.future = []
        self.zoom = 1.0
        self.planet_r = planet_r
        self.center_shift = 0, 0
        self.willfail = False
        self.color = color
        self.boldness = boldness
    def render(self, surface, color=(0,0,255)):
        """Draw the orbit ellipse, the past trail, the sputnik and its thrust vector."""
        try:
            # Orbit geometry from current state: semi-major/minor axes,
            # focal distance and argument of periapsis.
            R = (self.point[0] - self.g_point[0]), (self.point[1] - self.g_point[1])
            SMA = sma(dist(self.point, self.g_point), self.speed)
            ECC = ecc(R, (self.vx, self.vy))
            # Complex sqrt keeps this defined for ECC > 1; take the real part.
            SMI = (SMA*((1+0j-ECC*ECC)**0.5)).real
            F = math.sqrt(SMA**2 - SMI**2)
            AGP = agp(R, (self.vx, self.vy))
            HS = project((self.vx, self.vy), (-R[1], R[0]))
            if HS < 0:
                AGP = 180 - AGP
            # Offset of the ellipse center from the gravity focus.
            shiftx = int(F*math.cos(math.pi*AGP/180)*self.zoom)
            shifty = int(F*math.sin(math.pi*AGP/180)*self.zoom)
            elrect = pygame.Rect(0, 0, int(SMA*2*self.zoom), int(SMI*2*self.zoom))
            elsur = pygame.Surface( (int(SMA*2*self.zoom), int(SMI*2*self.zoom)) )
            # White is the transparent colorkey for the ellipse overlay.
            elsur.fill((255,255,255))
            elsur.set_colorkey((255, 255, 255))
            pygame.draw.ellipse(elsur, (255, 255, 0), elrect, 1)
            #pygame.draw.line(elsur, (255,0,0), elrect.midtop, elrect.midbottom)
            #pygame.draw.line(elsur, (255,0,0), elrect.midleft, elrect.midright)
            #cx, cy = elrect.center
            #f1 = cx - F*self.zoom, cy
            #f2 = cx + F*self.zoom, cy
            #pygame.draw.circle(elsur, (255,0,0), map(int, f1), 10)
            #pygame.draw.circle(elsur, (255,0,0), map(int, f2), 10)
            transelsur = pygame.transform.rotate(elsur, 180-AGP)
            transelsurrect = transelsur.get_rect()
            transelsurrect.center = self.g_point
            transelsurrect = transelsurrect.move(shiftx, shifty)
            surface.blit(transelsur, transelsurrect)
        except Exception as e:
            #print e
            # Degenerate/hyperbolic geometry: silently skip the ellipse.
            pass
        # Past trail, scaled and shifted into screen coordinates (py2-only lambda).
        if len(self.past) >= 2: pygame.draw.lines(surface, color, False, map( (lambda (x, y): (x*self.zoom+self.center_shift[0], y*self.zoom+self.center_shift[1])) , self.past), 2)
        to_render = int(self.point[0]*self.zoom + self.center_shift[0]), int(self.point[1]*self.zoom + self.center_shift[1])
        pygame.draw.circle(surface, self.color, to_render, self.boldness)
        # Red line opposite to the thrust direction.
        pygame.draw.line(surface, (255,0,0), to_render, (to_render[0]-self.enginex*10, to_render[1]-self.enginey*10), 3)
    def step(self, fps=25, prediction_distance=10000, history_depth=500, ep=0.01):
        """Advance one Euler integration step: gravity + engine thrust (ep).

        prediction_distance is accepted but unused here.
        """
        timestep = 200.0/fps
        x, y = self.point
        # Avoid a division by zero exactly at the gravity point.
        if self.g_point == self.point:
            ax = ay = 0
        else:
            ax, ay = gravity(self.g_point, (x, y))
        ax += self.enginex*ep
        ay += self.enginey*ep
        self.vx += ax * timestep
        self.vy += ay *timestep
        x += self.vx * timestep
        y += self.vy * timestep
        self.point = x, y
        self.speed = math.sqrt(self.vx*self.vx+self.vy*self.vy)
        # Append to the trail, dropping the oldest point past history_depth.
        self.past.append(self.point)
        if len(self.past) >= history_depth:
            del self.past[0]
    def process_event(self, event):
        """Map arrow-key presses/releases to engine thrust components."""
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_UP:
                self.enginey = -1
            elif event.key == pygame.K_DOWN:
                self.enginey = 1
            elif event.key == pygame.K_LEFT:
                self.enginex = -1
            elif event.key == pygame.K_RIGHT:
                self.enginex = 1
        elif event.type == pygame.KEYUP:
            if event.key in [pygame.K_UP, pygame.K_DOWN]:
                self.enginey = 0
            elif event.key in [pygame.K_LEFT, pygame.K_RIGHT]:
                self.enginex = 0
def gravity(g_point, sputnik):
    """Acceleration (fx, fy) pulling ``sputnik`` toward ``g_point``.

    Magnitude is mu / dst**gravity_factor, directed along the unit vector
    from sputnik to the gravity point.
    """
    dx = float(g_point[0] - sputnik[0])
    dy = float(g_point[1] - sputnik[1])
    dst = math.sqrt(dx*dx + dy*dy)
    force = mu/(dst**gravity_factor)
    # Scale the unit direction vector by the force magnitude.
    return dx*force/dst, dy*force/dst
def sma(r, v):
    """Semi-major axis from current radius ``r`` and speed ``v`` (vis-viva relation)."""
    denominator = (2.0/r) - ((v*v) / mu)
    return 1/denominator
def vecc(rv, vv):
    """Eccentricity vector (ex, ey) for position ``rv`` and velocity ``vv``.

    Uses e = (v x h)/mu - r/|r| with h the z-component of r x v.
    """
    rx, ry = float(rv[0]), float(rv[1])
    vx, vy = float(vv[0]), float(vv[1])
    r = math.sqrt(rx*rx + ry*ry)
    # z-component of the specific angular momentum.
    h = rx*vy - ry*vx
    ex = (1.0/mu)*(vy*h - mu*rx/r)
    ey = (1.0/mu)*(-vx*h - mu*ry/r)
    return ex, ey
def ecc(rv, vv):
    """Scalar orbital eccentricity for position ``rv`` and velocity ``vv``."""
    rx, ry = float(rv[0]), float(rv[1])
    vx, vy = float(vv[0]), float(vv[1])
    r = math.sqrt(rx*rx + ry*ry)
    # z-component of the specific angular momentum.
    h = rx*vy - ry*vx
    ex = (1.0/mu)*(vy*h - mu*rx/r)
    ey = (1.0/mu)*(-vx*h - mu*ry/r)
    return math.sqrt(ex*ex + ey*ey)
def project(X, Y):
    """Scalar projection of vector ``X`` onto the direction of ``Y``.

    Equivalent to dot(X, Y/|Y|). Raises ZeroDivisionError when Y is the zero
    vector, matching the original behaviour.
    """
    x1, x2 = float(X[0]), float(X[1])
    y1, y2 = float(Y[0]), float(Y[1])
    dy = math.sqrt(y1*y1 + y2*y2)
    ny = y1/dy, y2/dy
    # Removed dead locals from the original (r1, r2, dx were never used);
    # the result is simply the dot product with the unit vector of Y.
    return x1*ny[0] + x2*ny[1]
def agp(rv, vv):
    """Angle in degrees between the node direction n = (-h, 0) and the
    eccentricity vector (the argument of periapsis), mapped into [0, 360).
    """
    global mu
    rx, ry = map(float, rv)
    vx, vy = map(float, vv)
    vsq = vx*vx + vy*vy  # NOTE(review): unused
    r = math.sqrt(rx*rx + ry*ry)
    # z-component of the specific angular momentum r x v.
    h = rx*vy - ry*vx
    n = -h, 0
    dn = math.sqrt(n[0]**2 + n[1]**2)
    vxh = vy*h
    vyh = -vx*h
    # Eccentricity vector e = (v x h)/mu - r/|r|.
    ex = (1.0/mu)*(vxh - mu*rx/r)
    ey = (1.0/mu)*(vyh - mu*ry/r)
    de = math.sqrt(ex*ex + ey*ey)
    res = math.acos((n[0]*ex + n[1]*ey)/(dn*de))
    # Take the reflex angle when e points into the lower half-plane.
    if ey > 0:
        res = 2*math.pi - res
    return 180*res/math.pi
def agl(rv, vv):
    """Angle in degrees between the node direction n = (-h, 0) and the
    position vector ``rv``, mapped into [0, 360).

    ``vv`` is only used to choose the half-plane via the sign of n . v.
    Removed dead code from the original: an unused `global mu` declaration
    and an unused `vsq` computation.
    """
    rx, ry = float(rv[0]), float(rv[1])
    vx, vy = float(vv[0]), float(vv[1])
    r = math.sqrt(rx*rx + ry*ry)
    # z-component of the specific angular momentum r x v.
    h = rx*vy - ry*vx
    n = -h, 0
    dn = math.sqrt(n[0]**2 + n[1]**2)
    res = math.acos((n[0]*rx + n[1]*ry)/(dn*r))
    # Take the reflex angle when velocity points along the node direction.
    if n[0]*vx + n[1]*vy > 0:
        res = 2*math.pi - res
    return 180*res/math.pi
def tra(rv, vv):
    """True anomaly in degrees: angle from the eccentricity vector to the
    position vector, mapped into [0, 360)."""
    rx, ry = float(rv[0]), float(rv[1])
    vx, vy = float(vv[0]), float(vv[1])
    r = math.sqrt(rx*rx + ry*ry)
    # Eccentricity vector e = (v x h)/mu - r/|r|.
    h = rx*vy - ry*vx
    ex = (1.0/mu)*(vy*h - mu*rx/r)
    ey = (1.0/mu)*(-vx*h - mu*ry/r)
    de = math.sqrt(ex*ex + ey*ey)
    ang = math.acos((ex*rx + ey*ry)/(de*r))
    # When the radial velocity is negative (r . v < 0), take the reflex angle.
    if rx*vx + ry*vy < 0:
        ang = math.pi*2 - ang
    return 180*ang/math.pi
def eca(ECC, TRA):
    """Eccentric anomaly in degrees from eccentricity ``ECC`` and true anomaly ``TRA``.

    NOTE(review): ECC is run through a degrees-to-radians conversion even
    though eccentricity is dimensionless; preserved as-is from the original.
    Any math error (division by zero, domain) yields 180.
    """
    ECC = ECC*math.pi/180
    TRA = TRA*math.pi/180
    try:
        t = math.tan(TRA/2)/(math.sqrt((1+ECC)/(1-ECC)))
    except Exception:
        return 180
    res = 2*math.atan(t)
    return 180*res/math.pi
def _mea(ECC, ECA):
ECC = ECC*math.pi/180
ECA = ECA*math.pi/180
res = ECA - ECC*math.sin(ECA)
if res < 0:
res = 2*math.pi + res
return 180*res/math.pi
def mea(ECC, TRA):
    """Mean anomaly in degrees from eccentricity and true anomaly."""
    eccentric_anomaly = eca(ECC, TRA)
    return _mea(ECC, eccentric_anomaly)
def relvr(p1, v1, p2, v2):
    """Return (distance, signed relative speed) between two bodies.

    The speed carries the sign of the relative velocity projected on the
    separation axis (positive when the bodies are separating).
    """
    xrel = float(p2[0]) - float(p1[0])
    yrel = float(p2[1]) - float(p1[1])
    vxrel = float(v2[0]) - float(v1[0])
    vyrel = float(v2[1]) - float(v1[1])
    separation = math.sqrt(xrel**2 + yrel**2)
    speed = math.sqrt(vxrel**2 + vyrel**2)
    speed = math.copysign(speed, project((vxrel, vyrel), (xrel, yrel)))
    return separation, speed
class Label(object):
    """A pygame text widget: a static caption with a formatted value below it.

    NOTE(review): uses the `unicode` builtin, which exists only in Python 2.
    """
    def __init__(self, text, form, **kwargs):
        """ self, text, form, color=(0,0,0), font="Arial", fontsize=24, align="left" """
        self.text = text
        # printf-style format string applied to the value in set_value().
        self.form = form
        self.color = kwargs.get("color", (32,255,32))
        self.align = kwargs.get("align", "left")
        self.font = pygame.font.Font(pygame.font.match_font(kwargs.get("font", "Arial")), kwargs.get("fontsize", 24))
        self.label = self.font.render(unicode(self.text), True, self.color)
        self.rect = self.label.get_rect()
    def set_value(self, value):
        """Render the formatted value and rebuild the composite surface."""
        self.val = self.font.render(unicode(self.form) % value, True, self.color)
        valrect = self.val.get_rect()
        labrect = self.label.get_rect()
        # Surface tall/wide enough for caption and value stacked vertically.
        self.surface = pygame.Surface( (valrect.width + labrect.width, valrect.height + labrect.height) )
        self.rect = self.surface.get_rect()
        if self.align == "left":
            labrect.topleft = 0,0
            valrect.topleft = labrect.bottomleft
        else:
            labrect.topright = self.rect.topright
            valrect.topright = labrect.bottomright
        # White is the transparent colorkey.
        self.surface.fill((255,255,255))
        self.surface.set_colorkey((255,255,255))
        self.surface.blit(self.label, labrect)
        self.surface.blit(self.val, valrect)
    def render(self, surface):
        """Blit the composite label+value surface at self.rect."""
        surface.blit(self.surface, self.rect)
def dist(a, b):
    """Euclidean distance between 2-D points ``a`` and ``b``."""
    return math.sqrt((a[0] - b[0])**2 + (a[1] - b[1])**2)
def scaleblit(dst, src, zoom, center_shift = (0, 0) ):
    """Blit ``src`` onto ``dst`` scaled by ``zoom``, offset by ``center_shift``.

    The destination rect keeps src's unscaled size, shifted by center_shift.
    """
    width, height = src.get_rect().size
    scaled = pygame.transform.scale(src, (int(width*zoom), int(height*zoom)))
    target = src.get_rect()
    target.centerx += center_shift[0]
    target.centery += center_shift[1]
    dst.blit(scaled, target)
def main():
    """Entry point: parse options, set up pygame, run the simulation loop.

    Draws a planet with atmosphere rings, a star field, two satellites
    (one player-controlled), an orbit trace, and a HUD of orbital
    elements recomputed every frame. Runs forever until the window is
    closed; the physics stops once the player satellite hits the planet.
    """
    global mu
    global gravity_factor
    # Window / world constants.
    size = 700, 700
    g_point = size[0]/2, size[0]/2
    bgcolor = 255, 255, 255
    planet_r = 50
    star_count = 100
    air_alt = 10
    # --- command-line options ---
    parser = argparse.ArgumentParser(description=u"Simple sputnik emulator. Keys: UP, DOWN, LEFT, RIGHT -- start engine to corresponding direction; \"-\" -- zoom out; \"+\" -- zoom in")
    parser.add_argument("-p", "--prediction-depth", action="store", default=1000, type=int, help="Number of steps calculated while predicting the orbit. 1000 by default.")
    parser.add_argument("-t", "--trace-depth", action="store", default=10000, type=int, help="Number of steps stored in orbit history. 10000 by default")
    parser.add_argument("-e", "--engine-power", action="store", default=0.01, type=float, help="Force of sputnik's engine. 0.01 by default")
    # parser.add_argument("--tangent-speed", action="store", default=1.581, type=float, help="Initial tangent speed of sputnik. 1.581 by defaut")
    parser.add_argument("--tangent-speed", action="store", default=1.86, type=float, help="Initial tangent speed of sputnik. 1.86 by defaut")
    parser.add_argument("--normal-speed", action="store", default=0, type=float, help="Initial normal speed of sputnik. 0 by default.")
    parser.add_argument("-a", "--altitude", action="store", default=30, type=int, help="Initial altitude of sputnik. 30 by default")
    parser.add_argument("-g", "--gravity-factor", action="store", default=2, type=float, help="Power of the denominator of gravity force. 2 by default")
    parser.add_argument("-m", "--gravity-constant", action="store", default=200, type=float, help="Gravity constant. 200 by default")
    args = parser.parse_args()
    prediction_depth = args.prediction_depth
    history_depth = args.trace_depth
    ep = args.engine_power
    vx = args.tangent_speed
    vy = args.normal_speed
    alt = args.altitude
    gravity_factor = args.gravity_factor
    mu = args.gravity_constant
    pygame.init()
    # --- HUD labels (value formats are printf-style) ---
    sma_label = Label("Semi-major axis:", "%d")
    ecc_label = Label("Eccentricity:", "%.2f")
    smi_label = Label("Semi-minor axis:", "%d")
    per_label = Label("Periapsis (radius):", "%d")
    apo_label = Label("Apoapsis (radius):", "%d")
    agp_label = Label("Agrument of periapsis:", "%d")
    rad_label = Label("Radius:", "%.2f", align="right")
    t_label = Label("Period:", "%d")
    vs_label = Label("Vertical velocity:", "%.2f", align="right")
    hs_label = Label("Horizontal velocity:", "%.2f", align="right")
    tra_label = Label("True anomaly:", "%.2f", align="right")
    mea_label = Label("Mean anomaly:", "%.2f", align="right")
    agl_label = Label("Agrument of latitude:", "%.2f", align="right")
    dist_label = Label("Distance between satellites:", "%.2f", align="right")
    vrel_label = Label("Relative velocity:", "%.2f", align="right")
    alt_label = Label("Altitude:", "%.2f", align="right")
    speed_label = Label("Velocity:", "%.2f", align="right")
    # fps_label = Label("FPS:", "%.2f")
    screen = pygame.display.set_mode(size, pygame.DOUBLEBUF | pygame.HWSURFACE)
    pygame.display.set_caption(u"Sputnik")
    clock = pygame.time.Clock()
    # Player satellite starts just above the planet's "north pole";
    # the commented lines are alternative starting positions.
    sputnik1 = Sputnik((g_point[0],g_point[1]-planet_r-alt), g_point, (vx, vy), planet_r)
    # sputnik1 = Sputnik((g_point[0],g_point[1]+planet_r+alt), g_point, (-vx, -vy), planet_r)
    # sputnik1 = Sputnik((g_point[0]-planet_r-alt,g_point[1]), g_point, (vy, -vx), planet_r)
    # sputnik1 = Sputnik((g_point[0]+planet_r+alt,g_point[1]), g_point, (-vy, vx), planet_r)
    sputnik2 = Sputnik((g_point[0], g_point[1]-planet_r-120), g_point, (1.1, 0), planet_r, color=(200, 200, 200), boldness=10)
    # Static background layer (planet, continents, grid, atmosphere):
    # drawn once here, blitted every frame. White is the colorkey.
    trace = pygame.Surface(size)
    trace.fill((255,255,255))
    trace.set_colorkey((255,255,255))
    failfont = pygame.font.Font(pygame.font.match_font("Arial"), 24)
    faillabel = failfont.render(unicode("You will fail!"), True, (255, 0, 0))
    failrect = faillabel.get_rect()
    failrect.midbottom = screen.get_rect().midbottom
    pygame.draw.circle(trace, (0,0,255), g_point, planet_r)
    for _ in xrange(70):
        # Random green "continents" inside the planet disc.
        r = random.randrange(2, planet_r/4)
        dr = random.randrange(1, planet_r - r)
        vect = random.random()*2*math.pi
        x = int(g_point[0] + dr*math.cos(vect))
        y = int(g_point[1] + dr*math.sin(vect))
        pygame.draw.circle(trace, (0,255,0), (x, y), r)
    # NOTE(review): the second endpoint's x uses g_point[1]; harmless here
    # because g_point is square (x == y), but looks like a typo.
    pygame.draw.line(trace, (0,0,0), (g_point[0]-planet_r, g_point[1]), (g_point[1]+planet_r, g_point[1]), 2)
    # Four progressively narrower ellipses as "meridians" on the planet.
    rct = pygame.Rect(g_point[0]-planet_r, g_point[1]-planet_r, 2*planet_r, 2*planet_r)
    rct.width -= planet_r/4
    rct.centerx = g_point[0]
    pygame.draw.ellipse(trace, (0,0,0), rct, 2)
    rct.width -= planet_r/2
    rct.centerx = g_point[0]
    pygame.draw.ellipse(trace, (0,0,0), rct, 2)
    rct.width -= planet_r/2
    rct.centerx = g_point[0]
    pygame.draw.ellipse(trace, (0,0,0), rct, 2)
    rct.width -= planet_r/2
    rct.centerx = g_point[0]
    pygame.draw.ellipse(trace, (0,0,0), rct, 2)
    for i in xrange(air_alt):
        # Atmosphere: progressively darker cyan rings above the surface.
        c = int(255/float(i+1))
        pygame.draw.circle(trace, (0,c,c), g_point, planet_r + i, 1)
    # Star-field background layer.
    stars = pygame.Surface(size)
    stars.fill((0,0,0))
    for _ in xrange(star_count):
        center = random.randrange(1, size[0]), random.randrange(1, size[1])
        pygame.draw.circle(stars, (255,255,255), center, 1)
    # Per-frame satellite layer; black is its colorkey.
    sputniks = pygame.Surface(size)
    sputniks.set_colorkey((0,0,0))
    running = True
    zoom = 1.0
    d_zoom = 0.0
    while True:
        # --- input handling: quit, zoom keys, and satellite controls ---
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit(0)
            elif event.type == pygame.KEYDOWN:
                if event.key == pygame.K_MINUS:
                    d_zoom = -0.02
                elif event.key in [pygame.K_PLUS, pygame.K_EQUALS]:
                    d_zoom = 0.02
            elif event.type == pygame.KEYUP:
                if event.key in [pygame.K_PLUS, pygame.K_EQUALS, pygame.K_MINUS]:
                    d_zoom = 0.0
            sputnik1.process_event(event)
        fps = clock.get_fps()
        if not fps: fps = 200
        if running:
            # Fixed-rate physics step; stops permanently after a crash.
            sputnik1.step(200, prediction_depth, history_depth, ep)
            sputnik2.step(200, prediction_depth, history_depth, ep)
        if dist(sputnik1.point, g_point) <= planet_r:
            running = False
        # Apply continuous zoom while a zoom key is held; clamp above 0.
        if zoom > 0:
            zoom += d_zoom
        else:
            zoom = 0.02
        # Keep the view centred while zooming.
        center = screen.get_rect().center
        fake_center = center[0]*float(zoom), center[1]*float(zoom)
        center_shift = center[0] - fake_center[0], center[1] - fake_center[1]
        sputnik1.center_shift = center_shift
        sputnik2.center_shift = center_shift
        sputnik1.zoom = zoom
        sputnik2.zoom = zoom
        screen.blit(stars, (0, 0))
        sputniks.fill((0,0,0))
        sputnik2.render(sputniks, (0,255,0))
        sputnik1.render(sputniks)
        scaleblit(screen, trace, zoom, center_shift)
        scaleblit(screen, sputniks, 1)
        if sputnik1.willfail:
            screen.blit(faillabel, failrect)
        # --- orbital elements of the player satellite for the HUD ---
        R = (sputnik1.point[0] - g_point[0]), (sputnik1.point[1] - g_point[1])
        SMA = sma(dist(sputnik1.point, g_point), sputnik1.speed)
        RAD = dist(sputnik1.point, g_point)
        ALT = RAD -planet_r
        ECC = ecc(R, (sputnik1.vx, sputnik1.vy))
        SMI = SMA*((1+0j-ECC*ECC)**0.5)
        VS = project((sputnik1.vx, sputnik1.vy), R)
        HS = project((sputnik1.vx, sputnik1.vy), (-R[1], R[0]))
        APO = (1+ECC)*SMA
        PER = SMA - ECC*SMA
        T = (2*math.pi*(0j+SMA**3/mu)**(0.5)).real
        AGP = agp(R, (sputnik1.vx, sputnik1.vy))
        AGL = agl(R, (sputnik1.vx, sputnik1.vy))
        TRA = tra(R, (sputnik1.vx, sputnik1.vy))
        MEA = mea(ECC, TRA)
        DIST, VREL = relvr(sputnik1.point, (sputnik1.vx, sputnik1.vy), sputnik2.point, (sputnik2.vx, sputnik2.vy))
        # --- HUD: right column (top-down) ---
        rad_label.set_value(RAD)
        rad_label.rect.topright = size[0] - 10, 10
        rad_label.render(screen)
        alt_label.set_value(ALT)
        alt_label.rect.topright = rad_label.rect.bottomright
        alt_label.render(screen)
        speed_label.set_value(sputnik1.speed)
        speed_label.rect.topright = alt_label.rect.bottomright
        speed_label.render(screen)
        vs_label.set_value(VS)
        vs_label.rect.topright = speed_label.rect.bottomright
        vs_label.render(screen)
        hs_label.set_value(HS)
        hs_label.rect.topright = vs_label.rect.bottomright
        hs_label.render(screen)
        tra_label.set_value(TRA)
        tra_label.rect.topright = hs_label.rect.bottomright
        tra_label.render(screen)
        mea_label.set_value(MEA)
        mea_label.rect.topright = tra_label.rect.bottomright
        mea_label.render(screen)
        agl_label.set_value(AGL)
        agl_label.rect.topright = mea_label.rect.bottomright
        agl_label.render(screen)
        # --- HUD: left column (top-down) ---
        sma_label.set_value(SMA)
        sma_label.rect.topleft = 10,10
        sma_label.render(screen)
        smi_label.set_value(SMI.real)
        smi_label.rect.topleft = sma_label.rect.bottomleft
        smi_label.render(screen)
        ecc_label.set_value(ECC)
        ecc_label.rect.topleft = smi_label.rect.bottomleft
        ecc_label.render(screen)
        per_label.set_value(PER)
        per_label.rect.topleft = ecc_label.rect.bottomleft
        per_label.render(screen)
        apo_label.set_value(APO)
        apo_label.rect.topleft = per_label.rect.bottomleft
        apo_label.render(screen)
        agp_label.set_value(AGP)
        agp_label.rect.topleft = apo_label.rect.bottomleft
        agp_label.render(screen)
        t_label.set_value(T)
        t_label.rect.topleft = agp_label.rect.bottomleft
        t_label.render(screen)
        # --- HUD: bottom-right pair (relative navigation) ---
        dist_label.set_value(DIST)
        dist_label.rect.bottomright = size[0] - 10, size[1] - 10
        dist_label.render(screen)
        vrel_label.set_value(VREL)
        vrel_label.rect.bottomright = dist_label.rect.topright
        vrel_label.render(screen)
        # VECC = map(lambda x: x*10000, vecc(R, (sputnik1.vx, sputnik1.vy)))
        # print VECC
        # pygame.draw.line(screen, (255, 0, 0), sputnik1.point, (sputnik1.point[0] + VECC[0], sputnik1.point[1]+VECC[1]), 2)
        # fps_label.set_value(fps)
        # fps_label.rect.bottomleft = 10, size[1] - 10
        # fps_label.render(screen)
        pygame.display.flip()
        # clock.tick(1)
# Run the simulator when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
maxikov/sputnik
|
newsputnik.py
|
Python
|
gpl-3.0
| 20,195
|
# coding: utf-8
import os
import numpy as np
import pandas as pd
from bokeh.plotting import figure, output_notebook, output_file, show
# to plot inline in Jupyter Notebook
# output_notebook()
def h_theta(theta, X):
    """Linear-model hypothesis h = X . theta.

    theta -- parameter vector; reshaped internally to a column vector.
    X -- 2-D (m, n) feature matrix whose first column is 1 (x0=1).
    Returns an (m, 1) column vector, or None when X is not 2-D.
    """
    theta = theta.reshape(-1, 1)  # force column-vector shape
    if X.ndim != 2:
        print('wrong X dimension, 2D array expected')
        return None
    return X.dot(theta)
def costJ(theta, X, Y):
    """Sum-of-squared-errors cost for linear regression.

    theta -- parameter vector; reshaped internally to a column vector.
    X -- 2-D (m, n) feature matrix whose first column is 1 (x0=1).
    Y -- response values; reshaped internally to a column vector.
    Returns the scalar cost, or None when X is not 2-D.
    """
    theta = theta.reshape(-1, 1)  # force column-vector shape
    if X.ndim != 2:
        print('wrong X dimension, 2D array expected')
        return None
    Y = Y.reshape(-1, 1)
    residual = h_theta(theta, X) - Y
    # residual.T @ residual is a 1x1 array; unwrap it to a scalar.
    return residual.T.dot(residual).flatten()[0]
def gradientDescent(X, Y, iteration, alpha):
    """Batch gradient descent for linear regression.

    X -- 2-D (m, n) feature matrix with the first column set to 1 (x0=1).
    Y -- response values; reshaped internally to a column vector.
    iteration -- number of update steps to run.
    alpha -- learning rate.
    Returns (theta, J_cost) where theta is a 1-D ndarray of length n and
    J_cost[i] is the cost recorded *before* update step i; returns None
    when X is not 2-D.
    """
    if X.ndim != 2:
        print('wrong X dimension, 2D array expected')
        return None
    # m: number of training samples; n: number of features including x0.
    (m, n) = X.shape
    Y = Y.reshape(-1, 1)
    theta = np.zeros(n)
    J_cost = np.zeros(iteration)  # cost history, one entry per step
    for i in range(0, iteration):
        # Record the cost before this update step.
        J_cost[i] = costJ(theta, X, Y)
        h = h_theta(theta, X)
        # Gradient of the squared-error cost. .flatten() keeps theta a
        # 1-D vector: the original code assigned the (1, n) row matrix
        # (h - Y).T.dot(X) back to theta, silently changing its shape
        # after the first iteration and contradicting the documented
        # 1-D return value.
        theta = theta - alpha / m * (h - Y).T.dot(X).flatten()
    return (theta, J_cost)
# feature normalization
def normalizeFeature(X):
    """Normalize each feature column to zero mean and unit variance.

    X -- 2-D (m, n) feature matrix WITHOUT the x0=1 intercept column.
    Returns (norm_data, rescale_factor) where rescale_factor is a pandas
    DataFrame with per-column 'mean' and 'std' rows (kept so prediction
    inputs can be rescaled the same way later); returns None when X is
    not 2-D.
    """
    if X.ndim != 2:
        print('wrong X dimension, 2D array expected')
        return None
    # m: number of training samples; n: number of features.
    (m, n) = X.shape
    rescale_factor = np.vstack((X.mean(axis=0), X.std(axis=0)))
    rescale_factor = pd.DataFrame(rescale_factor, index=['mean', 'std'])
    # .loc replaces DataFrame.ix, which was deprecated in pandas 0.20
    # and removed in pandas 1.0 (the original code breaks there).
    norm_data = (X - rescale_factor.loc['mean'].values) / rescale_factor.loc['std'].values
    return (norm_data, rescale_factor)
# linear regression using norm equation
def normEquation(X, Y):
    """Solve linear regression in closed form via the normal equation.

    X -- 2-D (m, n) feature matrix with the first column set to 1 (x0=1).
    Y -- response as a column vector (m, 1).
    Returns theta as a 1-D ndarray of length n.

    Solves (X^T X) theta = X^T Y with np.linalg.solve instead of the
    deprecated np.matrix class and an explicit inverse — numerically
    safer and future-proof, with identical results.
    """
    X = np.asarray(X, dtype=float)
    Y = np.asarray(Y, dtype=float).reshape(-1, 1)
    theta = np.linalg.solve(X.T.dot(X), X.T.dot(Y))
    return theta.flatten()
def main():
    """Run the ML exercise 1 pipeline on both datasets.

    Dataset 1 (ex1data1.txt): single-feature regression fitted with
    gradient descent; plots the cost history and the fitted line.
    Dataset 2 (ex1data2.txt): two-feature regression with feature
    normalization, compared against the closed-form normal equation.
    Plots are written as static bokeh HTML files in the working dir.
    """
    # --- ex1 data1: population vs. profit ---
    path = os.getcwd()+'/'
    data = pd.read_csv(path+'ex1data1.txt', header=None, index_col=None)
    data.columns = ['population', 'profit']
    X_1 = data['population'].values.reshape(-1, 1)
    X_1 = np.insert(X_1, 0, 1, axis=1)  # add intercept column x0=1
    Y_1 = data['profit'].values.reshape(-1, 1)
    iteration = 1500
    alpha = 0.01
    (theta_data1, J_cost_data1) = gradientDescent(X_1, Y_1, iteration, alpha)
    # Cost history: should decrease monotonically if alpha is sane.
    output_file("ex1_data1_costJ.html")
    p = figure(
        tools='pan, reset, box_zoom, save',
        # y_axis_type='log',
        x_axis_label='Iteration',
        y_axis_label='J(theta)'
    )
    p.circle(range(0, J_cost_data1.size), J_cost_data1)
    show(p)
    # Raw data with the fitted regression line.
    output_file("ex1_data1_regression.html")
    p = figure(
        tools='pan, reset, box_zoom, save',
        x_axis_label='Population of City in 10,000s',
        y_axis_label='Profit in $10,000s'
    )
    pred = h_theta(theta_data1, X_1).flatten()
    p.circle(data['population'], data['profit'])
    p.line(data['population'], pred, color='red')
    show(p)
    # --- ex1 data2: house size/bedrooms vs. price ---
    path = os.getcwd()+'/'
    housing_data = pd.read_csv(path+'ex1data2.txt', header=None, index_col=None)
    housing_data.columns = ['size', 'bedrooms', 'price']
    X_2 = housing_data[['size', 'bedrooms']].values
    # Features differ by orders of magnitude, so normalize first.
    (X_2, rescale_factor) = normalizeFeature(X_2)
    X_2 = np.insert(X_2, 0, 1, axis=1)  # add intercept column x0=1
    Y_2 = housing_data['price'].values.reshape(-1, 1)
    iteration = 1500
    alpha = 0.01
    (theta_data2, J_cost_data2) = gradientDescent(X_2, Y_2, iteration, alpha)
    # Cost history for dataset 2.
    output_file("ex1_data2_costJ.html")
    p = figure(
        tools='pan, reset, box_zoom, save',
        # y_axis_type='log',
        x_axis_label='Iteration',
        y_axis_label='J(theta)'
    )
    p.circle(range(0, J_cost_data2.size), J_cost_data2)
    show(p)
    # Predict the price of a 1650 sq-ft, 3-bedroom house. The test point
    # must be rescaled with the SAME factors as the training data.
    # .loc replaces DataFrame.ix (removed in pandas 1.0).
    test_data2 = np.array([[1650, 3]])
    test_data2 = (test_data2 - rescale_factor.loc['mean'].values) / rescale_factor.loc['std'].values
    test_data2 = np.insert(test_data2, 0, 1, axis=1)
    predict_data2 = h_theta(theta_data2, test_data2)
    predict_data2 = predict_data2.flatten()
    print(predict_data2[0])
    # Same regression via the normal equation (no normalization needed).
    X_2_norm = housing_data[['size', 'bedrooms']].values
    X_2_norm = np.insert(X_2_norm, 0, 1, axis=1)  # add intercept column x0=1
    Y_2_norm = housing_data['price'].values.reshape(-1, 1)
    theta_data2_norm = normEquation(X_2_norm, Y_2_norm)
    predict_data2_norm = h_theta(theta_data2_norm, np.array([[1, 1650, 3]]))
    predict_data2_norm = predict_data2_norm.flatten()
    print(predict_data2_norm[0])
# Run the exercise when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
graffaner/machine_learning
|
ex1/ex1.py
|
Python
|
gpl-3.0
| 5,814
|
import copy
import pytest
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from crashstats.base.tests.testbase import DjangoTestCase
class LeftoverPipelineFinder(DjangoTestCase):
    """Test our custom staticfiles finder class."""

    def test_missing_css_source_file(self):
        """collectstatic must fail when a pipeline source file is missing."""
        busted_pipeline = copy.deepcopy(settings.PIPELINE)
        # Doesn't matter which key we chose to bust, so let's just
        # pick the first one. list() is required for Python 3, where
        # dict.keys() returns a non-indexable view object.
        key = list(busted_pipeline['STYLESHEETS'].keys())[0]
        filenames = busted_pipeline['STYLESHEETS'][key]['source_filenames']
        # add a junk one
        filenames += ('neverheardof.css',)
        busted_pipeline['STYLESHEETS'][key]['source_filenames'] = (
            filenames
        )
        with self.settings(PIPELINE=busted_pipeline):
            with pytest.raises(ImproperlyConfigured):
                call_command('collectstatic', '--noinput', interactive=False)
|
Tayamarn/socorro
|
webapp-django/crashstats/base/tests/test_finders.py
|
Python
|
mpl-2.0
| 1,010
|
r"""XML-RPC Servers.
This module can be used to create simple XML-RPC servers
by creating a server and either installing functions, a
class instance, or by extending the SimpleXMLRPCServer
class.
It can also be used to handle XML-RPC requests in a CGI
environment using CGIXMLRPCRequestHandler.
The Doc* classes can be used to create XML-RPC servers that
serve pydoc-style documentation in response to HTTP
GET requests. This documentation is dynamically generated
based on the functions and methods registered with the
server.
A list of possible usage patterns follows:
1. Install functions:
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_function(pow)
server.register_function(lambda x,y: x+y, 'add')
server.serve_forever()
2. Install an instance:
class MyFuncs:
def __init__(self):
# make all of the sys functions available through sys.func_name
import sys
self.sys = sys
def _listMethods(self):
# implement this method so that system.listMethods
# knows to advertise the sys methods
return list_public_methods(self) + \
['sys.' + method for method in list_public_methods(self.sys)]
def pow(self, x, y): return pow(x, y)
def add(self, x, y) : return x + y
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(MyFuncs())
server.serve_forever()
3. Install an instance with custom dispatch method:
class Math:
def _listMethods(self):
# this method must be present for system.listMethods
# to work
return ['add', 'pow']
def _methodHelp(self, method):
# this method must be present for system.methodHelp
# to work
if method == 'add':
return "add(2,3) => 5"
elif method == 'pow':
return "pow(x, y[, z]) => number"
else:
# By convention, return empty
# string if no help is available
return ""
def _dispatch(self, method, params):
if method == 'pow':
return pow(*params)
elif method == 'add':
return params[0] + params[1]
else:
raise ValueError('bad method')
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(Math())
server.serve_forever()
4. Subclass SimpleXMLRPCServer:
class MathServer(SimpleXMLRPCServer):
def _dispatch(self, method, params):
try:
# We are forcing the 'export_' prefix on methods that are
# callable through XML-RPC to prevent potential security
# problems
func = getattr(self, 'export_' + method)
except AttributeError:
raise Exception('method "%s" is not supported' % method)
else:
return func(*params)
def export_add(self, x, y):
return x + y
server = MathServer(("localhost", 8000))
server.serve_forever()
5. CGI script:
server = CGIXMLRPCRequestHandler()
server.register_function(pow)
server.handle_request()
"""
# Written by Brian Quinlan (brian@sweetapp.com).
# Based on code written by Fredrik Lundh.
from xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
from http.server import BaseHTTPRequestHandler
import http.server
import socketserver
import sys
import os
import re
import pydoc
import inspect
import traceback
try:
import fcntl
except ImportError:
fcntl = None
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Resolves a dotted attribute name to an object. Raises an
    AttributeError if any attribute in the chain starts with a '_'
    (those are considered private and must not be reachable).

    If the optional allow_dotted_names argument is false, dots are not
    supported and this function operates similar to getattr(obj, attr).
    """
    names = attr.split('.') if allow_dotted_names else [attr]
    for name in names:
        if name.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % name
                )
        obj = getattr(obj, name)
    return obj
def list_public_methods(obj):
    """Returns a list of attribute strings, found in the specified
    object, which represent callable attributes"""
    public = []
    for name in dir(obj):
        if name.startswith('_'):
            continue  # private/dunder names are never advertised
        if callable(getattr(obj, name)):
            public.append(name)
    return public
class SimpleXMLRPCDispatcher:
    """Mix-in class that dispatches XML-RPC requests.
    This class is used to register XML-RPC method handlers
    and then to dispatch them. This class doesn't need to be
    instanced directly when used by SimpleXMLRPCServer but it
    can be instanced when used by the MultiPathXMLRPCServer
    """
    def __init__(self, allow_none=False, encoding=None,
                 use_builtin_types=False):
        # funcs: explicitly registered name -> callable mapping; checked
        # before the fallback instance in _dispatch.
        self.funcs = {}
        # instance: optional fallback object searched for methods.
        self.instance = None
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'
        self.use_builtin_types = use_builtin_types
    def register_instance(self, instance, allow_dotted_names=False):
        """Registers an instance to respond to XML-RPC requests.
        Only one instance can be installed at a time.
        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))
        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called. Methods beginning with an '_'
        are considered private and will not be called by
        SimpleXMLRPCServer.
        If a registered function matches an XML-RPC request, then it
        will be called instead of the registered instance.
        If the optional allow_dotted_names argument is true and the
        instance does not have a _dispatch method, method names
        containing dots are supported and resolved, as long as none of
        the name segments start with an '_'.
            *** SECURITY WARNING: ***
            Enabling the allow_dotted_names options allows intruders
            to access your module's global variables and may allow
            intruders to execute arbitrary code on your machine.  Only
            use this option on a secure, closed network.
        """
        self.instance = instance
        self.allow_dotted_names = allow_dotted_names
    def register_function(self, function, name=None):
        """Registers a function to respond to XML-RPC requests.
        The optional name argument can be used to set a Unicode name
        for the function.
        """
        # Default to the function's own __name__ when no name is given.
        if name is None:
            name = function.__name__
        self.funcs[name] = function
    def register_introspection_functions(self):
        """Registers the XML-RPC introspection methods in the system
        namespace.
        see http://xmlrpc.usefulinc.com/doc/reserved.html
        """
        self.funcs.update({'system.listMethods' : self.system_listMethods,
                      'system.methodSignature' : self.system_methodSignature,
                      'system.methodHelp' : self.system_methodHelp})
    def register_multicall_functions(self):
        """Registers the XML-RPC multicall method in the system
        namespace.
        see http://www.xmlrpc.com/discuss/msgReader$1208"""
        self.funcs.update({'system.multicall' : self.system_multicall})
    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        """Dispatches an XML-RPC method from marshalled (XML) data.
        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the preferred means
        of changing method dispatch behavior.
        """
        try:
            params, method = loads(data, use_builtin_types=self.use_builtin_types)
            # generate response
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = dumps(response, methodresponse=1,
                             allow_none=self.allow_none, encoding=self.encoding)
        except Fault as fault:
            # Application-level faults are marshalled back as-is.
            response = dumps(fault, allow_none=self.allow_none,
                             encoding=self.encoding)
        except:
            # report exception back to server
            exc_type, exc_value, exc_tb = sys.exc_info()
            # Any other exception becomes a generic Fault(1, ...).
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none,
                )
        return response.encode(self.encoding, 'xmlcharrefreplace')
    def system_listMethods(self):
        """system.listMethods() => ['add', 'subtract', 'multiple']
        Returns a list of the methods supported by the server."""
        methods = set(self.funcs.keys())
        if self.instance is not None:
            # Instance can implement _listMethod to return a list of
            # methods
            if hasattr(self.instance, '_listMethods'):
                methods |= set(self.instance._listMethods())
            # if the instance has a _dispatch method then we
            # don't have enough information to provide a list
            # of methods
            elif not hasattr(self.instance, '_dispatch'):
                methods |= set(list_public_methods(self.instance))
        return sorted(methods)
    def system_methodSignature(self, method_name):
        """system.methodSignature('add') => [double, int, int]
        Returns a list describing the signature of the method. In the
        above example, the add method takes two integers as arguments
        and returns a double result.
        This server does NOT support system.methodSignature."""
        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html
        return 'signatures not supported'
    def system_methodHelp(self, method_name):
        """system.methodHelp('add') => "Adds two integers together"
        Returns a string containing documentation for the specified method."""
        method = None
        if method_name in self.funcs:
            method = self.funcs[method_name]
        elif self.instance is not None:
            # Instance can implement _methodHelp to return help for a method
            if hasattr(self.instance, '_methodHelp'):
                return self.instance._methodHelp(method_name)
            # if the instance has a _dispatch method then we
            # don't have enough information to provide help
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                                self.instance,
                                method_name,
                                self.allow_dotted_names
                                )
                except AttributeError:
                    pass
        # Note that we aren't checking that the method actually
        # be a callable object of some kind
        if method is None:
            return ""
        else:
            return pydoc.getdoc(method)
    def system_multicall(self, call_list):
        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]
        Allows the caller to package multiple XML-RPC calls into a single
        request.
        See http://www.xmlrpc.com/discuss/msgReader$1208
        """
        results = []
        for call in call_list:
            method_name = call['methodName']
            params = call['params']
            try:
                # XXX A marshalling error in any response will fail the entire
                # multicall. If someone cares they should fix this.
                # Per the multicall spec, a success is a one-element list
                # and a failure is a fault struct.
                results.append([self._dispatch(method_name, params)])
            except Fault as fault:
                results.append(
                    {'faultCode' : fault.faultCode,
                     'faultString' : fault.faultString}
                    )
            except:
                exc_type, exc_value, exc_tb = sys.exc_info()
                results.append(
                    {'faultCode' : 1,
                     'faultString' : "%s:%s" % (exc_type, exc_value)}
                    )
        return results
    def _dispatch(self, method, params):
        """Dispatches the XML-RPC method.
        XML-RPC calls are forwarded to a registered function that
        matches the called XML-RPC method name. If no such function
        exists then the call is forwarded to the registered instance,
        if available.
        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))
        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called.
        Methods beginning with an '_' are considered private and will
        not be called.
        """
        try:
            # call the matching registered function
            func = self.funcs[method]
        except KeyError:
            pass
        else:
            # A name explicitly registered as None blocks dispatch for
            # that method entirely (including instance fallback).
            if func is not None:
                return func(*params)
            raise Exception('method "%s" is not supported' % method)
        if self.instance is not None:
            if hasattr(self.instance, '_dispatch'):
                # call the `_dispatch` method on the instance
                return self.instance._dispatch(method, params)
            # call the instance's method directly
            try:
                func = resolve_dotted_attribute(
                    self.instance,
                    method,
                    self.allow_dotted_names
                )
            except AttributeError:
                pass
            else:
                if func is not None:
                    return func(*params)
        raise Exception('method "%s" is not supported' % method)
class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler):
    """Simple XML-RPC request handler class.
    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    """
    # Class attribute listing the accessible path components;
    # paths not on this list will result in a 404 error.
    rpc_paths = ('/', '/RPC2')
    #if not None, encode responses larger than this, if possible
    encode_threshold = 1400 #a common MTU
    #Override form StreamRequestHandler: full buffering of output
    #and no Nagle.
    wbufsize = -1
    disable_nagle_algorithm = True
    # a re to match a gzip Accept-Encoding
    aepattern = re.compile(r"""
                            \s* ([^\s;]+) \s*            #content-coding
                            (;\s* q \s*=\s* ([0-9\.]+))? #q
                            """, re.VERBOSE | re.IGNORECASE)
    def accept_encodings(self):
        # Parse the Accept-Encoding header into a {coding: q-value}
        # mapping; a missing q defaults to 1.0.
        r = {}
        ae = self.headers.get("Accept-Encoding", "")
        for e in ae.split(","):
            match = self.aepattern.match(e)
            if match:
                v = match.group(3)
                v = float(v) if v else 1.0
                r[match.group(1)] = v
        return r
    def is_rpc_path_valid(self):
        if self.rpc_paths:
            return self.path in self.rpc_paths
        else:
            # If .rpc_paths is empty, just assume all paths are legal
            return True
    def do_POST(self):
        """Handles the HTTP POST request.
        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10*1024*1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                chunk = self.rfile.read(chunk_size)
                if not chunk:
                    break
                L.append(chunk)
                size_remaining -= len(L[-1])
            data = b''.join(L)
            data = self.decode_request_content(data)
            if data is None:
                return #response has been sent
            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                    data, getattr(self, '_dispatch', None), self.path
                )
        except Exception as e: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            # Send information about the exception if requested
            if hasattr(self.server, '_send_traceback_header') and \
                    self.server._send_traceback_header:
                self.send_header("X-exception", str(e))
                trace = traceback.format_exc()
                trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII')
                self.send_header("X-traceback", trace)
            self.send_header("Content-length", "0")
            self.end_headers()
        else:
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            # gzip-compress large responses when the client accepts it.
            if self.encode_threshold is not None:
                if len(response) > self.encode_threshold:
                    q = self.accept_encodings().get("gzip", 0)
                    if q:
                        try:
                            response = gzip_encode(response)
                            self.send_header("Content-Encoding", "gzip")
                        except NotImplementedError:
                            pass
            self.send_header("Content-length", str(len(response)))
            self.end_headers()
            self.wfile.write(response)
    def decode_request_content(self, data):
        #support gzip encoding of request
        # Returns the decoded body, or None after having sent an error
        # response (501 for unknown codings, 400 for bad gzip data).
        encoding = self.headers.get("content-encoding", "identity").lower()
        if encoding == "identity":
            return data
        if encoding == "gzip":
            try:
                return gzip_decode(data)
            except NotImplementedError:
                self.send_response(501, "encoding %r not supported" % encoding)
            except ValueError:
                self.send_response(400, "error decoding gzip content")
        else:
            self.send_response(501, "encoding %r not supported" % encoding)
        self.send_header("Content-length", "0")
        self.end_headers()
    def report_404 (self):
        # Report a 404 error
        self.send_response(404)
        response = b'No such page'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
    def log_request(self, code='-', size='-'):
        """Selectively log an accepted request."""
        # Honors the server's logRequests flag.
        if self.server.logRequests:
            BaseHTTPRequestHandler.log_request(self, code, size)
class SimpleXMLRPCServer(socketserver.TCPServer,
                         SimpleXMLRPCDispatcher):
    """Simple XML-RPC server.
    Simple XML-RPC server that allows functions and a single instance
    to be installed to handle requests. The default implementation
    attempts to dispatch XML-RPC calls to the functions or instance
    installed in the server. Override the _dispatch method inherited
    from SimpleXMLRPCDispatcher to change this behavior.
    """
    allow_reuse_address = True
    # Warning: this is for debugging purposes only! Never set this to True in
    # production code, as will be sending out sensitive information (exception
    # and stack trace details) when exceptions are raised inside
    # SimpleXMLRPCRequestHandler.do_POST
    _send_traceback_header = False
    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        # logRequests gates SimpleXMLRPCRequestHandler.log_request.
        self.logRequests = logRequests
        # Initialize both bases: the dispatcher first, then TCPServer
        # (which may bind/activate the socket immediately).
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types)
        socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate)
class MultiPathXMLRPCServer(SimpleXMLRPCServer):
    """Multipath XML-RPC Server
    This specialization of SimpleXMLRPCServer allows the user to create
    multiple Dispatcher instances and assign them to different
    HTTP request paths. This makes it possible to run two or more
    'virtual XML-RPC servers' at the same port.
    Make sure that the requestHandler accepts the paths in question.
    """
    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none,
                                    encoding, bind_and_activate, use_builtin_types)
        # Maps URL path (e.g. '/foo') -> SimpleXMLRPCDispatcher instance.
        self.dispatchers = {}
        self.allow_none = allow_none
        self.encoding = encoding or 'utf-8'
    def add_dispatcher(self, path, dispatcher):
        """Register *dispatcher* to handle requests for *path*; returns it."""
        self.dispatchers[path] = dispatcher
        return dispatcher
    def get_dispatcher(self, path):
        """Return the dispatcher registered for *path* (KeyError if none)."""
        return self.dispatchers[path]
    def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
        """Route the marshaled request to the dispatcher for *path*.

        Any exception escaping the per-path dispatcher (including an unknown
        path) is converted into an XML-RPC Fault so the client always gets a
        well-formed response.
        """
        try:
            response = self.dispatchers[path]._marshaled_dispatch(
               data, dispatch_method, path)
        except:
            # report low level exception back to server
            # (each dispatcher should have handled their own
            # exceptions)
            # NOTE: deliberately a bare except -- every failure must become
            # a Fault on the wire rather than killing the connection.
            exc_type, exc_value = sys.exc_info()[:2]
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none)
            response = response.encode(self.encoding, 'xmlcharrefreplace')
        return response
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
    """Simple handler for XML-RPC data passed through CGI."""
    def __init__(self, allow_none=False, encoding=None, use_builtin_types=False):
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding, use_builtin_types)
    def handle_xmlrpc(self, request_text):
        """Handle a single XML-RPC request"""
        response = self._marshaled_dispatch(request_text)
        # CGI convention: headers on stdout (text), blank line, then the
        # raw response bytes on the underlying binary buffer.
        print('Content-Type: text/xml')
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()
    def handle_get(self):
        """Handle a single HTTP GET request.
        Default implementation indicates an error because
        XML-RPC uses the POST method.
        """
        code = 400
        message, explain = BaseHTTPRequestHandler.responses[code]
        response = http.server.DEFAULT_ERROR_MESSAGE % \
            {
             'code' : code,
             'message' : message,
             'explain' : explain
            }
        response = response.encode('utf-8')
        print('Status: %d %s' % (code, message))
        print('Content-Type: %s' % http.server.DEFAULT_ERROR_CONTENT_TYPE)
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()
    def handle_request(self, request_text=None):
        """Handle a single XML-RPC request passed through a CGI post method.
        If no XML data is given then it is read from stdin. The resulting
        XML-RPC response is printed to stdout along with the correct HTTP
        headers.
        """
        if request_text is None and \
            os.environ.get('REQUEST_METHOD', None) == 'GET':
            self.handle_get()
        else:
            # POST data is normally available through stdin
            try:
                # CONTENT_LENGTH may be missing (None) or malformed;
                # int() then raises TypeError/ValueError and we read all.
                length = int(os.environ.get('CONTENT_LENGTH', None))
            except (ValueError, TypeError):
                length = -1
            if request_text is None:
                request_text = sys.stdin.read(length)
            self.handle_xmlrpc(request_text)
# -----------------------------------------------------------------------------
# Self documenting XML-RPC Server.
class ServerHTMLDoc(pydoc.HTMLDoc):
    """Class used to generate pydoc HTML document for a server"""
    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
        """Mark up some plain text, given a context of symbols to look for.
        Each context dictionary maps object names to anchor names.

        Hyperlinks URLs, RFC/PEP references and known function/method
        names; everything else is HTML-escaped verbatim.
        """
        escape = escape or self.escape
        results = []
        here = 0
        # XXX Note that this regular expression does not allow for the
        # hyperlinking of arbitrary strings being used as method
        # names. Only methods with names consisting of word characters
        # and '.'s are hyperlinked.
        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                                r'RFC[- ]?(\d+)|'
                                r'PEP[- ]?(\d+)|'
                                r'(self\.)?((?:\w|\.)+))\b')
        while 1:
            match = pattern.search(text, here)
            if not match: break
            start, end = match.span()
            # escape the plain text between the previous match and this one
            results.append(escape(text[here:start]))
            all, scheme, rfc, pep, selfdot, name = match.groups()
            if scheme:
                url = escape(all).replace('"', '"')
                results.append('<a href="%s">%s</a>' % (url, url))
            elif rfc:
                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif pep:
                url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif text[end:end+1] == '(':
                # looks like a call: link against methods/funcs/classes
                results.append(self.namelink(name, methods, funcs, classes))
            elif selfdot:
                results.append('self.<strong>%s</strong>' % name)
            else:
                results.append(self.namelink(name, classes))
            here = end
        results.append(escape(text[here:]))
        return ''.join(results)
    def docroutine(self, object, name, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""
        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''
        title = '<a name="%s"><strong>%s</strong></a>' % (
            self.escape(anchor), self.escape(name))
        if inspect.ismethod(object):
            args = inspect.getfullargspec(object)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            # NOTE(review): inspect.formatargspec is deprecated since 3.5
            # and removed in 3.11; fine for this file's target version.
            argspec = inspect.formatargspec (
                    args.args[1:],
                    args.varargs,
                    args.varkw,
                    args.defaults,
                    annotations=args.annotations,
                    formatvalue=self.formatvalue
                )
        elif inspect.isfunction(object):
            args = inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args.args, args.varargs, args.varkw, args.defaults,
                annotations=args.annotations,
                formatvalue=self.formatvalue)
        else:
            argspec = '(...)'
        if isinstance(object, tuple):
            # (argstring, docstring) pair supplied by the server instance
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)
        decl = title + argspec + (note and self.grey(
               '<font face="helvetica, arial">%s</font>' % note))
        doc = self.markup(
            docstring, self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
    def docserver(self, server_name, package_documentation, methods):
        """Produce HTML documentation for an XML-RPC server."""
        fdict = {}
        for key, value in methods.items():
            fdict[key] = '#-' + key
            fdict[value] = fdict[key]
        server_name = self.escape(server_name)
        head = '<big><big><strong>%s</strong></big></big>' % server_name
        result = self.heading(head, '#ffffff', '#7799ee')
        doc = self.markup(package_documentation, self.preformat, fdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc
        contents = []
        # document the methods in deterministic (sorted) order
        method_items = sorted(methods.items())
        for key, value in method_items:
            contents.append(self.docroutine(value, key, funcs=fdict))
        result = result + self.bigsection(
            'Methods', '#ffffff', '#eeaa77', ''.join(contents))
        return result
class XMLRPCDocGenerator:
    """Generates documentation for an XML-RPC server.
    This class is designed as mix-in and should not
    be constructed directly.
    """
    def __init__(self):
        # setup variables used for HTML documentation
        self.server_name = 'XML-RPC Server Documentation'
        self.server_documentation = \
            "This server exports the following methods through the XML-RPC "\
            "protocol."
        self.server_title = 'XML-RPC Server Documentation'
    def set_server_title(self, server_title):
        """Set the HTML title of the generated server documentation"""
        self.server_title = server_title
    def set_server_name(self, server_name):
        """Set the name of the generated HTML server documentation"""
        self.server_name = server_name
    def set_server_documentation(self, server_documentation):
        """Set the documentation string for the entire server."""
        self.server_documentation = server_documentation
    def generate_html_documentation(self):
        """generate_html_documentation() => html documentation for the server
        Generates HTML documentation for the server using introspection for
        installed functions and instances that do not implement the
        _dispatch method. Alternatively, instances can choose to implement
        the _get_method_argstring(method_name) method to provide the
        argument string used in the documentation and the
        _methodHelp(method_name) method to provide the help text used
        in the documentation."""
        methods = {}
        for method_name in self.system_listMethods():
            if method_name in self.funcs:
                # directly registered function
                method = self.funcs[method_name]
            elif self.instance is not None:
                # (argstring, docstring) pair built from the instance's
                # optional introspection hooks
                method_info = [None, None] # argspec, documentation
                if hasattr(self.instance, '_get_method_argstring'):
                    method_info[0] = self.instance._get_method_argstring(method_name)
                if hasattr(self.instance, '_methodHelp'):
                    method_info[1] = self.instance._methodHelp(method_name)
                method_info = tuple(method_info)
                if method_info != (None, None):
                    method = method_info
                elif not hasattr(self.instance, '_dispatch'):
                    # fall back to introspecting the attribute itself
                    try:
                        method = resolve_dotted_attribute(
                                    self.instance,
                                    method_name
                                    )
                    except AttributeError:
                        method = method_info
                else:
                    # instance dispatches itself: nothing to introspect
                    method = method_info
            else:
                assert 0, "Could not find method in self.functions and no "\
                          "instance installed"
            methods[method_name] = method
        documenter = ServerHTMLDoc()
        documentation = documenter.docserver(
                            self.server_name,
                            self.server_documentation,
                            methods
                        )
        return documenter.page(self.server_title, documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC and documentation request handler class.
    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    Handles all HTTP GET requests and interprets them as requests
    for documentation.
    """
    def do_GET(self):
        """Handles the HTTP GET request.
        Interpret all HTTP GET requests as requests for server
        documentation.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return
        # Server mixes in XMLRPCDocGenerator, which provides this method.
        response = self.server.generate_html_documentation().encode('utf-8')
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
class DocXMLRPCServer( SimpleXMLRPCServer,
                        XMLRPCDocGenerator):
    """XML-RPC and HTML documentation server.
    Adds the ability to serve server documentation to the capabilities
    of SimpleXMLRPCServer.
    """
    def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None,
                 bind_and_activate=True, use_builtin_types=False):
        # Initialize both bases explicitly; the doc generator sets up the
        # default server name/title used by the GET handler.
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
                                    allow_none, encoding, bind_and_activate,
                                    use_builtin_types)
        XMLRPCDocGenerator.__init__(self)
class DocCGIXMLRPCRequestHandler(   CGIXMLRPCRequestHandler,
                                    XMLRPCDocGenerator):
    """Handler for XML-RPC data and documentation requests passed through
    CGI"""
    def handle_get(self):
        """Handles the HTTP GET request.
        Interpret all HTTP GET requests as requests for server
        documentation.
        """
        response = self.generate_html_documentation().encode('utf-8')
        # CGI convention: text headers, blank line, raw body bytes.
        print('Content-Type: text/html')
        print('Content-Length: %d' % len(response))
        print()
        sys.stdout.flush()
        sys.stdout.buffer.write(response)
        sys.stdout.buffer.flush()
    def __init__(self):
        CGIXMLRPCRequestHandler.__init__(self)
        XMLRPCDocGenerator.__init__(self)
if __name__ == '__main__':
    # Demo: expose a couple of functions and an instance over XML-RPC.
    import datetime
    class ExampleService:
        def getData(self):
            return '42'
        class currentTime:
            @staticmethod
            def getCurrentTime():
                # reachable via dotted name 'currentTime.getCurrentTime'
                return datetime.datetime.now()
    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(pow)
    server.register_function(lambda x,y: x+y, 'add')
    # allow_dotted_names lets clients call nested attributes (see above);
    # only safe because ExampleService is fully trusted.
    server.register_instance(ExampleService(), allow_dotted_names=True)
    server.register_multicall_functions()
    print('Serving XML-RPC on localhost port 8000')
    print('It is advisable to run this example server within a secure, closed network.')
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        server.server_close()
        sys.exit(0)
|
Reflexe/doc_to_pdf
|
Windows/program/python-core-3.5.0/lib/xmlrpc/server.py
|
Python
|
mpl-2.0
| 36,701
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015 Digi International Inc. All Rights Reserved.
from io import BytesIO
import sys
from suitcase.exceptions import SuitcaseException, \
SuitcasePackException, SuitcaseParseError
from suitcase.fields import FieldPlaceholder, CRCField, SubstructureField
class ParseError(Exception):
    """Exception raised when there is an error parsing"""
class Packer(object):
    """Object responsible for packing/unpacking bytes into/from fields.

    :param ordered_fields: sequence of ``(name, field)`` pairs in wire order.
    :param crc_field: the CRC field instance (or None) used to compute the
        checksum over the packed payload.
    """
    def __init__(self, ordered_fields, crc_field):
        self.crc_field = crc_field
        self.ordered_fields = ordered_fields
    def pack(self):
        """Pack all fields and return the resulting byte string."""
        sio = BytesIO()
        self.write(sio)
        return sio.getvalue()
    def write(self, stream):
        """Pack every field into *stream* in order.

        CRC fields are packed with a placeholder first and patched in once
        the complete payload is available.

        :raises SuitcasePackException: wrapping any unexpected error raised
            while packing an individual field.
        """
        # now, pack everything in
        crc_fields = []
        for name, field in self.ordered_fields:
            try:
                if isinstance(field, CRCField):
                    # remember where the CRC goes so we can patch it later
                    crc_offset = stream.tell()
                    field.pack(stream)
                    crc_fields.append((field, crc_offset))
                else:
                    field.pack(stream)
            except SuitcaseException:
                raise  # just reraise the same exception object
            except Exception:
                # keep the original traceback information, see
                # http://stackoverflow.com/questions/3847503/wrapping-exceptions-in-python
                exc_value = SuitcasePackException("Unexpected exception during pack of %r" % name)
                # BUGFIX: the previous ``raise(type, value, tb)`` form raised
                # a tuple, which is itself a TypeError on Python 3.
                raise exc_value.with_traceback(sys.exc_info()[2])
        # if there is a crc value, seek back to the field and
        # pack it with the right value
        if len(crc_fields) > 0:
            data = stream.getvalue()
            for field, offset in crc_fields:
                stream.seek(offset)
                # NOTE(review): the checksum is always computed through
                # self.crc_field even if several CRC fields were collected;
                # preserved as-is -- confirm whether multiple CRC fields are
                # actually supported.
                checksum_data = self.crc_field.packed_checksum(data)
                stream.write(checksum_data)
    def unpack(self, data, trailing=False):
        """Unpack *data* into the fields.

        :param trailing: when True, extra bytes after the structure are
            allowed and the stream (positioned just past the structure) is
            returned so the caller can tell how much was consumed.
        :raises SuitcaseParseError: when trailing is False and unconsumed
            bytes remain.
        """
        stream = BytesIO(data)
        self.unpack_stream(stream)
        if trailing:
            return stream
        elif stream.tell() != len(data):
            raise SuitcaseParseError("Structure fully parsed but additional bytes remained. Parsing "
                                     "consumed %d of %d bytes" %
                                     (stream.tell(), len(data)))
    def unpack_stream(self, stream):
        """Unpack bytes from a stream of data field-by-field
        In the most basic case, the basic algorithm here is as follows::
            for _name, field in self.ordered_fields:
                length = field.bytes_required
                data = stream.read(length)
                field.unpack(data)
        This logic is complicated somewhat by the handling of variable length
        greedy fields (there may only be one). The logic when we see a
        greedy field (bytes_required returns None) in the stream is to
        pivot and parse the remaining fields starting from the last and
        moving through the stream backwards. There is also some special
        logic present for dealing with checksum fields.
        """
        crc_fields = []
        greedy_field = None
        # go through the fields from first to last. If we hit a greedy
        # field, break out of the loop
        for i, (name, field) in enumerate(self.ordered_fields):
            if isinstance(field, CRCField):
                crc_fields.append((field, stream.tell()))
            length = field.bytes_required
            if isinstance(field, SubstructureField):
                remaining_data = stream.getvalue()[stream.tell():]
                returned_stream = field.unpack(remaining_data, trailing=True)
                # We need to fast forward by as much as was consumed by the structure
                stream.seek(stream.tell() + returned_stream.tell())
                continue
            elif length is None:
                greedy_field = field
                break
            else:
                data = stream.read(length)
                if len(data) != length:
                    raise SuitcaseParseError("While attempting to parse field "
                                             "%r we tried to read %s bytes but "
                                             "we were only able to read %s." %
                                             (name, length, len(data)))
                try:
                    field.unpack(data)
                except SuitcaseException:
                    raise  # just re-raise these
                except Exception:
                    exc_value = SuitcaseParseError("Unexpected exception while unpacking field %r" % name)
                    # BUGFIX: re-raise properly (old form raised a tuple).
                    raise exc_value.with_traceback(sys.exc_info()[2])
        if greedy_field is not None:
            remaining_data = stream.read()
            inverted_stream = BytesIO(remaining_data[::-1])
            # work through the remaining fields in reverse order in order
            # to narrow in on the right bytes for the greedy field
            reversed_remaining_fields = self.ordered_fields[(i + 1):][::-1]
            for _name, field in reversed_remaining_fields:
                if isinstance(field, CRCField):
                    crc_fields.append(
                        (field, -inverted_stream.tell() - field.bytes_required))
                length = field.bytes_required
                data = inverted_stream.read(length)[::-1]
                if len(data) != length:
                    # BUGFIX: report the field actually being parsed (_name),
                    # not the stale ``name`` left over from the forward loop.
                    raise SuitcaseParseError("While attempting to parse field "
                                             "%r we tried to read %s bytes but "
                                             "we were only able to read %s." %
                                             (_name, length, len(data)))
                try:
                    field.unpack(data)
                except SuitcaseException:
                    raise  # just re-raise these
                except Exception:
                    exc_value = SuitcaseParseError("Unexpected exception while unpacking field %r" % _name)
                    raise exc_value.with_traceback(sys.exc_info()[2])
            greedy_data_chunk = inverted_stream.read()[::-1]
            greedy_field.unpack(greedy_data_chunk)
        if crc_fields:
            data = stream.getvalue()
            for (crc_field, offset) in crc_fields:
                crc_field.validate(data, offset)
class StructureMeta(type):
    """Metaclass for all structure objects
    When a class with this metaclass is created, we look for any
    FieldProperty instances associated with the class and record
    those for use later on.
    """
    def __new__(cls, name, bases, dct):
        # find all the placeholders in this class declaration and store
        # them away.  Add name mangling to the original fields so they
        # do not get in the way.
        dct['_field_placeholders'] = {}
        dct['_crc_field'] = None
        for key, value in list(dct.items()):  # use a copy, we mutate dct
            if isinstance(value, FieldPlaceholder):
                dct['_field_placeholders'][key] = value
                # stash the placeholder under a mangled name and remove the
                # original so instance attribute access goes to __getattr__
                dct['__%s' % key] = value
                del dct[key]
                if value.cls == CRCField:
                    dct['_crc_field'] = value
        # preserve declaration order via each placeholder's sequence number
        sorted_fields = list(sorted(dct['_field_placeholders'].items(),
                                    key=lambda kv: kv[1]._field_seqno))
        dct['_sorted_fields'] = sorted_fields
        return type.__new__(cls, name, bases, dct)
class Structure(object):
    r"""Base class for message schema declaration
    ``Structure`` forms the core of the Suitcase library and allows for
    a declarative syntax for specifying packet schemas and associated
    methods for transforming these schemas into packed bytes (and vice-versa).
    Here's an example showing how one might specify the format for a UDP
    Datagram::
        >>> from suitcase.fields import UBInt16, LengthField, VariableRawPayload
        >>> class UDPDatagram(Structure):
        ...     source_port = UBInt16()
        ...     destination_port = UBInt16()
        ...     length = LengthField(UBInt16())
        ...     checksum = UBInt16()
        ...     data = VariableRawPayload(length)
    From this we have a near-ideal form for packing and parsing packet
    data following the schema::
        >>> def printb(s):
        ...     print(repr(s).replace("b'", "'").replace("u'", "'"))
        ...
        >>> dgram = UDPDatagram()
        >>> dgram.source_port = 9110
        >>> dgram.destination_port = 1001
        >>> dgram.checksum = 27193
        >>> dgram.data = b"Hello, world!"
        >>> printb(dgram.pack())
        '#\x96\x03\xe9\x00\rj9Hello, world!'
        >>> dgram2 = UDPDatagram()
        >>> dgram2.unpack(dgram.pack())
        >>> dgram2
        UDPDatagram (
          source_port=9110,
          destination_port=1001,
          length=13,
          checksum=27193,
          data=...'Hello, world!',
        )
    """
    # NOTE(review): Python 2 metaclass hook only; on Python 3 this line has
    # no effect (the class would need ``metaclass=StructureMeta`` in the
    # header) -- confirm the intended interpreter version.
    __metaclass__ = StructureMeta
    @classmethod
    def from_data(cls, data):
        """Create a new, populated message from some data
        This factory method is identical to doing the following, it just takes
        one line instead of two and looks nicer in general::
            m = MyMessage()
            m.unpack(data)
        Can be rewritten as just::
            m = MyMessage.from_data(data)
        """
        m = cls()
        m.unpack(data)
        return m
    def __init__(self):
        # map of attribute name -> concrete field instance for this object
        self._key_to_field = {}
        self._parent = None
        self._sorted_fields = []
        self._placeholder_to_field = {}
        if self.__class__._crc_field is None:
            self._crc_field = None
        else:
            self._crc_field = self.__class__._crc_field.create_instance(self)
        # instantiate a per-object field for every class-level placeholder
        for key, field_placeholder in self.__class__._sorted_fields:
            field = field_placeholder.create_instance(self)
            self._key_to_field[key] = field
            self._placeholder_to_field[field_placeholder] = field
            self._sorted_fields.append((key, field))
        self._packer = Packer(self._sorted_fields, self._crc_field)
    def __getattr__(self, key):
        # route field-name access through the field instance's getval()
        k2f = self.__dict__.get('_key_to_field', {})
        if key in k2f:
            field = self._key_to_field[key]
            return field.getval()
        raise AttributeError
    def __setattr__(self, key, value):
        # route field-name assignment through the field instance's setval()
        k2f = self.__dict__.get('_key_to_field', {})
        if key in k2f:
            field = self._key_to_field[key]
            return field.setval(value)
        return object.__setattr__(self, key, value)
    def __iter__(self):
        # iterate (name, field) pairs in declaration order
        return iter(self._sorted_fields)
    def __repr__(self):
        output = "%s (\n" % self.__class__.__name__
        for field_name, field in self:
            output += "  %s=%s,\n" % (field_name, field)
        output += ")"
        return output
    def lookup_field_by_name(self, name):
        """Return the field instance declared under *name* (KeyError if absent)."""
        for fname, field in self:
            if name == fname:
                return field
        raise KeyError
    def lookup_field_by_placeholder(self, placeholder):
        """Return this instance's field for a class-level *placeholder*."""
        return self._placeholder_to_field[placeholder]
    def unpack(self, data, trailing=False):
        """Unpack *data* into this structure's fields (see Packer.unpack)."""
        return self._packer.unpack(data, trailing)
    def pack(self):
        """Pack this structure's fields into bytes (see Packer.pack)."""
        return self._packer.pack()
|
rtzoeller/python-suitcase
|
suitcase/structure.py
|
Python
|
mpl-2.0
| 11,887
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-08 13:32
from django.db import migrations
from atmo.models import PermissionMigrator
def assign_spark_job_view_permission(apps, schema_editor):
    """Forward migration: grant each SparkJob's creator the 'view' permission."""
    SparkJob = apps.get_model("jobs", "SparkJob")
    PermissionMigrator(apps, SparkJob, "view", user_field="created_by").assign()
def remove_spark_job_view_permission(apps, schema_editor):
    """Reverse migration: revoke the creator's 'view' permission on SparkJobs."""
    SparkJob = apps.get_model("jobs", "SparkJob")
    PermissionMigrator(apps, SparkJob, "view", user_field="created_by").remove()
class Migration(migrations.Migration):
    """Data migration assigning per-object 'view' permissions on SparkJob."""
    dependencies = [
        ("jobs", "0006_auto_20161108_0933"),
        ("auth", "0007_alter_validators_add_error_messages"),
        ("guardian", "0001_initial"),
        ("contenttypes", "0001_initial"),
    ]
    operations = [
        # reversible: the second callable undoes the first
        migrations.RunPython(
            assign_spark_job_view_permission, remove_spark_job_view_permission
        )
    ]
|
mozilla/telemetry-analysis-service
|
atmo/jobs/migrations/0007_assign_view_perms.py
|
Python
|
mpl-2.0
| 935
|
# Analyzer trace fixture: y is either the truthy input string or bool True.
x = input()
if x:
    y = x
else:
    y = True
print(y)
|
gitsimon/spadup-lyra
|
unittests/traces/if3N.py
|
Python
|
mpl-2.0
| 56
|
import csv
import datetime
import json
import mock
import os
import re
import shutil
import tempfile
import urllib
from cStringIO import StringIO
from nose.tools import eq_, ok_
from nose.plugins.skip import SkipTest
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from crashstats.crashstats import models
class Response(object):
    """Minimal stand-in for a ``requests`` response used by mocked GETs."""
    def __init__(self, content=None, status_code=200):
        # Only the two attributes the tests inspect.
        self.status_code = status_code
        self.content = content
class RobotsTestViews(TestCase):
    """Tests for the /robots.txt view under both ENGAGE_ROBOTS settings."""
    @override_settings(ENGAGE_ROBOTS=True)
    def test_robots_txt(self):
        # engaged: crawlers are allowed
        url = '/robots.txt'
        response = self.client.get(url)
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'text/plain')
        ok_('Allow: /' in response.content)
    @override_settings(ENGAGE_ROBOTS=False)
    def test_robots_txt_disengage(self):
        # disengaged: crawlers are blocked
        url = '/robots.txt'
        response = self.client.get(url)
        eq_(response.status_code, 200)
        eq_(response['Content-Type'], 'text/plain')
        ok_('Disallow: /' in response.content)
class FaviconTestViews(TestCase):
    """Test that /favicon.ico is served from STATIC_ROOT/img."""
    def test_favicon(self):
        # build a throwaway STATIC_ROOT containing a fake icon
        tmp_static_root = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp_static_root)
        favicon_dir = os.path.join(tmp_static_root, 'img')
        os.makedirs(favicon_dir)
        favicon_path = os.path.join(favicon_dir, 'favicon.ico')
        with open(favicon_path, 'wb') as icon:
            # NOTE(review): writing a str to a 'wb' file -- only valid on
            # Python 2, which matches this module's cStringIO import.
            icon.write('totally fake')
        with self.settings(STATIC_ROOT=tmp_static_root):
            response = self.client.get('/favicon.ico')
        eq_(response.status_code, 200)
        ok_('image/x-icon' in response['Content-Type'])
class BaseTestViews(TestCase):
    """Shared fixture: primes the CurrentVersions cache with mocked data."""
    @mock.patch('requests.get')
    def setUp(self, rget):
        super(BaseTestViews, self).setUp()
        # checking settings.CACHES isn't as safe as `cache.__class__`
        if 'LocMemCache' not in cache.__class__.__name__:
            raise ImproperlyConfigured(
                'The tests requires that you use LocMemCache when running'
            )
        # we do this here so that the current/versions thing
        # is cached since that's going to be called later
        # in every view more or less
        def mocked_get(url, **options):
            now = datetime.datetime.utcnow()
            now = now.replace(microsecond=0).isoformat()
            if 'products/' in url:
                return Response("""
                {"products": [
                   "Firefox",
                   "Thunderbird",
                   "SeaMonkey"
                 ],
                 "hits": {
                   "Firefox": [
                   {"product": "Firefox",
                    "throttle": "100.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "19.0",
                    "release": "Beta",
                    "id": 922},
                   {"product": "Firefox",
                    "throttle": "100.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "18.0",
                    "release": "Stable",
                    "id": 920},
                   {"product": "Firefox",
                    "throttle": "100.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "20.0",
                    "release": "Nightly",
                    "id": 923}
                   ],
                   "Thunderbird":[
                   {"product": "Thunderbird",
                    "throttle": "100.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "18.0",
                    "release": "Aurora",
                    "id": 924},
                   {"product": "Thunderbird",
                    "throttle": "100.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "19.0",
                    "release": "Nightly",
                    "id": 925}
                   ],
                   "SeaMonkey": [
                   {"product": "SeaMonkey",
                    "throttle": "99.00",
                    "end_date": "%(end_date)s",
                    "start_date": "2012-03-08T00:00:00",
                    "featured": true,
                    "version": "9.5",
                    "release": "Alpha",
                    "id": 921}
                   ]
                 },
                 "total": 3
                }
                """ % {'end_date': now})
            raise NotImplementedError(url)
        rget.side_effect = mocked_get
        # warm the cache: the first CurrentVersions fetch hits the mock above
        from crashstats.crashstats.models import CurrentVersions
        api = CurrentVersions()
        api.get()
    def tearDown(self):
        super(BaseTestViews, self).tearDown()
        # wipe the LocMem cache so tests stay independent
        cache.clear()
class TestViews(BaseTestViews):
    @mock.patch('requests.get')
    def test_handler500(self, rget):
        """The custom 500 handler renders without needing request context."""
        # NOTE(review): __import__(..., -1) is the Python 2 "relative or
        # absolute" level; invalid on Python 3.
        root_urlconf = __import__(
            settings.ROOT_URLCONF,
            globals(),
            locals(),
            ['urls'],
            -1
        )
        # ...so that we can access the 'handler500' defined in there
        par, end = root_urlconf.handler500.rsplit('.', 1)
        # ...which is an importable reference to the real handler500 function
        views = __import__(par, globals(), locals(), [end], -1)
        # ...and finally we have the handler500 function at hand
        handler500 = getattr(views, end)
        # to make a mock call to the django view functions you need a request
        fake_request = RequestFactory().request(**{'wsgi.input': None})
        # Need a fake user for the persona bits on crashstats_base
        fake_request.user = {}
        fake_request.user['is_active'] = False
        # the reason for first causing an exception to be raised is because
        # the handler500 function is only called by django when an exception
        # has been raised which means sys.exc_info() is something.
        try:
            raise NameError('sloppy code')
        except NameError:
            # do this inside a frame that has a sys.exc_info()
            response = handler500(fake_request)
            eq_(response.status_code, 500)
            ok_('Internal Server Error' in response.content)
            ok_('id="products_select"' not in response.content)
    def test_handler404(self):
        """An unknown product renders the custom 404 page."""
        url = reverse('crashstats.home', args=('Unknown',))
        response = self.client.get(url)
        eq_(response.status_code, 404)
        ok_('Page not Found' in response.content)
        ok_('id="products_select"' not in response.content)
    def test_homepage_redirect(self):
        """'/' redirects to the home page of the default product."""
        response = self.client.get('/')
        eq_(response.status_code, 302)
        destination = reverse('crashstats.home',
                              args=[settings.DEFAULT_PRODUCT])
        ok_(destination in response['Location'])
    def test_legacy_query_redirect(self):
        """Legacy /query/query URLs redirect, preserving the query string."""
        response = self.client.get('/query/query?foo=bar')
        # 301 vs 302 depends on the PERMANENT_LEGACY_REDIRECTS setting
        redirect_code = settings.PERMANENT_LEGACY_REDIRECTS and 301 or 302
        eq_(response.status_code, redirect_code)
        ok_(reverse('crashstats.query') + '?foo=bar' in response['Location'])
    @mock.patch('requests.get')
    def test_buginfo(self, rget):
        """buginfo requires both bug_ids and include_fields parameters."""
        url = reverse('crashstats.buginfo')
        def mocked_get(url, **options):
            if 'bug?id=' in url:
                return Response('{"bugs": [{"product": "allizom.org"}]}')
            raise NotImplementedError(url)
        rget.side_effect = mocked_get
        # missing both parameters -> 400
        response = self.client.get(url)
        eq_(response.status_code, 400)
        # only bug_ids -> 400
        response = self.client.get(url, {'bug_ids': '123,456'})
        eq_(response.status_code, 400)
        # only include_fields -> 400
        response = self.client.get(url, {'include_fields': 'product'})
        eq_(response.status_code, 400)
        # both present (whitespace tolerated) -> 200 with mocked payload
        response = self.client.get(url, {'bug_ids': ' 123, 456 ',
                                         'include_fields': ' product'})
        eq_(response.status_code, 200)
        struct = json.loads(response.content)
        ok_(struct['bugs'])
        eq_(struct['bugs'][0]['product'], 'allizom.org')
    @mock.patch('requests.get')
    def test_home(self, rget):
        """Product home page: 200 for known product/version, 404 otherwise."""
        url = reverse('crashstats.home', args=('Firefox',))
        def mocked_get(url, **options):
            # first branch serves the product list, second a single product
            if 'products' in url and not 'version' in url:
                return Response("""
                    {
                        "products": [
                            "Firefox"
                        ],
                        "hits": {
                            "Firefox": [{
                            "featured": true,
                            "throttle": 100.0,
                            "end_date": "2012-11-27",
                            "product": "Firefox",
                            "release": "Nightly",
                            "version": "19.0",
                            "has_builds": true,
                            "start_date": "2012-09-25"
                            }]
                        },
                        "total": 1
                    }
                """)
            elif 'products' in url:
                return Response("""
                    {
                        "hits": [{
                            "is_featured": true,
                            "throttle": 100.0,
                            "end_date": "2012-11-27",
                            "product": "Firefox",
                            "build_type": "Nightly",
                            "version": "19.0",
                            "has_builds": true,
                            "start_date": "2012-09-25"
                        }],
                        "total": 1
                    }
                """)
            raise NotImplementedError(url)
        rget.side_effect = mocked_get
        response = self.client.get(url)
        eq_(response.status_code, 200)
        # Testing with unknown product
        url = reverse('crashstats.home', args=('InternetExplorer',))
        response = self.client.get(url)
        eq_(response.status_code, 404)
        # Testing with unknown version for product
        url = reverse('crashstats.home', args=('Firefox', '99'))
        response = self.client.get(url)
        eq_(response.status_code, 404)
        # Testing with valid version for product
        url = reverse('crashstats.home', args=('Firefox', '19.0'))
        response = self.client.get(url)
        eq_(response.status_code, 200)
    @mock.patch('requests.get')
    def test_frontpage_json(self, rget):
        """frontpage_json returns per-version daily crash data as JSON."""
        url = reverse('crashstats.frontpage_json')
        def mocked_get(url, **options):
            if 'crashes/daily' in url:
                return Response("""
                    {
                      "hits": {
                        "Firefox:19.0": {
                          "2012-10-08": {
                            "product": "Firefox",
                            "adu": 30000,
                            "crash_hadu": 71.099999999999994,
                            "version": "19.0",
                            "report_count": 2133,
                            "date": "2012-10-08"
                          },
                          "2012-10-02": {
                            "product": "Firefox",
                            "adu": 30000,
                            "crash_hadu": 77.299999999999997,
                            "version": "19.0",
                            "report_count": 2319,
                            "date": "2012-10-02"
                          }
                        }
                      }
                    }
                    """)
            raise NotImplementedError(url)
        rget.side_effect = mocked_get
        response = self.client.get(url, {'product': 'Firefox'})
        eq_(response.status_code, 200)
        ok_('application/json' in response['content-type'])
        struct = json.loads(response.content)
        ok_(struct['product_versions'])
        eq_(struct['count'], 1)
    @mock.patch('requests.get')
    def test_frontpage_json_bad_request(self, rget):
        """frontpage_json validates product/versions/duration/date_range_type."""
        url = reverse('crashstats.frontpage_json')
        def mocked_get(url, **options):
            assert 'crashes/daily' in url, url
            if 'product/Firefox' in url:
                return Response("""
                    {
                      "hits": {
                        "Firefox:19.0": {
                          "2012-10-08": {
                            "product": "Firefox",
                            "adu": 30000,
                            "crash_hadu": 71.099999999999994,
                            "version": "19.0",
                            "report_count": 2133,
                            "date": "2012-10-08"
                          },
                          "2012-10-02": {
                            "product": "Firefox",
                            "adu": 30000,
                            "crash_hadu": 77.299999999999997,
                            "version": "19.0",
                            "report_count": 2319,
                            "date": "2012-10-02"
                          }
                        }
                      }
                    }
                    """)
            raise NotImplementedError(url)
        rget.side_effect = mocked_get
        # unknown product -> 400
        response = self.client.get(url, {'product': 'Neverheardof'})
        eq_(response.status_code, 400)
        # version without product -> 400
        response = self.client.get(url, {'versions': '999.1'})
        eq_(response.status_code, 400)
        response = self.client.get(url, {
            'product': 'Firefox',
            'versions': '99.9'  # mismatch
        })
        eq_(response.status_code, 400)
        response = self.client.get(url, {
            'product': 'Firefox',
            'versions': '19.0'
        })
        eq_(response.status_code, 200)
        # duration must be a positive integer
        response = self.client.get(url, {
            'product': 'Firefox',
            'duration': 'xxx'
        })
        eq_(response.status_code, 400)
        response = self.client.get(url, {
            'product': 'Firefox',
            'duration': '-100'
        })
        eq_(response.status_code, 400)
        response = self.client.get(url, {
            'product': 'Firefox',
            'duration': '10'
        })
        eq_(response.status_code, 200)
        # date_range_type must be 'build' or 'report'
        response = self.client.get(url, {
            'product': 'Firefox',
            'date_range_type': 'junk'
        })
        eq_(response.status_code, 400)
        response = self.client.get(url, {
            'product': 'Firefox',
            'date_range_type': 'build'
        })
        eq_(response.status_code, 200)
        response = self.client.get(url, {
            'product': 'Firefox',
            'date_range_type': 'report'
        })
        eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_products_list(self, rget):
    """The products_list page renders OK from the mocked products API."""
    url = reverse('crashstats.products_list')

    def mocked_get(url, **options):
        if 'products' in url:
            return Response("""
                {
                  "products": [
                    "Firefox",
                    "Fennec"
                  ],
                  "hits": [
                    {
                      "sort": "1",
                      "default_version": "15.0.1",
                      "release_name": "firefox",
                      "rapid_release_version": "5.0",
                      "product_name": "Firefox"
                    },
                    {
                      "sort": "3",
                      "default_version": "10.0.6esr",
                      "release_name": "mobile",
                      "rapid_release_version": "5.0",
                      "product_name": "Fennec"
                    }],
                  "total": "2"
                }
                """)
        # fail loudly on unexpected middleware calls, like the other
        # mocked_get helpers in this file (previously returned None)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    response = self.client.get(url)
    # eq_ for consistency with the rest of this file
    # (previously self.assertEqual)
    eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_crash_trends(self, rget):
    """Crash trends renders for a known product and 404s for unknown ones."""
    url = reverse('crashstats.crash_trends', args=('Firefox',))
    unknown_product_url = reverse('crashstats.crash_trends',
                                  args=('NotKnown',))

    def mocked_get(**options):
        if 'products' in options['url']:
            return Response("""
                {
                  "products": ["WaterWolf"],
                  "hits": [
                    {
                      "product": "WaterWolf",
                      "version": "5.0a1",
                      "release": "Release",
                      "throttle": 10.0
                    }
                  ],
                  "total": "1"
                }
                """)
        # report the URL that was actually requested; the previous code
        # raised NotImplementedError(url), which silently picked up the
        # *outer* `url` variable from the test body instead
        raise NotImplementedError(options['url'])

    rget.side_effect = mocked_get

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('Nightly Crash Trends For Firefox' in response.content)

    response = self.client.get(unknown_product_url)
    eq_(response.status_code, 404)
@mock.patch('requests.get')
def test_crashtrends_versions_json(self, rget):
    """crashtrends_versions_json responds with JSON for any product."""
    url = reverse('crashstats.crashtrends_versions_json')

    def mocked_get(**options):
        if 'products' in options['url']:
            return Response("""
                {
                  "hits": [
                    {
                      "sort": "1",
                      "default_version": "5.0a1",
                      "release_name": "waterwolf",
                      "rapid_release_version": "5.0",
                      "product_name": "WaterWolf"
                    }],
                  "total": "1"
                }
                """)
        # raise with the requested URL; the previous code referenced the
        # outer `url` variable by accident
        raise NotImplementedError(options['url'])

    rget.side_effect = mocked_get

    response = self.client.get(url, {'product': 'Firefox'})
    ok_('application/json' in response['content-type'])
    eq_(response.status_code, 200)
    # NOTE(review): ok_(x, y) only asserts bool(x) and treats y as the
    # failure message, so the three content checks below merely prove
    # the body is non-empty. Tightening them to eq_ against the parsed
    # JSON requires knowing the view's exact output -- TODO confirm.
    ok_(response.content, ['20.0'])
    response = self.client.get(url, {'product': 'Thunderbird'})
    eq_(response.status_code, 200)
    ok_(response.content, ['18.0', '19.0'])
    response = self.client.get(url, {'product': 'Unknown'})
    ok_(response.content, [])
@mock.patch('requests.get')
def test_crashtrends_json(self, rget):
    """crashtrends_json returns data for nightly products and 400s
    for products without a nightly channel."""
    url = reverse('crashstats.crashtrends_json')

    def mocked_get(url, **options):
        ok_('/start_date/2012-10-01/' in url)
        ok_('/end_date/2012-10-10/' in url)
        if 'crashtrends/' in url:
            return Response("""
                {
                  "crashtrends": [{
                    "build_date": "2012-10-10",
                    "version_string": "5.0a1",
                    "product_version_id": 1,
                    "days_out": 6,
                    "report_count": 144,
                    "report_date": "2012-10-04",
                    "product_name": "WaterWolf"
                  },
                  {
                    "build_date": "2012-10-06",
                    "version_string": "5.0a1",
                    "product_version_id": 1,
                    "days_out": 2,
                    "report_count": 162,
                    "report_date": "2012-10-08",
                    "product_name": "WaterWolf"
                  },
                  {
                    "build_date": "2012-09-29",
                    "version_string": "5.0a1",
                    "product_version_id": 1,
                    "days_out": 5,
                    "report_count": 144,
                    "report_date": "2012-10-04",
                    "product_name": "WaterWolf"
                  }]
                }
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    response = self.client.get(url, {
        'product': 'Firefox',
        'version': '20.0',
        'start_date': '2012-10-01',
        'end_date': '2012-10-10'
    })
    # previously ok_(response.status_code, 200), which only asserted
    # that the status code was truthy; eq_ pins the exact value
    eq_(response.status_code, 200)
    ok_('application/json' in response['content-type'])
    struct = json.loads(response.content)
    eq_(struct['total'], 2)

    # Test with product that does not have a nightly
    response = self.client.get(url, {
        'product': 'SeaMonkey',
        'version': '9.5',
        'start_date': '2012-10-01',
        'end_date': '2012-10-10'
    })
    # same ok_ -> eq_ fix as above
    eq_(response.status_code, 400)
    ok_('text/html' in response['content-type'])
    ok_(
        'SeaMonkey is not one of the available choices'
        in response.content
    )
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_topcrasher(self, rget, rpost):
    """Top crashers renders by crash date and by build date, and can
    be exported as UTF-8 encoded CSV."""
    # first without a version
    no_version_url = reverse('crashstats.topcrasher',
                             args=('Firefox',))
    url = reverse('crashstats.topcrasher',
                  args=('Firefox', '19.0'))
    has_builds_url = reverse('crashstats.topcrasher',
                             args=('Firefox', '19.0', 'build'))
    response = self.client.get(no_version_url)
    # without a version we get redirected to the versioned URL
    ok_(url in response['Location'])

    def mocked_post(**options):
        assert '/bugs/' in options['url'], options['url']
        return Response("""
           {"hits": [{"id": "123456789",
                      "signature": "Something"}]}
        """)

    def mocked_get(url, **options):
        if 'crashes/signatures' in url:
            # the signature contains a non-ASCII character (\u7684) to
            # exercise the CSV encoding assertion further down
            return Response(u"""
               {"crashes": [
                 {
                  "count": 188,
                  "mac_count": 66,
                  "content_count": 0,
                  "first_report": "2012-06-21",
                  "startup_percent": 0.0,
                  "currentRank": 0,
                  "previousRank": 1,
                  "first_report_exact": "2012-06-21T21:28:08",
                  "versions":
                      "2.0, 2.1, 3.0a2, 3.0b2, 3.1b1, 4.0a1, 4.0a2, 5.0a1",
                  "percentOfTotal": 0.24258064516128999,
                  "win_count": 56,
                  "changeInPercentOfTotal": 0.011139597126354983,
                  "linux_count": 66,
                  "hang_count": 0,
                  "signature": "FakeSignature1 \u7684 Japanese",
                  "versions_count": 8,
                  "changeInRank": 1,
                  "plugin_count": 0,
                  "previousPercentOfTotal": 0.23144104803493501,
                  "is_gc_count": 10
                 }
                ],
                "totalPercentage": 0,
                "start_date": "2012-05-10",
                "end_date": "2012-05-24",
                "totalNumberOfCrashes": 0}
            """)
        if 'products/versions' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "is_featured": true,
                      "throttle": 1.0,
                      "end_date": "string",
                      "start_date": "integer",
                      "build_type": "string",
                      "product": "Firefox",
                      "version": "19.0",
                      "has_builds": true
                    }],
                  "total": "1"
                }
                """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('By Crash Date' in response.content)

    response = self.client.get(has_builds_url)
    eq_(response.status_code, 200)
    ok_('By Build Date' in response.content)

    # also, render the CSV
    response = self.client.get(url, {'format': 'csv'})
    eq_(response.status_code, 200)
    ok_('text/csv' in response['Content-Type'])
    # know your fixtures :)
    ok_('Firefox' in response['Content-Disposition'])
    ok_('19.0' in response['Content-Disposition'])
    # we should be able to unpack it
    reader = csv.reader(StringIO(response.content))
    line1, line2 = reader
    eq_(line1[0], 'Rank')
    try:
        eq_(int(line2[0]), 1)
    except Exception:
        raise SkipTest
    # bytestring when exported as CSV with UTF-8 encoding
    eq_(line2[4], 'FakeSignature1 \xe7\x9a\x84 Japanese')
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_topcrasher_without_any_signatures(self, rget, rpost):
    """Like test_topcrasher, but the middleware returns no signatures;
    the pages still render and the CSV contains only the header row."""
    # first without a version
    no_version_url = reverse('crashstats.topcrasher',
                             args=('Firefox',))
    url = reverse('crashstats.topcrasher',
                  args=('Firefox', '19.0'))
    has_builds_url = reverse('crashstats.topcrasher',
                             args=('Firefox', '19.0', 'build'))
    response = self.client.get(no_version_url)
    # without a version we get redirected to the versioned URL
    ok_(url in response['Location'])

    def mocked_post(**options):
        assert '/bugs/' in options['url'], options['url']
        return Response("""
           {"hits": [{"id": "123456789",
                      "signature": "Something"}]}
        """)

    def mocked_get(url, **options):
        if 'crashes/signatures' in url:
            # deliberately empty crashes list
            return Response(u"""
               {"crashes": [],
                "totalPercentage": 0,
                "start_date": "2012-05-10",
                "end_date": "2012-05-24",
                "totalNumberOfCrashes": 0}
            """)
        if 'products/versions' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "is_featured": true,
                      "throttle": 1.0,
                      "end_date": "string",
                      "start_date": "integer",
                      "build_type": "string",
                      "product": "Firefox",
                      "version": "19.0",
                      "has_builds": true
                    }],
                  "total": "1"
                }
                """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('By Crash Date' in response.content)

    response = self.client.get(has_builds_url)
    eq_(response.status_code, 200)
    ok_('By Build Date' in response.content)

    # also, render the CSV
    response = self.client.get(url, {'format': 'csv'})
    eq_(response.status_code, 200)
    ok_('text/csv' in response['Content-Type'])
    # know your fixtures :)
    ok_('Firefox' in response['Content-Disposition'])
    ok_('19.0' in response['Content-Disposition'])
    #
    # no signatures, the CSV is empty apart from the header
    eq_(len(response.content.splitlines()), 1)
    reader = csv.reader(StringIO(response.content))
    line1, = reader
    eq_(line1[0], 'Rank')
@mock.patch('requests.get')
def test_exploitable_crashes(self, rget):
    """The exploitable crashes report requires being signed in."""
    url = reverse('crashstats.exploitable_crashes')

    def mocked_get(url, **options):
        assert 'crashes/exploitability' in url
        return Response("""
            {
              "hits": [
                {
                  "signature": "FakeSignature",
                  "report_date": "2013-06-06",
                  "high_count": 4,
                  "medium_count": 3,
                  "low_count": 2,
                  "none_count": 1
                }
              ],
              "total": 1
            }
            """)

    rget.side_effect = mocked_get

    # anonymous users get redirected to the login page
    response = self.client.get(url)
    # parentheses only make the operator precedence explicit
    # (`in` binds looser than `+`); the expression is unchanged
    ok_(settings.LOGIN_URL in (response['Location'] + '?next=%s' % url))
    # previously ok_(response.status_code, 302), which only asserted
    # truthiness of the status code; eq_ pins the exact value
    eq_(response.status_code, 302)

    User.objects.create_user('test', 'test@mozilla.com', 'secret')
    assert self.client.login(username='test', password='secret')
    response = self.client.get(url)
    # same ok_ -> eq_ fix as above
    eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_daily(self, rget):
    """The daily crashes-per-ADU page renders as HTML and as CSV."""
    url = reverse('crashstats.daily')

    def mocked_get(url, **options):
        if 'products' in url:
            return Response("""
                {
                  "products": [
                    "Firefox",
                    "Thunderbird"
                  ],
                  "hits": {
                    "Firefox": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Firefox",
                      "release": "Nightly",
                      "version": "19.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }],
                    "Thunderbird": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Thunderbird",
                      "release": "Nightly",
                      "version": "18.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }]
                  },
                  "total": 2
                }
                """)
        if 'crashes' in url:
            # This list needs to match the versions as done in the common
            # fixtures set up in setUp() above.
            return Response("""
                {
                  "hits": {
                    "Firefox:20.0": {
                      "2012-09-23": {
                        "adu": 80388,
                        "crash_hadu": 12.279,
                        "date": "2012-08-23",
                        "product": "Firefox",
                        "report_count": 9871,
                        "throttle": 0.1,
                        "version": "20.0"
                      }
                    },
                    "Firefox:19.0": {
                      "2012-08-23": {
                        "adu": 80388,
                        "crash_hadu": 12.279,
                        "date": "2012-08-23",
                        "product": "Firefox",
                        "report_count": 9871,
                        "throttle": 0.1,
                        "version": "19.0"
                      }
                    },
                    "Firefox:18.0": {
                      "2012-08-13": {
                        "adu": 80388,
                        "crash_hadu": 12.279,
                        "date": "2012-08-23",
                        "product": "Firefox",
                        "report_count": 9871,
                        "throttle": 0.1,
                        "version": "18.0"
                      }
                    }
                  }
                }
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    response = self.client.get(url, {
        'p': 'Firefox',
        'v': ['20.0', '19.0']
    })
    eq_(response.status_code, 200)
    # XXX any basic tests with can do on response.content?
    # each of the four version <select> widgets must offer 18.0
    ok_('18.0' in response.content.split('id="version3"')[1].
        split("</select>")[0])
    ok_('18.0' in response.content.split('id="version2"')[1].
        split("</select>")[0])
    ok_('18.0' in response.content.split('id="version1"')[1].
        split("</select>")[0])
    ok_('18.0' in response.content.split('id="version0"')[1].
        split("</select>")[0])

    # check that the CSV version is working too
    response = self.client.get(url, {
        'p': 'Firefox',
        'v': ['20.0', '19.0'],
        'format': 'csv'
    })
    eq_(response.status_code, 200)
    eq_(response['Content-Type'], 'text/csv')

    # also, I should be able to read it
    reader = csv.reader(response)
    # because response is an iterator that will return a blank line first
    # we skip till the next time
    rows = list(reader)[1:]
    ok_(rows)
    head_row = rows[0]
    eq_(head_row[0], 'Date')
    eq_(head_row[1:], [
        'Firefox 20.0 Crashes',
        'Firefox 20.0 ADU',
        'Firefox 20.0 Throttle',
        'Firefox 20.0 Ratio',
        'Firefox 19.0 Crashes',
        'Firefox 19.0 ADU',
        'Firefox 19.0 Throttle',
        'Firefox 19.0 Ratio'
    ])
    first_row = rows[1]
    eq_(first_row[0], '2012-09-23')
@mock.patch('crashstats.crashstats.models.Platforms')
@mock.patch('requests.get')
def test_daily_by_os(self, rget, platforms_get):
    """The daily page with form_selection=by_os splits the figures per
    operating system (platform list itself is mocked too)."""
    url = reverse('crashstats.daily')

    def mocked_get(url, **options):
        if 'products' in url:
            return Response("""
                {
                  "products": [
                    "Firefox",
                    "Thunderbird"
                  ],
                  "hits": {
                    "Firefox": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Firefox",
                      "release": "Nightly",
                      "version": "19.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }],
                    "Thunderbird": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Thunderbird",
                      "release": "Nightly",
                      "version": "18.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }]
                  },
                  "total": 2
                }
                """)
        if 'crashes' in url:
            # the by_os request must carry both mocked platform names
            assert '/separated_by/os' in url, url
            assert '/os/Windows%2BAmiga' in url, url  # %2B is a +
            # This list needs to match the versions as done in the common
            # fixtures set up in setUp() above.
            return Response("""
                {
                  "hits": {
                    "Firefox:20.0:win": {
                      "2012-09-23": {
                        "os": "Windows",
                        "adu": 80388,
                        "crash_hadu": 12.279,
                        "date": "2012-08-23",
                        "product": "Firefox",
                        "report_count": 9871,
                        "throttle": 0.1,
                        "version": "20.0"
                      }
                    },
                    "Firefox:20.0:ami": {
                      "2012-09-23": {
                        "os": "Amiga",
                        "adu": 7377,
                        "crash_hadu": 12.279,
                        "date": "2012-08-23",
                        "product": "Firefox",
                        "report_count": 871,
                        "throttle": 0.1,
                        "version": "20.0"
                      }
                    }
                  }
                }
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    def mocked_platforms_get():
        return [
            {'code': 'win', 'name': 'Windows'},
            {'code': 'ami', 'name': 'Amiga'},
        ]

    platforms_get().get.side_effect = mocked_platforms_get

    response = self.client.get(url, {
        'p': 'Firefox',
        'v': '20.0',
        'form_selection': 'by_os'
    })
    eq_(response.status_code, 200)
    # XXX any basic tests with can do on response.content?

    # check that the CSV version is working too
    response = self.client.get(url, {
        'p': 'Firefox',
        'v': '20.0',
        'format': 'csv',
        'form_selection': 'by_os'
    })
    eq_(response.status_code, 200)
    eq_(response['Content-Type'], 'text/csv')

    # also, we should be able to read it
    reader = csv.reader(response)
    # because response is an iterator that will return a blank line first
    # we skip till the next time
    rows = list(reader)[1:]
    head_row = rows[0]
    first_row = rows[1]
    eq_(head_row[0], 'Date')
    eq_(head_row[1:], [
        'Firefox 20.0 on Windows Crashes',
        'Firefox 20.0 on Windows ADU',
        'Firefox 20.0 on Windows Throttle',
        'Firefox 20.0 on Windows Ratio',
        'Firefox 20.0 on Amiga Crashes',
        'Firefox 20.0 on Amiga ADU',
        'Firefox 20.0 on Amiga Throttle',
        'Firefox 20.0 on Amiga Ratio'
    ])
    eq_(first_row[0], '2012-09-23')
def test_daily_legacy_redirect(self):
    """Old-style array parameters (v[], os[]) permanently redirect to
    the equivalent modern querystring parameters."""
    base_url = reverse('crashstats.daily')

    response = self.client.get(base_url + '?p=Firefox&v[]=Something')
    eq_(response.status_code, 301)
    querystring = response['Location'].split('?')[1]
    ok_('p=Firefox' in querystring)
    ok_('v=Something' in querystring)

    response = self.client.get(
        base_url + '?p=Firefox&os[]=Something&os[]=Else'
    )
    eq_(response.status_code, 301)
    querystring = response['Location'].split('?')[1]
    for expected in ('p=Firefox', 'os=Something', 'os=Else'):
        ok_(expected in querystring)
@mock.patch('requests.get')
def test_daily_with_bad_input(self, rget):
    """Bad querystring input to the daily page must 400; valid
    requests (HTML and CSV) still return 200."""
    url = reverse('crashstats.daily')

    def mocked_get(url, **options):
        if 'products' in url:
            return Response("""
                {
                  "products": [
                    "Firefox",
                    "Thunderbird"
                  ],
                  "hits": {
                    "Firefox": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Firefox",
                      "release": "Nightly",
                      "version": "19.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }],
                    "Thunderbird": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Thunderbird",
                      "release": "Nightly",
                      "version": "18.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }]
                  },
                  "total": 2
                }
                """)
        if 'crashes' in url:
            # This list needs to match the versions as done in the common
            # fixtures set up in setUp() above.
            return Response("""
                {
                  "hits": {}
                }
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    # null byte in a date must be rejected cleanly
    response = self.client.get(url, {
        'p': 'Firefox',
        'date_start': u' \x00'
    })
    eq_(response.status_code, 400)

    response = self.client.get(url, {
        'p': 'Firefox',
        'date_range_type': 'any old crap'
    })
    eq_(response.status_code, 400)

    response = self.client.get(url, {
        'p': 'Firefox',
        'hang_type': 'any old crap'
    })
    eq_(response.status_code, 400)

    response = self.client.get(url, {
        'p': 'Firefox',
        'format': 'csv',
    })
    eq_(response.status_code, 200)
    eq_(response['Content-Type'], 'text/csv')

    # last sanity check
    response = self.client.get(url, {
        'p': 'Firefox',
    })
    eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_builds(self, rget):
    """The builds page (and its RSS feed) only shows Nightly builds."""
    url = reverse('crashstats.builds', args=('Firefox',))
    rss_url = reverse('crashstats.buildsrss', args=('Firefox',))

    def mocked_get(url, **options):
        if 'products/builds/product' in url:
            # Note that the last one isn't build_type==Nightly
            return Response("""
                [
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000001,
                    "beta_number": null,
                    "platform": "Mac OS X",
                    "version": "19.0",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  },
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000002,
                    "beta_number": null,
                    "platform": "Windows",
                    "version": "19.0",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  },
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000003,
                    "beta_number": null,
                    "platform": "BeOS",
                    "version": "5.0a1",
                    "date": "2012-06-25",
                    "build_type": "Beta"
                  }
                ]
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('20120625000001' in response.content)
    ok_('20120625000002' in response.content)
    # the not, build_type==Nightly
    ok_('20120625000003' not in response.content)

    rss_response = self.client.get(rss_url)
    # eq_ for consistency with the rest of this file
    # (previously the deprecated self.assertEquals alias)
    eq_(rss_response.status_code, 200)
    eq_(rss_response['Content-Type'],
        'application/rss+xml; charset=utf-8')
    ok_('20120625000001' in rss_response.content)
    ok_('20120625000002' in rss_response.content)
    # the not, build_type==Nightly
    ok_('20120625000003' not in rss_response.content)
@mock.patch('requests.get')
def test_builds_by_old_version(self, rget):
    """The builds page for an explicit (older) version shows that
    version in the page heading."""
    url = reverse('crashstats.builds', args=('Firefox', '18.0'))

    def mocked_get(url, **options):
        if 'products/builds/product' in url and 'version/18.0' in url:
            return Response("""
                [
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000007,
                    "beta_number": null,
                    "platform": "Mac OS X",
                    "version": "5.0a1",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  },
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000007,
                    "beta_number": null,
                    "platform": "Windows",
                    "version": "5.0a1",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  }
                ]
                """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    response = self.client.get(url)
    eq_(response.status_code, 200)
    # the requested version appears in the first <h2> heading
    header = response.content.split('<h2')[1].split('</h2>')[0]
    ok_('18.0' in header)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query(self, rget, rpost):
    """End-to-end exercise of the query page: form defaults, search
    results rendering, plugin columns, validation, and redirects for
    crash-id lookups."""

    def mocked_post(**options):
        assert 'bugs' in options['url'], options['url']
        return Response("""
            {"hits": [
                {
                    "id": "123456",
                    "signature": "nsASDOMWindowEnumerator::GetNext()"
                }
             ],
             "total": 1
            }
        """)

    def mocked_get(url, **options):
        # NOTE(review): self.assertTrue is used below where the rest of
        # the file uses ok_ -- worth unifying.
        assert 'search/signatures' in url
        if 'products/Firefox' in url:
            return Response("""{
                "hits": [
                {
                  "count": 586,
                  "signature": "nsASDOMWindowEnumerator::GetNext()",
                  "numcontent": 0,
                  "is_windows": 586,
                  "is_linux": 0,
                  "numplugin": 56,
                  "is_mac": 0,
                  "numhang": 0
                },
                {
                  "count": 13,
                  "signature": "mySignatureIsCool",
                  "numcontent": 0,
                  "is_windows": 10,
                  "is_linux": 2,
                  "numplugin": 0,
                  "is_mac": 1,
                  "numhang": 0
                },
                {
                  "count": 2,
                  "signature": "mineIsCoolerThanYours",
                  "numcontent": 0,
                  "is_windows": 0,
                  "is_linux": 0,
                  "numplugin": 0,
                  "is_mac": 2,
                  "numhang": 2
                },
                {
                  "count": 2,
                  "signature": null,
                  "numcontent": 0,
                  "is_windows": 0,
                  "is_linux": 0,
                  "numplugin": 0,
                  "is_mac": 2,
                  "numhang": 2
                }
                ],
                "total": 4
            } """)
        elif 'products/Thunderbird' in url:
            return Response('{"hits": [], "total": 0}')
        elif 'products/SeaMonkey' in url:
            self.assertTrue('plugin_search_mode/is_exactly' in url)
            return Response("""
            {"hits": [
                  {
                  "count": 586,
                  "signature": "nsASDOMWindowEnumerator::GetNext()",
                  "numcontent": 0,
                  "is_windows": 586,
                  "is_linux": 0,
                  "numplugin": 533,
                  "is_mac": 0,
                  "numhang": 0,
                  "pluginname": "superAddOn",
                  "pluginfilename": "addon.dll",
                  "pluginversion": "1.2.3"
                }],
              "total": 1
              }
            """)
        else:
            return Response("""
            {"hits": [
                  {
                  "count": 586,
                  "signature": "nsASDOMWindowEnumerator::GetNext()",
                  "numcontent": 0,
                  "is_windows": 586,
                  "is_linux": 0,
                  "numplugin": 0,
                  "is_mac": 0,
                  "numhang": 0
                }],
              "total": 1
              }
            """)
        # NOTE(review): unreachable -- every branch of the if/elif/else
        # above returns. Dead code; consider removing.
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    url = reverse('crashstats.query')

    # no parameters: just the empty search form
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' not in response.content)
    ok_('table id="signatureList"' not in response.content)

    # Verify that the passed product is selected in search form
    response = self.client.get(url, {'product': 'Thunderbird'})
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' not in response.content)
    ok_('table id="signatureList"' not in response.content)
    ok_('value="Thunderbird" selected' in response.content)

    # Verify that the passed version is selected in nav
    response = self.client.get(url, {
        'product': 'Thunderbird',
        'version': 'Thunderbird:18.0'
    })
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' not in response.content)
    ok_('table id="signatureList"' not in response.content)
    # Because versions in the search form only gets set on DOM ready,
    # we here ensure that the version was passed and set by checking
    # that the correct version is selected in the versions drop-down.
    ok_('option value="18.0" selected' in response.content)

    response = self.client.get(url, {
        'product': 'Firefox',
        'date': '2012-01-01'
    })
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' in response.content)
    ok_('table id="signatureList"' in response.content)
    ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
    ok_('mySignatureIsCool' in response.content)
    ok_('mineIsCoolerThanYours' in response.content)
    ok_('(null signature)' in response.content)

    # Test that the default value for query_type is 'contains'
    ok_('<option value="contains" selected' in response.content)

    # Test with empty results
    response = self.client.get(url, {
        'product': 'Thunderbird',
        'date': '2012-01-01'
    })
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' in response.content)
    ok_('The maximum query date' not in response.content)
    ok_('table id="signatureList"' not in response.content)
    ok_('Results within' in response.content)
    ok_('No results were found' in response.content)

    response = self.client.get(url, {'query': 'nsASDOMWindowEnumerator'})
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' in response.content)
    ok_('table id="signatureList"' in response.content)
    ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
    # the mocked bug id shows up in the results
    ok_('123456' in response.content)

    # Test that the signature parameter is used as default value
    response = self.client.get(url, {'signature': 'myFunctionIsCool'})
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' not in response.content)
    ok_('table id="signatures-list"' not in response.content)
    ok_('value="myFunctionIsCool"' in response.content)

    # Test a simple search containing a crash id
    crash_id = '1234abcd-ef56-7890-ab12-abcdef123456'
    response = self.client.get(url, {
        'query': crash_id,
        'query_type': 'simple'
    })
    eq_(response.status_code, 302)
    ok_(crash_id in response['Location'])

    # Test a simple search containing a crash id and spaces
    crash_id = ' 1234abcd-ef56-7890-ab12-abcdef123456 '
    response = self.client.get(url, {
        'query': crash_id,
        'query_type': 'simple'
    })
    eq_(response.status_code, 302)
    # the crash id is stripped, not URL-quoted with spaces
    ok_(urllib.quote(crash_id) not in response['Location'])
    ok_(crash_id.strip() in response['Location'])

    # Test that null bytes break the page cleanly
    response = self.client.get(url, {'date': u' \x00'})
    eq_(response.status_code, 400)
    ok_('<h2>Query Results</h2>' not in response.content)
    ok_('Enter a valid date/time' in response.content)

    # Test an out-of-range date range
    response = self.client.get(url, {
        'query': 'js::',
        'range_unit': 'weeks',
        'range_value': 9
    })
    eq_(response.status_code, 200)
    ok_('The maximum query date' in response.content)
    # the range is clamped back to the configured default
    ok_('name="range_value" value="%s"' % settings.QUERY_RANGE_DEFAULT_DAYS
        in response.content)
    ok_('value="days" selected' in response.content)

    # Test that do_query forces the query
    response = self.client.get(url, {
        'do_query': 1,
        'product': 'Firefox'
    })
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' in response.content)
    ok_('table id="signatureList"' in response.content)
    ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)

    # Test that old query types are changed
    # Test that plugin data is displayed
    response = self.client.get(url, {
        'do_query': 1,
        'product': 'SeaMonkey',
        'plugin_query_type': 'exact',
        'process_type': 'plugin',
    })
    eq_(response.status_code, 200)
    ok_('<h2>Query Results</h2>' in response.content)
    ok_('table id="signatureList"' in response.content)
    ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
    ok_('Plugin Filename' in response.content)
    ok_('Plugin Name/Ver' in response.content)
    ok_('addon.dll' in response.content)
    ok_('superAddOn 1.2.3' in response.content)

    # Test 'all' is an accepted value for report_type and hang_type
    response = self.client.get(url, {
        'do_query': 1,
        'product': 'Firefox',
        'hang_type': 'all',
        'process_type': 'all',
    })
    eq_(response.status_code, 200)
    ok_('table id="signatureList"' in response.content)
    ok_('value="any" checked' in response.content)

    # Test defaut date
    expected = datetime.datetime.utcnow()
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_(expected.strftime('%m/%d/%Y %H:00:00') in response.content)

    # Test passed date
    response = self.client.get(url, {
        'date': '11/27/2085 10:10:10'
    })
    eq_(response.status_code, 200)
    ok_('11/27/2085 10:10:10' in response.content)

    # Test value of build ids
    response = self.client.get(url, {
        'build_id': '12345'
    })
    eq_(response.status_code, 200)
    ok_('value="12345"' in response.content)

    response = self.client.get(url, {
        'build_id': '12345,54321'
    })
    eq_(response.status_code, 200)
    ok_('value="12345, 54321"' in response.content)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query_pagination(self, rget, rpost):
    """With 150 mocked signature hits, the results page must link to
    page 2 of the same query."""

    def mocked_post(**options):
        return Response('{"hits": [], "total": 0}')

    def mocked_get(url, **options):
        assert 'search/signatures' in url
        hit_template = '''
            {
              "count": %(x)s,
              "signature": "sig%(x)s",
              "numcontent": 0,
              "is_windows": %(x)s,
              "is_linux": 0,
              "numplugin": 0,
              "is_mac": 0,
              "numhang": 0
            }
        '''
        hits = [hit_template % {'x': index} for index in range(150)]
        return Response('{"hits": [%s], "total": 150}' % ','.join(hits))

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    query_url = reverse('crashstats.query')
    response = self.client.get(query_url, {'do_query': 1})
    eq_(response.status_code, 200)

    next_page_url = '%s?do_query=1&page=2' % query_url
    ok_(next_page_url in response.content)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query_summary(self, rget, rpost):
    """The human-readable summary of the search criteria is rendered
    correctly for both a simple and a fully-parameterized query."""

    def mocked_post(**options):
        return Response('{"hits": [], "total": 0}')

    def mocked_get(url, **options):
        return Response('{"hits": [], "total": 0}')

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    url = reverse('crashstats.query')

    # minimal query: only a signature substring
    response = self.client.get(url, {
        'query': 'test',
        'query_type': 'contains'
    })
    eq_(response.status_code, 200)
    ok_('Results within' in response.content)
    ok_("crash signature contains 'test'" in response.content)
    ok_('the crashing process was of any type' in response.content)

    # fully parameterized query
    response = self.client.get(url, {
        'query': 'test',
        'query_type': 'is_exactly',
        'build_id': '1234567890',
        'product': ['Firefox', 'Thunderbird'],
        'version': ['Firefox:18.0'],
        'platform': ['mac'],
        'process_type': 'plugin',
        'plugin_query_type': 'starts_with',
        'plugin_query_field': 'filename',
        'plugin_query': 'lib'
    })
    eq_(response.status_code, 200)
    ok_('Results within' in response.content)
    ok_("crash signature is exactly 'test'" in response.content)
    ok_('product is one of Firefox, Thunderbird' in response.content)
    ok_('version is one of Firefox:18.0' in response.content)
    ok_('platform is one of Mac OS X' in response.content)
    ok_('for build 1234567890' in response.content)
    ok_('the crashing process was a plugin' in response.content)
    ok_('and its filename starts with lib' in response.content)
@override_settings(SEARCH_MIDDLEWARE_IMPL='elasticsearch')
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query_force_impl_settings(self, rget, rpost):
    """The SEARCH_MIDDLEWARE_IMPL setting must be propagated as a
    `_force_api_impl` path component of the middleware search URL."""
    empty_result = '{"hits": [], "total": 0}'

    def mocked_post(**options):
        return Response(empty_result)

    def mocked_get(url, **options):
        # the settings value decides the forced implementation
        ok_('_force_api_impl/elasticsearch' in url)
        return Response(empty_result)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(
        reverse('crashstats.query'),
        {'do_query': 1}
    )
    eq_(response.status_code, 200)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query_force_impl_url(self, rget, rpost):
    """A `_force_api_impl` querystring parameter must be propagated
    into the middleware search URL."""
    empty_result = '{"hits": [], "total": 0}'

    def mocked_post(**options):
        return Response(empty_result)

    def mocked_get(url, **options):
        # the querystring value decides the forced implementation
        ok_('_force_api_impl/postgres' in url)
        return Response(empty_result)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(
        reverse('crashstats.query'),
        {'do_query': 1, '_force_api_impl': 'postgres'}
    )
    eq_(response.status_code, 200)
@override_settings(SEARCH_MIDDLEWARE_IMPL='mongodb')
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_query_force_impl_url_over_settings(self, rget, rpost):
    """A `_force_api_impl` querystring parameter must take precedence
    over the SEARCH_MIDDLEWARE_IMPL setting."""
    empty_result = '{"hits": [], "total": 0}'

    def mocked_post(**options):
        return Response(empty_result)

    def mocked_get(url, **options):
        # querystring ('mysql') wins over settings ('mongodb')
        ok_('_force_api_impl/mysql' in url)
        return Response(empty_result)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(
        reverse('crashstats.query'),
        {'do_query': 1, '_force_api_impl': 'mysql'}
    )
    eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_plot_signature(self, rget):
    """plot_signature validates its URL arguments and returns JSON
    for a valid product/version/date-range/signature combination."""

    def mocked_get(url, **options):
        if 'crashes/signature_history' in url:
            return Response("""
                {
                    "hits": [],
                    "total": 0
                }
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    # each of these argument tuples is invalid and must yield a 400:
    # missing signature, invalid start date, invalid end date
    invalid_arg_sets = (
        ('Firefox', '19.0', '2011-12-01', '2011-12-02', ''),
        ('Firefox', '19.0', '2012-02-33', '2012-12-01', 'Read::Bytes'),
        ('Firefox', '19.0', '2012-02-28', '2012-13-01', 'Read::Bytes'),
    )
    for args in invalid_arg_sets:
        bad_url = reverse('crashstats.plot_signature', args=args)
        response = self.client.get(bad_url)
        eq_(response.status_code, 400)

    # valid dates and a signature
    good_url = reverse(
        'crashstats.plot_signature',
        args=('Firefox', '19.0', '2011-12-01', '2011-12-02',
              'Read::Bytes')
    )
    response = self.client.get(good_url)
    eq_(response.status_code, 200)
    ok_('application/json' in response['content-type'])
    struct = json.loads(response.content)
    ok_(struct['signature'])
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_topchangers(self, rget, rpost):
    """Top changers renders for valid product/version combinations and
    404s when the version doesn't belong to the product."""
    url = reverse('crashstats.topchangers',
                  args=('Firefox', '19.0'))

    bad_url = reverse('crashstats.topchangers',
                      args=('SeaMonkey', '19.0'))

    bad_url2 = reverse('crashstats.topchangers',
                       args=('Firefox', '19.999'))

    url_wo_version = reverse('crashstats.topchangers',
                             args=('Firefox',))

    def mocked_post(**options):
        assert 'by/signatures' in options['url'], options['url']
        return Response("""
           {"bug_associations": [{"bug_id": "123456789",
                                  "signature": "Something"}]}
        """)

    def mocked_get(url, **options):
        if 'crashes/signatures' in url:
            return Response("""
               {"crashes": [
                 {
                  "count": 188,
                  "mac_count": 66,
                  "content_count": 0,
                  "first_report": "2012-06-21",
                  "startup_percent": 0.0,
                  "currentRank": 0,
                  "previousRank": 1,
                  "first_report_exact": "2012-06-21T21:28:08",
                  "versions":
                      "2.0, 2.1, 3.0a2, 3.0b2, 3.1b1, 4.0a1, 4.0a2, 5.0a1",
                  "percentOfTotal": 0.24258064516128999,
                  "win_count": 56,
                  "changeInPercentOfTotal": 0.011139597126354983,
                  "linux_count": 66,
                  "hang_count": 0,
                  "signature": "FakeSignature1",
                  "versions_count": 8,
                  "changeInRank": 0,
                  "plugin_count": 0,
                  "previousPercentOfTotal": 0.23144104803493501,
                  "is_gc_count": 10
                 }
                ],
                "totalPercentage": 0,
                "start_date": "2012-05-10",
                "end_date": "2012-05-24",
                "totalNumberOfCrashes": 0}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    response = self.client.get(url_wo_version)
    eq_(response.status_code, 200)

    # invalid version for the product name
    response = self.client.get(bad_url)
    eq_(response.status_code, 404)

    # invalid version for the product name
    response = self.client.get(bad_url2)
    eq_(response.status_code, 404)

    response = self.client.get(url)
    eq_(response.status_code, 200)
@mock.patch('requests.get')
def test_signature_summary(self, rget):
    """signature_summary returns a JSON structure with one key per
    summary facet; the 'exploitabilityScore' key is only included for
    authenticated users.
    """
    def mocked_get(url, **options):
        if 'signaturesummary' in url:
            return Response("""
                [
                  {
                    "version_string": "12.0",
                    "percentage": "48.440",
                    "report_count": 52311,
                    "product_name": "Firefox",
                    "category": "XXX",
                    "crashes": "1234",
                    "installations": "5679",
                    "null_count" : "456",
                    "low_count": "789",
                    "medium_count": "123",
                    "high_count": "1200",
                    "report_date": "2013-01-01"
                  },
                  {
                    "version_string": "13.0b4",
                    "percentage": "9.244",
                    "report_count": 9983,
                    "product_name": "Firefox",
                    "category": "YYY",
                    "crashes": "3210",
                    "installations": "9876",
                    "null_count" : "123",
                    "low_count": "456",
                    "medium_count": "789",
                    "high_count": "1100",
                    "report_date": "2013-01-02"
                  }
                ]
            """)
        raise NotImplementedError(url)

    url = reverse('crashstats.signature_summary')

    rget.side_effect = mocked_get

    response = self.client.get(url, {'range_value': '1',
                                     'signature': 'sig',
                                     'version': 'Firefox:19.0'})
    eq_(response.status_code, 200)
    ok_('application/json' in response['content-type'])
    struct = json.loads(response.content)
    ok_(struct['architectures'])
    ok_(struct['flashVersions'])
    ok_(struct['percentageByOs'])
    ok_(struct['processTypes'])
    ok_(struct['productVersions'])
    ok_(struct['uptimeRange'])
    ok_(struct['distinctInstall'])
    # anonymous users must not see exploitability data
    ok_('exploitabilityScore' not in struct)

    User.objects.create_user('test', 'test@mozilla.com', 'secret')
    assert self.client.login(username='test', password='secret')
    response = self.client.get(url, {'range_value': '1',
                                     'signature': 'sig',
                                     'version': 'Firefox:19.0'})
    eq_(response.status_code, 200)
    ok_('application/json' in response['content-type'])
    struct = json.loads(response.content)
    # logged in: exploitability is exposed
    ok_(struct['exploitabilityScore'])
@mock.patch('requests.get')
def test_status(self, rget):
    """The status page renders the revisions and (reformatted)
    processor timestamps from the middleware /status/ payload."""
    def mocked_get(**options):
        # the assert doubles as the URL dispatcher — only /status/ is expected
        assert 'status' in options['url'], options['url']
        return Response("""
            {
                "breakpad_revision": "1035",
                "hits": [
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:39:33+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:40:00+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 16.407,
                        "waiting_job_count": 56,
                        "date_created": "2012-09-28T20:40:02+00:00",
                        "id": 410655,
                        "avg_process_sec": 0.914149
                    },
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:34:33+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:35:00+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 13.8293,
                        "waiting_job_count": 48,
                        "date_created": "2012-09-28T20:35:01+00:00",
                        "id": 410654,
                        "avg_process_sec": 1.24177
                    },
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:29:32+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:30:01+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 14.8803,
                        "waiting_job_count": 1,
                        "date_created": "2012-09-28T20:30:01+00:00",
                        "id": 410653,
                        "avg_process_sec": 1.19637
                    }
                ],
                "total": 12,
                "socorro_revision":
                    "017d7b3f7042ce76bc80949ae55b41d1e915ab62"
            }
        """)

    rget.side_effect = mocked_get

    url = reverse('crashstats.status')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('017d7b3f7042ce76bc80949ae55b41d1e915ab62' in response.content)
    ok_('1035' in response.content)
    # ISO timestamps from the payload are re-rendered human-readable
    ok_('Sep 28 2012 20:30:01' in response.content)
def test_login_required(self):
    """The exploitable-crashes view redirects anonymous users to the
    login page.

    Bug fix: the original assertion was
    ``ok_(settings.LOGIN_URL in response['Location'] + '?next=%s' % url)``.
    Because ``in`` binds looser than ``+``, the ``'?next='`` suffix was
    appended to the *haystack*, making it a no-op; the test only ever
    verified that LOGIN_URL appears in the Location header.  Make that
    intent explicit.
    """
    url = reverse('crashstats.exploitable_crashes')
    response = self.client.get(url)
    eq_(response.status_code, 302)
    ok_(settings.LOGIN_URL in response['Location'])
@mock.patch('requests.get')
def test_status_json(self, rget):
    """status_json passes the middleware /status/ payload through as
    JSON and sets a permissive CORS header."""
    def mocked_get(**options):
        assert 'status' in options['url'], options['url']
        return Response("""
            {
                "breakpad_revision": "1035",
                "hits": [
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:39:33+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:40:00+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 16.407,
                        "waiting_job_count": 56,
                        "date_created": "2012-09-28T20:40:02+00:00",
                        "id": 410655,
                        "avg_process_sec": 0.914149
                    },
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:34:33+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:35:00+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 13.8293,
                        "waiting_job_count": 48,
                        "date_created": "2012-09-28T20:35:01+00:00",
                        "id": 410654,
                        "avg_process_sec": 1.24177
                    },
                    {
                        "date_oldest_job_queued":
                            "2012-09-28T20:29:32+00:00",
                        "date_recently_completed":
                            "2012-09-28T20:30:01+00:00",
                        "processors_count": 1,
                        "avg_wait_sec": 14.8803,
                        "waiting_job_count": 1,
                        "date_created": "2012-09-28T20:30:01+00:00",
                        "id": 410653,
                        "avg_process_sec": 1.19637
                    }
                ],
                "total": 12,
                "socorro_revision":
                    "017d7b3f7042ce76bc80949ae55b41d1e915ab62"
            }
        """)

    rget.side_effect = mocked_get

    url = reverse('crashstats.status_json')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    # the payload is passed through verbatim, not re-rendered
    ok_(response.content.strip().startswith('{'))
    ok_('017d7b3f7042ce76bc80949ae55b41d1e915ab62' in response.content)
    ok_('1035' in response.content)
    # unlike the HTML status page, timestamps stay in ISO format
    ok_('2012-09-28T20:30:01+00:00' in response.content)
    ok_('application/json' in response['Content-Type'])
    # CORS: the JSON endpoint is consumable from any origin
    eq_('*', response['Access-Control-Allow-Origin'])
@mock.patch('requests.get')
def test_crontabber_state(self, rget):
    """The crontabber-state page renders both the raw last_updated
    timestamp and a human-formatted version of it."""
    def mocked_get(**options):
        assert 'crontabber_state' in options['url'], options['url']
        return Response("""
            {
                "state": {
                    "slow-one": {
                        "next_run": "2013-02-19 01:16:00.893834",
                        "first_run": "2012-11-05 23:27:07.316347",
                        "last_error": {
                            "traceback": "error error error",
                            "type": "<class 'sluggish.jobs.InternalError'>",
                            "value": "Have already run this for 2012-12-24 23:27"
                        },
                        "last_run": "2013-02-09 00:16:00.893834",
                        "last_success": "2012-12-24 22:27:07.316893",
                        "error_count": 6,
                        "depends_on": []
                    },
                    "slow-two": {
                        "next_run": "2012-11-12 19:39:59.521605",
                        "first_run": "2012-11-05 23:27:17.341879",
                        "last_error": {},
                        "last_run": "2012-11-12 18:39:59.521605",
                        "last_success": "2012-11-12 18:27:17.341895",
                        "error_count": 0,
                        "depends_on": ["slow-one"]
                    },
                    "slow-zero": {
                        "next_run": "2012-11-12 19:39:59.521605",
                        "first_run": "2012-11-05 23:27:17.341879",
                        "last_error": {},
                        "last_run": "2012-11-12 18:39:59.521605",
                        "last_success": "2012-11-12 18:27:17.341895",
                        "error_count": 0,
                        "depends_on": []
                    }
                },
                "last_updated": "2000-01-01T00:00:00+00:00"
            }
        """)

    rget.side_effect = mocked_get

    url = reverse('crashstats.crontabber_state')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('2000-01-01T00:00:00+00:00' in response.content)
    # the template also renders a formatted variant of last_updated
    ok_('1/01/2000 00:00 UTC' in response.content)
@mock.patch('requests.get')
def test_crontabber_state_json(self, rget):
    """crontabber_state_json relays the middleware state payload
    unchanged as application/json."""
    url = reverse('crashstats.crontabber_state_json')

    # Python dict rather than a JSON string so it can also serve as the
    # expected value in the round-trip comparison at the bottom.
    sample_data = {
        "state": {
            "slow-one": {
                "next_run": "2013-02-19 01:16:00.893834",
                "first_run": "2012-11-05 23:27:07.316347",
                "last_error": {
                    "traceback": "error error error",
                    "type": "<class 'sluggish.jobs.InternalError'>",
                    "value": "Have already run this for 2012-12-24 23:27"
                },
                "last_run": "2013-02-09 00:16:00.893834",
                "last_success": "2012-12-24 22:27:07.316893",
                "error_count": 6,
                "depends_on": []
            }
        }
    }

    def mocked_get(**options):
        assert 'crontabber_state' in options['url']
        return Response(json.dumps(sample_data))

    rget.side_effect = mocked_get
    response = self.client.get(url)
    ok_('application/json' in response['Content-Type'])
    eq_(response.status_code, 200)
    # the view must not reshape the payload at all
    eq_(sample_data, json.loads(response.content))
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_index(self, rget, rpost):
    """report_index renders a processed crash: deduplicated bug links,
    signature, UUID and comments for everyone; email addresses, URLs
    and raw-dump downloads only for authenticated users.
    """
    # fixture values interpolated into the mocked middleware responses
    dump = "OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod"
    comment0 = "This is a comment"
    email0 = "some@emailaddress.com"
    url0 = "someaddress.com"
    email1 = "some@otheremailaddress.com"

    def mocked_get(url, **options):
        if '/crash_data/' in url and '/datatype/meta/' in url:
            return Response("""
                {
                  "InstallTime": "1339289895",
                  "FramePoisonSize": "4096",
                  "Theme": "classic/1.0",
                  "Version": "5.0a1",
                  "Email": "%s",
                  "Vendor": "Mozilla",
                  "URL": "%s"
                }
            """ % (email0, url0))
        if 'crashes/comments' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": "%s",
                      "date_processed": "2012-08-21T11:17:28-07:00",
                      "email": "%s",
                      "uuid": "469bde48-0e8f-3586-d486-b98810120830"
                    }
                  ],
                  "total": 1
                }
            """ % (comment0, email1))
        if '/crash_data/' in url and '/datatype/processed' in url:
            return Response("""
                {
                  "client_crash_date": "2012-06-11T06:08:45",
                  "dump": "%s",
                  "signature": "FakeSignature1",
                  "user_comments": null,
                  "uptime": 14693,
                  "release_channel": "nightly",
                  "uuid": "11cb72f5-eb28-41e1-a8e4-849982120611",
                  "flash_version": "[blank]",
                  "hangid": null,
                  "distributor_version": null,
                  "truncated": true,
                  "process_type": null,
                  "id": 383569625,
                  "os_version": "10.6.8 10K549",
                  "version": "5.0a1",
                  "build": "20120609030536",
                  "ReleaseChannel": "nightly",
                  "addons_checked": null,
                  "product": "WaterWolf",
                  "os_name": "Mac OS X",
                  "last_crash": 371342,
                  "date_processed": "2012-06-11T06:08:44",
                  "cpu_name": "amd64",
                  "reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
                  "address": "0x8",
                  "completeddatetime": "2012-06-11T06:08:57",
                  "success": true
                }
            """ % dump)
        if 'correlations/signatures' in url:
            return Response("""
                {
                  "hits": [
                    "FakeSignature1",
                    "FakeSignature2"
                  ],
                  "total": 2
                }
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    def mocked_post(url, **options):
        if '/bugs/' in url:
            # the same bug id twice, for two signatures — the page must
            # deduplicate the bugzilla link (asserted below)
            return Response("""
                {"hits": [{"id": "111222333444",
                           "signature": "FakeSignature1"},
                          {"id": "111222333444",
                           "signature": "FakeSignature2"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    url = reverse('crashstats.report_index',
                  args=['11cb72f5-eb28-41e1-a8e4-849982120611'])
    response = self.client.get(url)
    eq_(response.status_code, 200)
    # link to bugzilla with that bug ID should only appear once
    eq_(response.content.count('show_bug.cgi?id=111222333444'), 1)
    ok_('FakeSignature1' in response.content)
    ok_('11cb72f5-eb28-41e1-a8e4-849982120611' in response.content)
    ok_(comment0 in response.content)
    # PII (emails, URLs) is hidden from anonymous users
    ok_(email0 not in response.content)
    ok_(email1 not in response.content)
    ok_(url0 not in response.content)
    ok_(
        'You need to be signed in to be able to download raw dumps.'
        in response.content
    )

    # the email address will appear if we log in
    User.objects.create_user('test', 'test@mozilla.com', 'secret')
    assert self.client.login(username='test', password='secret')
    response = self.client.get(url)
    ok_(email0 in response.content)
    ok_(email1 in response.content)
    ok_(url0 in response.content)
    eq_(response.status_code, 200)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_pending_today(self, rget, rpost):
    """When the processed crash 404s, a crash id dated today shows the
    pending spinner, one dated yesterday shows 'Crash Not Found', and a
    malformed crash id is a 400."""
    def processed_is_missing(url, **options):
        # only the processed-crash fetch is simulated; other GETs
        # fall through and return None, matching the original mock
        if '/crash_data/' in url and '/datatype/processed' in url:
            raise models.BadStatusCodeError(404)
    rget.side_effect = processed_is_missing

    def report_url_for(date_suffix):
        return reverse(
            'crashstats.report_index',
            args=['11cb72f5-eb28-41e1-a8e4-849982%s' % date_suffix]
        )

    today_suffix = datetime.datetime.utcnow().strftime('%y%m%d')
    response = self.client.get(report_url_for(today_suffix))
    ok_('pendingStatus' in response.content)
    eq_(response.status_code, 200)

    one_day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1)
    response = self.client.get(report_url_for(one_day_ago.strftime('%y%m%d')))
    ok_('Crash Not Found' in response.content)
    eq_(response.status_code, 200)

    response = self.client.get(
        reverse('crashstats.report_index', args=['blablabla'])
    )
    eq_(response.status_code, 400)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_index_with_hangid_in_raw_data(self, rget, rpost):
    """When the raw (meta) crash data carries a HangID, report_index
    looks up the paired crash and renders the hang-pair information."""
    dump = "OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod"
    comment0 = "This is a comment"
    email0 = "some@emailaddress.com"
    url0 = "someaddress.com"
    email1 = "some@otheremailaddress.com"

    def mocked_get(url, **options):
        if '/crash_data/' in url and '/datatype/meta/' in url:
            # note the extra "HangID" key compared to test_report_index
            return Response("""
                {
                  "InstallTime": "1339289895",
                  "FramePoisonSize": "4096",
                  "Theme": "classic/1.0",
                  "Version": "5.0a1",
                  "Email": "%s",
                  "Vendor": "Mozilla",
                  "URL": "%s",
                  "HangID": "123456789"
                }
            """ % (email0, url0))
        if '/crashes/paireduuid/' in url:
            # the paired-crash lookup triggered by the HangID
            return Response("""
                {
                  "hits": [{
                      "uuid": "e8820616-1462-49b6-9784-e99a32120201"
                  }],
                  "total": 1
                }
            """)
        if 'crashes/comments' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": "%s",
                      "date_processed": "2012-08-21T11:17:28-07:00",
                      "email": "%s",
                      "uuid": "469bde48-0e8f-3586-d486-b98810120830"
                    }
                  ],
                  "total": 1
                }
            """ % (comment0, email1))
        if 'correlations/signatures' in url:
            return Response("""
                {
                  "hits": [
                    "FakeSignature1",
                    "FakeSignature2"
                  ],
                  "total": 2
                }
            """)
        if '/crash_data/' in url and '/datatype/processed' in url:
            return Response("""
                {
                  "client_crash_date": "2012-06-11T06:08:45",
                  "dump": "%s",
                  "signature": "FakeSignature1",
                  "user_comments": null,
                  "uptime": 14693,
                  "release_channel": "nightly",
                  "uuid": "11cb72f5-eb28-41e1-a8e4-849982120611",
                  "flash_version": "[blank]",
                  "hangid": null,
                  "distributor_version": null,
                  "truncated": true,
                  "process_type": null,
                  "id": 383569625,
                  "os_version": "10.6.8 10K549",
                  "version": "5.0a1",
                  "build": "20120609030536",
                  "ReleaseChannel": "nightly",
                  "addons_checked": null,
                  "product": "WaterWolf",
                  "os_name": "Mac OS X",
                  "last_crash": 371342,
                  "date_processed": "2012-06-11T06:08:44",
                  "cpu_name": "amd64",
                  "reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
                  "address": "0x8",
                  "completeddatetime": "2012-06-11T06:08:57",
                  "success": true
                }
            """ % dump)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    def mocked_post(url, **options):
        if '/bugs/' in url:
            return Response("""
                {"hits": [{"id": "123456789",
                           "signature": "Something"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    url = reverse('crashstats.report_index',
                  args=['11cb72f5-eb28-41e1-a8e4-849982120611'])
    response = self.client.get(url)
    ok_('Hang Minidump' in response.content)
    # the HangID in the fixture above
    ok_('123456789' in response.content)
@mock.patch('requests.get')
def test_report_index_not_found(self, rget):
    """A 404 from the processed-crash middleware renders the friendly
    'not found' page with a 200 status."""
    missing_crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611'

    def processed_raises_404(url, **options):
        if '/datatype/processed/' not in url:
            raise NotImplementedError(url)
        raise models.BadStatusCodeError(404)
    rget.side_effect = processed_raises_404

    response = self.client.get(
        reverse('crashstats.report_index', args=[missing_crash_id])
    )
    eq_(response.status_code, 200)
    ok_("We couldn't find" in response.content)
@mock.patch('requests.get')
def test_report_index_pending(self, rget):
    """A 408 from the processed-crash middleware means the archived
    report is still being fetched; the page says so with a 200."""
    pending_crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611'

    def processed_raises_408(url, **options):
        if '/datatype/processed/' not in url:
            raise NotImplementedError(url)
        raise models.BadStatusCodeError(408)
    rget.side_effect = processed_raises_408

    response = self.client.get(
        reverse('crashstats.report_index', args=[pending_crash_id])
    )
    eq_(response.status_code, 200)
    ok_('Fetching this archived report' in response.content)
@mock.patch('requests.get')
def test_report_index_too_old(self, rget):
    """A 410 from the processed-crash middleware means the archived
    report has expired; the page says so with a 200."""
    expired_crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611'

    def processed_raises_410(url, **options):
        if '/datatype/processed/' not in url:
            raise NotImplementedError(url)
        raise models.BadStatusCodeError(410)
    rget.side_effect = processed_raises_410

    response = self.client.get(
        reverse('crashstats.report_index', args=[expired_crash_id])
    )
    eq_(response.status_code, 200)
    ok_('This archived report has expired' in response.content)
@mock.patch('requests.get')
def test_report_pending_json(self, rget):
    """report_pending returns a JSON 'not available yet' payload while
    the processed crash is still pending (middleware returns 408).

    Bug fix: ``mocked_get`` was defined but never attached to ``rget``,
    so the view talked to a bare MagicMock instead of the simulated
    408 response.  Wire it up like every sibling test does.
    """
    crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611'

    def mocked_get(url, **options):
        if '/datatype/processed/' in url:
            raise models.BadStatusCodeError(408)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    url = reverse('crashstats.report_pending',
                  args=[crash_id])
    response = self.client.get(url)

    expected = {
        'status': 'error',
        'status_message': ('The report for %s'
                           ' is not available yet.' % crash_id),
        'url_redirect': ''
    }

    eq_(response.status_code, 200)
    eq_(expected, json.loads(response.content))
def test_report_index_and_pending_missing_crash_id(self):
    """Both report views 404 when the crash id argument is empty."""
    for view_name in ('crashstats.report_index',
                      'crashstats.report_pending'):
        response = self.client.get(reverse(view_name, args=['']))
        eq_(response.status_code, 404)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_list(self, rget, rpost):
    """report_list requires a signature and a valid range_value; when
    valid it renders the report rows and scrubs email addresses (both
    the 'email' field and addresses embedded inside comments) for
    anonymous users.
    """
    def mocked_post(url, **options):
        if '/bugs/' in url:
            return Response("""
               {"hits": [{"id": "123456789",
                          "signature": "Something"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    def mocked_get(url, **options):
        if 'report/list/' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": null,
                      "product": "WaterWolf",
                      "os_name": "Linux",
                      "uuid": "441017f4-e006-4eea-8451-dc20e0120905",
                      "cpu_info": "...",
                      "url": "http://example.com/116",
                      "last_crash": 1234,
                      "date_processed": "2012-09-05T21:18:58+00:00",
                      "cpu_name": "x86",
                      "uptime": 1234,
                      "process_type": "browser",
                      "hangid": null,
                      "reason": "reason7",
                      "version": "5.0a1",
                      "os_version": "1.2.3.4",
                      "build": "20120901000007",
                      "install_age": 1234,
                      "signature": "FakeSignature2",
                      "install_time": "2012-09-05T20:58:24+00:00",
                      "address": "0xdeadbeef",
                      "duplicate_of": null
                    },
                    {
                      "user_comments": null,
                      "product": "WaterWolf",
                      "os_name": "Mac OS X",
                      "uuid": "e491c551-be0d-b0fb-c69e-107380120905",
                      "cpu_info": "...",
                      "url": "http://example.com/60053",
                      "last_crash": 1234,
                      "date_processed": "2012-09-05T21:18:58+00:00",
                      "cpu_name": "x86",
                      "uptime": 1234,
                      "process_type": "content",
                      "hangid": null,
                      "reason": "reason7",
                      "version": "5.0a1",
                      "os_version": "1.2.3.4",
                      "build": "20120822000007",
                      "install_age": 1234,
                      "signature": "FakeSignature2",
                      "install_time": "2012-09-05T20:58:24+00:00",
                      "address": "0xdeadbeef",
                      "duplicate_of": null
                    }
                  ],
                  "total": 2
                }
            """)
        if '/crashes/comments/' in url:
            # the comment text deliberately embeds an email address to
            # verify the scrubbing also covers comment bodies
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": "I LOVE CHEESE cheese@email.com",
                      "date_processed": "2012-08-21T11:17:28-07:00",
                      "email": "bob@uncle.com",
                      "uuid": "469bde48-0e8f-3586-d486-b98810120830"
                    }
                  ],
                  "total": 1
                }
            """)
        if 'correlations/signatures' in url:
            return Response("""
                {
                  "hits": [
                    "FakeSignature1",
                    "FakeSignature2"
                  ],
                  "total": 2
                }
            """)
        if 'products/builds/product' in url:
            return Response("""
                [
                  {
                    "product": "WaterWolf",
                    "repository": "dev",
                    "buildid": 20130709000007,
                    "beta_number": 0,
                    "platform": "Windows",
                    "version": "5.0a1",
                    "date": "2013-07-09",
                    "build_type": "Nightly"
                  }
                ]
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    url = reverse('crashstats.report_list')
    # missing signature -> bad request
    response = self.client.get(url)
    eq_(response.status_code, 400)

    # non-numeric range_value -> bad request
    response = self.client.get(url, {
        'signature': 'sig',
        'range_value': 'xxx'
    })
    eq_(response.status_code, 400)

    response = self.client.get(url, {'signature': 'sig'})
    eq_(response.status_code, 200)

    response = self.client.get(url, {
        'signature': 'sig',
        'range_value': 3
    })
    eq_(response.status_code, 200)
    ok_('0xdeadbeef' in response.content)

    # comment text shows, but all email addresses are scrubbed
    ok_('I LOVE CHEESE' in response.content)
    ok_('bob@uncle.com' not in response.content)
    ok_('cheese@email.com' not in response.content)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_index_redirect_by_prefix(self, rget, rpost):
    """A crash id carrying the configured CRASH_ID_PREFIX redirects to
    the canonical report_index URL without the prefix."""
    dump = "OS|Mac OS X|10.6.8 10K549\\nCPU|amd64|family 6 mod"
    comment0 = "This is a comment"
    email0 = "some@emailaddress.com"
    url0 = "someaddress.com"
    email1 = "some@otheremailaddress.com"

    def mocked_get(url, **options):
        if '/crash_data/' in url and '/datatype/meta/' in url:
            return Response("""
                {
                  "InstallTime": "1339289895",
                  "FramePoisonSize": "4096",
                  "Theme": "classic/1.0",
                  "Version": "5.0a1",
                  "Email": "%s",
                  "Vendor": "Mozilla",
                  "URL": "%s"
                }
            """ % (email0, url0))
        if 'crashes/comments' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": "%s",
                      "date_processed": "2012-08-21T11:17:28-07:00",
                      "email": "%s",
                      "uuid": "469bde48-0e8f-3586-d486-b98810120830"
                    }
                  ],
                  "total": 1
                }
            """ % (comment0, email1))
        if '/crash_data/' in url and '/datatype/processed' in url:
            return Response("""
                {
                  "client_crash_date": "2012-06-11T06:08:45",
                  "dump": "%s",
                  "signature": "FakeSignature1",
                  "user_comments": null,
                  "uptime": 14693,
                  "release_channel": "nightly",
                  "uuid": "11cb72f5-eb28-41e1-a8e4-849982120611",
                  "flash_version": "[blank]",
                  "hangid": null,
                  "distributor_version": null,
                  "truncated": true,
                  "process_type": null,
                  "id": 383569625,
                  "os_version": "10.6.8 10K549",
                  "version": "5.0a1",
                  "build": "20120609030536",
                  "ReleaseChannel": "nightly",
                  "addons_checked": null,
                  "product": "WaterWolf",
                  "os_name": "Mac OS X",
                  "last_crash": 371342,
                  "date_processed": "2012-06-11T06:08:44",
                  "cpu_name": "amd64",
                  "reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
                  "address": "0x8",
                  "completeddatetime": "2012-06-11T06:08:57",
                  "success": true
                }
            """ % dump)
        if 'correlations/signatures' in url:
            return Response("""
                {
                  "hits": [
                    "FakeSignature1",
                    "FakeSignature2"
                  ],
                  "total": 2
                }
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    def mocked_post(url, **options):
        if '/bugs/' in url:
            return Response("""
               {"hits": [{"id": "123456789",
                          "signature": "Something"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    base_crash_id = '11cb72f5-eb28-41e1-a8e4-849982120611'
    crash_id = settings.CRASH_ID_PREFIX + base_crash_id
    # a prefixed id is longer than a plain 36-char UUID
    assert len(crash_id) > 36
    url = reverse('crashstats.report_index', args=[crash_id])
    response = self.client.get(url)
    correct_url = reverse('crashstats.report_index', args=[base_crash_id])
    self.assertRedirects(response, correct_url)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_list_with_no_data(self, rget, rpost):
    """report_list with zero matching reports renders the empty-state
    message rather than erroring."""
    def mocked_post(url, **options):
        if '/bugs/' in url:
            return Response("""
               {"hits": [{"id": "123456789",
                          "signature": "Something"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    def mocked_get(url, **options):
        if 'report/list/' in url:
            return Response("""
                {
                  "hits": [],
                  "total": 0
                }
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    url = reverse('crashstats.report_list')
    response = self.client.get(url, {'signature': 'sig'})
    eq_(response.status_code, 200)
    # it sucks to depend on the output like this but it'll do for now since
    # it's quite a rare occurrence.
    ok_('no reports in the time period specified' in response.content)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_report_list_logged_in(self, rget, rpost):
    """Signature URLs and commenter emails on report_list are hidden
    for anonymous users, shown for authenticated ones, and very long
    URLs are truncated to 80 chars with an ellipsis."""
    def mocked_post(url, **options):
        if '/bugs/' in url:
            return Response("""
               {"hits": [{"id": "123456789",
                          "signature": "Something"}]}
            """)
        raise NotImplementedError(url)

    rpost.side_effect = mocked_post

    really_long_url = (
        'http://thisistheworldsfivehundredthirtyfifthslong'
        'esturk.com/that/contains/a/path/and/?a=query&'
    )
    # the truncation assertion at the bottom only makes sense if the
    # fixture URL actually exceeds the 80-char display limit
    assert len(really_long_url) > 80

    def mocked_get(url, **options):
        if '/signatureurls/' in url:
            return Response("""{
                "hits": [
                    {"url": "http://farm.ville", "crash_count":123},
                    {"url": "%s", "crash_count": 1}
                ],
                "total": 2
                }
            """ % (really_long_url))
        if 'report/list/' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": null,
                      "product": "WaterWolf",
                      "os_name": "Linux",
                      "uuid": "441017f4-e006-4eea-8451-dc20e0120905",
                      "cpu_info": "...",
                      "url": "http://example.com/116",
                      "last_crash": 1234,
                      "date_processed": "2012-09-05T21:18:58+00:00",
                      "cpu_name": "x86",
                      "uptime": 1234,
                      "process_type": "browser",
                      "hangid": null,
                      "reason": "reason7",
                      "version": "5.0a1",
                      "os_version": "1.2.3.4",
                      "build": "20120901000007",
                      "install_age": 1234,
                      "signature": "FakeSignature2",
                      "install_time": "2012-09-05T20:58:24+00:00",
                      "address": "0xdeadbeef",
                      "duplicate_of": null
                    },
                    {
                      "user_comments": null,
                      "product": "WaterWolf",
                      "os_name": "Mac OS X",
                      "uuid": "e491c551-be0d-b0fb-c69e-107380120905",
                      "cpu_info": "...",
                      "url": "http://example.com/60053",
                      "last_crash": 1234,
                      "date_processed": "2012-09-05T21:18:58+00:00",
                      "cpu_name": "x86",
                      "uptime": 1234,
                      "process_type": "content",
                      "hangid": null,
                      "reason": "reason7",
                      "version": "5.0a1",
                      "os_version": "1.2.3.4",
                      "build": "20120822000007",
                      "install_age": 1234,
                      "signature": "FakeSignature2",
                      "install_time": "2012-09-05T20:58:24+00:00",
                      "address": "0xdeadbeef",
                      "duplicate_of": null
                    }
                  ],
                  "total": 2
                }
            """)
        if '/crashes/comments/' in url:
            return Response("""
                {
                  "hits": [
                    {
                      "user_comments": "I LOVE CHEESE",
                      "date_processed": "2012-08-21T11:17:28-07:00",
                      "email": "bob@uncle.com",
                      "uuid": "469bde48-0e8f-3586-d486-b98810120830"
                    }
                  ],
                  "total": 1
                }
            """)
        if 'correlations/signatures' in url:
            return Response("""
                {
                  "hits": [
                    "FakeSignature1",
                    "FakeSignature2"
                  ],
                  "total": 2
                }
            """)
        if 'products/builds/product' in url:
            return Response("""
                [
                  {
                    "product": "WaterWolf",
                    "repository": "dev",
                    "buildid": 20130709000007,
                    "beta_number": 0,
                    "platform": "Windows",
                    "version": "5.0a1",
                    "date": "2013-07-09",
                    "build_type": "Nightly"
                  }
                ]
            """)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    url = reverse('crashstats.report_list')
    response = self.client.get(url, {'signature': 'sig'})
    eq_(response.status_code, 200)
    # anonymous: URLs and emails are hidden
    ok_('http://farm.ville' not in response.content)
    ok_('bob@uncle.com' not in response.content)

    User.objects.create_user('test', 'test@mozilla.com', 'secret')
    assert self.client.login(username='test', password='secret')
    url = reverse('crashstats.report_list')
    response = self.client.get(url, {'signature': 'sig'})
    eq_(response.status_code, 200)
    # now it suddenly appears when we're logged in
    ok_('http://farm.ville' in response.content)
    ok_('bob@uncle.com' in response.content)
    # not too long...
    ok_(really_long_url[:80 - 3] + '...' in response.content)
@mock.patch('requests.get')
def test_raw_data(self, rget):
    """Raw crash data requires login; the JSON flavor returns the meta
    data as application/json and the dmp flavor returns the binary dump
    as octet-stream.  Dump responses are cached, so a changed backend
    answer must NOT show up on a repeat request."""
    def mocked_get(url, **options):
        assert '/crash_data/' in url
        if 'datatype/meta/' in url:
            return Response("""
              {"foo": "bar",
               "stuff": 123}
            """)
        if '/datatype/raw/' in url:
            return Response("""
              bla bla bla
            """.strip())
        raise NotImplementedError(url)

    rget.side_effect = mocked_get

    crash_id = '176bcd6c-c2ec-4b0c-9d5f-dadea2120531'
    json_url = reverse('crashstats.raw_data', args=(crash_id, 'json'))
    # anonymous access is forbidden
    response = self.client.get(json_url)
    eq_(response.status_code, 403)

    User.objects.create_user('test', 'test@mozilla.com', 'secret')
    assert self.client.login(username='test', password='secret')
    response = self.client.get(json_url)
    eq_(response.status_code, 200)
    eq_(response['Content-Type'], 'application/json')
    eq_(json.loads(response.content),
        {"foo": "bar", "stuff": 123})

    dump_url = reverse('crashstats.raw_data', args=(crash_id, 'dmp'))
    response = self.client.get(dump_url)
    eq_(response.status_code, 200)
    eq_(response['Content-Type'], 'application/octet-stream')
    ok_('bla bla bla' in response.content)

    # dump files are cached.
    # check the mock function and expect no change
    def different_mocked_get(url, **options):
        if 'crash_data/datatype/raw/uuid' in url:
            return Response("""
              SOMETHING DIFFERENT
            """.strip())
        raise NotImplementedError(url)

    rget.side_effect = different_mocked_get

    response = self.client.get(dump_url)
    eq_(response.status_code, 200)
    ok_('bla bla bla' in response.content)  # still. good.
@mock.patch('requests.get')
def test_links_to_builds_rss(self, rget):
    """The builds page links to the product-level RSS feed when no
    version is given, and to the version-level feed otherwise —
    never both."""
    def mocked_get(url, **options):
        if 'products/builds/product' in url:
            # Note that the last one isn't build_type==Nightly
            return Response("""
                [
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000001,
                    "beta_number": null,
                    "platform": "Mac OS X",
                    "version": "19.0",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  },
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000002,
                    "beta_number": null,
                    "platform": "Windows",
                    "version": "19.0",
                    "date": "2012-06-25",
                    "build_type": "Nightly"
                  },
                  {
                    "product": "Firefox",
                    "repository": "dev",
                    "buildid": 20120625000003,
                    "beta_number": null,
                    "platform": "BeOS",
                    "version": "5.0a1",
                    "date": "2012-06-25",
                    "build_type": "Beta"
                  }
                ]
            """)
        raise NotImplementedError(url)
    rget.side_effect = mocked_get

    rss_product_url = reverse('crashstats.buildsrss', args=('Firefox',))
    rss_version_url = reverse('crashstats.buildsrss',
                              args=('Firefox', '19.0'))

    url = reverse('crashstats.builds', args=('Firefox',))
    response = self.client.get(url)
    ok_('href="%s"' % rss_product_url in response.content)
    ok_('href="%s"' % rss_version_url not in response.content)

    url = reverse('crashstats.builds', args=('Firefox', '19.0'))
    response = self.client.get(url)
    ok_('href="%s"' % rss_product_url not in response.content)
    ok_('href="%s"' % rss_version_url in response.content)
@mock.patch('requests.post')
@mock.patch('requests.get')
def test_remembered_date_range_type(self, rget, rpost):
    # if you visit the home page, the default date_range_type will be
    # 'report' but if you switch to 'build' it'll remember that
    #
    # The selection is sticky across views: switching via the frontpage
    # AJAX endpoint or via a topcrasher URL changes which option is
    # rendered as 'selected' on the next home-page load (presumably
    # persisted in the session — verify against the view).
    def mocked_get(url, **options):
        if 'products' in url and not 'version' in url:
            # product list without a version filter
            return Response("""
                {
                  "products": [
                    "Firefox"
                  ],
                  "hits": {
                    "Firefox": [{
                      "featured": true,
                      "throttle": 100.0,
                      "end_date": "2012-11-27",
                      "product": "Firefox",
                      "release": "Nightly",
                      "version": "19.0",
                      "has_builds": true,
                      "start_date": "2012-09-25"
                    }]
                  },
                  "total": 1
                }
            """)
        elif 'products' in url:
            # product lookup for a specific version
            return Response("""
                {
                  "hits": [{
                    "is_featured": true,
                    "throttle": 100.0,
                    "end_date": "2012-11-27",
                    "product": "Firefox",
                    "build_type": "Nightly",
                    "version": "19.0",
                    "has_builds": true,
                    "start_date": "2012-09-25"
                  }],
                  "total": 1
                }
            """)
        if 'crashes/daily' in url:
            return Response("""
                {
                  "hits": {
                    "Firefox:19.0": {
                      "2012-10-08": {
                        "product": "Firefox",
                        "adu": 30000,
                        "crash_hadu": 71.099999999999994,
                        "version": "19.0",
                        "report_count": 2133,
                        "date": "2012-10-08"
                      },
                      "2012-10-02": {
                        "product": "Firefox",
                        "adu": 30000,
                        "crash_hadu": 77.299999999999997,
                        "version": "19.0",
                        "report_count": 2319,
                        "date": "2012-10-02"
                      }
                    }
                  }
                }
            """)
        if 'crashes/signatures' in url:
            return Response("""
                {"crashes": [
                   {
                     "count": 188,
                     "mac_count": 66,
                     "content_count": 0,
                     "first_report": "2012-06-21",
                     "startup_percent": 0.0,
                     "currentRank": 0,
                     "previousRank": 1,
                     "first_report_exact": "2012-06-21T21:28:08",
                     "versions":
                         "2.0, 2.1, 3.0a2, 3.0b2, 3.1b1, 4.0a1, 4.0a2, 5.0a1",
                     "percentOfTotal": 0.24258064516128999,
                     "win_count": 56,
                     "changeInPercentOfTotal": 0.011139597126354983,
                     "linux_count": 66,
                     "hang_count": 0,
                     "signature": "FakeSignature1",
                     "versions_count": 8,
                     "changeInRank": 0,
                     "plugin_count": 0,
                     "previousPercentOfTotal": 0.23144104803493501,
                     "is_gc_count": 10
                   }
                 ],
                 "totalPercentage": 0,
                 "start_date": "2012-05-10",
                 "end_date": "2012-05-24",
                 "totalNumberOfCrashes": 0}
            """)
        raise NotImplementedError(url)

    def mocked_post(**options):
        assert '/bugs/' in options['url'], options['url']
        return Response("""
           {"hits": [{"id": "123456789",
                      "signature": "Something"}]}
        """)

    rpost.side_effect = mocked_post
    rget.side_effect = mocked_get

    url = reverse('crashstats.home', args=('Firefox',))
    response = self.client.get(url)
    eq_(response.status_code, 200)

    # captures (whole <a> tag, date_range_type value) pairs so we can
    # check which option carries the 'selected' marker
    regex = re.compile('(<a\s+href="\?date_range_type=(\w+)[^>]+)')
    for tag, value in regex.findall(response.content):
        if value == 'report':
            ok_('selected' in tag)
        else:
            ok_('selected' not in tag)

    # now, like the home page does, fire of an AJAX request to frontpage
    # for 'build' instead
    frontpage_json_url = reverse('crashstats.frontpage_json')
    frontpage_reponse = self.client.get(frontpage_json_url, {
        'product': 'Firefox',
        'date_range_type': 'build'
    })
    eq_(frontpage_reponse.status_code, 200)

    # load the home page again, and it should be on build date instead
    response = self.client.get(url)
    eq_(response.status_code, 200)
    for tag, value in regex.findall(response.content):
        if value == 'build':
            ok_('selected' in tag)
        else:
            ok_('selected' not in tag)

    # open topcrashers with 'report'
    topcrasher_report_url = reverse(
        'crashstats.topcrasher',
        kwargs={
            'product': 'Firefox',
            'versions': '19.0',
            'date_range_type': 'report'
        }
    )
    response = self.client.get(topcrasher_report_url)
    eq_(response.status_code, 200)

    # now, go back to the home page, and 'report' should be the new default
    response = self.client.get(url)
    eq_(response.status_code, 200)
    for tag, value in regex.findall(response.content):
        if value == 'report':
            ok_('selected' in tag)
        else:
            ok_('selected' not in tag)

    # open topcrashers with 'build'
    topcrasher_report_url = reverse(
        'crashstats.topcrasher',
        kwargs={
            'product': 'Firefox',
            'versions': '19.0',
            'date_range_type': 'build'
        }
    )
    response = self.client.get(topcrasher_report_url)
    eq_(response.status_code, 200)

    # now, go back to the home page, and 'report' should be the new default
    response = self.client.get(url)
    eq_(response.status_code, 200)
    for tag, value in regex.findall(response.content):
        if value == 'build':
            ok_('selected' in tag)
        else:
            ok_('selected' not in tag)
@mock.patch('requests.get')
def test_correlations_json(self, rget):
    """correlations_json proxies a correlations report from the
    middleware and returns it as JSON.

    Fixes: (1) the status-code check used ``ok_(status, 200)``, where
    200 was merely the assertion *message* — only truthiness was ever
    checked; it is now a real equality assertion.  (2) removed an
    unreachable ``raise NotImplementedError(url)`` that followed an
    unconditional ``return`` inside the mock.
    """
    url = reverse('crashstats.correlations_json')

    def mocked_get(url, **options):
        # the assert guards against the view hitting any other endpoint
        assert 'correlations/report_type' in url
        return Response("""
            {
                "reason": "EXC_BAD_ACCESS / KERN_INVALID_ADDRESS",
                "count": 13,
                "load": "36% (4/11) vs. 26% (47/180) amd64 with 2 cores"
            }
        """)

    rget.side_effect = mocked_get

    response = self.client.get(
        url,
        {'correlation_report_type': 'core-counts',
         'product': 'Firefox',
         'version': '19.0',
         'platform': 'Windows NT',
         'signature': 'FakeSignature'}
    )

    eq_(response.status_code, 200)
    ok_('application/json' in response['content-type'])
    struct = json.loads(response.content)
    eq_(struct['reason'], 'EXC_BAD_ACCESS / KERN_INVALID_ADDRESS')
@mock.patch('requests.get')
def test_correlations_signatures_json(self, rget):
url = reverse('crashstats.correlations_signatures_json')
def mocked_get(url, **options):
assert 'correlations/signatures' in url
return Response("""
{
"hits": ["FakeSignature1",
"FakeSignature2"],
"total": 2
}
""")
raise NotImplementedError(url)
rget.side_effect = mocked_get
response = self.client.get(
url,
{'correlation_report_type': 'core-counts',
'product': 'Firefox',
'version': '19.0',
'platforms': 'Windows NT,Linux'}
)
ok_(response.status_code, 200)
ok_('application/json' in response['content-type'])
struct = json.loads(response.content)
eq_(struct['total'], 2)
|
AdrianGaudebert/socorro-crashstats
|
crashstats/crashstats/tests/test_views.py
|
Python
|
mpl-2.0
| 120,644
|
# This is a template config file for marionette production on Windows.
import os

config = {
    # marionette options
    "test_type": "browser",
    "marionette_address": "localhost:2828",
    "test_manifest": "unit-tests.ini",

    # python / virtualenv locations on the Windows test slaves
    "virtualenv_python_dll": "c:/mozilla-build/python27/python27.dll",
    "virtualenv_path": "c:/talos-slave/test/build/venv",
    "exes": {
        "python": "c:/mozilla-build/python27/python",
        "virtualenv": [
            "c:/mozilla-build/python27/python",
            "c:/mozilla-build/buildbotve/virtualenv.py",
        ],
    },

    # package sources (internal mirrors only; no public index)
    "find_links": [
        "http://pypi.pvt.build.mozilla.org/pub",
        "http://pypi.pub.build.mozilla.org/pub",
    ],
    "pip_index": False,

    "buildbot_json_path": "buildprops.json",

    # mozharness actions run by default, in order
    "default_actions": [
        "clobber",
        "read-buildbot-config",
        "download-and-extract",
        "create-virtualenv",
        "install",
        "run-marionette",
    ],

    # blobber upload configuration
    "default_blob_upload_servers": [
        "https://blobupload.elasticbeanstalk.com",
    ],
    "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
}
|
lundjordan/mozharness
|
configs/marionette/windows_config.py
|
Python
|
mpl-2.0
| 1,094
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from io import StringIO
from django.conf import settings
from django.core.management.base import BaseCommand
from bedrock.sitemaps.models import SitemapURL
from bedrock.utils.git import GitRepo
class Command(BaseCommand):
    """Management command: pull the sitemaps git repo and refresh the DB.

    Clones/updates the configured sitemaps repository; when it has new
    commits (or ``--force`` is given) the ``SitemapURL`` table is refreshed
    and the repo's latest commit is recorded.
    """

    help = "Clones or updates sitemaps info from github"

    def add_arguments(self, parser):
        # Bug fix: the original lines ended with a stray trailing comma,
        # wrapping each call's (discarded) return value in a throwaway tuple.
        parser.add_argument("-q", "--quiet", action="store_true", dest="quiet", default=False, help="If no error occurs, swallow all output.")
        parser.add_argument("-f", "--force", action="store_true", dest="force", default=False, help="Load the data even if nothing new from git.")

    def handle(self, *args, **options):
        if options["quiet"]:
            # Redirect command output into an in-memory buffer.
            # NOTE(review): this pokes the private ``_out`` attribute of the
            # OutputWrapper — works, but relies on Django internals.
            self.stdout._out = StringIO()

        repo = GitRepo(settings.SITEMAPS_PATH, settings.SITEMAPS_REPO, name="Sitemaps")
        self.stdout.write("Updating git repo")
        repo.update()
        if not (options["force"] or repo.has_changes()):
            self.stdout.write("No sitemap updates")
            return

        SitemapURL.objects.refresh()
        # Remember which commit the DB now reflects.
        repo.set_db_latest()
        self.stdout.write("Updated sitemaps files")
|
flodolo/bedrock
|
bedrock/sitemaps/management/commands/update_sitemaps_data.py
|
Python
|
mpl-2.0
| 1,330
|
def singBottles():
    """Print the full '99 Bottles of Beer' song, one three-line verse per
    bottle, counting down from 99 to 1."""
    bottle = 99
    while bottle > 0:
        print(f"{bottle} bottles of beer on the wall, {bottle} bottles of beer.")
        print(f"Take one down pass it around,{bottle - 1} bottles of beer on the wall.")
        print(" ")
        bottle -= 1


singBottles()
|
Riotpunchbeats/VanierCompSci
|
DONE AND WORKING/19.py
|
Python
|
mpl-2.0
| 380
|
from .token_route import add_token
from .users_route import send_sms, add_user

# Bug fix: __all__ must list the *names* of the public objects as strings —
# ``from routes import *`` consumes strings, not the function objects
# themselves.
__all__ = ["add_token", "send_sms", "add_user"]
|
mrcodehang/cqut-chat-server
|
routes/__init__.py
|
Python
|
mpl-2.0
| 121
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import re
def split_symbol(treeherder_symbol):
    """Split a symbol expressed as grp(sym) into its two parts. If no group is
    given, the returned group is '?'"""
    if '(' not in treeherder_symbol:
        return '?', treeherder_symbol
    # grp(sym) -> ('grp', 'sym'); everything before the first '(' is the group.
    group, sym = re.match(r'([^(]*)\(([^)]*)\)', treeherder_symbol).groups()
    return group, sym
def join_symbol(group, symbol):
    """Perform the reverse of split_symbol, combining the given group and
    symbol. If the group is '?', then it is omitted."""
    return symbol if group == '?' else '{}({})'.format(group, symbol)
|
Yukarumya/Yukarum-Redfoxes
|
taskcluster/taskgraph/util/treeherder.py
|
Python
|
mpl-2.0
| 877
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2013, 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp import models, fields as nfields, api
from openerp import exceptions, _
class purchase_order_line(orm.Model):
    """Add an onchange on price to raise a warning if the line is subject to
    a framework agreement.

    There is too much conflict when overriding onchange methods defined on
    the old API with new-API classes, so this model stays on the old API.
    """
    _inherit = "purchase.order.line"

    def _get_po_store(self, cr, uid, ids, context=None):
        # Store trigger: when a purchase order changes, recompute the related
        # field on all of its order lines.
        res = set()
        po_obj = self.pool.get('purchase.order')
        for row in po_obj.browse(cr, uid, ids, context=context):
            res.update([x.id for x in row.order_line])
        return res

    def _get_po_line_store(self, cr, uid, ids, context=None):
        # Store trigger for the line model itself: recompute the same ids.
        return ids

    # (function, watched fields, priority) tuples consumed by the related
    # field's ``store`` parameter below.
    _store_tuple = (_get_po_store, ['framework_agreement_id'], 20)
    _line_store_tuple = (_get_po_line_store, [], 20)

    _columns = {
        # Agreement of the parent order, denormalized (stored) on the line so
        # it can be searched/grouped without joining the order.
        'framework_agreement_id': fields.related(
            'order_id',
            'framework_agreement_id',
            type='many2one',
            readonly=True,
            store={'purchase.order': _store_tuple,
                   'purchase.order.line': _line_store_tuple},
            relation='framework.agreement',
            string='Agreement'
        )
    }

    def _currency_get(self, cr, uid, pricelist_id, context=None):
        """Retrieve the currency record of the given pricelist."""
        return self.pool['product.pricelist'].browse(
            cr, uid,
            pricelist_id,
            context=context).currency_id

    def _onchange_price(self, cr, uid, ids, price, agreement_id,
                        currency=None, qty=0, context=None):
        """Raise a warning if an agreed price is changed on the observed
        object; returns {} when the price matches or no agreement applies."""
        if context is None:
            context = {}
        # 'no_chained' lets callers suppress the check explicitly.
        if not agreement_id or context.get('no_chained'):
            return {}
        agr_obj = self.pool['framework.agreement']
        agreement = agr_obj.browse(cr, uid, agreement_id, context=context)
        if agreement.get_price(qty, currency=currency) != price:
            msg = _(
                "You have set the price to %s \n"
                " but there is a running agreement"
                " with price %s") % (
                price, agreement.get_price(qty, currency=currency)
            )
            raise exceptions.Warning(msg)
        return {}

    def onchange_price(self, cr, uid, ids, price, agreement_id,
                       qty, pricelist_id, product_id, context=None):
        """Raise a warning if an agreed price is changed (public onchange)."""
        if not product_id or not agreement_id:
            return {}
        currency = self._currency_get(cr, uid, pricelist_id, context=context)
        product = self.pool['product.product'].browse(
            cr, uid, product_id, context=context)
        # Services are never covered by agreements.
        if product.type == 'service':
            return {}
        # NOTE(review): context=None (not the incoming context) is forwarded
        # here, which also disables the 'no_chained' escape — confirm this is
        # intentional.
        return self._onchange_price(cr, uid, ids, price,
                                    agreement_id, currency=currency,
                                    qty=qty, context=None)

    def onchange_product_id(self, cr, uid, ids, pricelist_id, product_id,
                            qty, uom_id, partner_id, date_order=False,
                            fiscal_position_id=False,
                            date_planned=False, name=False,
                            price_unit=False, state='draft', context=None):
        """We override this function to check qty change (I know...)

        The price retrieval is managed by the override of
        product.pricelist.price_get that is overridden to support agreements.
        This is maybe a faulty design as it has a low-level impact.
        We use web_context_tunnel to keep the original signature.
        """
        # NOTE(review): assumes context is a dict; a None context would raise
        # an AttributeError here — confirm callers always pass one.
        agreement_id = context.get('agreement_id')
        # rock n'roll
        res = super(purchase_order_line, self).onchange_product_id(
            cr,
            uid,
            ids,
            pricelist_id,
            product_id,
            qty,
            uom_id,
            partner_id,
            date_order=date_order,
            fiscal_position_id=fiscal_position_id,
            date_planned=date_planned,
            name=name,
            price_unit=price_unit,
            context=context
        )
        if not product_id or not agreement_id:
            return res
        product = self.pool['product.product'].browse(
            cr, uid,
            product_id,
            context=context
        )
        if product.type != 'service' and agreement_id:
            agreement = self.pool['framework.agreement'].browse(
                cr, uid,
                agreement_id,
                context=context
            )
            # Agreements are defined against the product template.
            if agreement.product_id.id != product.product_tmpl_id.id:
                raise exceptions.Warning(_('Product not in agreement'))
            currency = self._currency_get(
                cr, uid,
                pricelist_id,
                context=context
            )
            # Force the agreed price onto the line.
            res['value']['price_unit'] = agreement.get_price(
                qty,
                currency=currency
            )
        return res
class purchase_order(models.Model):
    """Purchase order extension for framework agreements.

    Adds a many2one to ``framework.agreement`` plus onchange handlers that
    validate the agreement and warn the user when agreement-related fields
    (agreement, pricelist, date, supplier) change on an existing order.
    """

    _inherit = "purchase.order"

    framework_agreement_id = nfields.Many2one(
        'framework.agreement',
        'Agreement'
    )

    @api.model
    def _currency_get(self, pricelist_id):
        """Get a currency from a pricelist"""
        return self.env['product.pricelist'].browse(
            pricelist_id).currency_id

    @api.onchange('framework_agreement_id')
    def onchange_agreement(self):
        """Validate the chosen agreement (date, supplier) and always warn
        that existing order lines are not recomputed."""
        res = {}
        # Nothing to validate on a not-yet-saved record.
        if isinstance(self.id, models.NewId):
            return res
        if self.framework_agreement_id:
            agreement = self.framework_agreement_id
            if not agreement.date_valid(self.date_order):
                raise exceptions.Warning(
                    _('Invalid date '
                      'Agreement and purchase date does not match')
                )
            # Bug fix: compare ids on both sides — the original compared an
            # integer id with a record, which never matched.
            if agreement.supplier_id.id != self.partner_id.id:
                raise exceptions.Warning(
                    _('Invalid agreement '
                      'Agreement and supplier does not match')
                )
            # Deliberate unconditional notice: any agreement change leaves
            # already-existing order lines untouched.
            raise exceptions.Warning(
                _('Agreement Warning! '
                  'If you change the agreement of this order'
                  ' (and eventually the currency),'
                  ' existing order lines will not be updated.')
            )
        return res

    @api.multi
    def onchange_pricelist(self, pricelist_id):
        """We use web_context_tunnel to keep the original signature"""
        res = super(purchase_order, self).onchange_pricelist(
            pricelist_id,
        )
        if not pricelist_id or not self._context.get('order_line_ids'):
            return res
        if self.framework_agreement_id:
            raise exceptions.Warning(
                _('If you change the pricelist of this order'
                  ' (and eventually the currency),'
                  ' prices of existing order lines will not be updated.')
            )
        return res

    @api.model
    def _date_valid(self):
        """Predicate: is the order date within the agreement's validity?"""
        return self.framework_agreement_id.date_valid(self.date_order)

    @api.onchange('date_order')
    def onchange_date(self):
        """Check that date is in agreement bound"""
        if not self.framework_agreement_id:
            return {}
        # Bug fix: _date_valid() takes no explicit arguments (it reads the
        # agreement and date from the record itself); the original passed two
        # positional arguments, which raised a TypeError at runtime.
        if not self._date_valid():
            raise exceptions.Warning(
                _('Invalid date '
                  'Agreement and purchase date does not match')
            )
        return {}

    @api.multi
    def onchange_partner_id(self, partner_id):
        """Override to ensure that the partner can not be changed when the
        PO is linked to an agreement.

        We use web_context_tunnel in order to keep the original signature.
        """
        res = super(purchase_order, self).onchange_partner_id(
            partner_id
        )
        if self._context.get('agreement_id'):
            raise exceptions.Warning(
                _('You cannot change the supplier: '
                  'the PO is linked to an agreement')
            )
        return res
|
eLBati/purchase-workflow
|
framework_agreement/model/purchase.py
|
Python
|
agpl-3.0
| 9,354
|
import sys, os

# Make the bundled EduTK library importable from either the current or the
# parent directory.
sys.path.append("./EduTK.zip")
sys.path.append("../EduTK.zip")

# Inside the epeios development environment, prefer the source-tree version.
if ('Q37_XPP' in os.environ):
    sys.path.append(os.path.join(os.environ["HOME"],"epeios/other/libs/edutk/PYH/edutk"))

import edutk as _
from edutk import Core

# Label of the user-overridable "Hello" function slot.
F_HELLO = "Hello"

# Register the user function slot ('uf' prefix) and the i18n label enum.
_.defineUserItem(globals(), "uf", F_HELLO)
_.setEnums(globals(),"label",("MissingName", "NameToDisplay"))
def clear():
    # Reset the page's "output" area to an empty element.
    _.dom().setLayout("output", "<span/>")
def display(text):
    """Append *text* as an <h1> heading to the page's output area."""
    heading = _.Atlas.createHTML()
    heading.putTagAndValue("h1", text)
    _.dom().appendLayout("output", heading)
def _acConnect(c, dom):
    # Connection handler: inject the (localized) workshop body into the page
    # and give focus to the input field.
    dom.setLayout("", _.read(os.path.join("workshop", "Body.html"), c.body_i18n))
    dom.focus("input")
def _acSubmit(c, dom):
    """Handle the Submit action: forward the trimmed user input to the
    user-defined Hello function, or show an alert when the field is empty.

    Bug fix: the original computed the stripped value but then re-read the
    raw (untrimmed) field content when calling the user function; the
    stripped value is now used. The local ``input`` (which shadowed the
    builtin) was renamed.
    """
    text = dom.getContent("input").strip()
    if len(text) != 0:
        ufHello()(text)
        dom.setContent( "input", "")
        dom.removeClass("output", "hidden")
    else:
        dom.alert(c.i18n[label.MissingName])
        dom.setContent("input", "")
    dom.focus("input")
def main(callback, globals, userFunctionLabels, title):
    # Entry point: bind the user-supplied Hello function and launch the
    # Atlas UI with the workshop's action handlers.
    # NOTE(review): the 'globals' parameter shadows the builtin of the same
    # name; callers pass their module globals() here.
    # Uncomment to display exceptions in terminal,
    # instead of being displayed in an alert box.
    #_.useRegularExceptions()
    _.assignUserItems((F_HELLO,), globals, userFunctionLabels)
    _.main( os.path.join("workshop", "Head.html"), callback, {
        "": _acConnect,
        "Submit": _acSubmit,
        }, title
    )
|
epeios-q37/epeios
|
other/exercises/base/workshop/core.py
|
Python
|
agpl-3.0
| 1,349
|
from django.conf import settings
from tempfile import mkdtemp, mkstemp
import os
import subprocess
import shutil
import badges.models
class BadgeCreatorError(Exception):
    """Raised when badge generation fails; optionally carries the pdflatex
    output for diagnostics."""

    def __init__(self, value, latex_output=None):
        Exception.__init__(self, value, latex_output)
        self.value = value
        self.latex_output = latex_output

    def __str__(self):
        return repr(self.value)
class BadgeCreator:
    """Render helper badges to a PDF by filling a LaTeX template and
    running pdflatex in a private temporary directory."""

    def __init__(self, badgesettings):
        self.settings = badgesettings

        self.columns = self.settings.columns
        self.rows = self.settings.rows

        # list of badges (dict with attributes)
        self.badges = []

        # create temporary files
        self.dir = mkdtemp(dir=settings.TMP_ROOT, prefix="badges_")
        self.latex_file, self.latex_file_path = mkstemp(suffix='.tex', dir=self.dir)

        # we copy the photos and background images to the temporary directory
        # pdflatex is only allowed to include files from there
        self.dir_photos = os.path.join(self.dir, 'photos')
        os.mkdir(self.dir_photos, mode=0o700)
        self.dir_backgrounds = os.path.join(self.dir, 'backgrounds')
        os.mkdir(self.dir_backgrounds, mode=0o700)

        # prevent that the same file is copied multiple times
        self._copied_files = []

    def add_badge(self, badge):
        """Collect the LaTeX key/value data for one badge (texts, colors,
        photo/background files, barcode id and permission flags)."""
        design = badge.get_design()
        role = badge.get_role()

        tmp = {
            # texts
            'firstname': self._latex_escape(badge.get_firstname_text()),
            'surname': self._latex_escape(badge.get_surname_text()),
            'job': self._latex_escape(badge.get_job_text()),
            'shift': self._latex_escape(badge.get_shift_text(self.settings)),
            'role': self._latex_escape(badge.get_role_text(self.settings)),
            'photo': '',  # filled later
            'fontcolor': self._latex_color(design.font_color),
            'bgcolor': self._latex_color(design.bg_color),
            'bgfront': '',  # filled later
            'bgback': '',  # filled later
            'id': '',  # filled later (= barcode)
            'roleid': role.latex_name,
        }

        # copy photo
        if badge.photo:
            tmp['photo'] = self._copy_photo(badge.photo.path)

        # design
        if design.bg_front:
            tmp['bgfront'] = self._copy_background(design.bg_front.path)
        if design.bg_back:
            tmp['bgback'] = self._copy_background(design.bg_back.path)

        # badge id
        if self.settings.barcodes:
            tmp['id'] = "%010d" % badge.barcode

        # permissions: one true/false flag per permission defined in settings
        all_permissions = badges.models.BadgePermission.objects.filter(badge_settings=self.settings.pk).all()
        selected_permissions = role.permissions
        for perm in all_permissions:
            if selected_permissions.filter(pk=perm.pk).exists():
                tmp['perm-%s' % perm.latex_name] = 'true'
            else:
                tmp['perm-%s' % perm.latex_name] = 'false'

        self.badges.append(tmp)

    def generate(self):
        """Write the filled template and run pdflatex.

        Returns (tmp_dir, pdf_path); raises BadgeCreatorError (with the
        pdflatex output when available) on failure.
        """
        latex_code = self._get_latex()

        # read template
        try:
            f = self.settings.latex_template
            f.open('r')
            template = f.read()
            f.close()
        except IOError as e:
            raise BadgeCreatorError("Cannot open file \"%s\": %s" %
                                    (self.settings.latex_template.path,
                                     str(e)))

        # replace '%BADGEDATA%'
        latex = template.replace('%BADGEDATA%', latex_code)

        # write code
        try:
            f = os.fdopen(self.latex_file, 'w')
            f.write(latex)
            f.close()
        except IOError as e:
            raise BadgeCreatorError("Cannot write to file \"%s\": %s" %
                                    (self.latex_file_path, str(e)))

        # debug
        if settings.BADGE_TEMPLATE_DEBUG_FILE:
            shutil.copyfile(self.latex_file_path, settings.BADGE_TEMPLATE_DEBUG_FILE)

        # call pdflatex
        try:
            # only allow read in the directory of the tex file (and write, but this is default)
            # Bug fix: work on a *copy* of os.environ so the parent process
            # environment is not permanently mutated, and pass it explicitly
            # to the child process.
            env = os.environ.copy()
            env["openin_any"] = "p"
            env["openout_any"] = "p"
            env["TEXMFOUTPUT"] = self.dir
            subprocess.check_output([settings.BADGE_PDFLATEX,
                                     "-halt-on-error",
                                     "-no-shell-escape",
                                     "-output-directory", self.dir,
                                     os.path.basename(self.latex_file_path)],
                                    cwd=self.dir,
                                    env=env)
        except subprocess.CalledProcessError as e:
            raise BadgeCreatorError("PDF generation failed", e.output.decode('utf8'))

        # return path to pdf
        pdf_filename = "%s.pdf" % os.path.splitext(self.latex_file_path)[0]
        return self.dir, pdf_filename

    def finish(self):
        """Remove the temporary working directory (idempotent)."""
        if os.path.isdir(self.dir):
            shutil.rmtree(self.dir)

    def _get_latex(self):
        """Build the LaTeX body: one front table and one (mirrored) back
        table per page of columns*rows badges."""
        # whitespace, if code would be empty
        if len(self.badges) == 0:
            return r'\ '

        r = ''

        # number of badges on one page
        num_page = self.columns*self.rows

        page = 1
        while (page-1)*num_page < len(self.badges):
            # helper for this page
            data_for_page = self.badges[(page-1)*num_page:page*num_page]

            # front side
            r = r + self._create_table('badgefront', data_for_page)

            # back (rows reversed so duplex printing lines up)
            r = r + self._create_table('badgeback', data_for_page, True)

            # next page
            page = page + 1

        return r

    def _create_badge_side(self, latex_command, helper_data):
        """Render one badge as \\command[key=value,...]."""
        data = ",".join(["%s=%s" % (key, helper_data[key]) for key in
                         helper_data])
        template = r'\%s[%s]' % (latex_command, data)
        return template

    def _create_table(self, latex_command, helpers_data, reverse_rows=False):
        """Render one page of badges as a LaTeX tabular followed by a
        pagebreak; reverse_rows mirrors each row for the back side."""
        r = ''

        # begin of table
        r = r + r'\begin{tabular}{|l|l|}' + "\n"
        r = r + r'\hline' + "\n"

        # add rows until all helpers were added
        row = 1
        while (row-1)*self.columns < len(helpers_data):
            # get helpers for this row
            data_for_row = helpers_data[(row-1)*self.columns:row*self.columns]
            latex_for_row = [self._create_badge_side(latex_command, h) for h in
                             data_for_row]

            # fill row if necessary
            while len(latex_for_row) < self.columns:
                latex_for_row.append("")

            # reverse?
            if reverse_rows:
                latex_for_row.reverse()

            # insert ' & ' between items, add '\\' and linebreak
            latex_row = ' & '.join(latex_for_row) + r' \\' + "\n"

            # add to result
            r = r + latex_row

            # add hline
            r = r + r'\hline' + "\n"

            # next row
            row = row + 1

        # end of table
        r = r + r'\end{tabular}' + "\n"

        # page break
        r = r + "\n" + r'\pagebreak' + "\n\n"

        return r

    def _latex_color(self, string):
        """Convert an HTML color to the form LaTeX expects: no '#', upper."""
        if string.startswith('#'):
            string = string[1:]
        return string.upper()

    def _latex_escape(self, string):
        """Escape LaTeX special characters and wrap the result in braces.

        Backslash must be handled first so later replacements don't touch
        the escape sequences it introduces.
        """
        string = string.replace('\\', r'\textbackslash ')
        string = string.replace(r' ', r'\ ')
        string = string.replace(r'&', r'\&')
        string = string.replace(r'%', r'\%')
        string = string.replace(r'$', r'\$')
        string = string.replace(r'#', r'\#')
        string = string.replace(r'_', r'\_')
        string = string.replace(r'{', r'\{')
        string = string.replace(r'}', r'\}')
        string = string.replace(r'~', r'\textasciitilde ')
        string = string.replace(r'^', r'\textasciicircum ')
        return '{' + string + '}'

    def _copy_photo(self, src_path):
        return self._copy_file(src_path, self.dir_photos)

    def _copy_background(self, src_path):
        return self._copy_file(src_path, self.dir_backgrounds)

    def _copy_file(self, src_path, dest_folder):
        """Copy src_path into dest_folder once; return the path relative to
        the working directory (as needed by the LaTeX template)."""
        filename = os.path.basename(src_path)
        dest_path = os.path.join(dest_folder, filename)
        if src_path not in self._copied_files:
            shutil.copyfile(src_path, dest_path)
            self._copied_files.append(src_path)
        return os.path.relpath(dest_path, self.dir)
|
helfertool/helfertool
|
src/badges/creator.py
|
Python
|
agpl-3.0
| 8,562
|
from coaster.utils import classmethodproperty
from coaster.views import ClassView
class AdminView(ClassView):
    """Base class for tabbed admin views."""

    @classmethodproperty
    def tabs(cls):  # noqa: N805
        # Collect every view flagged as a tab, keyed by its sort index.
        indexed = []
        for name in cls.__views__:
            view = getattr(cls, name)
            if view.data.get('tab'):
                indexed.append((view.data.get('index', 0), name, view))
        indexed.sort()
        return ((name, view.data['title'], view) for _index, name, view in indexed)

    @property
    def current_tab(self):
        # Name of the handler currently serving the request.
        return self.current_handler.name
|
hasgeek/hasjob
|
hasjob/views/admin.py
|
Python
|
agpl-3.0
| 619
|
# -*- coding: utf-8 -*-
# Odoo/OpenERP module manifest for the Zoo Animals Management addon.
{
    'name': "Zoo Animals Management",
    'summary': "Manage animals in a ZOO.",
    'author': "Ludwik Trammer",
    'website': "https://github.com/ludwiktrammer/odoo-zoo",
    'category': "Specific Industry Applications",
    'version': "1.0",
    # Listed as a standalone application in the apps list.
    'application': True,
    # Hook (defined in the module) called right after the module is loaded.
    'post_load': 'print_on_load',
    # Modules that must be installed first.
    'depends': [
        'base',
    ],
    # XML data files loaded on install/update, in order.
    'data': [
        'views/animal.xml',
        'menu.xml',
    ],
}
|
ludwiktrammer/odoo-zoo
|
addons/zoo_animals/__openerp__.py
|
Python
|
agpl-3.0
| 441
|
import infomap

im = infomap.Infomap("--two-level --verbose")

# Set the start id for bipartite nodes
im.bipartite_start_id = 5

# Feature nodes 5 and 6 connect to the ordinary nodes 0-4.
# Add weight as an optional third argument
for source, target in ((5, 0), (5, 1), (5, 2), (6, 2), (6, 3), (6, 4)):
    im.add_link(source, target)

im.run()

print(f"Found {im.num_top_modules} modules with codelength: {im.codelength}")

print("\n#node flow:")
for node in im.nodes:
    print(node.node_id, node.flow)
|
mapequation/infomap
|
examples/python/bipartite.py
|
Python
|
agpl-3.0
| 450
|
from openerp.osv import fields, osv
from dateutil import parser
import re
import calendar
import openerp.tools
import openerp.addons.decimal_precision as dp
import amount_to_text_softapps
import time
from datetime import date
from datetime import datetime
from datetime import timedelta
from openerp.addons.hr_payroll import hr_payroll
class hr_payroll_register(osv.osv):
    # Extends hr.payroll.register with an 'annual' flag; the payslip
    # computation below uses it to decide whether annual salary heads
    # are included in the sheet.
    _name = 'hr.payroll.register'
    _description = 'Payroll Register'
    _columns = {'annual':fields.boolean('Annual payments')}
hr_payroll_register()
class hr_payslip(osv.osv):
    # Edumedia payslip customizations: extra computed totals (total_pay and
    # its amount-in-words), extra journals, and attendance-driven
    # unpaid-leave deductions in compute_sheet().  Python 2 / old OpenERP
    # API (cr, uid, ids) throughout.
    _inherit = 'hr.payslip'
    def _calculate(self, cr, uid, ids, field_names, arg, context=None):
        # Extend the standard computation: total_pay = other_pay + net, and
        # render a non-negative total as words ("Rupees ... Only").
        res = super(hr_payslip, self)._calculate(cr, uid, ids,field_names,context)
        for case in self.browse(cr,uid,ids):
            res[case.id]['total_pay'] = res[case.id]['other_pay'] + res[case.id]['net']
            if res[case.id]['total_pay'] >= 0 :
                res[case.id]['amt_words'] = ' Rupees ' + amount_to_text_softapps._100000000_to_text(int(round(res[case.id]['total_pay']))) + ' Only'
        return res
    # Leave counters plus function fields that all share the _calculate
    # multi-computation above.
    _columns = {'paid_lv':fields.float('Paid Leaves'),
                'unpaid_lv':fields.float('UnPaid Leaves'),
                'grows': fields.function(_calculate, method=True, store=True, multi='dc', string='Gross Salary', digits_compute=dp.get_precision('Account')),
                'net': fields.function(_calculate, method=True, store=True, multi='dc', string='Net Salary', digits_compute=dp.get_precision('Account')),
                'allounce': fields.function(_calculate, method=True, store=True, multi='dc', string='Allowance', digits_compute=dp.get_precision('Account')),
                'deduction': fields.function(_calculate, method=True, store=True, multi='dc', string='Deduction', digits_compute=dp.get_precision('Account')),
                'other_pay': fields.function(_calculate, method=True, store=True, multi='dc', string='Others', digits_compute=dp.get_precision('Account')),
                'total_pay': fields.function(_calculate, method=True, store=True, multi='dc', string='Total Payment', digits_compute=dp.get_precision('Account')),
                'amt_words': fields.function(_calculate, method=True, multi='dc', string="Amount in Words", type='text'),
                'journal_id': fields.many2one('account.journal', 'Expense Journal',readonly=True, states={'draft': [('readonly', False)]}),
                'bank_journal_id': fields.many2one('account.journal', 'Bank Journal',readonly=True, states={'draft': [('readonly', False)]}),
                'line_ids':fields.one2many('hr.payslip.line', 'slip_id', 'Payslip Line', required=False, readonly=True, states={'draft': [('readonly', False)]}),
                }
    # _order = 'date desc'
    # def compute_sheet(self, cr, uid, ids, context=None):
    # atten_obj = self.pool.get('hr.attendance')
    # cont_obj = self.pool.get('hr.contract')
    # pay_line_obj = self.pool.get('hr.payslip.line')
    # holstat_obj = self.pool.get('hr.holidays.status')
    # payreg_obj = self.pool.get('hr.payroll.register')
    # salhead_obj = self.pool.get('hr.allounce.deduction.categoty')
    # resval={}
    # pln_ids = []
    # wrkedays = paiday = unpaiday = leave_ded = 0
    # res = super(hr_payslip, self).compute_sheet(cr, uid, ids,context)
    #
    # for case in self.browse(cr,uid,ids):
    # if case.register_id and case.line_ids:
    ## val = payreg_obj.browse(cr,uid,case.register_id.id).annual
    ## val =
    ## if not case.register_id.annual:
    ## salhead_ids = salhead_obj.search(cr,uid,[('annual','=',True)])
    ## for sl in salhead_ids:
    ## payln_ids = pay_line_obj.search(cr,uid,[('category_id','=',sl),('slip_id','=',case.id)])
    ## pay_line_obj.unlink(cr,uid,payln_ids)
    # holstat_id = holstat_obj.search(cr,uid,[('type','=','unpaid')], limit=1)
    # salhd_id = holstat_obj.browse(cr,uid,holstat_id)
    #
    # if salhd_id.head_id.id:
    # pln_ids = pay_line_obj.search(cr,uid,[('category_id','=',salhd_id.head_id.id),('slip_id','=',case.id)],limit=1)
    #
    # pay_slip_date = ((parser.parse(''.join((re.compile('\d')).findall(case.date))))).strftime("%Y-%m-%d")
    # total_sal = case.basic + case.allounce
    # mon = ((parser.parse(''.join((re.compile('\d')).findall(case.date))))).strftime("%m")
    #
    # cr.execute("""select id,state
    # from ed_attendance
    # where extract(MONTH from log_date ::date) = '%s'
    # and employee_id = %d
    # """%(mon,case.employee_id))
    # all_days = cr.dictfetchall()
    # for d in all_days:
    # if d['state'] in('present','paid','holiday','time_off'):
    # wrkedays += 1
    # if d['state'] == 'paid':
    # paiday += 1
    # if d['state'] == 'unpaid':
    # unpaiday += 1
    # resval['worked_days'] = wrkedays
    # resval['paid_lv'] = paiday
    # resval['unpaid_lv'] = unpaiday
    # if total_sal and case.working_days:
    # leave_ded = (total_sal/case.working_days) * unpaiday
    # resval['leaves'] = leave_ded
    # pay_line_obj.write(cr,uid,pln_ids,{'amount':leave_ded})
    # self.write(cr,uid,ids,resval)
    # return res
    def compute_sheet(self, cr, uid, ids, context=None):
        # Full payslip computation: rebuilds the slip lines from the salary
        # structure, derives the basic from gross/net wage types, applies
        # holiday/leave deductions, then overlays attendance-based
        # paid/unpaid day counts from the ed_attendance table.
        func_pool = self.pool.get('hr.payroll.structure')
        slip_line_pool = self.pool.get('hr.payslip.line')
        holiday_pool = self.pool.get('hr.holidays')
        sequence_obj = self.pool.get('ir.sequence')
        salhead_obj = self.pool.get('hr.allounce.deduction.categoty')
        holstat_obj = self.pool.get('hr.holidays.status')
        payreg_obj = self.pool.get('hr.payroll.register')
        # Heads flagged 'annual' are excluded from non-annual registers below.
        category_ids = salhead_obj.search(cr,uid,[('annual','=',True)])
        if context is None:
            context = {}
        date = self.read(cr, uid, ids, ['date'], context=context)[0]['date']
        #Check for the Holidays
        def get_days(start, end, month, year, calc_day):
            # Count how many times weekday calc_day occurs in [start, end).
            import datetime
            count = 0
            for day in range(start, end):
                if datetime.date(year, month, day).weekday() == calc_day:
                    count += 1
            return count
        for slip in self.browse(cr, uid, ids, context=context):
            # Start from a clean slate: drop previously computed lines.
            old_slip_ids = slip_line_pool.search(cr, uid, [('slip_id','=',slip.id)], context=context)
            slip_line_pool.unlink(cr, uid, old_slip_ids, context=context)
            update = {}
            ttyme = datetime.fromtimestamp(time.mktime(time.strptime(slip.date,"%Y-%m-%d")))
            contracts = self.get_contract(cr, uid, slip.employee_id, date, context)
            if contracts.get('id', False) == False:
                # No running contract: reset the slip and move on.
                # NOTE(review): 'tools' is referenced but only
                # 'openerp.tools' is imported at module level — confirm a
                # 'tools' binding exists at runtime.
                update.update({
                    'basic': round(0.0),
                    'basic_before_leaves': round(0.0),
                    'name':'Salary Slip of %s for %s' % (slip.employee_id.name, tools.ustr(ttyme.strftime('%B-%Y'))),
                    'state':'draft',
                    'contract_id':False,
                    'company_id':slip.employee_id.company_id.id
                })
                self.write(cr, uid, [slip.id], update, context=context)
                continue
            contract = slip.employee_id.contract_id
            sal_type = contract.wage_type_id.type
            function = contract.struct_id.id
            lines = []
            if function:
                func = func_pool.read(cr, uid, function, ['line_ids'], context=context)
                print "before", func
                print 'annual',slip.register_id.annual
                if not slip.register_id.annual:
                    # Drop structure lines belonging to annual heads when the
                    # register is not an annual one.
                    sliplnids = slip_line_pool.search(cr, uid, [('function_id','=', function), ('category_id','in', category_ids)])
                    for sl in sliplnids:
                        func['line_ids'].remove(sl)
                lines = slip_line_pool.browse(cr, uid, func['line_ids'], context=context)
                print "after", func
            #lines += slip.employee_id.line_ids
            ad = []
            all_per = ded_per = all_fix = ded_fix = 0.0
            # 'obj' is the evaluation namespace for line conditions/bases.
            obj = {'basic':0.0}
            if contract.wage_type_id.type == 'gross':
                obj['gross'] = contract.wage
                update['igross'] = contract.wage
            if contract.wage_type_id.type == 'net':
                obj['net'] = contract.wage
                update['inet'] = contract.wage
            if contract.wage_type_id.type == 'basic':
                obj['basic'] = contract.wage
                update['basic'] = contract.wage
            for line in lines:
                cd = line.code.lower()
                obj[cd] = line.amount or 0.0
            for line in lines:
                # Process each salary head at most once (by category code).
                if line.category_id.code in ad:
                    continue
                ad.append(line.category_id.code)
                cd = line.category_id.code.lower()
                calculate = False
                try:
                    # NOTE(review): eval() on stored condition strings —
                    # trusted-admin data only.
                    exp = line.category_id.condition
                    calculate = eval(exp, obj)
                except Exception, e:
                    raise osv.except_osv(_('Variable Error !'), _('Variable Error: %s ') % (e))
                if not calculate:
                    continue
                percent = value = 0.0
                base = False
#                company_contrib = 0.0
                base = line.category_id.base
                try:
                    #Please have a look at the configuration guide.
                    amt = eval(base, obj)
                except Exception, e:
                    raise osv.except_osv(_('Variable Error !'), _('Variable Error: %s ') % (e))
                if sal_type in ('gross', 'net'):
                    # Percentage lines accumulate into all_per/ded_per so the
                    # basic can be solved backwards from gross/net.
                    if line.amount_type == 'per':
                        percent = line.amount
                        if amt > 1:
                            value = percent * amt
                        elif amt > 0 and amt <= 1:
                            percent = percent * amt
                        if value > 0:
                            percent = 0.0
                    elif line.amount_type == 'fix':
                        value = line.amount
                    elif line.amount_type == 'func':
                        value = slip_line_pool.execute_function(cr, uid, line.id, amt, context)
                        line.amount = value
                else:
                    if line.amount_type in ('fix', 'per'):
                        value = line.amount
                    elif line.amount_type == 'func':
                        value = slip_line_pool.execute_function(cr, uid, line.id, amt, context)
                        line.amount = value
                if line.type == 'allowance':
                    all_per += percent
                    all_fix += value
                elif line.type == 'deduction':
                    ded_per += percent
                    ded_fix += value
                vals = {
                    'amount':line.amount,
                    'slip_id':slip.id,
                    'employee_id':False,
                    'function_id':False,
                    'base':base
                }
                slip_line_pool.copy(cr, uid, line.id, vals, {})
            if sal_type in ('gross', 'net'):
                # Solve basic from the wage given the accumulated fixed and
                # percentage allowances/deductions.
                sal = contract.wage
                if sal_type == 'net':
                    sal += ded_fix
                sal -= all_fix
                per = 0.0
                if sal_type == 'net':
                    per = (all_per - ded_per)
                else:
                    per = all_per
                if per <=0:
                    per *= -1
                final = (per * 100) + 100
                basic = (sal * 100) / final
            else:
                basic = contract.wage
            number = sequence_obj.get(cr, uid, 'salary.slip')
            update.update({
                'deg_id':function,
                'number':number,
                'basic': round(basic),
                'basic_before_leaves': round(basic),
                'name':'Salary Slip of %s for %s' % (slip.employee_id.name, tools.ustr(ttyme.strftime('%B-%Y'))),
                'state':'draft',
                'contract_id':contract.id,
                'company_id':slip.employee_id.company_id.id
            })
            # Copy employee-specific extra lines onto the slip.
            # NOTE(review): 'base' here is whatever value the previous loop
            # left behind — confirm this carry-over is intended.
            for line in slip.employee_id.line_ids:
                vals = {
                    'amount':line.amount,
                    'slip_id':slip.id,
                    'employee_id':False,
                    'function_id':False,
                    'base':base
                }
                slip_line_pool.copy(cr, uid, line.id, vals, {})
            self.write(cr, uid, [slip.id], update, context=context)
        # Second pass: holidays/leaves and attendance adjustments.
        for slip in self.browse(cr, uid, ids, context=context):
            if not slip.contract_id:
                continue
            basic_before_leaves = slip.basic
            working_day = 0
            off_days = 0
            dates = hr_payroll.prev_bounds(slip.date)
            days_arr = [0, 1, 2, 3, 4, 5, 6]
            # Weekly off days = weekdays beyond the contract's working days.
            for dy in range(slip.employee_id.contract_id.working_days_per_week, 7):
                off_days += get_days(1, dates[1].day, dates[1].month, dates[1].year, days_arr[dy])
            total_off = off_days
            working_day = dates[1].day - total_off
            perday = slip.net / working_day
            total = 0.0
            leave = 0.0
            leave_ids = self._get_leaves(cr, uid, slip, slip.employee_id, context)
            total_leave = 0.0
            paid_leave = 0.0
            for hday in holiday_pool.browse(cr, uid, leave_ids, context=context):
                if not hday.holiday_status_id.head_id:
                    raise osv.except_osv(_('Error !'), _('Please check configuration of %s, payroll head is missing') % (hday.holiday_status_id.name))
                res = {
                    'slip_id':slip.id,
                    'name':hday.holiday_status_id.name + '-%s' % (hday.number_of_days),
                    'code':hday.holiday_status_id.code,
                    'amount_type':'fix',
                    'category_id':hday.holiday_status_id.head_id.id,
                    'sequence':hday.holiday_status_id.head_id.sequence
                }
                days = hday.number_of_days
                if hday.number_of_days < 0:
                    days = hday.number_of_days * -1
                total_leave += days
                if hday.holiday_status_id.type == 'paid':
                    # Paid leave: counted but never deducted.
                    paid_leave += days
                    continue
#                    res['name'] = hday.holiday_status_id.name + '-%s' % (days)
#                    res['amount'] = perday * days
#                    res['type'] = 'allowance'
#                    leave += days
#                    total += perday * days
                elif hday.holiday_status_id.type == 'halfpaid':
                    # Half-paid: deduct half of the per-day rate.
                    paid_leave += (days / 2)
                    res['name'] = hday.holiday_status_id.name + '-%s/2' % (days)
                    res['amount'] = perday * (days/2)
                    total += perday * (days/2)
                    leave += days / 2
                    res['type'] = 'deduction'
                else:
                    # Unpaid: deduct the full per-day rate.
                    res['name'] = hday.holiday_status_id.name + '-%s' % (days)
                    res['amount'] = perday * days
                    res['type'] = 'deduction'
                    leave += days
                    total += perday * days
                slip_line_pool.create(cr, uid, res, context=context)
            # NOTE(review): 'basic' carries over from the first loop's last
            # iteration here — confirm this is the intended source value.
            basic = basic - total
#            leaves = total
            update.update({
                'basic':basic,
                'basic_before_leaves': round(basic_before_leaves),
                'leaves':total,
                'holiday_days':leave,
                'worked_days':working_day - leave,
                'working_days':working_day,
            })
            # Overlay attendance-based counts from the ed_attendance table.
            holstat_id = holstat_obj.search(cr,uid,[('type','=','unpaid')], limit=1)
            if holstat_id:
                salhd_id = holstat_obj.browse(cr,uid,holstat_id[0])
                pln_ids = slip_line_pool.search(cr,uid,[('category_id','=',salhd_id.head_id.id),('slip_id','=',slip.id)],limit=1)
                pay_slip_date = ((parser.parse(''.join((re.compile('\d')).findall(slip.date))))).strftime("%Y-%m-%d")
                total_sal = slip.basic + slip.allounce
                mon = ((parser.parse(''.join((re.compile('\d')).findall(slip.date))))).strftime("%m")
                # NOTE(review): %d is formatted with a browse record
                # (slip.employee_id), not its id — confirm this works as
                # intended on this OpenERP version.
                cr.execute("""select id,state,no_days
                                from ed_attendance
                                where extract(MONTH from log_date ::date) = '%s'
                                and employee_id = %d
                            """%(mon,slip.employee_id))
                all_days = cr.dictfetchall()
                wrkedays = paiday = unpaiday = leave_ded = 0
                for d in all_days:
                    if d['state'] in('present','paid','holiday','time_off'):
                        wrkedays += d['no_days']
                    if d['state'] == 'paid':
                        paiday += d['no_days']
                    if d['state'] == 'unpaid':
                        unpaiday += d['no_days']
                update['worked_days'] = wrkedays
                update['paid_lv'] = paiday
                update['unpaid_lv'] = unpaiday
                if total_sal and slip.working_days:
                    # Deduct unpaid days at the average daily salary.
                    leave_ded = (total_sal/slip.working_days) * unpaiday
                    update['leaves'] = leave_ded
                    slip_line_pool.write(cr,uid,pln_ids,{'amount':leave_ded})
            self.write(cr, uid, [slip.id], update, context=context)
        return True
hr_payslip()
class payment_category(osv.osv):
    # Extends the allowance/deduction category model. Note that _name reuses
    # 'hr.allounce.deduction.categoty' -- including the upstream spelling --
    # so these records share the base payroll module's table.
    _name = 'hr.allounce.deduction.categoty'
    _description = 'Allowance Deduction Heads'
    # 'annual' presumably marks heads paid/deducted once per year rather than
    # monthly -- TODO confirm against the payroll computation that reads it.
    _columns = {'annual':fields.boolean('Annual Payments')}
payment_category()
|
trabacus-softapps/docker-edumedia
|
additional_addons/Edumedia_India/ed_payroll.py
|
Python
|
agpl-3.0
| 18,412
|
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# spamc - Python spamassassin spamc client library
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
spamc: Python spamassassin spamc client library
Copyright 2015, Andrew Colin Kissa
Licensed under AGPLv3+
"""
|
akissa/spamc
|
tests/__init__.py
|
Python
|
agpl-3.0
| 940
|
#
# SilverBot is a Python application to interact with IB's TWS API.
# Copyright (C) 2013 Christopher Jastram <cjastram@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
from ib.ext.Contract import Contract
from ib.ext.EWrapper import EWrapper
from ib.ext.EClientSocket import EClientSocket
from ib.ext.ExecutionFilter import ExecutionFilter
def showmessage(message, mapping):
    """Print *message* followed by the sorted key/value pairs of *mapping*.

    Debug helper, typically invoked as ``showmessage('event', vars())`` from
    EWrapper callbacks; the ``self`` entry injected by ``vars()`` is removed
    first so only the callback arguments are shown.
    """
    # Drop the bound instance inserted by vars(); ignore if absent.
    mapping.pop('self', None)
    print('### {}'.format(message))
    # dict.items() is an unsortable view in Python 3, so sort via sorted()
    # rather than the Python 2 ``items = d.items(); items.sort()`` idiom,
    # which raised AttributeError here.
    for k, v in sorted(mapping.items()):
        print('    {}:{}'.format(k, v))
#def gen_tick_id():
#i = randint(100, 10000)
#while True:
#yield i
#i += 1
#gen_tick_id = gen_tick_id().next
class Wrapper(EWrapper):
    """EWrapper implementation that connects to a local TWS instance on
    construction and immediately subscribes to market data for "SLV".

    Bid/ask price ticks are persisted through the injected storage object;
    most other TWS callbacks are either silenced (``pass``) or traced via
    showmessage() for debugging.
    """
    orders = None  # NOTE(review): never initialized -- OrderBook creation is commented out in __init__
    order_ids = [0]  # order ids announced by TWS via nextValidId(); seeded with 0
    parameters = None  # unused here; the assignment is commented out in __init__
    connection = None  # EClientSocket created in __init__
    _storage = None  # object providing log_price(side, price)

    def __init__(self, storage):
        """Connect to TWS at localhost:7496 and request SLV market data.

        :param storage: object with a ``log_price(side, price)`` method;
            tickPrice() forwards bid/ask prices to it.
        """
        # Variable initialization
        #self.orders = orders.OrderBook()
        #self.price_log = data.PriceLog()
        #self.parameters = parameters
        self._storage = storage
        self.connection = EClientSocket(self)
        self.connection.eConnect('localhost', 7496, 0) # host, port, clientId
        tick_id = 1
        symbol = "SLV"
        contract = self.makeContract(symbol)
        # Subscribe to streaming data; ticks arrive via tickPrice()/tickSize().
        self.connection.reqMktData(tick_id, contract, [], False)

    def makeContract(self, symbol):
        """Build a US stock Contract (STK / SMART routing / USD) for *symbol*."""
        contract = Contract()
        contract.m_symbol = symbol
        contract.m_secType = 'STK'
        contract.m_exchange = 'SMART'
        contract.m_primaryExch = 'SMART'
        contract.m_currency = 'USD'
        contract.m_localSymbol = symbol
        return contract

    def tickPrice(self, tickerId, field, price, canAutoExecute):
        """Persist bid (field 1) and ask (field 2) ticks to storage."""
        # TWS price field codes:
        # 1 = bid
        # 2 = ask
        # 4 = last
        # 6 = high
        # 7 = low
        # 9 = close
        priceLog = {}  # NOTE(review): unused
        side = ""
        if field == 2:
            self._storage.log_price("ask", price)
        elif field == 1:
            self._storage.log_price("bid", price)
        # NOTE(review): side is never reassigned, so this print can never
        # fire; presumably the branches above were meant to set
        # side = "ask"/"bid" -- confirm intent before relying on it.
        if side != "":
            print(side, price)

    def openOrder(self, orderId, contract, order, state):
        """Record an open order reported by TWS and print its status.

        NOTE(review): self.orders is still None here (OrderBook creation is
        commented out in __init__), so the .add() call below would raise
        AttributeError when an open order actually arrives -- confirm.
        """
        orderId = order.m_orderId
        symbol = contract.m_symbol
        qty = order.m_totalQuantity
        price = order.m_lmtPrice
        action = order.m_action
        self.orders.add(orderId, symbol, qty, price, action)
        order = [orderId, symbol, qty, price, action]
        print("--> Open order:{} Status:{} Warning:{}".format(order, state.m_status, state.m_warningText))

    def error(self, id=None, errorCode=None, errorMsg=None):
        """TWS error callback: code 2104 is only printed, 502 is raised as a
        fatal Exception, anything else is dumped via showmessage()."""
        if errorCode == 2104:
            print("--> {}".format(errorMsg))
        elif errorCode == 502:
            raise Exception(errorMsg)
        else:
            showmessage('error', vars())

    def nextValidId(self, orderId):
        """Remember the next valid order id announced by TWS."""
        self.order_ids.append(orderId)

    def connectionClosed(self):
        """ Something broke, connection lost. """
        print("--> Connection closed, exiting...")
        sys.exit(0)

    def connected(self):
        """ Returns True if connected to TraderWorkstation, otherwise False. """
        return self.connection.m_connected

    # Remaining EWrapper callbacks: either intentionally silenced (pass) or
    # traced with showmessage() for debugging.
    def tickSize(self, tickerId, field, size): pass #showmessage('tickSize', vars())
    def tickGeneric(self, tickerId, tickType, value): pass #showmessage('tickGeneric', vars())
    def tickString(self, tickerId, tickType, value): pass #showmessage('tickString', vars())
    def tickEFP(self, tickerId, tickType, basisPoints, formattedBasisPoints, impliedFuture, holdDays, futureExpiry, dividendImpact, dividendsToExpiry): showmessage('tickEFP', vars())
    def tickOptionComputation(self, tickerId, field, impliedVolatility, delta): showmessage('tickOptionComputation', vars())
    def orderStatus(self, orderId, status, filled, remaining, avgFillPrice, permId, parentId, lastFillPrice, clientId, whyHeId): pass #showmessage('orderStatus', vars())
    def openOrderEnd(self): showmessage('openOrderEnd', vars())
    def updateAccountValue(self, key, value, currency, accountName): showmessage('updateAccountValue', vars())
    def updatePortfolio(self, contract, position, marketPrice, marketValue, averageCost, unrealizedPNL, realizedPNL, accountName): showmessage('updatePortfolio', vars())
    def updateAccountTime(self, timeStamp): showmessage('updateAccountTime', vars())
    def accountDownloadEnd(self, accountName): showmessage('accountDownloadEnd', vars())
    def contractDetails(self, contractDetails): showmessage('contractDetails', vars())
    def bondContractDetails(self, contractDetails): showmessage('bondContractDetails', vars())
    def contractDetailsEnd(self, reqId): showmessage('contractDetailsEnd', vars())
    def execDetails(self, orderId, contract, execution): showmessage('execDetails', vars())
    def execDetailsEnd(self, reqId): showmessage('execDetailsEnd', vars())
    def error_0(self, strval): showmessage('error_0', vars())
    def error_1(self, strval): showmessage('error_1', vars())
    def updateMktDepth(self, tickerId, position, operation, side, price, size): showmessage('updateMktDepth', vars())
    def updateMktDepthL2(self, tickerId, position, marketMaker, operation, side, price, size): showmessage('updateMktDepthL2', vars())
    def updateNewsBulletin(self, msgId, msgType, message, origExchange): showmessage('updateNewsBulletin', vars())
    def managedAccounts(self, accountsList): pass #showmessage('managedAccounts', vars())
    def receiveFA(self, faDataType, xml): showmessage('receiveFA', vars())
    def historicalData(self, reqId, date, open, high, low, close, volume, count, WAP, hasGaps): showmessage('historicalData', vars())
    def scannerParameters(self, xml): showmessage('scannerParameters', vars())
    def scannerData(self, reqId, rank, contractDetails, distance, benchmark, projection): showmessage('scannerData', vars())
    def scannerDataEnd(self, reqId): showmessage('scannerDataEnd', vars())
    def realtimeBar(self, reqId, time, open, high, low, close, volume, wap, count): showmessage('realtimeBar', vars())
    def currentTime(self, time): showmessage('currentTime', vars())
    def fundamentalData(self, reqId, data): showmessage('fundamentalData', vars())
    def deltaNeutralValidation(self, reqId, underComp): showmessage('deltaNeutralValidation', vars())
    def tickSnapshotEnd(self, reqId): showmessage('tickSnapshotEnd', vars())
    def marketDataType(self, reqId, marketDataType): showmessage('marketDataType', vars())
    def commissionReport(self, commissionReport): showmessage('commissionReport', vars())
#class App:
#parameters = None
#def __init__(self, host='localhost', port=7496, clientId=0):
#self.host = host
#self.port = port
#self.clientId = clientId
##self.parameters = settings.TradeParameters()
#self.wrapper = Wrapper(self.parameters)
#self.connection = EClientSocket(self.wrapper)
#
#def eConnect(self):
#self.connection.eConnect(self.host, self.port, self.clientId)
#
#def reqAccountUpdates(self):
#self.connection.reqAccountUpdates(1, '')
#
#def reqOpenOrders(self):
#self.connection.reqOpenOrders()
#
#def reqExecutions(self):
#filt = ExecutionFilter()
#self.connection.reqExecutions(filt)
##def reqIds(self):
##self.connection.reqIds(10)
##def reqNewsBulletins(self):
##self.connection.reqNewsBulletins(1)
##def cancelNewsBulletins(self):
##self.connection.cancelNewsBulletins()
##def setServerLogLevel(self):
##self.connection.setServerLogLevel(3)
##def reqAutoOpenOrders(self):
##self.connection.reqAutoOpenOrders(1)
##def reqAllOpenOrders(self):
##self.connection.reqAllOpenOrders()
##def reqManagedAccts(self):
##self.connection.reqManagedAccts()
##def requestFA(self):
##self.connection.requestFA(1)
##def reqMktData(self):
##tick_id = 1
##symbol = "SLV"
##contract = self.wrapper.makeContract(symbol)
##self.connection.reqMktData(tick_id, contract, [], False)
##def reqHistoricalData(self):
##contract = Contract()
##contract.m_symbol = 'QQQQ'
##contract.m_secType = 'STK'
##contract.m_exchange = 'SMART'
##endtime = strftime('%Y%m%d %H:%M:%S')
##self.connection.reqHistoricalData(
##tickerId=1,
##contract=contract,
##endDateTime=endtime,
##durationStr='1 D',
##barSizeSetting='1 min',
##whatToShow='TRADES',
##useRTH=0,
##formatDate=1)
#
#def eDisconnect(self):
#sleep(5)
#self.connection.eDisconnect()
|
cjastram/silverbot
|
lib/trader.py
|
Python
|
agpl-3.0
| 9,464
|
"""
Run demo with
python3 -m interpolation.interpolate interpolate.py
"""
import datetime
import random
import logging
import itertools
import os
import numpy
import pandas
from filter_weather_data.filters import StationRepository
from filter_weather_data import get_repository_parameters
from filter_weather_data import RepositoryParameter
from interpolation.interpolator.nearest_k_finder import NearestKFinder
from interpolation.interpolator.statistical_interpolator_experimental import get_interpolation_results
class Scorer:
    """Scores interpolation quality for one target station against a set of
    neighbour stations."""

    def __init__(self, target_station_dict, neighbour_station_dicts, start_date, end_date):
        """Remember the target and prepare a k-nearest lookup over the neighbours."""
        self.target_station_dict = target_station_dict
        self.nearest_k_finder = NearestKFinder(neighbour_station_dicts, start_date, end_date)

    def score_all_neighbours(self, date, t_actual):
        """Interpolate at *date* using every neighbour (-1 = unlimited k) and
        return the per-method results compared against *t_actual*."""
        finder = self.nearest_k_finder
        neighbours = finder.find_k_nearest_neighbours(self.target_station_dict, date, -1)
        return get_interpolation_results(neighbours, t_actual, "_all")
def score_interpolation_algorithm_at_date(scorer, date):
    """Fetch the measured temperature at *date* from the scorer's target
    station and score every interpolation method against it.

    Returns a fresh dict of the per-method results.
    """
    data_frame = scorer.target_station_dict["data_frame"]
    measured_temperature = data_frame.loc[date].temperature
    return dict(scorer.score_all_neighbours(date, measured_temperature))
def setup_logging(interpolation_name):
    """Configure the root logger to write to a timestamped file under ./log/.

    The file name embeds *interpolation_name* and the current time, with
    ':' and '.' replaced by '-' so it is a valid file name on all platforms.

    :param interpolation_name: label included in the log file name
    :return: the configured root logger
    """
    log = logging.getLogger('')
    log.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    #console_handler = logging.StreamHandler(sys.stdout)
    #console_handler.setFormatter(formatter)
    #log.addHandler(console_handler)
    file_name = "interpolation_{date}_{interpolation_name}.log".format(
        interpolation_name=interpolation_name,
        date=datetime.datetime.now().isoformat().replace(":", "-").replace(".", "-")
    )
    log_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "log"
    )
    # Create the directory on first use; logging.FileHandler raises
    # FileNotFoundError when the target directory does not exist.
    os.makedirs(log_dir, exist_ok=True)
    path_to_file_to_log_to = os.path.join(log_dir, file_name)
    file_handler = logging.FileHandler(path_to_file_to_log_to)
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)
    # Root logger has no parent to propagate to; keep the flag anyway so
    # nothing echoes if this is ever attached to a named logger.
    log.propagate = False
    log.info("### Start new logging")
    return log
def do_interpolation_scoring(
        target_station_dict,
        j,
        target_station_dicts_len,
        neighbour_station_dicts,
        start_date,
        end_date
):
    """Score every interpolation method for a single target station.

    Samples one random timestamp per half day from the target's data frame,
    scores each sample against the neighbour stations, accumulates squared
    errors per method, logs each method's RMSE and returns a one-row
    pandas.DataFrame with '<method>--rmse', '<method>--n' and
    '<method>--total' columns.

    :param target_station_dict: station dict of the target (uses "name" and "data_frame")
    :param j: zero-based index of this target, for progress logging only
    :param target_station_dicts_len: total number of targets, as a string, for logging
    :param neighbour_station_dicts: station dicts used as interpolation sources
    :param start_date: start of the evaluation period
    :param end_date: end of the evaluation period
    """
    target_station_name = target_station_dict["name"]
    logging.info("interpolate for " + target_station_name)
    logging.info("currently at " + str(j + 1) + " out of " + target_station_dicts_len)
    logging.info("use " + " ".join([station_dict["name"] for station_dict in neighbour_station_dicts]))
    scorer = Scorer(target_station_dict, neighbour_station_dicts, start_date, end_date)
    scorer.nearest_k_finder.sample_up(target_station_dict, start_date, end_date)
    sum_square_errors = {}
    each_minute = target_station_dict["data_frame"].index.values
    total_len = len(each_minute)
    # Split the timestamps into 12h groups (720 minutes) and draw one random
    # sample per group. array_split needs an integer section count, and a
    # station with less than 12h of data would request 0 sections (ValueError)
    # -- hence the integer division and the max(1, ...) guard.
    grouped_by_half_day = numpy.array_split(each_minute, max(1, total_len // 720))  # 12h
    each_half_day = [numpy.random.choice(hour_group) for hour_group in grouped_by_half_day]
    for date in each_half_day:
        result = score_interpolation_algorithm_at_date(scorer, date)
        for method, square_error in result.items():
            if method not in sum_square_errors:
                # Lazily create the per-method accumulator.
                sum_square_errors[method] = {"total": 0, "n": 0}
            # NaN means the method produced no usable value at this date.
            if not numpy.isnan(square_error):
                sum_square_errors[method]["total"] += square_error
                sum_square_errors[method]["n"] += 1
    # Log each method's RMSE, sorted by method name for stable output.
    method_and_result = sorted(sum_square_errors.items(), key=lambda x: x[0])
    for method, result in method_and_result:
        if sum_square_errors[method]["n"] > 0:
            method_rmse = numpy.sqrt(sum_square_errors[method]["total"] / sum_square_errors[method]["n"])
        else:
            method_rmse = numpy.nan  # no valid samples for this method
        sum_square_errors[method]["rmse"] = method_rmse
        score_str = "%.3f" % method_rmse
        logging.info(method + " " * (12 - len(method)) + score_str + " n=" + str(sum_square_errors[method]["n"]))
    logging.info("end method list")
    # Flatten the accumulators into single-element columns for a one-row frame.
    data_dict = {}
    for method in sum_square_errors.keys():
        data_dict[method + "--rmse"] = [sum_square_errors[method]["rmse"]]
        data_dict[method + "--n"] = [sum_square_errors[method]["n"]]
        data_dict[method + "--total"] = [sum_square_errors[method]["total"]]
    return pandas.DataFrame(data=data_dict)
def score_algorithm(start_date, end_date, repository_parameters, limit=0, interpolation_name="NONE"):
    """Benchmark the interpolation methods over a station repository.

    Loads all stations for the period, randomly assigns 30% as scoring
    targets and 70% as interpolation neighbours, scores every target,
    logs per-method overall RMSEs and writes the raw per-target results
    to a timestamped CSV file.
    """
    station_repository = StationRepository(*repository_parameters)
    station_dicts = station_repository.load_all_stations(start_date, end_date, limit=limit)

    # Random 30/70 split into targets and neighbours.
    random.shuffle(station_dicts)
    cut = int(.3 * len(station_dicts))  # 70% vs 30%
    target_station_dicts = station_dicts[:cut]
    neighbour_station_dicts = station_dicts[cut:]

    setup_logging(interpolation_name)
    logging.info("General Overview")
    logging.info("targets: " + " ".join(d["name"] for d in target_station_dicts))
    logging.info("neighbours: " + " ".join(d["name"] for d in neighbour_station_dicts))
    logging.info("End overview")
    logging.info("Several Runs")
    target_station_dicts_len = str(len(target_station_dicts))

    # A generator keeps the per-target scoring lazy: the actual work runs
    # while pandas.concat() consumes it below (as itertools.starmap would).
    overall_result = (
        do_interpolation_scoring(
            target_station_dict,
            j,
            target_station_dicts_len,
            neighbour_station_dicts,
            start_date,
            end_date,
        )
        for j, target_station_dict in enumerate(target_station_dicts)
    )
    logging.info("end targets")
    logging.info("overall results")
    overall_result_df = pandas.concat(overall_result)

    # Recover the method names from the "<method>--<value>" column scheme.
    methods = set()
    for column_name in overall_result_df.columns.values.tolist():
        method, _value = column_name.split("--")
        methods.add(method)

    # Aggregate across all targets and log the overall RMSE per method.
    for method in methods:
        overall_total = numpy.nansum(overall_result_df[method + "--total"])
        overall_n = int(numpy.nansum(overall_result_df[method + "--n"]))
        overall_rmse = numpy.sqrt(overall_total / overall_n)
        score_str = "%.5f" % overall_rmse
        logging.info(method + " " * (12 - len(method)) + score_str + " n=" + str(overall_n))

    overall_result_df.to_csv("interpolation_result_{date}_{interpolation_name}.csv".format(
        date=datetime.datetime.now().isoformat().replace(":", "-").replace(".", "-"),
        interpolation_name=interpolation_name
    ))
def demo():
    """Small end-to-end run: one day of data, 60 stations, the
    outdoor-and-shaded station subset, logged/saved under the name 'test'."""
    repository_parameters = get_repository_parameters(RepositoryParameter.ONLY_OUTDOOR_AND_SHADED)
    score_algorithm(
        "2016-01-31",
        "2016-02-01",
        repository_parameters,
        limit=60,
        interpolation_name="test",
    )


if __name__ == "__main__":
    demo()
|
1kastner/analyse_weather_data
|
interpolation/interpolate_experimental.py
|
Python
|
agpl-3.0
| 7,078
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from test_factory import SuperdeskTestCase
from apps.content_filters.content_filter import ContentFilterService
from superdesk.publish import SubscribersService
from eve.utils import ParsedRequest
import json
import os
import superdesk
from settings import URL_PREFIX
from superdesk.errors import SuperdeskApiError
from superdesk.vocabularies.command import VocabulariesPopulateCommand
class ContentFilterTests(SuperdeskTestCase):
    """Shared fixture base for the content-filter test suites.

    setUp() seeds the test app's data layer with sample articles, filter
    conditions, content filters, subscribers and one routing scheme; the
    subclasses exercise ContentFilterService / SubscribersService against
    these records.
    """

    def setUp(self):
        super().setUp()
        self.req = ParsedRequest()
        with self.app.test_request_context(URL_PREFIX):
            # Services under test, bound to the test backend.
            self.f = ContentFilterService(datasource='content_filters', backend=superdesk.get_backend())
            self.s = SubscribersService(datasource='subscribers', backend=superdesk.get_backend())
        # Six fetched articles covering different headline/urgency combinations
        # (some omit urgency or headline on purpose).
        self.articles = [{'_id': '1', 'urgency': 1, 'headline': 'story', 'state': 'fetched'},
                         {'_id': '2', 'headline': 'prtorque', 'state': 'fetched'},
                         {'_id': '3', 'urgency': 3, 'headline': 'creator', 'state': 'fetched'},
                         {'_id': '4', 'urgency': 4, 'state': 'fetched'},
                         {'_id': '5', 'urgency': 2, 'state': 'fetched'},
                         {'_id': '6', 'state': 'fetched'}]
        self.app.data.insert('archive', self.articles)
        # Filter conditions 1-5: three headline string operators
        # (like / endswith / startswith) and two urgency 'in' checks.
        self.app.data.insert('filter_conditions',
                             [{'_id': 1,
                               'field': 'headline',
                               'operator': 'like',
                               'value': 'tor',
                               'name': 'test-1'}])
        self.app.data.insert('filter_conditions',
                             [{'_id': 2,
                               'field': 'urgency',
                               'operator': 'in',
                               'value': '2',
                               'name': 'test-2'}])
        self.app.data.insert('filter_conditions',
                             [{'_id': 3,
                               'field': 'headline',
                               'operator': 'endswith',
                               'value': 'tor',
                               'name': 'test-3'}])
        self.app.data.insert('filter_conditions',
                             [{'_id': 4,
                               'field': 'urgency',
                               'operator': 'in',
                               'value': '2,3,4',
                               'name': 'test-4'}])
        self.app.data.insert('filter_conditions',
                             [{'_id': 5,
                               'field': 'headline',
                               'operator': 'startswith',
                               'value': 'sto',
                               'name': 'test-5'}])
        # Content filters 1-4: expressions combine filter conditions ('fc')
        # and references to other content filters ('pf').
        self.app.data.insert('content_filters',
                             [{"_id": 1,
                               "content_filter": [{"expression": {"fc": [1]}}],
                               "name": "soccer-only"}])
        self.app.data.insert('content_filters',
                             [{"_id": 2,
                               "content_filter": [{"expression": {"fc": [4, 3]}}],
                               "name": "soccer-only2"}])
        self.app.data.insert('content_filters',
                             [{"_id": 3,
                               "content_filter": [{"expression": {"pf": [1], "fc": [2]}}],
                               "name": "soccer-only3"}])
        self.app.data.insert('content_filters',
                             [{"_id": 4,
                               "content_filter": [{"expression": {"fc": [3]}}, {"expression": {"fc": [5]}}],
                               "name": "soccer-only4"}])
        # Two subscribers, each with a blocking reference to one content filter.
        self.app.data.insert('subscribers',
                             [{"_id": 1,
                               "content_filter": {"filter_id": 3, "filter_type": "blocking"},
                               "name": "sub1"}])
        self.app.data.insert('subscribers',
                             [{"_id": 2,
                               "content_filter": {"filter_id": 1, "filter_type": "blocking"},
                               "name": "sub2"}])
        # One routing scheme whose single rule references content filter 4.
        self.app.data.insert('routing_schemes', [
            {
                "_id": 1,
                "name": "routing_scheme_1",
                "rules": [{
                    "filter": 4,
                    "name": "routing_rule_4",
                    "schedule": {
                        "day_of_week": ["MON"],
                        "hour_of_day_from": "0000",
                        "hour_of_day_to": "2355",
                    },
                    "actions": {
                        "fetch": [],
                        "publish": [],
                        "exit": False
                    }
                }]
            }
        ])
class RetrievingDataTests(ContentFilterTests):
def test_build_mongo_query_using_like_filter_single_fc(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
def test_build_mongo_query_using_like_filter_single_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_filter_condition(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('5' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('5' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_filter_condition2(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_pf2(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_condition3(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}, {"expression": {"fc": [1, 2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_pf3(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}, {"expression": {"pf": [1], "fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_single_filter_condition(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_single_content_filter(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_filter_condition(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
self.assertTrue('5' in doc_ids)
def test_build_mongo_query_using_like_filter_multi_content_filter(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
self.assertTrue('2' in doc_ids)
self.assertTrue('3' in doc_ids)
self.assertTrue('5' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_filter_condition2(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}, {"expression": {"fc": [1, 2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_content_filter2(self):
doc = {'content_filter': [{"expression": {"fc": [4, 3]}},
{"expression": {"pf": [1], "fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_content_filter3(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}, {"expression": {"pf": [1], "fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_content_filter4(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}, {"expression": {"pf": [3]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_build_elastic_query_using_like_filter_multi_content_filter4(self):
doc = {'content_filter': [{"expression": {"pf": [4], "fc": [4]}}], 'name': 'pf-1'}
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
def test_does_match_returns_true_for_nonexisting_filter(self):
for article in self.articles:
self.assertTrue(self.f.does_match(None, article))
def test_does_match_using_like_filter_single_fc(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertTrue(self.f.does_match(doc, self.articles[0]))
self.assertTrue(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertFalse(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_single_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertTrue(self.f.does_match(doc, self.articles[0]))
self.assertTrue(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertFalse(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_fc(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertTrue(self.f.does_match(doc, self.articles[0]))
self.assertTrue(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertTrue(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertTrue(self.f.does_match(doc, self.articles[0]))
self.assertTrue(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertTrue(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_fc2(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertFalse(self.f.does_match(doc, self.articles[0]))
self.assertFalse(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertFalse(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_pf2(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertFalse(self.f.does_match(doc, self.articles[0]))
self.assertFalse(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertFalse(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_fc3(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}, {"expression": {"fc": [1, 2]}}], 'name': 'pf-1'}
with self.app.app_context():
self.assertFalse(self.f.does_match(doc, self.articles[0]))
self.assertFalse(self.f.does_match(doc, self.articles[1]))
self.assertTrue(self.f.does_match(doc, self.articles[2]))
self.assertFalse(self.f.does_match(doc, self.articles[3]))
self.assertFalse(self.f.does_match(doc, self.articles[4]))
self.assertFalse(self.f.does_match(doc, self.articles[5]))
def test_does_match_using_like_filter_multi_pf3(self):
    """An expression combining a publish filter and a filter condition
    must match only the third article."""
    doc = {'content_filter': [{"expression": {"pf": [4], "fc": [4]}}], 'name': 'pf-1'}
    expected = [False, False, True, False, False, False]
    with self.app.app_context():
        for article, should_match in zip(self.articles, expected):
            self.assertEqual(bool(self.f.does_match(doc, article)), should_match)
def test_if_pf_is_used(self):
    """_get_content_filters_by_content_filter reports how many filters
    reference a given content filter."""
    with self.app.app_context():
        self.assertEqual(self.f._get_content_filters_by_content_filter(1).count(), 1)
        self.assertEqual(self.f._get_content_filters_by_content_filter(4).count(), 0)
def test_if_fc_is_used(self):
    """get_content_filters_by_filter_condition reports how many filters
    use a given filter condition."""
    with self.app.app_context():
        for condition_id, expected_count in ((1, 2), (3, 2), (2, 1)):
            filters = self.f.get_content_filters_by_filter_condition(condition_id)
            self.assertEqual(len(filters), expected_count)
def test_get_subscribers_by_filter_condition(self):
    """_get_subscribers_by_filter_condition selects the expected number
    of subscribers for each kind of condition."""
    conditions_and_counts = [
        ({'field': 'urgency', 'operator': 'in', 'value': '2'}, 1),
        ({'field': 'urgency', 'operator': 'in', 'value': '1'}, 0),
        ({'field': 'headline', 'operator': 'like', 'value': 'tor'}, 2),
        ({'field': 'urgency', 'operator': 'nin', 'value': '3'}, 1),
    ]
    with self.app.app_context():
        # populate the vocabularies the filter conditions rely on
        cmd = VocabulariesPopulateCommand()
        filename = os.path.join(os.path.abspath(
            os.path.dirname("apps/prepopulate/data_initialization/vocabularies.json")), "vocabularies.json")
        cmd.run(filename)
        for condition, expected_count in conditions_and_counts:
            result = self.s._get_subscribers_by_filter_condition(condition)
            self.assertEqual(len(result[0]['selected_subscribers']), expected_count)
class DeleteMethodTestCase(ContentFilterTests):
    """Tests for the delete() method."""

    def _assert_delete_rejected(self, filter_id):
        # Deleting a filter that is still referenced must fail with a
        # 400 (bad request) API error.
        with self.assertRaises(SuperdeskApiError) as error_ctx:
            self.f.delete({'_id': filter_id})
        self.assertEqual(error_ctx.exception.status_code, 400)

    def test_raises_error_if_filter_referenced_by_subscribers(self):
        self._assert_delete_rejected(1)

    def test_raises_error_if_filter_referenced_by_routing_rules(self):
        self._assert_delete_rejected(4)
|
fritzSF/superdesk
|
server/apps/content_filters/content_filter_tests.py
|
Python
|
agpl-3.0
| 23,348
|
# -*- coding: utf-8 -*-
"""
The Degreed2 Integrated Channel package.
"""

__version__ = "0.0.1"

# Django app configuration class for this integrated channel.
default_app_config = "integrated_channels.degreed2.apps.Degreed2Config"
|
edx/edx-enterprise
|
integrated_channels/degreed2/__init__.py
|
Python
|
agpl-3.0
| 196
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
import cl_address_category
|
CLVsol/odoo_cl_addons
|
cl_address/category/__init__.py
|
Python
|
agpl-3.0
| 1,431
|
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
#
# Copyright (c) 2010 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo (info@vauxoo.com)
############################################################################
# Coded by: Luis Torres (luis_t@vauxoo.com)
############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import installer
|
imarin/Odoo-Mexico-localization
|
l10n_mx_company_cif/wizard/__init__.py
|
Python
|
agpl-3.0
| 1,271
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import datetime
from openerp.osv import fields, osv
from openerp import tools
from openerp.tools.translate import _
class project_project(osv.osv):
    """Project extension: invoice timesheets via the analytic account."""
    _inherit = 'project.project'

    def onchange_partner_id(self, cr, uid, ids, part=False, context=None):
        """On partner change, default the invoicing factor to 100%.

        Extends the base onchange to preset ``to_invoice`` with the
        'timesheet_invoice_factor1' record whenever a partner is set.
        """
        res = super(project_project, self).onchange_partner_id(cr, uid, ids, part, context)
        if part and res and ('value' in res):
            # set Invoice Task Work to 100%
            data_obj = self.pool.get('ir.model.data')
            data_id = data_obj._get_id(cr, uid, 'hr_timesheet_invoice', 'timesheet_invoice_factor1')
            if data_id:
                factor_id = data_obj.browse(cr, uid, data_id).res_id
                res['value'].update({'to_invoice': factor_id})
        return res

    # Projects created with this module installed invoice timesheets by default.
    _defaults = {
        'invoice_on_timesheets': True,
    }

    def open_timesheets(self, cr, uid, ids, context=None):
        """ open Timesheets view """
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')

        # Only the first selected project is used for the view context.
        project = self.browse(cr, uid, ids[0], context)
        view_context = {
            'search_default_account_id': [project.analytic_account_id.id],
            'default_account_id': project.analytic_account_id.id,
        }
        help = _("""<p class="oe_view_nocontent_create">Record your timesheets for the project '%s'.</p>""") % (project.name,)
        try:
            if project.to_invoice and project.partner_id:
                help += _("""<p>Timesheets on this project may be invoiced to %s, according to the terms defined in the contract.</p>""" ) % (project.partner_id.name,)
        except:
            # if the user do not have access rights on the partner
            pass

        res = mod_obj.get_object_reference(cr, uid, 'hr_timesheet', 'act_hr_timesheet_line_evry1_all_form')
        id = res and res[1] or False
        result = act_obj.read(cr, uid, [id], context=context)[0]
        result['name'] = _('Timesheets')
        result['context'] = view_context
        result['help'] = help
        return result
class project_work(osv.osv):
    """Task work lines mirrored into hr.analytic.timesheet entries.

    Every project.task.work record created/updated/deleted here keeps a
    matching analytic timesheet line in sync (see ``_columns`` link below).
    """
    _inherit = "project.task.work"

    def get_user_related_details(self, cr, uid, user_id):
        """Return analytic-entry defaults derived from the user's employee.

        Returns a dict with ``product_id``, ``journal_id``,
        ``general_account_id`` and ``product_uom_id``.

        Raises osv.except_osv if the user has no employee, or the employee
        lacks a product, journal or expense-account configuration.
        """
        res = {}
        emp_obj = self.pool.get('hr.employee')
        emp_id = emp_obj.search(cr, uid, [('user_id', '=', user_id)])
        if not emp_id:
            user_name = self.pool.get('res.users').read(cr, uid, [user_id], ['name'])[0]['name']
            raise osv.except_osv(_('Bad Configuration!'),
                 _('Please define employee for user "%s". You must create one.')% (user_name,))
        emp = emp_obj.browse(cr, uid, emp_id[0])
        if not emp.product_id:
            raise osv.except_osv(_('Bad Configuration!'),
                 _('Please define product and product category property account on the related employee.\nFill in the HR Settings tab of the employee form.'))
        if not emp.journal_id:
            raise osv.except_osv(_('Bad Configuration!'),
                 _('Please define journal on the related employee.\nFill in the timesheet tab of the employee form.'))
        # Expense account: product-level first, category-level fallback.
        acc_id = emp.product_id.property_account_expense.id
        if not acc_id:
            acc_id = emp.product_id.categ_id.property_account_expense_categ.id
        if not acc_id:
            raise osv.except_osv(_('Bad Configuration!'),
                    _('Please define product and product category property account on the related employee.\nFill in the timesheet tab of the employee form.'))
        res['product_id'] = emp.product_id.id
        res['journal_id'] = emp.journal_id.id
        res['general_account_id'] = acc_id
        res['product_uom_id'] = emp.product_id.uom_id.id
        return res

    def _create_analytic_entries(self, cr, uid, vals, context):
        """Create the hr analytic timesheet from project task work"""
        timesheet_obj = self.pool['hr.analytic.timesheet']
        task_obj = self.pool['project.task']

        vals_line = {}
        timeline_id = False
        acc_id = False

        task_obj = task_obj.browse(cr, uid, vals['task_id'], context=context)
        result = self.get_user_related_details(cr, uid, vals.get('user_id', uid))
        vals_line['name'] = '%s: %s' % (tools.ustr(task_obj.name), tools.ustr(vals['name'] or '/'))
        vals_line['user_id'] = vals['user_id']
        vals_line['product_id'] = result['product_id']
        if vals.get('date'):
            # analytic lines store a date only (first 10 chars of a datetime)
            vals_line['date'] = vals['date'][:10]

        # Calculate quantity based on employee's product's uom
        vals_line['unit_amount'] = vals['hours']

        default_uom = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.project_time_mode_id.id
        if result['product_uom_id'] != default_uom:
            vals_line['unit_amount'] = self.pool['product.uom']._compute_qty(cr, uid, default_uom, vals['hours'], result['product_uom_id'])
        acc_id = task_obj.project_id and task_obj.project_id.analytic_account_id.id or acc_id
        if acc_id:
            vals_line['account_id'] = acc_id
            res = timesheet_obj.on_change_account_id(cr, uid, False, acc_id)
            if res.get('value'):
                vals_line.update(res['value'])
            vals_line['general_account_id'] = result['general_account_id']
            vals_line['journal_id'] = result['journal_id']
            vals_line['amount'] = 0.0
            vals_line['product_uom_id'] = result['product_uom_id']
            amount = vals_line['unit_amount']
            prod_id = vals_line['product_id']
            unit = False
            timeline_id = timesheet_obj.create(cr, uid, vals=vals_line, context=context)

            # Compute based on pricetype
            amount_unit = timesheet_obj.on_change_unit_amount(cr, uid, timeline_id,
                prod_id, amount, False, unit, vals_line['journal_id'], context=context)
            if amount_unit and 'amount' in amount_unit.get('value',{}):
                updv = { 'amount': amount_unit['value']['amount'] }
                timesheet_obj.write(cr, uid, [timeline_id], updv, context=context)
        # NOTE: returns False when the task has no analytic account.
        return timeline_id

    def create(self, cr, uid, vals, *args, **kwargs):
        # Mirror the work entry into an analytic timesheet line unless the
        # caller explicitly opts out via the context.
        context = kwargs.get('context', {})
        if not context.get('no_analytic_entry',False):
            vals['hr_analytic_timesheet_id'] = self._create_analytic_entries(cr, uid, vals, context=context)
        return super(project_work,self).create(cr, uid, vals, *args, **kwargs)

    def write(self, cr, uid, ids, vals, context=None):
        """
        When a project task work gets updated, handle its hr analytic timesheet.
        """
        if context is None:
            context = {}
        timesheet_obj = self.pool.get('hr.analytic.timesheet')
        uom_obj = self.pool.get('product.uom')
        result = {}

        if isinstance(ids, (long, int)):
            ids = [ids]

        for task in self.browse(cr, uid, ids, context=context):
            line_id = task.hr_analytic_timesheet_id
            if not line_id:
                # if a record is deleted from timesheet, the line_id will become
                # null because of the foreign key on-delete=set null
                continue
            vals_line = {}
            if 'name' in vals:
                vals_line['name'] = '%s: %s' % (tools.ustr(task.task_id.name), tools.ustr(vals['name'] or '/'))
            if 'user_id' in vals:
                vals_line['user_id'] = vals['user_id']
            if 'date' in vals:
                vals_line['date'] = vals['date'][:10]
            if 'hours' in vals:
                vals_line['unit_amount'] = vals['hours']
                prod_id = vals_line.get('product_id', line_id.product_id.id) # False may be set

                # Put user related details in analytic timesheet values
                details = self.get_user_related_details(cr, uid, vals.get('user_id', task.user_id.id))
                for field in ('product_id', 'general_account_id', 'journal_id', 'product_uom_id'):
                    if details.get(field, False):
                        vals_line[field] = details[field]

                # Check if user's default UOM differs from product's UOM
                user_default_uom_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.project_time_mode_id.id
                if details.get('product_uom_id', False) and details['product_uom_id'] != user_default_uom_id:
                    vals_line['unit_amount'] = uom_obj._compute_qty(cr, uid, user_default_uom_id, vals['hours'], details['product_uom_id'])

                # Compute based on pricetype
                amount_unit = timesheet_obj.on_change_unit_amount(cr, uid, line_id.id,
                    prod_id=prod_id, company_id=False,
                    unit_amount=vals_line['unit_amount'], unit=False, journal_id=vals_line['journal_id'], context=context)
                if amount_unit and 'amount' in amount_unit.get('value',{}):
                    vals_line['amount'] = amount_unit['value']['amount']

            if vals_line:
                self.pool.get('hr.analytic.timesheet').write(cr, uid, [line_id.id], vals_line, context=context)
        return super(project_work,self).write(cr, uid, ids, vals, context)

    def unlink(self, cr, uid, ids, *args, **kwargs):
        hat_obj = self.pool.get('hr.analytic.timesheet')
        hat_ids = []
        for task in self.browse(cr, uid, ids):
            if task.hr_analytic_timesheet_id:
                hat_ids.append(task.hr_analytic_timesheet_id.id)
        # Delete entry from timesheet too while deleting entry to task.
        if hat_ids:
            hat_obj.unlink(cr, uid, hat_ids, *args, **kwargs)
        return super(project_work,self).unlink(cr, uid, ids, *args, **kwargs)

    # Link to the mirrored analytic line; cleared (not cascaded) when the
    # timesheet line is deleted, see the guard in write().
    _columns={
        'hr_analytic_timesheet_id':fields.many2one('hr.analytic.timesheet','Related Timeline Id', ondelete='set null'),
    }
class task(osv.osv):
    """Task extension keeping work lines' analytic entries consistent."""
    _inherit = "project.task"

    def unlink(self, cr, uid, ids, *args, **kwargs):
        # Cascade: delete the work lines first so their mirrored analytic
        # timesheet entries are removed too (see project_work.unlink).
        for task_obj in self.browse(cr, uid, ids, *args, **kwargs):
            if task_obj.work_ids:
                work_ids = [x.id for x in task_obj.work_ids]
                self.pool.get('project.task.work').unlink(cr, uid, work_ids, *args, **kwargs)
        return super(task,self).unlink(cr, uid, ids, *args, **kwargs)

    def write(self, cr, uid, ids, vals, context=None):
        """Sync analytic timesheet lines when the task moves or is renamed.

        On a project change, repoint existing analytic lines to the new
        analytic account and create the entries that are still missing;
        on a rename, update the lines' descriptions.
        """
        if context is None:
            context = {}
        task_work_obj = self.pool['project.task.work']
        acc_id = False
        missing_analytic_entries = {}

        if vals.get('project_id',False) or vals.get('name',False):
            vals_line = {}
            hr_anlytic_timesheet = self.pool.get('hr.analytic.timesheet')
            if vals.get('project_id',False):
                project_obj = self.pool.get('project.project').browse(cr, uid, vals['project_id'], context=context)
                acc_id = project_obj.analytic_account_id.id

            for task_obj in self.browse(cr, uid, ids, context=context):
                if len(task_obj.work_ids):
                    for task_work in task_obj.work_ids:
                        if not task_work.hr_analytic_timesheet_id:
                            if acc_id :
                                # missing timesheet activities to generate
                                missing_analytic_entries[task_work.id] = {
                                    'name' : task_work.name,
                                    'user_id' : task_work.user_id.id,
                                    'date' : task_work.date and task_work.date[:10] or False,
                                    'account_id': acc_id,
                                    'hours' : task_work.hours,
                                    'task_id' : task_obj.id
                                }
                            continue
                        line_id = task_work.hr_analytic_timesheet_id.id
                        if vals.get('project_id',False):
                            vals_line['account_id'] = acc_id
                        if vals.get('name',False):
                            vals_line['name'] = '%s: %s' % (tools.ustr(vals['name']), tools.ustr(task_work.name) or '/')
                        hr_anlytic_timesheet.write(cr, uid, [line_id], vals_line, {})

        res = super(task,self).write(cr, uid, ids, vals, context)

        # Create the analytic entries only after the task itself has been
        # written, so _create_analytic_entries sees the updated task.
        for task_work_id, analytic_entry in missing_analytic_entries.items():
            timeline_id = task_work_obj._create_analytic_entries(cr, uid, analytic_entry, context=context)
            task_work_obj.write(cr, uid, task_work_id, {'hr_analytic_timesheet_id' : timeline_id}, context=context)
        return res
class res_partner(osv.osv):
    """Partner extension: protect partners referenced by projects."""
    _inherit = 'res.partner'

    def unlink(self, cursor, user, ids, context=None):
        # Refuse to delete a partner that is still assigned to a project;
        # the user should deactivate the partner instead.
        project_pool = self.pool.get('project.project')
        linked_project_ids = project_pool.search(cursor, user, [('partner_id', 'in', ids)])
        if linked_project_ids:
            raise osv.except_osv(_('Invalid Action!'), _('You cannot delete a partner which is assigned to project, but you can uncheck the active box.'))
        return super(res_partner, self).unlink(cursor, user, ids, context=context)
class account_analytic_line(osv.osv):
    """Analytic line extension: employee-based defaults and account checks."""
    _inherit = "account.analytic.line"

    def get_product(self, cr, uid, context=None):
        """Default product: the one configured on the current user's employee."""
        emp_obj = self.pool.get('hr.employee')
        emp_ids = emp_obj.search(cr, uid, [('user_id', '=', uid)], context=context)
        if emp_ids:
            employee = emp_obj.browse(cr, uid, emp_ids, context=context)[0]
            if employee.product_id: return employee.product_id.id
        return False

    _defaults = {'product_id': get_product,}

    def on_change_account_id(self, cr, uid, ids, account_id):
        """Propagate the account's invoicing factor; reject closed accounts."""
        res = {}
        if not account_id:
            return res
        res.setdefault('value',{})
        acc = self.pool.get('account.analytic.account').browse(cr, uid, account_id)
        st = acc.to_invoice.id
        res['value']['to_invoice'] = st or False
        if acc.state == 'close' or acc.state == 'cancelled':
            raise osv.except_osv(_('Invalid Analytic Account!'), _('You cannot select a Analytic Account which is in Close or Cancelled state.'))
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ubic135/odoo-design
|
addons/project_timesheet/project_timesheet.py
|
Python
|
agpl-3.0
| 15,312
|
#!/usr/bin/env python
import os
import re
import sys
import h5py
import logging
import numpy as np
from openquake.baselib import sap
from openquake.hazardlib.const import TRT
from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.surface.gridded import GriddedSurface
from openquake.hazardlib.source import NonParametricSeismicSource
from openquake.hazardlib.source import BaseRupture
from openquake.hazardlib.pmf import PMF
from openquake.hazardlib.sourcewriter import write_source_model
from openquake.hazardlib.nrml import SourceModel
from openquake.hazardlib.sourceconverter import SourceGroup
from decimal import Decimal, getcontext
# Use 10 significant digits for the Decimal probability arithmetic below.
getcontext().prec = 10
def create_nrml_source(rup, mag, sid, name, tectonic_region_type):
    """Build a NonParametricSeismicSource from a group of ruptures.

    :param rup:
        An h5py group (one per magnitude bin) whose datasets each carry the
        rupture fields 'lons', 'lats', 'deps' and 'prbs'
    :param mag:
        Magnitude assigned to every rupture in the group
    :param sid:
        Source ID
    :param name:
        Source name
    :param tectonic_region_type:
        Tectonic region type of the source
    :returns:
        A :class:`openquake.hazardlib.source.NonParametricSeismicSource`
    """
    data = []
    for key in rup.keys():
        d = rup[key][:]

        # rupture geometry
        llo = np.squeeze(d['lons'])
        lla = np.squeeze(d['lats'])
        lde = np.squeeze(d['deps'])

        # find a node in the middle of the rupture to use as the hypocenter
        # NOTE(review): if llo is 0-dimensional, hlo/hla/hde stay unbound and
        # the BaseRupture call below raises — assumes ruptures have >= 1 dim.
        if len(llo.shape):
            ihyp = (int(np.round(llo.shape[0]/2)))
            if len(llo.shape) > 1:
                ihyp = (ihyp, int(np.round(llo.shape[1]/2)))
            hlo = llo[ihyp]
            hla = lla[ihyp]
            hde = lde[ihyp]

        # occurrence probabilities; keep only finite mesh nodes
        ppp = np.squeeze(d['prbs'])
        i = np.isfinite(llo)
        points = [Point(x, y, z) for x, y, z in
                  zip(llo[i], lla[i], lde[i])]
        srf = GriddedSurface.from_points_list(points)

        # rake is fixed to -90 (normal faulting), typical for in-slab sources
        br = BaseRupture(mag=mag,
                         rake=-90.,
                         tectonic_region_type=tectonic_region_type,
                         hypocenter=Point(hlo, hla, hde),
                         surface=srf)

        # two-state PMF: probability of 0 occurrences vs exactly 1 occurrence
        xxx = Decimal('{:.8f}'.format(ppp[1]))
        pmf = PMF(data=[((Decimal('1')-xxx), 0), (xxx, 1)])
        data.append((br, pmf))
    src = NonParametricSeismicSource(sid, name, tectonic_region_type, data)
    return src
def create(label, rupture_hdf5_fname, output_folder, investigation_t, srcid):
    """Write one nrml source-model file per magnitude bin of the ruptures.

    :param label:
        TR label used to build the source IDs
    :param rupture_hdf5_fname:
        Name of the hdf5 file containing the ruptures, grouped by magnitude
        under the 'ruptures' key
    :param output_folder:
        Folder where the .nrml files are written (created if missing)
    :param investigation_t:
        Investigation time for the source models
    :param srcid:
        Optional extra suffix appended to the source IDs
    """
    # 'with' guarantees the hdf5 file is closed even if a model fails to write
    with h5py.File(rupture_hdf5_fname, 'r') as f:
        if not os.path.exists(output_folder):
            os.mkdir(output_folder)

        trt = TRT.SUBDUCTION_INTRASLAB
        for mag in f['ruptures'].keys():

            # check the number of ruptures defined for the current magnitude value
            grp = f['ruptures'][mag]
            if len(grp) < 1:
                tmps = 'Skipping ruptures for magnitude {:.2f}'.format(float(mag))
                logging.warning(tmps)
                continue

            # set the name of the output nrml file
            fnrml = os.path.join(output_folder, '{:s}.nrml'.format(mag))

            # source ID: replace the decimal point so the ID stays a clean
            # token (raw string avoids the invalid '\.' escape warning)
            mags = re.sub(r'\.', 'pt', mag)
            sid = 'src_{:s}_{:s}'.format(label, mags)
            if srcid:
                sid += '_{:s}'.format(srcid)
            name = 'Ruptures for mag bin {:s}'.format(mags)

            # creates a non-parametric seismic source
            src = create_nrml_source(grp, float(mag), sid, name, trt)

            # create source group
            sgrp = SourceGroup(trt, [src])

            # create source model
            name = 'Source model for {:s} magnitude {:s}'.format(label, mags)
            mdl = SourceModel([sgrp], name, investigation_t)

            # write source model
            write_source_model(fnrml, mdl, mag)
    print('Done')
def main(argv):
    """Build the command-line parser and dispatch to :func:`create`."""
    script = sap.Script(create)
    # positional arguments (order must match create's signature)
    script.arg(name='label', help='TR label')
    script.arg(name='rupture_hdf5_fname', help='hdf5 file with the ruptures')
    script.arg(name='output_folder', help='Name of the output folder')
    script.arg(name='investigation_t', help='Investigation time')
    script.opt(name='srcid', help='Source ID')
    if not argv:
        # no arguments given: show the usage message instead of running
        print(script.help())
    else:
        script.callfunc()
# Script entry point: forward the command-line arguments (minus the program name).
if __name__ == '__main__':
    main(sys.argv[1:])
|
GEMScienceTools/oq-subduction
|
bin/create_inslab_nrml.py
|
Python
|
agpl-3.0
| 4,493
|
# This file is part of Wolnelektury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
from functools import wraps
from django import template
from catalogue.models import Book
register = template.Library()
class StatsNode(template.Node):
    """Template node that renders a value or stores it in the context.

    With a variable name the value is saved into the rendering context and
    nothing is output; without one the value itself is rendered.
    """

    def __init__(self, value, varname=None):
        self.value = value
        self.varname = varname

    def render(self, context):
        if not self.varname:
            return self.value
        context[self.varname] = self.value
        return ''
def register_counter(f):
    """Turns a simple counting function into a registered counter tag.

    You can run a counter tag as a simple {% tag_name %} tag, or
    as {% tag_name var_name %} to store the result in a variable.
    """
    @wraps(f)
    def wrapped(parser, token):
        # {% tag_name %} has no argument; {% tag_name var %} names a variable.
        try:
            tag_name, args = token.contents.split(None, 1)
        except ValueError:
            args = None
        # NOTE(review): f() is evaluated here, at template *parse* time, so
        # the count is frozen when the template is compiled/cached — confirm
        # this is intended rather than evaluating lazily in render().
        return StatsNode(f(), args)
    return register.tag(wrapped)
@register_counter
def count_books_all():
    """Total number of books in the catalogue."""
    return Book.objects.count()
@register_counter
def count_books():
    """Number of books without children (leaf books)."""
    leaf_books = Book.objects.filter(children=None)
    return leaf_books.count()
@register_counter
def count_books_parent():
    """Number of books that have at least one child."""
    parent_books = Book.objects.exclude(children=None)
    return parent_books.count()
@register_counter
def count_books_root():
    """Number of top-level books (books without a parent)."""
    root_books = Book.objects.filter(parent=None)
    return root_books.count()
|
fnp/wolnelektury
|
src/reporting/templatetags/reporting_stats.py
|
Python
|
agpl-3.0
| 1,435
|
# -*- coding: utf-8 -*-
"""Setup the tg2app application"""
import logging
from tg2app.config.environment import load_environment
__all__ = ['setup_app']
log = logging.getLogger(__name__)
from schema import setup_schema
import bootstrap
def setup_app(command, conf, vars):
    """Place any commands to setup tg2app here"""
    # Order matters: the environment must be loaded before the schema is
    # created, and the schema must exist before bootstrap data is inserted.
    load_environment(conf.global_conf, conf.local_conf)
    setup_schema(command, conf, vars)
    bootstrap.bootstrap(command, conf, vars)
|
ralphbean/monroe
|
wsgi/tg2app/tg2app/websetup/__init__.py
|
Python
|
agpl-3.0
| 467
|
from setuptools import setup, find_packages
# Package metadata and build configuration for the routez distribution.
setup(
    name="routez",
    version="1.0",
    url='http://github.com/wlach/routez',
    description="Routez",
    author='William Lachance',
    packages=find_packages('.'),
    package_dir={'': '.'},
    install_requires=['setuptools'],
)
|
wlach/routez
|
src/setup.py
|
Python
|
agpl-3.0
| 304
|
from rest_framework import serializers
from student.models import UserEvent
class UserEventSerializer(serializers.ModelSerializer):
    """Serializes UserEvent records for the user events API."""

    class Meta:
        model = UserEvent
        # expose the scheduling/display fields of an event
        fields = ('id', 'user', 'title', 'start', 'end', 'color')
|
mjirayu/sit_academy
|
openedx/core/djangoapps/user_api/user_events/serializers.py
|
Python
|
agpl-3.0
| 243
|
# File names used to exchange data with MATLAB during optimization.
# NOTE(review): semantics inferred from the names — p: parameters, f: function
# value, df: derivative — confirm against the MATLAB interface code.
MATLAB_PARAMETER_FILENAME = 'p.mat'
MATLAB_F_FILENAME = 'f.mat'
MATLAB_DF_FILENAME = 'df.mat'
NODES_MAX_FILENAME = 'max_nodes.txt'
|
jor-/simulation
|
simulation/optimization/matlab/constants.py
|
Python
|
agpl-3.0
| 131
|
# -*- coding: utf-8 -*-
# Copyright 2016-2018 Flensburg University of Applied Sciences,
# Europa-Universität Flensburg,
# Centre for Sustainable Energy Systems,
# DLR-Institute for Networked Energy Systems
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# File description
"""
This is the application file for the tool eTraGo.
Define your connection parameters and power flow settings before executing
the function etrago.
"""
import datetime
import os
import os.path
import numpy as np
__copyright__ = (
"Flensburg University of Applied Sciences, "
"Europa-Universität Flensburg, Centre for Sustainable Energy Systems, "
"DLR-Institute for Networked Energy Systems")
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__author__ = "ulfmueller, lukasol, wolfbunke, mariusves, s3pp"
if 'READTHEDOCS' not in os.environ:
# Sphinx does not run this code.
# Do not import internal packages directly
from etrago import Etrago
# Default run configuration; run_etrago's docstring documents each key in
# detail.
args = {
    # Setup and Configuration:
    'db': 'egon-data',  # database session
    'gridversion': None,  # None for model_draft or Version number
    'method': {  # Choose method and settings for optimization
        'type': 'lopf',  # type of optimization, currently only 'lopf'
        'n_iter': 2,  # abort criterion of iterative optimization, 'n_iter' or 'threshold'
        'pyomo': True},  # set if pyomo is used for model building
    'pf_post_lopf': {
        'active': False,  # choose if perform a pf after a lopf simulation
        'add_foreign_lopf': True,  # keep results of lopf for foreign DC-links
        'q_allocation': 'p_nom'},  # allocate reactive power via 'p_nom' or 'p'
    'start_snapshot': 1,
    'end_snapshot': 10,
    'solver': 'gurobi',  # glpk, cplex or gurobi
    'solver_options': {},
    'model_formulation': 'kirchhoff',  # angles or kirchhoff
    'scn_name': 'eGon2035',  # a scenario: eGon2035 or eGon100RE
    # Scenario variations:
    'scn_extension': None,  # None or array of extension scenarios
    'scn_decommissioning': None,  # None or decommissioning scenario
    # Export options:
    'lpfile': False,  # save pyomo's lp file: False or /path/tofolder
    'csv_export': 'results',  # save results as csv: False or /path/tofolder
    # Settings:
    'extendable': ['as_in_db'],  # Array of components to optimize
    'generator_noise': 789456,  # apply generator noise, False or seed number
    'extra_functionality': {},  # Choose function name or {}
    # Clustering:
    'network_clustering_kmeans': {
        'active': True,  # choose if clustering is activated
        'n_clusters': 30,  # number of resulting nodes
        'n_clusters_gas': 5,  # number of resulting nodes in Germany
        'kmeans_busmap': False,  # False or path/to/busmap.csv
        'kmeans_gas_busmap': False,  # False or path/to/ch4_busmap.csv
        'line_length_factor': 1,  #
        'remove_stubs': False,  # remove stubs before kmeans clustering
        'use_reduced_coordinates': False,  #
        'bus_weight_tocsv': None,  # None or path/to/bus_weight.csv
        'bus_weight_fromcsv': None,  # None or path/to/bus_weight.csv
        'n_init': 10,  # affects clustering algorithm, only change when necessary
        'max_iter': 100,  # affects clustering algorithm, only change when necessary
        'tol': 1e-6,},  # affects clustering algorithm, only change when necessary
    'network_clustering_ehv': True,  # clustering of HV buses to EHV buses.
    'disaggregation': 'uniform',  # None, 'mini' or 'uniform'
    'snapshot_clustering': {
        'active': False,  # choose if clustering is activated
        'method': 'typical_periods',  # 'typical_periods' or 'segmentation'
        'how': 'daily',  # type of period, currently only 'daily' - only relevant for 'typical_periods'
        'storage_constraints': '',  # additional constraints for storages - only relevant for 'typical_periods'
        'n_clusters': 5,  # number of periods - only relevant for 'typical_periods'
        'n_segments': 5},  # number of segments - only relevant for segmentation
    # Simplifications:
    'skip_snapshots': False,  # False or number of snapshots to skip
    'branch_capacity_factor': {'HV': 0.5, 'eHV': 0.7},  # p.u. branch derating
    'load_shedding': False,  # meet the demand at value of loss load cost
    'foreign_lines': {'carrier': 'AC',  # 'DC' for modeling foreign lines as links
                      'capacity': 'osmTGmod'},  # 'osmTGmod', 'ntc_acer' or 'thermal_acer'
    'comments': None}
def run_etrago(args, json_path):
"""The etrago function works with following arguments:
Parameters
----------
db : str
``'oedb'``,
Name of Database session setting stored in *config.ini* of *.egoio*
gridversion : NoneType or str
``'v0.4.6'``,
Name of the data version number of oedb: state ``'None'`` for
model_draft (sand-box) or an explicit version number
(e.g. 'v0.4.6') for the grid schema.
method : dict
{'type': 'lopf', 'n_iter': 5, 'pyomo': True},
Choose 'lopf' for 'type'. In case of extendable lines, several lopfs
have to be performed. Choose either 'n_init' and a fixed number of
iterations or 'thershold' and a threashold of the objective function as
abort criteria.
Set 'pyomo' to False for big optimization problems, currently only
possible when solver is 'gurobi'.
pf_post_lopf :dict
{'active': True, 'add_foreign_lopf': True, 'q_allocation': 'p_nom'},
Option to run a non-linear power flow (pf) directly after the
linear optimal power flow (and thus the dispatch) has finished.
If foreign lines are modeled as DC-links (see foreign_lines), results
of the lopf can be added by setting 'add_foreign_lopf'.
Reactive power can be distributed either by 'p_nom' or 'p'.
start_snapshot : int
1,
Start hour of the scenario year to be calculated.
end_snapshot : int
2,
End hour of the scenario year to be calculated.
If temporal clustering is used, the selected snapshots should cover
whole days.
solver : str
'glpk',
Choose your preferred solver. Current options: 'glpk' (open-source),
'cplex' or 'gurobi'.
solver_options: dict
Choose settings of solver to improve simulation time and result.
Options are described in documentation of choosen solver.
model_formulation: str
'angles'
Choose formulation of pyomo-model.
Current options: angles, cycles, kirchhoff, ptdf
scn_name : str
'eGon2035',
Choose your scenario. Currently, there are two different
scenarios: 'eGon2035', 'eGon100RE'.
scn_extension : NoneType or list
None,
Choose extension-scenarios which will be added to the existing
network container. Data of the extension scenarios are located in
extension-tables (e.g. model_draft.ego_grid_pf_hv_extension_bus)
with the prefix 'extension_'.
Currently there are three overlay networks:
'nep2035_confirmed' includes all planed new lines confirmed by the
Bundesnetzagentur
'nep2035_b2' includes all new lines planned by the
Netzentwicklungsplan 2025 in scenario 2035 B2
'BE_NO_NEP 2035' includes planned lines to Belgium and Norway and
adds BE and NO as electrical neighbours
scn_decommissioning : str
None,
Choose an extra scenario which includes lines you want to decommise
from the existing network. Data of the decommissioning scenarios are
located in extension-tables
(e.g. model_draft.ego_grid_pf_hv_extension_bus) with the prefix
'decommissioning_'.
Currently, there are two decommissioning_scenarios which are linked to
extension-scenarios:
'nep2035_confirmed' includes all lines that will be replaced in
confirmed projects
'nep2035_b2' includes all lines that will be replaced in
NEP-scenario 2035 B2
lpfile : obj
False,
State if and where you want to save pyomo's lp file. Options:
False or '/path/tofolder'.import numpy as np
csv_export : obj
False,
State if and where you want to save results as csv files.Options:
False or '/path/tofolder'.
extendable : list
['network', 'storages'],
Choose components you want to optimize.
Settings can be added in /tools/extendable.py.
The most important possibilities:
'as_in_db': leaves everything as it is defined in the data coming
from the database
'network': set all lines, links and transformers extendable
'german_network': set lines and transformers in German grid
extendable
'foreign_network': set foreign lines and transformers extendable
'transformers': set all transformers extendable
'overlay_network': set all components of the 'scn_extension'
extendable
'storages': allow to install extendable storages
(unlimited in size) at each grid node in order to meet
the flexibility demand.
'network_preselection': set only preselected lines extendable,
method is chosen in function call
generator_noise : bool or int
State if you want to apply a small random noise to the marginal costs
of each generator in order to prevent an optima plateau. To reproduce
a noise, choose the same integer (seed number).
extra_functionality : dict or None
None,
Choose extra functionalities and their parameters for PyPSA-model.
Settings can be added in /tools/constraints.py.
Current options are:
'max_line_ext': float
Maximal share of network extension in p.u.
'min_renewable_share': float
Minimal share of renewable generation in p.u.
'cross_border_flow': array of two floats
Limit cross-border-flows between Germany and its neigbouring
countries, set values in p.u. of german loads in snapshots
for all countries
(positiv: export from Germany)
'cross_border_flows_per_country': dict of cntr and array of floats
Limit cross-border-flows between Germany and its neigbouring
countries, set values in p.u. of german loads in snapshots
for each country
(positiv: export from Germany)
'max_curtailment_per_gen': float
Limit curtailment of all wind and solar generators in Germany,
values set in p.u. of generation potential.
'max_curtailment_per_gen': float
Limit curtailment of each wind and solar generator in Germany,
values set in p.u. of generation potential.
'capacity_factor': dict of arrays
Limit overall energy production for each carrier,
set upper/lower limit in p.u.
'capacity_factor_per_gen': dict of arrays
Limit overall energy production for each generator by carrier,
set upper/lower limit in p.u.
'capacity_factor_per_cntr': dict of dict of arrays
Limit overall energy production country-wise for each carrier,
set upper/lower limit in p.u.
'capacity_factor_per_gen_cntr': dict of dict of arrays
Limit overall energy production country-wise for each generator
by carrier, set upper/lower limit in p.u.
network_clustering_kmeans : dict
{'active': True, 'n_clusters': 10, 'kmeans_busmap': False,
'line_length_factor': 1.25, 'remove_stubs': False,
'use_reduced_coordinates': False, 'bus_weight_tocsv': None,
'bus_weight_fromcsv': None, 'n_init': 10, 'max_iter': 300,
'tol': 1e-4, 'n_jobs': 1},
State if you want to apply a clustering of all network buses down to
only ``'n_clusters'`` buses. The weighting takes place considering
generation and load at each node.
With ``'kmeans_busmap'`` you can choose if you want to load cluster
coordinates from a previous run.
Option ``'remove_stubs'`` reduces the overestimating of line meshes.
The other options affect the kmeans algorithm and should only be
changed carefully, documentation and possible settings are described
in sklearn-package (sklearn/cluster/k_means_.py).
This function doesn't work together with ``'line_grouping = True'``.
network_clustering_ehv : bool
False,
Choose if you want to cluster the full HV/EHV dataset down to only the
EHV buses. In that case, all HV buses are assigned to their closest EHV
sub-station, taking into account the shortest distance on power lines.
snapshot_clustering : dict
{'active': False, 'method':'typical_periods', 'how': 'daily',
'storage_constraints': '', 'n_clusters': 5, 'n_segments': 5},
State if you want to apply a temporal clustering and run the optimization
only on a subset of snapshot periods.
You can choose between a method clustering to typical periods, e.g. days
or a method clustering to segments of adjacent hours.
With ``'how'``, ``'storage_constraints'`` and ``'n_clusters'`` you choose
the length of the periods, constraints considering the storages and the number
of clusters for the usage of the method typical_periods.
With ``'n_segments'`` you choose the number of segments for the usage of
the method segmentation.
branch_capacity_factor : dict
{'HV': 0.5, 'eHV' : 0.7},
Add a factor here if you want to globally change line capacities
(e.g. to "consider" an (n-1) criterion or for debugging purposes).
load_shedding : bool
False,
State here if you want to make use of the load shedding function which
is helpful when debugging: a very expensive generator is set to each
bus and meets the demand when regular
generators cannot do so.
foreign_lines : dict
        {'carrier':'AC', 'capacity': 'osmTGmod'}
Choose transmission technology and capacity of foreign lines:
'carrier': 'AC' or 'DC'
'capacity': 'osmTGmod', 'ntc_acer' or 'thermal_acer'
comments : str
None
Returns
-------
network : `pandas.DataFrame<dataframe>`
eTraGo result network based on `PyPSA network
<https://www.pypsa.org/doc/components.html#network>`_
"""
etrago = Etrago(args, json_path)
# import network from database
etrago.build_network_from_db()
etrago.network.lines.type = ''
etrago.network.lines.carrier.fillna('AC', inplace=True)
etrago.network.buses.v_mag_pu_set.fillna(1., inplace=True)
etrago.network.loads.sign = -1
etrago.network.links.capital_cost.fillna(0, inplace=True)
etrago.network.links.p_nom_min.fillna(0, inplace=True)
etrago.network.transformers.tap_ratio.fillna(1., inplace=True)
etrago.network.stores.e_nom_max.fillna(np.inf, inplace=True)
etrago.network.links.p_nom_max.fillna(np.inf, inplace=True)
etrago.network.links.efficiency.fillna(1., inplace=True)
etrago.network.links.marginal_cost.fillna(0., inplace=True)
etrago.network.links.p_min_pu.fillna(0., inplace=True)
etrago.network.links.p_max_pu.fillna(1., inplace=True)
etrago.network.links.p_nom.fillna(0.1, inplace=True)
etrago.network.storage_units.p_nom.fillna(0, inplace=True)
etrago.network.stores.e_nom.fillna(0, inplace=True)
etrago.network.stores.capital_cost.fillna(0, inplace=True)
etrago.network.stores.e_nom_max.fillna(np.inf, inplace=True)
etrago.network.storage_units.efficiency_dispatch.fillna(1., inplace=True)
etrago.network.storage_units.efficiency_store.fillna(1., inplace=True)
etrago.network.storage_units.capital_cost.fillna(0., inplace=True)
etrago.network.storage_units.p_nom_max.fillna(np.inf, inplace=True)
etrago.network.storage_units.standing_loss.fillna(0., inplace=True)
etrago.network.storage_units.lifetime = np.inf
etrago.network.lines.v_ang_min.fillna(0., inplace=True)
etrago.network.links.terrain_factor.fillna(1., inplace=True)
etrago.network.lines.v_ang_max.fillna(1., inplace=True)
etrago.adjust_network()
# Set marginal costs for gas feed-in
etrago.network.generators.marginal_cost[
etrago.network.generators.carrier=='CH4']+= 25.6+0.201*76.5
# ehv network clustering
etrago.ehv_clustering()
# k-mean clustering
etrago.kmean_clustering()
etrago.kmean_clustering_gas()
etrago.args['load_shedding']=True
etrago.load_shedding()
# skip snapshots
etrago.skip_snapshots()
# snapshot clustering
# needs to be adjusted for new sectors
etrago.snapshot_clustering()
# start linear optimal powerflow calculations
# needs to be adjusted for new sectors
etrago.lopf()
# TODO: check if should be combined with etrago.lopf()
# needs to be adjusted for new sectors
# etrago.pf_post_lopf()
# spatial disaggregation
# needs to be adjusted for new sectors
# etrago.disaggregation()
# calculate central etrago results
# needs to be adjusted for new sectors
# etrago.calc_results()
return etrago
if __name__ == '__main__':
    # Script entry point: run the complete eTraGo workflow using the
    # module-level ``args`` dict (json_path=None means no JSON override).
    # execute etrago function
    # The two timestamps bracket the run so the total runtime can be read
    # from the console output.
    print(datetime.datetime.now())
    etrago = run_etrago(args, json_path=None)
    print(datetime.datetime.now())
    # Release the database session once the workflow has finished.
    etrago.session.close()
    # plots
    # make a line loading plot
    # plot_line_loading(network)
    # plot stacked sum of nominal power for each generator type and timestep
    # plot_stacked_gen(network, resolution="MW")
    # plot to show extendable storages
    # storage_distribution(network)
    # extension_overlay_network(network)
|
openego/eTraGo
|
etrago/appl.py
|
Python
|
agpl-3.0
| 18,854
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Etalab-CKAN-Harvesters -- Harvesters for Etalab's CKAN
# By: Emmanuel Raviart <emmanuel@raviart.com>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/etalab-ckan-harvesters
#
# This file is part of Etalab-CKAN-Harvesters.
#
# Etalab-CKAN-Harvesters is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Etalab-CKAN-Harvesters is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Harvest "Le Grand Lyon" CSW repository.
http://catalogue.data.grandlyon.com/
See also: http://smartdata.grandlyon.com/
"""
import argparse
import ConfigParser
import logging
import os
import sys
from biryani1 import baseconv, custom_conv, states, strings
from lxml import etree
from owslib.csw import CatalogueServiceWeb, namespaces
import owslib.iso
from . import helpers
# Short program name (file name without extension), used as logger name.
app_name = os.path.splitext(os.path.basename(__file__))[0]
# Converter toolbox combining biryani's base converters with state handling.
conv = custom_conv(baseconv, states)
log = logging.getLogger(app_name)
# Merge OWSLib's ISO 19139 namespaces into the CSW namespace map so the
# XPath queries in main() can use both sets of prefixes.
# (``iteritems`` — this module targets Python 2.)
namespaces = namespaces.copy()
for key, value in owslib.iso.namespaces.iteritems():
    if key is not None:
        namespaces[key] = value
def main():
    """Harvest the Grand Lyon CSW catalogue and push it to CKAN.

    Reads the configuration file given on the command line, pages through
    the CSW records of catalogue.data.grandlyon.com, converts each record
    to a CKAN package and uploads it (unless --dry-run is given).
    Returns the process exit code (0 on success).
    """
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-d', '--dry-run', action = 'store_true',
        help = "simulate harvesting, don't update CKAN repository")
    parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
    # "here" enables %(here)s interpolation relative to the config file's
    # directory inside the INI file.
    config_parser = ConfigParser.SafeConfigParser(dict(
        here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
        ))
    config_parser.read(args.config)
    # Validate the [Etalab-CKAN-Harvesters] section; the CKAN API key, the
    # CKAN site URL and the HTTP User-Agent are all mandatory.
    conf = conv.check(conv.pipe(
        conv.test_isinstance(dict),
        conv.struct(
            {
                'ckan.api_key': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                'ckan.site_url': conv.pipe(
                    conv.make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
                        full = True),
                    conv.not_none,
                    ),
                'user_agent': conv.pipe(
                    conv.cleanup_line,
                    conv.not_none,
                    ),
                },
            default = 'drop',
            ),
        conv.not_none,
        ))(dict(config_parser.items('Etalab-CKAN-Harvesters')), conv.default_state)
    harvester = helpers.Harvester(
        supplier_abbreviation = u'gl',
        supplier_title = u"Grand Lyon",
        target_headers = {
            'Authorization': conf['ckan.api_key'],
            'User-Agent': conf['user_agent'],
            },
        target_site_url = conf['ckan.site_url'],
        )
    source_site_url = u'http://catalogue.data.grandlyon.com/geosource/srv/fr/csw'
    if not args.dry_run:
        harvester.retrieve_target()
    # Retrieve short infos of packages in source.
    # Page through the catalogue ``limit`` records at a time.  When a page
    # fails, fall back to fetching one record at a time so that only the
    # faulty record is skipped.
    # NOTE(review): the bare ``except`` below also swallows
    # KeyboardInterrupt/SystemExit — consider narrowing it.
    csw = CatalogueServiceWeb(source_site_url)
    bad_indexes = []
    index = 0
    limit = 50
    record_by_id = {}
    while True:
        try:
            csw.getrecords(maxrecords = limit, startposition = index)
        except:
            if limit == 1:
                # Bad record found. Skip it.
                bad_indexes.append(index)
                index += 1
                limit = 50
            else:
                # Retry one by one to find bad record and skip it.
                limit = 1
        else:
            for id, record in csw.records.iteritems():
                record_by_id[id] = record
            next_index = csw.results['nextrecord']
            if next_index <= index:
                break
            index = next_index
    # Retrieve packages from source.
    # The sets below only collect statistics that are logged at the end.
    formats = set()
    licenses_url = set()
    protocols = set()
    rights = set()
    temporals = set()
    types = set()
    for record_id in record_by_id.iterkeys():
        # Fetch both the Dublin Core and the ISO 19139 (gmd) views of the
        # record; the gmd view may be missing.
        csw.getrecordbyid(id = [record_id])
        dc_record = csw.records[record_id]
        csw.getrecordbyid(id = [record_id], outputschema = 'http://www.isotc211.org/2005/gmd')
        gmd_record = csw.records.get(record_id)
        format = dc_record.format
        if format is not None:
            # Keep only the bare format, dropping a parenthesized suffix.
            format = format.split(u' (', 1)[0]
            formats.add(format)
        copyright = dc_record.rights
        if copyright and isinstance(copyright, list):
            # Lists are unhashable; a tuple can go into the ``rights`` set
            # and be used as a dict key further below.
            copyright = tuple(copyright)
        rights.add(copyright)
        # Extract the update frequency from the ISO metadata, if any.
        if gmd_record is None:
            frequency = None
        else:
            for frequency_xml in etree.fromstring(gmd_record.xml).xpath('./gmd:identificationInfo'
                '/gmd:MD_DataIdentification/gmd:resourceMaintenance/gmd:MD_MaintenanceInformation'
                '/gmd:userDefinedMaintenanceFrequency/gts:TM_PeriodDuration',
                namespaces = namespaces):
                frequency = frequency_xml.text
                break
            else:
                frequency = None
            if frequency is not None:
                # NOTE(review): ``frequency_by_code`` is neither defined nor
                # imported in this module — this branch raises NameError if
                # ever reached.  Confirm where the mapping should come from.
                assert frequency in frequency_by_code, 'Unknown frequency: {}'.format(frequency)
                frequency = frequency_by_code[frequency]
        for uri in dc_record.uris:
            if uri['url'].startswith('http://opendata.data.grandlyon.com/Licence'):
                licenses_url.add(uri['url'])
            protocols.add(uri['protocol'])
        subjects = [
            subject
            for subject in dc_record.subjects
            if subject != 'OpenData'
            ]
        groups = [
            harvester.upsert_group(dict(
                title = subjects[0],
                )),
            ] if subjects else []
        groups.append(harvester.upsert_group(dict(
            title = u'Territoires et Transports',
            )))
        # NOTE(review): ``tags`` and ``groups`` are computed here but never
        # used below — the package dict rebuilds its own tags list.
        tags = [
            dict(name = strings.slugify(subject))
            for subject in subjects
            ]
        related = []
        # Build the resource list: from the Dublin Core URIs when there is
        # no ISO record, from the distribution "online" entries otherwise.
        if gmd_record is None:
            resources = [
                dict(
                    description = uri.get('description') or None,
                    format = {
                        'application/pdf': 'PDF',
                        'application/zip': 'ZIP',
                        'pdf': 'PDF',
                        'text/csv': 'CSV',
                        'text/plain': 'TXT',
                        }.get(format, format),
                    name = uri.get('name') or None,
                    url = uri['url'],
                    )
                for uri in dc_record.uris
                if uri.get('protocol') in ('WWW:DOWNLOAD-1.0-http--download', 'WWW:LINK-1.0-http--link')
                    and uri['url'].startswith('http://opendata.data.grandlyon.com/')
                    and uri['url'] != 'http://opendata.data.grandlyon.com/Licence_ODbL_Grand_Lyon.pdf'
                ]
        else:
            kml_resource = False
            resources = []
            for online in gmd_record.distribution.online:
                # Skip internal/licence links.
                if online.url.startswith((
                    'http://catalogue.data.grandlyon.com/geosource/srv/en/resources.get?id=',
                    'file:',
                    'jdbc:',
                    )) \
                        or online.url == 'http://opendata.data.grandlyon.com/Licence_ODbL_Grand_Lyon.pdf':
                    continue
                if online.protocol == 'OGC:WFS':
                    # Add one derived KML resource per package (at most) and
                    # a GML GetFeature URL for bare WFS endpoints.
                    if not kml_resource:
                        resources.append(dict(
                            description = online.description or None,
                            format = 'KML',
                            name = online.name or None,
                            url = 'http://kml.data.grandlyon.com/grandlyon/?request=list&typename={}'.format(
                                online.name),
                            ))
                        kml_resource = True
                    if '?' not in online.url:
                        resources.append(dict(
                            description = online.description or None,
                            format = 'GML',
                            name = online.name or None,
                            url = u'{}?SERVICE={}&REQUEST=GetFeature&VERSION=1.1.0&typename={}'.format(online.url,
                                online.protocol.split(':', 1)[1], online.name),
                            ))
                elif online.protocol == 'OGC:WMS':
                    # Use a WMS GetMap request as the package's thumbnail.
                    if '?' not in online.url:
                        bounding_box = gmd_record.identification.extent.boundingBox
                        related.append(dict(
                            image_url = u'{}?SERVICE={}&REQUEST=GetMap&VERSION=1.1.1&LAYERS={}&FORMAT=image/png'
                                u'&SRS=EPSG:4326&BBOX={},{},{},{}&WIDTH=400&HEIGHT=300'.format(online.url,
                                online.protocol.split(':', 1)[1], online.name, bounding_box.minx, bounding_box.miny,
                                bounding_box.maxx, bounding_box.maxy),
                            title = u'Vignette',
                            type = u'visualization',
                            # url = None,
                            ))
                resources.append(dict(
                    description = online.description or None,
                    format = {
                        'DB:POSTGIS': 'POSTGIS',
                        'FILE:RASTER': 'RASTER',
                        'OGC:WCS': 'WCS',
                        'OGC:WFS': 'WFS',
                        'OGC:WMS': 'WMS',
                        'WWW:DOWNLOAD-1.0-http--download': None,
                        'WWW:LINK-1.0-http--link': None,
                        }[online.protocol],
                    name = online.name or None,
                    url = online.url,
                    ))
        temporals.add(dc_record.temporal)
        types.add(dc_record.type)
        if args.dry_run:
            log.info(u'Harvested package: {}'.format(dc_record.title))
        else:
            # Assemble the CKAN package and upload it.
            package = dict(
                frequency = {
                    'P0Y0M0DT0H1M0S': u"ponctuelle",
                    }.get(frequency),
                license_id = {
                    'copyright': None,
                    ('Licence ODbL GRAND LYON', u"Pas de restriction d'accès public"): u'odc-odbl',
                    'license': None,
                    }.get(copyright),
                notes = u'\n\n'.join(
                    fragment
                    for fragment in (
                        dc_record.abstract,
                        dc_record.source,
                        )
                    if fragment
                    ),
                resources = resources,
                tags = [
                    dict(name = strings.slugify(subject))
                    for subject in dc_record.subjects
                    ],
                # territorial_coverage = TODO
                title = dc_record.title,
                # TODO: Use this URL once Grand Lyon is ready to use it. Before end of year.
                # url = u'http://smartdata.grandlyon.com/single/{}'.format(record_id),
                url = u'http://smartdata.grandlyon.com/',
                )
            # if gmd_record is not None:
            #     for graphic_filename_xml in etree.fromstring(gmd_record.xml).xpath('./gmd:identificationInfo'
            #             '/gmd:MD_DataIdentification/gmd:graphicOverview'
            #             '/gmd:MD_BrowseGraphic[gmd:fileDescription/gco:CharacterString="large_thumbnail"]'
            #             '/gmd:fileName/gco:CharacterString',
            #             namespaces = namespaces):
            #         related.append(dict(
            #             image_url = urlparse.urljoin(base_url, unicode(graphic_filename_xml.text)),
            #             title = u'Vignette',
            #             type = u'visualization',
            #             # url = TODO,
            #             ))
            log.info(u'Harvested package: {}'.format(package['title']))
            harvester.add_package(package, harvester.supplier, dc_record.title, package['url'],
                related = related or None)
    if not args.dry_run:
        harvester.update_target()
    # Log the statistics gathered during the harvest.
    log.info(u'Formats: {}'.format(sorted(formats)))
    log.info(u'Licenses: {}'.format(sorted(licenses_url)))
    log.info(u'Protocols: {}'.format(sorted(protocols)))
    log.info(u'Rights: {}'.format(sorted(rights)))
    log.info(u'Temporals: {}'.format(sorted(temporals)))
    log.info(u'Types: {}'.format(sorted(types)))
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
etalab/etalab-ckan-harvesters
|
etalabckanharvesters/grandlyon.py
|
Python
|
agpl-3.0
| 13,313
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2015: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
#
#
# This file incorporates work covered by the following copyright and
# permission notice:
#
# Copyright (C) 2009-2014:
# Jean Gabes, naparuba@gmail.com
# Grégory Starck, g.starck@gmail.com
# Sebastien Coavoux, s.coavoux@free.fr
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import sys
import getopt
def main(argv):
    """Check that each module given with ``-m NAME`` can be imported.

    Prints "OK" or "KO" for every ``-m`` option and terminates the process
    via sys.exit with status 0 (all modules importable), 2 (at least one
    import failed) or 1 (invalid command line).
    """
    try:
        opts, _args = getopt.getopt(argv, "m:")
        ret = 0
        for opt, module_name in opts:
            if opt == "-m":
                try:
                    # Import by name instead of exec'ing a built-up
                    # "import ..." statement from user input.
                    __import__(module_name)
                    print("OK")
                except Exception:
                    # Any failure while importing the module counts as KO.
                    print("KO")
                    ret = 2
    except Exception:
        # getopt error (unknown option / missing argument) or other
        # unexpected failure.
        ret = 1
    sys.exit(ret)
if __name__ == "__main__":
    # Delegate to main(); it terminates the process via sys.exit().
    main(sys.argv[1:])
|
ddurieux/alignak
|
contrib/install.d/tools/checkmodule.py
|
Python
|
agpl-3.0
| 2,195
|
# -*- coding: utf-8 -*-
#
# Copyright 2016 Taŭga Tecnologia
# Aristides Caldeira <aristides.caldeira@tauga.com.br>
# License AGPL-3 or later (http://www.gnu.org/licenses/agpl)
#
from __future__ import division, print_function, unicode_literals
import logging
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models
from odoo.exceptions import ValidationError
from odoo.addons.l10n_br_base.models.sped_base import SpedBase
from ..constante_tributaria import (
FORMA_PAGAMENTO,
BANDEIRA_CARTAO,
INTEGRACAO_CARTAO,
INTEGRACAO_CARTAO_NAO_INTEGRADO,
FORMA_PAGAMENTO_CARTOES,
FORMA_PAGAMENTO_CARTAO_CREDITO,
FORMA_PAGAMENTO_CARTAO_DEBITO,
FORMA_PAGAMENTO_OUTROS,
FORMA_PAGAMENTO_DICT,
BANDEIRA_CARTAO_DICT,
)
from .sped_base import SpedBase
_logger = logging.getLogger(__name__)
try:
from pybrasil.valor.decimal import Decimal as D
from pybrasil.data import (
dia_util_pagamento,
DIA_SEGUNDA, DIA_TERCA, DIA_QUARTA, DIA_QUINTA, DIA_SEXTA,
primeiro_dia_mes, ultimo_dia_mes, dias_uteis,
)
except (ImportError, IOError) as err:
_logger.debug(err)
class AccountPaymentTerm(SpedBase, models.Model):
    """Payment term extended with Brazilian localization data.

    Extends the standard ``account.payment.term`` with:

    * generation of monthly instalments (optional entry payment, interest,
      business-day handling, fixed due day of the month);
    * a commercial label computed from the payment form, the term name and
      the amount passed in context (e.g. for quotations);
    * the payment-form fields required by the NF-e / SPED (forma de
      pagamento, card brand, card integration, card operator).
    """

    _name = b'account.payment.term'
    _inherit = ['account.payment.term']
    _rec_name = 'nome_comercial'
    _order = 'sequence, name'

    # Week days whose due dates may be configured to be avoided.
    DIAS_UTEIS = (
        (str(DIA_SEGUNDA), 'Segundas-feiras'),
        (str(DIA_TERCA), 'Terças-feiras'),
        (str(DIA_QUARTA), 'Quartas-feiras'),
        (str(DIA_QUINTA), 'Quintas-feiras'),
        (str(DIA_SEXTA), 'Sextas-feiras'),
    )
    # Postpone ('P') or anticipate ('A') a due date that falls on a
    # non-business day.
    ADIA_ANTECIPA_DIA_UTIL = (
        ('P', 'Adia'),
        ('A', 'Antecipa'),
    )
    ADIA_DIA_UTIL = 'P'
    ANTECIPA_DIA_UTIL = 'A'
    # Fixed due days of the month offered in the UI; the remaining days are
    # deliberately kept commented out.
    DIAS_MES = (
        ('1', '1º'),
        # ('2', '2'),
        # ('3', '3'),
        # ('4', '4'),
        ('5', '5'),
        # ('6', '6'),
        # ('7', '7'),
        # ('8', '8'),
        # ('9', '9'),
        ('10', '10'),
        # ('11', '11'),
        # ('12', '12'),
        # ('13', '13'),
        # ('14', '14'),
        ('15', '15'),
        # ('16', '16'),
        # ('17', '17'),
        # ('18', '18'),
        # ('19', '19'),
        ('20', '20'),
        # ('21', '21'),
        # ('22', '22'),
        # ('23', '23'),
        # ('24', '24'),
        ('25', '25'),
        # ('26', '26'),
        # ('27', '27'),
        # ('28', '28'),
        # ('29', '29'),
        ('30', '30'),
        # ('31', '31'),
    )
    # N-th business day of the month offered in the UI.
    DIAS_MES_UTIL = (
        ('1', '1º'),
        # ('2', '2º'),
        # ('3', '3º'),
        # ('4', '4º'),
        ('5', '5º'),
        # ('6', '6º'),
        # ('7', '7º'),
        # ('8', '8º'),
        # ('9', '9º'),
        ('10', '10º'),
        # ('11', '11º'),
        # ('12', '12º'),
        # ('13', '13º'),
        # ('14', '14º'),
        ('15', '15º'),
        # ('16', '16º'),
        # ('17', '17º'),
        # ('18', '18º'),
        # ('19', '19º'),
        ('20', '20º'),
    )

    sequence = fields.Integer(
        default=10,
    )
    # Commercial label shown to users; see _compute_nome_comercial.
    nome_comercial = fields.Char(
        string='Condição da pagamento',
        compute='_compute_nome_comercial',
    )
    em_parcelas_mensais = fields.Boolean(
        string='Em parcelas mensais?',
        default=False,  # NOTE: a True default makes the core test suite fail
    )

    #
    # Due-date configuration
    #
    meses = fields.Integer(
        string='Meses',
    )
    evitar_dia_semana = fields.Selection(
        selection=DIAS_UTEIS,
        string='Evitar vencimento às',
    )
    somente_dias_uteis = fields.Boolean(
        string='Somente dias úteis?',
    )
    antecipa_dia_util = fields.Selection(
        selection=ADIA_ANTECIPA_DIA_UTIL,
        string='Adia ou antecipa dia útil?',
        default=ADIA_DIA_UTIL,
    )
    todo_dia_mes = fields.Selection(
        selection=DIAS_MES,
        string='Vencimento todo dia',
    )
    todo_dia_mes_util = fields.Selection(
        selection=DIAS_MES_UTIL,
        string='Vencimento todo dia útil',
    )

    #
    # Instalment amount configuration
    #
    com_entrada = fields.Boolean(
        string='Com entrada?',
    )
    al_entrada = fields.Float(
        string='Percentual de entrada',
        currency_field='currency_aliquota_id',
    )
    com_juros = fields.Boolean(
        string='Com juros?',
    )
    al_juros = fields.Float(
        string='Percentual de juros',
        currency_field='currency_aliquota_id',
    )

    #
    # Fields for NF-e and SPED
    #
    forma_pagamento = fields.Selection(
        selection=FORMA_PAGAMENTO,
        string='Forma de pagamento',
        default=FORMA_PAGAMENTO_OUTROS,
    )
    bandeira_cartao = fields.Selection(
        selection=BANDEIRA_CARTAO,
        string='Bandeira do cartão',
    )
    integracao_cartao = fields.Selection(
        selection=INTEGRACAO_CARTAO,
        string='Integração do cartão',
        default=INTEGRACAO_CARTAO_NAO_INTEGRADO,
    )
    # NOTE(review): Many2one is declared without its comodel_name — Odoo
    # cannot load this field as-is; confirm the intended target model
    # (res.partner?) and add it.
    participante_id = fields.Many2one(
        string='Operadora do cartão',
        ondelete='restrict',
    )

    @api.multi
    def _compute_nome_comercial(self):
        """Build the commercial label of each payment term.

        Optional context keys: ``currency_id``, ``lang``, ``value`` (total
        amount used to show instalment values) and ``valor_entrada``
        (explicit entry amount).
        """
        if self.env.context.get('currency_id'):
            currency = self.env['res.currency'].browse(
                self.env.context['currency_id'])
        else:
            currency = self.env.user.company_id.currency_id
        if self.env.context.get('lang'):
            lang = self.env['res.lang']._lang_get(self.env.context.get('lang'))
        else:
            lang = self.env['res.lang']._lang_get('pt_BR')
        valor = D(self.env.context.get('value') or 0)
        for payment_term in self:
            nome_comercial = ''
            # Prefix with the payment form, e.g. "[Crédito VISA] ".
            if payment_term.forma_pagamento in FORMA_PAGAMENTO_CARTOES:
                if payment_term.forma_pagamento == \
                        FORMA_PAGAMENTO_CARTAO_CREDITO:
                    nome_comercial += '[Crédito '
                elif payment_term.forma_pagamento == \
                        FORMA_PAGAMENTO_CARTAO_DEBITO:
                    nome_comercial += '[Débito '
                nome_comercial += \
                    BANDEIRA_CARTAO_DICT[payment_term.bandeira_cartao]
                nome_comercial += '] '
            elif payment_term.forma_pagamento:
                nome_comercial += '['
                nome_comercial += \
                    FORMA_PAGAMENTO_DICT[payment_term.forma_pagamento]
                nome_comercial += '] '
            nome_comercial += payment_term.name
            # Without an amount (or without monthly instalments) only a
            # qualitative description can be produced.
            if valor <= 0 or not payment_term.em_parcelas_mensais:
                if payment_term.com_entrada:
                    nome_comercial += ' com entrada '
                if payment_term.com_juros and payment_term.al_juros:
                    nome_comercial += ', com juros de '
                    nome_comercial += lang.format('%.2f',
                                                  payment_term.al_juros,
                                                  True, True)
                    nome_comercial += '%'
                payment_term.nome_comercial = nome_comercial
                continue
            meses = D(payment_term.meses or 1)
            valor_entrada = D(0)
            if payment_term.com_entrada:
                if self.env.context.get('valor_entrada'):
                    # BUG FIX: the original read context['currency_id'] here,
                    # although the guard tests context['valor_entrada'].
                    valor_entrada = D(self.env.context['valor_entrada'] or 0)
                elif payment_term.al_entrada:
                    valor_entrada = \
                        valor * D(payment_term.al_entrada) / 100
                    valor_entrada = valor_entrada.quantize(D('0.01'))
                if valor_entrada > 0:
                    nome_comercial += ' com entrada de '
                    nome_comercial += currency.symbol
                    nome_comercial += ' '
                    nome_comercial += lang.format('%.2f', valor_entrada, True,
                                                  True)
            valor_parcela, diferenca = payment_term._calcula_valor_parcela(
                valor, meses, valor_entrada)
            if valor_parcela > 0:
                nome_comercial += ' de '
                nome_comercial += currency.symbol
                nome_comercial += ' '
                nome_comercial += lang.format('%.2f', valor_parcela, True,
                                              True)
            if payment_term.com_juros and payment_term.al_juros:
                nome_comercial += ', com juros de '
                nome_comercial += lang.format('%.2f', payment_term.al_juros,
                                              True, True)
                nome_comercial += '%'
            payment_term.nome_comercial = nome_comercial

    @api.onchange('meses', 'com_entrada')
    def _onchange_meses(self):
        """Clear the entry-payment flag when there is only one instalment.

        BUG FIX: the original was decorated ``@api.depends`` (a no-op on a
        non-compute method) and referenced an undefined local ``meses``
        (NameError); it now reads ``self.meses``.
        """
        res = {}
        valores = {}
        res['value'] = valores
        #
        # An entry payment makes no sense with a single instalment
        #
        if self.meses <= 1:
            valores['com_entrada'] = False
        return res

    def _verifica_dia_util(self, data):
        """Shift *data* to a valid business day for this payment term.

        Uses the company's state/city holiday calendar when available,
        postponing or anticipating as configured, and recursively skips the
        week day set in ``evitar_dia_semana``.
        """
        self.ensure_one()
        if self.somente_dias_uteis:
            if self.env.user.company_id.sped_empresa_id:
                empresa = self.env.user.company_id.sped_empresa_id
                if self.antecipa_dia_util == self.ANTECIPA_DIA_UTIL:
                    data = dia_util_pagamento(data, empresa.estado,
                                              empresa.cidade, antecipa=True)
                else:
                    data = dia_util_pagamento(data, empresa.estado,
                                              empresa.cidade)
            else:
                if self.antecipa_dia_util == self.ANTECIPA_DIA_UTIL:
                    data = dia_util_pagamento(data, antecipa=True)
                else:
                    data = dia_util_pagamento(data)
        if self.evitar_dia_semana and data.weekday() == \
                int(self.evitar_dia_semana):
            # Move past the avoided week day, then re-validate the new date.
            data += relativedelta(days=1)
            data = self._verifica_dia_util(data)
        return data

    def _verifica_dia_mes(self, data):
        """Move *data* to the configured fixed (business) day of its month."""
        self.ensure_one()
        if self.todo_dia_mes:
            data += relativedelta(day=int(self.todo_dia_mes))
        elif self.todo_dia_mes_util:
            if self.env.user.company_id.sped_empresa_id:
                empresa = self.env.user.company_id.sped_empresa_id
                dias = dias_uteis(primeiro_dia_mes(data), ultimo_dia_mes(data),
                                  empresa.estado, empresa.cidade)
            else:
                dias = dias_uteis(primeiro_dia_mes(data), ultimo_dia_mes(data))
            if int(self.todo_dia_mes_util) <= len(dias):
                data = dias[int(self.todo_dia_mes_util) - 1]
        return data

    def _calcula_valor_parcela(self, valor, meses, valor_entrada=0):
        """Return ``(valor_parcela, diferenca)`` for the given total.

        ``valor_parcela`` is the rounded instalment amount and ``diferenca``
        the rounding remainder (in cents) that the caller must add to one of
        the instalments.
        """
        self.ensure_one()
        # Defaults so the method never returns unbound names (the original
        # raised NameError for valor_entrada > 0 with meses <= 1).
        valor_parcela = D(0)
        diferenca = D(0)
        #
        # Interest handling (price-table amortization)
        #
        if self.com_juros and self.al_juros:
            al_juros = D(self.al_juros) / 100
            if valor_entrada > 0:
                if meses > 1:
                    fator_juros = 1 - ((1 + al_juros) ** ((meses - 1) * -1))
                    fator_juros /= al_juros
                    valor_parcela = (valor - valor_entrada) / fator_juros
            else:
                fator_juros = 1 - ((1 + al_juros) ** (meses * -1))
                fator_juros /= al_juros
                valor_parcela = valor / fator_juros
            valor_parcela = valor_parcela.quantize(D('0.01'))
            if valor_entrada > 0:
                valor = valor_entrada + (valor_parcela * (meses - 1))
            else:
                valor = valor_parcela * meses
        #
        # Final instalment amount and the rounding difference to adjust
        #
        if valor_entrada > 0:
            if meses > 1:
                valor_parcela = (valor - valor_entrada) / (meses - 1)
                valor_parcela = valor_parcela.quantize(D('0.01'))
                diferenca = valor - valor_entrada - \
                    (valor_parcela * (meses - 1))
        else:
            valor_parcela = valor / meses
            valor_parcela = valor_parcela.quantize(D('0.01'))
            diferenca = valor - (valor_parcela * meses)
        return valor_parcela, diferenca

    def compute(self, value, date_ref=False, entrada=0):
        """Compute the instalment dates and amounts for *value*.

        Returns ``[[(date_string, amount), ...]]`` — the result is wrapped
        in a single-element list to emulate the old ``api.one`` convention
        expected by callers.  Falls back to the standard behaviour when the
        term is not configured for monthly instalments.
        """
        self.ensure_one()
        if not self.em_parcelas_mensais:
            return super(AccountPaymentTerm, self).compute(value,
                                                           date_ref=date_ref)
        data_referencia = date_ref or fields.Date.today()
        valor = D(value)
        meses = D(self.meses or 1)
        res = []
        #
        # Entry-payment amount: explicit argument first, then context,
        # then the configured percentage.
        #
        valor_entrada = D(0)
        if self.com_entrada:
            if entrada:
                valor_entrada = D(entrada or 0)
            elif self.env.context.get('valor_entrada'):
                # BUG FIX: the original read context['currency_id'] here,
                # although the guard tests context['valor_entrada'].
                valor_entrada = D(self.env.context['valor_entrada'] or 0)
            elif self.al_entrada:
                valor_entrada = valor * D(self.al_entrada) / 100
                valor_entrada = valor_entrada.quantize(D('0.01'))
        valor_parcela, diferenca = \
            self._calcula_valor_parcela(valor, meses, valor_entrada)
        #
        # Generate each instalment's due date and amount
        #
        for i in range(int(meses)):  # int(): meses is a Decimal
            proxima_data = fields.Date.from_string(data_referencia)
            proxima_data += relativedelta(months=i + 1)
            proxima_data = self._verifica_dia_mes(proxima_data)
            proxima_data = self._verifica_dia_util(proxima_data)
            if valor_entrada > 0 and i == 0:
                parcela = [
                    fields.Date.to_string(proxima_data),
                    valor_entrada,
                ]
            else:
                parcela = [
                    fields.Date.to_string(proxima_data),
                    valor_parcela,
                ]
            # Fold the rounding difference into the first positive-diff
            # instalment, otherwise into the last one.
            if i == 0 and diferenca > 0:
                parcela[1] += diferenca
                diferenca = 0
            elif i + 1 == meses and diferenca != 0:
                parcela[1] += diferenca
            res.append(parcela)
        #
        # Emulate the api.one decorator's return value: wrap the result in
        # a single-element list
        #
        return [res]
|
odoo-brazil/l10n-brazil-wip
|
l10n_br_base/models/inherited_account_payment_term.py
|
Python
|
agpl-3.0
| 14,637
|
# -*- coding: utf-8 -*-
################################################################################
# header_operations expanded v.1.0.1 #
################################################################################
# TABLE OF CONTENTS
################################################################################
#
# [ Z00 ] Introduction and Credits.
# [ Z01 ] Operation Modifiers.
# [ Z02 ] Flow Control.
# [ Z03 ] Mathematical Operations.
# [ Z04 ] Script/Trigger Parameters and Results.
# [ Z05 ] Keyboard and Mouse Input.
# [ Z06 ] World Map.
# [ Z07 ] Game Settings.
# [ Z08 ] Factions.
# [ Z09 ] Parties and Party Templates.
# [ Z10 ] Troops.
# [ Z11 ] Quests.
# [ Z12 ] Items.
# [ Z13 ] Sounds and Music Tracks.
# [ Z14 ] Positions.
# [ Z15 ] Game Notes.
# [ Z16 ] Tableaus and Heraldics.
# [ Z17 ] String Operations.
# [ Z18 ] Output And Messages.
# [ Z19 ] Game Control: Screens, Menus, Dialogs and Encounters.
# [ Z20 ] Scenes and Missions.
# [ Z21 ] Scene Props and Prop Instances.
# [ Z22 ] Agents and Teams.
# [ Z23 ] Presentations.
# [ Z24 ] Multiplayer And Networking.
# [ Z25 ] Remaining Esoteric Stuff.
# [ Z26 ] Hardcoded Compiler-Related Code.
#
################################################################################
################################################################################
# [ Z00 ] INTRODUCTION AND CREDITS
################################################################################
# Everyone who has ever tried to mod Mount&Blade games knows perfectly well,
# that the documentation for its Module System is severely lacking. Warband
# Module System, while introducing many new and useful operations, did not
# improve considerably in the way of documentation. What's worse, a number of
# outright errors and inconsistencies appeared between what was documented in
# the comments to the header_operations.py file (which was the root source of
# all Warband scripting documentation, whether you like it or not), and what
# was actually implemented in the game engine.
# Sooner or later someone was bound to dedicate some time and effort to fix
# this problem by properly documenting the file. It just so happened that I
# was the first person crazy enough to accept the challenge.
# I have tried to make this file a self-sufficient source of information on
# every operation that the Warband scripting engine knows of. Naturally I
# failed - there are still many operations for which there is simply not
# enough information, or operations with effects that have not yet been
# thoroughly tested and confirmed. But as far as I know, there is currently
# no other reference more exhaustive than this. I tried to make the file
# useful to both seasoned scripters and complete newbies, and to a certain
# degree this file can even serve as a tutorial into Warband scripting -
# though it still won't replace the wealth of tutorials produced by the
# Warband modding community.
# I really hope you will find it useful as well.
# Alexander Lomski AKA Lav. Jan 18th, 2012.
# And the credits.
# First of all, I should credit Taleworlds for the creation of this game and
# its Module System. Without them, I wouldn't be able to work on this file
# so even though I'm often sceptical about their programming style and quality
# of their code, they still did a damn good job delivering this game to all
# of us.
# And then I should credit many members from the Warband modding community
# who have shared their knowledge and helped me clear out many uncertainties
# and inconsistencies. Special credits (in no particular order) go to
# cmpxchg8b, Caba'drin, SonKidd, MadVader, dunde, Ikaguia, MadocComadrin,
# Cjkjvfnby, shokkueibu.
################################################################################
# [ Z01 ] OPERATION MODIFIERS
################################################################################
# Modifiers are bit flags OR-ed onto a conditional operation's opcode;
# they are not standalone operations.
neg = 0x80000000 # (neg|<operation_name>, ...),
# Used in combination with conditional operations to invert their results.
this_or_next = 0x40000000 # (this_or_next|<operation_name>, ...),
# Used in combination with conditional operations to group them into OR blocks.
################################################################################
# [ Z02 ] FLOW CONTROL
################################################################################
call_script = 1 # (call_script, <script_id>, [<script_param>...]),
# Calls specified script with or without parameters.
try_begin = 4 # (try_begin),
# Opens a conditional block.
else_try = 5 # (else_try),
# If conditional operations in the conditional block fail, this block of code will be executed.
else_try_begin = 5 # (else_try_begin),
# Deprecated form of (else_try).
try_end = 3 # (try_end),
# Concludes a conditional block or a cycle.
end_try = 3 # (end_try),
# Deprecated form of (try_end).
try_for_range = 6 # (try_for_range, <destination>, <lower_bound>, <upper_bound>),
# Runs a cycle, iterating the value in the <lower_bound>..<upper_bound>-1 range.
try_for_range_backwards = 7 # (try_for_range_backwards, <destination>, <lower_bound>, <upper_bound>),
# Same as above, but iterates the value in the opposite direction (from higher values to lower).
try_for_parties = 11 # (try_for_parties, <destination>),
# Runs a cycle, iterating all parties on the map.
try_for_agents = 12 # (try_for_agents, <destination>),
# Runs a cycle, iterating all agents on the scene.
try_for_prop_instances = 16 # (try_for_prop_instances, <destination>, [<scene_prop_id>]),
# Version 1.161+. Runs a cycle, iterating all scene prop instances on the scene, or all scene prop instances of specific type if optional parameter is provided.
try_for_players = 17 # (try_for_players, <destination>, [skip_server]),
# Version 1.165+. Iterates through all players in a multiplayer game. Set optional parameter to 1 to skip server player entry.
################################################################################
# [ Z03 ] MATHEMATICAL OPERATIONS
################################################################################
# Mathematical operations deal with numbers. Warband Module System can only
# deal with integers. Floating point numbers are emulated by the so-called
# "fixed point numbers". Wherever you encounter a fixed point parameter for
# some Module System operation, keep in mind that it is actually just a
# regular integer number, HOWEVER it is supposed to represent a floating
# point number equal to fixed_point_number / fixed_point_multiplier. As you
# might have guessed, to convert a floating point number to fixed point, you
# have to multiply it by fixed_point_multiplier. You can change the value of
# multiplier with the operation (set_fixed_point_multiplier), thus influencing
# the precision of all operations dealing with fixed point numbers.
# A notion very important for Warband modding is that you reference all
# Warband objects by their numeric values. In other words, you can do maths
# with your items, troops, agents, scenes, parties et cetera. This is used
# extensively in the code, so don't be surprised to see code looking like
# (store_add, ":value", "itm_pike", 4). This code is just calculating a
# reference to an item which is located 4 positions after "itm_pike" inside
# the module_items.py file.
# Conditional operations
gt = 32 # (gt, <value1>, <value2>),
# Checks that value1 > value2
ge = 30 # (ge, <value1>, <value2>),
# Checks that value1 >= value2
eq = 31 # (eq, <value1>, <value2>),
# Checks that value1 == value2
neq = neg|eq # (neq, <value1>, <value2>),
# Checks that value1 != value2 (derived: eq with the neg modifier).
le = neg|gt # (le, <value1>, <value2>),
# Checks that value1 <= value2 (derived: gt with the neg modifier).
lt = neg|ge # (lt, <value1>, <value2>),
# Checks that value1 < value2 (derived: ge with the neg modifier).
is_between = 33 # (is_between, <value>, <lower_bound>, <upper_bound>),
# Checks that lower_bound <= value < upper_bound
# Mathematical and assignment operations
assign = 2133 # (assign, <destination>, <value>),
# Directly assigns a value to a variable or register.
store_add = 2120 # (store_add, <destination>, <value>, <value>),
# Assigns <destination> := <value> + <value>
store_sub = 2121 # (store_sub, <destination>, <value>, <value>),
# Assigns <destination> := <value> - <value>
store_mul = 2122 # (store_mul, <destination>, <value>, <value>),
# Assigns <destination> := <value> * <value>
store_div = 2123 # (store_div, <destination>, <value>, <value>),
# Assigns <destination> := <value> / <value>
store_mod = 2119 # (store_mod, <destination>, <value>, <value>),
# Assigns <destination> := <value> MOD <value>
val_add = 2105 # (val_add, <destination>, <value>),
# Assigns <destination> := <destination> + <value>
val_sub = 2106 # (val_sub, <destination>, <value>),
# Assigns <destination> := <destination> - <value>
val_mul = 2107 # (val_mul, <destination>, <value>),
# Assigns <destination> := <destination> * <value>
val_div = 2108 # (val_div, <destination>, <value>),
# Assigns <destination> := <destination> / <value>
val_mod = 2109 # (val_mod, <destination>, <value>),
# Assigns <destination> := <destination> MOD <value>
val_min = 2110 # (val_min, <destination>, <value>),
# Assigns <destination> := MIN (<destination>, <value>)
val_max = 2111 # (val_max, <destination>, <value>),
# Assigns <destination> := MAX (<destination>, <value>)
val_clamp = 2112 # (val_clamp, <destination>, <lower_bound>, <upper_bound>),
# Enforces <destination> value to be within <lower_bound>..<upper_bound>-1 range.
val_abs = 2113 # (val_abs, <destination>),
# Assigns <destination> := ABS (<destination>)
store_or = 2116 # (store_or, <destination>, <value>, <value>),
# Binary OR
store_and = 2117 # (store_and, <destination>, <value>, <value>),
# Binary AND
val_or = 2114 # (val_or, <destination>, <value>),
# Binary OR, overwriting first operand.
val_and = 2115 # (val_and, <destination>, <value>),
# Binary AND, overwriting first operand.
val_lshift = 2100 # (val_lshift, <destination>, <value>),
# Bitwise shift left (dest = dest * 2 ^ value)
val_rshift = 2101 # (val_rshift, <destination>, <value>),
# Bitwise shift right (dest = dest / 2 ^ value)
store_sqrt = 2125 # (store_sqrt, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := SQRT (value)
store_pow = 2126 # (store_pow, <destination_fixed_point>, <value_fixed_point>, <power_fixed_point>),
# Assigns dest := value ^ power
store_sin = 2127 # (store_sin, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := SIN (value)
store_cos = 2128 # (store_cos, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := COS (value)
store_tan = 2129 # (store_tan, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := TAN (value)
store_asin = 2140 # (store_asin, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := ARCSIN (value)
store_acos = 2141 # (store_acos, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := ARCCOS (value)
store_atan = 2142 # (store_atan, <destination_fixed_point>, <value_fixed_point>),
# Assigns dest := ARCTAN (value)
store_atan2 = 2143 # (store_atan2, <destination_fixed_point>, <y_fixed_point>, <x_fixed_point>),
# Returns the angle between the x axis and a point with coordinates (X,Y) in degrees. Note the angle is calculated counter-clockwise, i.e. (1,1) will return 45, not -45.
# Random number generation
store_random = 2135 # (store_random, <destination>, <upper_range>),
# Stores a random value in the range of 0..<upper_range>-1. Deprecated, use (store_random_in_range) instead.
store_random_in_range = 2136 # (store_random_in_range, <destination>, <range_low>, <range_high>),
# Stores a random value in the range of <range_low>..<range_high>-1.
shuffle_range = 2134 # (shuffle_range, <reg_no>, <reg_no>),
# Randomly shuffles a range of registers, reordering the values contained in them. Commonly used for list randomization.
# Fixed point values handling
set_fixed_point_multiplier = 2124 # (set_fixed_point_multiplier, <value>),
# Affects all operations dealing with fixed point numbers. Default value is 1.
convert_to_fixed_point = 2130 # (convert_to_fixed_point, <destination_fixed_point>),
# Converts integer value to fixed point (multiplies by the fixed point multiplier).
convert_from_fixed_point = 2131 # (convert_from_fixed_point, <destination>),
# Converts fixed point value to integer (divides by the fixed point multiplier).
################################################################################
# [ Z04 ] SCRIPT/TRIGGER PARAMETERS AND RESULTS
################################################################################
# Many scripts can accept additional parameters, and many triggers have some
# parameters of their own (as detailed in the header_triggers.py file). You can
# only pass numeric values as parameters. Since string constants are also
# Warband objects, you can pass them as well, and you can also pass string
# or position registers. However you cannot pass quick strings (string
# defined directly in the code).
# You can declare your scripts with as many parameters as you wish. Triggers,
# however, are always called with their predefined parameters. Also the game
# engine does not support more than 3 parameters per trigger. As a result,
# some triggers receive extra information which could not be fit into those
# three parameters in numeric, string or position registers.
# Some triggers and scripts called from the game engine (those have names
# starting with "game_") expect you to return some value to the game engine.
# That value may be either a number or a string and is set by special
# operations listed below. Scripts called from the Module System, however,
# typically use registers to store their return data.
# Note that if you call a script from a trigger, you can still use operations
# to retrieve trigger's calling parameters, and they will retrieve values that
# have been passed to the trigger, not values that have been passed to the
# script.
store_script_param_1 = 21 # (store_script_param_1, <destination>),
# Retrieve the value of the first script parameter.
store_script_param_2 = 22 # (store_script_param_2, <destination>),
# Retrieve the value of the second script parameter.
store_script_param = 23 # (store_script_param, <destination>, <script_param_index>),
# Retrieve the value of an arbitrary script parameter (generally used when the script accepts more than two). Parameters are enumerated starting from 1.
set_result_string = 60 # (set_result_string, <string>),
# Sets the return value of a game_* script, when a string value is expected by game engine.
store_trigger_param_1 = 2071 # (store_trigger_param_1, <destination>),
# Retrieve the value of the first trigger parameter. Will retrieve trigger's parameters even when called from inside a script, for as long as that script is running within trigger context.
store_trigger_param_2 = 2072 # (store_trigger_param_2, <destination>),
# Retrieve the value of the second trigger parameter. Will retrieve trigger's parameters even when called from inside a script, for as long as that script is running within trigger context.
store_trigger_param_3 = 2073 # (store_trigger_param_3, <destination>),
# Retrieve the value of the third trigger parameter. Will retrieve trigger's parameters even when called from inside a script, for as long as that script is running within trigger context.
store_trigger_param = 2070 # (store_trigger_param, <destination>, <trigger_param_no>),
# Version 1.153+. Retrieve the value of an arbitrary trigger parameter. Parameters are enumerated starting from 1. Note that despite the introduction of this operation, there's not a single trigger with more than 3 parameters.
get_trigger_object_position = 702 # (get_trigger_object_position, <position>),
# Retrieve the position of an object which caused the trigger to fire (when appropriate).
set_trigger_result = 2075 # (set_trigger_result, <value>),
# Sets the return value of a trigger or game_* script, when an integer value is expected by game engine.
################################################################################
# [ Z05 ] KEYBOARD AND MOUSE INPUT
################################################################################
# The game provides modders with limited ability to control keyboard input and
# mouse movements. It is also possible to tamper with game keys (i.e. keys
# bound to specific game actions), including the ability to override game's
# reaction to those keys. Note that mouse buttons are keys too, and can be
# detected with the corresponding operations.
# Conditional operations
key_is_down = 70 # (key_is_down, <key_code>),
# Checks that the specified key is currently pressed. See header_triggers.py for key code reference.
key_clicked = 71 # (key_clicked, <key_code>),
# Checks that the specified key has just been pressed. See header_triggers.py for key code reference.
game_key_is_down = 72 # (game_key_is_down, <game_key_code>),
# Checks that the specified game key is currently pressed. See header_triggers.py for game key code reference.
game_key_clicked = 73 # (game_key_clicked, <game_key_code>),
# Checks that the specified game key has just been pressed. See header_triggers.py for game key code reference.
# Generic operations
omit_key_once = 77 # (omit_key_once, <key_code>),
# Forces the game to ignore default bound action for the specified game key on current game frame.
clear_omitted_keys = 78 # (clear_omitted_keys),
# Commonly called when exiting from a presentation which made any calls to (omit_key_once). However the effects of those calls disappear by the next frame, so apparently usage of this operation is not necessary. It is still recommended to be on the safe side though.
mouse_get_position = 75 # (mouse_get_position, <position>),
# Stores mouse x and y coordinates in the specified position.
################################################################################
# [ Z06 ] WORLD MAP
################################################################################
# Generally, all operations which only make sense on the worldmap and have no
# specific category have been assembled here. These mostly deal with weather,
# time and resting.
# Conditional operations
is_currently_night = 2273 # (is_currently_night),
# Checks that it's currently night in the game.
map_free = 37 # (map_free),
# Checks that the player is currently on the global map and no game screens are open.
# Weather-handling operations
get_global_cloud_amount = 90 # (get_global_cloud_amount, <destination>),
# Returns current cloudiness (a value between 0..100).
set_global_cloud_amount = 91 # (set_global_cloud_amount, <value>),
# Sets current cloudiness (value is clamped to 0..100).
get_global_haze_amount = 92 # (get_global_haze_amount, <destination>),
# Returns current fogginess (value between 0..100).
set_global_haze_amount = 93 # (set_global_haze_amount, <value>),
# Sets current fogginess (value is clamped to 0..100).
# Time-related operations
store_current_hours = 2270 # (store_current_hours, <destination>),
# Stores number of hours that have passed since beginning of the game. Commonly used to track time when accuracy up to hours is required.
store_time_of_day = 2271 # (store_time_of_day, <destination>),
# Stores current day hour (value in 0..24 range).
store_current_day = 2272 # (store_current_day, <destination>),
# Stores number of days that have passed since beginning of the game. Commonly used to track time when high accuracy is not required.
rest_for_hours = 1030 # (rest_for_hours, <rest_time_in_hours>, [time_speed_multiplier], [remain_attackable]),
# Forces the player party to rest for the specified number of hours. Time can be accelerated and player can be made immune or subject to attacks.
rest_for_hours_interactive = 1031 # (rest_for_hours_interactive, <rest_time_in_hours>, [time_speed_multiplier], [remain_attackable]),
# Forces the player party to rest for the specified number of hours. Player can break the rest at any moment. Time can be accelerated and player can be made immune or subject to attacks.
################################################################################
# [ Z07 ] GAME SETTINGS AND STATISTICS
################################################################################
# This group of operations allows you to retrieve some of the game settings
# as configured by the player on Options page, and change them as necessary
# (possibly forcing a certain level of difficulty on the player). Operations
# dealing with achievements (an interesting, but underdeveloped feature of
# Warband) are also placed in this category.
# Conditional operations
is_trial_version = 250 # (is_trial_version),
# Checks if the game is in trial mode (has not been purchased). Player cannot get higher than level 6 in this mode.
is_edit_mode_enabled = 255 # (is_edit_mode_enabled),
# Version 1.153+. Checks that Edit Mode is currently enabled in the game.
# Generic operations
get_operation_set_version = 55 # (get_operation_set_version, <destination>),
# Version 1.165+. Needs research. Apparently returns the current version of Module System operations set, allowing transparent support for multiple Warband engine versions.
set_player_troop = 47 # (set_player_troop, <troop_id>),
# Changes the troop player controls. Generally used in quick-battle scenarios to give player a predefined character.
show_object_details_overlay = 960 # (show_object_details_overlay, <value>),
# Turns various popup tooltips on (value = 1) and off (value = 0). This includes agent names and dropped item names during missions, item stats in inventory on mouse over, etc.
auto_save = 985 # (auto_save),
# Version 1.161+. Saves the game to the current save slot.
# Access to game options
options_get_damage_to_player = 260 # (options_get_damage_to_player, <destination>),
# 0 = 1/4, 1 = 1/2, 2 = 1/1
options_set_damage_to_player = 261 # (options_set_damage_to_player, <value>),
# 0 = 1/4, 1 = 1/2, 2 = 1/1
options_get_damage_to_friends = 262 # (options_get_damage_to_friends, <destination>),
# 0 = 1/2, 1 = 3/4, 2 = 1/1
options_set_damage_to_friends = 263 # (options_set_damage_to_friends, <value>),
# 0 = 1/2, 1 = 3/4, 2 = 1/1
options_get_combat_ai = 264 # (options_get_combat_ai, <destination>),
# 0 = good, 1 = average, 2 = poor
options_set_combat_ai = 265 # (options_set_combat_ai, <value>),
# 0 = good, 1 = average, 2 = poor
game_get_reduce_campaign_ai = 424 # (game_get_reduce_campaign_ai, <destination>),
# Deprecated operation. Use options_get_campaign_ai instead
options_get_campaign_ai = 266 # (options_get_campaign_ai, <destination>),
# 0 = good, 1 = average, 2 = poor
options_set_campaign_ai = 267 # (options_set_campaign_ai, <value>),
# 0 = good, 1 = average, 2 = poor
options_get_combat_speed = 268 # (options_get_combat_speed, <destination>),
# 0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
options_set_combat_speed = 269 # (options_set_combat_speed, <value>),
# 0 = slowest, 1 = slower, 2 = normal, 3 = faster, 4 = fastest
options_get_battle_size = 270 # (options_get_battle_size, <destination>),
# Version 1.161+. Retrieves current battle size slider value (in the range of 0..1000). Note that this is the slider value, not the battle size itself.
options_set_battle_size = 271 # (options_set_battle_size, <value>),
# Version 1.161+. Sets battle size slider to provided value (in the range of 0..1000). Note that this is the slider value, not the battle size itself.
get_average_game_difficulty = 990 # (get_average_game_difficulty, <destination>),
# Returns calculated game difficulty rating (as displayed on the Options page). Commonly used for score calculation when ending the game.
# Achievements and kill stats
get_achievement_stat = 370 # (get_achievement_stat, <destination>, <achievement_id>, <stat_index>),
# Retrieves the numeric value associated with an achievement. Used to keep track of player's results before finally unlocking it.
set_achievement_stat = 371 # (set_achievement_stat, <achievement_id>, <stat_index>, <value>),
# Sets the new value associated with an achievement. Used to keep track of player's results before finally unlocking it.
unlock_achievement = 372 # (unlock_achievement, <achievement_id>),
# Unlocks player's achievement. Apparently doesn't have any game effects.
get_player_agent_kill_count = 1701 # (get_player_agent_kill_count, <destination>, [get_wounded]),
# Retrieves the total number of enemies killed by the player. Call with non-zero <get_wounded> parameter to retrieve the total number of knocked down enemies.
get_player_agent_own_troop_kill_count = 1705 # (get_player_agent_own_troop_kill_count, <destination>, [get_wounded]),
# Retrieves the total number of allies killed by the player. Call with non-zero <get_wounded> parameter to retrieve the total number of knocked down allies.
################################################################################
# [ Z08 ] FACTIONS
################################################################################
# Despite the importance of factions to the game, there aren't that many
# actions to deal with them. Essentially, you can control colors and name of
# existing game factions, set or retrieve relations between them, and work
# with faction slots. There's also a number of operations which assign or
# retrieve the factional allegiance of other game objects, like parties and
# troops, but these have been placed in the respective sections of the file.
# Slot operations for factions
faction_set_slot = 502 # (faction_set_slot, <faction_id>, <slot_no>, <value>),
faction_get_slot = 522 # (faction_get_slot, <destination>, <faction_id>, <slot_no>),
faction_slot_eq = 542 # (faction_slot_eq, <faction_id>, <slot_no>, <value>),
faction_slot_ge = 562 # (faction_slot_ge, <faction_id>, <slot_no>, <value>),
# Generic operations
set_relation = 1270 # (set_relation, <faction_id_1>, <faction_id_2>, <value>),
# Sets relation between two factions. Relation is in the -100..100 range.
store_relation = 2190 # (store_relation, <destination>, <faction_id_1>, <faction_id_2>),
# Retrieves relation between two factions. Relation is in the -100..100 range.
faction_set_name = 1275 # (faction_set_name, <faction_id>, <string>),
# Sets the name of the faction. See also (str_store_faction_name) in String Operations.
faction_set_color = 1276 # (faction_set_color, <faction_id>, <color_code>),
# Sets the faction color. All parties and centers belonging to this faction will be displayed with this color on global map.
faction_get_color = 1277 # (faction_get_color, <destination>, <faction_id>),
# Gets the faction color value.
################################################################################
# [ Z09 ] PARTIES AND PARTY TEMPLATES
################################################################################
# Parties are extremely important element of single-player modding, because
# they are the only object which can be present on the world map. Each party
# is a semi-independent object with its own behavior. Note that you cannot
# control party's behavior directly, instead you can change various factors
# which affect party behavior (including party AI settings).
# There are two things of importance when dealing with parties. First, parties
# can be attached to each other, this allows you, for example, to stack a
# number of armies inside a single city. Second, parties may encounter each
# other. When two AI parties are in encounter, it usually means they are
# fighting. Player's encounter with an AI party is usually much more complex
# and may involve pretty much anything, which is why player's encounters are
# covered in a separate section of the file.
# Each party consists of troop stacks. Each troop stack is either a single
# hero (troop defined as tf_hero in module_troops.py file) or a number of
# regular troops (their number may vary from 1 and above). Each party has two
# sets of troop stacks: members (or companions) set of stacks, and prisoners
# set of stacks. Many operations will only affect members, others may only
# affect prisoners, and there are even operations to switch their roles.
# Another important concept is a party template. Its definition looks very
# similar to a party. Templates are used when there's a need to create a
# number of parties with similar set of members, parameters or flags. Also
# templates can be easily used to differentiate parties from each other,
# so they are akin to a "party_type" in the game.
# Note that parties are the only game object which is persistent (i.e. it
# will be saved to the savegame file and restored on load), has slots and
# can be created during runtime. This makes parties ideal candidates for
# dynamic information storage of unlimited volume, which the game otherwise
# lacks.
# Conditional operations
hero_can_join = 101 # (hero_can_join, [party_id]),
# Checks if party can accept one hero troop. Player's party is default value.
hero_can_join_as_prisoner = 102 # (hero_can_join_as_prisoner, [party_id]),
# Checks if party can accept one hero prisoner troop. Player's party is default value.
party_can_join = 103 # (party_can_join),
# During encounter dialog, checks if encountered party can join player's party.
party_can_join_as_prisoner = 104 # (party_can_join_as_prisoner),
# During encounter dialog, checks if encountered party can join player's party as prisoners.
troops_can_join = 105 # (troops_can_join, <value>),
# Checks if player party has enough space for provided number of troops.
troops_can_join_as_prisoner = 106 # (troops_can_join_as_prisoner, <value>),
# Checks if player party has enough space for provided number of prisoners.
party_can_join_party = 107 # (party_can_join_party, <joiner_party_id>, <host_party_id>, [flip_prisoners]),
# Checks if first party can join second party (enough space for both troops and prisoners). If flip_prisoners flag is 1, then members and prisoners in the joining party are flipped.
main_party_has_troop = 110 # (main_party_has_troop, <troop_id>),
# Checks if player party has specified troop.
party_is_in_town = 130 # (party_is_in_town, <party_id>, <town_party_id>),
# Checks that the party has successfully reached its destination (after being set to ai_bhvr_travel_to_party) and that its destination is actually the referenced town_party_id.
party_is_in_any_town = 131 # (party_is_in_any_town, <party_id>),
# Checks that the party has successfully reached its destination (after being set to ai_bhvr_travel_to_party).
party_is_active = 132 # (party_is_active, <party_id>),
# Checks that <party_id> is valid and not disabled.
# Slot operations for parties and party templates
# Standard slot access pattern used throughout this file: *_set_slot assigns a
# value, *_get_slot reads it, and *_slot_eq / *_slot_ge are conditional checks
# (equal / greater-or-equal).
party_template_set_slot = 504 # (party_template_set_slot, <party_template_id>, <slot_no>, <value>),
party_template_get_slot = 524 # (party_template_get_slot, <destination>, <party_template_id>, <slot_no>),
party_template_slot_eq = 544 # (party_template_slot_eq, <party_template_id>, <slot_no>, <value>),
party_template_slot_ge = 564 # (party_template_slot_ge, <party_template_id>, <slot_no>, <value>),
party_set_slot = 501 # (party_set_slot, <party_id>, <slot_no>, <value>),
party_get_slot = 521 # (party_get_slot, <destination>, <party_id>, <slot_no>),
party_slot_eq = 541 # (party_slot_eq, <party_id>, <slot_no>, <value>),
party_slot_ge = 561 # (party_slot_ge, <party_id>, <slot_no>, <value>),
# Generic operations
set_party_creation_random_limits = 1080 # (set_party_creation_random_limits, <min_value>, <max_value>),
# Affects party sizes spawned from templates. May be used to spawn larger parties when player is high level. Values should be in 0..100 range.
set_spawn_radius = 1103 # (set_spawn_radius, <value>),
# Sets radius for party spawning with subsequent <spawn_around_party> operations.
spawn_around_party = 1100 # (spawn_around_party, <party_id>, <party_template_id>),
# Creates a new party from a party template and puts its <party_id> into reg0.
disable_party = 1230 # (disable_party, <party_id>),
# Party disappears from the map. Note that (try_for_parties) will still iterate over disabled parties, so you need to make additional checks with (party_is_active).
enable_party = 1231 # (enable_party, <party_id>),
# Reactivates a previously disabled party.
remove_party = 1232 # (remove_party, <party_id>),
# Destroys a party completely. Should ONLY be used with dynamically spawned parties, as removing parties pre-defined in module_parties.py file will corrupt the savegame.
party_get_current_terrain = 1608 # (party_get_current_terrain, <destination>, <party_id>),
# Returns a value from header_terrain_types.py
party_relocate_near_party = 1623 # (party_relocate_near_party, <relocated_party_id>, <target_party_id>, <spawn_radius>),
# Teleports party into vicinity of another party.
party_get_position = 1625 # (party_get_position, <dest_position>, <party_id>),
# Stores current position of the party on world map.
party_set_position = 1626 # (party_set_position, <party_id>, <position>),
# Teleports party to a specified position on the world map.
set_camera_follow_party = 1021 # (set_camera_follow_party, <party_id>),
# Self-explanatory. Can be used on world map only. Commonly used to make camera follow a party which has captured player as prisoner.
party_attach_to_party = 1660 # (party_attach_to_party, <party_id>, <party_id_to_attach_to>),
# Attach a party to another one (like lord's army staying in a town/castle).
party_detach = 1661 # (party_detach, <party_id>),
# Remove a party from attachments and place it on the world map.
party_collect_attachments_to_party = 1662 # (party_collect_attachments_to_party, <source_party_id>, <collected_party_id>),
# Mostly used in various battle and AI calculations. Will create an aggregate party from all parties attached to the source party.
party_get_cur_town = 1665 # (party_get_cur_town, <destination>, <party_id>),
# When a party has reached its destination (using ai_bhvr_travel_to_party), this operation will retrieve the party_id of the destination party.
party_get_attached_to = 1694 # (party_get_attached_to, <destination>, <party_id>),
# Retrieves the party that the referenced party is attached to, if any.
party_get_num_attached_parties = 1695 # (party_get_num_attached_parties, <destination>, <party_id>),
# Retrieves total number of parties attached to referenced party.
party_get_attached_party_with_rank = 1696 # (party_get_attached_party_with_rank, <destination>, <party_id>, <attached_party_index>),
# Extract party_id of a specified party among attached.
party_set_name = 1669 # (party_set_name, <party_id>, <string>),
# Sets party name (will be displayed as label and/or in the party details popup).
party_set_extra_text = 1605 # (party_set_extra_text, <party_id>, <string>),
# Allows to put extra text in party details popup. Used in Native to set status for villages or towns (being raided, razed, under siege...).
party_get_icon = 1681 # (party_get_icon, <destination>, <party_id>),
# Retrieve map icon used for the party.
party_set_icon = 1676 # (party_set_icon, <party_id>, <map_icon_id>),
# Sets what map icon will be used for the party.
party_set_banner_icon = 1677 # (party_set_banner_icon, <party_id>, <map_icon_id>),
# Sets what map icon will be used as the party banner. Use 0 to remove banner from a party.
party_set_extra_icon = 1682 # (party_set_extra_icon, <party_id>, <map_icon_id>, <vertical_offset_fixed_point>, <up_down_frequency_fixed_point>, <rotate_frequency_fixed_point>, <fade_in_out_frequency_fixed_point>),
# Adds or removes an extra map icon to a party, possibly with some animations. Use -1 as map_icon_id to remove extra icon.
party_add_particle_system = 1678 # (party_add_particle_system, <party_id>, <particle_system_id>),
# Appends some special visual effects to the party on the map. Used in Native to add fire and smoke over villages.
party_clear_particle_systems = 1679 # (party_clear_particle_systems, <party_id>),
# Removes all special visual effects from the party on the map.
context_menu_add_item = 980 # (context_menu_add_item, <string_id>, <value>),
# Must be called inside script_game_context_menu_get_buttons. Adds context menu option for a party and its respective identifier (will be passed to script_game_event_context_menu_button_clicked).
party_get_template_id = 1609 # (party_get_template_id, <destination>, <party_id>),
# Retrieves what party template was used to create the party (if any). Commonly used to identify encountered party type.
party_set_faction = 1620 # (party_set_faction, <party_id>, <faction_id>),
# Sets party faction allegiance. Party color is changed appropriately.
store_faction_of_party = 2204 # (store_faction_of_party, <destination>, <party_id>),
# Retrieves current faction allegiance of the party.
store_random_party_in_range = 2254 # (store_random_party_in_range, <destination>, <lower_bound>, <upper_bound>),
# Retrieves one random party from the range. Generally used only for predefined parties (towns, villages etc).
store01_random_parties_in_range = 2255 # (store01_random_parties_in_range, <lower_bound>, <upper_bound>),
# Stores two random, different parties in a range to reg0 and reg1. Generally used only for predefined parties (towns, villages etc).
store_distance_to_party_from_party = 2281 # (store_distance_to_party_from_party, <destination>, <party_id_1>, <party_id_2>),
# Retrieves distance between two parties on the global map.
store_num_parties_of_template = 2310 # (store_num_parties_of_template, <destination>, <party_template_id>),
# Stores number of active parties which were created using specified party template.
store_random_party_of_template = 2311 # (store_random_party_of_template, <destination>, <party_template_id>),
# Retrieves one random party which was created using specified party template. Fails if no party exists with provided template.
store_num_parties_created = 2300 # (store_num_parties_created, <destination>, <party_template_id>),
# Stores the total number of created parties of specified type. Not used in Native.
store_num_parties_destroyed = 2301 # (store_num_parties_destroyed, <destination>, <party_template_id>),
# Stores the total number of destroyed parties of specified type.
store_num_parties_destroyed_by_player = 2302 # (store_num_parties_destroyed_by_player, <destination>, <party_template_id>),
# Stores the total number of parties of specified type which have been destroyed by player.
party_get_morale = 1671 # (party_get_morale, <destination>, <party_id>),
# Returns a value in the range of 0..100. Party morale does not affect party behavior on the map, but will be taken in account if the party is engaged in battle (except auto-calc).
party_set_morale = 1672 # (party_set_morale, <party_id>, <value>),
# Value should be in the range of 0..100. Party morale does not affect party behavior on the map, but will be taken in account if the party is engaged in battle (except auto-calc).
# Party members manipulation
party_join = 1201 # (party_join),
# During encounter, joins encountered party to player's party
party_join_as_prisoner = 1202 # (party_join_as_prisoner),
# During encounter, joins encountered party to player's party as prisoners
troop_join = 1203 # (troop_join, <troop_id>),
# Specified hero joins player's party
troop_join_as_prisoner = 1204 # (troop_join_as_prisoner, <troop_id>),
# Specified hero joins player's party as prisoner
add_companion_party = 1233 # (add_companion_party, <troop_id_hero>),
# Creates a new empty party with specified hero as party leader and the only member. Party is spawned at the position of player's party.
party_add_members = 1610 # (party_add_members, <party_id>, <troop_id>, <number>),
# Returns total number of added troops in reg0.
party_add_prisoners = 1611 # (party_add_prisoners, <party_id>, <troop_id>, <number>),
# Returns total number of added prisoners in reg0.
party_add_leader = 1612 # (party_add_leader, <party_id>, <troop_id>, [number]),
# Adds troop(s) to the party and makes it party leader.
party_force_add_members = 1613 # (party_force_add_members, <party_id>, <troop_id>, <number>),
# Adds troops to party ignoring party size limits. Mostly used to add hero troops.
party_force_add_prisoners = 1614 # (party_force_add_prisoners, <party_id>, <troop_id>, <number>),
# Adds prisoners to party ignoring party size limits. Mostly used to add hero prisoners.
party_add_template = 1675 # (party_add_template, <party_id>, <party_template_id>, [reverse_prisoner_status]),
# Reinforces the party using the specified party template. Optional flag switches troop/prisoner status for reinforcements.
distribute_party_among_party_group = 1698 # (distribute_party_among_party_group, <party_to_be_distributed>, <group_root_party>),
# Distributes troops from first party among all parties attached to the second party. Commonly used to divide prisoners and rescued troops among NPC parties.
remove_member_from_party = 1210 # (remove_member_from_party, <troop_id>, [party_id]),
# Removes hero member from party. Player party is default value. Will display a message about companion leaving the party. Should not be used with regular troops (it will successfully remove one of them, but will produce some meaningless spam).
remove_regular_prisoners = 1211 # (remove_regular_prisoners, <party_id>),
# Removes all non-hero prisoners from the party.
remove_troops_from_companions = 1215 # (remove_troops_from_companions, <troop_id>, <value>),
# Removes troops from player's party, duplicating functionality of (party_remove_members) but providing less flexibility.
remove_troops_from_prisoners = 1216 # (remove_troops_from_prisoners, <troop_id>, <value>),
# Removes prisoners from player's party.
party_remove_members = 1615 # (party_remove_members, <party_id>, <troop_id>, <number>),
# Removes specified number of troops from a party. Stores number of actually removed troops in reg0.
party_remove_prisoners = 1616 # (party_remove_prisoners, <party_id>, <troop_id>, <number>),
# Removes specified number of prisoners from a party. Stores number of actually removed prisoners in reg0.
party_clear = 1617 # (party_clear, <party_id>),
# Removes all members and prisoners from the party.
add_gold_to_party = 1070 # (add_gold_to_party, <value>, <party_id>),
# Marks the party as carrying the specified amount of gold, which can be pillaged by player if he destroys it. Operation must not be used to give gold to player's party.
# Calculating party and stack sizes
party_get_num_companions = 1601 # (party_get_num_companions, <destination>, <party_id>),
# Returns total number of party members, including leader.
party_get_num_prisoners = 1602 # (party_get_num_prisoners, <destination>, <party_id>),
# Returns total number of party prisoners.
party_count_members_of_type = 1630 # (party_count_members_of_type, <destination>, <party_id>, <troop_id>),
# Returns total number of party members of specific type.
party_count_companions_of_type = 1631 # (party_count_companions_of_type, <destination>, <party_id>, <troop_id>),
# Duplicates (party_count_members_of_type).
party_count_prisoners_of_type = 1632 # (party_count_prisoners_of_type, <destination>, <party_id>, <troop_id>),
# Returns total number of prisoners of specific type.
party_get_free_companions_capacity = 1633 # (party_get_free_companions_capacity, <destination>, <party_id>),
# Calculates how many members can be added to the party.
party_get_free_prisoners_capacity = 1634 # (party_get_free_prisoners_capacity, <destination>, <party_id>),
# Calculates how many prisoners can be added to the party.
party_get_num_companion_stacks = 1650 # (party_get_num_companion_stacks, <destination>, <party_id>),
# Returns total number of troop stacks in the party (including player and heroes).
party_get_num_prisoner_stacks = 1651 # (party_get_num_prisoner_stacks, <destination>, <party_id>),
# Returns total number of prisoner stacks in the party (including any heroes).
party_stack_get_troop_id = 1652 # (party_stack_get_troop_id, <destination>, <party_id>, <stack_no>),
# Extracts troop type of the specified troop stack.
party_stack_get_size = 1653 # (party_stack_get_size, <destination>, <party_id>, <stack_no>),
# Extracts number of troops in the specified troop stack.
party_stack_get_num_wounded = 1654 # (party_stack_get_num_wounded, <destination>, <party_id>, <stack_no>),
# Extracts number of wounded troops in the specified troop stack.
party_stack_get_troop_dna = 1655 # (party_stack_get_troop_dna, <destination>, <party_id>, <stack_no>),
# Extracts DNA from the specified troop stack. Used to properly generate appearance in conversations.
party_prisoner_stack_get_troop_id = 1656 # (party_prisoner_stack_get_troop_id, <destination>, <party_id>, <stack_no>),
# Extracts troop type of the specified prisoner stack.
party_prisoner_stack_get_size = 1657 # (party_prisoner_stack_get_size, <destination>, <party_id>, <stack_no>),
# Extracts number of troops in the specified prisoner stack.
party_prisoner_stack_get_troop_dna = 1658 # (party_prisoner_stack_get_troop_dna, <destination>, <party_id>, <stack_no>),
# Extracts DNA from the specified prisoner stack. Used to properly generate appearance in conversations.
store_num_free_stacks = 2154 # (store_num_free_stacks, <destination>, <party_id>),
# Deprecated, as Warband no longer has limits on number of stacks in the party. Always returns 10.
store_num_free_prisoner_stacks = 2155 # (store_num_free_prisoner_stacks, <destination>, <party_id>),
# Deprecated, as Warband no longer has limits on number of stacks in the party. Always returns 10.
store_party_size = 2156 # (store_party_size, <destination>, [party_id]),
# Stores total party size (all members and prisoners).
store_party_size_wo_prisoners = 2157 # (store_party_size_wo_prisoners, <destination>, [party_id]),
# Stores total number of members in the party (without prisoners), duplicating (party_get_num_companions).
store_troop_kind_count = 2158 # (store_troop_kind_count, <destination>, <troop_type_id>),
# Counts number of troops of specified type in player's party. Deprecated, use party_count_members_of_type instead.
store_num_regular_prisoners = 2159 # (store_num_regular_prisoners, <destination>, <party_id>),
# Deprecated and does not work. Do not use.
store_troop_count_companions = 2160 # (store_troop_count_companions, <destination>, <troop_id>, [party_id]),
# Apparently deprecated, duplicates (party_get_num_companions). Not used in Native.
store_troop_count_prisoners = 2161 # (store_troop_count_prisoners, <destination>, <troop_id>, [party_id]),
# Apparently deprecated, duplicates (party_get_num_prisoners). Not used in Native.
# Party experience and skills
party_add_xp_to_stack = 1670 # (party_add_xp_to_stack, <party_id>, <stack_no>, <xp_amount>),
# Awards specified number of xp points to a single troop stack in the party.
party_upgrade_with_xp = 1673 # (party_upgrade_with_xp, <party_id>, <xp_amount>, <upgrade_path>), # upgrade_path can be: 0 = random, 1 = first, 2 = second
# Awards specified number of xp points to entire party (split between all stacks) and upgrades all eligible troops. Upgrade direction: (0 = random, 1 = first, 2 = second).
party_add_xp = 1674 # (party_add_xp, <party_id>, <xp_amount>),
# Awards specified number of xp points to entire party (split between all stacks).
party_get_skill_level = 1685 # (party_get_skill_level, <destination>, <party_id>, <skill_no>),
# Retrieves skill level for the specified party (usually max among the heroes). Makes a callback to (script_game_get_skill_modifier_for_troop).
# Combat related operations
heal_party = 1225 # (heal_party, <party_id>),
# Heals all wounded party members.
party_wound_members = 1618 # (party_wound_members, <party_id>, <troop_id>, <number>),
# Wounds a specified number of troops in the party.
party_remove_members_wounded_first = 1619 # (party_remove_members_wounded_first, <party_id>, <troop_id>, <number>),
# Removes a certain number of troops from the party, starting with wounded. Stores total number removed in reg0.
party_quick_attach_to_current_battle = 1663 # (party_quick_attach_to_current_battle, <party_id>, <side>),
# Adds any party into current encounter at specified side (0 = ally, 1 = enemy).
party_leave_cur_battle = 1666 # (party_leave_cur_battle, <party_id>),
# Forces the party to leave its current battle (if it's engaged).
party_set_next_battle_simulation_time = 1667 # (party_set_next_battle_simulation_time, <party_id>, <next_simulation_time_in_hours>),
# Defines the period of time (in hours) after which the battle must be simulated for the specified party for the next time. When a value <= 0 is passed, the combat simulation round is performed immediately.
party_get_battle_opponent = 1680 # (party_get_battle_opponent, <destination>, <party_id>),
# When a party is engaged in battle with another party, returns its opponent party. Otherwise returns -1.
inflict_casualties_to_party_group = 1697 # (inflict_casualties_to_party_group, <parent_party_id>, <damage_amount>, <party_id_to_add_casualties_to>),
# Delivers auto-calculated damage to the party (and all other parties attached to it). Killed troops are moved to another party to keep track of.
party_end_battle = 108 # (party_end_battle, <party_no>),
# Version 1.153+. UNTESTED. Supposedly ends the battle in which the party is currently participating.
# Party AI
party_set_marshall = 1604 # (party_set_marshall, <party_id>, <value>),
party_set_marshal = party_set_marshall # (party_set_marshal, <party_id>, <value>),
# Sets party as a marshall party or turns it back to normal party. Value is either 1 or 0. This affects party behavior, but exact effects are not known. Alternative operation name spelling added to enable compatibility with Viking Conquest DLC module system.
party_set_flags = 1603 # (party_set_flags, <party_id>, <flag>, <clear_or_set>),
# Sets (1) or clears (0) party flags in runtime. See header_parties.py for flags reference.
party_set_aggressiveness = 1606 # (party_set_aggressiveness, <party_id>, <number>),
# Sets aggressiveness value for the party (range 0..15).
party_set_courage = 1607 # (party_set_courage, <party_id>, <number>),
# Sets courage value for the party (range 4..15).
party_get_ai_initiative = 1638 # (party_get_ai_initiative, <destination>, <party_id>),
# Gets party current AI initiative value (range 0..100).
party_set_ai_initiative = 1639 # (party_set_ai_initiative, <party_id>, <value>),
# Sets AI initiative value for the party (range 0..100).
party_set_ai_behavior = 1640 # (party_set_ai_behavior, <party_id>, <ai_bhvr>),
# Sets AI behavior for the party. See header_parties.py for reference.
party_set_ai_object = 1641 # (party_set_ai_object, <party_id>, <object_party_id>),
# Sets another party as the object for current AI behavior (follow that party).
party_set_ai_target_position = 1642 # (party_set_ai_target_position, <party_id>, <position>),
# Sets a specific world map position as the object for current AI behavior (travel to that point).
party_set_ai_patrol_radius = 1643 # (party_set_ai_patrol_radius, <party_id>, <radius_in_km>),
# Sets a radius for AI patrolling behavior.
party_ignore_player = 1644 # (party_ignore_player, <party_id>, <duration_in_hours>),
# Makes AI party ignore player for the specified time.
party_set_bandit_attraction = 1645 # (party_set_bandit_attraction, <party_id>, <attraction>),
# Sets party attractiveness to parties with bandit behavior (range 0..100).
party_get_helpfulness = 1646 # (party_get_helpfulness, <destination>, <party_id>),
# Gets party current AI helpfulness value (range 0..100).
party_set_helpfulness = 1647 # (party_set_helpfulness, <party_id>, <number>),
# Sets AI helpfulness value for the party (range 0..10000, default 100).
get_party_ai_behavior = 2290 # (get_party_ai_behavior, <destination>, <party_id>),
# Retrieves current AI behavior pattern for the party.
get_party_ai_object = 2291 # (get_party_ai_object, <destination>, <party_id>),
# Retrieves what party is currently used as object for AI behavior.
party_get_ai_target_position = 2292 # (party_get_ai_target_position, <position>, <party_id>),
# Retrieves what position is currently used as object for AI behavior.
get_party_ai_current_behavior = 2293 # (get_party_ai_current_behavior, <destination>, <party_id>),
# Retrieves current AI behavior pattern when it was overridden by current situation (fleeing from enemy when en route to destination).
get_party_ai_current_object = 2294 # (get_party_ai_current_object, <destination>, <party_id>),
# Retrieves what party has caused temporary behavior switch.
party_set_ignore_with_player_party = 1648 # (party_set_ignore_with_player_party, <party_id>, <value>),
# Version 1.161+. Effects uncertain. 4research
party_get_ignore_with_player_party = 1649 # (party_get_ignore_with_player_party, <party_id>),
# Version 1.161+. Effects uncertain. Documented official syntax is suspicious and probably incorrect. 4research
################################################################################
# [ Z10 ] TROOPS
################################################################################
# What troops are.
# There are two major types of troops: heroes and regulars. They are treated
# very differently by the game, so it's important not to confuse them. At the
# same time, most Module System operations will not make any differentiation
# between hero and regular troops.
# First of all, hero troops do not stack. You cannot have a stack of heroes
# in a party, each hero will always occupy a separate troop slot. At the same
# time, you can put any number of regular troops into a single troop slot.
# Second, the way the game treats equipment of heroes and troops is also
# different. All heroes' items are treated in the same way as player's (no
# big surprise, since player is actually a hero troop himself). Meanwhile,
# items that the troop has are just suggestions for what this troop *might*
# take into battle. On the battlefield, each agent spawned from the regular
# troop, will only take a limited number of items from the inventory provided
# by the troop definition in module_troops.py. Choice is absolutely random and
# modder has only limited control over it through the use of guarantee flags.
# There's one more additional caveat: while you can easily change the outfit
# of a hero troop and your changes will persist through the game, the same
# also applies to regular troops - but by changing equipment of some regular
# troop, you are changing all instances of that troop throughout the entire
# game. In other words, you cannot re-equip a stack of regulars in a single
# party - your changes will affect all parties in the world.
# Third, while all heroes have a single predefined face code, which is used
# consistently through the game, troops have entire range of face codes. This
# range is used to randomize each agent's face within those constraints, so a
# group of 12 pikemen will not look like a bunch of clones.
# Fourth, hero troops can't be killed in battle. Every time hero's hit points
# are reduced to zero, hero is always knocked down. For regular troops, chance
# to be knocked down depends on a number of factors, but their default fate
# when driven to zero health is death.
# Conditional operations
troop_has_item_equipped = 151 # (troop_has_item_equipped, <troop_id>, <item_id>),
# Checks that the troop has this item equipped (worn or wielded).
troop_is_mounted = 152 # (troop_is_mounted, <troop_id>),
# Checks the troop for tf_mounted flag (see header_troops.py). Does NOT check that the troop has a horse.
troop_is_guarantee_ranged = 153 # (troop_is_guarantee_ranged, <troop_id>),
# Checks the troop for tf_guarantee_ranged flag (see header_troops.py). Does not check that troop actually has some ranged weapon.
troop_is_guarantee_horse = 154 # (troop_is_guarantee_horse, <troop_id>),
# Checks the troop for tf_guarantee_horse flag (see header_troops.py). Does not check that troop actually has some horse.
troop_is_hero = 1507 # (troop_is_hero, <troop_id>),
# Checks the troop for tf_hero flag (see header_troops.py). Hero troops are actual characters and do not stack in party window.
troop_is_wounded = 1508 # (troop_is_wounded, <troop_id>),
# Checks that the troop is wounded. Only works for hero troops.
player_has_item = 150 # (player_has_item, <item_id>),
# Checks that player has the specified item.
# Slot operations for troops
# Standard slot access pattern used throughout this file: *_set_slot assigns a
# value, *_get_slot reads it, and *_slot_eq / *_slot_ge are conditional checks
# (equal / greater-or-equal).
troop_set_slot = 500 # (troop_set_slot, <troop_id>, <slot_no>, <value>),
troop_get_slot = 520 # (troop_get_slot, <destination>, <troop_id>, <slot_no>),
troop_slot_eq = 540 # (troop_slot_eq, <troop_id>, <slot_no>, <value>),
troop_slot_ge = 560 # (troop_slot_ge, <troop_id>, <slot_no>, <value>),
# Troop attributes and skills
troop_set_type = 1505 # (troop_set_type, <troop_id>, <gender>),
# Changes the troop skin. There are two skins in Native: male and female, so in effect this operation sets troop gender. However mods may declare other skins.
troop_get_type = 1506 # (troop_get_type, <destination>, <troop_id>),
# Returns troop current skin (i.e. gender).
troop_set_class = 1517 # (troop_set_class, <troop_id>, <value>),
# Sets troop class (infantry, archers, cavalry or any of custom classes). Accepts values in range 0..8. See grc_* constants in header_mission_templates.py.
troop_get_class = 1516 # (troop_get_class, <destination>, <troop_id>),
# Retrieves troop class. Returns values in range 0..8.
class_set_name = 1837 # (class_set_name, <sub_class>, <string_id>),
# Sets a new name for troop class (aka "Infantry", "Cavalry", "Custom Group 3"...).
add_xp_to_troop = 1062 # (add_xp_to_troop, <value>, [troop_id]),
# Adds some xp points to troop. Only makes sense for player and hero troops. Default troop_id is player. Amount of xp can be negative.
add_xp_as_reward = 1064 # (add_xp_as_reward, <value>),
# Adds the specified amount of xp points to player. Typically used as a quest reward operation.
troop_get_xp = 1515 # (troop_get_xp, <destination>, <troop_id>),
# Retrieves total amount of xp specified troop has.
store_attribute_level = 2172 # (store_attribute_level, <destination>, <troop_id>, <attribute_id>),
# Stores current value of troop attribute. See ca_* constants in header_troops.py for reference.
troop_raise_attribute = 1520 # (troop_raise_attribute, <troop_id>, <attribute_id>, <value>),
# Increases troop attribute by the specified amount. See ca_* constants in header_troops.py for reference. Use negative values to reduce attributes. When used on non-hero troop, will affect all instances of that troop.
store_skill_level = 2170 # (store_skill_level, <destination>, <skill_id>, [troop_id]),
# Stores current value of troop skill. See header_skills.py for reference.
troop_raise_skill = 1521 # (troop_raise_skill, <troop_id>, <skill_id>, <value>),
# Increases troop skill by the specified value. Value can be negative. See header_skills.py for reference. When used on non-hero troop, will affect all instances of that troop.
store_proficiency_level = 2176 # (store_proficiency_level, <destination>, <troop_id>, <attribute_id>),
# Stores current value of troop weapon proficiency. See wpt_* constants in header_troops.py for reference.
troop_raise_proficiency = 1522 # (troop_raise_proficiency, <troop_id>, <proficiency_no>, <value>),
# Increases troop weapon proficiency by the specified value. Value can be negative. Increase is subject to limits defined by Weapon Master skill. When used on non-hero troop, will affect all instances of that troop.
troop_raise_proficiency_linear = 1523 # (troop_raise_proficiency_linear, <troop_id>, <proficiency_no>, <value>),
# Same as (troop_raise_proficiency), but does not take Weapon Master skill into account (i.e. can increase proficiencies indefinitely).
troop_add_proficiency_points = 1525 # (troop_add_proficiency_points, <troop_id>, <value>),
# Adds some proficiency points to a hero troop which can later be distributed by player.
store_troop_health = 2175 # (store_troop_health, <destination>, <troop_id>, [absolute]), # set absolute to 1 to get actual health; otherwise this will return percentage health in range (0-100)
# Retrieves current troop health. Use absolute = 1 to retrieve actual number of hp points left, use absolute = 0 to retrieve a value in 0..100 range (percentage).
troop_set_health = 1560 # (troop_set_health, <troop_id>, <relative health (0-100)>),
# Sets troop health. Accepts value in range 0..100 (percentage).
troop_get_upgrade_troop = 1561 # (troop_get_upgrade_troop, <destination>, <troop_id>, <upgrade_path>),
# Retrieves possible directions for non-hero troop upgrade. Use 0 to retrieve first upgrade path, and 1 to return second. Result of -1 means there's no such upgrade path for this troop.
store_character_level = 2171 # (store_character_level, <destination>, [troop_id]),
# Retrieves character level of the troop. Default troop is the player.
get_level_boundary = 991 # (get_level_boundary, <destination>, <level_no>),
# Returns the amount of experience points required to reach the specified level (will return 0 for 1st level). Maximum possible level in the game is 63.
add_gold_as_xp = 1063 # (add_gold_as_xp, <value>, [troop_id]), # Default troop is player
# Adds a certain amount of experience points, depending on the amount of gold specified. Conversion rate is unclear and apparently somewhat randomized (three runs with 1000 gold produced values 1091, 804 and 799).
# Troop equipment handling
troop_set_auto_equip = 1509 # (troop_set_auto_equip, <troop_id>, <value>),
# Sets (value = 1) or disables (value = 0) auto-equipping the troop with any items added to it's inventory or purchased. Similar to tf_is_merchant flag.
troop_ensure_inventory_space = 1510 # (troop_ensure_inventory_space, <troop_id>, <value>),
# Removes items from troop inventory until troop has specified number of free inventory slots. Will free inventory slots starting from the end (items at the bottom of inventory will be removed first if there's not enough free space).
troop_sort_inventory = 1511 # (troop_sort_inventory, <troop_id>),
# Sorts items in troop inventory by their price (expensive first).
troop_add_item = 1530 # (troop_add_item, <troop_id>, <item_id>, [modifier]),
# Adds an item to the troop, optionally with a modifier (see imod_* constants in header_item_modifiers.py).
troop_remove_item = 1531 # (troop_remove_item, <troop_id>, <item_id>),
# Removes an item from the troop equipment or inventory. Operation will remove first matching item it finds.
troop_clear_inventory = 1532 # (troop_clear_inventory, <troop_id>),
# Clears entire troop inventory. Does not affect equipped items.
troop_equip_items = 1533 # (troop_equip_items, <troop_id>),
# Makes the troop reconsider its equipment. If the troop has better gear in its inventory, it will equip it. Note this operation handles weapons poorly and may force the troop to equip itself with four two-handed swords.
troop_inventory_slot_set_item_amount = 1534 # (troop_inventory_slot_set_item_amount, <troop_id>, <inventory_slot_no>, <value>),
# Sets the stack size for a specified equipment or inventory slot. Only makes sense for items like ammo or food (which show stuff like "23/50" in inventory). Equipment slots are in range 0..9, see ek_* constants in header_items.py for reference.
troop_inventory_slot_get_item_amount = 1537 # (troop_inventory_slot_get_item_amount, <destination>, <troop_id>, <inventory_slot_no>),
# Retrieves the stack size for a specified equipment or inventory slot (if some Bread is 23/50, this operation will return 23).
troop_inventory_slot_get_item_max_amount = 1538 # (troop_inventory_slot_get_item_max_amount, <destination>, <troop_id>, <inventory_slot_no>),
# Retrieves the maximum possible stack size for a specified equipment or inventory slot (if some Bread is 23/50, this operation will return 50).
troop_add_items = 1535 # (troop_add_items, <troop_id>, <item_id>, <number>),
# Adds multiple items of specified type to the troop.
troop_remove_items = 1536 # (troop_remove_items, <troop_id>, <item_id>, <number>),
# Removes multiple items of specified type from the troop. Total price of actually removed items will be stored in reg0.
troop_loot_troop = 1539 # (troop_loot_troop, <target_troop>, <source_troop_id>, <probability>),
# Adds to target_troop's inventory some items from source_troop's equipment and inventory with some probability. Does not actually remove items from source_troop. Commonly used in Native to generate random loot after the battle.
troop_get_inventory_capacity = 1540 # (troop_get_inventory_capacity, <destination>, <troop_id>),
# Returns the total inventory capacity (number of inventory slots) for the specified troop. Note that this number will include equipment slots as well. Subtract num_equipment_kinds (see header_items.py) to get the number of actual *inventory* slots.
troop_get_inventory_slot = 1541 # (troop_get_inventory_slot, <destination>, <troop_id>, <inventory_slot_no>),
# Retrieves the item_id of a specified equipment or inventory slot. Returns -1 when there's nothing there.
troop_get_inventory_slot_modifier = 1542 # (troop_get_inventory_slot_modifier, <destination>, <troop_id>, <inventory_slot_no>),
# Retrieves the modifier value (see imod_* constants in header_items.py) for an item in the specified equipment or inventory slot. Returns 0 when there's nothing there, or if item does not have any modifiers.
troop_set_inventory_slot = 1543 # (troop_set_inventory_slot, <troop_id>, <inventory_slot_no>, <item_id>),
# Puts the specified item into troop's equipment or inventory slot. Be careful with setting equipment slots this way.
troop_set_inventory_slot_modifier = 1544 # (troop_set_inventory_slot_modifier, <troop_id>, <inventory_slot_no>, <imod_value>),
# Sets the modifier for the item in the troop's equipment or inventory slot. See imod_* constants in header_items.py for reference.
store_item_kind_count = 2165 # (store_item_kind_count, <destination>, <item_id>, [troop_id]),
# Calculates total number of items of specified type that the troop has. Default troop is player.
store_free_inventory_capacity = 2167 # (store_free_inventory_capacity, <destination>, [troop_id]),
# Calculates total number of free inventory slots that the troop has. Default troop is player.
# Merchandise handling
reset_price_rates = 1170 # (reset_price_rates),
# Resets customized price rates for merchants.
set_price_rate_for_item = 1171 # (set_price_rate_for_item, <item_id>, <value_percentage>),
# Sets individual price rate for a single item type. Normal price rate is 100. Deprecated, as Warband uses (game_get_item_[buy/sell]_price_factor) scripts instead.
set_price_rate_for_item_type = 1172 # (set_price_rate_for_item_type, <item_type_id>, <value_percentage>),
# Sets individual price rate for entire item class (see header_items.py for itp_type_* constants). Normal price rate is 100. Deprecated, as Warband uses (game_get_item_[buy/sell]_price_factor) scripts instead.
set_merchandise_modifier_quality = 1490 # (set_merchandise_modifier_quality, <value>),
# Affects the probability of items with quality modifiers appearing in merchandise. Value is percentage, standard value is 100.
set_merchandise_max_value = 1491 # (set_merchandise_max_value, <value>),
# Not used in Native. Apparently prevents items with price higher than listed from being generated as merchandise.
reset_item_probabilities = 1492 # (reset_item_probabilities, <value>),
# Sets all items probability of being generated as merchandise to the provided value. Use zero with subsequent calls to (set_item_probability_in_merchandise) to only allow generation of certain items.
set_item_probability_in_merchandise = 1493 # (set_item_probability_in_merchandise, <item_id>, <value>),
# Sets item probability of being generated as merchandise to the provided value.
troop_add_merchandise = 1512 # (troop_add_merchandise, <troop_id>, <item_type_id>, <value>),
# Adds a specified number of random items of certain type (see itp_type_* constants in header_items.py) to troop inventory. Only adds items with itp_merchandise flags.
troop_add_merchandise_with_faction = 1513 # (troop_add_merchandise_with_faction, <troop_id>, <faction_id>, <item_type_id>, <value>), #faction_id is given to check if troop is eligible to produce that item
# Same as (troop_add_merchandise), but with additional filter: only adds items which belong to specified faction, or without any factions at all.
# Miscellaneous troop information
troop_set_name = 1501 # (troop_set_name, <troop_id>, <string_no>),
# Renames the troop, setting a new singular name for it.
troop_set_plural_name = 1502 # (troop_set_plural_name, <troop_id>, <string_no>),
# Renames the troop, setting a new plural name for it.
troop_set_face_key_from_current_profile = 1503 # (troop_set_face_key_from_current_profile, <troop_id>),
# Forces the troop to adopt the face from player's currently selected multiplayer profile.
troop_add_gold = 1528 # (troop_add_gold, <troop_id>, <value>),
# Adds gold to troop. Generally used with player or hero troops.
troop_remove_gold = 1529 # (troop_remove_gold, <troop_id>, <value>),
# Removes gold from troop. Generally used with player or hero troops.
store_troop_gold = 2149 # (store_troop_gold, <destination>, <troop_id>),
# Retrieves total number of gold that the troop has.
troop_set_faction = 1550 # (troop_set_faction, <troop_id>, <faction_id>),
# Sets a new faction for the troop (mostly used to switch lords allegiances in Native).
store_troop_faction = 2173 # (store_troop_faction, <destination>, <troop_id>),
# Retrieves current troop faction allegiance.
store_faction_of_troop = 2173 # (store_faction_of_troop, <destination>, <troop_id>),
# Alternative spelling of the above operation (same opcode as store_troop_faction).
troop_set_age = 1555 # (troop_set_age, <troop_id>, <age_slider_pos>),
# Defines a new age for the troop (will be used by the game engine to generate appropriately aged face). Age is in range 0..100.
store_troop_value = 2231 # (store_troop_value, <destination>, <troop_id>),
# Stores some value which is apparently related to troop's overall fighting value. Swadian infantry line troops from Native produced values 24, 47, 80, 133, 188. Calling on player produced 0.
# Troop face code handling
str_store_player_face_keys = 2747 # (str_store_player_face_keys, <string_no>, <player_id>),
# Version 1.161+. Stores player's face keys into string register.
player_set_face_keys = 2748 # (player_set_face_keys, <player_id>, <string_no>),
# Version 1.161+. Sets player's face keys from string.
str_store_troop_face_keys = 2750 # (str_store_troop_face_keys, <string_no>, <troop_no>, [<alt>]),
# Version 1.161+. Stores specified troop's face keys into string register. Use optional <alt> parameter to determine what facekey set to retrieve: 0 for first and 1 for second.
troop_set_face_keys = 2751 # (troop_set_face_keys, <troop_no>, <string_no>, [<alt>]),
# Version 1.161+. Sets troop face keys from string. Use optional <alt> parameter to determine what face keys to update: 0 for first and 1 for second.
face_keys_get_hair = 2752 # (face_keys_get_hair, <destination>, <string_no>),
# Version 1.161+. Unpacks selected hair mesh from string containing troop/player face keys to <destination>.
face_keys_set_hair = 2753 # (face_keys_set_hair, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new hair value. Hair meshes associated with skin (as defined in module_skins) are numbered from 1. Use 0 for no hair.
face_keys_get_beard = 2754 # (face_keys_get_beard, <destination>, <string_no>),
# Version 1.161+. Unpacks selected beard mesh from string containing troop/player face keys to <destination>.
face_keys_set_beard = 2755 # (face_keys_set_beard, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new beard value. Beard meshes associated with skin (as defined in module_skins) are numbered from 1. Use 0 for no beard.
face_keys_get_face_texture = 2756 # (face_keys_get_face_texture, <destination>, <string_no>),
# Version 1.161+. Unpacks selected face texture from string containing troop/player face keys to <destination>.
face_keys_set_face_texture = 2757 # (face_keys_set_face_texture, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new face texture value. Face textures associated with skin (as defined in module_skins) are numbered from 0.
face_keys_get_hair_texture = 2758 # (face_keys_get_hair_texture, <destination>, <string_no>),
# Version 1.161+. Unpacks selected hair texture from string containing troop/player face keys to <destination>. Apparently hair textures have no effect; needs further research.
face_keys_set_hair_texture = 2759 # (face_keys_set_hair_texture, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new hair texture value. Doesn't seem to have an effect; needs further research.
face_keys_get_hair_color = 2760 # (face_keys_get_hair_color, <destination>, <string_no>),
# Version 1.161+. Unpacks hair color slider value from face keys string. Values are in the range of 0..63. Mapping to specific colors depends on the hair color range defined for currently selected skin / face_texture combination.
face_keys_set_hair_color = 2761 # (face_keys_set_hair_color, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new hair color slider value. Value should be in the 0..63 range.
face_keys_get_age = 2762 # (face_keys_get_age, <destination>, <string_no>),
# Version 1.161+. Unpacks age slider value from face keys string. Values are in the range of 0..63.
face_keys_set_age = 2763 # (face_keys_set_age, <string_no>, <value>),
# Version 1.161+. Updates face keys string with a new age slider value. Value should be in the 0..63 range.
face_keys_get_skin_color = 2764 # (face_keys_get_skin_color, <destination>, <string_no>),
# Version 1.161+. Apparently doesn't work. Should retrieve skin color value from face keys string into <destination>.
face_keys_set_skin_color = 2765 # (face_keys_set_skin_color, <string_no>, <value>),
# Version 1.161+. Apparently doesn't work. Should update face keys string with a new skin color value.
face_keys_get_morph_key = 2766 # (face_keys_get_morph_key, <destination>, <string_no>, <key_no>),
# Version 1.161+. Unpacks morph key value from face keys string. See morph key indices in module_skins.py file. Note that only 8 out of 27 morph keys are actually accessible (from 'chin_size' to 'cheeks'). Morph key values are in the 0..7 range.
face_keys_set_morph_key = 2767 # (face_keys_set_morph_key, <string_no>, <key_no>, <value>),
# Version 1.161+. Updates face keys string with a new morph key value. See morph key indices in module_skins.py file. Note that only 8 out of 27 morph keys are actually accessible (from 'chin_size' to 'cheeks'). Morph key values should be in the 0..7 range.
################################################################################
# [ Z11 ] QUESTS
################################################################################
# Quests are just that: some tasks that characters in the game world want the
# player to do. It's interesting to note that in Warband quests can have three
# possible outcomes: success, failure and conclusion. Generally the last
# option is used to indicate some "intermediate" quest result, which is
# neither a full success, nor a total failure.
# Conditional operations
check_quest_active = 200 # (check_quest_active, <quest_id>),
# Checks that the quest has been started but not yet cancelled or completed. Will not fail for concluded, failed or succeeded quests for as long as they have not yet been completed.
check_quest_finished = 201 # (check_quest_finished, <quest_id>),
# Checks that the quest has been completed (result does not matter) and not taken again yet.
check_quest_succeeded = 202 # (check_quest_succeeded, <quest_id>),
# Checks that the quest has succeeded and not taken again yet (check will be successful even after the quest is completed).
check_quest_failed = 203 # (check_quest_failed, <quest_id>),
# Checks that the quest has failed and not taken again yet (check will be successful even after the quest is completed).
check_quest_concluded = 204 # (check_quest_concluded, <quest_id>),
# Checks that the quest was concluded with any result and not taken again yet.
# Slot operations for quests
quest_set_slot = 506 # (quest_set_slot, <quest_id>, <slot_no>, <value>),
# Sets the value of the specified quest slot.
quest_get_slot = 526 # (quest_get_slot, <destination>, <quest_id>, <slot_no>),
# Retrieves the value of the specified quest slot.
quest_slot_eq = 546 # (quest_slot_eq, <quest_id>, <slot_no>, <value>),
# Conditional. Checks that the specified quest slot is equal to the given value.
quest_slot_ge = 566 # (quest_slot_ge, <quest_id>, <slot_no>, <value>),
# Conditional. Checks that the specified quest slot is greater than or equal to the given value.
# Quest management
start_quest = 1280 # (start_quest, <quest_id>, <giver_troop_id>),
# Starts the quest and marks giver_troop as the troop who gave it.
conclude_quest = 1286 # (conclude_quest, <quest_id>),
# Sets quest status as concluded but keeps it in the list. Frequently used to indicate "uncertain" quest status, when it's neither fully successful nor a total failure.
succeed_quest = 1282 # (succeed_quest, <quest_id>), #also concludes the quest
# Sets quest status as successful but keeps it in the list (player must visit quest giver to complete it before he can get another quest of the same type).
fail_quest = 1283 # (fail_quest, <quest_id>), #also concludes the quest
# Sets quest status as failed but keeps it in the list (player must visit quest giver to complete it before he can get another quest of the same type).
complete_quest = 1281 # (complete_quest, <quest_id>),
# Successfully completes specified quest, removing it from the list of active quests.
cancel_quest = 1284 # (cancel_quest, <quest_id>),
# Cancels specified quest without completing it, removing it from the list of active quests.
setup_quest_text = 1290 # (setup_quest_text, <quest_id>),
# Operation will refresh default quest description (as defined in module_quests.py). This is important when quest description contains references to variables and registers which need to be initialized with their current values.
store_partner_quest = 2240 # (store_partner_quest, <destination>),
# During conversation, if there's a quest given by the conversation partner, the operation will return its id.
setup_quest_giver = 1291 # (setup_quest_giver, <quest_id>, <string_id>),
# Apparently deprecated, as quest giver troop is now defined as a parameter of (start_quest).
store_random_quest_in_range = 2250 # (store_random_quest_in_range, <destination>, <lower_bound>, <upper_bound>),
# Apparently deprecated as the logic for picking a new quest has been moved to module_scripts.
set_quest_progression = 1285 # (set_quest_progression, <quest_id>, <value>),
# Deprecated and useless, operation has no game effects and it's impossible to retrieve quest progression status anyway.
store_random_troop_to_raise = 2251 # (store_random_troop_to_raise, <destination>, <lower_bound>, <upper_bound>),
# Apparently deprecated.
store_random_troop_to_capture = 2252 # (store_random_troop_to_capture, <destination>, <lower_bound>, <upper_bound>),
# Apparently deprecated.
store_quest_number = 2261 # (store_quest_number, <destination>, <quest_id>),
# Apparently deprecated.
store_quest_item = 2262 # (store_quest_item, <destination>, <item_id>),
# Apparently deprecated. Native now uses quest slots to keep track of this information.
store_quest_troop = 2263 # (store_quest_troop, <destination>, <troop_id>),
# Apparently deprecated. Native now uses quest slots to keep track of this information.
################################################################################
# [ Z12 ] ITEMS
################################################################################
# The title is a bit deceitful here. Items, despite the name, are not actual
# game items. Rather these are the *definitions* for real game items, and you
# can frequently see them referenced as "item types". However you should not
# confuse this with so called itp_type_* constants which define the major item
# classes existing in the game.
# Consider this: a Smoked Fish (50/50) in your character's inventory is an
# item in the game world. Its item type is "itm_smoked_fish" and its basic
# class is itp_type_food. So take care: operations in this section are dealing
# with "itm_smoked_fish", not with actual fish in your inventory. The latter
# is actually just an inventory slot from the Module System's point of view,
# and operations to work with it are in the troops section of the file.
# Conditional operations
item_has_property = 2723 # (item_has_property, <item_kind_no>, <property>),
# Version 1.161+. Check that the item has specified property flag set. See the list of itp_* flags in header_items.py.
item_has_capability = 2724 # (item_has_capability, <item_kind_no>, <capability>),
# Version 1.161+. Checks that the item has specified capability flag set. See the list of itcf_* flags in header_items.py
item_has_modifier = 2725 # (item_has_modifier, <item_kind_no>, <item_modifier_no>),
# Version 1.161+. Checks that the specified modifiers is valid for the item. See the list of imod_* values in header_item_modifiers.py.
item_has_faction = 2726 # (item_has_faction, <item_kind_no>, <faction_no>),
# Version 1.161+. Checks that the item is available for specified faction. Note that an item with no factions set is available to all factions.
# Item slot operations
item_set_slot = 507 # (item_set_slot, <item_id>, <slot_no>, <value>),
# Sets the value of the specified item slot.
item_get_slot = 527 # (item_get_slot, <destination>, <item_id>, <slot_no>),
# Retrieves the value of the specified item slot.
item_slot_eq = 547 # (item_slot_eq, <item_id>, <slot_no>, <value>),
# Conditional. Checks that the specified item slot is equal to the given value.
item_slot_ge = 567 # (item_slot_ge, <item_id>, <slot_no>, <value>),
# Conditional. Checks that the specified item slot is greater than or equal to the given value.
# Generic item operations
item_get_type = 1570 # (item_get_type, <destination>, <item_id>),
# Returns item class (see header_items.py for itp_type_* constants).
store_item_value = 2230 # (store_item_value, <destination>, <item_id>),
# Stores item nominal price as listed in module_items.py. Does not take item modifier or quantity (for food items) into account.
store_random_horse = 2257 # (store_random_horse, <destination>),
# Deprecated since early M&B days.
store_random_equipment = 2258 # (store_random_equipment, <destination>),
# Deprecated since early M&B days.
store_random_armor = 2259 # (store_random_armor, <destination>),
# Deprecated since early M&B days.
cur_item_add_mesh = 1964 # (cur_item_add_mesh, <mesh_name_string>, [<lod_begin>], [<lod_end>]),
# Version 1.161+. Only call inside ti_on_init_item trigger. Adds another mesh to item, allowing the creation of combined items. Parameter <mesh_name_string> should contain mesh name itself, NOT a mesh reference. LOD values are optional. If <lod_end> is used, it will not be loaded.
cur_item_set_material = 1978 # (cur_item_set_material, <string_no>, <sub_mesh_no>, [<lod_begin>], [<lod_end>]),
# Version 1.161+. Only call inside ti_on_init_item trigger. Replaces material that will be used to render the item mesh. Use 0 for <sub_mesh_no> to replace material for base mesh. LOD values are optional. If <lod_end> is used, it will not be loaded.
item_get_weight = 2700 # (item_get_weight, <destination_fixed_point>, <item_kind_no>),
# Version 1.161+. Retrieves item weight as a fixed point value.
item_get_value = 2701 # (item_get_value, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item base price. Essentially a duplicate of (store_item_value).
item_get_difficulty = 2702 # (item_get_difficulty, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item difficulty value.
item_get_head_armor = 2703 # (item_get_head_armor, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item head armor value.
item_get_body_armor = 2704 # (item_get_body_armor, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item body armor value.
item_get_leg_armor = 2705 # (item_get_leg_armor, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item leg armor value.
item_get_hit_points = 2706 # (item_get_hit_points, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item hit points amount.
item_get_weapon_length = 2707 # (item_get_weapon_length, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item length (for weapons) or shield half-width (for shields). To get actual shield width, multiply this value by 2. Essentially, it is a distance from the shield's "center" point to its left, right and top edges (and bottom edge as well if shield height is not defined).
item_get_speed_rating = 2708 # (item_get_speed_rating, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item speed rating.
item_get_missile_speed = 2709 # (item_get_missile_speed, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item missile speed rating.
item_get_max_ammo = 2710 # (item_get_max_ammo, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item max ammo amount.
item_get_accuracy = 2711 # (item_get_accuracy, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves item accuracy value. Note that this operation will return 0 for an item with undefined accuracy, even though the item accuracy will actually default to 100.
item_get_shield_height = 2712 # (item_get_shield_height, <destination_fixed_point>, <item_kind_no>),
# Version 1.161+. Retrieves distance from shield "center" to its bottom edge as a fixed point number. Use (set_fixed_point_multiplier, 100), to retrieve the correct value with this operation. To get actual shield height, use shield_height + weapon_length if this operation returns a non-zero value, otherwise use 2 * weapon_length.
item_get_horse_scale = 2713 # (item_get_horse_scale, <destination_fixed_point>, <item_kind_no>),
# Version 1.161+. Retrieves horse scale value as fixed point number.
item_get_horse_speed = 2714 # (item_get_horse_speed, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves horse speed value.
item_get_horse_maneuver = 2715 # (item_get_horse_maneuver, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves horse maneuverability value.
item_get_food_quality = 2716 # (item_get_food_quality, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves food quality coefficient (as of Warband 1.165, this coefficient is actually set for many food items, but never used in the code as there was no way to retrieve this coeff before 1.161 patch).
item_get_abundance = 2717 # (item_get_abundance, <destination>, <item_kind_no>),
# Version 1.161+. Retrieve item abundance value. Note that this operation will return 0 for an item with undefined abundance, even though the item abundance will actually default to 100.
item_get_thrust_damage = 2718 # (item_get_thrust_damage, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves thrust base damage value for item.
item_get_thrust_damage_type = 2719 # (item_get_thrust_damage_type, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves thrust damage type for item (see definitions for "cut", "pierce" and "blunt" in header_items.py).
item_get_swing_damage = 2720 # (item_get_swing_damage, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves swing base damage value for item.
item_get_swing_damage_type = 2721 # (item_get_swing_damage_type, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves swing damage type for item (see definitions for "cut", "pierce" and "blunt" in header_items.py).
item_get_horse_charge_damage = 2722 # (item_get_horse_charge_damage, <destination>, <item_kind_no>),
# Version 1.161+. Retrieves horse charge base damage.
################################################################################
# [ Z13 ] SOUNDS AND MUSIC TRACKS
################################################################################
# There are two types of sound in the game: sounds and tracks. Sounds are just
# short sound effects. They can be positional (i.e. emitted by some object on
# the scene or by player's opponent during the dialog). They can be generic
# sound effects, like playing some drums when player meets mountain bandits.
# Tracks are the background music. The game works as a kind of a music box,
# cycling the available melodies according to the situation. It is up to the
# Module System developer, however, to tell the game what the situation is.
# There are two factors which you can tell the game: situation and culture.
# So you can tell the game that the situation is "ambush" and the culture is
# "khergits", and the game will select the musuc tracks which fit this
# combination of situation and culture and will rotate them randomly. And of
# course, you can also tell the game to play one specific track if you want.
play_sound_at_position = 599 # (play_sound_at_position, <sound_id>, <position>, [options]),
# Plays a sound in specified scene position. See sf_* flags in header_sounds.py for reference on possible options.
play_sound = 600 # (play_sound, <sound_id>, [options]),
# Plays a sound. If the operation is called from agent, scene_prop or item trigger, then the sound will be positional and 3D. See sf_* flags in header_sounds.py for reference on possible options.
play_track = 601 # (play_track, <track_id>, [options]),
# Plays specified music track. Possible options: 0 = finish current then play this, 1 = fade out current and start this, 2 = stop current abruptly and start this
play_cue_track = 602 # (play_cue_track, <track_id>),
# Plays specified music track OVER any currently played music track (so you can get two music tracks playing simultaneously). Hardly useful.
music_set_situation = 603 # (music_set_situation, <situation_type>),
# Sets current situation(s) in the game (see mtf_* flags in header_music.py for reference) so the game engine can pick matching tracks from module_music.py. Use 0 to stop any currently playing music (it will resume when situation is later set to something).
music_set_culture = 604 # (music_set_culture, <culture_type>),
# Sets current culture(s) in the game (see mtf_* flags in header_music.py for reference) so the game engine can pick matching tracks from module_music.py. Use 0 to stop any currently playing music (it will resume when cultures are later set to something).
stop_all_sounds = 609 # (stop_all_sounds, [options]),
# Stops all playing sounds. Version 1.153 options: 0 = stop only looping sounds, 1 = stop all sounds. Version 1.143 options: 0 = let current track finish, 1 = fade it out, 2 = stop it abruptly.
store_last_sound_channel = 615 # (store_last_sound_channel, <destination>),
# Version 1.153+. UNTESTED. Stores the sound channel used for the last sound operation.
stop_sound_channel = 616 # (stop_sound_channel, <sound_channel_no>),
# Version 1.153+. UNTESTED. Stops sound playing on specified sound channel.
################################################################################
# [ Z14 ] POSITIONS
################################################################################
# Positions are the 3D math of the game. If you want to handle objects in
# space, you will inevitably have to deal with positions. Note that while most
# position-handling operations work both on global map and on the scenes,
# there are operations which will only work in one or another mode.
# Each position consists of three parts: coordinates, rotation and scale.
# Coordinates are three numbers - (X,Y,Z) - which define a certain point in
# space relative to the base of coordinates. Most of the time, the base of
# coordinates is either the center of the global map, or the center of the
# scene, but there may be exceptions. Note that all operations with
# coordinates nearly always use fixed point numbers.
# Position rotation determines just that - rotation around corresponding
# world axis. So rotation around Z axis means rotation around vertical axis,
# in other words - turning right and left. Rotation around X and Y axis will
# tilt the position forward/backwards and right/left respectively.
# It is common game convention that X world axis points to the East, Y world
# axis points to the North and Z world axis points straight up. However this
# is so-called global coordinates system, and more often than not you'll be
# dealing with local coordinates. Local coordinates are the coordinate system
# defined by the object's current position. For the object, his X axis is to
# the right, Y axis is forward, and Z axis is up. This is simple enough, but
# consider what happens if that object is turned upside down in world space?
# Object's Z axis will point upwards *from the object's point of view*, in
# other words, in global space it will be pointing *downwards*. And if the
# object is moving, then its local coordinate system is moving with it...
# you get the idea.
# Imagine the position as a small point with an arrow somewhere in space.
# Position's coordinates are the point's position. Arrow points horizontally
# to the North by default, and position's rotation determines how much was
# it turned in the each of three directions.
# Final element of position is scale. It is of no direct relevance to the
# position itself, and it does not participate in any calculations. However
# it is important when you retrieve or set positions of objects. In this
# case, position's scale is object's scale - so you can shrink that wall
# or quite the opposite, make it grow to the sky, depending on your whim.
# Generic position operations
init_position = 701 # (init_position, <position>),
# Sets position coordinates to [0,0,0], without any rotation and default scale.
copy_position = 700 # (copy_position, <position_target>, <position_source>),
# Makes a duplicate of position_source.
position_copy_origin = 719 # (position_copy_origin, <position_target>, <position_source>),
# Copies coordinates from source position to target position, without changing rotation or scale.
position_copy_rotation = 718 # (position_copy_rotation, <position_target>, <position_source>),
# Copies rotation from source position to target position, without changing coordinates or scale.
position_transform_position_to_parent = 716 # (position_transform_position_to_parent, <position_dest>, <position_anchor>, <position_relative_to_anchor>),
# Converts position from local coordinate space to parent coordinate space. In other words, if you have some position on the scene (anchor) and a position describing some place *relative* to anchor (for example [10,20,0] means "20 meters forward and 10 meters to the right"), after calling this operation you will get that position coordinates on the scene in <position_dest>. Rotation and scale is also taken care of, so you can use relative angles.
position_transform_position_to_local = 717 # (position_transform_position_to_local, <position_dest>, <position_anchor>, <position_source>),
# The opposite to (position_transform_position_to_parent), this operation allows you to get source's *relative* position to your anchor. Suppose you want to run some decision making for your bot agent depending on player's position. In order to know where player is located relative to your bot you call (position_transform_position_to_local, <position_dest>, <bot_position>, <player_position>). Then we check position_dest's Y coordinate - if it's negative, then the player is behind our bot's back.
# Position (X,Y,Z) coordinates
position_get_x = 726 # (position_get_x, <destination_fixed_point>, <position>),
# Return position X coordinate (to the east, or to the right). Base unit is meters. Use (set_fixed_point_multiplier) to set another measurement unit (100 will get you centimeters, 1000 will get you millimeters, etc).
position_get_y = 727 # (position_get_y, <destination_fixed_point>, <position>),
# Return position Y coordinate (to the north, or forward). Base unit is meters. Use (set_fixed_point_multiplier) to set another measurement unit (100 will get you centimeters, 1000 will get you millimeters, etc).
position_get_z = 728 # (position_get_z, <destination_fixed_point>, <position>),
# Return position Z coordinate (to the top). Base unit is meters. Use (set_fixed_point_multiplier) to set another measurement unit (100 will get you centimeters, 1000 will get you millimeters, etc).
position_set_x = 729 # (position_set_x, <position>, <value_fixed_point>),
# Set position X coordinate.
position_set_y = 730 # (position_set_y, <position>, <value_fixed_point>),
# Set position Y coordinate.
position_set_z = 731 # (position_set_z, <position>, <value_fixed_point>),
# Set position Z coordinate.
position_move_x = 720 # (position_move_x, <position>, <movement>, [value]),
	# Moves position along X axis. Movement distance is in cms. Optional parameter determines whether the position is moved along the local (value=0) or global (value=1) X axis (i.e. whether the position will be moved to its right/left, or to the global east/west).
position_move_y = 721 # (position_move_y, <position>, <movement>, [value]),
	# Moves position along Y axis. Movement distance is in cms. Optional parameter determines whether the position is moved along the local (value=0) or global (value=1) Y axis (i.e. whether the position will be moved forward/backwards, or to the global north/south).
position_move_z = 722 # (position_move_z, <position>, <movement>, [value]),
	# Moves position along Z axis. Movement distance is in cms. Optional parameter determines whether the position is moved along the local (value=0) or global (value=1) Z axis (i.e. whether the position will be moved above/below itself, or to the global above/below - these directions will be different if the position is tilted).
position_set_z_to_ground_level = 791 # (position_set_z_to_ground_level, <position>),
# This will bring the position Z coordinate so it rests on the ground level (i.e. an agent could stand on that position). This takes scene props with their collision meshes into account. Only works during a mission, so you can't measure global map height using this.
position_get_distance_to_terrain = 792 # (position_get_distance_to_terrain, <destination>, <position>),
# This will measure the distance between position and terrain below, ignoring all scene props and their collision meshes. Operation only works on the scenes and cannot be used on the global map.
position_get_distance_to_ground_level = 793 # (position_get_distance_to_ground_level, <destination>, <position>),
# This will measure the distance between position and the ground level, taking scene props and their collision meshes into account. Operation only works on the scenes and cannot be used on the global map.
# Position rotation
position_get_rotation_around_x = 742 # (position_get_rotation_around_x, <destination>, <position>),
# Returns angle (in degrees) that the position is rotated around X axis (tilt forward/backwards).
position_get_rotation_around_y = 743 # (position_get_rotation_around_y, <destination>, <position>),
# Returns angle (in degrees) that the position is rotated around Y axis (tilt right/left).
position_get_rotation_around_z = 740 # (position_get_rotation_around_z, <destination>, <position>),
# Returns angle (in degrees) that the position is rotated around Z axis (turning right/left).
position_rotate_x = 723 # (position_rotate_x, <position>, <angle>),
	# Rotates position around its X axis (tilt forward/backwards).
position_rotate_y = 724 # (position_rotate_y, <position>, <angle>),
# Rotates position around Y axis (tilt right/left).
position_rotate_z = 725 # (position_rotate_z, <position>, <angle>, [use_global_z_axis]),
# Rotates position around Z axis (rotate right/left). Pass 1 for use_global_z_axis to rotate the position around global axis instead.
position_rotate_x_floating = 738 # (position_rotate_x_floating, <position>, <angle_fixed_point>),
# Same as (position_rotate_x), but takes fixed point value as parameter, allowing for more precise rotation.
position_rotate_y_floating = 739 # (position_rotate_y_floating, <position>, <angle_fixed_point>),
# Same as (position_rotate_y), but takes fixed point value as parameter, allowing for more precise rotation.
position_rotate_z_floating = 734 # (position_rotate_z_floating, <position_no>, <angle_fixed_point>),
# Version 1.161+. Same as (position_rotate_z), but takes fixed point value as parameter, allowing for more precise rotation.
# Position scale
position_get_scale_x = 735 # (position_get_scale_x, <destination_fixed_point>, <position>),
# Retrieves position scaling along X axis.
position_get_scale_y = 736 # (position_get_scale_y, <destination_fixed_point>, <position>),
# Retrieves position scaling along Y axis.
position_get_scale_z = 737 # (position_get_scale_z, <destination_fixed_point>, <position>),
# Retrieves position scaling along Z axis.
position_set_scale_x = 744 # (position_set_scale_x, <position>, <value_fixed_point>),
# Sets position scaling along X axis.
position_set_scale_y = 745 # (position_set_scale_y, <position>, <value_fixed_point>),
# Sets position scaling along Y axis.
position_set_scale_z = 746 # (position_set_scale_z, <position>, <value_fixed_point>),
# Sets position scaling along Z axis.
# Measurement of distances and angles
get_angle_between_positions = 705 # (get_angle_between_positions, <destination_fixed_point>, <position_no_1>, <position_no_2>),
# Calculates angle between positions, using positions as vectors. Only rotation around Z axis is used. In other words, the function returns the difference between Z rotations of both positions.
position_has_line_of_sight_to_position = 707 # (position_has_line_of_sight_to_position, <position_no_1>, <position_no_2>),
# Checks that you can see one position from another. This obviously implies that both positions must be in global space. Note this is computationally expensive, so try to keep number of these to a minimum.
get_distance_between_positions = 710 # (get_distance_between_positions, <destination>, <position_no_1>, <position_no_2>),
# Returns distance between positions in centimeters.
get_distance_between_positions_in_meters = 711 # (get_distance_between_positions_in_meters, <destination>, <position_no_1>, <position_no_2>),
# Returns distance between positions in meters.
get_sq_distance_between_positions = 712 # (get_sq_distance_between_positions, <destination>, <position_no_1>, <position_no_2>),
# Returns squared distance between two positions in centimeters.
get_sq_distance_between_positions_in_meters = 713 # (get_sq_distance_between_positions_in_meters, <destination>, <position_no_1>, <position_no_2>),
# Returns squared distance between two positions in meters.
position_is_behind_position = 714 # (position_is_behind_position, <position_base>, <position_to_check>),
# Checks if the second position is behind the first.
get_sq_distance_between_position_heights = 715 # (get_sq_distance_between_position_heights, <destination>, <position_no_1>, <position_no_2>),
# Returns squared distance between position *heights* in centimeters.
position_normalize_origin = 741 # (position_normalize_origin, <destination_fixed_point>, <position>),
# What this operation seems to do is calculate the distance between the zero point [0,0,0] and the point with position's coordinates. Can be used to quickly calculate distance to relative positions.
position_get_screen_projection = 750 # (position_get_screen_projection, <position_screen>, <position_world>),
# Calculates the screen coordinates of the position and stores it as position_screen's X and Y coordinates.
# Global map positions
map_get_random_position_around_position = 1627 # (map_get_random_position_around_position, <dest_position_no>, <source_position_no>, <radius>),
# Returns a random position on the global map in the vicinity of the source_position.
map_get_land_position_around_position = 1628 # (map_get_land_position_around_position, <dest_position_no>, <source_position_no>, <radius>),
# Returns a random position on the global map in the vicinity of the source_position. Will always return a land position (i.e. some place you can walk to).
map_get_water_position_around_position = 1629 # (map_get_water_position_around_position, <dest_position_no>, <source_position_no>, <radius>),
# Returns a random position on the global map in the vicinity of the source_position. Will always return a water position (i.e. sea, lake or river).
################################################################################
# [ Z15 ] GAME NOTES
################################################################################
# The game provides the player with the Notes screen, where there are several
# sections: Troops, Factions, Parties, Quests and Information. This is the
# player's "diary", where all information player knows is supposed to be
# stored. With the operations from this section, modder can control what
# objects the player will be able to see in their corresponding sections of
# the Notes screen, and what information will be displayed on each object.
# Note that there's a number of engine-called scripts which take priority to
# text notes created by these operations. Any information in these notes will
# only be visible to the player if those scripts "refuse" to generate the note
# page dynamically. The following scripts can override these notes:
# script_game_get_troop_note
# script_game_get_center_note
# script_game_get_faction_note
# script_game_get_quest_note
# script_game_get_info_page_note
troop_set_note_available = 1095 # (troop_set_note_available, <troop_id>, <value>),
# Enables (value = 1) or disables (value = 0) troop's page in the Notes / Characters section.
add_troop_note_tableau_mesh = 1108 # (add_troop_note_tableau_mesh, <troop_id>, <tableau_material_id>),
# Adds graphical elements to the troop's information page (usually banner and portrait).
add_troop_note_from_dialog = 1114 # (add_troop_note_from_dialog, <troop_id>, <note_slot_no>, <expires_with_time>),
# Adds current dialog text to troop notes. Each troop has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_troop_note_from_sreg = 1117 # (add_troop_note_from_sreg, <troop_id>, <note_slot_no>, <string_id>, <expires_with_time>),
# Adds any text stored in string register to troop notes. Each troop has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
faction_set_note_available = 1096 # (faction_set_note_available, <faction_id>, <value>), #1 = available, 0 = not available
	# Enables (value = 1) or disables (value = 0) faction's page in the Notes / Factions section.
add_faction_note_tableau_mesh = 1109 # (add_faction_note_tableau_mesh, <faction_id>, <tableau_material_id>),
# Adds graphical elements to the faction's information page (usually graphical collage).
add_faction_note_from_dialog = 1115 # (add_faction_note_from_dialog, <faction_id>, <note_slot_no>, <expires_with_time>),
# Adds current dialog text to faction notes. Each faction has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_faction_note_from_sreg = 1118 # (add_faction_note_from_sreg, <faction_id>, <note_slot_no>, <string_id>, <expires_with_time>),
# Adds any text stored in string register to faction notes. Each faction has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
party_set_note_available = 1097 # (party_set_note_available, <party_id>, <value>), #1 = available, 0 = not available
	# Enables (value = 1) or disables (value = 0) party's page in the Notes / Parties section.
add_party_note_tableau_mesh = 1110 # (add_party_note_tableau_mesh, <party_id>, <tableau_material_id>),
# Adds graphical elements to the party's information page (usually map icon).
add_party_note_from_dialog = 1116 # (add_party_note_from_dialog, <party_id>, <note_slot_no>, <expires_with_time>),
# Adds current dialog text to party notes. Each party has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_party_note_from_sreg = 1119 # (add_party_note_from_sreg, <party_id>, <note_slot_no>, <string_id>, <expires_with_time>),
# Adds any text stored in string register to party notes. Each party has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
quest_set_note_available = 1098 # (quest_set_note_available, <quest_id>, <value>), #1 = available, 0 = not available
	# Enables (value = 1) or disables (value = 0) quest's page in the Notes / Quests section.
add_quest_note_tableau_mesh = 1111 # (add_quest_note_tableau_mesh, <quest_id>, <tableau_material_id>),
# Adds graphical elements to the quest's information page (not used in Native).
add_quest_note_from_dialog = 1112 # (add_quest_note_from_dialog, <quest_id>, <note_slot_no>, <expires_with_time>),
# Adds current dialog text to quest notes. Each quest has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_quest_note_from_sreg = 1113 # (add_quest_note_from_sreg, <quest_id>, <note_slot_no>, <string_id>, <expires_with_time>),
# Adds any text stored in string register to quest notes. Each quest has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_info_page_note_tableau_mesh = 1090 # (add_info_page_note_tableau_mesh, <info_page_id>, <tableau_material_id>),
# Adds graphical elements to the info page (not used in Native).
add_info_page_note_from_dialog = 1091 # (add_info_page_note_from_dialog, <info_page_id>, <note_slot_no>, <expires_with_time>),
# Adds current dialog text to info page notes. Each info page has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
add_info_page_note_from_sreg = 1092 # (add_info_page_note_from_sreg, <info_page_id>, <note_slot_no>, <string_id>, <expires_with_time>),
# Adds any text stored in string register to info page notes. Each info page has 16 note slots. Last parameter is used to mark the note as time-dependent, if it's value is 1, then the note will be marked ("Report is current") and will be updated appropriately as the game progresses ("Report is X days old").
################################################################################
# [ Z16 ] TABLEAUS AND HERALDICS
################################################################################
# Tableaus are the tool that gives you limited access to the game graphical
# renderer. If you know 3D graphics, you know that all 3D objects consist of
# a mesh (which defines it's form) and the material (which defines how this
# mesh is "painted"). With tableau functions you can do two things. First, you
# can replace or alter the materials used to render the game objects (with
# many restrictions). If this sounds esoteric to you, have a look at the game
# heraldry - it is implemented using tableaus. Second, you can render images
# of various game objects and place them on the game menus, presentations and
# so on. For example, if you open the game Inventory window, you can see your
# character in his current equipment. This character is rendered using tableau
# operations. Similarly, if you open the Notes screen and select some kingdom
# lord on the Troops section, you will see that lord's face and banner. Both
# face and banner are drawn using tableaus.
cur_item_set_tableau_material = 1981 # (cur_item_set_tableau_material, <tableau_material_id>, <instance_code>),
	# Can only be used inside ti_on_init_item trigger in module_items.py. Assigns tableau to the item instance. Value of <instance_code> will be passed to tableau code. Commonly used for heraldic armors and shields.
cur_scene_prop_set_tableau_material = 1982 # (cur_scene_prop_set_tableau_material, <tableau_material_id>, <instance_code>),
# Can only be used inside ti_on_init_scene_prop trigger in module_scene_props.py. Assigns tableau to the scene prop instance. Value of <instance_code> will be passed to tableau code. Commonly used for static banners.
cur_map_icon_set_tableau_material = 1983 # (cur_map_icon_set_tableau_material, <tableau_material_id>, <instance_code>),
# Can only be used inside ti_on_init_map_icon trigger in module_map_icons.py. Assigns tableau to the icon prop instance. Value of <instance_code> will be passed to tableau code. Commonly used for player/lord party banners.
cur_agent_set_banner_tableau_material = 1986 # (cur_agent_set_banner_tableau_material, <tableau_material_id>),
	# Can only be used inside ti_on_agent_spawn trigger in module_mission_templates.py. Assigns the heraldic (banner) tableau to the agent being spawned. NOTE(review): original comment was truncated ("Assigns heraldry ."); exact scope of what the tableau is applied to should be confirmed in-game.
# Operations used in module_tableau_materials.py module
cur_tableau_add_tableau_mesh = 1980 # (cur_tableau_add_tableau_mesh, <tableau_material_id>, <value>, <position_register_no>),
# Used in module_tableau_materials.py to add one tableau to another. Value parameter is passed to tableau_material as is.
cur_tableau_render_as_alpha_mask = 1984 # (cur_tableau_render_as_alpha_mask)
# Tells the engine to treat the tableau as an alpha (transparency) mask.
cur_tableau_set_background_color = 1985 # (cur_tableau_set_background_color, <value>),
# Defines solid background color for the current tableau.
cur_tableau_set_ambient_light = 1987 # (cur_tableau_set_ambient_light, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
# Not documented. Used for tableaus rendered from 3D objects to provide uniform tinted lighting.
cur_tableau_set_camera_position = 1988 # (cur_tableau_set_camera_position, <position>),
# Not documented. Used for tableaus rendered from 3D objects to position camera as necessary (usually with a perspective camera).
cur_tableau_set_camera_parameters = 1989 # (cur_tableau_set_camera_parameters, <is_perspective>, <camera_width_times_1000>, <camera_height_times_1000>, <camera_near_times_1000>, <camera_far_times_1000>),
# Not documented. Used to define camera parameters for tableau rendering. Perspective camera is generally used to render 3D objects for tableaus, while non-perspective camera is used to modify tableau texture meshes.
cur_tableau_add_point_light = 1990 # (cur_tableau_add_point_light, <position>, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
# Not documented. Typically used for tableaus rendered from 3D objects to add a point light source.
cur_tableau_add_sun_light = 1991 # (cur_tableau_add_sun_light, <position>, <red_fixed_point>, <green_fixed_point>, <blue_fixed_point>),
# Not documented. Typically used for tableaus rendered from 3D objects to add a directional light source. Note that position coordinates do not matter, only rotation (i.e. light rays direction) does.
cur_tableau_add_mesh = 1992 # (cur_tableau_add_mesh, <mesh_id>, <position>, <value_fixed_point>, <value_fixed_point>),
# Adds a static mesh to the tableau with specified offset, scale and alpha. First value fixed point is the scale factor, second value fixed point is alpha. use 0 for default values.
cur_tableau_add_mesh_with_vertex_color = 1993 # (cur_tableau_add_mesh_with_vertex_color, <mesh_id>, <position>, <value_fixed_point>, <value_fixed_point>, <value>),
# Adds a static mesh to the tableau with specified offset, scale, alpha and vertex color. First value fixed point is the scale factor, second value fixed point is alpha. Value is vertex color.
cur_tableau_add_mesh_with_scale_and_vertex_color = 2000 # (cur_tableau_add_mesh_with_scale_and_vertex_color, <mesh_id>, <position>, <scale_position>, <value_fixed_point>, <value>),
# Similar to (cur_tableau_add_mesh_with_vertex_color), but allows non-uniform scaling. Scale factors are stored as (x,y,z) position properties with fixed point values.
cur_tableau_add_map_icon = 1994 # (cur_tableau_add_map_icon, <map_icon_id>, <position>, <value_fixed_point>),
# Adds a rendered image of a map icon to current tableau. Last parameter is the scale factor for the model.
cur_tableau_add_troop = 1995 # (cur_tableau_add_troop, <troop_id>, <position>, <animation_id>, <instance_no>),
# Adds a rendered image of the troop in a specified animation to current tableau. If instance_no is 0 or less, then the face is not generated randomly (important for heroes).
cur_tableau_add_horse = 1996 # (cur_tableau_add_horse, <item_id>, <position>, <animation_id>),
# Adds a rendered image of a horse in a specified animation to current tableau.
cur_tableau_set_override_flags = 1997 # (cur_tableau_set_override_flags, <value>),
# When creating a troop image for current tableau, this operation allows to override troop's inventory partially or completely. See af_* flags in header_mission_templates.py for reference.
cur_tableau_clear_override_items = 1998 # (cur_tableau_clear_override_items),
	# Removes any previously defined equipment overrides for the troop, allowing to start from scratch.
cur_tableau_add_override_item = 1999 # (cur_tableau_add_override_item, <item_kind_id>),
# When creating a troop image for current tableau, the operation will add a new item to troop's equipment.
################################################################################
# [ Z17 ] STRING OPERATIONS
################################################################################
# The game provides you only limited control over string information. Most
# operations will either retrieve some string (usually the name) from the game
# object, or set that object's name to a string.
# Two important functions are str_store_string and str_store_string_reg. They
# are different from all others because they not only assign the string to a
# string register, they *process* it. For example, if source string contains
# "{reg3}", then the resulting string will have the register name and it's
# surrounding brackets replaced with the value currently stored in that
# register. Other strings can be substituted as well, and even some limited
# logic can be implemented using this mechanism. You can try to read through
# the module_strings.py file and try to deduce what each particular
# substitution does.
# Conditional operations
str_is_empty = 2318 # (str_is_empty, <string_register>),
# Checks that referenced string register is empty.
# Other string operations
str_clear = 2319 # (str_clear, <string_register>)
# Clears the contents of the referenced string register.
str_store_string = 2320 # (str_store_string, <string_register>, <string_id>),
# Stores a string value in the referenced string register. Only string constants and quick strings can be stored this way.
str_store_string_reg = 2321 # (str_store_string_reg, <string_register>, <string_no>),
	# Copies the contents of one string register into another.
str_store_troop_name = 2322 # (str_store_troop_name, <string_register>, <troop_id>),
# Stores singular troop name in referenced string register.
str_store_troop_name_plural = 2323 # (str_store_troop_name_plural, <string_register>, <troop_id>),
# Stores plural troop name in referenced string register.
str_store_troop_name_by_count = 2324 # (str_store_troop_name_by_count, <string_register>, <troop_id>, <number>),
# Stores singular or plural troop name with number of troops ("29 Archers", "1 Bandit").
str_store_item_name = 2325 # (str_store_item_name, <string_register>, <item_id>),
# Stores singular item name in referenced string register.
str_store_item_name_plural = 2326 # (str_store_item_name_plural, <string_register>, <item_id>),
# Stores plural item name in referenced string register.
str_store_item_name_by_count = 2327 # (str_store_item_name_by_count, <string_register>, <item_id>, <number>),
	# Stores singular or plural item name with number of items ("11 Swords", "1 Bottle of Wine"). NOTE(review): <number> parameter restored by analogy with (str_store_troop_name_by_count) above and the example output; confirm against engine documentation.
str_store_party_name = 2330 # (str_store_party_name, <string_register>, <party_id>),
# Stores party name in referenced string register.
str_store_agent_name = 2332 # (str_store_agent_name, <string_register>, <agent_id>),
# Stores agent name in referenced string register.
str_store_faction_name = 2335 # (str_store_faction_name, <string_register>, <faction_id>),
# Stores faction name in referenced string register.
str_store_quest_name = 2336 # (str_store_quest_name, <string_register>, <quest_id>),
# Stores quest name (as defined in module_quests.py) in referenced string register.
str_store_info_page_name = 2337 # (str_store_info_page_name, <string_register>, <info_page_id>),
# Stores info page title (as defined in module_info_pages.py) in referenced string register.
str_store_date = 2340 # (str_store_date, <string_register>, <number_of_hours_to_add_to_the_current_date>),
# Stores formatted date string, using the number of hours since start of the game (can be retrieved by a call to store_current_hours).
str_store_troop_name_link = 2341 # (str_store_troop_name_link, <string_register>, <troop_id>),
# Stores troop name as an internal game link. Resulting string can be used in game notes, will be highlighted, and clicking on it will redirect the player to the details page of the referenced troop.
str_store_party_name_link = 2342 # (str_store_party_name_link, <string_register>, <party_id>),
# Stores party name as an internal game link. Resulting string can be used in game notes, will be highlighted, and clicking on it will redirect the player to the details page of the referenced party.
str_store_faction_name_link = 2343 # (str_store_faction_name_link, <string_register>, <faction_id>),
# Stores faction name as an internal game link. Resulting string can be used in game notes, will be highlighted, and clicking on it will redirect the player to the details page of the referenced faction.
str_store_quest_name_link = 2344 # (str_store_quest_name_link, <string_register>, <quest_id>),
# Stores quest name as an internal game link. Resulting string can be used in game notes, will be highlighted, and clicking on it will redirect the player to the details page of the referenced quest.
str_store_info_page_name_link = 2345 # (str_store_info_page_name_link, <string_register>, <info_page_id>),
# Stores info page title as an internal game link. Resulting string can be used in game notes, will be highlighted, and clicking on it will redirect the player to the details page of the referenced info page.
str_store_class_name = 2346 # (str_store_class_name, <string_register>, <class_id>),
	# Stores name of the selected troop class (Infantry, Archers, Cavalry or any of the custom class names) in referenced string register.
game_key_get_mapped_key_name = 65 # (game_key_get_mapped_key_name, <string_register>, <game_key>),
# Version 1.161+. Stores human-readable key name that's currently assigned to the provided game key. May store "unknown" and "No key assigned" strings (the latter is defined in languages/en/ui.csv, the former seems to be hardcoded).
# Network/multiplayer-related string operations
str_store_player_username = 2350 # (str_store_player_username, <string_register>, <player_id>),
# Stores player's multiplayer username in referenced string register. Can be used in multiplayer mode only.
str_store_server_password = 2351 # (str_store_server_password, <string_register>),
# Stores server's password in referenced string register.
str_store_server_name = 2352 # (str_store_server_name, <string_register>),
# Stores server's name (as displayed to clients in server's list window) in referenced string register.
str_store_welcome_message = 2353 # (str_store_welcome_message, <string_register>),
# Stores server's welcome message in referenced string register.
str_encode_url = 2355 # (str_encode_url, <string_register>),
# This operation will "sanitize" a string to be used as part of network URL, replacing any non-standard characters with their '%'-codes.
################################################################################
# [ Z18 ] OUTPUT AND MESSAGES
################################################################################
# These operations will provide some textual information to the player during
# the game. There are three operations which will generate a game message
# (displayed as a chat-like series of text strings in the bottom-left part of
# the screen), while most others will be displaying various types of dialog
# boxes. You can also ask a question to player using these operations.
display_debug_message = 1104 # (display_debug_message, <string_id>, [hex_colour_code]),
# Displays a string message, but only in debug mode, using provided color (hex-coded 0xRRGGBB). The message is additionally written to rgl_log.txt file in both release and debug modes when edit mode is enabled.
display_log_message = 1105 # (display_log_message, <string_id>, [hex_colour_code]),
# Display a string message using provided color (hex-coded 0xRRGGBB). The message will also be written to game log (accessible through Notes / Game Log), and will persist between sessions (i.e. it will be stored as part of the savegame).
display_message = 1106 # (display_message, <string_id>, [hex_colour_code]),
	# Display a string message using provided color (hex-coded 0xRRGGBB).
set_show_messages = 1107 # (set_show_messages, <value>),
# Suppresses (value = 0) or enables (value = 1) game messages, including those generated by the game engine.
tutorial_box = 1120 # (tutorial_box, <string_id>, <string_id>),
# This operation is deprecated but is still used in Native.
dialog_box = 1120 # (dialog_box, <text_string_id>, [title_string_id]),
# Displays a popup window with the text message and an optional caption.
question_box = 1121 # (question_box, <string_id>, [<yes_string_id>], [<no_string_id>]),
# Displays a popup window with the text of the question and two buttons (Yes and No by default, but can be overridden). When the player selects one of possible responses, a ti_on_question_answered trigger will be executed.
tutorial_message = 1122 # (tutorial_message, <string_id>, [color], [auto_close_time]),
# Displays a popup window with tutorial text stored in referenced string or string register. Use -1 to close any currently open tutorial box. Optional parameters allow you to define text color and time period after which the tutorial box will close automatically.
tutorial_message_set_position = 1123 # (tutorial_message_set_position, <position_x>, <position_y>),
# Defines screen position for the tutorial box. Assumes screen size is 1000*750.
tutorial_message_set_size = 1124 # (tutorial_message_set_size, <size_x>, <size_y>),
# Defines size of the tutorial box. Assumes screen size is 1000*750.
tutorial_message_set_center_justify = 1125 # (tutorial_message_set_center_justify, <val>),
# Sets tutorial box to be center justified (value = 1), or use positioning dictated by tutorial_message_set_position (value = 0).
tutorial_message_set_background = 1126 # (tutorial_message_set_background, <value>),
# Defines whether the tutorial box will have a background or not (1 or 0). Default is off.
################################################################################
# [ Z19 ] GAME CONTROL: SCREENS, MENUS, DIALOGS AND ENCOUNTERS
################################################################################
# An encounter is what happens when player's party meets another party on the
# world map. While most operations in the game can be performed outside of
# encounter, there's one thing you can only do when in encounter context -
# standard game battle. When you are initiating the battle from an encounter,
# the game engine will do most of the grunt work for you. You can order the
# engine to add some parties to the battle on this or that side, and the
# soldiers from those parties will spawn on the battlefield, in the numbers
# proportional to the party sizes, and the agents will maintain links with
# their parties. If agents earn experience, this will be reflected on the
# world map, and if some agents die, party sizes will be decreased. All this
# stuff can potentially be emulated by the Module System code, but it's tons
# of work and is still much less efficient than the tool the game engine
# already provides to you.
# An important notice: when player encounters an AI party on the map, the game
# calls "game_event_party_encounter" script in the module_scripts.py. So if
# you want to implement some non-standard processing of game encounters, this
# is the place you should start from. Also note that the game implements the
# Camp menu as an encounter with a hardcoded party "p_camp_bandits".
# Also you can find many operations in this section dealing with game screens,
# game menus and game dialogs. Keep in mind that some screens only make sense
# in certain contexts, and game menus are only available on the world map, you
# cannot use game menus during the mission.
# Conditional operations
entering_town = 36 # (entering_town, <town_id>),
# Apparently deprecated.
encountered_party_is_attacker = 39 # (encountered_party_is_attacker),
# Checks that the party encountered on the world map was following player (i.e. either player was trying to run away or at the very least this is a head-on clash).
conversation_screen_is_active = 42 # (conversation_screen_is_active),
# Checks that the player is currently in dialogue with some agent. Can only be used in triggers of module_mission_templates.py file.
in_meta_mission = 44 # (in_meta_mission),
# Deprecated, do not use.
# Game hardcoded windows and related operations
change_screen_return = 2040 # (change_screen_return),
# Closes any current screen and returns the player to worldmap (to scene?). 4research how it behaves in missions.
change_screen_loot = 2041 # (change_screen_loot, <troop_id>),
# Opens the Looting interface, using the provided troop as loot storage. Player has full access to troop inventory.
change_screen_trade = 2042 # (change_screen_trade, [troop_id]),
# Opens the Trade screen, using the provided troop as the trading partner. When called from module_dialogs, troop_id is optional and defaults to current dialogue partner.
change_screen_exchange_members = 2043 # (change_screen_exchange_members, [exchange_leader], [party_id]),
# Opens the Exchange Members With Party interface, using the specified party_id. If called during an encounter, party_id is optional and defaults to the encountered party. Second parameter determines whether the party leader is exchangeable (useful when managing the castle garrison).
change_screen_trade_prisoners = 2044 # (change_screen_trade_prisoners),
# Opens the Sell Prisoners interface. Script "script_game_get_prisoner_price" will be used to determine prisoner price.
change_screen_buy_mercenaries = 2045 # (change_screen_buy_mercenaries),
# Opens the Buy Mercenaries interface, where player can hire troops from the party specified with (set_mercenary_source_party) operation. Only works from the dialog.
change_screen_view_character = 2046 # (change_screen_view_character),
# Opens the character screen of another troop. Can only be used in dialogs.
change_screen_training = 2047 # (change_screen_training),
# Opens the character screen for the troop that player is currently talking to. Only works in dialogs. Deprecated, use (change_screen_view_character) instead.
change_screen_mission = 2048 # (change_screen_mission),
# Starts the mission, using previously defined scene and mission template.
change_screen_map_conversation = 2049 # (change_screen_map_conversation, <troop_id>),
# Starts the mission, same as (change_screen_mission). However once the mission starts, player will get into dialog with the specified troop, and once the dialog ends, the mission will automatically end.
change_screen_exchange_with_party = 2050 # (change_screen_exchange_with_party, <party_id>),
# Effectively duplicates (change_screen_exchange_members), but party_id parameter is obligatory and the operation doesn't have an option to prevent party leader from being exchanged.
change_screen_equip_other = 2051 # (change_screen_equip_other, [troop_id]),
# Opens the Equip Companion interface. When calling from a dialog, it is not necessary to specify troop_id.
change_screen_map = 2052 # (change_screen_map),
# Changes the screen to global map, closing any currently running game menu, dialog or mission.
change_screen_notes = 2053 # (change_screen_notes, <note_type>, <object_id>),
# Opens the Notes screen, in the selected category (note_type: 1=troops, 2=factions, 3=parties, 4=quests, 5=info_pages) and for the specified object in that category.
change_screen_quit = 2055 # (change_screen_quit),
# Quits the game to the main menu.
change_screen_give_members = 2056 # (change_screen_give_members, [party_id]),
# Opens the Give Troops to Another Party interface. Party_id parameter is optional during an encounter and will use encountered party as default value.
change_screen_controls = 2057 # (change_screen_controls),
# Opens the standard Configure Controls screen, pausing the game.
change_screen_options = 2058 # (change_screen_options),
# Opens the standard Game Options screen, pausing the game.
set_mercenary_source_party = 1320 # (set_mercenary_source_party, <party_id>),
# Defines the party from which the player will buy mercenaries with (change_screen_buy_mercenaries).
start_map_conversation = 1025 # (start_map_conversation, <troop_id>, [troop_dna]),
# Starts a conversation with the selected troop. Can be called directly from global map or game menus. Troop DNA parameter allows you to randomize non-hero troop appearances.
# Game menus
set_background_mesh = 2031 # (set_background_mesh, <mesh_id>),
# Sets the specified mesh as the background for the current menu. Possibly can be used for dialogs or presentations, but was not tested.
set_game_menu_tableau_mesh = 2032 # (set_game_menu_tableau_mesh, <tableau_material_id>, <value>, <position_register_no>),
# Adds a tableau to the current game menu screen. Position (X,Y) coordinates define mesh position, Z coordinate defines scaling. Parameter <value> will be passed as tableau_material script parameter.
jump_to_menu = 2060 # (jump_to_menu, <menu_id>),
# Opens the specified game menu. Note this only happens after the current block of code completes execution.
disable_menu_option = 2061 # (disable_menu_option),
# Never used in native. Apparently deprecated as menu options have prerequisite code blocks now.
# Game encounter handling operations
set_party_battle_mode = 1020 # (set_party_battle_mode),
# Used before or during the mission to start battle mode (and apparently make agents use appropriate AI).
finish_party_battle_mode = 1019 # (finish_party_battle_mode),
# Used during the mission to stop battle mode.
start_encounter = 1300 # (start_encounter, <party_id>),
# Forces the player party to initiate encounter with the specified party. Distance does not matter in this situation.
leave_encounter = 1301 # (leave_encounter),
# Leaves encounter mode.
encounter_attack = 1302 # (encounter_attack),
# Apparently starts the standard battle with the encountered party. 4research.
select_enemy = 1303 # (select_enemy, <value>),
# When joining a battle, this determines what side player will be helping. Defending party is always 0, and attacking party is always 1. Player can support either attackers (value = 0, i.e. defenders are the enemy) or defenders (value = 1).
set_passage_menu = 1304 # (set_passage_menu, <value>),
# When setting up a mission, this allows you to determine what game menu will be used for that mission passages instead of "mnu_town". Passage menu item number will determine what menu option (in sequential order, starting from 0) will be executed when the player activates that passage on the scene. Note that menu option condition code block will be ignored.
start_mission_conversation = 1920 # (start_mission_conversation, <troop_id>),
# During the mission, initiates the dialog with specified troop.
set_conversation_speaker_troop = 2197 # (set_conversation_speaker_troop, <troop_id>),
# Allows to dynamically switch speaking troops during the dialog when developer doesn't know in advance who will be doing the speaking. Should be placed in post-talk code section of dialog entry.
set_conversation_speaker_agent = 2198 # (set_conversation_speaker_agent, <agent_id>),
# Allows to dynamically switch speaking agents during the dialog when developer doesn't know in advance who will be doing the speaking. Should be placed in post-talk code section of dialog entry.
store_conversation_agent = 2199 # (store_conversation_agent, <destination>),
# Stores identifier of agent who is currently speaking.
store_conversation_troop = 2200 # (store_conversation_troop, <destination>),
# Stores identifier of troop who is currently speaking.
store_partner_faction = 2201 # (store_partner_faction, <destination>),
# Stores faction of the troop player is speaking to.
store_encountered_party = 2202 # (store_encountered_party, <destination>),
# Stores identifier of the encountered party.
store_encountered_party2 = 2203 # (store_encountered_party2, <destination>),
# Stores the identifier of the second encountered party (when first party is in battle, this one will return its battle opponent).
set_encountered_party = 2205 # (set_encountered_party, <party_no>),
# Sets the specified party as encountered by player, but does not run the entire encounter routine. Used in Native during chargen to set up the starting town and then immediately throw the player into street fight without showing him the town menu.
end_current_battle = 1307 # (end_current_battle),
# Apparently ends the battle between player's party and its opponent. Exact effects not clear. 4research.
# Operations specific to dialogs
store_repeat_object = 50 # (store_repeat_object, <destination>),
# Used in the dialogs code in combination with repeat_for_* dialog parameters, when creating dynamical player responses. Stores the value for the current iteration (i.e. a faction ID when repeat_for_factions is used, etc).
talk_info_show = 2020 # (talk_info_show, <hide_or_show>),
# Used in the dialogs code to display relations bar on opponent's portrait when mouse is hovering over it (value = 1) or disable this functionality (value = 0)
talk_info_set_relation_bar = 2021 # (talk_info_set_relation_bar, <value>),
# Sets the relations value for relationship bar in the dialog. Value should be in range -100..100.
talk_info_set_line = 2022 # (talk_info_set_line, <line_no>, <string_no>)
# Sets the additional text information (usually troop name) to be displayed together with the relations bar.
################################################################################
# [ Z20 ] SCENES AND MISSIONS
################################################################################
# To put the player into a 3D scene, you need two things. First is the scene
# itself. All scenes are defined in module_scenes.py file. The second element
# is no less important, and it's called mission template. Mission template
# will determine the context of the events on the scene - who will spawn
# where, who will be hostile or friendly to player or to each other, etc.
# Because of all this, when player is put on the 3D scene in the game, it is
# commonly said that player is "in a mission".
# Conditional operations
all_enemies_defeated = 1003 # (all_enemies_defeated, [team_id]),
# Checks if all agents from the specified team are defeated. When team_id is omitted default enemy team is checked.
race_completed_by_player = 1004 # (race_completed_by_player),
# Not documented. Not used in Native. Apparently deprecated.
num_active_teams_le = 1005 # (num_active_teams_le, <value>),
# Checks that the number of active teams (i.e. teams with at least one active agent) is less than or equal to given value.
main_hero_fallen = 1006 # (main_hero_fallen),
# Checks that the player has been knocked out.
scene_allows_mounted_units = 1834 # (scene_allows_mounted_units),
# Not documented. Used in multiplayer, but it's not clear where horses could be disallowed in the first place. 4research.
is_zoom_disabled = 2222 # (is_zoom_disabled),
# Version 1.153+. Checks that the zoom is currently disabled in the game.
# Scene slot operations
scene_set_slot = 503 # (scene_set_slot, <scene_id>, <slot_no>, <value>),
scene_get_slot = 523 # (scene_get_slot, <destination>, <scene_id>, <slot_no>),
scene_slot_eq = 543 # (scene_slot_eq, <scene_id>, <slot_no>, <value>),
scene_slot_ge = 563 # (scene_slot_ge, <scene_id>, <slot_no>, <value>),
# Scene visitors handling operations
add_troop_to_site = 1250 # (add_troop_to_site, <troop_id>, <scene_id>, <entry_no>),
# Set troop's position in the world to the specified scene and entry point. Entry point must have mtef_scene_source type. Agent will always appear at that entry when entering that scene. No longer used in Native.
remove_troop_from_site = 1251 # (remove_troop_from_site, <troop_id>, <scene_id>),
# Removes the troop from the specified scene. No longer used in Native.
modify_visitors_at_site = 1261 # (modify_visitors_at_site, <scene_id>),
# Declares the scene which visitors will be modified from that moment on.
reset_visitors = 1262 # (reset_visitors),
# Resets all visitors to the scene.
set_visitor = 1263 # (set_visitor, <entry_no>, <troop_id>, [<dna>]),
# Adds the specified troop as the visitor to the entry point of the scene defined with (modify_visitors_at_site). Entry point must have mtef_visitor_source type. Optional DNA parameter allows for randomization of agent looks (only applies to non-hero troops).
set_visitors = 1264 # (set_visitors, <entry_no>, <troop_id>, <number_of_troops>),
# Same as (set_visitor), but spawns an entire group of some troop type.
add_visitors_to_current_scene = 1265 # (add_visitors_to_current_scene, <entry_no>, <troop_id>, <number_of_troops>, <team_no>, <group_no>),
# Adds a number of troops to the specified entry point when the scene is already loaded. Team and group parameters are used in multiplayer mode only, singleplayer mode uses team settings for selected entry point as defined in module_mission_templates.py.
mission_tpl_entry_set_override_flags = 1940 # (mission_tpl_entry_set_override_flags, <mission_template_id>, <entry_no>, <value>),
# Allows modder to use a different set of equipment override flags (see af_* constants in header_mission_templates.py) for the selected entry point.
mission_tpl_entry_clear_override_items = 1941 # (mission_tpl_entry_clear_override_items, <mission_template_id>, <entry_no>),
# Clears the list of override equipment provided by the entry point definition in module_mission_templates.py.
mission_tpl_entry_add_override_item = 1942 # (mission_tpl_entry_add_override_item, <mission_template_id>, <entry_no>, <item_kind_id>),
# Specified item will be added to any agent spawning on specified entry point.
# Mission/scene general operations
set_mission_result = 1906 # (set_mission_result, <value>),
# Sets the result of the current mission (1 for victory, -1 for defeat).
finish_mission = 1907 # (finish_mission, <delay_in_seconds>),
# Exits the scene after the specified delay.
set_jump_mission = 1911 # (set_jump_mission, <mission_template_id>),
# Tells the game to use the specified mission template for the next mission. Apparently should precede the call to (jump_to_scene).
jump_to_scene = 1910 # (jump_to_scene, <scene_id>, [entry_no]),
# Tells the game to use the specified scene for the next mission. Usually followed by (change_screen_mission) call. Parameter entry_no does not seem to have any effect.
set_jump_entry = 1912 # (set_jump_entry, <entry_no>),
# Defines what entry point the player will appear at when the mission starts.
store_current_scene = 2211 # (store_current_scene, <destination>),
# Retrieves the identifier of the current scene. Note that the operation will return the scene id even after the mission is completed and the player is already on global map.
close_order_menu = 1789 # (close_order_menu),
# Version 1.161+. If orders menu is currently open, it will be closed.
entry_point_get_position = 1780 # (entry_point_get_position, <position>, <entry_no>),
# Retrieves the position of the entry point on the scene.
entry_point_set_position = 1781 # (entry_point_set_position, <entry_no>, <position>),
# Moves the entry point to the specified position on the scene.
entry_point_is_auto_generated = 1782 # (entry_point_is_auto_generated, <entry_no>),
# Checks that the entry point is auto-generated (in other words, there was no such entry point placed in the scene file).
# Scene parameters handling
scene_set_day_time = 1266 # (scene_set_day_time, <value>),
# Defines the time for the scene to force the engine to select a different skybox than the one dictated by current game time. Must be called within ti_before_mission_start trigger in module_mission_templates.py. Value should be in range 0..23.
set_rain = 1797 # (set_rain, <rain-type>, <strength>),
# Sets a new weather for the mission. Rain_type values: 0 = clear, 1 = rain, 2 = snow. Strength is in range 0..100.
set_fog_distance = 1798 # (set_fog_distance, <distance_in_meters>, [fog_color]),
# Sets the density (and optionally color) of the fog for the mission.
set_skybox = 2389 # (set_skybox, <non_hdr_skybox_index>, <hdr_skybox_index>),
# Version 1.153+. Forces the scene to be rendered with specified skybox. Index of -1 will disable.
set_startup_sun_light = 2390 # (set_startup_sun_light, <r>, <g>, <b>),
# Version 1.153+. Defines the sunlight color for the scene.
set_startup_ambient_light = 2391 # (set_startup_ambient_light, <r>, <g>, <b>),
# Version 1.153+. Defines the ambient light color for the scene.
set_startup_ground_ambient_light = 2392 # (set_startup_ground_ambient_light, <r>, <g>, <b>),
# Version 1.153+. Defines the ambient light color for the ground.
get_startup_sun_light = 2394 # (get_startup_sun_light, <position_no>),
# Version 1.165+. Returns startup sunlight color in (x, y, z) coordinates of position register.
get_startup_ambient_light = 2395 # (get_startup_ambient_light, <position_no>),
# Version 1.165+. Returns startup ambient light color in (x, y, z) coordinates of position register.
get_startup_ground_ambient_light = 2396 # (get_startup_ground_ambient_light, <position_no>),
# Version 1.165+. Returns startup ambient ground lighting color in (x, y, z) coordinates of position register.
get_battle_advantage = 1690 # (get_battle_advantage, <destination>),
# Retrieves the calculated battle advantage.
set_battle_advantage = 1691 # (set_battle_advantage, <value>),
# Sets a new value for battle advantage.
get_scene_boundaries = 1799 # (get_scene_boundaries, <position_min>, <position_max>),
# Retrieves the coordinates of the top-left and bottom-right corner of the scene to the provided position registers.
mission_enable_talk = 1935 # (mission_enable_talk),
# Allows dialogue with agents on the scene.
mission_disable_talk = 1936 # (mission_disable_talk),
# Disables dialogue with agents on the scene.
mission_get_time_speed = 2002 # (mission_get_time_speed, <destination_fixed_point>),
# Retrieves current time speed factor for the mission.
mission_set_time_speed = 2003 # (mission_set_time_speed, <value_fixed_point>),
# Instantly changes the speed of time during the mission. Speed of time cannot be set to zero or below. Operation only works when cheat mode is enabled.
mission_time_speed_move_to_value = 2004 # (mission_time_speed_move_to_value, <value_fixed_point>, <duration-in-1/1000-seconds>),
# Changes the speed of time during the mission gradually, within the specified duration period. Speed of time cannot be set to zero or below. Operation only works when cheat mode is enabled.
mission_set_duel_mode = 2006 # (mission_set_duel_mode, <value>),
# Sets duel mode for the multiplayer mission. Values: 0 = off, 1 = on.
store_zoom_amount = 2220 # (store_zoom_amount, <destination_fixed_point>),
# Version 1.153+. Stores current zoom rate.
set_zoom_amount = 2221 # (set_zoom_amount, <value_fixed_point>),
# Version 1.153+. Sets new zoom rate.
# Mission timers
reset_mission_timer_a = 2375 # (reset_mission_timer_a),
# Resets the value of first mission timer and starts it from zero.
reset_mission_timer_b = 2376 # (reset_mission_timer_b),
# Resets the value of second mission timer and starts it from zero.
reset_mission_timer_c = 2377 # (reset_mission_timer_c),
# Resets the value of third mission timer and starts it from zero.
store_mission_timer_a = 2370 # (store_mission_timer_a, <destination>),
# Retrieves current value of first mission timer, in seconds.
store_mission_timer_b = 2371 # (store_mission_timer_b, <destination>),
# Retrieves current value of second mission timer, in seconds.
store_mission_timer_c = 2372 # (store_mission_timer_c, <destination>),
# Retrieves current value of third mission timer, in seconds.
store_mission_timer_a_msec = 2365 # (store_mission_timer_a_msec, <destination>),
# Retrieves current value of first mission timer, in milliseconds.
store_mission_timer_b_msec = 2366 # (store_mission_timer_b_msec, <destination>),
# Retrieves current value of second mission timer, in milliseconds.
store_mission_timer_c_msec = 2367 # (store_mission_timer_c_msec, <destination>),
# Retrieves current value of third mission timer, in milliseconds.
# Camera and rendering operations
mission_cam_set_mode = 2001 # (mission_cam_set_mode, <mission_cam_mode>, <duration-in-1/1000-seconds>, <value>),
# Not documented. Changes main camera mode. Camera mode is 0 for automatic and 1 for manual (controlled by code). Duration parameter is used when switching from manual to auto, to determine how long the camera will take to move to its new position. Third parameter is not documented.
mission_cam_set_screen_color = 2008 # (mission_cam_set_screen_color, <value>),
# Not documented. Paints the screen with solid color. Parameter <value> contains color code with alpha component. Can be used to block screen entirely, add tint etc.
mission_cam_animate_to_screen_color = 2009 #(mission_cam_animate_to_screen_color, <value>, <duration-in-1/1000-seconds>),
# Not documented. Same as above, but color change is gradual. Used in Native to fill the screen with white before the end of marriage scene.
mission_cam_get_position = 2010 # (mission_cam_get_position, <position_register_no>)
# Retrieves the current position of camera during the mission (i.e. the point from which the player is observing the game).
mission_cam_set_position = 2011 # (mission_cam_set_position, <position_register_no>)
# Moves the camera to the specified position during the mission.
mission_cam_animate_to_position = 2012 # (mission_cam_animate_to_position, <position_register_no>, <duration-in-1/1000-seconds>, <value>)
# Moves the camera to the specified position smoothly. Second parameter determines how long it will take camera to move to destination, third parameter determines whether camera velocity will be linear (value = 0) or non-linear (value = 1).
mission_cam_get_aperture = 2013 # (mission_cam_get_aperture, <destination>)
# Not documented. View angle?
mission_cam_set_aperture = 2014 # (mission_cam_set_aperture, <value>)
# Not documented.
mission_cam_animate_to_aperture = 2015 # (mission_cam_animate_to_aperture, <value>, <duration-in-1/1000-seconds>, <value>)
# Not documented. if value = 0, then camera velocity will be linear. else it will be non-linear
mission_cam_animate_to_position_and_aperture = 2016 # (mission_cam_animate_to_position_and_aperture, <position_register_no>, <value>, <duration-in-1/1000-seconds>, <value>)
# Not documented. if value = 0, then camera velocity will be linear. else it will be non-linear
mission_cam_set_target_agent = 2017 # (mission_cam_set_target_agent, <agent_id>, <value>)
# Not documented. if value = 0 then do not use agent's rotation, else use agent's rotation
mission_cam_clear_target_agent = 2018 # (mission_cam_clear_target_agent)
# Not documented.
mission_cam_set_animation = 2019 # (mission_cam_set_animation, <anim_id>),
# Not documented.
mouse_get_world_projection = 751 # (mouse_get_world_projection, <position_no_1>, <position_no_2>),
# Version 1.161+. Returns current camera coordinates (first position) and mouse projection to the back of the world (second position). Rotation data of resulting positions seems unreliable.
cast_ray = 1900 # (cast_ray, <destination>, <hit_position_register>, <ray_position_register>, [<ray_length_fixed_point>]),
# Version 1.161+. Casts a ray starting from <ray_position_register> and stores the closest hit position into <hit_position_register> (fails if no hits). If the body hit is a scene prop, its instance id will be stored into <destination>, otherwise it will be -1. Optional <ray_length> parameter seems to have no effect.
set_postfx = 2386 # (set_postfx, ???)
# This operation is not documented, nor could any examples of its use be found. Parameters are unknown.
set_river_shader_to_mud = 2387 # (set_river_shader_to_mud, ???)
# Changes river material for muddy env. This operation is not documented, nor could any examples of its use be found. Parameters are unknown.
rebuild_shadow_map = 2393 # (rebuild_shadow_map),
# Version 1.153+. UNTESTED. Effects unknown. Rebuilds shadow map for the current scene. Apparently useful after heavy manipulation with scene props.
set_shader_param_int = 2400 # (set_shader_param_int, <parameter_name>, <value>), #Sets the int shader parameter <parameter_name> to <value>
# Version 1.153+. UNTESTED. Allows direct manipulation of shader parameters. Operation scope is unknown, possibly global. Parameter is an int value.
set_shader_param_float = 2401 # (set_shader_param_float, <parameter_name>, <value_fixed_point>),
# Version 1.153+. Allows direct manipulation of shader parameters. Operation scope is unknown, possibly global. Parameter is a float value.
set_shader_param_float4 = 2402 # (set_shader_param_float4, <parameter_name>, <valuex>, <valuey>, <valuez>, <valuew>),
# Version 1.153+. Allows direct manipulation of shader parameters. Operation scope is unknown, possibly global. Parameter is a set of 4 float values.
set_shader_param_float4x4 = 2403 # (set_shader_param_float4x4, <parameter_name>, [0][0], [0][1], [0][2], [1][0], [1][1], [1][2], [2][0], [2][1], [2][2], [3][0], [3][1], [3][2]),
# Version 1.153+. Allows direct manipulation of shader parameters. Operation scope is unknown, possibly global. Parameter is a set of 4x4 float values.
################################################################################
# [ Z21 ] SCENE PROPS, SCENE ITEMS, LIGHT SOURCES AND PARTICLE SYSTEMS
################################################################################
# On each scene in the game you can find scene props and scene items.
# Scene props are the building bricks of the scene. Nearly every 3D object you
# will see on any scene in the game is a scene prop, with the exception of
# terrain and flora (on some scenes flora elements are actually scene props
# as well though).
# Just like with troops and agents, it is important to differentiate between
# scene props and scene prop instances. You can have a dozen archer agents on
# the scene, and each of them will be an instance of the archer troop. Scene
# props are the same - there can be many castle wall sections on the scene,
# and these are instances of the same castle wall scene prop.
# It is also possible to use game items as elements of the scene. These are
# the scene items, and they behave just like normal scene props. However all
# operations will affect either scene prop instances, or scene items, but
# not both.
# Finally, there are spawned items. These are the "dropped" items which the
# player can pick up during the mission.
# Conditional operations
prop_instance_is_valid = 1838 # (prop_instance_is_valid, <scene_prop_instance_id>),
# Checks that the reference to a scene prop instance is valid (i.e. it was not removed).
prop_instance_is_animating = 1862 # (prop_instance_is_animating, <destination>, <scene_prop_id>),
# Checks that the scene prop instance is currently animating.
prop_instance_intersects_with_prop_instance = 1880 # (prop_instance_intersects_with_prop_instance, <checked_scene_prop_id>, <scene_prop_id>),
# Checks if two scene props are intersecting (i.e. collided). Useful when animating scene props movement. Pass -1 for second parameter to check the prop against all other props on the scene.
scene_prop_has_agent_on_it = 1801 # (scene_prop_has_agent_on_it, <scene_prop_instance_id>, <agent_id>)
# Checks that the specified agent is standing on the scene prop instance.
# Scene prop instance slot operations
scene_prop_set_slot = 510 # (scene_prop_set_slot, <scene_prop_instance_id>, <slot_no>, <value>),
scene_prop_get_slot = 530 # (scene_prop_get_slot, <destination>, <scene_prop_instance_id>, <slot_no>),
scene_prop_slot_eq = 550 # (scene_prop_slot_eq, <scene_prop_instance_id>, <slot_no>, <value>),
scene_prop_slot_ge = 570 # (scene_prop_slot_ge, <scene_prop_instance_id>, <slot_no>, <value>),
# Scene prop general operations
prop_instance_get_scene_prop_kind = 1853 # (prop_instance_get_scene_prop_kind, <destination>, <scene_prop_id>)
# Retrieves the scene prop for the specified prop instance.
scene_prop_get_num_instances = 1810 # (scene_prop_get_num_instances, <destination>, <scene_prop_id>),
# Retrieves the total number of instances of a specified scene prop on the current scene.
scene_prop_get_instance = 1811 # (scene_prop_get_instance, <destination>, <scene_prop_id>, <instance_no>),
# Retrieves the reference to a scene prop instance by its number.
scene_prop_enable_after_time = 1800 # (scene_prop_enable_after_time, <scene_prop_id>, <time_period>),
# Prevents usable scene prop from being used for the specified time period in 1/100th of second. Commonly used to implement "cooldown" periods.
set_spawn_position = 1970 # (set_spawn_position, <position>),
# Defines the position which will later be used by (spawn_scene_prop), (spawn_scene_item), (spawn_agent) and (spawn_horse) operations.
spawn_scene_prop = 1974 # (spawn_scene_prop, <scene_prop_id>),
# Spawns a new scene prop instance of the specified type at the position defined by the last call to (set_spawn_position). Operation was supposed to store the prop_instance_id of the spawned position in reg0, but does not do this at the moment.
prop_instance_get_variation_id = 1840 # (prop_instance_get_variation_id, <destination>, <scene_prop_id>),
# Retrieves the first variation ID number for the specified scene prop instance.
prop_instance_get_variation_id_2 = 1841 # (prop_instance_get_variation_id_2, <destination>, <scene_prop_id>),
# Retrieves the second variation ID number for the specified scene prop instance.
replace_prop_instance = 1889 # (replace_prop_instance, <scene_prop_id>, <new_scene_prop_id>),
# Replaces a single scene prop instance with an instance of another scene prop (usually with the same dimensions, but not necessarily so). Can only be called in ti_before_mission_start trigger in module_mission_templates.py.
replace_scene_props = 1890 # (replace_scene_props, <old_scene_prop_id>, <new_scene_prop_id>),
# Replaces all instances of specified scene prop type with another scene prop type. Commonly used to replace damaged walls with their intact versions during normal visits to castle scenes. Can only be called in ti_before_mission_start trigger in module_mission_templates.py.
scene_prop_fade_out = 1822 # (scene_prop_fade_out, <scene_prop_id>, <fade_out_time>)
# Version 1.153+. Makes the scene prop instance disappear within specified time.
scene_prop_fade_in = 1823 # (scene_prop_fade_in, <scene_prop_id>, <fade_in_time>)
# Version 1.153+. Makes the scene prop instance reappear within specified time.
prop_instance_set_material = 2617 # (prop_instance_set_material, <prop_instance_no>, <sub_mesh_no>, <string_register>),
# Version 1.161+. 4research. give sub mesh as -1 to change all meshes' materials.
# Scene prop manipulation
scene_prop_get_visibility = 1812 # (scene_prop_get_visibility, <destination>, <scene_prop_id>),
 # Retrieves the current visibility state of the scene prop instance (1 = visible, 0 = invisible).
scene_prop_set_visibility = 1813 # (scene_prop_set_visibility, <scene_prop_id>, <value>),
 # Shows (value = 1) or hides (value = 0) the scene prop instance. What does it do with collision? 4research.
scene_prop_get_hit_points = 1815 # (scene_prop_get_hit_points, <destination>, <scene_prop_id>),
 # Retrieves current number of hit points that the scene prop instance has.
scene_prop_get_max_hit_points = 1816 # (scene_prop_get_max_hit_points, <destination>, <scene_prop_id>),
 # Retrieves the maximum number of hit points that the scene prop instance has (useful to calculate the percent of damage).
scene_prop_set_hit_points = 1814 # (scene_prop_set_hit_points, <scene_prop_id>, <value>),
 # Sets the number of hit points that the scene prop has. Both current and max hit points are affected. Only makes sense for sokf_destructible scene props.
scene_prop_set_cur_hit_points = 1820 # (scene_prop_set_cur_hit_points, <scene_prop_id>, <value>),
 # Version 1.153+. Sets current HP amount for scene prop.
prop_instance_receive_damage = 1877 # (prop_instance_receive_damage, <scene_prop_id>, <agent_id>, <damage_value>),
 # Makes scene prop instance receive specified amount of damage from any arbitrary agent. Agent reference is apparently necessary to properly initialize ti_on_scene_prop_hit trigger parameters.
prop_instance_refill_hit_points = 1870 # (prop_instance_refill_hit_points, <scene_prop_id>),
 # Restores hit points of a scene prop instance to their maximum value.
scene_prop_get_team = 1817 # (scene_prop_get_team, <destination>, <scene_prop_id>),
 # Retrieves the team controlling the scene prop instance.
scene_prop_set_team = 1818 # (scene_prop_set_team, <scene_prop_id>, <value>),
 # Assigns the scene prop instance to a certain team.
scene_prop_set_prune_time = 1819 # (scene_prop_set_prune_time, <scene_prop_id>, <value>),
 # Not documented. Not used in Native. Taleworlds comment: Prune time can only be set to objects that are already on the prune queue. Static objects are not affected by this operation.
prop_instance_get_position = 1850 # (prop_instance_get_position, <position>, <scene_prop_id>),
 # Retrieves the prop instance current position on the scene.
prop_instance_get_starting_position = 1851 # (prop_instance_get_starting_position, <position>, <scene_prop_id>),
 # Retrieves the prop instance starting position on the scene (i.e. the place where it was positioned when initialized).
prop_instance_set_position = 1855 # (prop_instance_set_position, <scene_prop_id>, <position>, [dont_send_to_clients]),
 # Teleports prop instance to another position. Optional flag dont_send_to_clients can be used on the server to prevent position change from being replicated to client machines (useful when doing some calculations which require to move the prop temporarily to another place).
prop_instance_animate_to_position = 1860 # (prop_instance_animate_to_position, <scene_prop_id>, <position>, <duration-in-1/100-seconds>),
 # Moves prop instance to another position during the specified time frame (i.e. animates). Time is specified in 1/100th of second.
prop_instance_get_animation_target_position = 1863 # (prop_instance_get_animation_target_position, <pos>, <scene_prop_id>)
 # Retrieves the position that the prop instance is currently animating to.
prop_instance_stop_animating = 1861 # (prop_instance_stop_animating, <scene_prop_id>),
 # Stops animating of the prop instance in the current position.
prop_instance_get_scale = 1852 # (prop_instance_get_scale, <position>, <scene_prop_id>),
 # Retrieves the current scaling factors of the prop instance.
prop_instance_set_scale = 1854 # (prop_instance_set_scale, <scene_prop_id>, <value_x_fixed_point>, <value_y_fixed_point>, <value_z_fixed_point>),
 # Sets new scaling factors for the scene prop.
prop_instance_enable_physics = 1864 # (prop_instance_enable_physics, <scene_prop_id>, <value>),
 # Enables (value = 1) or disables (value = 0) physics calculation (gravity, collision checks) for the scene prop instance.
prop_instance_initialize_rotation_angles = 1866 # (prop_instance_initialize_rotation_angles, <scene_prop_id>),
 # Should be called to initialize the scene prop instance prior to any calls to (prop_instance_rotate_to_position).
prop_instance_rotate_to_position = 1865 # (prop_instance_rotate_to_position, <scene_prop_id>, <position>, <duration-in-1/100-seconds>, <total_rotate_angle_fixed_point>),
 # Specified prop instance will move to the target position within the specified duration of time, and within the same time it will rotate for the specified angle. Used in Native code to simulate behavior of belfry wheels and rotating winches.
prop_instance_clear_attached_missiles = 1885 # (prop_instance_clear_attached_missiles, <scene_prop_id>),
 # Version 1.153+. Removes all missiles currently attached to the scene prop. Only works with dynamic scene props.
prop_instance_dynamics_set_properties = 1871 # (prop_instance_dynamics_set_properties, <scene_prop_id>, <position>),
 # Initializes physical parameters of a scene prop. Position (X,Y) coordinates are used to store object's mass and friction coefficient. Coordinate Z is reserved (set it to zero just in case). Scene prop must be defined as sokf_moveable|sokf_dynamic_physics, and a call to (prop_instance_enable_physics) must be previously made.
prop_instance_dynamics_set_velocity = 1872 # (prop_instance_dynamics_set_velocity, <scene_prop_id>, <position>),
 # Sets current movement speed for a scene prop. Position's coordinates define velocity along corresponding axis. Same comments as for (prop_instance_dynamics_set_properties).
prop_instance_dynamics_set_omega = 1873 # (prop_instance_dynamics_set_omega, <scene_prop_id>, <position>),
 # Sets current rotation speed for a scene prop. Position's coordinates define rotational speed around corresponding axis. Same comments as for (prop_instance_dynamics_set_properties).
prop_instance_dynamics_apply_impulse = 1874 # (prop_instance_dynamics_apply_impulse, <scene_prop_id>, <position>),
 # Applies an impulse of specified scale to the scene prop. Position's coordinates define instant change in movement speed along corresponding axis. Same comments as for (prop_instance_dynamics_set_properties).
prop_instance_deform_to_time = 2610 # (prop_instance_deform_to_time, <prop_instance_no>, <value>),
 # Version 1.161+. Deforms a vertex-animated scene prop to specified vertex time. If you open the mesh in OpenBrf, right one of "Time of frame" boxes contains the relevant value.
prop_instance_deform_in_range = 2611 # (prop_instance_deform_in_range, <prop_instance_no>, <start_frame>, <end_frame>, <duration-in-1/1000-seconds>),
 # Version 1.161+. Animate vertex-animated scene prop from start frame to end frame within the specified time period (in milliseconds). If you open the mesh in OpenBrf, right one of "Time of frame" boxes contains the relevant values for frame parameters.
prop_instance_deform_in_cycle_loop = 2612 # (prop_instance_deform_in_cycle_loop, <prop_instance_no>, <start_frame>, <end_frame>, <duration-in-1/1000-seconds>),
 # Version 1.161+. Performs looping animation of vertex-animated scene prop within the specified vertex frame ranges and within specified time (in milliseconds). If you open the mesh in OpenBrf, right one of "Time of frame" boxes contains the relevant values for frame parameters.
prop_instance_get_current_deform_progress = 2615 # (prop_instance_get_current_deform_progress, <destination>, <prop_instance_no>),
 # Version 1.161+. Returns a percentage value between 0 and 100 if animation is still in progress. Returns 100 otherwise.
prop_instance_get_current_deform_frame = 2616 # (prop_instance_get_current_deform_frame, <destination>, <prop_instance_no>),
 # Version 1.161+. Returns current frame of a vertex-animated scene prop, rounded to nearest integer value.
prop_instance_play_sound = 1881 # (prop_instance_play_sound, <scene_prop_id>, <sound_id>, [flags]),
 # Version 1.153+. Makes the scene prop play a specified sound. See sf_* flags in header_sounds.py for reference on possible options.
prop_instance_stop_sound = 1882 # (prop_instance_stop_sound, <scene_prop_id>),
 # Version 1.153+. Stops any sound currently played by the scene prop instance.
# Scene items operations
scene_item_get_num_instances = 1830 # (scene_item_get_num_instances, <destination>, <item_id>),
 # Gets the number of specified scene items present on the scene. Scene items behave exactly like scene props (i.e. cannot be picked).
scene_item_get_instance = 1831 # (scene_item_get_instance, <destination>, <item_id>, <instance_no>),
 # Retrieves the reference to a single instance of a scene item by its sequential number.
scene_spawned_item_get_num_instances = 1832 # (scene_spawned_item_get_num_instances, <destination>, <item_id>),
 # Retrieves the number of specified spawned items present on the scene. Spawned items are actual items, i.e. they can be picked by player.
scene_spawned_item_get_instance = 1833 # (scene_spawned_item_get_instance, <destination>, <item_id>, <instance_no>),
 # Retrieves the reference to a single instance of a spawned item by its sequential number.
replace_scene_items_with_scene_props = 1891 # (replace_scene_items_with_scene_props, <old_item_id>, <new_scene_prop_id>),
 # Replaces all instances of specified scene item with scene props. Can only be called in ti_before_mission_start trigger in module_mission_templates.py.
set_spawn_position = 1970 # (set_spawn_position, <position>), ## DUPLICATE ENTRY: same opcode 1970 as above, repeated here for reference.
 # Defines the position which will later be used by (spawn_scene_prop), (spawn_scene_item), (spawn_agent) and (spawn_horse) operations.
spawn_item = 1971 # (spawn_item, <item_kind_id>, <item_modifier>, [seconds_before_pruning]),
 # Spawns a new item, possibly with modifier, on the scene in the position specified by previous call to (set_spawn_position). Optional parameter determines time period (in seconds) after which the item will disappear. Using 0 will prevent the item from disappearing.
spawn_item_without_refill = 1976 # (spawn_item_without_refill, <item_kind_id>, <item_modifier>, [seconds_before_pruning]),
 # Version 1.153+. UNTESTED. It is unclear how this is different from standard (spawn_item).
# Light sources and particle systems
set_current_color = 1950 # (set_current_color, <red_value>, <green_value>, <blue_value>),
 # Sets color for subsequent calls to (add_point_light) etc. Color component ranges are 0..255.
set_position_delta = 1955 # (set_position_delta, <value>, <value>, <value>),
 # Can only be called inside item or scene prop triggers. Sets (X,Y,Z) offsets from the item/prop current position for subsequent calls to (add_point_light) etc. Offsets are apparently in centimeters.
add_point_light = 1960 # (add_point_light, [flicker_magnitude], [flicker_interval]),
 # Adds a point light source to an object with optional flickering magnitude (range 0..100) and flickering interval (in 1/100th of second). Uses position offset and color provided to previous calls to (set_position_delta) and (set_current_color). Can only be used in item triggers.
add_point_light_to_entity = 1961 # (add_point_light_to_entity, [flicker_magnitude], [flicker_interval]),
 # Adds a point light source to an object with optional flickering magnitude (range 0..100) and flickering interval (in 1/100th of second). Uses position offset and color provided to previous calls to (set_position_delta) and (set_current_color). Can only be used in scene prop triggers.
particle_system_add_new = 1965 # (particle_system_add_new, <par_sys_id>, [position]),
 # Adds a new particle system to an object. Uses position offset and color provided to previous calls to (set_position_delta) and (set_current_color). Can only be used in item/prop triggers.
particle_system_emit = 1968 # (particle_system_emit, <par_sys_id>, <value_num_particles>, <value_period>),
 # Adds a particle system in some fancy way. Uses position offset and color provided to previous calls to (set_position_delta) and (set_current_color). Can only be used in item/prop triggers.
particle_system_burst = 1969 # (particle_system_burst, <par_sys_id>, <position>, [percentage_burst_strength]),
 # Bursts a particle system in specified position.
particle_system_burst_no_sync = 1975 # (particle_system_burst_no_sync, <par_sys_id>, <position_no>, [percentage_burst_strength]),
 # Version 1.153+. Same as above, but apparently does not synchronize this between server and client.
prop_instance_add_particle_system = 1886 # (prop_instance_add_particle_system, <scene_prop_id>, <par_sys_id>, <position_no>),
 # Version 1.153+. Adds a new particle system to the scene prop. Note that <position_no> is local, i.e. in relation to scene prop's coordinates and rotation.
prop_instance_stop_all_particle_systems = 1887 # (prop_instance_stop_all_particle_systems, <scene_prop_id>),
 # Version 1.153+. Removes all particle systems currently associated with scene prop instance.
################################################################################
# [ Z22 ] AGENTS AND TEAMS
################################################################################
# An agent represents a single soldier on the 3D scene. Always keep this in
# mind when dealing with regular troops. A party may have 30 Swadian Knights.
# They will form a single troop stack in the party, and they will all be
# copies of the one and only Swadian Knight troop. However when the battle
# starts, this stack will spawn 30 distinct Agents.
# Agents do not persist - they only exist in the game for the duration of the
# mission. As soon as the player returns to the world map, all agents who were
# present on the scene immediately disappear. If this was a battle during a
# normal game encounter, then the game will keep track of the battle results,
# and depending on the number of agents killed from all sides the engine will
# kill or wound some troops in the troop stacks of the parties who were
# participating in the battle.
# During the mission, all agents are split into teams. By default player and
# his companions are placed into Team 0, but this may be changed in the
# mission template or by code. Player's enemies are usually team 1 (though
# again, this is not set in stone). Module System provides the modder with
# a great degree of control over teams composition, relation to each other
# (you can make hostile, allied or neutral teams, and you can have more than
# one team on the scene).
# Conditional operations
agent_is_in_special_mode = 1693 # (agent_is_in_special_mode, <agent_id>),
 # Checks that the agent is currently in scripted mode.
agent_is_routed = 1699 # (agent_is_routed, <agent_id>),
 # Checks that the agent has fled from the map (i.e. reached the edge of the map in fleeing mode and then faded).
agent_is_alive = 1702 # (agent_is_alive, <agent_id>),
 # Checks that the agent is alive.
agent_is_wounded = 1703 # (agent_is_wounded, <agent_id>),
 # Checks that the agent has been knocked unconscious.
agent_is_human = 1704 # (agent_is_human, <agent_id>),
 # Checks that the agent is human (i.e. not horse).
agent_is_ally = 1706 # (agent_is_ally, <agent_id>),
 # Checks that the agent is allied to the player (belongs to player's party or allied party in current encounter).
agent_is_non_player = 1707 # (agent_is_non_player, <agent_id>),
 # Checks that the agent is not a player.
agent_is_defender = 1708 # (agent_is_defender, <agent_id>),
 # Checks that the agent belongs to the defending side (see encounter operations for details).
agent_is_active = 1712 # (agent_is_active, <agent_id>),
 # Checks that the agent reference is active. This will succeed for dead or routed agents, for as long as the agent reference itself is valid.
agent_has_item_equipped = 1729 # (agent_has_item_equipped, <agent_id>, <item_id>),
 # Checks that the agent has a specific item equipped.
agent_is_in_parried_animation = 1769 # (agent_is_in_parried_animation, <agent_id>),
 # Checks that the agent is currently in parrying animation (defending from some attack).
agent_is_alarmed = 1806 # (agent_is_alarmed, <agent_id>),
 # Checks that the agent is alarmed (in combat mode with weapon drawn).
class_is_listening_order = 1775 # (class_is_listening_order, <team_no>, <sub_class>),
 # Checks that the specified group of the specified team is listening to player's orders.
teams_are_enemies = 1788 # (teams_are_enemies, <team_no>, <team_no_2>),
 # Checks that the two teams are hostile to each other.
agent_is_in_line_of_sight = 1826 # (agent_is_in_line_of_sight, <agent_id>, <position_no>),
 # Version 1.153+. Checks that the agent can be seen from specified position. Rotation of position register is not used (i.e. agent will be seen even if position is "looking" the other way).
# Team and agent slot operations: store/retrieve arbitrary per-team and
# per-agent values, and compare a slot against a value.
team_set_slot = 509 # (team_set_slot, <team_id>, <slot_no>, <value>),
team_get_slot = 529 # (team_get_slot, <destination>, <team_id>, <slot_no>),
team_slot_eq = 549 # (team_slot_eq, <team_id>, <slot_no>, <value>),
team_slot_ge = 569 # (team_slot_ge, <team_id>, <slot_no>, <value>),
agent_set_slot = 505 # (agent_set_slot, <agent_id>, <slot_no>, <value>),
agent_get_slot = 525 # (agent_get_slot, <destination>, <agent_id>, <slot_no>),
agent_slot_eq = 545 # (agent_slot_eq, <agent_id>, <slot_no>, <value>),
agent_slot_ge = 565 # (agent_slot_ge, <agent_id>, <slot_no>, <value>),
# Agent spawning, removal and general operations
add_reinforcements_to_entry = 1930 # (add_reinforcements_to_entry, <mission_template_entry_no>, <wave_size>),
 # For battle missions, adds reinforcement wave to the specified entry point. Additional parameter determines relative wave size. Agents in reinforcement wave are taken from all parties of the side that the entry point belongs to due to mtef_team_* flags.
set_spawn_position = 1970 # (set_spawn_position, <position>), ## DUPLICATE ENTRY: same opcode 1970 as above, repeated here for reference.
 # Defines the position which will later be used by (spawn_scene_prop), (spawn_scene_item), (spawn_agent) and (spawn_horse) operations.
spawn_agent = 1972 # (spawn_agent, <troop_id>),
 # Spawns a new troop at the position set by (set_spawn_position) and saves the reference to the new agent in reg0.
spawn_horse = 1973 # (spawn_horse, <item_kind_id>, <item_modifier>),
 # Spawns a new horse (with any modifier) at the position set by (set_spawn_position) and saves the reference to the new agent in reg0.
remove_agent = 1755 # (remove_agent, <agent_id>),
 # Immediately removes the agent from the scene.
agent_fade_out = 1749 # (agent_fade_out, <agent_id>),
 # Fades out the agent from the scene (same effect as fleeing enemies when they get to the edge of map).
agent_play_sound = 1750 # (agent_play_sound, <agent_id>, <sound_id>),
 # Makes the agent emit the specified sound.
agent_stop_sound = 1808 # (agent_stop_sound, <agent_id>),
 # Stops whatever sound agent is currently performing.
agent_set_visibility = 2096 # (agent_set_visibility, <agent_id>, <value>),
 # Version 1.153+. Sets agent visibility. 0 for invisible, 1 for visible.
get_player_agent_no = 1700 # (get_player_agent_no, <destination>),
 # Retrieves the reference to the player-controlled agent. Singleplayer mode only.
agent_get_kill_count = 1723 # (agent_get_kill_count, <destination>, <agent_id>, [get_wounded]),
 # Retrieves the total number of kills by the specified agent during this battle. Call with non-zero <get_wounded> parameter to retrieve the total number of enemies the agent has knocked down.
agent_get_position = 1710 # (agent_get_position, <position>, <agent_id>),
 # Retrieves the position of the specified agent on the scene.
agent_set_position = 1711 # (agent_set_position, <agent_id>, <position>),
 # Teleports the agent to specified position on the scene. Be careful with riders - you must teleport the horse, not the rider for the operation to work correctly!
agent_get_horse = 1714 # (agent_get_horse, <destination>, <agent_id>),
 # Retrieves the reference to the horse agent that the specified agent is riding, or -1 if he's not riding a horse (or is a horse himself).
agent_get_rider = 1715 # (agent_get_rider, <destination>, <horse_agent_id>),
 # Retrieves the reference to the rider agent who is riding the specified horse, or -1 if there's no rider or the specified agent is not a horse.
agent_get_party_id = 1716 # (agent_get_party_id, <destination>, <agent_id>),
 # Retrieves the party that the specified agent belongs to (supposedly should only work in battle missions for agents spawned as starting/reinforcement waves).
agent_get_entry_no = 1717 # (agent_get_entry_no, <destination>, <agent_id>),
 # Retrieves the entry point number where this agent has spawned. What does this return for agents spawned with (spawn_agent)? 4research.
agent_get_troop_id = 1718 # (agent_get_troop_id, <destination>, <agent_id>),
 # Retrieves the troop type of the specified agent. Returns -1 for horses (because horses are items, not troops).
agent_get_item_id = 1719 # (agent_get_item_id, <destination>, <horse_agent_id>),
 # Retrieves the item type of the specified horse agent. Returns -1 for humans.
# Agent combat parameters and stats
store_agent_hit_points = 1720 # (store_agent_hit_points, <destination>, <agent_id>, [absolute]),
 # Retrieves current agent health. Optional last parameter determines whether actual health (absolute = 1) or relative percentile health (absolute = 0) is returned. Default is relative.
agent_set_hit_points = 1721 # (agent_set_hit_points, <agent_id>, <value>, [absolute]),
 # Sets new value for agent health. Optional last parameter determines whether the value is interpreted as actual health (absolute = 1) or relative percentile health (absolute = 0). Default is relative.
agent_set_max_hit_points = 2090 # (agent_set_max_hit_points, <agent_id>, <value>, [absolute]),
 # Version 1.153+. Changes agent's max hit points. Optional flag [absolute] determines if <value> is an absolute number of his points, or relative percentage (0..1000) of default value. Treated as percentage by default.
agent_deliver_damage_to_agent = 1722 # (agent_deliver_damage_to_agent, <agent_id_deliverer>, <agent_id>, [damage_amount], [weapon_item_id]),
 # Makes one agent deal damage to another. Parameter damage_amount is optional, if it is skipped or <= 0, then damage will be calculated using attacker's weapon item and stats (like a normal weapon attack). Optional parameter weapon_item_id was added in 1.153 and will force the game the calculate the damage using this weapon.
agent_deliver_damage_to_agent_advanced = 1827 # (agent_deliver_damage_to_agent_advanced, <destination>, <attacker_agent_id>, <agent_id>, <value>, [weapon_item_id]),
 # Version 1.153+. Same as (agent_deliver_damage_to_agent), but resulting damage is returned. Also operation takes relations between agents into account, which may result in no damage, or even damage to attacker due to friendly fire rules.
add_missile = 1829 # (add_missile, <agent_id>, <starting_position>, <starting_speed_fixed_point>, <weapon_item_id>, <weapon_item_modifier>, <missile_item_id>, <missile_item_modifier>),
 # Version 1.153+. Creates a missile with specified parameters. Note that <starting_position> parameter also determines the direction in which missile flies.
agent_get_speed = 1689 # (agent_get_speed, <position>, <agent_id>),
 # Retrieves agent speed to (X,Y) coordinates of the position register. What do these mean - speed by world axis?
agent_set_no_death_knock_down_only = 1733 # (agent_set_no_death_knock_down_only, <agent_id>, <value>),
 # Sets the agent as unkillable (value = 1) or normal (value = 0). Unkillable agents will drop on the ground instead of dying and will stand up afterwards.
agent_set_horse_speed_factor = 1734 # (agent_set_horse_speed_factor, <agent_id>, <speed_multiplier-in-1/100>),
 # Multiplies agent's horse speed (and maneuverability?) by the specified percentile value (a value of 100 leaves the horse's speed unchanged). Note that this is called on the rider, not on the horse! Supposedly will persist even if the agent changes horses. 4research.
agent_set_speed_limit = 1736 # (agent_set_speed_limit, <agent_id>, <speed_limit(kilometers/hour)>),
 # Limits agent speed by the specified value in kph. Use 5 for average walking speed. Affects only AI agents.
agent_set_damage_modifier = 2091 # (agent_set_damage_modifier, <agent_id>, <value>),
 # Version 1.153+. Changes the damage delivered by this agent. Value is in percentage, 100 is default, 1000 is max possible value.
agent_set_accuracy_modifier = 2092 # (agent_set_accuracy_modifier, <agent_id>, <value>),
 # Version 1.153+. Changes agent's accuracy (with ranged weapons?). Value is in percentage, 100 is default, value can be between [0..1000]
agent_set_speed_modifier = 2093 # (agent_set_speed_modifier, <agent_id>, <value>),
 # Version 1.153+. Changes agent's speed. Value is in percentage, 100 is default, value can be between [0..1000]
agent_set_reload_speed_modifier = 2094 # (agent_set_reload_speed_modifier, <agent_id>, <value>),
 # Version 1.153+. Changes agent's reload speed. Value is in percentage, 100 is default, value can be between [0..1000]
agent_set_use_speed_modifier = 2095 # (agent_set_use_speed_modifier, <agent_id>, <value>),
 # Version 1.153+. Changes agent's speed with using various scene props. Value is in percentage, 100 is default, value can be between [0..1000]
agent_set_ranged_damage_modifier = 2099 # (agent_set_ranged_damage_modifier, <agent_id>, <value>),
 # Version 1.157+. Changes agent's damage with ranged weapons. Value is in percentage, 100 is default, value can be between [0..1000]
agent_get_time_elapsed_since_removed = 1760 # (agent_get_time_elapsed_since_removed, <destination>, <agent_id>),
 # Retrieves the number of seconds that have passed since agent's death. Native uses this only for multiplayer to track player's respawns. Can it be used in singleplayer too? 4research.
# Agent equipment
agent_refill_wielded_shield_hit_points = 1692 # (agent_refill_wielded_shield_hit_points, <agent_id>),
 # Restores all hit points for the shield the agent is currently wielding.
agent_set_invulnerable_shield = 1725 # (agent_set_invulnerable_shield, <agent_id>, <value>),
 # Makes the agent invulnerable to any damage (value = 1) or makes him vulnerable again (value = 0).
agent_get_wielded_item = 1726 # (agent_get_wielded_item, <destination>, <agent_id>, <hand_no>),
 # Retrieves the item reference that the agent is currently wielding in his right hand (hand_no = 0) or left hand (hand_no = 1). Note that weapons are always wielded in right hand, and shield in left hand. When wielding a two-handed weapon (including bows and crossbows), this operation will return -1 for left hand.
agent_get_ammo = 1727 # (agent_get_ammo, <destination>, <agent_id>, <value>),
 # Retrieves the current ammo amount agent has for his wielded item (value = 1) or all his items (value = 0).
agent_get_item_cur_ammo = 1977 # (agent_get_item_cur_ammo, <destination>, <agent_id>, <slot_no>),
 # Version 1.153+. Returns remaining ammo for specified agent's item.
agent_refill_ammo = 1728 # (agent_refill_ammo, <agent_id>),
 # Refills all ammo and throwing weapon stacks that the agent has in his equipment.
agent_set_wielded_item = 1747 # (agent_set_wielded_item, <agent_id>, <item_id>),
 # Forces the agent to wield the specified item. Agent must have that item in his equipment for this to work. Use item_id = -1 to unwield any currently wielded item.
agent_equip_item = 1779 # (agent_equip_item, <agent_id>, <item_id>, [weapon_slot_no]),
 # Adds the specified item to agent and forces him to equip it. Optional weapon_slot_no parameter is only used with weapons and will put the newly added item to that slot (range 1..4). If it is omitted with a weapon item, then the agent must have an empty weapon slot for the operation to succeed.
agent_unequip_item = 1774 # (agent_unequip_item, <agent_id>, <item_id>, [weapon_slot_no]),
 # Removes the specified item from the agent. Optional parameter weapon_slot_no is in range 1..4 and determines what weapon slot to remove (item_id must still be set correctly).
agent_set_ammo = 1776 # (agent_set_ammo, <agent_id>, <item_id>, <value>),
 # Sets current agent ammo amount to the specified value between 0 and maximum ammo. Not clear what item_id means - weapon item or ammo item? 4research.
agent_get_item_slot = 1804 # (agent_get_item_slot, <destination>, <agent_id>, <value>),
 # Retrieves item_id for specified agent's slot. Possible slot values range in 0..7, order is weapon1, weapon2, weapon3, weapon4, head_armor, body_armor, leg_armor, hand_armor.
agent_get_ammo_for_slot = 1825 # (agent_get_ammo_for_slot, <destination>, <agent_id>, <slot_no>),
 # Retrieves the amount of ammo agent has in the referenced slot (range 0..3).
# Agent animations
agent_set_no_dynamics = 1762 # (agent_set_no_dynamics, <agent_id>, <value>),
 # Makes the agent stand on the spot (value = 1) or move normally (value = 0). When frozen on the spot the agent can still turn around and fight if necessary. Used in Native for the wedding scene.
agent_get_animation = 1768 # (agent_get_animation, <destination>, <agent_id>, <body_part>),
 # Retrieves current agent animation for specified body part (0 = lower, 1 = upper).
agent_set_animation = 1740 # (agent_set_animation, <agent_id>, <anim_id>, [channel_no]),
 # Forces the agent to perform the specified animation. Optional channel_no parameter determines whether upper body (value = 1) or lower body (value = 0, default) is affected by animation.
agent_set_stand_animation = 1741 # (agent_set_stand_animation, <agent_id>, <anim_id>),
 # Defines the animation that this agent will use when standing still. Does not force the agent into actually doing this animation.
agent_set_walk_forward_animation = 1742 # (agent_set_walk_forward_animation, <agent_id>, <anim_id>),
 # Defines the animation that this agent will use when walking forward. Only works for NPC agents.
agent_set_animation_progress = 1743 # (agent_set_animation_progress, <agent_id>, <value_fixed_point>),
 # Allows to skip the agent to a certain point in the animation cycle, as specified by the fixed point value (0..fixed_point_multiplier).
agent_ai_set_can_crouch = 2083 # (agent_ai_set_can_crouch, <agent_id>, <value>),
 # Version 1.153+. Allows or forbids the agent to crouch. 0 to forbid, 1 to allow.
agent_get_crouch_mode = 2097 # (agent_get_crouch_mode, <destination>, <agent_id>),
 # Version 1.153+. Retrieves agent's crouch status (1 = crouching, 0 = standing).
agent_set_crouch_mode = 2098 # (agent_set_crouch_mode, <agent_id>, <value>),
 # Version 1.153+. Sets agent's crouch status (1 = crouch, 0 = stand up).
agent_get_attached_scene_prop = 1756 # (agent_get_attached_scene_prop, <destination>, <agent_id>)
 # Retrieves the reference to scene prop instance which is attached to the agent, or -1 if there isn't any.
agent_set_attached_scene_prop = 1757 # (agent_set_attached_scene_prop, <agent_id>, <scene_prop_id>)
 # Attaches the specified prop instance to the agent. Used in multiplayer CTF missions to attach flags to players.
agent_set_attached_scene_prop_x = 1758 # (agent_set_attached_scene_prop_x, <agent_id>, <value>)
 # Offsets the position of the attached scene prop in relation to agent, in centimeters, along the X axis (left/right).
agent_set_attached_scene_prop_y = 1809 # (agent_set_attached_scene_prop_y, <agent_id>, <value>)
 # Offsets the position of the attached scene prop in relation to agent, in centimeters, along the Y axis (backwards/forward).
agent_set_attached_scene_prop_z = 1759 # (agent_set_attached_scene_prop_z, <agent_id>, <value>)
 # Offsets the position of the attached scene prop in relation to agent, in centimeters, along the Z axis (down/up).
agent_get_bone_position = 2076 # (agent_get_bone_position, <position_no>, <agent_no>, <bone_no>, [<local_or_global>]),
 # Version 1.161+. Returns current position for agent's bone (examine skeleton in openBrf to learn bone numbers). Pass 1 as optional <local_or_global> parameter to retrieve global bone coordinates.
# Agent AI and scripted behavior
agent_ai_set_interact_with_player = 2077 # (agent_ai_set_interact_with_player, <agent_no>, <value>),
 # Version 1.165+. Enables or disables agent AI interaction with player. Dialog? Combat? 4research.
agent_set_is_alarmed = 1807 # (agent_set_is_alarmed, <agent_id>, <value>),
 # Sets agent's status as alarmed (value = 1) or peaceful (value = 0).
agent_clear_relations_with_agents = 1802 # (agent_clear_relations_with_agents, <agent_id>),
 # Clears any agent-to-agent relations for specified agent.
agent_add_relation_with_agent = 1803 # (agent_add_relation_with_agent, <agent_id>, <agent_id>, <value>),
 # Changes relations between two agents on the scene to enemy (value = -1), neutral (value = 0), ally (value = 1). Note that neutral agents are immune to friendly fire.
agent_get_number_of_enemies_following = 1761 # (agent_get_number_of_enemies_following, <destination>, <agent_id>),
 # Retrieves the total number of enemies who are currently attacking the specified agents. May be used for AI decision-making.
agent_ai_get_num_cached_enemies = 2670 # (agent_ai_get_num_cached_enemies, <destination>, <agent_no>),
 # Version 1.165+. Returns total number of nearby enemies as has been cached by agent AI. Enemies are numbered from nearest to farthest.
agent_ai_get_cached_enemy = 2671 # (agent_ai_get_cached_enemy, <destination>, <agent_no>, <cache_index>),
 # Version 1.165+. Return agent reference from AI's list of cached enemies, from nearest to farthest. Returns -1 if the cached enemy is not active anymore.
agent_get_attack_action = 1763 # (agent_get_attack_action, <destination>, <agent_id>),
 # Retrieves agent's current attack action. Possible values: free = 0, readying_attack = 1, releasing_attack = 2, completing_attack_after_hit = 3, attack_parried = 4, reloading = 5, after_release = 6, cancelling_attack = 7.
agent_get_defend_action = 1764 # (agent_get_defend_action, <destination>, <agent_id>),
 # Retrieves agent's current defend action. Possible values: free = 0, parrying = 1, blocking = 2.
agent_get_action_dir = 1767 # (agent_get_action_dir, <destination>, <agent_id>),
 # Retrieves the direction of current agent's action. Possible values: invalid = -1, down = 0, right = 1, left = 2, up = 3.
agent_set_attack_action = 1745 # (agent_set_attack_action, <agent_id>, <direction_value>, <action_value>),
 # Forces the agent to perform an attack action. Direction value: -2 = cancel any action (1.153+), 0 = thrust, 1 = slashright, 2 = slashleft, 3 = overswing. Action value: 0 = ready and release, 1 = ready and hold.
agent_set_defend_action = 1746 # (agent_set_defend_action, <agent_id>, <value>, <duration-in-1/1000-seconds>),
 # Forces the agent to perform a defend action. Possible values: -2 = cancel any action (1.153+), 0 = defend_down, 1 = defend_right, 2 = defend_left, 3 = defend_up. Does time value determine delay, speed or duration? 4research.
agent_set_scripted_destination = 1730 # (agent_set_scripted_destination, <agent_id>, <position>, [auto_set_z_to_ground_level], [no_rethink]),
 # Forces the agent to travel to specified position and stay there until new behavior is set or scripted mode cleared. First optional parameter determines whether the position Z coordinate will be automatically set to ground level (value = 1) or not (value = 0). Second optional parameter added in 1.165 patch, set it to 1 to save resources.
agent_set_scripted_destination_no_attack = 1748 # (agent_set_scripted_destination_no_attack, <agent_id>, <position>, <auto_set_z_to_ground_level>),
 # Same as above, but the agent will not attack his enemies.
agent_get_scripted_destination = 1731 # (agent_get_scripted_destination, <position>, <agent_id>),
 # Retrieves the position which is defined as agent's scripted destination, if any.
agent_force_rethink = 1732 # (agent_force_rethink, <agent_id>),
 # Forces the agent to recalculate his current actions after setting him a new scripted destination or changing other factors affecting his behavior.
agent_clear_scripted_mode = 1735 # (agent_clear_scripted_mode, <agent_id>),
 # Clears scripting mode from the agent, making him behave as usual again.
agent_ai_set_always_attack_in_melee = 1737 # (agent_ai_set_always_attack_in_melee, <agent_id>, <value>),
 # Forces the agent to continuously attack in melee combat, instead of defending. Used in Native to prevent stalling at the top of the siege ladder. Use value = 0 to clear this mode.
agent_get_simple_behavior = 1738 # (agent_get_simple_behavior, <destination>, <agent_id>),
 # Retrieves agent's current simple behavior (see aisb_* constants in header_mission_templates.py for details).
agent_ai_get_behavior_target = 2082 # (agent_ai_get_behavior_target, <destination>, <agent_id>),
 # Version 1.153+. UNTESTED. Supposedly returns agent_id which is the target of current agent's behavior.
agent_get_combat_state = 1739 # (agent_get_combat_state, <destination>, <agent_id>),
 # Retrieves agent's current combat state:
 # 0 = nothing special, this value is also always returned for player and for dead agents.
 # 1 = target in sight (for ranged units)
 # 2 = guarding (without a shield)
 # 3 = preparing a melee attack or firing a ranged weapon
 # 4 = releasing a melee attack or reloading a crossbow
 # 7 = recovering after being hit in melee OR blocking with a shield. Contradictory information, 4research.
 # 8 = target to the right (horse archers) OR no target in sight (ranged units). Contradictory information, 4research.
agent_ai_get_move_target = 2081 # (agent_ai_get_move_target, <destination>, <agent_id>),
 # Version 1.153+. UNTESTED. Supposedly returns the enemy agent to whom the agent is currently moving to.
agent_get_look_position = 1709 # (agent_get_look_position, <position>, <agent_id>),
 # Retrieves the position that the agent is currently looking at.
agent_set_look_target_position = 1744 # (agent_set_look_target_position, <agent_id>, <position>),
 # Forces the agent to look at specified position (turn his head as necessary). Alarmed agents will ignore this.
agent_ai_get_look_target = 2080 # (agent_ai_get_look_target, <destination>, <agent_id>),
 # Version 1.153+. UNTESTED. Supposedly returns agent_id that the agent is currently looking at.
agent_set_look_target_agent = 1713 # (agent_set_look_target_agent, <watcher_agent_id>, <observed_agent_id>),
 # Forces the agent to look at specified agent (track his movements). Alarmed agents will ignore this.
agent_start_running_away = 1751 # (agent_start_running_away, <agent_id>, [<position_no>]),
 # Makes the agent flee the battlefield, ignoring everything else and not attacking. If the agent reaches the edge of map in this mode, he will fade out. Optional position_no parameter added in 1.153 and will make the agent flee to specified position instead (pos0 is not allowed and will be ignored).
agent_stop_running_away = 1752 # (agent_stop_running_away, <agent_id>),
 # Cancels fleeing behavior for the agent, turning him back to combat state.
agent_ai_set_aggressiveness = 1753 # (agent_ai_set_aggressiveness, <agent_id>, <value>),
 # Sets the aggressiveness parameter for agent AI to use. Default value is 100. Higher values make agent more aggressive. Actual game effects are not obvious, apparently used to speed up mob aggravation when previously neutral.
agent_set_kick_allowed = 1754 # (agent_set_kick_allowed, <agent_id>, <value>),
 # Enables (value = 1) or disables (value = 0) kicking for the specified agent. Only makes sense for player-controlled agents as bots don't know how to kick anyway.
set_cheer_at_no_enemy = 2379 # (set_cheer_at_no_enemy, <value>),
 # Version 1.153+. Determines whether the agents will cheer when no enemy remain on the map. 0 = do not cheer, 1 = cheer.
agent_add_offer_with_timeout = 1777 # (agent_add_offer_with_timeout, <agent_id>, <offerer_agent_id>, <duration-in-1/1000-seconds>),
 # Esoteric stuff. Used in multiplayer duels. Second agent_id is offerer, 0 value for duration is an infinite offer.
agent_check_offer_from_agent = 1778 # (agent_check_offer_from_agent, <agent_id>, <offerer_agent_id>), #second agent_id is offerer
 # Esoteric stuff. Used in multiplayer duels. Second agent_id is offerer.
# Team operations
agent_get_group = 1765 # (agent_get_group, <destination>, <agent_id>),
 # Retrieves reference to player who is currently the leader of specified bot agent. Only works in multiplayer.
agent_set_group = 1766 # (agent_set_group, <agent_id>, <player_leader_id>),
 # Puts the bot agent under command of specified player. Only works in multiplayer.
agent_get_team = 1770 # (agent_get_team, <destination>, <agent_id>),
 # Retrieves the team that the agent belongs to.
agent_set_team = 1771 # (agent_set_team, <agent_id>, <value>),
 # Puts the agent to specified team number.
agent_get_class = 1772 # (agent_get_class, <destination>, <agent_id>),
 # Retrieves the agent class (see grc_* constants in header_mission_templates.py for reference). Note this operation returns the troop class that the game divines from troop equipment and flags, ignoring any custom troop class settings.
agent_get_division = 1773 # (agent_get_division, <destination>, <agent_id>),
 # Retrieves the agent division (custom troop class number in 0..8 range).
agent_set_division = 1783 # (agent_set_division, <agent_id>, <value>),
 # Puts the agent into the specified division. This does not affect agent's troop class. Note that there's a bug in Warband: if an order is issued to agent's original division, the agent will immediately switch back to it's original division number. Therefore, if you want to manipulate agent divisions dynamically during the battle, you need to implement some workarounds for this bug.
team_get_hold_fire_order = 1784 # (team_get_hold_fire_order, <destination>, <team_no>, <division>),
 # Retrieves current status of hold fire order for specified team/division (see aordr_* constants in header_mission_templates.py for reference).
team_get_movement_order = 1785 # (team_get_movement_order, <destination>, <team_no>, <division>),
 # Retrieves current movement orders for specified team/division (see mordr_* constants in header_mission_templates.py for reference).
team_get_riding_order = 1786 # (team_get_riding_order, <destination>, <team_no>, <division>),
 # Retrieves current status of riding order for specified team/division (see rordr_* constants in header_mission_templates.py for reference).
team_get_weapon_usage_order = 1787 # (team_get_weapon_usage_order, <destination>, <team_no>, <division>),
 # Retrieves current status of weapon usage order for specified team/division (see wordr_* constants in header_mission_templates.py for reference).
team_give_order = 1790 # (team_give_order, <team_no>, <division>, <order_id>),
 # Issues an order to specified team/division.
team_set_order_position = 1791 # (team_set_order_position, <team_no>, <division>, <position>),
 # Defines the position for specified team/division when currently issued order requires one.
team_get_leader = 1792 # (team_get_leader, <destination>, <team_no>),
 # Retrieves the reference to the agent who is the leader of specified team.
team_set_leader = 1793 # (team_set_leader, <team_no>, <new_leader_agent_id>),
 # Sets the agent as the new leader of specified team.
team_get_order_position = 1794 # (team_get_order_position, <position>, <team_no>, <division>),
 # Retrieves position which is used for specified team/division current orders.
team_set_order_listener = 1795 # (team_set_order_listener, <team_no>, <division>, [add_to_listeners]),
 # Set the specified division as the one which will be following orders issued by the player (assuming the player is on the same team). If optional parameter add_to_listeners is greater than 0, then the operation will instead *add* specified division to order listeners. If division number is -1, then list of order listeners is cleared. If division number is 9, then all divisions will listen to player's orders.
team_set_relation = 1796 # (team_set_relation, <team_no>, <team_no_2>, <value>),
 # Sets relations between two teams. Possible values: enemy (-1), neutral (0) and friendly (1).
store_remaining_team_no = 2360 # (store_remaining_team_no, <destination>),
 # Retrieves the number of the last remaining team. Currently not used in Native, possibly deprecated.
team_get_gap_distance = 1828 # (team_get_gap_distance, <destination>, <team_no>, <sub_class>),
 # Version 1.153+. UNTESTED. Supposedly returns average gap between troops of a specified team/class (depends on how many Stand Closer/Spread Out orders were given).
# Combat statistics
store_enemy_count = 2380 # (store_enemy_count, <destination>),
 # No longer used in Native. Apparently stores total number of active enemy agents. Possibly deprecated. 4research.
store_friend_count = 2381 # (store_friend_count, <destination>),
 # No longer used in Native. Apparently stores total number of active friendly agents. Possibly deprecated. 4research.
store_ally_count = 2382 # (store_ally_count, <destination>),
 # No longer used in Native. Apparently stores total number of active allied agents (how is it different from friends?). Possibly deprecated. 4research.
store_defender_count = 2383 # (store_defender_count, <destination>),
 # No longer used in Native. Apparently stores total number of active agents on defender's side. Possibly deprecated. 4research.
store_attacker_count = 2384 # (store_attacker_count, <destination>),
 # No longer used in Native. Apparently stores total number of active agents on attacker's side. Possibly deprecated. 4research.
store_normalized_team_count = 2385 # (store_normalized_team_count, <destination>, <team_no>),
 # Stores the number of agents belonging to specified team, normalized according to battle_size and advantage. Commonly used to calculate advantage and possibly reinforcement wave sizes.
################################################################################
# [ Z23 ] PRESENTATIONS
################################################################################
# Presentations are a complex subject, because of their flexibility. Each
# presentation is nothing more but a number of screen control elements, called
# overlays. There are many types of overlays, each coming with its own
# behavior and looks. For as long as the presentation is running, you can
# monitor the status of those overlays and change their looks, contents and
# position on the screen.
# Presentation is nothing but a set of triggers. There are only five triggers
# that the presentation can have, but skillful control of them allows you to
# do nearly everything you can think of.
# ti_on_presentation_load fires only once when the presentation is started.
# This is the place where you will usually create all overlays that your
# presentation needs, initialize their looks and contents and put them to
# their positions on the screen.
# ti_on_presentation_event_state_change is probably the most important and
# easy one. It fires every time some overlay in your presentation changes
# state. For each type of overlay this means something. For a button overlay,
# this means that the user has clicked the button. In this case, you will want
# to run the code responsible for that button effects. So you can put a "Win"
# button on your presentation, and when it's clicked, you can run the code
# which will give all castles and towns in the game to you. :-)
# ti_on_presentation_mouse_press trigger fires every time user clicks a mouse
# button on one of presentation overlays, even if the overlay did not change
# its state as the result.
# ti_on_presentation_mouse_enter_leave trigger fires when the mouse pointer
# moves over one of presentation's overlays, or moves out of it. This might
# be useful if you want your presentation to react to user's mouse movements,
# not only clicks.
# ti_on_presentation_run trigger will fire every frame (in other words, with
# the frequency of your game FPS). You can put some code in this trigger if
# you want your presentation to constantly do something even if the user is
# passive.
# Note that while a running presentation will usually pause your game until
# you stop it, it is also possible to write presentations which will not stop
# the game, but will run as the time goes. To see an example, go into any
# battle in Warband and press Backspace. You will see the interface which
# displays the mini-map of the battle, positions of all troops, and elements
# that you can use to issue orders to your companions (if you have any). All
# this is a presentation as well, called "prsnt_battle". And if you have
# played multiplayer, then you might be interested to know that all menus,
# including equipment selection for your character, are presentations as well.
# Conditional operations
is_presentation_active = 903 # (is_presentation_active, <presentation_id>),
 # Checks that the specified presentation is currently running.
# General presentation operations
start_presentation = 900 # (start_presentation, <presentation_id>),
 # Starts the specified presentation.
start_background_presentation = 901 # (start_background_presentation, <presentation_id>),
 # Apparently allows you to start a presentation in background but stay in the menu. 4research.
presentation_set_duration = 902 # (presentation_set_duration, <duration-in-1/100-seconds>),
 # Sets presentation duration time, in 1/100th of second. Must be called when a presentation is active. If several presentations are active, duration will be set for all of them.
# Creating overlays
create_text_overlay = 910 # (create_text_overlay, <destination>, <string_id>),
 # Creates a text label overlay and returns its overlay_id.
create_mesh_overlay = 911 # (create_mesh_overlay, <destination>, <mesh_id>),
 # Creates a mesh overlay and returns its overlay_id.
create_mesh_overlay_with_item_id = 944 # (create_mesh_overlay_with_item_id, <destination>, <item_id>),
 # Creates a mesh overlay, using the specified item mesh. Returns overlay_id.
create_mesh_overlay_with_tableau_material = 939 # (create_mesh_overlay_with_tableau_material, <destination>, <mesh_id>, <tableau_material_id>, <value>),
 # Creates a mesh overlay, using the specified tableau_material. When mesh_id = -1, it is generated automatically. Value is passed as the parameter for tableau_material script. Returns overlay_id.
create_button_overlay = 912 # (create_button_overlay, <destination>, <string_id>),
 # Creates a generic button overlay and returns its overlay_id. The only difference between this and subsequent two operations is that they use different button meshes.
create_game_button_overlay = 940 # (create_game_button_overlay, <destination>, <string_id>),
 # Creates a game button overlay and returns its overlay_id.
create_in_game_button_overlay = 941 # (create_in_game_button_overlay, <destination>, <string_id>),
 # Creates an in-game button overlay and returns its overlay_id.
create_image_button_overlay = 913 # (create_image_button_overlay, <destination>, <mesh_id>, <mesh_id>),
 # Creates an image button, using two meshes for normal (1st mesh) and pressed (2nd mesh) status. Button does not have a textual label. Returns button overlay_id.
create_image_button_overlay_with_tableau_material = 938 # (create_image_button_overlay_with_tableau_material, <destination>, <mesh_id>, <tableau_material_id>, <value>),
 # Creates an image button from the specified mesh, using tableau_material as the image. When mesh = -1, it is generated automatically. Value is passed as the parameter to the tableau_material script. Returns overlay_id.
create_slider_overlay = 914 # (create_slider_overlay, <destination>, <min_value>, <max_value>),
 # Creates horizontal slider overlay, with positions of the slider varying between min and max values. Current value of the slider can be changed with (overlay_set_val). Returns slider's overlay_id.
create_progress_overlay = 915 # (create_progress_overlay, <destination>, <min_value>, <max_value>),
 # Creates progress bar overlay, with positions of the bar varying between min and max values. Current value of the progress bar can be changed with (overlay_set_val). Returns bar's overlay_id.
create_number_box_overlay = 942 # (create_number_box_overlay, <destination>, <min_value>, <max_value>),
 # Creates a number box overlay (a small field for numeric value and small increase/decrease buttons to the right) with specified min and max values. Returns number box overlay_id.
create_text_box_overlay = 917 # (create_text_box_overlay, <destination>),
 # Apparently deprecated. No longer used in Native.
create_simple_text_box_overlay = 919 # (create_simple_text_box_overlay, <destination>),
 # Creates a text field overlay, where user can enter any text. Returns text field's overlay_id. Text contents of the field can be retrieved from s0 trigger in ti_on_presentation_event_state_change event for the text field.
create_check_box_overlay = 918 # (create_check_box_overlay, <destination>, <checkbox_off_mesh>, <checkbox_on_mesh>),
 # Creates a checkbox overlay. Returns checkbox overlay_id.
create_listbox_overlay = 943 # (create_listbox_overlay, <destination>, <string>, <value>),
 # Creates a listbox overlay. Individual items can be added with (overlay_add_item) and index of currently selected item can be set with (overlay_set_val). Returns listbox overlay_id. Importance of latter two parameters unclear (default text&value?). 4research.
create_combo_label_overlay = 948 # (create_combo_label_overlay, <destination>),
 # Creates a combo label overlay. Looks like plain text label. Individual items can be added with (overlay_add_item) and currently selected item can be set with (overlay_set_val). Returns combo block's overlay_id.
create_combo_button_overlay = 916 # (create_combo_button_overlay, <destination>),
 # Creates a combo button overlay. For example see "Screen Resolution" dropdown in Settings menu. Individual items can be added with (overlay_add_item) and currently selected item can be set with (overlay_set_val). Returns combo block's overlay_id.
overlay_add_item = 931 # (overlay_add_item, <overlay_id>, <string_id>),
 # Adds an item to the listbox or combobox. Items are indexed from 0. Note the order in which items appear in the dropdown is reverse to the order in which they're added.
# Overlays hierarchy manipulation
set_container_overlay = 945 # (set_container_overlay, <overlay_id>),
 # Defines the specified overlay as the container. All subsequently created overlays will be placed inside the container, and their coordinates will be based on container's position. All containers with their contents will be displayed *above* any non-container overlays. Use -1 to stop placing overlays to current container and resume normal behavior.
overlay_set_container_overlay = 951 # (overlay_set_container_overlay, <overlay_id>, <container_overlay_id>),
 # Allows you to put one overlay into a container, or remove it from container (if container_overlay_id = -1) without setting current overlay. May be unreliable.
# Overlay manipulation
overlay_get_position = 946 # (overlay_get_position, <position>, <overlay_id>),
 # Retrieves overlay current position to specified position trigger, using position's X and Y coordinates. Note that the screen size in Warband is (1.00,0.75), further modified by fixed point multiplier.
overlay_set_val = 927 # (overlay_set_val, <overlay_id>, <value>),
 # Sets the value of the overlays which have numeric values.
overlay_set_text = 920 # (overlay_set_text, <overlay_id>, <string_id>),
 # Changes the overlay text (if it has any). Works for labels, text fields, buttons with text labels...
overlay_set_boundaries = 928 # (overlay_set_boundaries, <overlay_id>, <min_value>, <max_value>),
 # Changes the value boundaries for the overlays that have them.
overlay_set_position = 926 # (overlay_set_position, <overlay_id>, <position>),
 # Sets the overlay position on the screen, using position's X and Y coordinates. Note that the screen size in Warband is (1.00,0.75), further modified by fixed point multiplier.
overlay_set_size = 925 # (overlay_set_size, <overlay_id>, <position>),
 # Sets the overlay size, using position's X and Y coordinates. Note that the screen size in Warband is (1.00,0.75), further modified by fixed point multiplier. Also see (overlay_set_area_size).
overlay_set_area_size = 929 # (overlay_set_area_size, <overlay_id>, <position>),
 # Defines the actual area on the screen used to display the overlay. If the overlay's size is greater than area size, it will create a scrollable area with appropriate scrollbars. Can be used to create scrollable areas for large text, or scrollable containers with many children elements (see Host Game screen for a typical example).
overlay_set_additional_render_height = 952 # (overlay_set_additional_render_height, <overlay_id>, <height_adder>),
 # Version 1.153+. Effects uncertain. 4research.
overlay_animate_to_position = 937 # (overlay_animate_to_position, <overlay_id>, <duration-in-1/1000-seconds>, <position>),
 # Moves overlay to specified position during a specified timeframe, specified in 1/1000th of second.
overlay_animate_to_size = 936 # (overlay_animate_to_size, <overlay_id>, <duration-in-1/1000-seconds>, <position>),
 # Changes overlay size to specified value during a specified timeframe, specified in 1/1000th of second.
overlay_set_mesh_rotation = 930 # (overlay_set_mesh_rotation, <overlay_id>, <position>),
 # Despite the name, works with any overlay, allowing you to put it on the screen in rotated position. To determine the angles, position's rotation values are used (not coordinates!). Usually you will want to only use rotation around Z axis (which results in clockwise or anti-clockwise rotation as seen by user). Note that rotating overlays which are placed inside a container may cause strange results, so some trial and error will be necessary in such situation.
overlay_set_material = 956 # (overlay_set_material, <overlay_id>, <string_no>),
 # Version 1.161+. Replaces the material used for rendering specified overlay.
overlay_set_color = 921 # (overlay_set_color, <overlay_id>, <color>),
 # Changes the overlay color (hexadecimal value 0xRRGGBB). May not work with some overlay types.
overlay_set_alpha = 922 # (overlay_set_alpha, <overlay_id>, <alpha>),
 # Changes the overlay alpha (hexadecimal value in 0x00..0xFF range). May not work with some overlay types.
overlay_set_hilight_color = 923 # (overlay_set_hilight_color, <overlay_id>, <color>),
 # Highlights the overlay with specified color. May not work with some overlay types.
overlay_set_hilight_alpha = 924 # (overlay_set_hilight_alpha, <overlay_id>, <alpha>),
 # Highlights the overlay with specified alpha. May not work with some overlay types.
overlay_animate_to_color = 932 # (overlay_animate_to_color, <overlay_id>, <duration-in-1/1000-seconds>, <color>),
 # Changes overlay's color during a specified timeframe, specified in 1/1000th of second.
overlay_animate_to_alpha = 933 # (overlay_animate_to_alpha, <overlay_id>, <duration-in-1/1000-seconds>, <alpha>),
 # Changes overlay's alpha during a specified timeframe, specified in 1/1000th of second.
overlay_animate_to_highlight_color = 934 # (overlay_animate_to_highlight_color, <overlay_id>, <duration-in-1/1000-seconds>, <color>),
 # Highlights overlay to specified color during a specified timeframe, specified in 1/1000th of second.
overlay_animate_to_highlight_alpha = 935 # (overlay_animate_to_highlight_alpha, <overlay_id>, <duration-in-1/1000-seconds>, <alpha>),
 # Highlights overlay to specified alpha during a specified timeframe, specified in 1/1000th of second.
overlay_set_display = 947 # (overlay_set_display, <overlay_id>, <value>),
 # Shows (value = 1) or hides (value = 0) the specified overlay.
overlay_obtain_focus = 949 # (overlay_obtain_focus, <overlay_id>),
 # Makes the specified overlay obtain input focus. Only works for text fields.
overlay_set_tooltip = 950 # (overlay_set_tooltip, <overlay_id>, <string_id>),
 # Defines a text which will be displayed as a tooltip when mouse pointer will hover over the specified overlay. Unreliable, always test how it works.
# Popups and some esoteric stuff
show_item_details = 970 # (show_item_details, <item_id>, <position>, <price_multiplier_percentile>),
 # Shows a popup box at the specified position, containing standard game information for the specified item. Last parameter determines price percentile multiplier. Multiplier value of 100 will display item standard price, value of 0 will display "Default Item" instead of price (used in multiplayer equipment selection presentation).
show_item_details_with_modifier = 972 # (show_item_details_with_modifier, <item_id>, <item_modifier>, <position>, <price_multiplier_percentile>),
 # Same as above, but displays stats and price information for an item with a modifier.
close_item_details = 971 # (close_item_details),
 # Closes the item details popup box.
show_troop_details = 2388 # (show_troop_details, <troop_id>, <position>, <troop_price>),
 # Version 1.153+. Supposedly displays a popup with troop information at specified place. 4research.
################################################################################
# [ Z24 ] MULTIPLAYER AND NETWORKING (LEFT FOR SOMEONE MORE FAMILIAR WITH THIS)
################################################################################
# This section is eagerly waiting for someone to write documentation comments.
# Conditional operations
player_is_active = 401 # (player_is_active, <player_id>),
 # Checks that the specified player is active (i.e. connected to server).
multiplayer_is_server = 417 # (multiplayer_is_server),
 # Checks that the code is running on multiplayer server. Operation will fail on client machines or in singleplayer mode.
multiplayer_is_dedicated_server = 418 # (multiplayer_is_dedicated_server),
 # Checks that the code is running on dedicated multiplayer server machine.
game_in_multiplayer_mode = 419 # (game_in_multiplayer_mode),
 # Checks that the game is running in multiplayer mode.
player_is_admin = 430 # (player_is_admin, <player_id>),
 # Checks that the specified player has administrative rights.
player_is_busy_with_menus = 438 # (player_is_busy_with_menus, <player_id>),
 # Undocumented. Educated guess is it's true when player is running a presentation without prsntf_read_only flag.
player_item_slot_is_picked_up = 461 # (player_item_slot_is_picked_up, <player_id>, <item_slot_no>),
 # Checks that the specified player's equipment slot contains an item that the player has picked up from ground.
# Player slot operations
player_set_slot = 508 # (player_set_slot, <player_id>, <slot_no>, <value>),
player_get_slot = 528 # (player_get_slot, <destination>, <player_id>, <slot_no>),
player_slot_eq = 548 # (player_slot_eq, <player_id>, <slot_no>, <value>),
player_slot_ge = 568 # (player_slot_ge, <player_id>, <slot_no>, <value>),
# Network communication operations
send_message_to_url = 380 # (send_message_to_url, <string_id>, <encode_url>),
 # Sends an HTTP request. Response from that URL will be returned to "script_game_receive_url_response". Parameter <encode_url> is optional and effects are unclear. Supposedly it's equivalent of calling (str_encode_url) on the first parameter, which doesn't make sense to me.
multiplayer_send_message_to_server = 388 # (multiplayer_send_message_to_server, <message_type>),
 # Multiplayer client operation. Send a simple message (only message code, no data) to game server.
multiplayer_send_int_to_server = 389 # (multiplayer_send_int_to_server, <message_type>, <value>),
 # Multiplayer client operation. Send a message with a single extra integer value to game server.
multiplayer_send_2_int_to_server = 390 # (multiplayer_send_2_int_to_server, <message_type>, <value>, <value>),
 # Same as (multiplayer_send_int_to_server), but two integer values are sent.
multiplayer_send_3_int_to_server = 391 # (multiplayer_send_3_int_to_server, <message_type>, <value>, <value>, <value>),
 # Same as (multiplayer_send_int_to_server), but three integer values are sent.
multiplayer_send_4_int_to_server = 392 # (multiplayer_send_4_int_to_server, <message_type>, <value>, <value>, <value>, <value>),
 # Same as (multiplayer_send_int_to_server), but four integer values are sent.
multiplayer_send_string_to_server = 393 # (multiplayer_send_string_to_server, <message_type>, <string_id>),
 # Multiplayer client operation. Send a message with a string value to game server.
multiplayer_send_message_to_player = 394 # (multiplayer_send_message_to_player, <player_id>, <message_type>),
 # Multiplayer server operation. Send a simple message (only message code, no data) to one of connected players.
multiplayer_send_int_to_player = 395 # (multiplayer_send_int_to_player, <player_id>, <message_type>, <value>),
 # Multiplayer server operation. Send a message with a single extra integer value to one of connected players.
multiplayer_send_2_int_to_player = 396 # (multiplayer_send_2_int_to_player, <player_id>, <message_type>, <value>, <value>),
# Same as (multiplayer_send_int_to_player), but two integer values are sent.
multiplayer_send_3_int_to_player = 397 # (multiplayer_send_3_int_to_player, <player_id>, <message_type>, <value>, <value>, <value>),
# Same as (multiplayer_send_int_to_player), but three integer values are sent.
multiplayer_send_4_int_to_player = 398 # (multiplayer_send_4_int_to_player, <player_id>, <message_type>, <value>, <value>, <value>, <value>),
# Same as (multiplayer_send_int_to_player), but four integer values are sent.
multiplayer_send_string_to_player = 399 # (multiplayer_send_string_to_player, <player_id>, <message_type>, <string_id>),
# Multiplayer server operation. Send a message with a string value to one of connected players.
# Player handling operations
get_max_players = 400 # (get_max_players, <destination>),
# Returns maximum possible number of connected players. Apparently always returns a constant value, however it's return value can change as maximum increases with new patches.
player_get_team_no = 402 # (player_get_team_no, <destination>, <player_id>),
# Retrieves player's selected team.
player_set_team_no = 403 # (player_set_team_no, <player_id>, <team_id>),
 # Assigns a player to the specified team. (The original comment showed player_get_team_no — a copy-paste error; opcode 403 is the setter.)
player_get_troop_id = 404 # (player_get_troop_id, <destination>, <player_id>),
# Retrieves player's selected troop reference.
player_set_troop_id = 405 # (player_set_troop_id, <player_id>, <troop_id>),
 # Assigns the selected troop reference to a player. (The original comment showed player_get_troop_id — a copy-paste error; opcode 405 is the setter.)
player_get_agent_id = 406 # (player_get_agent_id, <destination>, <player_id>),
# Retrieves player's current agent reference. Returns a negative value if player has no agent.
agent_get_player_id = 1724 # (agent_get_player_id, <destination>, <agent_id>),
# Retrieves player reference that is currently controlling the specified agent.
player_get_gold = 407 # (player_get_gold, <destination>, <player_id>),
# Retrieves player's current gold amount.
player_set_gold = 408 # (player_set_gold, <player_id>, <value>, <max_value>),
# Sets player's new gold amount and maximum allowed gold amount. Use 0 for <max_value> to remove gold limit.
player_spawn_new_agent = 409 # (player_spawn_new_agent, <player_id>, <entry_point>),
# Spawns a new agent for the specified player. Essentially a combination of (spawn_agent) and (player_control_agent) operations.
player_add_spawn_item = 410 # (player_add_spawn_item, <player_id>, <item_slot_no>, <item_id>),
#
multiplayer_get_my_team = 411 # (multiplayer_get_my_team, <destination>),
# Client operation. Retrieves player's currently selected team.
multiplayer_get_my_troop = 412 # (multiplayer_get_my_troop, <destination>),
# Client operation. Retrieves player's currently selected troop.
multiplayer_set_my_troop = 413 # (multiplayer_set_my_troop, <value>),
 # Client operation. Selects a new troop for the player. (The original comment showed the getter's name and a <destination> parameter by copy-paste error; as a setter this takes a troop value — signature per official module docs.)
multiplayer_get_my_gold = 414 # (multiplayer_get_my_gold, <destination>),
# Client operation. Retrieves current player's gold amount.
multiplayer_get_my_player = 415 # (multiplayer_get_my_player, <destination>),
# Client operation. Retrieves current player's player_id reference.
multiplayer_make_everyone_enemy = 420 # (multiplayer_make_everyone_enemy),
# Used in deathmatch mode to make everyone hostile to all other agents.
player_control_agent = 421 # (player_control_agent, <player_id>, <agent_id>),
# Server operation. Puts the agent under specified player's control. Operation will change agent's face code and banner to those of player.
player_get_item_id = 422 # (player_get_item_id, <destination>, <player_id>, <item_slot_no>),
# Server operation. Retrieves item that's currently equipped by specified player in <item_slot_no> equipment slot.
player_get_banner_id = 423 # (player_get_banner_id, <destination>, <player_id>),
 # Server operation. Retrieves banner_id reference used by the specified player. Note that in MP banners are enumerated starting from 0 (unlike single-player, where their enumeration depends on scene prop banners' reference range).
player_set_is_admin = 429 # (player_set_is_admin, <player_id>, <value>),
# Server operation. Set the current player as admin (value = 1) or not (value = 0).
player_get_score = 431 # (player_get_score, <destination>, <player_id>),
#
player_set_score = 432 # (player_set_score, <player_id>, <value>),
#
player_get_kill_count = 433 # (player_get_kill_count, <destination>, <player_id>),
#
player_set_kill_count = 434 # (player_set_kill_count, <player_id>, <value>),
#
player_get_death_count = 435 # (player_get_death_count, <destination>, <player_id>),
#
player_set_death_count = 436 # (player_set_death_count, <player_id>, <value>),
#
player_get_ping = 437 # (player_get_ping, <destination>, <player_id>),
#
player_get_is_muted = 439 # (player_get_is_muted, <destination>, <player_id>),
#
player_set_is_muted = 440 # (player_set_is_muted, <player_id>, <value>, [mute_for_everyone]), #mute_for_everyone optional parameter should be set to 1 if player is muted for everyone (this works only on server).
#
player_get_unique_id = 441 # (player_get_unique_id, <destination>, <player_id>), # can only be used on server side
 # Server operation. Retrieves player's unique identifier which is determined by player's game license code. This number is supposed to be unique for each license, allowing reliable player identification across servers.
player_get_gender = 442 # (player_get_gender, <destination>, <player_id>),
#
player_save_picked_up_items_for_next_spawn = 459 # (player_save_picked_up_items_for_next_spawn, <player_id>),
#
player_get_value_of_original_items = 460 # (player_get_value_of_original_items, <player_id>),
# Undocumented. Official docs: this operation returns values of the items, but default troop items will be counted as zero (except horse)
profile_get_banner_id = 350 # (profile_get_banner_id, <destination>),
 # Client operation. Retrieves banner_id reference used by the game for multiplayer. Note that in MP banners are enumerated starting from 0 (unlike single-player, where their enumeration depends on scene prop banners' reference range).
profile_set_banner_id = 351 # (profile_set_banner_id, <value>),
 # Client operation. Assigns a new banner_id to be used for multiplayer. Note that in MP banners are enumerated starting from 0 (unlike single-player, where their enumeration depends on scene prop banners' reference range).
# Team handling operations
team_get_bot_kill_count = 450 # (team_get_bot_kill_count, <destination>, <team_id>),
#
team_set_bot_kill_count = 451 # (team_set_bot_kill_count, <team_id>, <value>),
 # Undocumented. Presumably sets the team's bot kill counter. (The original comment showed the getter's signature by copy-paste error; setter signature per official module docs — confirm in-game.)
team_get_bot_death_count = 452 # (team_get_bot_death_count, <destination>, <team_id>),
#
team_set_bot_death_count = 453 # (team_set_bot_death_count, <team_id>, <value>),
 # Undocumented. Presumably sets the team's bot death counter. (The original comment showed the getter's signature by copy-paste error; setter signature per official module docs — confirm in-game.)
team_get_kill_count = 454 # (team_get_kill_count, <destination>, <team_id>),
#
team_get_score = 455 # (team_get_score, <destination>, <team_id>),
#
team_set_score = 456 # (team_set_score, <team_id>, <value>),
#
team_set_faction = 457 # (team_set_faction, <team_id>, <faction_id>),
#
team_get_faction = 458 # (team_get_faction, <destination>, <team_id>),
#
# General scene and mission handling operations
multiplayer_clear_scene = 416 # (multiplayer_clear_scene),
#
multiplayer_find_spawn_point = 425 # (multiplayer_find_spawn_point, <destination>, <team_no>, <examine_all_spawn_points>, <is_horseman>),
#
set_spawn_effector_scene_prop_kind = 426 # (set_spawn_effector_scene_prop_kind, <team_no>, <scene_prop_kind_no>),
# Specifies some scene prop kind as one of the teams' spawn effector, making players of that team more likely to spawn closer to the specified effector prop instances. Use -1 to disable spawn effector for a team.
set_spawn_effector_scene_prop_id = 427 # (set_spawn_effector_scene_prop_id, <team_no>, <scene_prop_id>),
# Specifies a single prop instance as a team's spawn effector. Different from (set_spawn_effector_scene_prop_kind) as other instances of the same scene prop will not affect player spawning.
start_multiplayer_mission = 470 # (start_multiplayer_mission, <mission_template_id>, <scene_id>, <started_manually>),
#
# Administrative operations and settings
kick_player = 465 # (kick_player, <player_id>),
#
ban_player = 466 # (ban_player, <player_id>, <value>, <player_id>),
# Official docs: set value = 1 for banning temporarily, assign 2nd player id as the administrator player id if banning is permanent
save_ban_info_of_player = 467 # (save_ban_info_of_player, <player_id>),
#
ban_player_using_saved_ban_info = 468 # (ban_player_using_saved_ban_info),
#
server_add_message_to_log = 473 # (server_add_message_to_log, <string_id>),
#
server_get_renaming_server_allowed = 475 # (server_get_renaming_server_allowed, <destination>),
# Official docs: 0-1
server_get_changing_game_type_allowed = 476 # (server_get_changing_game_type_allowed, <destination>),
# Official docs: 0-1
server_get_combat_speed = 478 # (server_get_combat_speed, <destination>),
# Official docs: 0-2
server_set_combat_speed = 479 # (server_set_combat_speed, <value>),
# Official docs: 0-2
server_get_friendly_fire = 480 # (server_get_friendly_fire, <destination>),
#
server_set_friendly_fire = 481 # (server_set_friendly_fire, <value>),
# Official docs: 0 = off, 1 = on
server_get_control_block_dir = 482 # (server_get_control_block_dir, <destination>),
#
server_set_control_block_dir = 483 # (server_set_control_block_dir, <value>),
# Official docs: 0 = automatic, 1 = by mouse movement
server_set_password = 484 # (server_set_password, <string_id>),
#
server_get_add_to_game_servers_list = 485 # (server_get_add_to_game_servers_list, <destination>),
#
server_set_add_to_game_servers_list = 486 # (server_set_add_to_game_servers_list, <value>),
#
server_get_ghost_mode = 487 # (server_get_ghost_mode, <destination>),
#
server_set_ghost_mode = 488 # (server_set_ghost_mode, <value>),
#
server_set_name = 489 # (server_set_name, <string_id>),
#
server_get_max_num_players = 490 # (server_get_max_num_players, <destination>),
#
server_set_max_num_players = 491 # (server_set_max_num_players, <value>),
#
server_set_welcome_message = 492 # (server_set_welcome_message, <string_id>),
#
server_get_melee_friendly_fire = 493 # (server_get_melee_friendly_fire, <destination>),
#
server_set_melee_friendly_fire = 494 # (server_set_melee_friendly_fire, <value>),
# Official docs: 0 = off, 1 = on
server_get_friendly_fire_damage_self_ratio = 495 # (server_get_friendly_fire_damage_self_ratio, <destination>),
#
server_set_friendly_fire_damage_self_ratio = 496 # (server_set_friendly_fire_damage_self_ratio, <value>),
# Official docs: 0-100
server_get_friendly_fire_damage_friend_ratio = 497 # (server_get_friendly_fire_damage_friend_ratio, <destination>),
#
server_set_friendly_fire_damage_friend_ratio = 498 # (server_set_friendly_fire_damage_friend_ratio, <value>),
# Official docs: 0-100
server_get_anti_cheat = 499 # (server_get_anti_cheat, <destination>),
#
server_set_anti_cheat = 477 # (server_set_anti_cheat, <value>),
# Official docs: 0 = off, 1 = on
################################################################################
# [ Z25 ] REMAINING ESOTERIC STUFF (NO IDEA WHAT IT DOES)
################################################################################
# Honestly, I have no idea what these functions could be used for. If you
# know, please let me know ASAP! :-)
set_tooltip_text = 1130 # (set_tooltip_text, <string_id>),
ai_mesh_face_group_show_hide = 1805 # (ai_mesh_face_group_show_hide, <group_no>, <value>), # 1 for enable, 0 for disable
auto_set_meta_mission_at_end_commited = 1305 # (auto_set_meta_mission_at_end_commited), Not documented. Not used in Native. Was (simulate_battle, <value>) before.
################################################################################
# [ Z26 ] HARDCODED COMPILER-RELATED CODE
################################################################################
# Do not touch this stuff unless necessary. Module System compiler needs this
# code to correctly compile your module into format that Warband understands.
lhs_operations = [try_for_range, try_for_range_backwards, try_for_parties, try_for_agents, store_script_param_1, store_script_param_2, store_script_param, store_repeat_object,
get_global_cloud_amount, get_global_haze_amount, options_get_damage_to_player, options_get_damage_to_friends, options_get_combat_ai, options_get_campaign_ai, options_get_combat_speed,
profile_get_banner_id, get_achievement_stat, get_max_players, player_get_team_no, player_get_troop_id, player_get_agent_id, player_get_gold, multiplayer_get_my_team,
multiplayer_get_my_troop, multiplayer_get_my_gold, multiplayer_get_my_player, player_get_score, player_get_kill_count, player_get_death_count, player_get_ping, player_get_is_muted,
player_get_unique_id, player_get_gender, player_get_item_id, player_get_banner_id, game_get_reduce_campaign_ai, multiplayer_find_spawn_point, team_get_bot_kill_count,
team_get_bot_death_count, team_get_kill_count, team_get_score, team_get_faction, player_get_value_of_original_items, server_get_renaming_server_allowed,
server_get_changing_game_type_allowed, server_get_friendly_fire, server_get_control_block_dir, server_get_combat_speed, server_get_add_to_game_servers_list, server_get_ghost_mode,
server_get_max_num_players, server_get_melee_friendly_fire, server_get_friendly_fire_damage_self_ratio, server_get_friendly_fire_damage_friend_ratio, server_get_anti_cheat, troop_get_slot,
party_get_slot, faction_get_slot, scene_get_slot, party_template_get_slot, agent_get_slot, quest_get_slot, item_get_slot, player_get_slot, team_get_slot, scene_prop_get_slot,
store_last_sound_channel, get_angle_between_positions, get_distance_between_positions, get_distance_between_positions_in_meters, get_sq_distance_between_positions,
get_sq_distance_between_positions_in_meters, get_sq_distance_between_position_heights, position_get_x, position_get_y, position_get_z, position_get_scale_x,
position_get_scale_y, position_get_scale_z, position_get_rotation_around_z, position_normalize_origin, position_get_rotation_around_x, position_get_rotation_around_y,
position_get_distance_to_terrain, position_get_distance_to_ground_level, create_text_overlay, create_mesh_overlay, create_button_overlay, create_image_button_overlay, create_slider_overlay,
create_progress_overlay, create_combo_button_overlay, create_text_box_overlay, create_check_box_overlay, create_simple_text_box_overlay, create_image_button_overlay_with_tableau_material,
create_mesh_overlay_with_tableau_material, create_game_button_overlay, create_in_game_button_overlay, create_number_box_overlay, create_listbox_overlay, create_mesh_overlay_with_item_id,
overlay_get_position, create_combo_label_overlay, get_average_game_difficulty, get_level_boundary, faction_get_color, troop_get_type, troop_get_xp, troop_get_class,
troop_inventory_slot_get_item_amount, troop_inventory_slot_get_item_max_amount, troop_get_inventory_capacity, troop_get_inventory_slot, troop_get_inventory_slot_modifier,
troop_get_upgrade_troop, item_get_type, party_get_num_companions, party_get_num_prisoners, party_get_current_terrain, party_get_template_id, party_count_members_of_type,
party_count_companions_of_type, party_count_prisoners_of_type, party_get_free_companions_capacity, party_get_free_prisoners_capacity, party_get_helpfulness, party_get_ai_initiative,
party_get_num_companion_stacks, party_get_num_prisoner_stacks, party_stack_get_troop_id, party_stack_get_size, party_stack_get_num_wounded, party_stack_get_troop_dna,
party_prisoner_stack_get_troop_id, party_prisoner_stack_get_size, party_prisoner_stack_get_troop_dna, party_get_cur_town, party_get_morale, party_get_battle_opponent, party_get_icon,
party_get_skill_level, get_battle_advantage, party_get_attached_to, party_get_num_attached_parties, party_get_attached_party_with_rank, get_player_agent_no, get_player_agent_kill_count,
get_player_agent_own_troop_kill_count, agent_get_horse, agent_get_rider, agent_get_party_id, agent_get_entry_no, agent_get_troop_id, agent_get_item_id, store_agent_hit_points,
agent_get_kill_count, agent_get_player_id, agent_get_wielded_item, agent_get_ammo, agent_get_simple_behavior, agent_get_combat_state, agent_get_attached_scene_prop,
agent_get_time_elapsed_since_removed, agent_get_number_of_enemies_following, agent_get_attack_action, agent_get_defend_action, agent_get_group, agent_get_action_dir, agent_get_animation,
agent_get_team, agent_get_class, agent_get_division, team_get_hold_fire_order, team_get_movement_order, team_get_riding_order, team_get_weapon_usage_order, team_get_leader,
agent_get_item_slot, scene_prop_get_num_instances, scene_prop_get_instance, scene_prop_get_visibility, scene_prop_get_hit_points, scene_prop_get_max_hit_points, scene_prop_get_team,
agent_get_ammo_for_slot, agent_deliver_damage_to_agent_advanced, team_get_gap_distance, scene_item_get_num_instances, scene_item_get_instance, scene_spawned_item_get_num_instances,
scene_spawned_item_get_instance, prop_instance_get_variation_id, prop_instance_get_variation_id_2, prop_instance_get_position, prop_instance_get_starting_position, prop_instance_get_scale,
prop_instance_get_scene_prop_kind, prop_instance_is_animating, prop_instance_get_animation_target_position, agent_get_item_cur_ammo, mission_get_time_speed, mission_cam_get_aperture,
store_trigger_param, store_trigger_param_1, store_trigger_param_2, store_trigger_param_3, agent_ai_get_look_target, agent_ai_get_move_target, agent_ai_get_behavior_target,
agent_get_crouch_mode, store_or, store_and, store_mod, store_add, store_sub, store_mul, store_div, store_sqrt, store_pow, store_sin, store_cos, store_tan, assign, store_random,
store_random_in_range, store_asin, store_acos, store_atan, store_atan2, store_troop_gold, store_num_free_stacks, store_num_free_prisoner_stacks, store_party_size,
store_party_size_wo_prisoners, store_troop_kind_count, store_num_regular_prisoners, store_troop_count_companions, store_troop_count_prisoners, store_item_kind_count,
store_free_inventory_capacity, store_skill_level, store_character_level, store_attribute_level, store_troop_faction, store_troop_health, store_proficiency_level, store_relation,
store_conversation_agent, store_conversation_troop, store_partner_faction, store_encountered_party, store_encountered_party2, store_faction_of_party, store_current_scene, store_zoom_amount,
store_item_value, store_troop_value, store_partner_quest, store_random_quest_in_range, store_random_troop_to_raise, store_random_troop_to_capture, store_random_party_in_range,
store_random_horse, store_random_equipment, store_random_armor, store_quest_number, store_quest_item, store_quest_troop, store_current_hours, store_time_of_day, store_current_day,
store_distance_to_party_from_party, get_party_ai_behavior, get_party_ai_object, get_party_ai_current_behavior, get_party_ai_current_object, store_num_parties_created,
store_num_parties_destroyed, store_num_parties_destroyed_by_player, store_num_parties_of_template, store_random_party_of_template, store_remaining_team_no, store_mission_timer_a_msec,
store_mission_timer_b_msec, store_mission_timer_c_msec, store_mission_timer_a, store_mission_timer_b, store_mission_timer_c, store_enemy_count, store_friend_count, store_ally_count,
store_defender_count, store_attacker_count, store_normalized_team_count, item_get_weight, item_get_value, item_get_difficulty, item_get_head_armor, item_get_body_armor, item_get_leg_armor,
item_get_hit_points, item_get_weapon_length, item_get_speed_rating, item_get_missile_speed, item_get_max_ammo, item_get_accuracy, item_get_shield_height, item_get_horse_scale,
item_get_horse_speed, item_get_horse_maneuver, item_get_food_quality, item_get_abundance, item_get_thrust_damage, item_get_thrust_damage_type, item_get_swing_damage,
item_get_swing_damage_type, item_get_horse_charge_damage, try_for_prop_instances, options_get_battle_size, party_get_ignore_with_player_party, cast_ray,
prop_instance_get_current_deform_progress, prop_instance_get_current_deform_frame, face_keys_get_hair, face_keys_get_beard, face_keys_get_face_texture, face_keys_get_hair_texture,
face_keys_get_hair_color, face_keys_get_age, face_keys_get_skin_color, face_keys_get_morph_key, try_for_players, get_operation_set_version, get_startup_sun_light, get_startup_ambient_light,
get_startup_ground_ambient_light, agent_ai_get_num_cached_enemies, agent_ai_get_cached_enemy, ]
global_lhs_operations = [val_lshift, val_rshift, val_add, val_sub, val_mul, val_div, val_max, val_min, val_mod, ]
can_fail_operations = [ge, eq, gt, is_between, entering_town, map_free, encountered_party_is_attacker, conversation_screen_is_active, in_meta_mission, troop_is_hero, troop_is_wounded,
key_is_down, key_clicked, game_key_is_down, game_key_clicked, hero_can_join, hero_can_join_as_prisoner, party_can_join, party_can_join_as_prisoner, troops_can_join,
troops_can_join_as_prisoner, party_can_join_party, main_party_has_troop, party_is_in_town, party_is_in_any_town, party_is_active, player_has_item, troop_has_item_equipped, troop_is_mounted,
troop_is_guarantee_ranged, troop_is_guarantee_horse, player_is_active, multiplayer_is_server, multiplayer_is_dedicated_server, game_in_multiplayer_mode, player_is_admin,
player_is_busy_with_menus, player_item_slot_is_picked_up, check_quest_active, check_quest_finished, check_quest_succeeded, check_quest_failed, check_quest_concluded, is_trial_version,
is_edit_mode_enabled, troop_slot_eq, party_slot_eq, faction_slot_eq, scene_slot_eq, party_template_slot_eq, agent_slot_eq, quest_slot_eq, item_slot_eq, player_slot_eq, team_slot_eq,
scene_prop_slot_eq, troop_slot_ge, party_slot_ge, faction_slot_ge, scene_slot_ge, party_template_slot_ge, agent_slot_ge, quest_slot_ge, item_slot_ge, player_slot_ge, team_slot_ge,
scene_prop_slot_ge, position_has_line_of_sight_to_position, position_is_behind_position, is_presentation_active, all_enemies_defeated, race_completed_by_player, num_active_teams_le,
main_hero_fallen, lt, neq, le, teams_are_enemies, agent_is_alive, agent_is_wounded, agent_is_human, agent_is_ally, agent_is_non_player, agent_is_defender, agent_is_active, agent_is_routed,
agent_is_in_special_mode, agent_is_in_parried_animation, class_is_listening_order, agent_check_offer_from_agent, entry_point_is_auto_generated, scene_prop_has_agent_on_it, agent_is_alarmed,
agent_is_in_line_of_sight, scene_prop_get_instance, scene_item_get_instance, scene_allows_mounted_units, prop_instance_is_valid, prop_instance_intersects_with_prop_instance,
agent_has_item_equipped, map_get_land_position_around_position, map_get_water_position_around_position, is_zoom_disabled, is_currently_night, store_random_party_of_template, str_is_empty,
item_has_property, item_has_capability, item_has_modifier, item_has_faction, cast_ray, ]
depth_operations = [try_begin, try_for_range, try_for_range_backwards, try_for_parties, try_for_agents, try_for_prop_instances, try_for_players, ]
|
berth64/modded_modded_1257ad
|
source/header_operations.py
|
Python
|
agpl-3.0
| 319,871
|
"""
Python API for various enterprise functionality.
"""
from enterprise import roles_api
from enterprise.models import PendingEnterpriseCustomerAdminUser
from enterprise.utils import create_tableau_user, delete_tableau_user
def activate_admin_permissions(enterprise_customer_user):
    """
    Promote a pending enterprise admin to a full admin, or clean up if unlinked.

    When a PendingEnterpriseCustomerAdminUser record matches the given
    EnterpriseCustomerUser, the "enterprise_admin" system-wide role is
    assigned, a Tableau user is created, and the pending record is deleted.
    If the EnterpriseCustomerUser is no longer linked, the admin role
    assignment and the Tableau user are removed instead.

    Requires an EnterpriseCustomerUser record to exist which ensures the user
    already has the "enterprise_learner" role as a prerequisite.

    Arguments:
        enterprise_customer_user: an EnterpriseCustomerUser instance
    """
    try:
        pending_admin = PendingEnterpriseCustomerAdminUser.objects.get(
            user_email=enterprise_customer_user.user.email,
            enterprise_customer=enterprise_customer_user.enterprise_customer,
        )
    except PendingEnterpriseCustomerAdminUser.DoesNotExist:
        # No pending admin record for this user: nothing to activate.
        return

    if not enterprise_customer_user.linked:
        # EnterpriseCustomerUser is no longer linked, so delete the
        # "enterprise_admin" role and their Tableau user.
        # TODO: ENT-3914 | Add `enterprise_customer=enterprise_customer_user.enterprise_customer`
        # kwarg so that we delete at most a single assignment instance.
        roles_api.delete_admin_role_assignment(
            enterprise_customer_user.user,
        )
        delete_tableau_user(enterprise_customer_user)
        return

    roles_api.assign_admin_role(
        enterprise_customer_user.user,
        enterprise_customer=enterprise_customer_user.enterprise_customer
    )
    # The third-party analytics (Tableau) username is the enterprise customer
    # UUID with the dashes stripped.
    tableau_username = str(
        enterprise_customer_user.enterprise_customer.uuid
    ).replace('-', '')
    create_tableau_user(tableau_username, enterprise_customer_user)
    # Activation finished; the pending record has served its purpose.
    pending_admin.delete()
|
edx/edx-enterprise
|
enterprise/api/__init__.py
|
Python
|
agpl-3.0
| 2,176
|
#---------------------------------------------------------------------------
# Testing infrastructure
#---------------------------------------------------------------------------
import unittest
from os import path
from pyramid import testing
from pyramid.paster import get_appsettings
from webob.multidict import MultiDict
from webtest import TestApp
import logging
from .. testcase import TestCase
import davezdominoes.gamecoordinator
import davezdominoes.gamecoordinator.routes
import davezdominoes.gamecoordinator.webassets
from davezdominoes.gamecoordinator.models.meta import DBSession
def neverCommit(request, response):
    """Always veto the transaction commit — presumably a pyramid_tm
    commit-veto hook so test transactions are never committed (TODO confirm
    against where it is registered)."""
    return True
class DummyRequest(testing.DummyRequest):
    """A testing.DummyRequest whose GET/POST are real MultiDicts.

    Pyramid's dummy request exposes plain dicts; wrapping them lets code
    under test use the MultiDict API. Also carries a ``user`` attribute
    (initially None) like the real application's requests.
    """
    def __init__(self):
        testing.DummyRequest.__init__(self)
        self.user = None
        self.GET = MultiDict(self.GET)
        self.POST = MultiDict(self.POST)
class UnitTestCase(TestCase):
    """Base test case for pyramid unittests: sets up a testing Configurator
    with the application's routes registered (no full WSGI app)."""
    def setUp(self):
        self.config = testing.setUp()
        # Register the application's routes on the testing configurator.
        davezdominoes.gamecoordinator.routes.includeme(self.config)

    def tearDown(self):
        testing.tearDown()
class FuncTestCase(TestCase):
    """Base case for functional tests: boots the real WSGI app from the
    ``tests.ini`` settings file that sits next to this module."""
    def setUp(self):
        ini_path = path.join(path.abspath(path.dirname(__file__)), 'tests.ini')
        wsgi_app = davezdominoes.gamecoordinator.main({}, **get_appsettings(ini_path))
        self.testapp = TestApp(wsgi_app)

    def tearDown(self):
        DBSession.remove()
        del self.testapp
        # Flush and close root-logger handlers so log output from one test
        # doesn't bleed into the next.
        for handler in logging.getLogger("").handlers:
            handler.flush()
            handler.close()
|
linuxsoftware/dominoes
|
tests/test_gamecoordinator/helpers.py
|
Python
|
agpl-3.0
| 1,653
|
"""Generates common contexts"""
from __future__ import absolute_import
import logging
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, LearningContextKey
from six import text_type
from openedx.core.lib.request_utils import COURSE_REGEX
log = logging.getLogger(__name__)
def course_context_from_url(url):
    """
    Extracts the course context from the given `url` (which may be None)
    and passes it on to `course_context_from_course_id()`.

    A URL that doesn't match COURSE_REGEX, or whose course_id portion fails
    to parse, yields a context for a course_id of None.
    """
    course_id = None
    match = COURSE_REGEX.match(url or '')
    if match is not None:
        raw_course_id = match.group('course_id')
        try:
            course_id = CourseKey.from_string(raw_course_id)
        except InvalidKeyError:
            # Log (with traceback) and fall through with course_id = None.
            log.warning(
                'unable to parse course_id "{course_id}"'.format(
                    course_id=raw_course_id
                ),
                exc_info=True
            )
    return course_context_from_course_id(course_id)
def course_context_from_course_id(course_id):
    """
    Creates a course context from a `course_id`.

    For newer parts of the system (i.e. Blockstore-based libraries/courses/etc.)
    use context_dict_for_learning_context instead of this method.

    Example Returned Context::

        {
            'course_id': 'org/course/run',
            'org_id': 'org'
        }
    """
    context = context_dict_for_learning_context(course_id)
    # Drop the newer 'context_id' field for now in this method so we're not
    # adding a new field to the course tracking logs.
    context.pop('context_id')
    return context
def context_dict_for_learning_context(context_key):
    """
    Creates a tracking log context dictionary for the given learning context
    key, which may be None, a CourseKey, a content library key, or any other
    type of LearningContextKey.

    Example Returned Context Dict::

        {
            'context_id': 'course-v1:org+course+run',
            'course_id': 'course-v1:org+course+run',
            'org_id': 'org'
        }

    Example 2::

        {
            'context_id': 'lib:edX:a-content-library',
            'course_id': '',
            'org_id': 'edX'
        }
    """
    # 'course_id' and 'org_id' stay empty unless the key provides them.
    result = {
        'context_id': text_type(context_key) if context_key else '',
        'course_id': '',
        'org_id': '',
    }
    if context_key is not None:
        assert isinstance(context_key, LearningContextKey)
        if context_key.is_course:
            # Courses duplicate the key into the legacy 'course_id' field.
            result['course_id'] = text_type(context_key)
        if hasattr(context_key, 'org'):
            result['org_id'] = context_key.org
    return result
|
ESOedX/edx-platform
|
common/djangoapps/track/contexts.py
|
Python
|
agpl-3.0
| 2,667
|
# © 2016 Camptocamp SA, Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
from odoo import fields, models
class AccountPaymentMode(models.Model):
    """Extend ``account.payment.mode`` with an automatic sale workflow link."""
    _inherit = "account.payment.mode"
    # NOTE(review): presumably the workflow applied to sale orders that use
    # this payment mode — confirm against the sale_automatic_workflow module.
    workflow_process_id = fields.Many2one(
        comodel_name="sale.workflow.process", string="Automatic Workflow"
    )
|
OCA/sale-workflow
|
sale_automatic_workflow_payment_mode/models/account_payment_mode.py
|
Python
|
agpl-3.0
| 338
|
# -*- coding: utf-8 -*-
import sys
from django.db import models
from django.core.urlresolvers import reverse
from datetime import datetime
from django.db.models.signals import post_syncdb
from django.contrib.sites.models import Site
from allauth.socialaccount.providers import registry
from allauth.socialaccount.models import SocialApp
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
# -------------
# List Model
# -------------
class List(models.Model):
    """A shareable to-do list together with its owner's provider details."""

    id = models.AutoField(primary_key=True)
    slug = models.SlugField(max_length=12, unique=True)
    ListName = models.CharField(max_length=100)
    ListAuthOnly = models.BooleanField(verbose_name="Auth Only")
    ListPubDate = models.DateTimeField(verbose_name="Date List Published")
    # Owner details, denormalized from the owner's social account provider.
    ListOwner = models.CharField(verbose_name="Owner's Handle", max_length=100)
    ListOwnerFN = models.CharField(verbose_name="Owner's First Name", max_length=100)
    ListOwnerLN = models.CharField(verbose_name="Owner's Last Name", max_length=100)
    ListOwnerState = models.CharField(verbose_name="Owner's Confirmation State", max_length=100)
    ListOwnerPrvdr = models.CharField(verbose_name="Owner's Provider", max_length=100)
    ListOwnerLink = models.CharField(verbose_name="Owner's Provider Link", max_length=150)
    # Visibility flags.
    ListIsPrivate = models.BooleanField(verbose_name="This list is Private")
    ListIsHidden = models.BooleanField(verbose_name="This list is Hidden")

    class Meta:
        ordering = ["ListOwner"]

    def __unicode__(self):
        return self.ListName
# -------------
# Item Model
# -------------
class Item(models.Model):
    """A single entry in a :class:`List`, plus its owner's provider details."""

    list = models.ForeignKey('List')
    content = models.TextField()
    ItemMarked = models.BooleanField()
    ItemLocked = models.BooleanField()
    ItemDueDate = models.DateField(verbose_name="Item Due Date", null=True)
    # Completion state: who finished the item and when.
    ItemDone = models.BooleanField()
    WhoDone = models.CharField(max_length=150)
    TimeDone = models.DateTimeField(verbose_name="Date Completed")
    # Owner details, denormalized from the owner's social account provider.
    ItemOwner = models.CharField(verbose_name="Owner's Handle", max_length=100)
    ItemOwnerFN = models.CharField(verbose_name="Owner's First Name", max_length=100)
    ItemOwnerLN = models.CharField(verbose_name="Owner's Last Name", max_length=100)
    ItemOwnerState = models.CharField(verbose_name="Owner's Confirmation State", max_length=100)
    ItemOwnerPrvdr = models.CharField(verbose_name="Owner's Provider", max_length=100)
    ItemOwnerAvtr = models.CharField(verbose_name="Owner's Provider Avatar", max_length=150)
    ItemOwnerLink = models.CharField(verbose_name="Owner's Provider Link", max_length=150)

    class Meta:
        ordering = ["ItemOwner", "ItemOwnerState", "-ItemMarked", "ItemDone"]

    def __unicode__(self):
        return self.content
# ---------------
# Comment Model
# ---------------
class Comment(models.Model):
    """A comment attached to a :class:`List`."""

    NewComment = models.ForeignKey(List)
    # Commenter details, denormalized from the social account provider.
    ComOwner = models.CharField(verbose_name="Owner's Handle", max_length=100)
    ComOwnerFN = models.CharField(verbose_name="Owner's First Name", max_length=100)
    ComOwnerState = models.CharField(verbose_name="Owner's Confirmation State", max_length=100)
    ComContent = models.TextField(verbose_name="Comment", max_length=100)
    ComPubDate = models.DateTimeField(verbose_name="Date Comment Published")

    def __unicode__(self):
        return self.ComContent
# ------------
# All-auth
# -----------
def setup_dummy_social_apps(sender, **kwargs):
    """
    `allauth` needs a SocialApp (client id + secret) for every OAuth based
    provider.  Install placeholder credentials for each provider that does
    not have one yet, so the site is usable out of the box.

    Connected to the post_syncdb signal (see bottom of this module).
    """
    site = Site.objects.get_current()
    for provider in registry.get_list():
        # Only OAuth(1/2) based providers require app credentials; skip
        # everything else early (guard clause instead of a nested block).
        if not isinstance(provider, (OAuthProvider, OAuth2Provider)):
            continue
        try:
            SocialApp.objects.get(provider=provider.id,
                                  sites=site)
        except SocialApp.DoesNotExist:
            print ("Installing dummy application credentials for %s."
                   " Authentication via this provider will not work"
                   " until you configure proper credentials via the"
                   " Django admin (`SocialApp` models)" % provider.id)
            app = SocialApp.objects.create(provider=provider.id,
                                           secret='secret',
                                           client_id='client-id',
                                           name='%s application' % provider.id)
            app.sites.add(site)
# We don't want to interfere with unittests et al
# Only hook the dummy-credential installer when the database is being
# (re)built via `syncdb`; other management commands and test runs leave
# the signal unconnected.
if 'syncdb' in sys.argv:
    post_syncdb.connect(setup_dummy_social_apps, sender=sys.modules[__name__])
|
soplerproject/sopler
|
core/models.py
|
Python
|
agpl-3.0
| 4,574
|
"""
Extended Textbooks page
"""
from edxapp_acceptance.pages.studio.textbook_upload import TextbookUploadPage
from selenium.webdriver.common.action_chains import ActionChains
from regression.pages import UPLOAD_FILE_DIR
from regression.tests.helpers import get_url
class TextbookPageExtended(TextbookUploadPage):
    """
    Studio textbook page, extended with helpers for the regression tests.
    """

    @property
    def url(self):
        """URL of the textbook page within the course under test."""
        return get_url(self.url_path, self.course_info)

    def upload_textbook(self, file_name):
        """Upload the pdf textbook named ``file_name`` from the fixture dir."""
        self.q(css='.action.action-upload').click()
        file_input = self.q(css='.upload-dialog input').results[0]
        file_input.send_keys(UPLOAD_FILE_DIR + "/" + file_name)
        self.q(css='.button.action-primary.action-upload').click()
        self.wait_for_element_absence(
            ".modal-window-overlay", "Upload modal closed")

    def click_edit_button(self):
        """Open the edit form of the last textbook in the list."""
        self.q(css='.edit').results[-1].click()
        self.wait_for_element_visibility(
            '.action-add-chapter', 'Text book form'
        )

    def get_textbook_count(self):
        """Number of textbooks currently listed on the page."""
        return len(self.q(css='.textbooks-list .textbook'))

    def get_textbook_names(self):
        """Titles of all listed textbooks."""
        return self.q(css='.textbook-title').text

    def click_delete_button(self):
        """Delete the last textbook, confirming the warning prompt."""
        self.q(css='.delete.action-icon').results[-1].click()
        self.wait_for_element_visibility(
            '#prompt-warning-title', 'Delete pop up box'
        )
        self.q(css='button.action-primary').first.click()
        self.wait_for_element_invisibility(
            '#prompt-warning-title', 'Delete warning box'
        )

    def delete_all_textbooks(self):
        """Keep deleting textbooks until none remain."""
        while self.get_textbook_count() > 0:
            self.click_delete_button()

    def click_view_live_textbook(self):
        """Click the View Live button of the first textbook."""
        button = self.q(css='.view').results[0]
        # This button is hidden; hovering on it makes it visible, so use
        # ActionChains to move the mouse over it before clicking.
        chain = ActionChains(self.browser)
        chain.move_to_element(button).click(button).perform()
|
raeeschachar/edx-e2e-mirror
|
regression/pages/studio/studio_textbooks.py
|
Python
|
agpl-3.0
| 2,492
|
# This file is part of Tech Tip of the Day.
#
# Tech Tip of the Day is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tech Tip of the Day is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tech Tip of the Day. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import auth, messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import UserCreationForm
from django.core.mail import mail_managers
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import Context, loader, RequestContext
from django.views.decorators.csrf import csrf_protect
from django.views.generic import ListView, DetailView
from techtips.tips.forms import TipForm, UserChangeForm
from techtips.tips.models import Tip
class TipListView(ListView):
    """List of tips; moderators additionally see unpublished ones."""

    context_object_name = 'tip_list'

    def get_queryset(self):
        """If the user is a member of the Moderators group, show all tips,
        otherwise just show published tips.
        """
        user = self.request.user
        is_moderator = (user.is_authenticated()
                        and user.groups.filter(name='Moderators').count())
        if is_moderator:
            return Tip.objects.all()
        return Tip.objects.filter(is_published=True)
class TipDetailView(DetailView):
    """Single-tip view; moderators may also open unpublished tips."""

    context_object_name = 'tip'

    def get_queryset(self):
        """Allow moderators to see unpublished tips.
        """
        user = self.request.user
        is_moderator = (user.is_authenticated()
                        and user.groups.filter(name='Moderators').count())
        if is_moderator:
            return Tip.objects.all()
        return Tip.objects.filter(is_published=True)
def register(request):
    """Register a new user with Django's standard UserCreationForm.

    On a valid POST the user is created, logged in immediately, and
    redirected to the tip list; otherwise the (possibly bound) form is
    re-rendered.
    """
    is_post = request.method == 'POST'
    form = UserCreationForm(request.POST) if is_post else UserCreationForm()
    if is_post and form.is_valid():
        form.save()
        # Authenticate with the just-submitted credentials so the new
        # user does not have to log in manually.
        user = auth.authenticate(username=request.POST['username'],
                                 password=request.POST['password1'])
        auth.login(request, user)
        return HttpResponseRedirect(reverse('tip_list_view'))
    return render_to_response("registration/register.html",
                              {'form': form},
                              context_instance=RequestContext(request))
def logout(request):
    """Logs out the current user and redirects to the tip list.
    """
    auth.logout(request)
    return HttpResponseRedirect(reverse('tip_list_view'))
@login_required
@csrf_protect
def add_tip(request):
    """Submit a new tip.

    A valid POST stores the tip attributed to the current user, e-mails
    the site managers, flashes a confirmation message and redirects to
    the tip list.  Otherwise the form is (re-)rendered.
    """
    is_post = request.method == 'POST'
    form = TipForm(request.POST) if is_post else TipForm()
    if is_post and form.is_valid():
        # Save the submission, attributed to the current user.
        tip = form.save(commit=False)
        tip.created_by = request.user
        tip.save()
        # Notify managers of new submission (best effort: fail_silently).
        context = Context({'tip': tip})
        text_message = loader.get_template('tips/tip_email.txt').render(context)
        html_message = loader.get_template('tips/tip_email.html').render(context)
        mail_managers('New tip submission', text_message,
                      fail_silently=True, html_message=html_message)
        # Confirm submission
        messages.success(request,
                         'Thank you. Your tip has been submitted.')
        return HttpResponseRedirect(reverse('tip_list_view'))
    return render_to_response('tips/tip_add.html',
                              {'form': form},
                              context_instance=RequestContext(request))
@login_required
def profile(request):
    """Render the current user's profile page (template-only view)."""
    return render_to_response('registration/profile.html', {},
                              context_instance=RequestContext(request))
@login_required
@csrf_protect
def edit_profile(request):
    """Let the logged-in user edit their own account details.

    GET renders the form bound to the current user; a valid POST saves
    the changes and redirects to the tip list.
    """
    if request.method == 'POST':
        form = UserChangeForm(request.POST, instance=request.user)
        if form.is_valid():
            # Bug fix: previously this called form.save(commit=False),
            # which built the updated user object but never wrote it to
            # the database — profile edits were silently discarded.
            form.save()
            return HttpResponseRedirect(reverse('tip_list_view'))
    else:
        form = UserChangeForm(instance=request.user)
    return render_to_response('registration/edit_profile.html',
                              {'form': form},
                              context_instance=RequestContext(request))
|
kaapstorm/techtips
|
techtips/tips/views.py
|
Python
|
agpl-3.0
| 4,838
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2015 Université Catholique de Louvain.
#
# This file is part of INGInious.
#
# INGInious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INGInious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with INGInious. If not, see <http://www.gnu.org/licenses/>.
""" Allow to create/edit/delete/move/download files associated to tasks """
import codecs
import json
import mimetypes
import os.path
import shutil
import tarfile
import tempfile
import web
from common.base import get_tasks_directory, id_checker
from common.task_file_managers.manage import get_available_task_file_managers
from frontend.base import get_template_renderer
from frontend.custom.courses import FrontendCourse
from frontend.pages.course_admin.utils import get_course_and_check_rights
class CourseTaskFiles(object):
    """ Edit a task: create/edit/delete/move/download the files belonging to it. """

    def GET(self, courseid, taskid):
        """ Edit a task """
        # Task ids end up in filesystem paths below, so reject anything
        # outside the allowed id alphabet.
        if not id_checker(taskid):
            raise Exception("Invalid task id")
        # Raises if the current user may not administrate this course.
        get_course_and_check_rights(courseid, allow_all_staff=False)
        request = web.input()
        # Dispatch on the "action" query parameter; every file action
        # requires at least a "path" parameter.
        if request.get("action") == "download" and request.get('path') is not None:
            return self.action_download(courseid, taskid, request.get('path'))
        elif request.get("action") == "delete" and request.get('path') is not None:
            return self.action_delete(courseid, taskid, request.get('path'))
        elif request.get("action") == "rename" and request.get('path') is not None and request.get('new_path') is not None:
            return self.action_rename(courseid, taskid, request.get('path'), request.get('new_path'))
        elif request.get("action") == "create" and request.get('path') is not None:
            return self.action_create(courseid, taskid, request.get('path'))
        elif request.get("action") == "edit" and request.get('path') is not None:
            return self.action_edit(courseid, taskid, request.get('path'))
        else:
            return self.show_tab_file(courseid, taskid)

    def POST(self, courseid, taskid):
        """ Upload or modify a file """
        if not id_checker(taskid):
            raise Exception("Invalid task id")
        get_course_and_check_rights(courseid, allow_all_staff=False)
        # file={} makes web.py keep the upload as a file-like storage object
        # instead of reading it into a string.
        request = web.input(file={})
        if request.get("action") == "upload" and request.get('path') is not None and request.get('file') is not None:
            return self.action_upload(courseid, taskid, request.get('path'), request.get('file'))
        elif request.get("action") == "edit_save" and request.get('path') is not None and request.get('content') is not None:
            return self.action_edit_save(courseid, taskid, request.get('path'), request.get('content'))
        else:
            return self.show_tab_file(courseid, taskid)

    def show_tab_file(self, courseid, taskid, error=False):
        """ Return the file tab """
        # NOTE(review): ``error`` is accepted (callers pass messages in) but
        # is not forwarded to the template here — confirm whether error
        # messages are supposed to be displayed.
        return get_template_renderer('templates/').course_admin.edit_tabs.files(FrontendCourse(courseid), taskid,
                                                                                self.get_task_filelist(courseid, taskid))

    @classmethod
    def get_task_filelist(cls, courseid, taskid):
        """ Returns a flattened version of all the files inside the task directory, excluding the files task.* and hidden files.
            It returns a list of tuples, of the type (Integer Level, Boolean IsDirectory, String Name, String CompleteName)
        """
        path = os.path.join(get_tasks_directory(), courseid, taskid)
        if not os.path.exists(path):
            return []
        # First build a nested dict mirroring the directory tree:
        # a directory maps to a dict, a file maps to None.
        result_dict = {}
        for root, _, files in os.walk(path):
            rel_root = os.path.normpath(os.path.relpath(root, path))
            insert_dict = result_dict
            # Walk result_dict down to the dict representing rel_root,
            # creating intermediate dicts on the way; skip hidden dirs.
            if rel_root != ".":
                hidden_dir = False
                for i in rel_root.split(os.path.sep):
                    if i.startswith("."):
                        hidden_dir = True
                        break
                    if i not in insert_dict:
                        insert_dict[i] = {}
                    insert_dict = insert_dict[i]
                if hidden_dir:
                    continue
            for f in files:
                # Do not follow symlinks and do not take into account task describers
                # (files named task.<ext> at the top level whose extension is
                # handled by a task file manager), nor hidden files.
                if not os.path.islink(
                        os.path.join(
                            root, f)) and not (
                        root == path and os.path.splitext(f)[0] == "task" and os.path.splitext(f)[1][
                            1:] in get_available_task_file_managers().keys()) and not f.startswith(
                        "."):
                    insert_dict[f] = None

        # Then flatten the tree depth-first (files before directories),
        # accumulating into the function attribute ``flattened``.
        # NOTE(review): dict.iteritems() is Python 2 only — this module
        # appears to predate Python 3.
        def recur_print(current, level, current_name):
            iteritems = sorted(current.iteritems())
            # First, the files
            recur_print.flattened += [(level, False, f, os.path.join(current_name, f)) for f, t in iteritems if t is None]
            # Then, the dirs
            for name, sub in iteritems:
                if sub is not None:
                    recur_print.flattened.append((level, True, name, os.path.join(current_name, name)))
                    recur_print(sub, level + 1, os.path.join(current_name, name))
        recur_print.flattened = []
        recur_print(result_dict, 0, '')
        return recur_print.flattened

    def verify_path(self, courseid, taskid, path, new_path=False):
        """ Return the real wanted path (relative to the INGInious root) or None if the path is not valid/allowed.

            With new_path=True the target must NOT exist yet (creations,
            uploads, rename destinations); with the default it must exist.
        """
        task_dir_path = os.path.join(get_tasks_directory(), courseid, taskid)
        # verify that the dir exists
        if not os.path.exists(task_dir_path):
            return None
        wanted_path = os.path.normpath(os.path.join(task_dir_path, path))
        rel_wanted_path = os.path.relpath(wanted_path, task_dir_path)  # normalized
        # verify that the path we want exists and is withing the directory we want;
        # (new_path == os.path.exists(...)) rejects an existing target when a
        # new path is requested, and a missing target otherwise.
        if (new_path == os.path.exists(wanted_path)) or os.path.islink(wanted_path) or rel_wanted_path.startswith('..'):
            return None
        # do not allow touching the task.* file
        if os.path.splitext(rel_wanted_path)[0] == "task" and os.path.splitext(rel_wanted_path)[1][1:] in get_available_task_file_managers().keys():
            return None
        # do not allow hidden dir/files
        if rel_wanted_path != ".":
            for i in rel_wanted_path.split(os.path.sep):
                if i.startswith("."):
                    return None
        return wanted_path

    def action_edit(self, courseid, taskid, path):
        """ Edit a file: return its content as JSON, or an error marker when
            it is not decodable as UTF-8. """
        wanted_path = self.verify_path(courseid, taskid, path)
        if wanted_path is None or not os.path.isfile(wanted_path):
            return "Internal error"
        content = open(wanted_path, 'r').read()
        try:
            # Only valid UTF-8 content is editable in the browser.
            # NOTE(review): str.decode() is Python 2 only.
            content.decode('utf-8')
            return json.dumps({"content": content})
        except:
            return json.dumps({"error": "not-readable"})

    def action_edit_save(self, courseid, taskid, path, content):
        """ Save an edited file """
        wanted_path = self.verify_path(courseid, taskid, path)
        if wanted_path is None or not os.path.isfile(wanted_path):
            return "Internal error"
        try:
            # Write back as UTF-8 text; report success/failure as JSON.
            with codecs.open(wanted_path, "w", "utf-8") as f:
                f.write(content)
            return json.dumps({"ok": True})
        except:
            return json.dumps({"error": True})

    def action_upload(self, courseid, taskid, path, fileobj):
        """ Upload a file """
        wanted_path = self.verify_path(courseid, taskid, path, True)
        if wanted_path is None:
            return self.show_tab_file(courseid, taskid, "Invalid new path")
        # Create any missing intermediate directories of the target path.
        curpath = os.path.join(get_tasks_directory(), courseid, taskid)
        rel_path = os.path.relpath(wanted_path, curpath)
        for i in rel_path.split(os.path.sep)[:-1]:
            curpath = os.path.join(curpath, i)
            if not os.path.exists(curpath):
                os.mkdir(curpath)
            if not os.path.isdir(curpath):
                return self.show_tab_file(courseid, taskid, i + " is not a directory!")
        try:
            open(wanted_path, "w").write(fileobj.file.read())
            return self.show_tab_file(courseid, taskid)
        except:
            return self.show_tab_file(courseid, taskid, "An error occurred while writing the file")

    def action_create(self, courseid, taskid, path):
        """ Create a file or a directory (a trailing "/" requests a directory) """
        want_directory = path.strip().endswith("/")
        wanted_path = self.verify_path(courseid, taskid, path, True)
        if wanted_path is None:
            return self.show_tab_file(courseid, taskid, "Invalid new path")
        # Create any missing intermediate directories of the target path.
        curpath = os.path.join(get_tasks_directory(), courseid, taskid)
        rel_path = os.path.relpath(wanted_path, curpath)
        for i in rel_path.split(os.path.sep)[:-1]:
            curpath = os.path.join(curpath, i)
            if not os.path.exists(curpath):
                os.mkdir(curpath)
            if not os.path.isdir(curpath):
                return self.show_tab_file(courseid, taskid, i + " is not a directory!")
        # An empty last component means only directories were requested.
        if rel_path.split(os.path.sep)[-1] != "":
            if want_directory:
                os.mkdir(os.path.join(curpath, rel_path.split(os.path.sep)[-1]))
            else:
                # open(..., 'a') creates an empty file without truncating.
                open(os.path.join(curpath, rel_path.split(os.path.sep)[-1]), 'a')
        return self.show_tab_file(courseid, taskid)

    def action_rename(self, courseid, taskid, path, new_path):
        """ Rename/move a file or a directory """
        old_path = self.verify_path(courseid, taskid, path)
        if old_path is None:
            return self.show_tab_file(courseid, taskid, "Internal error")
        wanted_path = self.verify_path(courseid, taskid, new_path, True)
        if wanted_path is None:
            return self.show_tab_file(courseid, taskid, "Invalid new path")
        try:
            shutil.move(old_path, wanted_path)
            return self.show_tab_file(courseid, taskid)
        except:
            return self.show_tab_file(courseid, taskid, "An error occurred while moving the files")

    def action_delete(self, courseid, taskid, path):
        """ Delete a file or a directory """
        wanted_path = self.verify_path(courseid, taskid, path)
        if wanted_path is None:
            return self.show_tab_file(courseid, taskid, "Internal error")
        # special case: cannot delete current directory of the task
        if "." == os.path.relpath(wanted_path, os.path.join(get_tasks_directory(), courseid, taskid)):
            return self.show_tab_file(courseid, taskid, "Internal error")
        if os.path.isdir(wanted_path):
            shutil.rmtree(wanted_path)
        else:
            os.unlink(wanted_path)
        return self.show_tab_file(courseid, taskid)

    def action_download(self, courseid, taskid, path):
        """ Download a file or a directory """
        wanted_path = self.verify_path(courseid, taskid, path)
        if wanted_path is None:
            raise web.notfound()
        # if the user want a dir: stream it back as a gzipped tarball
        # built in a temporary file.
        if os.path.isdir(wanted_path):
            tmpfile = tempfile.TemporaryFile()
            tar = tarfile.open(fileobj=tmpfile, mode='w:gz')
            for root, _, files in os.walk(wanted_path):
                for fname in files:
                    # Store paths relative to the downloaded directory and
                    # preserve size/mtime metadata.
                    info = tarfile.TarInfo(name=os.path.join(os.path.relpath(root, wanted_path), fname))
                    file_stat = os.stat(os.path.join(root, fname))
                    info.size = file_stat.st_size
                    info.mtime = file_stat.st_mtime
                    tar.addfile(info, fileobj=open(os.path.join(root, fname), 'r'))
            tar.close()
            tmpfile.seek(0)
            web.header('Content-Type', 'application/x-gzip', unique=True)
            web.header('Content-Disposition', 'attachment; filename="dir.tgz"', unique=True)
            return tmpfile
        else:
            # Single file: guess the MIME type from the filename.
            mimetypes.init()
            mime_type = mimetypes.guess_type(wanted_path)
            web.header('Content-Type', mime_type[0])
            web.header('Content-Disposition', 'attachment; filename="' + os.path.split(wanted_path)[1] + '"', unique=True)
            return open(wanted_path, 'r')
|
layus/INGInious
|
frontend/pages/course_admin/task_edit_file.py
|
Python
|
agpl-3.0
| 12,962
|
# Handlers enabled for this configuration.
handler_list = ["dsl", "dialup"]
|
Exa-Networks/scavengerexa
|
config/handlers/__init__.py
|
Python
|
agpl-3.0
| 32
|
# - coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
from openerp.exceptions import ValidationError
class AccountBankReconcileLine(models.Model):
    # A bank-reconciliation period line: the balances at the start and end
    # of the period plus the journal items reconciled within it.
    _name = 'account.bank.reconcile.line'
    _order = 'date_stop desc'

    # Period covered by this reconciliation line.
    date_start = fields.Date(
        string='Fecha de inicio',
        readonly=True,
    )
    date_stop = fields.Date(
        string='Fecha de fin',
        readonly=True,
    )
    # Balances in company currency.
    last_balance = fields.Float(
        'Balance anterior',
    )
    current_balance = fields.Float(
        'Balance actual',
    )
    # Balances in the journal/foreign currency.
    last_balance_currency = fields.Float(
        'Balance anterior en moneda'
    )
    current_balance_currency = fields.Float(
        'Balance actual en moneda'
    )
    # Journal items reconciled in this period.
    reconcile_move_line_ids = fields.One2many(
        comodel_name='account.reconcile.move.line',
        inverse_name='bank_reconcile_line_id',
        string='Movimientos',
    )
    bank_reconcile_id = fields.Many2one(
        comodel_name='account.bank.reconcile',
        string='Conciliacion bancaria',
        readonly=True,
    )
    # True only on the most recent line of the parent reconciliation.
    last = fields.Boolean(
        string='Ultimo',
    )
    company_id = fields.Many2one('res.company', string='Compania', related='bank_reconcile_id.company_id', store=True,
                                 readonly=True, related_sudo=False)

    @api.onchange('reconcile_move_line_ids')
    def onchange_balance(self):
        # Recompute both closing balances as the previous balance plus the
        # sum of the reconciled move lines.
        self.current_balance_currency = sum(
            line.amount_currency
            for line in self.reconcile_move_line_ids
        ) + self.last_balance_currency
        self.current_balance = sum(
            line.debit_move_line - line.credit_move_line
            for line in self.reconcile_move_line_ids
        ) + self.last_balance

    def unlink(self):
        # Only the most recent reconciliation line may be deleted; after
        # deleting it, re-flag the new latest line.
        # NOTE(review): ``self`` is treated as a single record here (direct
        # comparison and attribute access) — confirm callers never unlink
        # multiple lines at once.
        if self.bank_reconcile_id.get_last_conciliation() != self:
            raise ValidationError('Solo se puede borrar la ultima conciliacion.')
        bank_reconcile = self.bank_reconcile_id
        self.reconcile_move_line_ids.unlink()
        res = super(AccountBankReconcileLine, self).unlink()
        if bank_reconcile.bank_reconcile_line_ids:
            bank_reconcile.get_last_conciliation().last = True
        return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
odoo-arg/odoo_l10n_ar
|
l10n_ar_bank_reconcile/models/account_bank_reconcile_line.py
|
Python
|
agpl-3.0
| 3,102
|
from autopwn2.api import get_or_model
from autopwn2.database import db, with_session
from autopwn2.database.models import Setting, Tool, Job
from autopwn2.schedule import scheduler, scheduled_task
@with_session
def create_setting(data):
    """Persist a new Setting built from the request payload."""
    setting = Setting(data.get('name'), data.get('value'), data.get('example'))
    db.session.add(setting)


@with_session
def update_setting(id, data):
    """Apply any fields present in ``data`` to the Setting ``id``."""
    setting = Setting.query.filter(Setting.id == id).one()
    for attr in ('name', 'value', 'example'):
        setattr(setting, attr, get_or_model(attr, data, setting))
    db.session.add(setting)


@with_session
def delete_setting(id):
    """Remove the Setting with the given id."""
    db.session.delete(Setting.query.filter(Setting.id == id).one())
@with_session
def create_tool(data):
    """Persist a new Tool built from the request payload."""
    tool = Tool(data.get('name'), data.get('command'), data.get('description'),
                data.get('url'), data.get('stdout'))
    db.session.add(tool)


@with_session
def update_tool(id, data):
    """Apply any fields present in ``data`` to the Tool ``id``."""
    tool = Tool.query.filter(Tool.id == id).one()
    for attr in ('name', 'command', 'description', 'url', 'stdout'):
        setattr(tool, attr, get_or_model(attr, data, tool))
    db.session.add(tool)


@with_session
def delete_tool(id):
    """Remove the Tool with the given id."""
    db.session.delete(Tool.query.filter(Tool.id == id).one())
@with_session
def create_job(data):
    """Create a Job whose command is the tool's command template expanded
    with the current Setting values."""
    settings = {s.name: s.value for s in Setting.query.all()}
    tool = Tool.query.filter(Tool.id == data['tool_id']).one()
    command = tool.command.format(**settings)
    job = Job(command, tool)
    job.tool = tool
    db.session.add(job)


@with_session
def update_job(id, data):
    """Update a Job's command and re-point it at the given tool."""
    job = Job.query.filter(Job.id == id).one()
    job.command = get_or_model('command', data, job)
    job.tool = Tool.query.filter(Tool.id == data.get('tool_id')).one()
    db.session.add(job)


@with_session
def delete_job(id):
    """Remove the Job with the given id."""
    db.session.delete(Job.query.filter(Job.id == id).one())


def start_job(id):
    """Queue the job for a one-shot run via the scheduler ('date' trigger)."""
    job_id = 'j' + str(id)
    scheduler.add_job(func=scheduled_task, trigger='date', args=[id], id=job_id)
|
nccgroup/autopwn
|
autopwn2/api/business.py
|
Python
|
agpl-3.0
| 2,443
|
import os
import platform
class Constants:
    """Constant strings, token lists and filesystem locations shared
    across coala."""

    THIS_IS_A_BUG = ("This is a bug. We are sorry for the inconvenience. "
                     "Please contact the developers for assistance.")

    CRASH_MESSAGE = ("An unknown error occurred. This is a bug. We are "
                     "sorry for the inconvenience. Please contact the "
                     "developers for assistance. During execution of "
                     "coala an exception was raised. This should never "
                     "happen. When asked for, the following information "
                     "may help investigating:")

    OBJ_NOT_ACCESSIBLE = "{} is not accessible and will be ignored!"

    # Strings recognised as boolean values in user settings.
    TRUE_STRINGS = ['1',
                    "on",
                    'y',
                    'yes',
                    "yeah",
                    "sure",
                    'true',
                    'definitely',
                    'yup',
                    "right"]

    FALSE_STRINGS = ['0',
                     'off',
                     'n',
                     'no',
                     'nope',
                     'nah',
                     'false',
                     "wrong"]

    # This string contains many unicode characters to challenge tests.
    COMPLEX_TEST_STRING = ("4 r34l ch4ll3n63: 123 ÄÖü ABc @€¥ §&% {[( ←↓→↑ "
                           "ĦŊħ ß°^ \\\n\u2192")

    # Results from coverage for unittests are stored here.
    COVERAGE_DIR = "./.coverageresults"

    # Path to the coalib directory
    coalib_root = os.path.join(os.path.dirname(__file__),
                               os.path.pardir)

    # Path to the directory containing the default bears
    coalib_bears_root = os.path.join(coalib_root, os.path.pardir, "bears")

    # Path to the language definition files
    language_definitions = os.path.join(coalib_root,
                                        "bearlib",
                                        "languages",
                                        "definitions")

    system_coafile = os.path.join(coalib_root, "default_coafile")

    user_coafile = os.path.join(os.path.expanduser("~"), ".coarc")

    # The version string is read once at import time from the VERSION file.
    VERSION_FILE = os.path.join(coalib_root, "VERSION")
    with open(VERSION_FILE, 'r') as ver:
        VERSION = ver.readline().strip()

    BUS_NAME = "org.coala_analyzer.v1"

    # Per-user data directory: APPDATA on Windows, ~/.local elsewhere.
    if platform.system() == 'Windows':  # pragma: no cover
        USER_DIR = os.path.join(os.getenv("APPDATA"), "coala")
    else:
        USER_DIR = os.path.join(os.path.expanduser("~"), ".local", "coala")

    TAGS_DIR = os.path.join(USER_DIR, "tags")
    try:
        # Best-effort creation of the tags cache directory.
        os.makedirs(TAGS_DIR)
    except OSError:  # pragma: no cover
        # Fix: the previous bare ``except:`` swallowed *every* exception,
        # including SystemExit/KeyboardInterrupt raised during import.
        # Filesystem failures (dir already exists, no permission) raise
        # OSError, which is all we intend to ignore here.
        pass
|
Tanmay28/coala
|
coalib/misc/Constants.py
|
Python
|
agpl-3.0
| 2,678
|
"""
Django models for videos for Video Abstraction Layer (VAL)
When calling a serializers' .errors field, there is a priority in which the
errors are returned. This may cause a partial return of errors, starting with
the highest priority.
Missing a field, having an incorrect input type (expected an int, not a str),
nested serialization errors, or any similar errors will be returned by
themselves. After these are resolved, errors such as a negative file_size or
invalid profile_name will be returned.
"""
import json
import logging
import os
from contextlib import closing
from uuid import uuid4
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, RegexValidator
from django.db import models
from django.dispatch import receiver
from django.urls import reverse
from model_utils.models import TimeStampedModel
from edxval.utils import (
TranscriptFormat,
get_video_image_storage,
get_video_transcript_storage,
validate_generated_images,
video_image_path,
video_transcript_path,
)
# Module-level logger for the VAL models.
logger = logging.getLogger(__name__)

# Characters allowed in identifiers that may be embedded in URLs (used by
# the RegexValidators on Profile.profile_name and Video.edx_video_id below).
URL_REGEX = '^[a-zA-Z0-9\\-_]*$'
# Default maximum number of items accepted by ListField (see ListField).
LIST_MAX_ITEMS = 3
# Status value marking a video as externally hosted — semantics defined by
# consumers of this constant (not visible in this module).
EXTERNAL_VIDEO_STATUS = 'external'
class ModelFactoryWithValidation:
    """
    Mixin providing factory methods that run full validation before saving.
    """

    @classmethod
    def create_with_validation(cls, *args, **kwargs):
        """
        Create a new instance, run ``full_clean()`` on it, save it and
        return it.
        """
        instance = cls(*args, **kwargs)
        instance.full_clean()  # pylint: disable=no-member
        instance.save()  # pylint: disable=no-member
        return instance

    @classmethod
    def get_or_create_with_validation(cls, *args, **kwargs):
        """
        Get an existing instance or create-and-validate a new one.

        Mirrors Model.get_or_create: returns a tuple (object, created),
        where ``created`` tells whether a new object was made.
        """
        try:
            return cls.objects.get(*args, **kwargs), False
        except cls.DoesNotExist:
            return cls.create_with_validation(*args, **kwargs), True
class Profile(models.Model):
    """
    Details for pre-defined encoding format
    The profile_name has a regex validator because in case this field will be
    used in a url.
    """
    # Unique, URL-safe name of the encoding profile (e.g. used in routes).
    profile_name = models.CharField(
        max_length=50,
        unique=True,
        validators=[
            RegexValidator(
                regex=URL_REGEX,
                message='profile_name has invalid characters',
                code='invalid profile_name'
            ),
        ]
    )

    def __str__(self):
        return self.profile_name
class Video(models.Model):
    """
    Model for a Video group with the same content.

    A video can exist in multiple encoded formats; this model holds the
    fields that are shared across all of those formats.

    Attributes:
        status: Used to keep track of the processing video as it goes through
            the video pipeline, e.g., "Uploading", "File Complete"...
    """
    created = models.DateTimeField(auto_now_add=True)
    edx_video_id = models.CharField(
        max_length=100,
        unique=True,
        validators=[
            RegexValidator(
                regex=URL_REGEX,
                message='edx_video_id has invalid characters',
                code='invalid edx_video_id'
            ),
        ]
    )
    client_video_id = models.CharField(max_length=255, db_index=True, blank=True)
    duration = models.FloatField(validators=[MinValueValidator(0)])
    status = models.CharField(max_length=255, db_index=True)
    error_description = models.TextField('Error Description', blank=True, null=True)

    def get_absolute_url(self):
        """Full url of the video-detail endpoint for this edx_video_id."""
        return reverse('video-detail', args=[self.edx_video_id])

    def __str__(self):
        return self.edx_video_id

    @classmethod
    def get_or_none(cls, **filter_kwargs):
        """Return the single matching video, or None when it does not exist."""
        try:
            return cls.objects.get(**filter_kwargs)
        except cls.DoesNotExist:
            return None

    @classmethod
    def by_youtube_id(cls, youtube_id):
        """Queryset of videos that have a 'youtube' encoding with this id."""
        return cls.objects.filter(
            encoded_videos__profile__profile_name='youtube',
            encoded_videos__url=youtube_id
        ).prefetch_related('encoded_videos', 'courses')
class CourseVideo(models.Model, ModelFactoryWithValidation):
    """
    Pairs a video with a course_id.

    Every course-semester has a unique course_id; a video can be paired
    with many course_ids, but each (course_id, video) pair is unique.
    """
    course_id = models.CharField(max_length=255)
    video = models.ForeignKey(Video, related_name='courses', on_delete=models.CASCADE)
    is_hidden = models.BooleanField(default=False, help_text='Hide video for course.')

    class Meta:
        """
        course_id is listed first in this composite index
        """
        unique_together = ("course_id", "video")

    def image_url(self):
        """
        Return the course video image url, or None when no image is set.
        """
        video_image = getattr(self, 'video_image', None)
        if video_image is not None:
            return video_image.image_url()
        return None

    def __str__(self):
        return self.course_id
class EncodedVideo(models.Model):
    """
    Video/encoding pair
    """
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    # Location of this encoding (a url; for the 'youtube' profile it holds
    # the youtube id — see Video.by_youtube_id).
    url = models.CharField(max_length=200)
    # Size of the encoded file (presumably bytes — confirm with the pipeline).
    file_size = models.PositiveIntegerField()
    # Encoding bitrate (units not established here — confirm with the pipeline).
    bitrate = models.PositiveIntegerField()
    # Encoding profile; "+" suppresses the reverse relation on Profile.
    profile = models.ForeignKey(Profile, related_name="+", on_delete=models.CASCADE)
    video = models.ForeignKey(Video, related_name="encoded_videos", on_delete=models.CASCADE)
    def __str__(self):
        return str(self.id)
class CustomizableImageField(models.ImageField):
    """
    ImageField whose storage-related options are applied at runtime rather
    than serialized into migrations.

    The upload path, storage backend, and length/null options are forced in
    __init__ and stripped again in deconstruct(), so storage configuration
    changes never produce new migration files.
    """
    def __init__(self, *args, **kwargs):
        kwargs['upload_to'] = video_image_path
        kwargs['storage'] = get_video_image_storage()
        kwargs['max_length'] = 500  # allocate enough for filepath
        kwargs['blank'] = True
        kwargs['null'] = True
        super().__init__(*args, **kwargs)
    def deconstruct(self):
        """Drop the hard-coded settings from the migration serialization."""
        name, path, args, kwargs = super().deconstruct()
        for option in ('upload_to', 'storage', 'max_length'):
            del kwargs[option]
        return name, path, args, kwargs
class ListField(models.TextField):
    """
    TextField that transparently stores a python list as JSON text.
    """
    def __init__(self, max_items=LIST_MAX_ITEMS, *args, **kwargs):  # pylint: disable=keyword-arg-before-vararg
        self.max_items = max_items
        super().__init__(*args, **kwargs)
    def get_prep_value(self, value):
        """
        Serialize ``value`` (a list) to its JSON text form for the database.
        """
        if value and not isinstance(value, list):
            raise ValidationError(f'ListField value {value} is not a list.')
        validated = self.validate_list(value) or []
        return json.dumps(validated)
    def from_db_value(self, value, expression, connection):  # pylint: disable=unused-argument
        """
        Deserialize the JSON text coming back from the database.
        """
        return self.to_python(value)
    def to_python(self, value):
        """
        Coerce ``value`` into a validated python list.
        """
        if not value:
            value = []
        if isinstance(value, list):
            # Already a list — just validate its items.
            return self.validate_list(value)
        # Otherwise expect a JSON-encoded list; validate after decoding.
        try:
            decoded = json.loads(value)
            if not isinstance(decoded, list):
                raise TypeError
            self.validate_list(decoded)
        except (ValueError, TypeError) as error:
            raise ValidationError('Must be a valid list of strings.') from error
        return decoded
    def validate_list(self, value):
        """
        Validate data before saving to database.

        Arguments:
            value(list): list to be validated

        Returns:
            list if validation is successful

        Raises:
            ValidationError
        """
        return validate_generated_images(value, self.max_items)
    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        # Serialize max_items only when it differs from the default.
        if self.max_items != LIST_MAX_ITEMS:
            kwargs['max_items'] = self.max_items
        return name, path, args, kwargs
class VideoImage(TimeStampedModel):
    """
    Image model for course video.
    """
    # One image per (course, video) pairing.
    course_video = models.OneToOneField(CourseVideo, related_name="video_image", on_delete=models.CASCADE)
    image = CustomizableImageField()
    # Names of images auto-generated by VEDA (bounded by LIST_MAX_ITEMS).
    generated_images = ListField()
    @classmethod
    def create_or_update(cls, course_video, file_name=None, image_data=None, generated_images=None):
        """
        Create a VideoImage object for a CourseVideo.
        NOTE: If `image_data` is None then `file_name` value will be used as it is, otherwise
        a new file name is constructed based on uuid and extension from `file_name` value.
        `image_data` will be None in case of course re-run and export. `generated_images` list
        contains names of images auto generated by VEDA. If an image is not already set then first
        image name from `generated_images` list will be used.
        Arguments:
            course_video (CourseVideo): CourseVideo instance
            file_name (str): File name of the image
            image_data (InMemoryUploadedFile): Image data to be saved.
            generated_images (list): auto generated image names
        Returns:
            Returns a tuple of (video_image, created).
        """
        video_image, created = cls.objects.get_or_create(course_video=course_video)
        if image_data:
            # Delete the existing image only if this image is not used by anyone else. This is necessary because
            # after a course re-run, a video in original course and the new course points to same image, So when
            # we update an image in new course and delete the existing image. This will delete the image from
            # original course as well, thus leaving video with having no image.
            if not created and VideoImage.objects.filter(image=video_image.image).count() == 1:
                video_image.image.delete()
            with closing(image_data) as image_file:
                # Store under a fresh uuid name, keeping the original extension.
                file_name = '{uuid}{ext}'.format(uuid=uuid4().hex, ext=os.path.splitext(file_name)[1])
                try:
                    video_image.image.save(file_name, image_file)
                except Exception:
                    logger.exception(
                        'VAL: Video Image save failed to storage for course_id [%s] and video_id [%s]',
                        course_video.course_id,
                        course_video.video.edx_video_id
                    )
                    raise
        else:
            # No raw data: point the field at an existing stored file name.
            if generated_images:
                video_image.generated_images = generated_images
                if not video_image.image.name:
                    file_name = generated_images[0]
            if file_name:
                video_image.image.name = file_name
        video_image.save()
        return video_image, created
    def image_url(self):
        """
        Return image url for a course video image.
        """
        storage = get_video_image_storage()
        return storage.url(self.image.name)
    def __str__(self):
        """
        Returns unicode representation of object.
        """
        return f'{self.id} {self.course_video.id}'
class TranscriptProviderType:
    """
    Namespace of transcript provider identifiers plus Django choices.
    """
    CUSTOM = 'Custom'
    THREE_PLAY_MEDIA = '3PlayMedia'
    CIELO24 = 'Cielo24'
    # Stored value and display label are identical for every provider.
    CHOICES = tuple(
        (provider, provider)
        for provider in (CUSTOM, THREE_PLAY_MEDIA, CIELO24)
    )
class CustomizableFileField(models.FileField):
    """
    FileField whose storage-related options are applied at runtime rather
    than serialized into migrations.

    The upload path, storage backend, and length/null options are forced in
    __init__ and stripped again in deconstruct(), so storage configuration
    changes never produce new migration files.
    """
    def __init__(self, *args, **kwargs):
        kwargs['upload_to'] = video_transcript_path
        kwargs['storage'] = get_video_transcript_storage()
        kwargs['max_length'] = 255  # enough room for a uuid-based name
        kwargs['blank'] = True
        kwargs['null'] = True
        super().__init__(*args, **kwargs)
    def deconstruct(self):
        """Drop the hard-coded settings from the migration serialization."""
        name, path, args, kwargs = super().deconstruct()
        for option in ('upload_to', 'storage', 'max_length'):
            del kwargs[option]
        return name, path, args, kwargs
class VideoTranscript(TimeStampedModel):
    """
    Transcript for a video
    """
    # Nullable so a transcript row can exist without a video link.
    video = models.ForeignKey(Video, related_name='video_transcripts', null=True, on_delete=models.CASCADE)
    transcript = CustomizableFileField()
    language_code = models.CharField(max_length=50, db_index=True)
    provider = models.CharField(
        max_length=30,
        choices=TranscriptProviderType.CHOICES,
        default=TranscriptProviderType.CUSTOM,
    )
    file_format = models.CharField(max_length=20, db_index=True, choices=TranscriptFormat.CHOICES)
    class Meta:
        unique_together = ('video', 'language_code')
    @property
    def filename(self):
        """
        Returns readable filename for a transcript
        """
        # Base the name on the client video id minus its extension.
        client_id, __ = os.path.splitext(self.video.client_video_id)
        file_name = '{name}-{language}.{format}'.format(
            name=client_id,
            language=self.language_code,
            format=self.file_format
        ).replace('\n', ' ')
        return file_name
    def save_transcript(self, file_data, file_format, file_name=None):
        """
        Saves Transcript Content to a Video Transcript File
        Arguments:
            file_data(InMemoryUploadedFile): Transcript content.
            file_format(unicode): Transcript file format.
            file_name(unicode): Optional explicit name; a uuid-based name is
                generated when omitted.
        """
        # generate transcript file name if not already given
        if not file_name:
            file_name = '{uuid}.{ext}'.format(uuid=uuid4().hex, ext=file_format)
        # save the transcript file
        if file_data:
            self.transcript.save(file_name, file_data)
        else:
            # No content provided: point the field at an existing file name.
            self.transcript.name = file_name
        # save the object
        self.save()
    @classmethod
    def get_or_none(cls, video_id, language_code):
        """
        Returns a data model object if found or none otherwise.
        Arguments:
            video_id(unicode): video id to which transcript may be associated
            language_code(unicode): language of the requested transcript
        """
        try:
            transcript = cls.objects.get(video__edx_video_id=video_id, language_code=language_code)
        except cls.DoesNotExist:
            transcript = None
        return transcript
    @classmethod
    def create(cls, video, language_code, file_format, content, provider):
        """
        Create a Video Transcript.
        Arguments:
            video(Video): Video data model object
            language_code(unicode): A language code.
            file_format(unicode): Transcript file format.
            content(InMemoryUploadedFile): Transcript content.
            provider(unicode): Transcript provider.
        """
        video_transcript = cls(video=video, language_code=language_code, file_format=file_format, provider=provider)
        try:
            video_transcript.save_transcript(content, file_format)
        except Exception:
            # Log with context, then re-raise so callers can handle the failure.
            logger.exception(
                '[VAL] Transcript save failed to storage for video_id "%s" language code "%s"',
                video.edx_video_id,
                language_code
            )
            raise
        return video_transcript
    @classmethod
    def create_or_update(cls, video, language_code, metadata, file_data=None):
        """
        Create or update Transcript object.
        Arguments:
            video (Video): Video for which transcript is going to be saved.
            language_code (str): language code for (to be created/updated) transcript
            metadata (dict): A dict containing (to be overwritten) properties;
                only 'language_code', 'file_format', and 'provider' are applied
                as attributes, plus optional 'file_name'.
            file_data (InMemoryUploadedFile): File data to be saved
        Returns:
            Returns a tuple of (video_transcript, created).
        """
        try:
            video_transcript = cls.objects.get(video=video, language_code=language_code)
            retrieved = True
        except cls.DoesNotExist:
            video_transcript = cls(video=video, language_code=language_code)
            retrieved = False
        # Whitelist which metadata keys may overwrite model attributes.
        for prop, value in metadata.items():
            if prop in ['language_code', 'file_format', 'provider']:
                setattr(video_transcript, prop, value)
        transcript_name = metadata.get('file_name')
        try:
            video_transcript.save_transcript(file_data, video_transcript.file_format, file_name=transcript_name)
        except Exception:
            logger.exception(
                '[VAL] Transcript save failed to storage for video_id "%s" language code "%s"',
                video.edx_video_id,
                language_code
            )
            raise
        return video_transcript, not retrieved
    def url(self):
        """
        Returns language transcript url for a particular language.
        """
        storage = get_video_transcript_storage()
        return storage.url(self.transcript.name)
    def __str__(self):
        return f'{self.language_code} Transcript for {self.video.edx_video_id}'
class Cielo24Turnaround:
    """
    Turnaround options offered by the Cielo24 transcription service.
    """
    STANDARD = 'STANDARD'
    PRIORITY = 'PRIORITY'
    # (stored value, human-readable label) pairs for Django choices.
    CHOICES = (
        (STANDARD, 'Standard, 48h'),
        (PRIORITY, 'Priority, 24h'),
    )
class Cielo24Fidelity:
    """
    Fidelity (accuracy) tiers offered by the Cielo24 transcription service.
    """
    MECHANICAL = 'MECHANICAL'
    PREMIUM = 'PREMIUM'
    PROFESSIONAL = 'PROFESSIONAL'
    # (stored value, human-readable label) pairs for Django choices.
    CHOICES = (
        (MECHANICAL, 'Mechanical, 75% Accuracy'),
        (PREMIUM, 'Premium, 95% Accuracy'),
        (PROFESSIONAL, 'Professional, 99% Accuracy'),
    )
class ThreePlayTurnaround:
    """
    Turnaround options offered by the 3PlayMedia transcription service.
    """
    EXTENDED = 'extended'
    STANDARD = 'standard'
    EXPEDITED = 'expedited'
    RUSH = 'rush'
    SAME_DAY = 'same_day'
    TWO_HOUR = 'two_hour'
    # (stored value, human-readable label) pairs for Django choices.
    CHOICES = (
        (EXTENDED, '10-Day/Extended'),
        (STANDARD, '4-Day/Standard'),
        (EXPEDITED, '2-Day/Expedited'),
        (RUSH, '24 hour/Rush'),
        (SAME_DAY, 'Same Day'),
        (TWO_HOUR, '2 Hour'),
    )
class TranscriptPreference(TimeStampedModel):
    """
    Third Party Transcript Preferences for a Course.

    One row per course; records which provider to use and the
    provider-specific turnaround/fidelity options.
    """
    course_id = models.CharField(verbose_name='Course ID', max_length=255, unique=True)
    provider = models.CharField(
        verbose_name='Provider',
        max_length=20,
        choices=TranscriptProviderType.CHOICES,
    )
    cielo24_fidelity = models.CharField(
        verbose_name='Cielo24 Fidelity',
        max_length=20,
        choices=Cielo24Fidelity.CHOICES,
        null=True,
        blank=True,
    )
    cielo24_turnaround = models.CharField(
        verbose_name='Cielo24 Turnaround',
        max_length=20,
        choices=Cielo24Turnaround.CHOICES,
        null=True,
        blank=True,
    )
    three_play_turnaround = models.CharField(
        verbose_name='3PlayMedia Turnaround',
        max_length=20,
        choices=ThreePlayTurnaround.CHOICES,
        null=True,
        blank=True,
    )
    # FIX: use the callable `list` rather than a literal `[]` default — a
    # mutable default instance is shared across all model instances (Django
    # system check fields.W8012 warns about exactly this).
    preferred_languages = ListField(verbose_name='Preferred Languages', max_items=50, default=list, blank=True)
    video_source_language = models.CharField(
        verbose_name='Video Source Language',
        max_length=50,
        null=True,
        blank=True,
        help_text='This specifies the speech language of a Video.'
    )
    def __str__(self):
        return f'{self.course_id} - {self.provider}'
class ThirdPartyTranscriptCredentialsState(TimeStampedModel):
    """
    State of transcript credentials for a course organization
    """
    class Meta:
        unique_together = ('org', 'provider')
    # Organization short code (e.g. "edX").
    org = models.CharField(verbose_name='Course Organization', max_length=32)
    provider = models.CharField(
        verbose_name='Transcript Provider',
        max_length=20,
        choices=TranscriptProviderType.CHOICES,
    )
    # Whether credentials exist for this (org, provider) pair.
    has_creds = models.BooleanField(default=False, help_text='Transcript credentials state')
    @classmethod
    def update_or_create(cls, org, provider, has_creds):
        """
        Update or create credentials state.

        Returns:
            (instance, created) tuple from Django's update_or_create.
        """
        instance, created = cls.objects.update_or_create(
            org=org,
            provider=provider,
            defaults={'has_creds': has_creds},
        )
        return instance, created
    def __str__(self):
        """
        Returns unicode representation of provider credentials state for an organization.
        NOTE: Message will look like below:
        edX has Cielo24 credentials
        edX doesn't have 3PlayMedia credentials
        """
        return '{org} {state} {provider} credentials'.format(
            org=self.org, provider=self.provider, state='has' if self.has_creds else "doesn't have"
        )
@receiver(models.signals.post_save, sender=Video)
def video_status_update_callback(sender, **kwargs):  # pylint: disable=unused-argument
    """
    Log creation or status change whenever a Video instance is saved.
    """
    video = kwargs['instance']
    was_created = kwargs['created']
    if was_created:
        logger.info('VAL: Video created with id [%s] and status [%s]', video.edx_video_id, video.status)
    else:
        logger.info('VAL: Status changed to [%s] for video [%s]', video.status, video.edx_video_id)
|
edx/edx-val
|
edxval/models.py
|
Python
|
agpl-3.0
| 22,828
|
"""Test that we're meeting delicious API specifications"""
# Need to create a new renderer that wraps the jsonp renderer and adds these
# headers to all responses. Then the api needs to be adjusted to use this new
# renderer type vs jsonp.
import logging
import json
import transaction
import unittest
from pyramid import testing
from bookie.models import DBSession
from bookie.tests import BOOKIE_TEST_INI
from bookie.tests import empty_db
from bookie.tests import factory
from datetime import datetime
GOOGLE_HASH = u'aa2239c17609b2'  # hash id of the http://google.com bookmark fixture
BMARKUS_HASH = u'c5c21717c99797'  # hash id of the http://bmark.us bookmark fixture
LOG = logging.getLogger(__name__)
API_KEY = None  # populated in BookieAPITest.setUp with the admin user's api key
class BookieAPITest(unittest.TestCase):
"""Test the Bookie API"""
def setUp(self):
from pyramid.paster import get_app
app = get_app(BOOKIE_TEST_INI, 'bookie')
from webtest import TestApp
self.testapp = TestApp(app)
testing.setUp()
global API_KEY
res = DBSession.execute(
"SELECT api_key FROM users WHERE username = 'admin'").fetchone()
API_KEY = res['api_key']
def tearDown(self):
"""We need to empty the bmarks table on each run"""
testing.tearDown()
empty_db()
def _check_cors_headers(self, res):
""" Make sure that the request has proper CORS headers."""
self.assertEqual(res.headers['access-control-allow-origin'], '*')
self.assertEqual(
res.headers['access-control-allow-headers'], 'X-Requested-With')
    def _get_good_request(self, content=False, second_bmark=False):
        """Return the basics for a good add bookmark request

        Arguments:
            content (bool): include readable fulltext in the bookmark(s)
            second_bmark (bool): also add a second (bmark.us) bookmark

        Commits the transaction and synchronously reindexes fulltext before
        returning the response of the last POST.
        """
        session = DBSession()
        # the main bookmark, added second to prove popular will sort correctly
        prms = {
            'url': u'http://google.com',
            'description': u'This is my google desc',
            'extended': u'And some extended notes about it in full form',
            'tags': u'python search',
            'api_key': API_KEY,
            'username': u'admin',
            'inserted_by': u'chrome_ext',
        }
        # if we want to test the readable fulltext side we want to make sure we
        # pass content into the new bookmark
        if content:
            prms['content'] = u"<p>There's some content in here dude</p>"
        # rself.assertEqualparams = urllib.urlencode(prms)
        res = self.testapp.post(
            '/api/v1/admin/bmark?',
            content_type='application/json',
            params=json.dumps(prms),
        )
        if second_bmark:
            prms = {
                'url': u'http://bmark.us',
                'description': u'Bookie',
                'extended': u'Exteded notes',
                'tags': u'bookmarks',
                'api_key': API_KEY,
                'username': u'admin',
                'inserted_by': u'chrome_ext',
            }
            # if we want to test the readable fulltext side we want to make
            # sure we pass content into the new bookmark
            prms['content'] = u"<h1>Second bookmark man</h1>"
            # rself.assertEqualparams = urllib.urlencode(prms)
            res = self.testapp.post(
                '/api/v1/admin/bmark?',
                content_type='application/json',
                params=json.dumps(prms)
            )
        session.flush()
        transaction.commit()
        # Run the celery task for indexing this bookmark.
        from bookie.bcelery import tasks
        tasks.reindex_fulltext_allbookmarks(sync=True)
        return res
def _setup_user_bookmark_count(self):
"""Fake user bookmark counts are inserted into the database"""
test_date_1 = datetime(2013, 11, 25)
stat1 = factory.make_user_bookmark_count(username=u'admin',
data=20,
tstamp=test_date_1)
test_date_2 = datetime(2013, 11, 15)
stat2 = factory.make_user_bookmark_count(username=u'admin',
data=30,
tstamp=test_date_2)
test_date_3 = datetime(2013, 12, 28)
stat3 = factory.make_user_bookmark_count(username=u'admin',
data=15,
tstamp=test_date_3)
transaction.commit()
return [stat1, stat2, stat3]
def test_add_bookmark(self):
"""We should be able to add a new bookmark to the system"""
# we need to know what the current admin's api key is so we can try to
# add
res = DBSession.execute(
"SELECT api_key FROM users WHERE username = 'admin'").fetchone()
key = res['api_key']
test_bmark = {
'url': u'http://bmark.us',
'description': u'Bookie',
'extended': u'Extended notes',
'tags': u'bookmarks',
'api_key': key,
}
res = self.testapp.post('/api/v1/admin/bmark',
params=test_bmark,
status=200)
self.assertTrue(
'"location":' in res.body,
"Should have a location result: " + res.body)
self.assertTrue(
'description": "Bookie"' in res.body,
"Should have Bookie in description: " + res.body)
self._check_cors_headers(res)
def test_add_bookmark_empty_body(self):
"""When missing a POST body we get an error response."""
res = DBSession.execute(
"SELECT api_key FROM users WHERE username = 'admin'").fetchone()
key = res['api_key']
res = self.testapp.post(
str('/api/v1/admin/bmark?api_key={0}'.format(key)),
params={},
status=400)
data = json.loads(res.body)
self.assertTrue('error' in data)
self.assertEqual(data['error'], 'Bad Request: No url provided')
def test_add_bookmark_missing_url_in_JSON(self):
"""When missing the url in the JSON POST we get an error response."""
res = DBSession.execute(
"SELECT api_key FROM users WHERE username = 'admin'").fetchone()
key = res['api_key']
params = {
'description': u'This is my test desc',
}
res = self.testapp.post(
str('/api/v1/admin/bmark?api_key={0}'.format(key)),
content_type='application/json',
params=json.dumps(params),
status=400)
data = json.loads(res.body)
self.assertTrue('error' in data)
self.assertEqual(data['error'], 'Bad Request: No url provided')
    def test_bookmark_fetch(self):
        """Test that we can get a bookmark and it's details"""
        self._get_good_request(content=True)
        res = self.testapp.get('/api/v1/admin/bmark/{0}?api_key={1}'.format(
            GOOGLE_HASH,
            API_KEY),
            status=200)
        # make sure we can decode the body
        bmark = json.loads(res.body)['bmark']
        self.assertEqual(
            GOOGLE_HASH,
            bmark[u'hash_id'],
            "The hash_id should match: " + str(bmark[u'hash_id']))
        self.assertTrue(
            u'tags' in bmark,
            "We should have a list of tags in the bmark returned")
        self.assertTrue(
            bmark[u'tags'][0][u'name'] in [u'python', u'search'],
            "Tag should be either python or search:" +
            str(bmark[u'tags'][0][u'name']))
        # Without with_content the readable fulltext must be omitted.
        self.assertTrue(
            u'readable' not in bmark,
            "We should not have readable content")
        self.assertEqual(
            u'python search', bmark[u'tag_str'],
            "tag_str should be populated: " + str(dict(bmark)))
        # to get readable content we need to pass the flag with_content
        res = self.testapp.get(
            '/api/v1/admin/bmark/{0}?api_key={1}&with_content=true'.format(
                GOOGLE_HASH,
                API_KEY),
            status=200)
        # make sure we can decode the body
        bmark = json.loads(res.body)['bmark']
        self.assertTrue(
            u'readable' in bmark,
            "We should have readable content")
        self.assertTrue(
            'dude' in bmark['readable']['content'],
            "We should have 'dude' in our content: " +
            bmark['readable']['content'])
        self._check_cors_headers(res)
def test_bookmark_fetch_fail(self):
"""Verify we get a failed response when wrong bookmark"""
self._get_good_request()
# test that we get a 404
res = self.testapp.get(
'/api/v1/admin/bmark/{0}?api_key={1}'.format(BMARKUS_HASH,
API_KEY),
status=404)
self._check_cors_headers(res)
    def test_bookmark_diff_user(self):
        """Verify that anon users can access the bookmark"""
        self._get_good_request()
        # Anonymous fetch (no api key) is expected to succeed with a 200;
        # the earlier "404" comment was a copy-paste leftover.
        res = self.testapp.get(
            '/api/v1/admin/bmark/{0}'.format(GOOGLE_HASH),
            status=200)
        self._check_cors_headers(res)
def test_bookmark_diff_user_authed(self):
"""Verify an auth'd user can fetch another's bookmark"""
self._get_good_request()
# test that we get a 404
res = self.testapp.get(
'/api/v1/admin/bmark/{0}'.format(GOOGLE_HASH, 'invalid'),
status=200)
self._check_cors_headers(res)
    def test_bookmark_remove(self):
        """A delete call should remove the bookmark from the system"""
        # Seed two bookmarks so we can prove only one gets removed.
        self._get_good_request(content=True, second_bmark=True)
        # now let's delete the google bookmark
        res = self.testapp.delete(
            '/api/v1/admin/bmark/{0}?api_key={1}'.format(
                GOOGLE_HASH,
                API_KEY),
            status=200)
        self.assertTrue(
            'message": "done"' in res.body,
            "Should have a message of done: " + res.body)
        # we're going to cheat like mad, use the sync call to get the hash_ids
        # of bookmarks in the system and verify that only the bmark.us hash_id
        # is in the response body
        res = self.testapp.get('/api/v1/admin/extension/sync',
                               params={'api_key': API_KEY},
                               status=200)
        self.assertTrue(
            GOOGLE_HASH not in res.body,
            "Should not have the google hash: " + res.body)
        self.assertTrue(
            BMARKUS_HASH in res.body,
            "Should have the bmark.us hash: " + res.body)
        self._check_cors_headers(res)
    def test_bookmark_recent_user(self):
        """Test that we can get list of bookmarks with details"""
        self._get_good_request(content=True)
        res = self.testapp.get('/api/v1/admin/bmarks?api_key=' + API_KEY,
                               status=200)
        # make sure we can decode the body
        bmark = json.loads(res.body)['bmarks'][0]
        self.assertEqual(
            GOOGLE_HASH,
            bmark[u'hash_id'],
            "The hash_id should match: " + str(bmark[u'hash_id']))
        self.assertTrue(
            u'tags' in bmark,
            "We should have a list of tags in the bmark returned")
        self.assertTrue(
            bmark[u'tags'][0][u'name'] in [u'python', u'search'],
            "Tag should be either python or search:" +
            str(bmark[u'tags'][0][u'name']))
        # Second fetch exercises the with_content flag; only CORS headers
        # are checked on it (see @todo below).
        res = self.testapp.get(
            '/api/v1/admin/bmarks?with_content=true&api_key=' + API_KEY,
            status=200)
        self._check_cors_headers(res)
        # make sure we can decode the body
        # @todo this is out because of the issue noted in the code. We'll
        # clean this up at some point.
        # bmark = json.loads(res.body)['bmarks'][0]
        # self.assertTrue('here dude' in bmark[u'readable']['content'],
        #                 "There should be content: " + str(bmark))
    def test_bookmark_recent(self):
        """Test that we can get list of bookmarks with details"""
        self._get_good_request(content=True)
        res = self.testapp.get('/api/v1/bmarks?api_key=' + API_KEY,
                               status=200)
        # make sure we can decode the body
        bmark = json.loads(res.body)['bmarks'][0]
        self.assertEqual(
            GOOGLE_HASH,
            bmark[u'hash_id'],
            "The hash_id should match: " + str(bmark[u'hash_id']))
        self.assertTrue(
            u'tags' in bmark,
            "We should have a list of tags in the bmark returned")
        self.assertTrue(
            bmark[u'tags'][0][u'name'] in [u'python', u'search'],
            "Tag should be either python or search:" +
            str(bmark[u'tags'][0][u'name']))
        # NOTE(review): this second request hits the admin endpoint although
        # the test targets the non-admin list above -- looks copy-pasted from
        # test_bookmark_recent_user; confirm whether /api/v1/bmarks was meant.
        res = self.testapp.get(
            '/api/v1/admin/bmarks?with_content=true&api_key=' + API_KEY,
            status=200)
        self._check_cors_headers(res)
        # make sure we can decode the body
        # @todo this is out because of the issue noted in the code. We'll
        # clean this up at some point.
        # bmark = json.loads(res.body)['bmarks'][0]
        # self.assertTrue('here dude' in bmark[u'readable']['content'],
        #                 "There should be content: " + str(bmark))
def test_bookmark_sync(self):
"""Test that we can get the sync list from the server"""
self._get_good_request(content=True, second_bmark=True)
# test that we only get one resultback
res = self.testapp.get('/api/v1/admin/extension/sync',
params={'api_key': API_KEY},
status=200)
self.assertEqual(
res.status, "200 OK",
msg='Get status is 200, ' + res.status)
self.assertTrue(
GOOGLE_HASH in res.body,
"The google hash id should be in the json: " + res.body)
self.assertTrue(
BMARKUS_HASH in res.body,
"The bmark.us hash id should be in the json: " + res.body)
self._check_cors_headers(res)
def test_search_api(self):
"""Test that we can get list of bookmarks ordered by clicks"""
self._get_good_request(content=True, second_bmark=True)
res = self.testapp.get('/api/v1/bmarks/search/google', status=200)
# make sure we can decode the body
bmark_list = json.loads(res.body)
results = bmark_list['search_results']
self.assertEqual(
len(results),
1,
"We should have one result coming back: {0}".format(len(results)))
bmark = results[0]
self.assertEqual(
GOOGLE_HASH,
bmark[u'hash_id'],
"The hash_id {0} should match: {1} ".format(
str(GOOGLE_HASH),
str(bmark[u'hash_id'])))
self.assertTrue(
'clicks' in bmark,
"The clicks field should be in there")
self._check_cors_headers(res)
def test_search_api_fail(self):
"""Test that request to an out of bound page returns error message"""
self._get_good_request(content=True, second_bmark=False)
res = self.testapp.get(
'/api/v1/bmarks/search/google?page=10',
status=404
)
# make sure we can decode the body
bmark_list = json.loads(res.body)
self.assertTrue(
'error' in bmark_list,
"The error field should be in there")
self.assertEqual(
bmark_list['error'],
"Bad Request: Page number out of bound",
"We should have the error message: {0}".format(bmark_list['error'])
)
self._check_cors_headers(res)
def test_bookmark_tag_complete(self):
"""Test we can complete tags in the system
By default we should have tags for python, search, bookmarks
"""
self._get_good_request(second_bmark=True)
res = self.testapp.get(
'/api/v1/admin/tags/complete',
params={
'tag': 'py',
'api_key': API_KEY},
status=200)
self.assertTrue(
'python' in res.body,
"Should have python as a tag completion: " + res.body)
# we shouldn't get python as an option if we supply bookmarks as the
# current tag. No bookmarks have both bookmarks & python as tags
res = self.testapp.get(
'/api/v1/admin/tags/complete',
params={
'tag': u'py',
'current': u'bookmarks',
'api_key': API_KEY
},
status=200)
self.assertTrue(
'python' not in res.body,
"Should not have python as a tag completion: " + res.body)
self._check_cors_headers(res)
def test_start_defined_end(self):
"""Test getting a user's bookmark count over a period of time when
only start_date is defined and end_date is None"""
test_dates = self._setup_user_bookmark_count()
res = self.testapp.get(u'/api/v1/admin/stats/bmarkcount',
params={u'api_key': API_KEY,
u'start_date': u'2013-11-16'},
status=200)
data = json.loads(res.body)
count = data['count'][0]
self.assertEqual(
count['attrib'], test_dates[0][0])
self.assertEqual(
count['data'], test_dates[0][1])
self.assertEqual(
count['tstamp'], str(test_dates[0][2]))
# Test start_date and end_date.
self.assertEqual(
data['start_date'], u'2013-11-16 00:00:00')
self.assertEqual(
data['end_date'], u'2013-12-16 00:00:00')
def test_start_defined_end_defined(self):
"""Test getting a user's bookmark count over a period of time when both
start_date and end_date are defined"""
test_dates = self._setup_user_bookmark_count()
res = self.testapp.get(u'/api/v1/admin/stats/bmarkcount',
params={u'api_key': API_KEY,
u'start_date': u'2013-11-14',
u'end_date': u'2013-11-16'},
status=200)
data = json.loads(res.body)
count = data['count'][0]
self.assertEqual(
count['attrib'], test_dates[1][0])
self.assertEqual(
count['data'], test_dates[1][1])
self.assertEqual(
count['tstamp'], str(test_dates[1][2]))
# Test start_date and end_date.
self.assertEqual(
data['start_date'], u'2013-11-14 00:00:00')
self.assertEqual(
data['end_date'], u'2013-11-16 00:00:00')
def test_start_end_defined(self):
"""Test getting a user's bookmark count over a period of time when
start_date is None and end_date is defined"""
test_dates = self._setup_user_bookmark_count()
res = self.testapp.get(u'/api/v1/admin/stats/bmarkcount',
params={u'api_key': API_KEY,
u'end_date': u'2013-12-29'},
status=200)
data = json.loads(res.body)
count = data['count'][0]
self.assertEqual(
count['attrib'], test_dates[2][0])
self.assertEqual(
count['data'], test_dates[2][1])
self.assertEqual(
count['tstamp'], str(test_dates[2][2]))
# Test start_date and end_date.
self.assertEqual(
data['start_date'], u'2013-11-29 00:00:00')
self.assertEqual(
data['end_date'], u'2013-12-29 00:00:00')
def test_start_of_month(self):
"""Test getting a user's bookmark count when start_date is the
first day of the month"""
test_dates = self._setup_user_bookmark_count()
res = self.testapp.get(u'/api/v1/admin/stats/bmarkcount',
params={u'api_key': API_KEY,
u'start_date': u'2013-11-1'},
status=200)
data = json.loads(res.body)
count = data['count']
self.assertEqual(
count[0]['attrib'], test_dates[1][0])
self.assertEqual(
count[0]['data'], test_dates[1][1])
self.assertEqual(
count[0]['tstamp'], str(test_dates[1][2]))
self.assertEqual(
count[1]['attrib'], test_dates[0][0])
self.assertEqual(
count[1]['data'], test_dates[0][1])
self.assertEqual(
count[1]['tstamp'], str(test_dates[0][2]))
# Test start_date and end_date.
self.assertEqual(
data['start_date'], u'2013-11-01 00:00:00')
self.assertEqual(
data['end_date'], u'2013-11-30 00:00:00')
def user_bookmark_count_authorization(self):
"""If no API_KEY is present, it is unauthorized request"""
self.testapp.get(u'/api/v1/admin/stats/bmarkcount',
status=403)
def test_account_information(self):
"""Test getting a user's account information"""
res = self.testapp.get(u'/api/v1/admin/account?api_key=' + API_KEY,
status=200)
# make sure we can decode the body
user = json.loads(res.body)
self.assertEqual(
user['username'], 'admin',
"Should have a username of admin {0}".format(user))
self.assertTrue(
'password' not in user,
'Should not have a field password {0}'.format(user))
self.assertTrue(
'_password' not in user,
'Should not have a field password {0}'.format(user))
self.assertTrue(
'api_key' not in user,
'Should not have a field password {0}'.format(user))
self._check_cors_headers(res)
def test_account_update(self):
"""Test updating a user's account information"""
params = {
'name': u'Test Admin'
}
res = self.testapp.post(
str(u"/api/v1/admin/account?api_key=" + str(API_KEY)),
content_type='application/json',
params=json.dumps(params),
status=200)
# make sure we can decode the body
user = json.loads(res.body)
self.assertEqual(
user['username'], 'admin',
"Should have a username of admin {0}".format(user))
self.assertEqual(
user['name'], 'Test Admin',
"Should have a new name of Test Admin {0}".format(user))
self.assertTrue(
'password' not in user,
"Should not have a field password {0}".format(user))
self.assertTrue(
'_password' not in user,
"Should not have a field password {0}".format(user))
self.assertTrue(
'api_key' not in user,
"Should not have a field password {0}".format(user))
self._check_cors_headers(res)
def test_account_apikey(self):
"""Fetching a user's api key"""
res = self.testapp.get(
u"/api/v1/admin/api_key?api_key=" + str(API_KEY),
status=200)
# make sure we can decode the body
user = json.loads(res.body)
self.assertEqual(
user['username'], 'admin',
"Should have a username of admin {0}".format(user))
self.assertTrue(
'api_key' in user,
"Should have an api key in there: {0}".format(user))
self._check_cors_headers(res)
def test_account_password_change(self):
"""Change a user's password"""
params = {
'current_password': 'admin',
'new_password': 'not_testing'
}
res = self.testapp.post(
"/api/v1/admin/password?api_key=" + str(API_KEY),
params=params,
status=200)
# make sure we can decode the body
user = json.loads(res.body)
self.assertEqual(
user['username'], 'admin',
"Should have a username of admin {0}".format(user))
self.assertTrue(
'message' in user,
"Should have a message key in there: {0}".format(user))
params = {
'current_password': 'not_testing',
'new_password': 'admin'
}
res = self.testapp.post(
"/api/v1/admin/password?api_key=" + str(API_KEY),
params=params,
status=200)
self._check_cors_headers(res)
def test_account_password_failure(self):
    """Change a user's password, in bad ways"""
    creds = {
        'current_password': 'test',
        'new_password': 'not_testing',
    }
    # the wrong current password must be rejected with a 403
    res = self.testapp.post(
        "/api/v1/admin/password?api_key=" + str(API_KEY),
        params=creds,
        status=403)
    # make sure we can decode the body
    user = json.loads(res.body)
    self.assertEqual(
        user['username'], 'admin',
        "Should have a username of admin {0}".format(user))
    self.assertTrue(
        'error' in user,
        "Should have a error key in there: {0}".format(user))
    self.assertTrue(
        'typo' in user['error'],
        "Should have a error key in there: {0}".format(user))
    self._check_cors_headers(res)
def test_api_ping_success(self):
    """We should be able to ping and make sure we auth'd and are ok"""
    url = '/api/v1/admin/ping?api_key=' + API_KEY
    res = self.testapp.get(url, status=200)
    ping = json.loads(res.body)
    self.assertTrue(ping['success'])
    self._check_cors_headers(res)
def test_api_ping_failed_invalid_api(self):
    """If you don't supply a valid api key, you've failed the ping"""
    # Log a user in first so that the only thing under test is the
    # api key validation, not the session itself.
    user_data = {'login': u'admin',
                 'password': u'admin',
                 'form.submitted': u'true'}
    # Assuming user logged in without errors.  The response object was
    # previously bound to an unused local; only the side effect matters.
    self.testapp.post('/login', params=user_data)
    # Check for authentication of api key
    res = self.testapp.get('/api/v1/admin/ping?api_key=' + 'invalid',
                           status=200)
    ping = json.loads(res.body)
    self.assertFalse(ping['success'])
    self.assertEqual(ping['message'], "API key is invalid.")
    self._check_cors_headers(res)
def test_api_ping_failed_nouser(self):
    """If you don't supply a username, you've failed the ping"""
    res = self.testapp.get('/api/v1/ping?api_key=' + API_KEY,
                           status=200)
    ping = json.loads(res.body)
    # assertFalse is the idiomatic form of assertTrue(not ...)
    self.assertFalse(ping['success'])
    self.assertEqual(ping['message'], "Missing username in your api url.")
    self._check_cors_headers(res)
def test_api_ping_failed_missing_api(self):
    """If you don't supply a username, you've failed the ping"""
    res = self.testapp.get('/ping?api_key=' + API_KEY,
                           status=200)
    ping = json.loads(res.body)
    # assertFalse is the idiomatic form of assertTrue(not ...)
    self.assertFalse(ping['success'])
    self.assertEqual(ping['message'], "The API url should be /api/v1")
    self._check_cors_headers(res)
|
ojengwa/Bookie
|
bookie/tests/test_api/test_base_api.py
|
Python
|
agpl-3.0
| 27,660
|
#####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
import json
from autobahn.wamp.types import PublishOptions
from crossbar._compat import native_string
from crossbar.adapter.rest.common import _CommonResource
__all__ = ('WebhookResource',)
class WebhookResource(_CommonResource):
    """
    A HTTP WebHook to WAMP-Publisher bridge.
    """
    # Webhook bodies are forwarded verbatim; they are not parsed as JSON here.
    decode_as_json = False

    def _process(self, request, event):
        """Publish the incoming webhook request to the configured WAMP topic.

        :param request: the Twisted Web request being bridged.
        :param event: the raw (undecoded) request body.
        :returns: a Deferred firing once the publish is acknowledged
            (202 response) or has failed (500 response).
        """
        # The topic we're going to send to
        topic = self._options["topic"]

        message = {}
        # Header names and values arrive as bytes; normalize to native strings.
        message[u"headers"] = {
            native_string(x): [native_string(z) for z in y]
            for x, y in request.requestHeaders.getAllRawHeaders()}
        message[u"body"] = event

        # Ask for an acknowledgement so HTTP success/failure can be reported.
        publish_options = PublishOptions(acknowledge=True)

        def _succ(result):
            # Publish acknowledged: answer 202 Accepted.
            return self._complete_request(
                request, 202, b"OK", reason="Successfully sent webhook from {ip} to {topic}",
                topic=topic, ip=request.getClientIP(), log_category="AR201")

        def _err(result):
            # Publish failed: answer 500 and log the failure.
            return self._fail_request(
                request, 500, "Unable to send webhook from {ip} to {topic}",
                topic=topic, ip=request.getClientIP(), body=b"NOT OK",
                log_failure=result, log_category="AR457")

        # The dumps/loads round-trip forces the payload into plain
        # JSON-serializable structures before it is handed to WAMP.
        d = self._session.publish(topic,
                                  json.loads(json.dumps(message)),
                                  options=publish_options)
        d.addCallback(_succ)
        d.addErrback(_err)
        return d
|
NinjaMSP/crossbar
|
crossbar/adapter/rest/webhook.py
|
Python
|
agpl-3.0
| 2,998
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cairo(AutotoolsPackage):
    """Cairo is a 2D graphics library with support for multiple output
    devices."""

    homepage = "http://cairographics.org"
    url = "http://cairographics.org/releases/cairo-1.14.8.tar.xz"

    version('1.14.8', 'c6f7b99986f93c9df78653c3e6a3b5043f65145e')
    version('1.14.0', '53cf589b983412ea7f78feee2e1ba9cea6e3ebae')

    variant('X', default=False, description="Build with X11 support")

    # X11 backends are only pulled in when the +X variant is active.
    depends_on('libx11', when='+X')
    depends_on('libxext', when='+X')
    depends_on('libxrender', when='+X')
    depends_on('libxcb', when='+X')
    depends_on('python', when='+X', type='build')
    depends_on("libpng")
    depends_on("glib")
    depends_on("pixman")
    depends_on("freetype")
    depends_on("pkg-config", type="build")
    depends_on("fontconfig@2.10.91:")  # Require newer version of fontconfig.

    def configure_args(self):
        """Return the flags passed to ./configure, toggling the X11
        (xlib/xcb) backends according to the +X variant."""
        args = ["--disable-trace",  # can cause problems with libiberty
                "--enable-tee"]

        if '+X' in self.spec:
            args.extend(["--enable-xlib", "--enable-xcb"])
        else:
            args.extend(["--disable-xlib", "--disable-xcb"])

        return args
|
skosukhin/spack
|
var/spack/repos/builtin/packages/cairo/package.py
|
Python
|
lgpl-2.1
| 2,432
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
def cross_detect():
    """Guess the UPC++ cross-compile target for the current machine.

    On Cray platforms the target is chosen from the available job
    launcher (srun -> cray-aries-slurm, aprun -> cray-aries-alps);
    everywhere else 'none' is returned.

    NOTE(review): relies on the name ``spack`` being in scope via the
    ``from spack import *`` wildcard at the top of the file -- confirm.
    """
    if spack.architecture.platform().name == 'cray':
        if which('srun'):
            return 'cray-aries-slurm'
        if which('aprun'):
            return 'cray-aries-alps'
    return 'none'
class Upcxx(Package):
    """UPC++ is a C++ library that supports Partitioned Global Address Space
    (PGAS) programming, and is designed to interoperate smoothly and
    efficiently with MPI, OpenMP, CUDA and AMTs. It leverages GASNet-EX to
    deliver low-overhead, fine-grained communication, including Remote Memory
    Access (RMA) and Remote Procedure Call (RPC)."""

    homepage = "https://upcxx.lbl.gov"
    maintainers = ['bonachea']

    git = 'https://bonachea@bitbucket.org/berkeleylab/upcxx.git'
    version('develop', branch='develop')
    version('master', branch='master')

    version('2020.11.0', sha256='f6f212760a485a9f346ca11bb4751e7095bbe748b8e5b2389ff9238e9e321317',
            url='https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-2020.11.0-memory_kinds_prototype.tar.gz')
    version('2020.10.0', sha256='623e074b512bf8cad770a04040272e1cc660d2749760398b311f9bcc9d381a37',
            preferred=True)
    version('2020.3.0', sha256='01be35bef4c0cfd24e9b3d50c88866521b9cac3ad4cbb5b1fc97aea55078810f')
    version('2019.9.0', sha256='7d67ccbeeefb59de9f403acc719f52127a30801a2c2b9774a1df03f850f8f1d4')
    version('2019.3.2', sha256='dcb0b337c05a0feb2ed5386f5da6c60342412b49cab10f282f461e74411018ad')

    variant('mpi', default=False,
            description='Enables MPI-based spawners and mpi-conduit')
    variant('cuda', default=False,
            description='Builds a CUDA-enabled version of UPC++')
    variant('cross', default=cross_detect(),
            description="UPC++ cross-compile target (autodetect by default)")

    conflicts('cross=none', when='platform=cray',
              msg='cross=none is unacceptable on Cray.' +
              'Please specify an appropriate "cross" value')

    depends_on('mpi', when='+mpi')
    depends_on('cuda', when='+cuda')

    # Require Python2 2.7.5+ up to v2019.9.0
    depends_on('python@2.7.5:2.999',
               type=("build", "run"), when='@:2019.9.0')
    # v2020.3.0 and later also permit Python3
    depends_on('python@2.7.5:', type=("build", "run"), when='@2020.3.0:')

    # All flags should be passed to the build-env in autoconf-like vars
    flag_handler = env_flags

    def url_for_version(self, version):
        """Return the tarball URL for ``version``.

        Releases after 2019.3.2 use the plain tarball name; older ones
        ship as ``-offline`` bundles.
        """
        if version > Version('2019.3.2'):
            url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-{0}.tar.gz"
        else:
            url = "https://bitbucket.org/berkeleylab/upcxx/downloads/upcxx-{0}-offline.tar.gz"
        return url.format(version)

    def setup_build_environment(self, env):
        """Export the environment variables read by the UPC++ build."""
        # ensure we use the correct python
        env.set('UPCXX_PYTHON', self.spec['python'].command.path)

        if '+mpi' in self.spec:
            env.set('GASNET_CONFIGURE_ARGS',
                    '--enable-mpi --enable-mpi-compat')
        else:
            env.set('GASNET_CONFIGURE_ARGS', '--without-mpicc')

        if 'cross=none' not in self.spec:
            env.set('CROSS', self.spec.variants['cross'].value)

        if '+cuda' in self.spec:
            env.set('UPCXX_CUDA', '1')
            env.set('UPCXX_CUDA_NVCC', self.spec['cuda'].prefix.bin.nvcc)

    def setup_run_environment(self, env):
        """Export run-time variables pointing at this installation."""
        # ensure we use the correct python
        env.set('UPCXX_PYTHON', self.spec['python'].command.path)

        env.set('UPCXX_INSTALL', self.prefix)
        env.set('UPCXX', self.prefix.bin.upcxx)
        if 'platform=cray' in self.spec:
            env.set('UPCXX_GASNET_CONDUIT', 'aries')
            env.set('UPCXX_NETWORK', 'aries')

    def setup_dependent_package(self, module, dep_spec):
        # Expose the upcxx compiler wrapper to dependent packages.
        dep_spec.upcxx = self.prefix.bin.upcxx

    def setup_dependent_build_environment(self, env, dependent_spec):
        """Export variables for packages that build against UPC++."""
        env.set('UPCXX_INSTALL', self.prefix)
        env.set('UPCXX', self.prefix.bin.upcxx)
        if 'platform=cray' in self.spec:
            env.set('UPCXX_GASNET_CONDUIT', 'aries')
            env.set('UPCXX_NETWORK', 'aries')

    def install(self, spec, prefix):
        """Build and install UPC++.

        Versions up to 2019.9.0 use the legacy ``./install`` script;
        later versions use a configure / make / make install flow.
        """
        # UPC++ follows autoconf naming convention for LDLIBS, which is 'LIBS'
        if (env.get('LDLIBS')):
            env['LIBS'] = env['LDLIBS']

        if spec.version <= Version('2019.9.0'):
            # Legacy installer path.
            env['CC'] = self.compiler.cc
            if '+mpi' in self.spec:
                if 'platform=cray' in self.spec:
                    env['GASNET_CONFIGURE_ARGS'] += \
                        " --with-mpicc=" + self.compiler.cc
                else:
                    env['CXX'] = spec['mpi'].mpicxx
            else:
                env['CXX'] = self.compiler.cxx
            installsh = Executable("./install")
            installsh(prefix)
        else:
            if 'platform=cray' in self.spec:
                # Spack loads the cray-libsci module incorrectly on ALCF theta,
                # breaking the Cray compiler wrappers
                # cray-libsci is irrelevant to our build, so disable it
                for var in ['PE_PKGCONFIG_PRODUCTS', 'PE_PKGCONFIG_LIBS']:
                    env[var] = ":".join(
                        filter(lambda x: "libsci" not in x.lower(),
                               env[var].split(":")))
                # Undo spack compiler wrappers:
                # the C/C++ compilers must work post-install
                # hack above no longer works after the fix to UPC++ issue #287
                real_cc = join_path(env['CRAYPE_DIR'], 'bin', 'cc')
                real_cxx = join_path(env['CRAYPE_DIR'], 'bin', 'CC')
                # workaround a bug in the UPC++ installer: (issue #346)
                env['GASNET_CONFIGURE_ARGS'] += \
                    " --with-cc=" + real_cc + " --with-cxx=" + real_cxx
                if '+mpi' in self.spec:
                    env['GASNET_CONFIGURE_ARGS'] += " --with-mpicc=" + real_cc
            else:
                real_cc = self.compiler.cc
                real_cxx = self.compiler.cxx
                if '+mpi' in self.spec:
                    real_cxx = spec['mpi'].mpicxx

            env['CC'] = real_cc
            env['CXX'] = real_cxx

            installsh = Executable("./configure")
            installsh('--prefix=' + prefix)

            make()
            make('install')

    @run_after('install')
    @on_package_attributes(run_tests=True)
    def test_install(self):
        """Smoke-test the installed UPC++ when --test was requested."""
        if self.spec.version <= Version('2019.9.0'):
            spack.main.send_warning_to_tty(
                "run_tests not supported in UPC++ version " +
                self.spec.version.string + " -- SKIPPED")
        else:
            # enable testing of unofficial conduits (mpi)
            test_networks = 'NETWORKS=$(CONDUITS)'
            # build hello world against installed tree in all configurations
            make('test_install', test_networks)
            make('tests-clean')  # cleanup
            # build all tests for all networks in debug mode
            make('tests', test_networks)
            if 'cross=none' in self.spec:
                make('run-tests', 'NETWORKS=smp')  # runs tests for smp backend
                make('tests-clean')  # cleanup
|
iulian787/spack
|
var/spack/repos/builtin/packages/upcxx/package.py
|
Python
|
lgpl-2.1
| 7,459
|
############################################################################
#
# Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
# Contact: http://www.qt-project.org/legal
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and Digia. For licensing terms and
# conditions see http://qt.digia.com/licensing. For further information
# use the contact form at http://qt.digia.com/contact-us.
#
# GNU Lesser General Public License Usage
# Alternatively, this file may be used under the terms of the GNU Lesser
# General Public License version 2.1 as published by the Free Software
# Foundation and appearing in the file LICENSE.LGPL included in the
# packaging of this file. Please review the following information to
# ensure the GNU Lesser General Public License version 2.1 requirements
# will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
#
# In addition, as a special exception, Digia gives you certain additional
# rights. These rights are described in the Digia Qt LGPL Exception
# version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
#
#############################################################################
import platform
from dumper import *
def qdump__QAtomicInt(d, value):
    """Display a QAtomicInt as its plain integer payload ('_q_value')."""
    payload = int(value["_q_value"])
    d.putValue(payload)
    d.putNumChild(0)
def qdump__QBasicAtomicInt(d, value):
    """Display a QBasicAtomicInt as its plain integer payload ('_q_value')."""
    payload = int(value["_q_value"])
    d.putValue(payload)
    d.putNumChild(0)
def qdump__QAtomicPointer(d, value):
    # Show the wrapped pointer; expandable (to the pointee) only when non-null.
    d.putType(value.type)
    q = value["_q_value"]
    p = toInteger(q)
    d.putValue("@0x%x" % p)
    d.putNumChild(1 if p else 0)
    if d.isExpanded():
        with Children(d):
            d.putSubItem("_q_value", q.dereference())
def qform__QByteArray():
    # Display formats offered for QByteArray values in the locals view;
    # indices of this list are the format codes used in qdump__QByteArray.
    return "Inline,As Latin1 in Separate Window,As UTF-8 in Separate Window"
def qdump__QByteArray(d, value):
    """Display a QByteArray inline or in a separate editor window.

    Format indices correspond to the options from qform__QByteArray().
    """
    d.putByteArrayValue(value)
    data, size, alloc = d.byteArrayData(value)
    d.putNumChild(size)
    format = d.currentItemFormat()
    if format == 1:
        # "Inline": no separate display window.
        d.putDisplay(StopDisplay)
    elif format == 2:
        # Separate window, Latin-1.
        d.putField("editformat", DisplayLatin1String)
        d.putField("editvalue", d.encodeByteArray(value))
    elif format == 3:
        # Separate window, UTF-8.
        d.putField("editformat", DisplayUtf8String)
        d.putField("editvalue", d.encodeByteArray(value))
    if d.isExpanded():
        d.putArrayData(d.charType(), data, size)
def qdump__QByteArrayData(d, value):
    # Decode a raw QByteArrayData header: show the payload as Latin-1 and
    # expose size/alloc as children.
    data, size, alloc = d.byteArrayDataHelper(d.addressOf(value))
    d.putValue(d.readMemory(data, size), Hex2EncodedLatin1)
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            d.putIntItem("size", size)
            d.putIntItem("alloc", alloc)
def qdump__QChar(d, value):
    """Display a QChar via its UTF-16 code unit ('ucs' member)."""
    code_unit = int(value["ucs"])
    d.putValue(code_unit)
    d.putNumChild(0)
def qform__QAbstractItemModel():
    # Display formats offered for QAbstractItemModel values.
    return "Normal,Enhanced"
def qdump__QAbstractItemModel(d, value):
    """Display a QAbstractItemModel; the 'Enhanced' format evaluates
    rowCount/columnCount in the inferior and lists all top-level cells."""
    format = d.currentItemFormat()
    if format == 1:
        # 'Normal' format: just the plain member view.
        d.putPlainChildren(value)
        return
    #format == 2:
    # Create a default-constructed QModelIndex on the stack.
    try:
        ri = d.makeValue(d.qtNamespace() + "QModelIndex", "-1, -1, 0, 0")
        this_ = d.makeExpression(value)
        ri_ = d.makeExpression(ri)
        rowCount = int(d.parseAndEvaluate("%s.rowCount(%s)" % (this_, ri_)))
        columnCount = int(d.parseAndEvaluate("%s.columnCount(%s)" % (this_, ri_)))
    except:
        # Inferior calls can fail (e.g. core dump debugging); fall back.
        d.putPlainChildren(value)
        return
    d.putValue("%d x %d" % (rowCount, columnCount))
    d.putNumChild(rowCount * columnCount)
    if d.isExpanded():
        with Children(d, numChild=rowCount * columnCount, childType=ri.type):
            i = 0
            for row in xrange(rowCount):
                for column in xrange(columnCount):
                    with SubItem(d, i):
                        d.putName("[%s, %s]" % (row, column))
                        mi = d.parseAndEvaluate("%s.index(%d,%d,%s)"
                                                % (this_, row, column, ri_))
                        #warn("MI: %s " % mi)
                        #name = "[%d,%d]" % (row, column)
                        #d.putValue("%s" % mi)
                        d.putItem(mi)
                        i = i + 1
                        #warn("MI: %s " % mi)
                        #d.putName("[%d,%d]" % (row, column))
                        #d.putValue("%s" % mi)
                        #d.putNumChild(0)
                        #d.putType(mi.type)
    #gdb.execute("call free($ri)")
def qform__QModelIndex():
    # Display formats offered for QModelIndex values.
    return "Normal,Enhanced"
def qdump__QModelIndex(d, value):
    """Display a QModelIndex; the 'Enhanced' format evaluates the model's
    data/rowCount/columnCount in the inferior to show cell contents."""
    format = d.currentItemFormat()
    if format == 1:
        # 'Normal' format: just the plain member view.
        d.putPlainChildren(value)
        return
    r = value["r"]
    c = value["c"]
    try:
        # Member was renamed across Qt versions: 'p' vs 'i'.
        p = value["p"]
    except:
        p = value["i"]
    m = value["m"]
    if d.isNull(m) or r < 0 or c < 0:
        d.putValue("(invalid)")
        d.putPlainChildren(value)
        return
    mm = m.dereference()
    mm = mm.cast(mm.type.unqualified())
    ns = d.qtNamespace()
    try:
        mi = d.makeValue(ns + "QModelIndex", "%s,%s,%s,%s" % (r, c, p, m))
        mm_ = d.makeExpression(mm)
        mi_ = d.makeExpression(mi)
        rowCount = int(d.parseAndEvaluate("%s.rowCount(%s)" % (mm_, mi_)))
        columnCount = int(d.parseAndEvaluate("%s.columnCount(%s)" % (mm_, mi_)))
    except:
        # Inferior calls can fail; fall back to plain members.
        d.putPlainChildren(value)
        return
    try:
        # Access DisplayRole as value
        val = d.parseAndEvaluate("%s.data(%s, 0)" % (mm_, mi_))
        v = val["d"]["data"]["ptr"]
        d.putStringValue(d.makeValue(ns + 'QString', v))
    except:
        d.putValue("")
    d.putNumChild(rowCount * columnCount)
    if d.isExpanded():
        with Children(d):
            i = 0
            for row in xrange(rowCount):
                for column in xrange(columnCount):
                    with UnnamedSubItem(d, i):
                        d.putName("[%s, %s]" % (row, column))
                        mi2 = d.parseAndEvaluate("%s.index(%d,%d,%s)"
                                                 % (mm_, row, column, mi_))
                        d.putItem(mi2)
                        i = i + 1
            d.putFields(value)
    #d.putCallItem("parent", val, "parent")
    #with SubItem(d, "model"):
    #    d.putValue(m)
    #    d.putType(ns + "QAbstractItemModel*")
    #    d.putNumChild(1)
    #gdb.execute("call free($mi)")
def qdump__QDate(d, value):
    """Display a QDate via its Julian day number ('jd' member);
    jd == 0 marks an invalid date."""
    jd = int(value["jd"])
    if jd:
        d.putValue(jd, JulianDate)
        d.putNumChild(1)
        if d.isExpanded():
            # FIXME: This improperly uses complex return values.
            with Children(d):
                d.putCallItem("toString", value, "toString",
                              d.enumExpression("DateFormat", "TextDate"))
                d.putCallItem("(ISO)", value, "toString",
                              d.enumExpression("DateFormat", "ISODate"))
                d.putCallItem("(SystemLocale)", value, "toString",
                              d.enumExpression("DateFormat", "SystemLocaleDate"))
                d.putCallItem("(Locale)", value, "toString",
                              d.enumExpression("DateFormat", "LocaleDate"))
                d.putFields(value)
    else:
        d.putValue("(invalid)")
        d.putNumChild(0)
def qdump__QTime(d, value):
    """Display a QTime via its milliseconds-since-midnight ('mds' member);
    a negative value marks an invalid time."""
    mds = int(value["mds"])
    if mds >= 0:
        d.putValue(mds, MillisecondsSinceMidnight)
        d.putNumChild(1)
        if d.isExpanded():
            # FIXME: This improperly uses complex return values.
            with Children(d):
                d.putCallItem("toString", value, "toString",
                              d.enumExpression("DateFormat", "TextDate"))
                d.putCallItem("(ISO)", value, "toString",
                              d.enumExpression("DateFormat", "ISODate"))
                d.putCallItem("(SystemLocale)", value, "toString",
                              d.enumExpression("DateFormat", "SystemLocaleDate"))
                d.putCallItem("(Locale)", value, "toString",
                              d.enumExpression("DateFormat", "LocaleDate"))
                d.putFields(value)
    else:
        d.putValue("(invalid)")
        d.putNumChild(0)
def qdump__QTimeZone(d, value):
    """Display a QTimeZone as the id byte array from its private data;
    a null d-pointer marks a null time zone."""
    base = d.extractPointer(value)
    if base == 0:
        d.putValue("(null)")
        d.putNumChild(0)
        return
    idAddr = base + 2 * d.ptrSize()  # [QSharedData] + [vptr]
    d.putByteArrayValueByAddress(idAddr)
    d.putPlainChildren(value["d"])
def qdump__QDateTime(d, value):
    """Display a QDateTime by decoding QDateTimePrivate from raw memory.

    The hard-coded field offsets track the private layout across Qt
    versions and word sizes; they break if the layout changes upstream.
    """
    qtVersion = d.qtVersion()
    isValid = False
    # This relies on the Qt4/Qt5 internal structure layout:
    # {sharedref(4), ...
    base = d.extractPointer(value)
    is32bit = d.is32bit()
    if qtVersion >= 0x050200:
        # Qt >= 5.2 layout; offsets depend on target OS and word size.
        if d.isWindowsTarget():
            msecsOffset = 8
            specOffset = 16
            offsetFromUtcOffset = 20
            timeZoneOffset = 24
            statusOffset = 28 if is32bit else 32
        else:
            msecsOffset = 4 if is32bit else 8
            specOffset = 12 if is32bit else 16
            offsetFromUtcOffset = 16 if is32bit else 20
            timeZoneOffset = 20 if is32bit else 24
            statusOffset = 24 if is32bit else 32
        status = d.extractInt(base + statusOffset)
        if int(status & 0x0c == 0x0c):  # ValidDate and ValidTime
            isValid = True
            msecs = d.extractInt64(base + msecsOffset)
            spec = d.extractInt(base + specOffset)
            offset = d.extractInt(base + offsetFromUtcOffset)
            tzp = d.extractPointer(base + timeZoneOffset)
            if tzp == 0:
                tz = ""
            else:
                idBase = tzp + 2 * d.ptrSize()  # [QSharedData] + [vptr]
                tz = d.encodeByteArrayHelper(d.extractPointer(idBase))
            d.putValue("%s/%s/%s/%s/%s" % (msecs, spec, offset, tz, status),
                       DateTimeInternal)
    else:
        # This relies on the Qt4/Qt5 internal structure layout:
        # {sharedref(4), date(8), time(4+x)}
        # QDateTimePrivate:
        # - QAtomicInt ref; (padded on 64 bit)
        # - [QDate date;]
        # - - uint jd in Qt 4, qint64 in Qt 5.0 and Qt 5.1; padded on 64 bit
        # - [QTime time;]
        # - - uint mds;
        # - Spec spec;
        dateSize = 8 if qtVersion >= 0x050000 else 4  # Qt5: qint64, Qt4 uint
        # 4 byte padding after 4 byte QAtomicInt if we are on 64 bit and QDate is 64 bit
        refPlusPadding = 8 if qtVersion >= 0x050000 and not d.is32bit() else 4
        dateBase = base + refPlusPadding
        timeBase = dateBase + dateSize
        mds = d.extractInt(timeBase)
        isValid = mds > 0
        if isValid:
            jd = d.extractInt(dateBase)
            d.putValue("%s/%s" % (jd, mds), JulianDateAndMillisecondsSinceMidnight)
    if isValid:
        d.putNumChild(1)
        if d.isExpanded():
            # FIXME: This improperly uses complex return values.
            with Children(d):
                d.putCallItem("toTime_t", value, "toTime_t")
                d.putCallItem("toString", value, "toString",
                              d.enumExpression("DateFormat", "TextDate"))
                d.putCallItem("(ISO)", value, "toString",
                              d.enumExpression("DateFormat", "ISODate"))
                d.putCallItem("(SystemLocale)", value, "toString",
                              d.enumExpression("DateFormat", "SystemLocaleDate"))
                d.putCallItem("(Locale)", value, "toString",
                              d.enumExpression("DateFormat", "LocaleDate"))
                d.putCallItem("toUTC", value, "toTimeSpec",
                              d.enumExpression("TimeSpec", "UTC"))
                d.putCallItem("toLocalTime", value, "toTimeSpec",
                              d.enumExpression("TimeSpec", "LocalTime"))
                d.putFields(value)
    else:
        d.putValue("(invalid)")
        d.putNumChild(0)
def qdump__QDir(d, value):
    """Display a QDir by reading QDirPrivate fields at hard-coded offsets.

    Two layouts are handled ('case' 0/1), split by upstream Qt change
    9fc0965 which reordered the private members.
    """
    d.putNumChild(1)
    privAddress = d.extractPointer(value)
    bit32 = d.is32bit()
    qt5 = d.qtVersion() >= 0x050000
    # Change 9fc0965 reorders members again.
    # bool fileListsInitialized;\n"
    # QStringList files;\n"
    # QFileInfoList fileInfos;\n"
    # QStringList nameFilters;\n"
    # QDir::SortFlags sort;\n"
    # QDir::Filters filters;\n"
    # Before 9fc0965:
    # QDirPrivate:
    # QAtomicInt ref
    # QStringList nameFilters;
    # QDir::SortFlags sort;
    # QDir::Filters filters;
    # // qt3support:
    # QChar filterSepChar;
    # bool matchAllDirs;
    # // end qt3support
    # QScopedPointer<QAbstractFileEngine> fileEngine;
    # bool fileListsInitialized;
    # QStringList files;
    # QFileInfoList fileInfos;
    # QFileSystemEntry dirEntry;
    # QFileSystemEntry absoluteDirEntry;
    # QFileSystemEntry:
    # QString m_filePath
    # QByteArray m_nativeFilePath
    # qint16 m_lastSeparator
    # qint16 m_firstDotInFileName
    # qint16 m_lastDotInFileName
    # + 2 byte padding
    fileSystemEntrySize = 2 * d.ptrSize() + 8
    if d.qtVersion() < 0x050200:
        case = 0
    elif d.qtVersion() >= 0x050300:
        case = 1
    else:
        # Try to distinguish bool vs QStringList at the first item
        # after the (padded) refcount. If it looks like a bool assume
        # this is after 9fc0965. This is not safe.
        firstValue = d.extractInt(privAddress + d.ptrSize())
        case = 1 if firstValue == 0 or firstValue == 1 else 0
    if case == 1:
        if bit32:
            filesOffset = 4
            fileInfosOffset = 8
            dirEntryOffset = 0x20
            absoluteDirEntryOffset = 0x30
        else:
            filesOffset = 0x08
            fileInfosOffset = 0x10
            dirEntryOffset = 0x30
            absoluteDirEntryOffset = 0x48
    else:
        # Assume this is before 9fc0965.
        qt3support = d.isQt3Support()
        qt3SupportAddition = d.ptrSize() if qt3support else 0
        filesOffset = (24 if bit32 else 40) + qt3SupportAddition
        fileInfosOffset = filesOffset + d.ptrSize()
        dirEntryOffset = fileInfosOffset + d.ptrSize()
        absoluteDirEntryOffset = dirEntryOffset + fileSystemEntrySize
    d.putStringValueByAddress(privAddress + dirEntryOffset)
    if d.isExpanded():
        with Children(d):
            ns = d.qtNamespace()
            d.call(value, "count")  # Fill cache.
            #d.putCallItem("absolutePath", value, "absolutePath")
            #d.putCallItem("canonicalPath", value, "canonicalPath")
            with SubItem(d, "absolutePath"):
                typ = d.lookupType(ns + "QString")
                d.putItem(d.createValue(privAddress + absoluteDirEntryOffset, typ))
            with SubItem(d, "entryInfoList"):
                typ = d.lookupType(ns + "QList<" + ns + "QFileInfo>")
                d.putItem(d.createValue(privAddress + fileInfosOffset, typ))
            with SubItem(d, "entryList"):
                typ = d.lookupType(ns + "QStringList")
                d.putItem(d.createValue(privAddress + filesOffset, typ))
            d.putFields(value)
def qdump__QFile(d, value):
    """Display a QFile as its file name, read from QFilePrivate at a
    version/OS/word-size dependent offset."""
    # 9fc0965 changes the layout of the private structure
    qtVersion = d.qtVersion()
    is32bit = d.is32bit()
    if qtVersion > 0x050200:
        if d.isWindowsTarget():
            offset = 180 if is32bit else 272
        else:
            offset = 176 if is32bit else 272
    elif qtVersion >= 0x050000:
        offset = 176 if is32bit else 280
    else:
        if d.isWindowsTarget():
            offset = 144 if is32bit else 232
        else:
            offset = 140 if is32bit else 232
    privAddress = d.extractPointer(d.addressOf(value) + d.ptrSize())
    fileNameAddress = privAddress + offset
    d.putStringValueByAddress(fileNameAddress)
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            d.putCallItem("exists", value, "exists")
            d.putFields(value)
def qdump__QFileInfo(d, value):
    """Display a QFileInfo as its file path; when expanded, call the
    accessor methods in the inferior for the derived properties."""
    privAddress = d.extractPointer(value)
    #bit32 = d.is32bit()
    #qt5 = d.qtVersion() >= 0x050000
    #try:
    #    d.putStringValue(value["d_ptr"]["d"].dereference()["fileNames"][3])
    #except:
    #    d.putPlainChildren(value)
    #    return
    # The file path QString sits one pointer into the private data.
    filePathAddress = privAddress + d.ptrSize()
    d.putStringValueByAddress(filePathAddress)
    d.putNumChild(1)
    if d.isExpanded():
        ns = d.qtNamespace()
        with Children(d, childType=d.lookupType(ns + "QString")):
            d.putCallItem("absolutePath", value, "absolutePath")
            d.putCallItem("absoluteFilePath", value, "absoluteFilePath")
            d.putCallItem("canonicalPath", value, "canonicalPath")
            d.putCallItem("canonicalFilePath", value, "canonicalFilePath")
            d.putCallItem("completeBaseName", value, "completeBaseName")
            d.putCallItem("completeSuffix", value, "completeSuffix")
            d.putCallItem("baseName", value, "baseName")
            if False:
                #ifdef Q_OS_MACX
                d.putCallItem("isBundle", value, "isBundle")
                d.putCallItem("bundleName", value, "bundleName")
            d.putCallItem("fileName", value, "fileName")
            d.putCallItem("filePath", value, "filePath")
            # Crashes gdb (archer-tromey-python, at dad6b53fe)
            #d.putCallItem("group", value, "group")
            #d.putCallItem("owner", value, "owner")
            d.putCallItem("path", value, "path")
            d.putCallItem("groupid", value, "groupId")
            d.putCallItem("ownerid", value, "ownerId")
            #QFile::Permissions permissions () const
            perms = d.call(value, "permissions")
            if perms is None:
                d.putValue("<not available>")
            else:
                # Decode the permission bit mask into boolean children.
                with SubItem(d, "permissions"):
                    d.putEmptyValue()
                    d.putType(ns + "QFile::Permissions")
                    d.putNumChild(10)
                    if d.isExpanded():
                        with Children(d, 10):
                            perms = perms['i']
                            d.putBoolItem("ReadOwner", perms & 0x4000)
                            d.putBoolItem("WriteOwner", perms & 0x2000)
                            d.putBoolItem("ExeOwner", perms & 0x1000)
                            d.putBoolItem("ReadUser", perms & 0x0400)
                            d.putBoolItem("WriteUser", perms & 0x0200)
                            d.putBoolItem("ExeUser", perms & 0x0100)
                            d.putBoolItem("ReadGroup", perms & 0x0040)
                            d.putBoolItem("WriteGroup", perms & 0x0020)
                            d.putBoolItem("ExeGroup", perms & 0x0010)
                            d.putBoolItem("ReadOther", perms & 0x0004)
                            d.putBoolItem("WriteOther", perms & 0x0002)
                            d.putBoolItem("ExeOther", perms & 0x0001)
            #QDir absoluteDir () const
            #QDir dir () const
            d.putCallItem("caching", value, "caching")
            d.putCallItem("exists", value, "exists")
            d.putCallItem("isAbsolute", value, "isAbsolute")
            d.putCallItem("isDir", value, "isDir")
            d.putCallItem("isExecutable", value, "isExecutable")
            d.putCallItem("isFile", value, "isFile")
            d.putCallItem("isHidden", value, "isHidden")
            d.putCallItem("isReadable", value, "isReadable")
            d.putCallItem("isRelative", value, "isRelative")
            d.putCallItem("isRoot", value, "isRoot")
            d.putCallItem("isSymLink", value, "isSymLink")
            d.putCallItem("isWritable", value, "isWritable")
            d.putCallItem("created", value, "created")
            d.putCallItem("lastModified", value, "lastModified")
            d.putCallItem("lastRead", value, "lastRead")
            d.putFields(value)
def qdump__QFixed(d, value):
    # QFixed stores a fixed-point number scaled by 64; show both the raw
    # fraction and the resulting float value.
    v = int(value["val"])
    d.putValue("%s/64 = %s" % (v, v/64.0))
    d.putNumChild(0)
def qform__QFiniteStack():
    # Offer the standard array display formats.
    return arrayForms()
def qdump__QFiniteStack(d, value):
    """Display a QFiniteStack as an array of its elements."""
    alloc = int(value["_alloc"])
    size = int(value["_size"])
    # Sanity-check the header before touching the element array.
    d.check(0 <= size and size <= alloc and alloc <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    d.putNumChild(size)
    if d.isExpanded():
        innerType = d.templateArgument(value.type, 0)
        d.putPlotData(innerType, value["_array"], size)
# Stock gdb 7.2 seems to have a problem with types here:
#
# echo -e "namespace N { struct S { enum E { zero, one, two }; }; }\n"\
# "int main() { N::S::E x = N::S::one;\n return x; }" >> main.cpp
# g++ -g main.cpp
# gdb-7.2 -ex 'file a.out' -ex 'b main' -ex 'run' -ex 'step' \
# -ex 'ptype N::S::E' -ex 'python print gdb.lookup_type("N::S::E")' -ex 'q'
# gdb-7.1 -ex 'file a.out' -ex 'b main' -ex 'run' -ex 'step' \
# -ex 'ptype N::S::E' -ex 'python print gdb.lookup_type("N::S::E")' -ex 'q'
# gdb-cvs -ex 'file a.out' -ex 'b main' -ex 'run' -ex 'step' \
# -ex 'ptype N::S::E' -ex 'python print gdb.lookup_type("N::S::E")' -ex 'q'
#
# gives as of 2010-11-02
#
# type = enum N::S::E {N::S::zero, N::S::one, N::S::two} \n
# Traceback (most recent call last): File "<string>", line 1,
# in <module> RuntimeError: No type named N::S::E.
# type = enum N::S::E {N::S::zero, N::S::one, N::S::two} \n N::S::E
# type = enum N::S::E {N::S::zero, N::S::one, N::S::two} \n N::S::E
#
# i.e. there's something broken in stock 7.2 that was ok in 7.1 and is ok later.
def qdump__QFlags(d, value):
    """Display a QFlags value as 'enum-cast (raw int)' when the enum
    template argument can be resolved, else just the raw integer."""
    i = value["i"]
    try:
        enumType = d.templateArgument(value.type.unqualified(), 0)
        d.putValue("%s (%s)" % (i.cast(enumType), i))
    except:
        # Type lookup may fail (see the gdb 7.2 note above this function).
        d.putValue("%s" % i)
    d.putNumChild(0)
def qform__QHash():
    # Offer the standard associative-container display formats.
    return mapForms()
def qdump__QHash(d, value):
    """Display a QHash by walking its internal bucket chain.

    The nested helpers mirror QHashData::firstNode/nextNode from the Qt
    sources; they close over nodeTypePtr/dataTypePtr defined below.
    """

    def hashDataFirstNode(dPtr, numBuckets):
        # Find the first non-sentinel bucket; falls back to the sentinel
        # pointer ePtr when the hash is empty.
        ePtr = dPtr.cast(nodeTypePtr)
        bucket = dPtr.dereference()["buckets"]
        # NOTE(review): this countdown re-decrements n inside the body, so
        # only about half the buckets are visited -- looks suspicious,
        # compare with QHashData::firstNode upstream before relying on it.
        for n in xrange(numBuckets - 1, -1, -1):
            n = n - 1
            if n < 0:
                break
            if d.pointerValue(bucket.dereference()) != d.pointerValue(ePtr):
                return bucket.dereference()
            bucket = bucket + 1
        return ePtr;

    def hashDataNextNode(nodePtr, numBuckets):
        # Follow the in-bucket chain; at a chain end, scan the following
        # buckets for the next occupied one.
        nextPtr = nodePtr.dereference()["next"]
        if d.pointerValue(nextPtr.dereference()["next"]):
            return nextPtr
        start = (int(nodePtr.dereference()["h"]) % numBuckets) + 1
        dPtr = nextPtr.cast(dataTypePtr)
        bucket = dPtr.dereference()["buckets"] + start
        for n in xrange(numBuckets - start):
            if d.pointerValue(bucket.dereference()) != d.pointerValue(nextPtr):
                return bucket.dereference()
            bucket += 1
        return nextPtr

    keyType = d.templateArgument(value.type, 0)
    valueType = d.templateArgument(value.type, 1)

    anon = d.childAt(value, 0)
    d_ptr = anon["d"]
    e_ptr = anon["e"]
    size = int(d_ptr["size"])

    dataTypePtr = d_ptr.type  # QHashData * = { Node *fakeNext, Node *buckets }
    nodeTypePtr = d_ptr.dereference()["fakeNext"].type  # QHashData::Node

    d.check(0 <= size and size <= 100 * 1000 * 1000)
    d.checkRef(d_ptr["ref"])

    d.putItemCount(size)
    d.putNumChild(size)
    if d.isExpanded():
        numBuckets = int(d_ptr.dereference()["numBuckets"])
        innerType = e_ptr.dereference().type
        isCompact = d.isMapCompact(keyType, valueType)
        childType = valueType if isCompact else innerType
        with Children(d, size, maxNumChild=1000, childType=childType):
            for i in d.childRange():
                if i == 0:
                    node = hashDataFirstNode(d_ptr, numBuckets)
                else:
                    node = hashDataNextNode(node, numBuckets)
                it = node.dereference().cast(innerType)
                with SubItem(d, i):
                    if isCompact:
                        key = it["key"]
                        if not key:
                            # LLDB can't access directly since it's in anonymous union
                            # for Qt4 optimized int keytype
                            key = it[1]["key"]
                        d.putMapName(key)
                        d.putItem(it["value"])
                        d.putType(valueType)
                    else:
                        d.putItem(it)
def qdump__QHashNode(d, value):
    """Dump a QHashNode<Key, T> as an expandable (key, value) pair."""
    keyMember = value["key"]
    if not keyMember:
        # LLDB can't access the key directly since it sits in an anonymous
        # union for the Qt4 optimized int keytype.
        keyMember = value[1]["key"]
    valueMember = value["value"]
    d.putEmptyValue()
    d.putNumChild(2)
    if d.isExpanded():
        with Children(d):
            d.putSubItem("key", keyMember)
            d.putSubItem("value", valueMember)
def qHashIteratorHelper(d, value):
    """Dump a QHash iterator by showing the key/value of its current node.

    The node type is derived textually from the iterator's type name
    (QHash<...>::iterator -> QHashNode<...>).
    """
    typeName = str(value.type)
    hashTypeName = typeName[0:typeName.rfind("::")]
    hashType = d.lookupType(hashTypeName)
    keyType = d.templateArgument(hashType, 0)
    valueType = d.templateArgument(hashType, 1)
    d.putNumChild(1)
    d.putEmptyValue()
    if d.isExpanded():
        with Children(d):
            # We need something like QHash<int, float>::iterator
            # -> QHashNode<int, float> with 'proper' spacing,
            # as space changes confuse LLDB.
            innerTypeName = hashTypeName.replace("QHash", "QHashNode", 1)
            node = value["i"].cast(d.lookupType(innerTypeName).pointer()).dereference()
            key = node["key"]
            if not key:
                # LLDB can't access directly since it's in anonymous union
                # for Qt4 optimized int keytype
                key = node[1]["key"]
            d.putSubItem("key", key)
            d.putSubItem("value", node["value"])
def qdump__QHash__const_iterator(d, value):
    """A QHash::const_iterator dumps like the node it points to."""
    qHashIteratorHelper(d, value)
def qdump__QHash__iterator(d, value):
    """A QHash::iterator dumps like the node it points to."""
    qHashIteratorHelper(d, value)
def qdump__QHostAddress(d, value):
    """Dump a QHostAddress without calling debuggee code.

    Reads QHostAddressPrivate directly through byte offsets, which differ
    between Qt 4 and Qt 5 (layouts below). If the address was already
    parsed into its string form, that string is shown; otherwise the raw
    IPv4/IPv6 payload is formatted by hand.
    """
    # QHostAddress in Qt 4.5 (byte offsets)
    #   quint32 a        (0)
    #   Q_IPV6ADDR a6    (4)
    #   protocol         (20)
    #   QString ipString (24)
    #   QString scopeId  (24 + ptrSize)
    #   bool isParsed    (24 + 2 * ptrSize)
    # QHostAddress in Qt 5.0
    #   QString ipString (0)
    #   QString scopeId  (ptrSize)
    #   quint32 a        (2*ptrSize)
    #   Q_IPV6ADDR a6    (2*ptrSize + 4)
    #   protocol         (2*ptrSize + 20)
    #   bool isParsed    (2*ptrSize + 24)
    privAddress = d.extractPointer(value)
    isQt5 = d.qtVersion() >= 0x050000
    sizeofQString = d.ptrSize()
    ipStringAddress = privAddress + (0 if isQt5 else 24)
    isParsedAddress = privAddress + 24 + 2 * sizeofQString
    # value.d.d->ipString
    ipString = d.encodeStringHelper(d.extractPointer(ipStringAddress))
    if d.extractByte(isParsedAddress) and len(ipString) > 0:
        d.putValue(ipString, Hex4EncodedLittleEndian)
    else:
        # value.d.d->protocol:
        #  QAbstractSocket::IPv4Protocol = 0
        #  QAbstractSocket::IPv6Protocol = 1
        protoAddress = privAddress + 20 + (2 * sizeofQString if isQt5 else 0);
        proto = d.extractInt(protoAddress)
        if proto == 1:
            # value.d.d->a6: 16 raw bytes shown as 8 hex groups.
            a6Offset = 4 + (2 * sizeofQString if isQt5 else 0)
            data = d.readMemory(privAddress + a6Offset, 16)
            address = ':'.join("%x" % int(data[i:i+4], 16) for i in xrange(0, 32, 4))
            scopeId = privAddress + sizeofQString + (0 if isQt5 else 24)
            scopeId = d.encodeStringHelper(d.extractPointer(scopeId))
            d.putValue("%s%%%s" % (address, scopeId), IPv6AddressAndHexScopeId)
        elif proto == 0:
            # value.d.d->a: split the 32-bit int into a dotted quad.
            a = d.extractInt(privAddress + (2 * sizeofQString if isQt5 else 0))
            a, n4 = divmod(a, 256)
            a, n3 = divmod(a, 256)
            a, n2 = divmod(a, 256)
            a, n1 = divmod(a, 256)
            d.putValue("%d.%d.%d.%d" % (n1, n2, n3, n4));
        else:
            d.putValue("<unspecified>")
    d.putPlainChildren(value["d"]["d"].dereference())
def qdump__QIPv6Address(d, value):
    """Dump a QIPv6Address: 16 raw bytes rendered as 8 hex groups."""
    #warn("IPV6.VALUE: %s" % value)
    #warn("IPV6.ADDR: 0x%x" % d.addressOf(value))
    #warn("IPV6.LOADADDR: 0x%x" % value.GetLoadAddress())
    c = value["c"]
    data = d.readMemory(d.addressOf(c), 16)
    # readMemory yields 32 hex chars; group them 4 at a time.
    d.putValue(':'.join("%x" % int(data[i:i+4], 16) for i in xrange(0, 32, 4)))
    #d.putValue('xx')
    #d.putValue("0x%x - 0x%x" % (d.addressOf(value), d.addressOf(c)))
    #d.putValue("0x%x - 0x%x" % (value.GetAddress(), c.GetAddress()))
    #d.putValue("0x%x - 0x%x" % (value.GetLoadAddress(), c.GetLoadAddress()))
    d.putPlainChildren(c)
def qform__QList():
    """Report the QList display format choices offered to the user."""
    return ",".join(("Assume Direct Storage", "Assume Indirect Storage"))
def qdump__QList(d, value):
    """Dump a QList<T>.

    QList stores either the elements themselves in its pointer-sized slots
    ("internal"/direct storage) or pointers to heap-allocated elements
    (indirect storage). The user can force either interpretation via the
    display format; otherwise an 'isLarge'-style heuristic decides.
    """
    base = d.extractPointer(value)
    # QListData::Data: ref (+0), alloc/begin/end ints, then the array.
    begin = d.extractInt(base + 8)
    end = d.extractInt(base + 12)
    array = base + 16
    if d.qtVersion() < 0x50000:
        # Qt 4 has an extra pointer-sized member before the array.
        array += d.ptrSize()
    d.check(begin >= 0 and end >= 0 and end <= 1000 * 1000 * 1000)
    size = end - begin
    d.check(size >= 0)
    #d.checkRef(private["ref"])
    innerType = d.templateArgument(value.type, 0)
    d.putItemCount(size)
    d.putNumChild(size)
    if d.isExpanded():
        innerSize = innerType.sizeof
        stepSize = d.ptrSize()
        addr = array + begin * stepSize
        # The exact condition here is:
        #  QTypeInfo<T>::isLarge || QTypeInfo<T>::isStatic
        # but this data is available neither in the compiled binary nor
        # in the frontend.
        # So as first approximation only do the 'isLarge' check:
        format = d.currentItemFormat()
        if format == 1:
            isInternal = True
        elif format == 2:
            isInternal = False
        else:
            isInternal = innerSize <= stepSize and d.isMovableType(innerType)
        if isInternal:
            if innerSize == stepSize:
                p = d.createPointerValue(addr, innerType)
                d.putArrayData(innerType, p, size)
            else:
                with Children(d, size, childType=innerType):
                    for i in d.childRange():
                        p = d.createValue(addr + i * stepSize, innerType)
                        d.putSubItem(i, p)
        else:
            # Indirect storage: each slot is a pointer to the element.
            # about 0.5s / 1000 items
            with Children(d, size, maxNumChild=2000, childType=innerType):
                for i in d.childRange():
                    p = d.extractPointer(addr + i * stepSize)
                    x = d.createValue(p, innerType)
                    d.putSubItem(i, x)
def qform__QImage():
    """Report the QImage display format choices offered to the user."""
    return ",".join(("Normal", "Displayed"))
def qdump__QImage(d, value):
    """Dump a QImage as "(WxH)", with size/format children.

    In "Displayed" format the raw pixel data is streamed to the frontend
    so it can render the image inline.
    """
    # This relies on current QImage layout:
    # QImageData:
    # - QAtomicInt ref
    # - int width, height, depth, nbytes
    # - padding on 64 bit machines
    # - qreal devicePixelRatio (+20 + padding)  # Assume qreal == double, Qt 5 only
    # - QVector<QRgb> colortable (+20 + padding + gap)
    # - uchar *data (+20 + padding + gap + ptr)
    # [- uchar **jumptable jumptable with Qt 3 support]
    # - enum format (+20 + padding + gap + 2 * ptr)
    ptrSize = d.ptrSize()
    isQt5 = d.qtVersion() >= 0x050000
    offset = (3 if isQt5 else 2) * ptrSize
    base = d.extractPointer(d.addressOf(value) + offset)
    if base == 0:
        d.putValue("(invalid)")
        return
    qt3Support = d.isQt3Support()
    width = d.extractInt(base + 4)
    height = d.extractInt(base + 8)
    nbytes = d.extractInt(base + 16)
    padding = d.ptrSize() - d.intSize()
    pixelRatioSize = 8 if isQt5 else 0
    jumpTableSize = ptrSize if qt3Support else 0
    bits = d.extractPointer(base + 20 + padding + pixelRatioSize + ptrSize)
    iformat = d.extractInt(base + 20 + padding + pixelRatioSize + jumpTableSize + 2 * ptrSize)
    d.putValue("(%dx%d)" % (width, height))
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            d.putIntItem("width", width)
            d.putIntItem("height", height)
            d.putIntItem("nbytes", nbytes)
            d.putIntItem("format", iformat)
            with SubItem(d, "data"):
                d.putValue("0x%x" % bits)
                d.putNumChild(0)
                d.putType("void *")
    format = d.currentItemFormat()
    if format == 1:
        d.putDisplay(StopDisplay)
    elif format == 2:
        # This is critical for performance. Writing to an external
        # file using the following is faster when using GDB.
        #   file = tempfile.mkstemp(prefix="gdbpy_")
        #   filename = file[1].replace("\\", "\\\\")
        #   gdb.execute("dump binary memory %s %s %s" %
        #       (filename, bits, bits + nbytes))
        #   d.putDisplay(DisplayImageFile, " %d %d %d %d %s"
        #       % (width, height, nbytes, iformat, filename))
        d.putField("editformat", DisplayImageData)
        d.put('editvalue="')
        # Header: width, height, nbytes, format as 8-digit hex, then pixels.
        d.put('%08x%08x%08x%08x' % (width, height, nbytes, iformat))
        d.put(d.readMemory(bits, nbytes))
        d.put('",')
def qdump__QLinkedList(d, value):
    """Dump a QLinkedList<T> by walking its doubly-linked node chain.

    Each node starts with two pointers (next, prev) followed by the
    payload; the list head additionally carries ref and size ints after
    those two pointers.
    """
    dd = d.extractPointer(value)
    ptrSize = d.ptrSize()
    n = d.extractInt(dd + 4 + 2 * ptrSize);
    ref = d.extractInt(dd + 2 * ptrSize);
    d.check(0 <= n and n <= 100*1000*1000)
    d.check(-1 <= ref and ref <= 1000)
    d.putItemCount(n)
    d.putNumChild(n)
    if d.isExpanded():
        innerType = d.templateArgument(value.type, 0)
        with Children(d, n, maxNumChild=1000, childType=innerType):
            # First node is head->next; payload sits past the two pointers.
            pp = d.extractPointer(dd)
            for i in d.childRange():
                d.putSubItem(i, d.createValue(pp + 2 * ptrSize, innerType))
                pp = d.extractPointer(pp)
# Cached size of the debuggee's QLocale data array; see the commented-out
# index validation below (currently unused).
qqLocalesCount = None
def qdump__QLocale(d, value):
    """Dump a QLocale by calling its name() accessor in the debuggee.

    Everything past the early 'return' is intentionally dead code kept
    for reference (index validation and variant poking, see FIXME).
    """
    # Check for uninitialized 'index' variable. Retrieve size of
    # QLocale data array from variable in qlocale.cpp.
    # Default is 368 in Qt 4.8, 438 in Qt 5.0.1, the last one
    # being 'System'.
    #global qqLocalesCount
    #if qqLocalesCount is None:
    #    #try:
    #        qqLocalesCount = int(value(ns + 'locale_data_size'))
    #    #except:
    #        qqLocalesCount = 438
    #try:
    #    index = int(value["p"]["index"])
    #except:
    #    try:
    #        index = int(value["d"]["d"]["m_index"])
    #    except:
    #        index = int(value["d"]["d"]["m_data"]...)
    #d.check(index >= 0)
    #d.check(index <= qqLocalesCount)
    d.putStringValue(d.call(value, "name"))
    d.putNumChild(0)
    return
    # FIXME: Poke back for variants.
    if d.isExpanded():
        ns = d.qtNamespace()
        with Children(d, childType=d.lookupType(ns + "QChar"), childNumChild=0):
            d.putCallItem("country", value, "country")
            d.putCallItem("language", value, "language")
            d.putCallItem("measurementSystem", value, "measurementSystem")
            d.putCallItem("numberOptions", value, "numberOptions")
            d.putCallItem("timeFormat_(short)", value,
                "timeFormat", ns + "QLocale::ShortFormat")
            d.putCallItem("timeFormat_(long)", value,
                "timeFormat", ns + "QLocale::LongFormat")
            d.putCallItem("decimalPoint", value, "decimalPoint")
            d.putCallItem("exponential", value, "exponential")
            d.putCallItem("percent", value, "percent")
            d.putCallItem("zeroDigit", value, "zeroDigit")
            d.putCallItem("groupSeparator", value, "groupSeparator")
            d.putCallItem("negativeSign", value, "negativeSign")
            d.putFields(value)
def qdump__QMapNode(d, value):
    """Dump a QMapNode<Key, T> as an expandable (key, value) pair."""
    d.putEmptyValue()
    d.putNumChild(2)
    if not d.isExpanded():
        return
    with Children(d):
        d.putSubItem("key", value["key"])
        d.putSubItem("value", value["value"])
def qdumpHelper__Qt4_QMap(d, value):
    """Dump a QMap<Key, T> with the Qt 4 skiplist layout.

    Iterates the forward chain of QMapPayloadNodes and recovers each full
    QMapNode by subtracting the payload size from the chain pointer.
    """
    anon = d.childAt(value, 0)
    d_ptr = anon["d"].dereference()
    e_ptr = anon["e"].dereference()
    n = int(d_ptr["size"])
    d.check(0 <= n and n <= 100*1000*1000)
    d.checkRef(d_ptr["ref"])
    d.putItemCount(n)
    d.putNumChild(n)
    if d.isExpanded():
        if n > 10000:
            n = 10000
        keyType = d.templateArgument(value.type, 0)
        valueType = d.templateArgument(value.type, 1)
        it = e_ptr["forward"].dereference()
        # QMapPayloadNode is QMapNode except for the 'forward' member, so
        # its size is most likely the offset of the 'forward' member therein.
        # Or possibly 2 * sizeof(void *)
        # Note: Keeping the spacing in the type lookup
        # below is important for LLDB.
        # NOTE(review): the first 'needle' assignment is immediately
        # overwritten by the second one (which does NOT preserve the
        # spacing the comment above asks for) — looks like a leftover;
        # confirm against upstream before cleaning up.
        needle = str(value.type).replace("QMap", "QMapNode", 1)
        needle = d.qtNamespace() + "QMapNode<%s,%s>" % (keyType, valueType)
        nodeType = d.lookupType(needle)
        nodePointerType = nodeType.pointer()
        # symbols reports payload size at wrong size 24
        if d.isArmArchitecture() and d.isQnxTarget() and str(valueType) == 'QVariant':
            payloadSize = 28
        else:
            payloadSize = nodeType.sizeof - 2 * nodePointerType.sizeof
        with PairedChildren(d, n, useKeyAndValue=True,
                keyType=keyType, valueType=valueType, pairType=nodeType):
            for i in xrange(n):
                base = it.cast(d.charPtrType()) - payloadSize
                node = base.cast(nodePointerType).dereference()
                with SubItem(d, i):
                    #d.putField("iname", d.currentIName)
                    d.putPair(node, i)
                it = it.dereference()["forward"].dereference()
def qdumpHelper__Qt5_QMap(d, value):
    """Dump a QMap<Key, T> with the Qt 5 red-black-tree layout.

    Performs a recursive in-order traversal starting from the header
    node, stopping once 'n' (capped at 10000) items were emitted.
    """
    d_ptr = value["d"].dereference()
    n = int(d_ptr["size"])
    d.check(0 <= n and n <= 100*1000*1000)
    d.checkRef(d_ptr["ref"])
    d.putItemCount(n)
    d.putNumChild(n)
    if d.isExpanded():
        if n > 10000:
            n = 10000
        keyType = d.templateArgument(value.type, 0)
        valueType = d.templateArgument(value.type, 1)
        # Note: Keeping the spacing in the type lookup
        # below is important for LLDB.
        needle = str(d_ptr.type).replace("QMapData", "QMapNode", 1)
        nodeType = d.lookupType(needle)
        def helper(d, node, nodeType, i):
            # In-order walk: left subtree, this node, right subtree.
            # Returns the index reached so far; bails out once i >= n.
            left = node["left"]
            if not d.isNull(left):
                i = helper(d, left.dereference(), nodeType, i)
                if i >= n:
                    return i
            nodex = node.cast(nodeType)
            with SubItem(d, i):
                d.putPair(nodex, i)
            i += 1
            if i >= n:
                return i
            right = node["right"]
            if not d.isNull(right):
                i = helper(d, right.dereference(), nodeType, i)
            return i
        with PairedChildren(d, n, useKeyAndValue=True,
                keyType=keyType, valueType=valueType, pairType=nodeType):
            node = d_ptr["header"]
            helper(d, node, nodeType, 0)
def qform__QMap():
    """Report the display formats available for QMap (map forms)."""
    return mapForms()
def qdump__QMap(d, value):
    """Dispatch QMap dumping to the Qt 4 or Qt 5 layout helper."""
    if d.qtVersion() < 0x50000:
        helper = qdumpHelper__Qt4_QMap
    else:
        helper = qdumpHelper__Qt5_QMap
    helper(d, value)
def qform__QMultiMap():
    """Report the display formats available for QMultiMap (map forms)."""
    return mapForms()
def qdump__QMultiMap(d, value):
    """A QMultiMap shares QMap's layout and dumps identically."""
    qdump__QMap(d, value)
def extractCString(table, offset):
    """Read a NUL-terminated C string from 'table' starting at 'offset'.

    'table' is any indexable sequence of character codes (e.g. a chunk of
    debuggee memory). Reading stops at the first 0 byte, which is not
    included in the returned string.
    """
    chars = []
    while True:
        c = table[offset]
        if c == 0:
            break
        chars.append("%c" % c)
        offset += 1
    # join() avoids the quadratic cost of repeated string concatenation.
    return "".join(chars)
def qdump__QMetaObjectPrivate(d, value):
    """Dump selected integer counters of a QMetaObjectPrivate."""
    d.putEmptyValue()
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            # Full struct layout for reference (all ints):
            #   revision; className;
            #   classInfoCount, classInfoData;
            #   methodCount, methodData;
            #   propertyCount, propertyData;
            #   enumeratorCount, enumeratorData;
            #   constructorCount, constructorData;  // since revision 2
            #   flags;                              // since revision 3
            #   signalCount;                        // since revision 4
            # Only the counters below are shown, in this fixed order.
            for member in ("revision", "methodCount", "propertyCount",
                           "enumeratorCount", "constructorCount",
                           "flags", "signalCount"):
                d.putIntItem(member, value[member])
def qdump__QMetaObject(d, value):
    """Dump a QMetaObject: its 'd' member, property names, method table,
    and the raw integer header fields of the meta data uint array.

    Header layout (see QMetaObjectPrivate): revision; className;
    classInfoCount, classInfoData; methodCount, methodData;
    propertyCount, propertyData; enumeratorCount, enumeratorData;
    constructorCount, constructorData; flags; signalCount.
    """
    d.putEmptyValue()
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            dd = value["d"]
            d.putSubItem("d", dd)
            data = d.extractPointer(dd["data"])
            # BUG FIX: 'stringdata' was referenced below without being
            # defined, raising a NameError when the item was expanded.
            # It holds the NUL-separated identifier literals (char * in
            # Qt 4).
            # NOTE(review): in Qt 5 'stringdata' points to QByteArrayData
            # records, so readCString() on the raw pointer may not yield
            # the method name there — confirm against the Qt version used.
            stringdata = d.extractPointer(dd["stringdata"])
            propertyNames = d.staticQObjectPropertyNames(value)
            propertyIndex = 0
            for propertyName in propertyNames:
                with SubItem(d, "property_%s" % propertyIndex):
                    d.putValue(propertyName)
                propertyIndex += 1
            methodCount = d.extractInt(data + 16)
            methodData = d.extractInt(data + 20)
            for i in range(methodCount):
                with SubItem(d, "method_%s" % i):
                    # Each method record is 5 uints:
                    # name, argc, argv, type, flags.
                    x = data + (methodData + 5 * i) * 4
                    d.putValue(d.readCString(stringdata + d.extractInt(x)))
                    d.putNumChild(1)
                    if d.isExpanded():
                        with Children(d):
                            if d.isExpanded():
                                d.putIntItem("name", d.extractInt(x))
                                d.putIntItem("argc", d.extractInt(x + 4))
                                d.putIntItem("argv", d.extractInt(x + 8))
                                d.putIntItem("type", d.extractInt(x + 12))
                                d.putIntItem("flags", d.extractInt(x + 16))
            d.putSubItem("stringData", dd["stringdata"])
            d.putIntItem("revision", d.extractInt(data))
            d.putIntItem("className", d.extractInt(data + 4))
            d.putIntItem("classInfoCount", d.extractInt(data + 8))
            # BUG FIX: offset 12 is classInfoData per the header layout,
            # not a second 'className'.
            d.putIntItem("classInfoData", d.extractInt(data + 12))
            d.putIntItem("methodCount", d.extractInt(data + 16))
            d.putIntItem("methodData", d.extractInt(data + 20))
            d.putIntItem("propertyCount", d.extractInt(data + 24))
            d.putIntItem("propertyData", d.extractInt(data + 28))
            d.putIntItem("enumeratorCount", d.extractInt(data + 32))
            d.putIntItem("enumeratorData", d.extractInt(data + 36))
            d.putIntItem("constructorCount", d.extractInt(data + 40))
            d.putIntItem("constructorData", d.extractInt(data + 44))
            d.putIntItem("flags", d.extractInt(data + 48))
            d.putIntItem("signalCount", d.extractInt(data + 52))
def _qdump__QObject(d, value):
    """Dump a QObject: name, fields, parent/children, metaobject, dynamic
    properties, signal/slot connections and the currently sending object.

    Disabled variant (leading underscore); the dynamic-property branch is
    GDB-only, and the connections branch also uses the gdb module directly,
    so it works only under GDB.
    """
    d.putQObjectNameValue(value)
    ns = d.qtNamespace()
    try:
        privateTypeName = ns + "QObjectPrivate"
        privateType = d.lookupType(privateTypeName)
        staticMetaObject = value["staticMetaObject"]
    except:
        # No QObjectPrivate debug info available; show raw members.
        d.putPlainChildren(value)
        return
    #warn("SMO: %s " % staticMetaObject)
    #warn("SMO DATA: %s " % staticMetaObject["d"]["stringdata"])
    superData = staticMetaObject["d"]["superdata"]
    #warn("SUPERDATA: %s" % superData)
    #while not d.isNull(superData):
    #    superData = superData.dereference()["d"]["superdata"]
    #    warn("SUPERDATA: %s" % superData)
    if privateType is None:
        #d.putValue(d.cleanAddress(d.pointerValue(value))
        d.putPlainChildren(value)
        return
    #warn("OBJECTNAME: %s " % objectName)
    dd = value["d_ptr"]["d"]
    d_ptr = dd.cast(privateType.pointer()).dereference()
    #warn("D_PTR: %s " % d_ptr)
    mo = d_ptr["metaObject"]
    if d.isNull(mo):
        mo = staticMetaObject
    #warn("MO: %s " % mo)
    #warn("MO.D: %s " % mo["d"])
    metaData = mo["d"]["data"]
    metaStringData = mo["d"]["stringdata"]
    # This is char * in Qt 4 and ByteArrayData * in Qt 5.
    # Force it to the char * data in the Qt 5 case.
    try:
        offset = metaStringData["offset"]
        metaStringData = metaStringData.cast(d.charPtrType()) + int(offset)
    except:
        pass
    #extradata = mo["d"]["extradata"] # Capitalization!
    #warn("METADATA: %s " % metaData)
    #warn("STRINGDATA: %s " % metaStringData)
    #warn("TYPE: %s " % value.type)
    #warn("INAME: %s " % d.currentIName)
    d.putEmptyValue()
    #QSignalMapper::staticMetaObject
    #d.checkRef(d_ptr["ref"])
    d.putNumChild(4)
    if d.isExpanded():
        with Children(d):
            d.putQObjectGuts(value)
            # Local data.
            if privateTypeName != ns + "QObjectPrivate":
                if not privateType is None:
                    with SubItem(d, "data"):
                        d.putEmptyValue()
                        d.putNoType()
                        d.putPlainChildren(d_ptr, False)
            d.putFields(value)
            # Parent and children.
            if stripClassTag(str(value.type)) == ns + "QObject":
                d.putSubItem("parent", d_ptr["parent"])
                d.putSubItem("children", d_ptr["children"])
            # Metaobject.
            d.putSubItem("metaobject", mo)
            # Dynamic Properties.
            with SubItem(d, "dynamics"):
                # Prolog
                extraData = d_ptr["extraData"]   # Capitalization!
                if d.isNull(extraData):
                    dynamicPropertyCount = 0
                else:
                    extraDataType = d.lookupType(
                        ns + "QObjectPrivate::ExtraData").pointer()
                    extraData = extraData.cast(extraDataType)
                    ed = extraData.dereference()
                    names = ed["propertyNames"]
                    values = ed["propertyValues"]
                    #userData = ed["userData"]
                    namesBegin = names["d"]["begin"]
                    namesEnd = names["d"]["end"]
                    namesArray = names["d"]["array"]
                    dynamicPropertyCount = namesEnd - namesBegin
                d.putNoType()
                d.putItemCount(dynamicPropertyCount)
                d.putNumChild(dynamicPropertyCount)
                if d.isExpanded() and d.isGdb:
                    import gdb
                    # FIXME: Make this global. Don't leak.
                    variant = "'%sQVariant'" % ns
                    # Avoid malloc symbol clash with QVector
                    gdb.execute("set $d = (%s*)calloc(sizeof(%s), 1)"
                        % (variant, variant))
                    gdb.execute("set $d.d.is_shared = 0")
                    with Children(d):
                        dummyType = d.voidPtrType().pointer()
                        namesType = d.lookupType(ns + "QByteArray")
                        valuesBegin = values["d"]["begin"]
                        valuesEnd = values["d"]["end"]
                        valuesArray = values["d"]["array"]
                        valuesType = d.lookupType(ns + "QVariant")
                        p = namesArray.cast(dummyType) + namesBegin
                        q = valuesArray.cast(dummyType) + valuesBegin
                        for i in xrange(dynamicPropertyCount):
                            with SubItem(d, i):
                                pp = p.cast(namesType.pointer()).dereference();
                                d.putField("key", d.encodeByteArray(pp))
                                d.putField("keyencoded", Hex2EncodedLatin1)
                                qq = q.cast(valuesType.pointer().pointer())
                                qq = qq.dereference();
                                d.putField("addr", d.cleanAddress(qq))
                                d.putField("exp", "*(%s*)%s"
                                    % (variant, d.cleanAddress(qq)))
                                t = qdump__QVariant(d, qq)
                                # Override the "QVariant (foo)" output.
                                d.putBetterType(t)
                                p += 1
                                q += 1
            # Connections.
            with SubItem(d, "connections"):
                d.putNoType()
                connections = d_ptr["connectionLists"]
                connectionListCount = 0
                if not d.isNull(connections):
                    connectionListCount = connections["d"]["size"]
                d.putItemCount(connectionListCount, 0)
                d.putNumChild(connectionListCount)
                if d.isExpanded():
                    pp = 0
                    with Children(d):
                        vectorType = d.fieldAt(connections.type.target(), 0).type
                        innerType = d.templateArgument(vectorType, 0)
                        # Should check: innerType == ns::QObjectPrivate::ConnectionList
                        # NOTE(review): uses the gdb module without an isGdb
                        # guard — GDB-only code path.
                        p = gdb.Value(connections["p"]["array"]).cast(innerType.pointer())
                        for i in xrange(connectionListCount):
                            first = p.dereference()["first"]
                            while not d.isNull(first):
                                with SubItem(d, pp):
                                    connection = first.dereference()
                                    d.putItem(connection)
                                    d.putValue(connection["callFunction"])
                                first = first["nextConnectionList"]
                                # We need to enforce some upper limit.
                                pp += 1
                                if pp > 1000:
                                    break
                            p += 1
                    if pp < 1000:
                        d.putItemCount(pp)
            # Active connection.
            with SubItem(d, "currentSender"):
                d.putNoType()
                sender = d_ptr["currentSender"]
                d.putPointerValue(sender)
                if d.isNull(sender):
                    d.putNumChild(0)
                else:
                    d.putNumChild(1)
                    if d.isExpanded():
                        with Children(d):
                            # Sending object
                            d.putSubItem("object", sender["sender"])
                            # Signal in sending object
                            with SubItem(d, "signal"):
                                d.putValue(sender["signal"])
                                d.putNoType()
                                d.putNumChild(0)
# QObject
# static const uint qt_meta_data_QObject[] = {
# int revision;
# int className;
# int classInfoCount, classInfoData;
# int methodCount, methodData;
# int propertyCount, propertyData;
# int enumeratorCount, enumeratorData;
# int constructorCount, constructorData; //since revision 2
# int flags; //since revision 3
# int signalCount; //since revision 4
# // content:
# 4, // revision
# 0, // classname
# 0, 0, // classinfo
# 4, 14, // methods
# 1, 34, // properties
# 0, 0, // enums/sets
# 2, 37, // constructors
# 0, // flags
# 2, // signalCount
# /* 14 */
# // signals: signature, parameters, type, tag, flags
# 9, 8, 8, 8, 0x05,
# 29, 8, 8, 8, 0x25,
# /* 24 */
# // slots: signature, parameters, type, tag, flags
# 41, 8, 8, 8, 0x0a,
# 55, 8, 8, 8, 0x08,
# /* 34 */
# // properties: name, type, flags
# 90, 82, 0x0a095103,
# /* 37 */
# // constructors: signature, parameters, type, tag, flags
# 108, 101, 8, 8, 0x0e,
# 126, 8, 8, 8, 0x2e,
# 0 // eod
# };
# static const char qt_meta_stringdata_QObject[] = {
# "QObject\0\0destroyed(QObject*)\0destroyed()\0"
# "deleteLater()\0_q_reregisterTimers(void*)\0"
# "QString\0objectName\0parent\0QObject(QObject*)\0"
# "QObject()\0"
# };
# QSignalMapper
# static const uint qt_meta_data_QSignalMapper[] = {
# // content:
# 4, // revision
# 0, // classname
# 0, 0, // classinfo
# 7, 14, // methods
# 0, 0, // properties
# 0, 0, // enums/sets
# 0, 0, // constructors
# 0, // flags
# 4, // signalCount
# // signals: signature, parameters, type, tag, flags
# 15, 14, 14, 14, 0x05,
# 27, 14, 14, 14, 0x05,
# 43, 14, 14, 14, 0x05,
# 60, 14, 14, 14, 0x05,
# // slots: signature, parameters, type, tag, flags
# 77, 14, 14, 14, 0x0a,
# 90, 83, 14, 14, 0x0a,
# 104, 14, 14, 14, 0x08,
# 0 // eod
# };
# static const char qt_meta_stringdata_QSignalMapper[] = {
# "QSignalMapper\0\0mapped(int)\0mapped(QString)\0"
# "mapped(QWidget*)\0mapped(QObject*)\0"
# "map()\0sender\0map(QObject*)\0"
# "_q_senderDestroyed()\0"
# };
# const QMetaObject QSignalMapper::staticMetaObject = {
# { &QObject::staticMetaObject, qt_meta_stringdata_QSignalMapper,
# qt_meta_data_QSignalMapper, 0 }
# };
# // Meta enumeration helpers
# static inline void dumpMetaEnumType(QDumper &d, const QMetaEnum &me)
# {
# QByteArray type = me.scope()
#         if (!type.isEmpty())
# type += "::"
# type += me.name()
# d.putField("type", type.constData())
# }
#
# static inline void dumpMetaEnumValue(QDumper &d, const QMetaProperty &mop,
# int value)
# {
#
# const QMetaEnum me = mop.enumerator()
# dumpMetaEnumType(d, me)
# if const char *enumValue = me.valueToKey(value)) {
# d.putValue(enumValue)
# } else {
# d.putValue(value)
# }
# d.putField("numchild", 0)
# }
#
# static inline void dumpMetaFlagValue(QDumper &d, const QMetaProperty &mop,
# int value)
# {
# const QMetaEnum me = mop.enumerator()
# dumpMetaEnumType(d, me)
# const QByteArray flagsValue = me.valueToKeys(value)
# if flagsValue.isEmpty():
# d.putValue(value)
# else:
# d.putValue(flagsValue.constData())
# d.putNumChild(0)
# }
def qdump__QPixmap(d, value):
    """Dump a QPixmap as "(WxH)" by peeking at its platform pixmap data."""
    ptrSize = d.ptrSize()
    dataOffset = ptrSize * (3 if d.qtVersion() >= 0x050000 else 2)
    dataBase = d.extractPointer(d.addressOf(value) + dataOffset)
    if dataBase == 0:
        d.putValue("(invalid)")
    else:
        width = d.extractInt(dataBase + ptrSize)
        height = d.extractInt(dataBase + ptrSize + 4)
        d.putValue("(%dx%d)" % (width, height))
    d.putNumChild(0)
def qdump__QPoint(d, value):
    """Dump a QPoint as "(x, y)"."""
    coords = (int(value["xp"]), int(value["yp"]))
    d.putValue("(%s, %s)" % coords)
    d.putPlainChildren(value)
def qdump__QPointF(d, value):
    """Dump a QPointF as "(x, y)"."""
    coords = (float(value["xp"]), float(value["yp"]))
    d.putValue("(%s, %s)" % coords)
    d.putPlainChildren(value)
def qdump__QRect(d, value):
    """Dump a QRect in X11-style "WxH+X+Y" geometry notation."""
    def signed(coord):
        # Non-negative offsets get an explicit '+' prefix.
        return "+%s" % coord if coord >= 0 else coord
    left = int(value["x1"])
    top = int(value["y1"])
    right = int(value["x2"])
    bottom = int(value["y2"])
    # QRect stores inclusive corners, hence the +1 for width/height.
    d.putValue("%sx%s%s%s" % (right - left + 1, bottom - top + 1,
                              signed(left), signed(top)))
    d.putPlainChildren(value)
def qdump__QRectF(d, value):
    """Dump a QRectF in X11-style "WxH+X+Y" geometry notation."""
    def signed(coord):
        # Non-negative offsets get an explicit '+' prefix.
        return "+%s" % coord if coord >= 0 else coord
    left = float(value["xp"])
    top = float(value["yp"])
    width = float(value["w"])
    height = float(value["h"])
    d.putValue("%sx%s%s%s" % (width, height, signed(left), signed(top)))
    d.putPlainChildren(value)
def qdump__QRegExp(d, value):
    """Dump a QRegExp as its pattern, with syntax and captures children.

    Reads QRegExpPrivate through byte offsets (layout below); the
    capturedTexts() call is only needed to warm up the captures cache.
    """
    # value.priv.engineKey.pattern
    privAddress = d.extractPointer(value)
    engineKeyAddress = privAddress + d.ptrSize()
    patternAddress = engineKeyAddress
    d.putStringValueByAddress(patternAddress)
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            # QRegExpPrivate:
            # - QRegExpEngine *eng               (+0)
            # - QRegExpEngineKey:                (+1ptr)
            #   - QString pattern;               (+1ptr)
            #   - QRegExp::PatternSyntax patternSyntax;  (+2ptr)
            #   - Qt::CaseSensitivity cs;        (+2ptr +1enum +pad?)
            # - bool minimal                     (+2ptr +2enum +2pad?)
            # - QString t                        (+2ptr +2enum +1bool +3pad?)
            # - QStringList captures             (+3ptr +2enum +1bool +3pad?)
            # FIXME: Remove need to call. Needed to warm up cache.
            d.call(value, "capturedTexts") # create cache
            ns = d.qtNamespace()
            with SubItem(d, "syntax"):
                # value["priv"]["engineKey"]["patternSyntax"]
                address = engineKeyAddress + d.ptrSize()
                typ = d.lookupType(ns + "QRegExp::PatternSyntax")
                d.putItem(d.createValue(address, typ))
            with SubItem(d, "captures"):
                # value["priv"]["capturedCache"]
                address = privAddress + 3 * d.ptrSize() + 12
                typ = d.lookupType(ns + "QStringList")
                d.putItem(d.createValue(address, typ))
def qdump__QRegion(d, value):
    """Dump a QRegion via its QRegionPrivate (rect count plus details)."""
    p = value["d"].dereference()["qt_rgn"]
    if d.isNull(p):
        d.putValue("<empty>")
        d.putNumChild(0)
    else:
        # struct QRegionPrivate:
        # int numRects;
        # QVector<QRect> rects;
        # QRect extents;
        # QRect innerRect;
        # int innerArea;
        pp = d.extractPointer(p)
        n = d.extractInt(pp)
        d.putItemCount(n)
        d.putNumChild(n)
        if d.isExpanded():
            with Children(d):
                v = d.ptrSize()
                ns = d.qtNamespace()
                rectType = d.lookupType(ns + "QRect")
                d.putIntItem("numRects", n)
                d.putSubItem("extents", d.createValue(pp + 2 * v, rectType))
                d.putSubItem("innerRect", d.createValue(pp + 2 * v + rectType.sizeof, rectType))
                d.putIntItem("innerArea", d.extractInt(pp + 2 * v + 2 * rectType.sizeof))
                # FIXME
                try:
                    # Can fail if QVector<QRect> debuginfo is missing.
                    vectType = d.lookupType("%sQVector<%sQRect>" % (ns, ns))
                    d.putSubItem("rects", d.createValue(pp + v, vectType))
                except:
                    # Fall back to a non-expandable count-only entry.
                    with SubItem(d, "rects"):
                        d.putItemCount(n)
                        d.putType("%sQVector<%sQRect>" % (ns, ns))
                        d.putNumChild(0)
def qdump__QScopedPointer(d, value):
    """Dump a QScopedPointer transparently as its payload pointer."""
    d.putBetterType(d.currentType)
    d.putItem(value["d"])
def qdump__QSet(d, value):
    """Dump a QSet<T> (backed by the Qt 4 QHash open-hashing layout).

    Walks the bucket table by hand, like qdump__QHash, but shows only the
    keys.
    """
    def hashDataFirstNode(dPtr, numBuckets):
        # Return the first real node, or the 'e' sentinel if the set is empty.
        ePtr = dPtr.cast(nodeTypePtr)
        bucket = dPtr["buckets"]
        # NOTE(review): same early-stop pattern as in qdump__QHash's
        # hashDataFirstNode — appears to skip the last bucket; kept as-is.
        for n in xrange(numBuckets - 1, -1, -1):
            n = n - 1
            if n < 0:
                break
            if d.pointerValue(bucket.dereference()) != d.pointerValue(ePtr):
                return bucket.dereference()
            bucket = bucket + 1
        return ePtr
    def hashDataNextNode(nodePtr, numBuckets):
        # Follow the node chain; when it ends, rescan buckets after this slot.
        nextPtr = nodePtr.dereference()["next"]
        if d.pointerValue(nextPtr.dereference()["next"]):
            return nextPtr
        dPtr = nodePtr.cast(hashDataType.pointer()).dereference()
        start = (int(nodePtr.dereference()["h"]) % numBuckets) + 1
        bucket = dPtr.dereference()["buckets"] + start
        for n in xrange(numBuckets - start):
            if d.pointerValue(bucket.dereference()) != d.pointerValue(nextPtr):
                return bucket.dereference()
            bucket += 1
        # NOTE(review): returns nodePtr here while the QHash variant
        # returns nextPtr — confirm which is intended.
        return nodePtr
    anon = d.childAt(value, 0)
    if d.isLldb: # Skip the inheritance level.
        anon = d.childAt(anon, 0)
    d_ptr = anon["d"]
    e_ptr = anon["e"]
    size = int(d_ptr.dereference()["size"])
    d.check(0 <= size and size <= 100 * 1000 * 1000)
    d.checkRef(d_ptr["ref"])
    d.putItemCount(size)
    d.putNumChild(size)
    if d.isExpanded():
        hashDataType = d_ptr.type
        nodeTypePtr = d_ptr.dereference()["fakeNext"].type
        numBuckets = int(d_ptr.dereference()["numBuckets"])
        innerType = e_ptr.dereference().type
        with Children(d, size, maxNumChild=1000, childType=innerType):
            for i in d.childRange():
                if i == 0:
                    node = hashDataFirstNode(d_ptr, numBuckets)
                else:
                    node = hashDataNextNode(node, numBuckets)
                it = node.dereference().cast(innerType)
                with SubItem(d, i):
                    key = it["key"]
                    if not key:
                        # LLDB can't access directly since it's in anonymous union
                        # for Qt4 optimized int keytype
                        key = it[1]["key"]
                    d.putItem(key)
def qdump__QSharedData(d, value):
    """Show a QSharedData base as its atomic reference count."""
    refCount = value["ref"]["_q_value"]
    d.putValue("ref: %s" % refCount)
    d.putNumChild(0)
def qdump__QSharedDataPointer(d, value):
    """Dump a QSharedDataPointer<T> transparently as the pointed-to T."""
    dataPtr = value["d"]
    if d.isNull(dataPtr):
        d.putValue("(null)")
        d.putNumChild(0)
        return
    # This replaces the pointer by the pointee, making the
    # pointer transparent.
    try:
        pointeeType = d.templateArgument(value.type, 0)
    except:
        # Template argument not recoverable; show the raw pointer instead.
        d.putValue(dataPtr)
        d.putPlainChildren(value)
        return
    d.putBetterType(d.currentType)
    d.putItem(dataPtr.cast(pointeeType.pointer()).dereference())
def qdump__QSharedPointer(d, value):
    """A QSharedPointer dumps like a QWeakPointer (same d/value layout)."""
    qdump__QWeakPointer(d, value)
def qdump__QSize(d, value):
    """Dump a QSize as "(w, h)"."""
    dims = (int(value["wd"]), int(value["ht"]))
    d.putValue("(%s, %s)" % dims)
    d.putPlainChildren(value)
def qdump__QSizeF(d, value):
    """Dump a QSizeF as "(w, h)"."""
    dims = (float(value["wd"]), float(value["ht"]))
    d.putValue("(%s, %s)" % dims)
    d.putPlainChildren(value)
def qform__QStack():
    """Report the display formats available for QStack (array forms)."""
    return arrayForms()
def qdump__QStack(d, value):
    """A QStack is a QVector underneath and dumps identically."""
    qdump__QVector(d, value)
def qdump__QStandardItem(d, value):
    """Dump a QStandardItem through its d_ptr, falling back to raw members."""
    d.putBetterType(d.currentType)
    try:
        d.putItem(value["d_ptr"])
    except:
        # d_ptr not accessible (e.g. missing debug info); show members.
        d.putPlainChildren(value)
def qedit__QString(d, value, data):
    """Write 'data' into the debuggee's QString 'value' (in-place edit).

    Resizes the string in the debuggee, then pokes the UTF-16 code units
    directly into its character buffer.
    """
    d.call(value, "resize", str(len(data)))
    (base, size, alloc) = d.stringData(value)
    d.setValues(base, "short", [ord(c) for c in data])
def qform__QString():
    """Report the QString display format choices offered to the user."""
    return ",".join(("Inline", "Separate Window"))
def qdump__QString(d, value):
    """Dump a QString; format 2 routes the text to a separate editor pane."""
    d.putStringValue(value)
    d.putNumChild(0)
    displayFormat = d.currentItemFormat()
    if displayFormat == 1:
        d.putDisplay(StopDisplay)
    elif displayFormat == 2:
        d.putField("editformat", DisplayUtf16String)
        d.putField("editvalue", d.encodeString(value))
def qdump__QStringRef(d, value):
    """Dump a QStringRef as the referenced UTF-16 substring."""
    if d.isNull(value["m_string"]):
        d.putValue("(null)");
        d.putNumChild(0)
        return
    s = value["m_string"].dereference()
    data, size, alloc = d.stringData(s)
    # Advance to the referenced position (2 bytes per UTF-16 code unit).
    data += 2 * int(value["m_position"])
    size = int(value["m_size"])
    s = d.readMemory(data, 2 * size)
    d.putValue(s, Hex4EncodedLittleEndian)
    d.putPlainChildren(value)
def qdump__QStringList(d, value):
    """Dumper for QStringList: dump via the QList base class, then restore
    the nicer QStringList type name."""
    listType = d.directBaseClass(value.type)
    qdump__QList(d, value.cast(listType))
    d.putBetterType(value.type)
def qdump__QTemporaryFile(d, value):
    # A QTemporaryFile is presented exactly like its QFile base.
    qdump__QFile(d, value)
def qdump__QTextCodec(d, value):
    """Dumper for QTextCodec: show the codec name; expose name/mibEnum."""
    name = d.call(value, "name")
    # FIX: was d.encodeByteArray(d, name) -- the dumper instance was
    # accidentally passed as the byte-array argument.
    d.putValue(d.encodeByteArray(name), 6)
    d.putNumChild(2)
    if d.isExpanded():
        with Children(d):
            d.putCallItem("name", value, "name")
            d.putCallItem("mibEnum", value, "mibEnum")
            d.putFields(value)
def qdump__QTextCursor(d, value):
    """Dumper for QTextCursor: value is the cursor position; children expose
    position, anchor and the selected text."""
    privAddress = d.extractPointer(value)
    if privAddress == 0:
        d.putValue("(invalid)")
        d.putNumChild(0)
    else:
        # Offset of the 'position' int inside QTextCursorPrivate.
        # NOTE(review): hard-coded layout (2 pointers + 8 bytes) -- verify
        # against the Qt version being debugged.
        positionAddress = privAddress + 2 * d.ptrSize() + 8
        d.putValue(d.extractInt(positionAddress))
        d.putNumChild(1)
        if d.isExpanded():
            with Children(d):
                positionAddress = privAddress + 2 * d.ptrSize() + 8
                d.putIntItem("position", d.extractInt(positionAddress))
                # 'anchor' immediately follows 'position' in memory.
                d.putIntItem("anchor", d.extractInt(positionAddress + d.intSize()))
                d.putCallItem("selected", value, "selectedText")
                d.putFields(value)
def qdump__QTextDocument(d, value):
    """Dumper for QTextDocument: no inline value; expand to call a fixed set
    of informational accessors on the inferior object."""
    d.putEmptyValue()
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            for accessor in ("blockCount", "characterCount", "lineCount",
                             "revision", "toPlainText"):
                d.putCallItem(accessor, value, accessor)
            d.putFields(value)
def qform__QUrl():
    # Display formats offered for QUrl in the locals view.
    return "Inline,Separate Window"
def qdump__QUrl(d, value):
    """Dumper for QUrl.

    Qt 4: shows the cached "encoded original" byte array from QUrlPrivate.
    Qt 5: reassembles a display URL from QUrlPrivate's QString fields; the
    pieces are hex-encoded UTF-16LE, so "3a002f002f00" is "://", "4000" is
    "@" and "3a00" is ":".
    """
    if d.qtVersion() < 0x050000:
        privAddress = d.extractPointer(value)
        if not privAddress:
            # d == 0 if QUrl was constructed with default constructor
            d.putValue("<invalid>")
            return
        # NOTE(review): 8-pointer offset into Qt 4's QUrlPrivate -- layout
        # dependent; confirm against the exact Qt 4 minor version.
        encodedOriginalAddress = privAddress + 8 * d.ptrSize()
        d.putValue(d.encodeByteArrayHelper(d.extractPointer(encodedOriginalAddress)), Hex2EncodedLatin1)
        d.putNumChild(8)
        if d.isExpanded():
            stringType = d.lookupType(d.qtNamespace() + "QString")
            baType = d.lookupType(d.qtNamespace() + "QByteArray")
            with Children(d):
                # Qt 4 only decodes the original string if some detail is requested
                d.putCallItem("scheme", value, "scheme")
                d.putCallItem("userName", value, "userName")
                d.putCallItem("password", value, "password")
                d.putCallItem("host", value, "host")
                d.putCallItem("path", value, "path")
                d.putCallItem("query", value, "encodedQuery")
                d.putCallItem("fragment", value, "fragment")
                d.putCallItem("port", value, "port")
                d.putFields(value)
    else:
        # Qt 5 QUrlPrivate layout this code walks:
        # - QAtomicInt ref;
        # - int port;
        # - QString scheme;
        # - QString userName;
        # - QString password;
        # - QString host;
        # - QString path;
        # - QString query;
        # - QString fragment;
        privAddress = d.extractPointer(value)
        if not privAddress:
            # d == 0 if QUrl was constructed with default constructor
            d.putValue("<invalid>")
            return
        # scheme starts after ref + port (two ints).
        schemeAddr = privAddress + 2 * d.intSize()
        scheme = d.encodeStringHelper(d.extractPointer(schemeAddr))
        userName = d.encodeStringHelper(d.extractPointer(schemeAddr + 1 * d.ptrSize()))
        password = d.encodeStringHelper(d.extractPointer(schemeAddr + 2 * d.ptrSize()))
        host = d.encodeStringHelper(d.extractPointer(schemeAddr + 3 * d.ptrSize()))
        path = d.encodeStringHelper(d.extractPointer(schemeAddr + 4 * d.ptrSize()))
        query = d.encodeStringHelper(d.extractPointer(schemeAddr + 5 * d.ptrSize()))
        fragment = d.encodeStringHelper(d.extractPointer(schemeAddr + 6 * d.ptrSize()))
        port = d.extractInt(d.extractPointer(value) + d.intSize())
        # Assemble scheme://[user@]host[:port]path as hex UTF-16LE.
        url = scheme
        url += "3a002f002f00"
        if len(userName):
            url += userName
            url += "4000"
        url += host
        if port >= 0:
            url += "3a00"
            url += ''.join(["%02x00" % ord(c) for c in str(port)])
        url += path
        d.putValue(url, Hex4EncodedLittleEndian)
        format = d.currentItemFormat()
        if format == 1:
            d.putDisplay(StopDisplay)
        elif format == 2:
            d.putField("editformat", DisplayUtf16String)
            d.putField("editvalue", url)
        d.putNumChild(8)
        if d.isExpanded():
            stringType = d.lookupType(d.qtNamespace() + "QString")
            with Children(d):
                d.putIntItem("port", port)
                d.putGenericItem("scheme", stringType, scheme, Hex4EncodedLittleEndian)
                d.putGenericItem("userName", stringType, userName, Hex4EncodedLittleEndian)
                d.putGenericItem("password", stringType, password, Hex4EncodedLittleEndian)
                d.putGenericItem("host", stringType, host, Hex4EncodedLittleEndian)
                d.putGenericItem("path", stringType, path, Hex4EncodedLittleEndian)
                d.putGenericItem("query", stringType, query, Hex4EncodedLittleEndian)
                d.putGenericItem("fragment", stringType, fragment, Hex4EncodedLittleEndian)
# Dumpers for the trivially-copyable QVariant core types 0..6; each decodes
# the value straight out of the variant's inline data blob.
def qdumpHelper_QVariant_0(d, blob):
    # QVariant::Invalid
    d.putBetterType("%sQVariant (invalid)" % d.qtNamespace())
    d.putValue("(invalid)")

def qdumpHelper_QVariant_1(d, blob):
    # QVariant::Bool
    d.putBetterType("%sQVariant (bool)" % d.qtNamespace())
    d.putValue("true" if blob.extractByte() else "false")

def qdumpHelper_QVariant_2(d, blob):
    # QVariant::Int
    d.putBetterType("%sQVariant (int)" % d.qtNamespace())
    d.putValue("%s" % blob.extractInt())

def qdumpHelper_QVariant_3(d, blob):
    # uint
    d.putBetterType("%sQVariant (uint)" % d.qtNamespace())
    d.putValue(blob.extractUInt())

def qdumpHelper_QVariant_4(d, blob):
    # qlonglong
    d.putBetterType("%sQVariant (qlonglong)" % d.qtNamespace())
    d.putValue(blob.extractInt64())

def qdumpHelper_QVariant_5(d, blob):
    # qulonglong
    d.putBetterType("%sQVariant (qulonglong)" % d.qtNamespace())
    d.putValue(blob.extractUInt64())

def qdumpHelper_QVariant_6(d, blob):
    # QVariant::Double
    d.putBetterType("%sQVariant (double)" % d.qtNamespace())
    d.putValue(blob.extractDouble())

# Dispatch table indexed directly by the QVariant type id (0..6).
qdumpHelper_QVariants_A = [
    qdumpHelper_QVariant_0,
    qdumpHelper_QVariant_1,
    qdumpHelper_QVariant_2,
    qdumpHelper_QVariant_3,
    qdumpHelper_QVariant_4,
    qdumpHelper_QVariant_5,
    qdumpHelper_QVariant_6
]
# Type names for QVariant core type ids 7..28 (index = id - 7); these are
# re-dispatched to the matching qdump__<Type> dumper by qdump__QVariant.
qdumpHelper_QVariants_B = [
    "QChar",       # 7
    "QVariantMap", # 8
    "QVariantList",# 9
    "QString",     # 10
    "QStringList", # 11
    "QByteArray",  # 12
    "QBitArray",   # 13
    "QDate",       # 14
    "QTime",       # 15
    "QDateTime",   # 16
    "QUrl",        # 17
    "QLocale",     # 18
    "QRect",       # 19
    "QRectF",      # 20
    "QSize",       # 21
    "QSizeF",      # 22
    "QLine",       # 23
    "QLineF",      # 24
    "QPoint",      # 25
    "QPointF",     # 26
    "QRegExp",     # 27
    "QVariantHash",# 28
]
# Dumpers for the extended core QVariant types (ids 31..38 in Qt 5,
# 128..135 in Qt 4); like the 0..6 helpers they decode from a raw blob.
def qdumpHelper_QVariant_31(d, blob):
    # QVariant::VoidStar
    d.putBetterType("%sQVariant (void *)" % d.qtNamespace())
    d.putValue("0x%x" % d.extractPointer(blob))

def qdumpHelper_QVariant_32(d, blob):
    # QVariant::Long
    d.putBetterType("%sQVariant (long)" % d.qtNamespace())
    d.putValue("%s" % blob.extractLong())

def qdumpHelper_QVariant_33(d, blob):
    # QVariant::Short
    d.putBetterType("%sQVariant (short)" % d.qtNamespace())
    d.putValue("%s" % blob.extractShort())

def qdumpHelper_QVariant_34(d, blob):
    # QVariant::Char
    d.putBetterType("%sQVariant (char)" % d.qtNamespace())
    d.putValue("%s" % blob.extractByte())

def qdumpHelper_QVariant_35(d, blob):
    # QVariant::ULong
    d.putBetterType("%sQVariant (unsigned long)" % d.qtNamespace())
    d.putValue("%s" % blob.extractULong())

def qdumpHelper_QVariant_36(d, blob):
    # QVariant::UShort
    d.putBetterType("%sQVariant (unsigned short)" % d.qtNamespace())
    d.putValue("%s" % blob.extractUShort())

def qdumpHelper_QVariant_37(d, blob):
    # QVariant::UChar
    d.putBetterType("%sQVariant (unsigned char)" % d.qtNamespace())
    d.putValue("%s" % blob.extractByte())

def qdumpHelper_QVariant_38(d, blob):
    # QVariant::Float
    d.putBetterType("%sQVariant (float)" % d.qtNamespace())
    d.putValue("%s" % blob.extractFloat())

# Dispatch table indexed by (id - 31) for Qt 5, (id - 128) for Qt 4.
qdumpHelper_QVariants_D = [
    qdumpHelper_QVariant_31,
    qdumpHelper_QVariant_32,
    qdumpHelper_QVariant_33,
    qdumpHelper_QVariant_34,
    qdumpHelper_QVariant_35,
    qdumpHelper_QVariant_36,
    qdumpHelper_QVariant_37,
    qdumpHelper_QVariant_38
]
# Type names for the QVariant Gui type ids 64..74 (index = id - 64).
qdumpHelper_QVariants_E = [
    "QFont",       # 64
    "QPixmap",     # 65
    "QBrush",      # 66
    "QColor",      # 67
    "QPalette",    # 68
    "QIcon",       # 69
    "QImage",      # 70
    "QPolygon",    # 71
    "QRegion",     # 72
    "QBitmap",     # 73
    "QCursor",     # 74
]
# Type names for the remaining Gui type ids: Qt 5 ids 75..86 (index =
# id - 75); Qt 4 numbering is shifted by one (index = id - 76).
qdumpHelper_QVariants_F = [
    # Qt 5. In Qt 4 add one.
    "QKeySequence",# 75
    "QPen",        # 76
    "QTextLength", # 77
    "QTextFormat", # 78
    "X",           # 79 -- placeholder, id not mapped to a dumper here
    "QTransform",  # 80
    "QMatrix4x4",  # 81
    "QVector2D",   # 82
    "QVector3D",   # 83
    "QVector4D",   # 84
    "QQuaternion", # 85
    "QPolygonF"    # 86
]
def qdump__QVariant(d, value):
    """Dumper for QVariant: dispatch on the stored meta-type id.

    Ids 0..6 and the extended core ids are decoded directly from the
    variant's data blob; well-known Core/Gui types are re-dispatched to
    their own dumpers; everything else is resolved dynamically through
    QMetaType::typeName() in the inferior.  Returns the inner type name
    (or type object) for known/user types, None for the simple built-ins.
    """
    variantType = int(value["d"]["type"])
    #warn("VARIANT TYPE: %s : " % variantType)
    # Well-known simple type.
    if variantType <= 6:
        blob = d.toBlob(value)
        qdumpHelper_QVariants_A[variantType](d, blob)
        d.putNumChild(0)
        return None
    # Extended Core type (Qt 5)
    if variantType >= 31 and variantType <= 38 and d.qtVersion() >= 0x050000:
        blob = d.toBlob(value)
        qdumpHelper_QVariants_D[variantType - 31](d, blob)
        d.putNumChild(0)
        return None
    # Extended Core type (Qt 4)
    if variantType >= 128 and variantType <= 135 and d.qtVersion() < 0x050000:
        if variantType == 128:
            p = d.extractPointer(value)
            d.putBetterType("%sQVariant (void *)" % d.qtNamespace())
            d.putValue("0x%x" % p)
        else:
            if variantType == 135:
                # 135 maps to the Float helper; the value is stored inline.
                blob = d.toBlob(value)
            else:
                # Other extended Qt 4 types live behind two indirections.
                p = d.extractPointer(value)
                p = d.extractPointer(p)
                blob = d.extractBlob(p, 8)
            qdumpHelper_QVariants_D[variantType - 128](d, blob)
        d.putNumChild(0)
        return None
    if variantType <= 86:
        # Known Core or Gui type.
        if variantType <= 28:
            innert = qdumpHelper_QVariants_B[variantType - 7]
        elif variantType <= 74:
            innert = qdumpHelper_QVariants_E[variantType - 64]
        elif d.qtVersion() < 0x050000:
            innert = qdumpHelper_QVariants_F[variantType - 76]
        else:
            innert = qdumpHelper_QVariants_F[variantType - 75]
        data = value["d"]["data"]
        ns = d.qtNamespace()
        inner = ns + innert
        if d.isLldb:
            # Looking up typedefs is problematic; expand the common variant
            # container typedefs to their underlying template types.
            if innert == "QVariantMap":
                inner = "%sQMap<%sQString, %sQVariant>" % (ns, ns, ns)
            elif innert == "QVariantHash":
                inner = "%sQHash<%sQString, %sQVariant>" % (ns, ns, ns)
            elif innert == "QVariantList":
                inner = "%sQList<%sQVariant>" % (ns, ns)
        innerType = d.lookupType(inner)
        if toInteger(value["d"]["is_shared"]):
            # Shared payload: data.ptr points at a pointer to the object.
            val = data["ptr"].cast(innerType.pointer().pointer()).dereference().dereference()
        else:
            val = data["ptr"].cast(innerType)
        d.putEmptyValue(-99)
        d.putItem(val)
        d.putBetterType("%sQVariant (%s)" % (d.qtNamespace(), innert))
        return innert
    # User types.
    d_ptr = value["d"]
    typeCode = int(d_ptr["type"])
    ns = d.qtNamespace()
    try:
        # Cast-call form first; plain call as fallback.
        exp = "((const char *(*)(int))%sQMetaType::typeName)(%d)" % (ns, typeCode)
        type = str(d.parseAndEvaluate(exp))
    except:
        exp = "%sQMetaType::typeName(%d)" % (ns, typeCode)
        type = str(d.parseAndEvaluate(exp))
    type = type[type.find('"') + 1 : type.rfind('"')]
    type = type.replace("Q", ns + "Q") # HACK!
    type = type.replace("uint", "unsigned int") # HACK!
    type = type.replace("COMMA", ",") # HACK!
    type = type.replace(" ,", ",") # Lldb
    #warn("TYPE: %s" % type)
    data = d.call(value, "constData")
    #warn("DATA: %s" % data)
    d.putEmptyValue(-99)
    d.putType("%sQVariant (%s)" % (ns, type))
    d.putNumChild(1)
    tdata = data.cast(d.lookupType(type).pointer()).dereference()
    if d.isExpanded():
        with Children(d):
            with NoAddress(d):
                d.putSubItem("data", tdata)
    return tdata.type
def qedit__QVector(d, value, data):
    """Debugger-side editor for QVector: parse *data* as a comma-separated
    list, resize the vector in the inferior and overwrite its elements."""
    values = data.split(',')
    size = len(values)
    d.call(value, "resize", str(size))
    innerType = d.templateArgument(value.type, 0)
    try:
        # Qt 5. Will fail on Qt 4 due to the missing 'offset' member.
        offset = value["d"]["offset"]
        base = d.pointerValue(value["d"].cast(d.charPtrType()) + offset)
    except:
        # Qt 4.
        base = d.pointerValue(value["p"]["array"])
    d.setValues(base, innerType, values)
def qform__QVector():
    # Display formats offered for QVector (plain/array plot forms).
    return arrayForms()

def qdump__QVector(d, value):
    """Dumper for QVector: show the element count and plottable contents."""
    data, size, alloc = d.vectorDataHelper(d.extractPointer(value))
    # Sanity-check before trusting the header of a possibly garbage vector.
    d.check(0 <= size and size <= alloc and alloc <= 1000 * 1000 * 1000)
    d.putItemCount(size)
    d.putNumChild(size)
    innerType = d.templateArgument(value.type, 0)
    d.putPlotData(innerType, data, size)
def qdump__QWeakPointer(d, value):
    """Dumper for QWeakPointer (also reused by qdump__QSharedPointer)."""
    d_ptr = value["d"]
    val = value["value"]
    if d.isNull(d_ptr) and d.isNull(val):
        d.putValue("(null)")
        d.putNumChild(0)
        return
    # Exactly one of the two pointers being null is an inconsistent state.
    if d.isNull(d_ptr) or d.isNull(val):
        d.putValue("<invalid>")
        d.putNumChild(0)
        return
    weakref = int(d_ptr["weakref"]["_q_value"])
    strongref = int(d_ptr["strongref"]["_q_value"])
    # Plausibility checks on the reference counts before dereferencing.
    d.check(strongref >= -1)
    d.check(strongref <= weakref)
    d.check(weakref <= 10*1000*1000)
    innerType = d.templateArgument(value.type, 0)
    if d.isSimpleType(innerType):
        d.putSimpleValue(val.dereference())
    else:
        d.putEmptyValue()
    d.putNumChild(3)
    if d.isExpanded():
        with Children(d):
            d.putSubItem("data", val.dereference().cast(innerType))
            d.putIntItem("weakref", weakref)
            d.putIntItem("strongref", strongref)
def qdump__QXmlAttributes(d, value):
    # QXmlAttributes is a thin wrapper around a QList of attributes.
    qdump__QList(d, value["attList"])
#######################################################################
#
# V4
#
#######################################################################
def qdump__QV4__String(d, value):
    """Dumper for QV4::String: show the identifier's string, no children."""
    d.putStringValue(value["identifier"]["string"])
    d.putNumChild(0)

def qdump__QV4__TypedValue(d, value):
    """Dumper for QV4::TypedValue: dump as the QV4::Value base, but keep
    the derived type name."""
    qdump__QV4__Value(d, d.directBaseObject(value))
    d.putBetterType(value.type)

def qdump__QV4__Value(d, value):
    """Dumper for QV4::Value: special-case strings (64-bit targets only),
    otherwise fall back to plain children."""
    try:
        if d.is64bit():
            vtable = value["m"]["internalClass"]["vtable"]
            if toInteger(vtable["isString"]):
                d.putBetterType(d.qtNamespace() + "QV4::Value (String)")
                d.putStringValue(value["s"]["identifier"]["string"])
                d.putNumChild(0)
                return
    except:
        pass
    # Fall back for cases that we do not handle specifically.
    d.putPlainChildren(value)
#######################################################################
#
# Webkit
#
#######################################################################
def jstagAsString(tag):
    """Map a JavaScriptCore JSValue tag (signed 32-bit int) to its name.

    Tag constants from JSValue's 32-bit encoding, as signed ints:
      Int32Tag        = 0xffffffff  (-1)
      CellTag         = 0xfffffffe  (-2)
      TrueTag         = 0xfffffffd  (-3)
      FalseTag        = 0xfffffffc  (-4)
      NullTag         = 0xfffffffb  (-5)
      UndefinedTag    = 0xfffffffa  (-6)
      EmptyValueTag   = 0xfffffff9  (-7)
      DeletedValueTag = 0xfffffff8  (-8)

    FIX: the original chain skipped "False" entirely, which shifted
    Null/Undefined/Empty/Deleted one tag too early and never reported
    "Deleted" for -8.
    """
    names = {
        -1: "Int32",
        -2: "Cell",
        -3: "True",
        -4: "False",
        -5: "Null",
        -6: "Undefined",
        -7: "Empty",
        -8: "Deleted",
    }
    return names.get(tag, "Unknown")
def qdump__QTJSC__JSValue(d, value):
    """Dumper for QTJSC::JSValue (JavaScriptCore's tagged value union)."""
    d.putEmptyValue()
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            tag = value["u"]["asBits"]["tag"]
            payload = value["u"]["asBits"]["payload"]
            with SubItem(d, "tag"):
                d.putValue(jstagAsString(int(tag)))
                d.putNoType()
                d.putNumChild(0)
            d.putIntItem("payload", int(payload))
            d.putFields(value["u"])
            if tag == -2:
                # CellTag: the payload is a pointer to a JSCell.
                cellType = d.lookupType("QTJSC::JSCell").pointer()
                d.putSubItem("cell", payload.cast(cellType))
            # FIX: a trailing try-block copied from qdump__QScriptValue was
            # removed here -- it referenced the undefined name `scriptObject`,
            # so it always raised NameError and was silently swallowed by a
            # bare `except`. Behavior is unchanged.
def qdump__QScriptValue(d, value):
    """Dumper for QScriptValue: handles the Number and String private types
    directly, and tries to unwrap a QVariant from JSCore-backed values."""
    # structure:
    #  engine        QScriptEnginePrivate
    #  jscValue      QTJSC::JSValue
    #  next          QScriptValuePrivate *
    #  numberValue   5.5987310416280426e-270 myns::qsreal
    #  prev          QScriptValuePrivate *
    #  ref           QBasicAtomicInt
    #  stringValue   QString
    #  type          QScriptValuePrivate::Type: { JavaScriptCore, Number, String }
    #d.putEmptyValue()
    dd = value["d_ptr"]["d"]
    ns = d.qtNamespace()
    if d.isNull(dd):
        d.putValue("(invalid)")
        d.putNumChild(0)
        return
    if int(dd["type"]) == 1: # Number
        d.putValue(dd["numberValue"])
        d.putType("%sQScriptValue (Number)" % ns)
        d.putNumChild(0)
        return
    if int(dd["type"]) == 2: # String
        d.putStringValue(dd["stringValue"])
        d.putType("%sQScriptValue (String)" % ns)
        return
    # Remaining case: a JavaScriptCore-backed value.
    d.putType("%sQScriptValue (JSCoreValue)" % ns)
    x = dd["jscValue"]["u"]
    tag = x["asBits"]["tag"]
    payload = x["asBits"]["payload"]
    #isValid = int(x["asBits"]["tag"]) != -6 # Empty
    #isCell = int(x["asBits"]["tag"]) == -2
    #warn("IS CELL: %s " % isCell)
    #isObject = False
    #className = "UNKNOWN NAME"
    #if isCell:
    #    # isCell() && asCell()->isObject();
    #    # in cell: m_structure->typeInfo().type() == ObjectType;
    #    cellType = d.lookupType("QTJSC::JSCell").pointer()
    #    cell = payload.cast(cellType).dereference()
    #    dtype = "NO DYNAMIC TYPE"
    #    try:
    #        dtype = cell.dynamic_type
    #    except:
    #        pass
    #    warn("DYNAMIC TYPE: %s" % dtype)
    #    warn("STATUC %s" % cell.type)
    #    type = cell["m_structure"]["m_typeInfo"]["m_type"]
    #    isObject = int(type) == 7 # ObjectType;
    #    className = "UNKNOWN NAME"
    #warn("IS OBJECT: %s " % isObject)
    #inline bool JSCell::inherits(const ClassInfo* info) const
    #for (const ClassInfo* ci = classInfo(); ci; ci = ci->parentClass) {
    #    if (ci == info)
    #        return true;
    #return false;
    try:
        # This might already fail for "native" payloads.
        scriptObjectType = d.lookupType(ns + "QScriptObject").pointer()
        scriptObject = payload.cast(scriptObjectType)
        # FIXME: This might not always be a variant.
        delegateType = d.lookupType(ns + "QScript::QVariantDelegate").pointer()
        delegate = scriptObject["d"]["delegate"].cast(delegateType)
        #d.putSubItem("delegate", delegate)
        variant = delegate["m_value"]
        #d.putSubItem("variant", variant)
        t = qdump__QVariant(d, variant)
        # Override the "QVariant (foo)" output
        d.putBetterType("%sQScriptValue (%s)" % (ns, t))
        if t != "JSCoreValue":
            return
    except:
        pass
    # This is a "native" JSCore type for e.g. QDateTime.
    d.putValue("<native>")
    d.putNumChild(1)
    if d.isExpanded():
        with Children(d):
            d.putSubItem("jscValue", dd["jscValue"])
|
omniacreator/qtcreator
|
share/qtcreator/debugger/qttypes.py
|
Python
|
lgpl-2.1
| 85,573
|
import os
from random import randrange
from math import ceil
# Starting state (comments translated to English; user-facing French
# strings are left untouched).
argent = 1000 # we sit down at the table with $1000
continuer_partie = True # Boolean that stays True as long as the game
# should keep running
print("Vous vous installez à la table de roulette avec", argent, "$.")
while continuer_partie: # while the game keeps running
    # ask the user which number to bet on
    nombre_mise = -1
    while nombre_mise < 0 or nombre_mise > 49:
        nombre_mise = input("Tapez le nombre sur lequel vous voulez miser (entre 0 et 49) : ")
        # convert the entered number
        try:
            nombre_mise = int(nombre_mise)
        except ValueError:
            print("Vous n'avez pas saisi de nombre")
            nombre_mise = -1
            continue
        if nombre_mise < 0:
            print("Ce nombre est négatif")
        if nombre_mise > 49:
            print("Ce nombre est supérieur à 49")
    # now pick the amount to stake on that number
    mise = 0
    while mise <= 0 or mise > argent:
        mise = input("Tapez le montant de votre mise : ")
        # convert the stake
        try:
            mise = int(mise)
        except ValueError:
            print("Vous n'avez pas saisi de nombre")
            mise = -1
            continue
        if mise <= 0:
            print("La mise saisie est négative ou nulle.")
        if mise > argent:
            print("Vous ne pouvez miser autant, vous n'avez que", argent, "$")
    # number and stake have been chosen; spin the wheel
    numero_gagnant = randrange(50)
    print("La roulette tourne... ... et s'arrête sur le numéro", numero_gagnant)
    # work out the player's winnings
    if numero_gagnant == nombre_mise:
        # exact number: stake pays 3x
        print("Félicitations ! Vous obtenez", mise * 3, "$ !")
        argent += mise * 3
    elif numero_gagnant % 2 == nombre_mise % 2: # same colour (same parity)
        # right colour: half the stake back (rounded up)
        mise = ceil(mise * 0.5)
        print("Vous avez misé sur la bonne couleur. Vous obtenez", mise, "$")
        argent += mise
    else:
        print("Désolé l'ami, c'est pas pour cette fois. Vous perdez votre mise.")
        argent -= mise
    # end the game if the player is broke
    if argent <= 0:
        print("Vous êtes ruiné ! C'est la fin de la partie.")
        continuer_partie = False
    else:
        # show the player's current money
        print("Vous avez à présent", argent, "$")
        quitter = input("Souhaitez-vous quitter le casino (o/n) ? ")
        if quitter == "o" or quitter == "O":
            print("Vous quittez le casino avec vos gains.")
            continuer_partie = False
# pause the console (Windows)
os.system("pause")
|
david-co/veamos
|
test.py
|
Python
|
lgpl-2.1
| 2,875
|
#!/usr/bin/env python
import gtk
from kiwi.controllers import BaseController
from kiwi.ui.views import BaseView
class FarenControl(BaseController):
    """Controller for the Fahrenheit/Celsius converter demo."""

    def convert_temperature(self, temp):
        """Return (fahrenheit, celsius) readings of *temp*.

        *temp* is interpreted as Fahrenheit for the Celsius result and as
        Celsius for the Fahrenheit result (both conversions of one entry).
        """
        celsius = (temp - 32) * 5/9.0
        farenheit = (temp * 9/5.0) + 32
        return farenheit, celsius

    def on_quitbutton__clicked(self, *args):
        self.view.hide_and_quit()

    # use changed instead of insert_text, since it catches deletes too
    def after_temperature__changed(self, entry, *args):
        # FIX: was the module-level global `view`; use this controller's view.
        temp = self.view.get_temp()
        if temp is None:  # FIX: identity comparison instead of == None
            self.view.clear_temp()
        else:
            farenheit, celsius = self.convert_temperature(float(temp))
            self.view.update_temp(farenheit, celsius)
class FarenView(BaseView):
    """Glade-backed view exposing the temperature entry and result labels."""

    # Widget names resolved from the "faren" glade file.
    widgets = ["quitbutton", "temperature", "celsius", "farenheit",
               "celsius_label" , "farenheit_label", "temperature_label"]

    def __init__(self):
        BaseView.__init__(self, gladefile="faren",
                          delete_handler=self.quit_if_last)

    def get_temp(self):
        # Return the raw entry text, or None when the entry is empty.
        return self.temperature.get_text() or None

    def update_temp(self, farenheit, celsius):
        """Show both converted values with two decimals."""
        self.farenheit.set_text("%.2f" % farenheit)
        self.celsius.set_text("%.2f" % celsius)

    def clear_temp(self):
        """Blank both result labels."""
        self.farenheit.set_text("")
        self.celsius.set_text("")
# Wire the view to its controller and enter the GTK main loop.
view = FarenView()
ctl = FarenControl(view)
view.show()
gtk.main()
|
Schevo/kiwi
|
examples/framework/faren/faren2.py
|
Python
|
lgpl-2.1
| 1,435
|
# -*- coding: utf-8 -*-
# Focus
# Copyright (C) 2010-2012 Grid Dynamics Consulting Services, Inc
# All Rights Reserved
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
import os
import flask
class SessionData(dict, flask.sessions.SessionMixin):
    """Dict-backed session payload; SessionMixin supplies the session
    flags Flask expects (new/modified/permanent)."""
    pass
class Session(flask.sessions.SessionInterface):
    """Flask session interface storing session dicts in ``app.cache``
    (a memcache-style backend), keyed by "<remote_addr>@<cookie id>"."""

    session_class = SessionData

    def open_session(self, app, request):
        """Load the session for this request, minting a new id if needed.

        NOTE(review): the ids are stored on ``self``, i.e. on the single
        interface instance shared by all requests -- this assumes requests
        are processed one at a time; confirm there is no concurrency here.
        """
        self.cookie_session_id = request.cookies.get(
            app.session_cookie_name, None)
        self.session_new = False
        if self.cookie_session_id is None:
            # No cookie: mint a fresh random id.
            # NOTE(review): ``bytes.encode('hex')`` is Python-2-only;
            # binascii.hexlify would be required on Python 3.
            self.cookie_session_id = os.urandom(40).encode('hex')
            self.session_new = True
        # Cache key combines the client address with the cookie id.
        self.memcache_session_id = '@'.join(
            [
                request.remote_addr,
                self.cookie_session_id
            ]
        )
        session = app.cache.get(self.memcache_session_id) or {}
        # Write back immediately so the cache entry's lifetime is refreshed.
        app.cache.set(self.memcache_session_id, session)
        return self.session_class(session)

    def save_session(self, app, session, response):
        """Persist the session; set the cookie only for new sessions."""
        expires = self.get_expiration_time(app, session)
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        app.cache.set(self.memcache_session_id, session)
        if self.session_new:
            response.set_cookie(
                app.session_cookie_name, self.cookie_session_id, path=path,
                expires=expires, httponly=httponly,
                secure=secure, domain=domain)
|
altai/focus
|
focus/flask_memcache_session.py
|
Python
|
lgpl-2.1
| 2,177
|
# -*- coding: utf-8 -*-
#
# gensim documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 17 13:42:21 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
html_theme = 'gensim_theme'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.napoleon']
autoclass_content = "both"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'indextoc'
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {'index': './_templates/indexcontent.html'}
# General information about the project.
project = u'gensim'
copyright = u'2009-now, Radim Řehůřek <me(at)radimrehurek.com>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.1'
# The full version, including alpha/beta/rc tags.
release = '2.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#main_colour = "#ffbbbb"
# Theme option overrides (all currently disabled; the gensim_theme
# defaults apply). Uncomment and adjust individual entries to tweak.
html_theme_options = {
#"rightsidebar": "false",
#"stickysidebar": "true",
#"bodyfont": "'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', 'sans-serif'",
#"headfont": "'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', 'sans-serif'",
#"sidebarbgcolor": "#ffffff",
#"footerbgcolor": "#771111",
#"relbarbgcolor": "#993333",
#"sidebartextcolor": "#000000",
#"sidebarlinkcolor": "#330000",
#"codebgcolor": "#fffff0",
#"headtextcolor": "#000080",
#"headbgcolor": "#f0f0ff",
#"bgcolor": "#ffffff",
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['.']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "gensim"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = ''
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {} #{'index': ['download.html', 'globaltoc.html', 'searchbox.html', 'indexsidebar.html']}
#html_sidebars = {'index': ['globaltoc.html', 'searchbox.html']}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
html_domain_indices = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'gensimdoc'
html_show_sphinx = False
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gensim.tex', u'gensim Documentation', u'Radim Řehůřek', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
robotcator/gensim
|
docs/src/conf.py
|
Python
|
lgpl-2.1
| 7,200
|
try:
import cccanvas
except:
import sys
sys.path.insert(0,"./../.libs/")
import cccanvas
import page
import gtk
import fifteen_grid
class Fifteen(page.Page):
    """Demo page showing a fifteen-puzzle rendered on a CCCanvas."""

    def scramble(self, button):
        # "Scramble" button callback: shuffle the puzzle grid.
        self.canvas.scramble()

    def __init__(self):
        self.title = 'Fifteen'
        # main widget
        self.widget = gtk.VBox(False, 6)
        self.widget.set_border_width(6)
        self.widget.show()
        # Create CCCanvas item and widget with this item as root
        self.view = cccanvas.ViewWidget()
        self.canvas = fifteen_grid.FifteenGrid(self.view.get_style().font_desc)
        self.view.set_root(self.canvas)
        self.view.show()
        self.widget.pack_start(self.view)
        # Button that triggers scramble() above.
        button = gtk.Button("_Scramble")
        button.connect("clicked", self.scramble)
        button.show()
        self.widget.pack_start(button)
|
herzi/ccc
|
python/demo/fifteen.py
|
Python
|
lgpl-2.1
| 903
|
# Copyright (C) 2013 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Jan Safranek <jsafrane@redhat.com>
# -*- coding: utf-8 -*-
""" Module for FormatProvider."""
from openlmi.storage.FormatProvider import FormatProvider
import blivet.formats.lvmpv
import openlmi.common.cmpi_logging as cmpi_logging
class LMI_PVFormatProvider(FormatProvider):
    """
    Provider of LVM Physical Volume (lvmpv) format on a device.

    (Docstring fixed: it previously said "MD RAID format", a copy-paste
    from another provider.)
    """
    @cmpi_logging.trace_method
    def __init__(self, *args, **kwargs):
        super(LMI_PVFormatProvider, self).__init__(
                "LMI_PVFormat",
                "lvmpv",
                *args, **kwargs)

    @cmpi_logging.trace_method
    def provides_format(self, device, fmt):
        """Return True if *fmt* is an LVM physical volume format."""
        if isinstance(fmt, blivet.formats.lvmpv.LVMPhysicalVolume):
            return True
        return False

    @cmpi_logging.trace_method
    def get_instance(self, env, model, fmt=None):
        """
        Get instance, filling in the PV's UUID when available.
        """
        model = super(LMI_PVFormatProvider, self).get_instance(
                env, model, fmt)
        if not fmt:
            fmt = self.get_format_for_id(model['Name'])
        # FIX: removed leftover debug statement ``print fmt`` which wrote
        # to the provider's stdout on every request.
        if fmt.uuid:
            model['UUID'] = fmt.uuid
        return model
|
jsafrane/openlmi-storage
|
src/openlmi/storage/LMI_PVFormatProvider.py
|
Python
|
lgpl-2.1
| 1,946
|
import sys
import os
import argparse
import docker
import json
import subprocess
import getpass
import requests
import pipes
import pwd
import time
import math
import Atomic.mount as mount
import Atomic.util as util
import Atomic.satellite as satellite
import Atomic.pulp as pulp
try:
from subprocess import DEVNULL # pylint: disable=no-name-in-module
except ImportError:
DEVNULL = open(os.devnull, 'wb')
# Cache of the docker image list, shared by find_repo_tag().
IMAGES = []

def convert_size(size):
    """Render a byte count as a human-readable string using decimal units."""
    if size > 0:
        units = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
        magnitude = int(math.floor(math.log(size, 1000)))
        scaled = round(size / math.pow(1000, magnitude), 2)
        if scaled > 0:
            return '%s %s' % (scaled, units[magnitude])
    return '0B'

def find_repo_tag(d, id):
    """Return the first repo tag of image *id*, caching d.images() in the
    module-level IMAGES list on first use; "" when not found."""
    global IMAGES
    if not IMAGES:
        IMAGES = d.images()
    for entry in IMAGES:
        if entry["Id"] == id:
            return entry["RepoTags"][0]
    return ""
class Atomic(object):
    """Implements the atomic CLI verbs (run/install/update/...) on top of a
    local docker daemon."""
    # `atomic install` template: privileged container with the host root
    # mounted at /host and per-container conf/log/data directories.
    # ${NAME}, ${IMAGE}, ${CONFDIR}, ${LOGDIR}, ${DATADIR} are expanded by
    # the shell from the environment built by cmd_env (commands run with
    # shell=True).
    INSTALL_ARGS = ["/usr/bin/docker", "run",
                    "-t",
                    "-i",
                    "--rm",
                    "--privileged",
                    "-v", "/:/host",
                    "--net=host",
                    "--ipc=host",
                    "--pid=host",
                    "-e", "HOST=/host",
                    "-e", "NAME=${NAME}",
                    "-e", "IMAGE=${IMAGE}",
                    "-v", "${CONFDIR}:/etc/${NAME}",
                    "-v", "${LOGDIR}:/var/log/${NAME}",
                    "-v", "${DATADIR}:/var/lib/${NAME}",
                    "-e", "CONFDIR=${CONFDIR}",
                    "-e", "LOGDIR=${LOGDIR}",
                    "-e", "DATADIR=${DATADIR}",
                    "--name", "${NAME}",
                    "${IMAGE}"]
    # `atomic run --spc` template: "super privileged container".
    SPC_ARGS = ["/usr/bin/docker", "run",
                "-t",
                "-i",
                "--rm",
                "--privileged",
                "-v", "/:/host",
                "-v", "/run:/run",
                "-v", "/etc/localtime:/etc/localtime",
                "--net=host",
                "--ipc=host",
                "--pid=host",
                "-e", "HOST=/host",
                "-e", "NAME=${NAME}",
                "-e", "IMAGE=${IMAGE}",
                "${IMAGE}"]
    # Default `atomic run` template used when the image has no RUN label.
    RUN_ARGS = ["/usr/bin/docker", "create",
                "-t",
                "-i",
                "--name", "${NAME}",
                "${IMAGE}"]
    def __init__(self):
        """Connect to the local docker daemon and reset per-command state."""
        self.d = docker.Client()
        self.name = None      # container name (derived from image if unset)
        self.image = None     # image reference currently operated on
        self.spc = False      # "super privileged container" mode
        self.inspect = None   # cached docker inspect result
        self.force = False
        self._images = []     # cache for get_images()
def writeOut(self, output, lf="\n"):
sys.stdout.flush()
sys.stdout.write(output + lf)
def get_label(self, label, image=None):
inspect = self._inspect_image(image)
cfg = inspect.get("Config", None)
if cfg:
labels = cfg.get("Labels", [])
if labels and label in labels:
return labels[label]
return ""
    def force_delete_containers(self):
        """Remove every container based on self.image (normalized to a
        :latest tag when none is given)."""
        if self._inspect_image():
            image = self.image
            if self.image.find(":") == -1:
                image += ":latest"
            for c in self.d.containers(all=True):
                if c["Image"] == image:
                    self.d.remove_container(c["Id"], force=True)
    def update(self):
        """Pull the latest self.image; with --force, first remove all
        containers based on it."""
        if self.force:
            self.force_delete_containers()
        return subprocess.check_call(["/usr/bin/docker", "pull", self.image])
    def pull(self):
        """Pull self.image via the docker API, echoing progress lines."""
        prevstatus = ""
        for line in self.d.pull(self.image, stream=True):
            bar = json.loads(line)
            status = bar['status']
            # Echo each distinct status string once.
            if prevstatus != status:
                self.writeOut(status, "")
            if 'id' not in bar:
                continue
            if status == "Downloading":
                self.writeOut(bar['progress'] + " ")
            elif status == "Extracting":
                self.writeOut("Extracting: " + bar['id'])
            elif status == "Pull complete":
                pass
            elif status.startswith("Pulling"):
                self.writeOut("Pulling: " + bar['id'])
            prevstatus = status
        self.writeOut("")
def push(self):
prevstatus = ""
# Priority order:
# If user passes in a password/username/url/ssl flag, use that
# If not, read from the config file
# If still nothing, ask again for registry user/pass
if self.args.pulp:
config = pulp.PulpConfig().config()
if self.args.satellite:
config = satellite.SatelliteConfig().config()
if (self.args.satellite | self.args.pulp):
if not self.args.username:
self.args.username = config["username"]
if not self.args.password:
self.args.password = config["password"]
if not self.args.url:
self.args.url = config["url"]
if self.args.verify_ssl is None:
self.args.verify_ssl = config["verify_ssl"]
if self.args.verify_ssl is None:
self.args.verify_ssl = False
if not self.args.username:
self.args.username = util.input("Registry Username: ")
if not self.args.password:
self.args.password = getpass.getpass("Registry Password: ")
if (self.args.satellite | self.args.pulp):
if not self.args.url:
self.args.url = util.input("URL: ")
if self.args.pulp:
return pulp.push_image_to_pulp(self.image, self.args.url,
self.args.username,
self.args.password,
self.args.verify_ssl,
self.d)
if self.args.satellite:
if not self.args.activation_key:
self.args.activation_key = util.input("Activation Key: ")
if not self.args.repo_id:
self.args.repo_id = util.input("Repository ID: ")
return satellite.push_image_to_satellite(self.image,
self.args.url,
self.args.username,
self.args.password,
self.args.verify_ssl,
self.d,
self.args.activation_key,
self.args.repo_id,
self.args.debug)
else:
self.d.login(self.args.username, self.args.password)
for line in self.d.push(self.image, stream=True):
bar = json.loads(line)
status = bar['status']
if prevstatus != status:
self.writeOut(status, "")
if 'id' not in bar:
continue
if status == "Uploading":
self.writeOut(bar['progress'] + " ")
elif status == "Push complete":
pass
elif status.startswith("Pushing"):
self.writeOut("Pushing: " + bar['id'])
prevstatus = status
def set_args(self, args):
self.args = args
try:
self.image = args.image
except:
pass
try:
self.command = args.command
except:
self.command = None
try:
self.spc = args.spc
except:
self.spc = False
try:
self.name = args.name
except:
pass
try:
self.force = args.force
except:
pass
if not self.name and self.image is not None:
self.name = self.image.split("/")[-1].split(":")[0]
if self.spc:
self.name = self.name + "-spc"
def _getconfig(self, key, default=None):
assert self.inspect is not None
cfg = self.inspect.get("Config")
if cfg is None:
return default
val = cfg.get(key, default)
if val is None:
return default
return val
    def _get_cmd(self):
        """Image Cmd, defaulting to a shell."""
        return self._getconfig("Cmd", ["/bin/sh"])
    def _get_labels(self):
        """Image labels mapping (empty list when absent)."""
        return self._getconfig("Labels", [])
    def _interactive(self):
        """True when the image attaches stdin, stdout and stderr."""
        return (self._getconfig("AttachStdin", False) and
                self._getconfig("AttachStdout", False) and
                self._getconfig("AttachStderr", False))
    def _running(self):
        """Handle `atomic run` for an already-running container: exec the
        requested command (or the image Cmd) inside it, or just display
        what would be run."""
        if self._interactive():
            cmd = ["/usr/bin/docker", "exec", "-t", "-i", self.name]
            if self.command:
                cmd += self.command
            else:
                cmd += self._get_cmd()
            if self.args.display:
                return self.display(cmd)
            else:
                return subprocess.check_call(cmd, stderr=DEVNULL)
        else:
            if self.command:
                if self.args.display:
                    return self.writeOut("/usr/bin/docker exec -t -i %s %s" %
                                         (self.name, self.command))
                else:
                    return subprocess.check_call(
                        ["/usr/bin/docker", "exec", "-t", "-i", self.name] +
                        self.command, stderr=DEVNULL)
            else:
                if not self.args.display:
                    self.writeOut("Container is running")
def _start(self):
if self._interactive():
if self.command:
subprocess.check_call(
["/usr/bin/docker", "start", self.name],
stderr=DEVNULL)
return subprocess.check_call(
["/usr/bin/docker", "exec", "-t", "-i", self.name] +
self.command)
else:
return subprocess.check_call(
["/usr/bin/docker", "start", "-i", "-a", self.name],
stderr=DEVNULL)
else:
if self.command:
subprocess.check_call(
["/usr/bin/docker", "start", self.name],
stderr=DEVNULL)
return subprocess.check_call(
["/usr/bin/docker", "exec", "-t", "-i", self.name] +
self.command)
else:
return subprocess.check_call(
["/usr/bin/docker", "start", self.name],
stderr=DEVNULL)
    def _inspect_image(self, image=None):
        """Inspect *image* (default self.image); returns None when docker
        has no such image and raises IOError when the daemon is down."""
        try:
            if image:
                return self.d.inspect_image(image)
            return self.d.inspect_image(self.image)
        except docker.errors.APIError:
            pass
        except requests.exceptions.ConnectionError as e:
            raise IOError("Unable to communicate with docker daemon: %s\n" %
                          str(e))
        return None
    def _inspect_container(self):
        """Inspect the container self.name; None when it does not exist,
        IOError when the daemon is unreachable."""
        try:
            return self.d.inspect_container(self.name)
        except docker.errors.APIError:
            pass
        except requests.exceptions.ConnectionError as e:
            raise IOError("Unable to communicate with docker daemon: %s\n" %
                          str(e))
        return None
def _get_args(self, label):
labels = self._get_labels()
for l in [label, label.lower(), label.capitalize(), label.upper()]:
if l in labels:
return labels[l].split()
return None
    def _check_latest(self):
        """Warn on stdout when the container was created from an older
        build of its image than the one currently installed."""
        inspect = self._inspect_image()
        if inspect and inspect["Id"] != self.inspect["Image"]:
            sys.stdout.write(
                "The '%(name)s' container is using an older version of the "
                "installed\n'%(image)s' container image. If you wish to use "
                "the newer image,\nyou must either create a new container "
                "with a new name or\nuninstall the '%(name)s' container."
                "\n\n# atomic uninstall --name %(name)s %(image)s\n\nand "
                "create new container on the '%(image)s' image.\n\n# atomic "
                "update --force %(image)s\n\n removes all containers based on "
                "an image." % {"name": self.name, "image": self.image})
def container_run_command(self):
command = "%s run " % sys.argv[0]
if self.spc:
command += "--spc "
if self.name != self.image:
command += "--name %s " % self.name
command += self.image
return command
    def run(self):
        """`atomic run`: exec into or start an existing container, or create
        one from the image's RUN label (falling back to RUN_ARGS)."""
        missing_RUN = False
        self.inspect = self._inspect_container()
        if self.inspect:
            self._check_latest()
            # Container exists
            if self.inspect["State"]["Running"]:
                return self._running()
            elif not self.args.display:
                return self._start()
        # Container does not exist
        self.inspect = self._inspect_image()
        if not self.inspect:
            if self.args.display:
                return self.display("Need to pull %s" % self.image)
            self.update()
            self.inspect = self._inspect_image()
        if self.spc:
            if self.command:
                args = self.SPC_ARGS + self.command
            else:
                args = self.SPC_ARGS + self._get_cmd()
            cmd = self.gen_cmd(args)
        else:
            args = self._get_args("RUN")
            if args:
                args += self.command
            else:
                missing_RUN = True
                if self.command:
                    args = self.RUN_ARGS + self.command
                else:
                    args = self.RUN_ARGS + self._get_cmd()
            cmd = self.gen_cmd(args)
            self.display(cmd)
            if self.args.display:
                return
            if missing_RUN:
                # No RUN label: `docker create` the container, then start it.
                subprocess.check_call(cmd, env=self.cmd_env,
                                      shell=True, stderr=DEVNULL,
                                      stdout=DEVNULL)
                return self._start()
        # NOTE(review): on the RUN-label path the command was already
        # displayed above, so it is shown twice here — confirm intended.
        self.display(cmd)
        if not self.args.display:
            subprocess.check_call(cmd, env=self.cmd_env, shell=True)
    def stop(self):
        """Run the image's STOP label command (if any), then stop the
        container through the docker API."""
        self.inspect = self._inspect_container()
        if self.inspect is None:
            self.inspect = self._inspect_image()
            if self.inspect is None:
                raise ValueError("Container/Image '%s' does not exists" %
                                 self.name)
        args = self._get_args("STOP")
        if args:
            cmd = self.gen_cmd(args)
            self.display(cmd)
            subprocess.check_call(cmd, env=self.cmd_env, shell=True)
        # Container exists
        try:
            if self.inspect["State"]["Running"]:
                self.d.stop(self.name)
        except KeyError:
            pass
    def _rpmostree(self, *args):
        """Replace this process with rpm-ostree running *args*."""
        os.execl("/usr/bin/rpm-ostree", "rpm-ostree", *args)
    def host_status(self):
        """`atomic host status`."""
        self._rpmostree("status")
    def host_upgrade(self):
        """`atomic host upgrade`, optionally rebooting."""
        argv = ["upgrade"]
        if self.args.reboot:
            argv.append("--reboot")
        self._rpmostree(*argv)
    def host_rollback(self):
        """`atomic host rollback`, optionally rebooting."""
        argv = ["rollback"]
        if self.args.reboot:
            argv.append("--reboot")
        self._rpmostree(*argv)
    def host_rebase(self):
        """`atomic host rebase` onto args.refspec."""
        argv = ["rebase", self.args.refspec]
        self._rpmostree(*argv)
    def uninstall(self):
        """`atomic uninstall`: stop/remove the container, run the image's
        UNINSTALL label command if present, then remove the image."""
        self.inspect = self._inspect_container()
        if self.inspect and self.force:
            self.force_delete_containers()
        if self.name != self.image:
            try:
                # Attempt to remove container, if it exists just return
                self.d.stop(self.name)
                self.d.remove_container(self.name)
                return
            except:
                # On exception attempt to remove image
                pass
        try:
            self.d.stop(self.image)
            self.d.remove_container(self.image)
        except docker.errors.APIError:
            pass
        self.inspect = self._inspect_image()
        if not self.inspect:
            raise ValueError("Image '%s' is not installed" % self.image)
        args = self._get_args("UNINSTALL")
        if args:
            cmd = self.gen_cmd(args + list(map(pipes.quote, self.args.args)))
            self.display(cmd)
            subprocess.check_call(cmd, env=self.cmd_env, shell=True)
        self.writeOut("/usr/bin/docker rmi %s" % self.image)
        subprocess.check_call(["/usr/bin/docker", "rmi", self.image])
@property
def cmd_env(self):
env = {'NAME': self.name,
'IMAGE': self.image,
'CONFDIR': "/etc/%s" % self.name,
'LOGDIR': "/var/log/%s" % self.name,
'DATADIR': "/var/lib/%s" % self.name}
if hasattr(self.args, 'env') and self.args.env:
for i in self.args.env:
pair = i.split('=', 1)
name = pair[0]
env[name] = ""
if len(pair) > 1:
env[name] = pair[1]
elif name in os.environ:
env[name] = os.environ[name]
if hasattr(self.args, 'opt1') and self.args.opt1:
env['OPT1'] = self.args.opt1
if hasattr(self.args, 'opt2') and self.args.opt2:
env['OPT2'] = self.args.opt2
if hasattr(self.args, 'opt3') and self.args.opt3:
env['OPT3'] = self.args.opt3
default_uid = "0"
with open("/proc/self/loginuid") as f:
default_uid = f.readline()
if "SUDO_UID" in os.environ:
env["SUDO_UID"] = os.environ["SUDO_UID"]
else:
env["SUDO_UID"] = default_uid
if 'SUDO_GID' in os.environ:
env['SUDO_GID'] = os.environ['SUDO_GID']
else:
try:
env['SUDO_GID'] = str(pwd.getpwuid(int(env["SUDO_UID"]))[3])
except:
env["SUDO_GID"] = default_uid
return env
def gen_cmd(self, cargs):
args = []
for c in cargs:
if c == "IMAGE":
args.append(self.image)
continue
if c == "IMAGE=IMAGE":
args.append("IMAGE=%s" % self.image)
continue
if c == "NAME=NAME":
args.append("NAME=%s" % self.name)
continue
if c == "NAME":
args.append(self.name)
continue
args.append(c)
return " ".join(args)
    def info(self):
        """
        Retrieve and print all LABEL information for a given image.
        """
        def _no_such_image():
            raise ValueError('Could not find any image matching "{}".'
                             ''.format(self.args.image))
        inspection = None
        if not self.args.force_remote_info:
            try:
                inspection = self.d.inspect_image(self.args.image)
            except docker.errors.APIError:
                # No such image locally, but fall back to remote
                pass
        if inspection is None:
            try:
                inspection = self.d.inspect_image(self.args.image, remote=True)
            except docker.errors.APIError:
                # image does not exist on any configured registry
                _no_such_image()
            except TypeError:  # pragma: no cover
                # If a user doesn't have remote-inspection, setting remote=True
                # above will raise TypeError.
                # TODO: remove if remote inspection is accepted into docker
                # But we should error if the user specifically requested remote
                if self.args.force_remote_info:
                    raise ValueError('Your docker daemon does not support '
                                     'remote inspection.')
                else:
                    _no_such_image()
        # By this point, inspection cannot be "None"
        try:
            labels = inspection['Config']['Labels']
        except TypeError:  # pragma: no cover
            # Some images may not have a 'Labels' key.
            raise ValueError('{} has no label information.'
                             ''.format(self.args.image))
        if labels is not None:
            for label in labels:
                self.writeOut('{0}: {1}'.format(label, labels[label]))
def dangling(self, image):
if image == "<none>":
return "*"
return " "
def images(self):
if self.args.prune:
cmd = "/usr/bin/docker images --filter dangling=true -q".split()
for i in subprocess.check_output(cmd, stderr=DEVNULL).split():
self.d.remove_image(i, force=True)
return
self.writeOut(" %-35s %-19s %.12s %-19s %-10s" %
("REPOSITORY", "TAG", "IMAGE ID", "CREATED",
"VIRTUAL SIZE"))
for image in self.d.images():
repo, tag = image["RepoTags"][0].split(":")
self.writeOut(
"%s%-35s %-19s %.12s %-19s %-12s" %
(self.dangling(repo), repo, tag, image["Id"],
time.strftime("%F %H:%M",
time.localtime(image["Created"])),
convert_size(image["VirtualSize"])))
    def install(self):
        """`atomic install`: run the image's INSTALL label command (or the
        default INSTALL_ARGS template) with extra CLI args appended."""
        self.inspect = self._inspect_image()
        if not self.inspect:
            if self.args.display:
                self.display("Need to pull %s" % self.image)
                return
            self.update()
            self.inspect = self._inspect_image()
        args = self._get_args("INSTALL")
        if not args:
            args = self.INSTALL_ARGS
        cmd = self.gen_cmd(args + list(map(pipes.quote, self.args.args)))
        self.display(cmd)
        if not self.args.display:
            return subprocess.check_call(cmd, env=self.cmd_env, shell=True)
def help(self):
if os.path.exists("/usr/bin/rpm-ostree"):
return _('Atomic Management Tool')
else:
return _('Atomic Container Tool')
    def print_spc(self):
        """Default `atomic run --spc` docker command, as one string."""
        return " ".join(self.SPC_ARGS)
    def print_run(self):
        """Default `atomic run` docker command, as one string."""
        return " ".join(self.RUN_ARGS)
    def print_install(self):
        """Default `atomic install` docker command, as one string."""
        return " ".join(self.INSTALL_ARGS) + " /usr/bin/INSTALLCMD"
    def print_uninstall(self):
        """Default `atomic uninstall` docker command, as one string."""
        return " ".join(self.INSTALL_ARGS) + " /usr/bin/UNINSTALLCMD"
    def _get_layer(self, image):
        """Build a layer dict (Id/Name/Version/Tag/Parent) for *image*
        (an image id or reference) via docker inspect."""
        def get_label(label):
            # Reads the rebound `image` (the inspect dict) below.
            return self.get_label(label, image["Id"])
        image = self._inspect_image(image)
        if not image:
            raise ValueError("Image '%s' does not exist" % self.image)
        version = ("%s-%s-%s" % (get_label("Name"), get_label("Version"),
                                 get_label("Release"))).strip("-")
        return({"Id": image['Id'], "Name": get_label("Name"),
                "Version": version, "Tag": find_repo_tag(self.d, image['Id']),
                "Parent": image['Parent']})
def get_layers(self):
layers = []
layer = self._get_layer(self.image)
layers.append(layer)
while layer["Parent"] != "":
layer = self._get_layer(layer["Parent"])
layers.append(layer)
return layers
    def _get_image(self, image):
        """Convert a raw docker image dict into Id/Name/Version/Tag."""
        def get_label(label):
            return self.get_label(label, image["Id"])
        # NOTE(review): _get_layer strips "-" from the version string but
        # this strips ":" — confirm which is intended.
        return {"Id": image['Id'], "Name": get_label("Name"),
                "Version": ("%s-%s-%s" % (get_label("Name"),
                                          get_label("Version"),
                                          get_label("Release"))).strip(":"),
                "Tag": image["RepoTags"][0]}
def get_images(self):
if len(self._images) > 0:
return self._images
images = self.d.images()
for image in images:
self._images.append(self._get_image(image))
return self._images
    def verify(self):
        """Compare each named layer of self.image against the newest local
        image of the same Name; return a warning string, or "" if all
        layers are current."""
        def get_label(label):
            val = self._get_args(label)
            if val:
                return val[0]
            return ""
        self.inspect = self._inspect_image()
        if not self.inspect:
            raise ValueError("Image %s does not exist" % self.image)
        current_name = get_label("Name")
        version = ""
        if current_name:
            version = "%s-%s-%s" % (current_name, get_label("Version"),
                                    get_label("Release"))
        name = None
        buf = ""
        for layer in self.get_layers():
            if name == layer["Name"]:
                continue
            name = layer["Name"]
            if len(name) > 0:
                for i in self.get_images():
                    if i["Name"] == name:
                        # NOTE(review): versions are compared as plain
                        # strings, not parsed RPM versions ("10" < "9"
                        # here) — confirm this is acceptable.
                        if i["Version"] > layer["Version"]:
                            buf = ("Image '%s' contains a layer '%s' that is "
                                   "out of date.\nImage version '%s' is "
                                   "available, current version could contain "
                                   "vulnerabilities." % (self.image,
                                                         layer["Version"],
                                                         i["Version"]))
                            buf += ("You should rebuild the '%s' image using "
                                    "docker build." % (self.image))
                        break
        return buf
    def print_verify(self):
        """Print the verify() report."""
        self.writeOut(self.verify())
    def mount(self):
        """Mount an image/container filesystem at args.mountpoint
        (root only)."""
        if os.geteuid() != 0:
            raise ValueError("This command must be run as root.")
        try:
            options = [opt for opt in self.args.options.split(',') if opt]
            mount.DockerMount(self.args.mountpoint,
                              self.args.live).mount(self.args.image, options)
            # only need to bind-mount on the devicemapper driver
            if self.d.info()['Driver'] == 'devicemapper':
                mount.Mount.mount_path(os.path.join(self.args.mountpoint,
                                                    "rootfs"),
                                       self.args.mountpoint, bind=True)
        except mount.MountError as dme:
            raise ValueError(str(dme))
    def unmount(self):
        """Undo mount(): unbind the devicemapper overlay, then unmount
        (root only)."""
        if os.geteuid() != 0:
            raise ValueError("This command must be run as root.")
        try:
            dev = mount.Mount.get_dev_at_mountpoint(self.args.mountpoint)
            # If there's a bind-mount over the directory, unbind it.
            if dev.rsplit('[', 1)[-1].strip(']') == '/rootfs' \
                    and self.d.info()['Driver'] == 'devicemapper':
                mount.Mount.unmount_path(self.args.mountpoint)
            return mount.DockerMount(self.args.mountpoint).unmount()
        except mount.MountError as dme:
            raise ValueError(str(dme))
    def version(self):
        """Return layer version info for self.image (the full chain with
        --recurse, else only the top layer), pulling the image if absent."""
        def get_label(label):
            # NOTE(review): this inner helper appears unused here.
            val = self._get_args(label)
            if val:
                return val[0]
            return ""
        try:
            self.inspect = self.d.inspect_image(self.image)
        except docker.errors.APIError:
            self.update()
            self.inspect = self.d.inspect_image(self.image)
        if self.args.recurse:
            return self.get_layers()
        else:
            return [self._get_layer(self.image)]
def print_version(self):
for layer in self.version():
version = layer["Version"]
if layer["Version"] == '':
version = "None"
self.writeOut("%s %s %s" % (layer["Id"], version, layer["Tag"]))
    def display(self, cmd):
        """Echo *cmd* through the shell so its ${VAR} placeholders expand
        against cmd_env."""
        subprocess.check_call(
            "/bin/echo \"" + cmd + "\"", env=self.cmd_env, shell=True)
def SetFunc(function):
    """Return an argparse Action class that stores *function* itself (not
    the command-line value) into the destination attribute."""
    def _store(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, function)
    return type('customAction', (argparse.Action,), {'__call__': _store})
|
praiskup/atomic
|
Atomic/atomic.py
|
Python
|
lgpl-2.1
| 28,477
|
#! /usr/bin/env python
# Regression test for openturns RandomMixture: builds mixtures whose exact
# distribution is known in closed form, compares the numerical results to
# the references, then exercises the simplification mechanism.
# (Python 2 script: uses print statements; errors are reported, not raised.)
from openturns import *
TESTPREAMBLE()
RandomGenerator().SetSeed(0)
try :
    # Create a collection of test-cases and the associated references
    numberOfTests = 3
    testCases = list()
    references = DistributionCollection(numberOfTests)
    # Sum of two uniforms -> triangular.
    testCases.append(DistributionCollection(2))
    testCases[0][0] = Uniform(-1.0, 3.0)
    testCases[0][1] = Uniform(-1.0, 3.0)
    references[0] = Triangular(-2.0, 2.0, 6.0)
    # Sum of normals -> normal.
    testCases.append(DistributionCollection(3))
    testCases[1][0] = Normal()
    testCases[1][1] = Normal(1.0, 2.0)
    testCases[1][2] = Normal(-2.0, 2.0)
    references[1] = Normal(-1.0, 3.0)
    # Sum of three unit exponentials -> Gamma(3).
    testCases.append(DistributionCollection(3))
    testCases[2][0] = Exponential()
    testCases[2][1] = Exponential()
    testCases[2][2] = Exponential()
    references[2] = Gamma(3.0, 1.0, 0.0)
    print "testCases=", testCases
    print "references=", references
    for testIndex in range(len(testCases)):
        # Instanciate one distribution object
        distribution = RandomMixture(testCases[testIndex])
        distribution.setBlockMin(5)
        distribution.setBlockMax(20)
        distributionReference = references[testIndex]
        print "Distribution ", repr(distribution)
        print "Distribution ", distribution
        # Is this distribution elliptical ?
        print "Elliptical = ", distribution.isElliptical()
        # Is this distribution continuous ?
        print "Continuous = ", distribution.isContinuous()
        # Test for realization of distribution
        oneRealization = distribution.getRealization()
        print "oneRealization=", oneRealization
        # Test for sampling
        size = 10000
        oneSample = distribution.getSample( size )
        print "oneSample first=", oneSample[0], " last=", oneSample[size - 1]
        print "mean=", oneSample.computeMean()
        print "covariance=", oneSample.computeCovariance()
        # Define a point
        point = NumericalPoint(distribution.getDimension(), 0.5)
        print "Point= ", point
        # Show PDF and CDF of point
        eps = 1e-5
        DDF = distribution.computeDDF(point)
        print "ddf =", DDF
        print "ddf (ref)=", distributionReference.computeDDF(point)
        PDF = distribution.computePDF(point)
        print "pdf =%.6f" % PDF
        # Cross-check the PDF against a centered finite difference of the CDF.
        print "pdf (FD)=%.6f" % ((distribution.computeCDF( point + NumericalPoint(1, eps) ) - distribution.computeCDF( point + NumericalPoint(1, -eps) )) / (2.0 * eps))
        print "pdf (ref)=%.6f" % distributionReference.computePDF(point)
        CDF = distribution.computeCDF( point )
        print "cdf =%.6f" % CDF
        print "cdf (ref)=%.6f" % distributionReference.computeCDF(point)
        CF = distribution.computeCharacteristicFunction( point[0] )
        print "characteristic function=%.6f + %.6fi" % (CF.real, CF.imag)
        LCF = distribution.computeLogCharacteristicFunction( point[0] )
        print "log characteristic function=%.6f + %.6fi" % (LCF.real, LCF.imag)
        quantile = distribution.computeQuantile( 0.95 )
        print "quantile =", quantile
        print "quantile (ref)=", distributionReference.computeQuantile(0.95)
        print "cdf(quantile)=%.6f" % distribution.computeCDF(quantile)
        mean = distribution.getMean()
        print "mean =", mean
        print "mean (ref)=", distributionReference.getMean()
        standardDeviation = distribution.getStandardDeviation()
        print "standard deviation =", standardDeviation
        print "standard deviation (ref)=", distributionReference.getStandardDeviation()
        skewness = distribution.getSkewness()
        print "skewness =", skewness
        print "skewness (ref)=", distributionReference.getSkewness()
        kurtosis = distribution.getKurtosis()
        print "kurtosis =", kurtosis
        print "kurtosis (ref)=", distributionReference.getKurtosis()
        covariance = distribution.getCovariance()
        print "covariance =", covariance
        print "covariance (ref)=", distributionReference.getCovariance()
        parameters = distribution.getParametersCollection()
        print "parameters=", parameters
        print "Standard representative=", distribution.getStandardRepresentative()
        # Specific to this distribution
        weights = distribution.getWeights()
        print "weights=", weights
        distribution.setWeights(2.0 * weights)
        print "new weights=", distribution.getWeights()
        print "blockMin=", distribution.getBlockMin()
        print "blockMax=", distribution.getBlockMax()
        print "maxSize=", distribution.getMaxSize()
        print "alpha=", distribution.getAlpha()
        print "beta=", distribution.getBeta()
    # Tests of the simplification mechanism
    weights = NumericalPoint(0)
    coll = DistributionCollection(0)
    coll.add(Dirac(0.5))
    weights.add(1.0)
    coll.add(Normal(1.0, 2.0))
    weights.add(2.0)
    coll.add(Normal(2.0, 1.0))
    weights.add(-3.0)
    coll.add(Uniform(-2.0, 2.0))
    weights.add(-1.0)
    coll.add(Exponential(2.0, -3.0))
    weights.add(1.5)
    # Nest one mixture inside another to force simplification.
    rm = RandomMixture(coll, weights)
    coll.add(rm)
    weights.add(-2.5)
    coll.add(Gamma(3.0, 4.0, -2.0))
    weights.add(2.5)
    distribution = RandomMixture(coll, weights)
    print "distribution=", repr(distribution)
    print "distribution=", distribution
    # Sample the PDF around the mean, shifted by one standard deviation.
    for i in range(10):
        x = distribution.getMean()[0] + (-3.0 + 6.0 * i / 9.0) + distribution.getStandardDeviation()[0]
        print "pdf(", x, ")=%.6f" % distribution.computePDF(x)
except :
    import sys
    print "t_RandomMixture_std.py", sys.exc_type, sys.exc_value
|
dbarbier/privot
|
python/test/t_RandomMixture_std.py
|
Python
|
lgpl-3.0
| 5,911
|
# -*- coding: utf-8 -*-
########################## Copyrights and license ############################
# #
# Copyright 2011-2015 Christian Lupien <christian.lupien@usherbrooke.ca> #
# #
# This file is part of pyHegel. http://github.com/lupien/pyHegel #
# #
# pyHegel is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# pyHegel is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with pyHegel. If not, see <http://www.gnu.org/licenses/>. #
# #
##############################################################################
from __future__ import absolute_import
import os.path
import sys
import numpy as np
from ..instruments_base import BaseInstrument, scpiDevice, ChoiceIndex,\
wait_on_event, BaseDevice, MemoryDevice, ReadvalDev,\
_retry_wait, locked_calling, CHECKING
from ..instruments_registry import register_instrument, add_to_instruments
clr = None
Int32 = None
Int64 = None
IntPtr = None
Marshal = None
OlBase = None
OlException = None
_assembly_Version = None
_delayed_imports_done = False
def _delayed_imports():
    """Import pythonnet/.NET and the Data Translation OpenLayers assembly
    on first use, publishing the handles through the module globals above.

    Raises RuntimeError when pythonnet or the Omni software is missing.
    """
    global _delayed_imports_done
    if not _delayed_imports_done:
        global clr, Int32, Int64, IntPtr, Marshal, OlBase, OlException
        # Bug fix: _assembly_Version was missing from the global statement,
        # so the version string below was assigned to a function local and
        # the module-level _assembly_Version stayed None forever.
        global _assembly_Version
        try:
            import clr
            from System import Int32, Int64, IntPtr
            from System.Runtime.InteropServices import Marshal
        except ImportError as exc:
            raise RuntimeError('Unable to import windows clr/System: %s'%exc)
        try:
            _datatranslation_dir = r'C:\Program Files (x86)\Data Translation\DotNet\OLClassLib\Framework 2.0 Assemblies'
            if not os.path.isdir(_datatranslation_dir):
                # Version 6.1.0 has a slightly different directory name:
                _datatranslation_dir += ' (32-bit)'
            if _datatranslation_dir not in sys.path:
                sys.path.append(_datatranslation_dir)
            import OpenLayers.Base as OlBase
            from OpenLayers.Base import OlException
            _assembly_Version = OlBase.Utility.AssemblyVersion.ToString()
        except ImportError as exc:
            raise RuntimeError(
                "Unable to load data_translation Module. Make sure pythonnet and "
                "Data Translation Omni software are installed: %s"%exc)
        _delayed_imports_done = True
@add_to_instruments
def find_all_Ol():
    """
    This returns a list of all the connected Data Translation boxes.
    (Ol stands for OpenLayer which is the protocol it uses.)
    Every entry in the returned list is a tuple with the device
    name followed by a dictionnary of information.
    """
    _delayed_imports()
    devmgr = OlBase.DeviceMgr.Get()
    card_present = devmgr.HardwareAvailable()
    ret = []
    if not card_present:
        return ret
    card_list = list(devmgr.GetDeviceNames())
    for c in card_list:
        dev = devmgr.GetDevice(c)
        hwinfo = dev.GetHardwareInfo()
        # BoardId is year (1 or 2 digits), week (1 or 2 digist), test station (1 digit)
        # sequence # (3 digits)
        dev_info = dict(drv_name=dev.DriverName, drv_version=dev.DriverVersion,
                        name=dev.DeviceName, model=dev.BoardModelName,
                        version=hwinfo.DeviceId, serial=hwinfo.BoardId)
        # Release the .NET device handle; we only keep the info dict.
        dev.Dispose()
        ret.append((c, dev_info))
    return ret
class Ol_Device(scpiDevice):
    """
    This device is for all OpenLayer object properties
    """
    def __init__(self, setstr=None, getstr=None, autoget=True, **kwarg):
        # Derive the missing get/set string from the one provided:
        # "Prop" -> set "Prop={val}", get "Prop";
        # "Prop={val}" -> get "Prop" (strip the assignment part).
        if getstr is None and autoget and setstr is not None:
            if '{val}' not in setstr:
                getstr = setstr
                setstr = setstr+'={val}'
            else:
                getstr = setstr.replace('={val}', '')
        super(Ol_Device, self).__init__(setstr=setstr, getstr=getstr, autoget=autoget, **kwarg)
class Ol_ChoiceIndex(ChoiceIndex):
    """ChoiceIndex built from a .NET enum type: maps enum names to the
    enum's values and allows reverse lookup."""
    def __init__(self, OlDataType, normalize=False):
        names = list(OlDataType.GetNames(OlDataType))
        values = list(OlDataType.GetValues(OlDataType))
        d = dict(zip(names, values))
        super(Ol_ChoiceIndex, self).__init__(d, normalize=normalize)
    def __getitem__(self, key_val):
        """ For a key this returns the value. For value it returns the corresponding key.
            It checks for keys First. This should be ok as long as keys and values
            don't have overlap (integers vs strings)
        """
        if key_val in self.dict:
            return super(Ol_ChoiceIndex, self).__getitem__(key_val)
        else:
            return self.keys[self.index(key_val)]
#######################################################
## DataTranslation instrument
#######################################################
#@register_instrument('Data Translation', 'DT9847-3-1', 7.0.0.12)
@register_instrument('Data Translation', 'DT9847-3-1')
#@register_instrument('Data Translation', 'DT9837-C', '6.7.4.28')
@register_instrument('Data Translation', 'DT9837-C')
class DataTranslation(BaseInstrument):
def __init__(self, dev_name=0):
"""
To initialize a device, give it the device name as returned
by find_all_Ol(), or the integer to use as an index in that
list (defaults to 0).
Only one process at a time can access this type of instrument.
"""
_delayed_imports()
devmgr = OlBase.DeviceMgr.Get()
all_Ol = find_all_Ol()
if CHECKING():
raise RuntimeError('You cannot load DataTranslation in checking mode')
try:
name, info = all_Ol[dev_name]
except TypeError:
all_names = [n for n,i in all_Ol]
try:
name, info = all_Ol[all_names.index(dev_name)]
except ValueError:
raise IndexError, 'The device requested is not there. dev_name string not found'
except IndexError:
raise IndexError, 'The device requested is not there. dev_name too large (or no box detected).'
self._name = name
self.info = info
dev = devmgr.GetDevice(name)
self._dev = dev
self._idn_string = 'Data Translation,%s,%s,%s'%(dev.BoardModelName, dev.GetHardwareInfo().BoardId, dev.DriverVersion)
self._num_in = dev.GetNumSubsystemElements(OlBase.SubsystemType.AnalogInput)
if self._num_in < 1:
raise ValueError, 'No input available for ', name
self._coupling_type = Ol_ChoiceIndex(OlBase.CouplingType)
self._cursrc_type = Ol_ChoiceIndex(OlBase.ExcitationCurrentSource)
self._dataflow_type = Ol_ChoiceIndex(OlBase.DataFlow)
self._io_type = Ol_ChoiceIndex(OlBase.IOType)
self._sync_type = Ol_ChoiceIndex(OlBase.SynchronizationModes)
self._trig_type = Ol_ChoiceIndex(OlBase.TriggerType)
self._buffer_state = Ol_ChoiceIndex(OlBase.OlBuffer.BufferState)
self._sub_state = Ol_ChoiceIndex(OlBase.SubsystemBase.States)
# We hard code the first element here. TODO check _num_in
ai = dev.AnalogInputSubsystem(0)
self._analog_in = ai
in_supports = dict(single=ai.SupportsSingleValue, continuous=ai.SupportsContinuous,
single_ended=ai.SupportsSingleEnded, differential=ai.SupportsDifferential,
dc_coupl=ai.SupportsDCCoupling, ac_coupl=ai.SupportsACCoupling,
current_src=ai.SupportsInternalExcitationCurrentSrc,
adj_gain=ai.SupportsProgrammableGain,
bin_enc=ai.SupportsBinaryEncoding, two_compl_enc=ai.SupportsTwosCompEncoding,
sync=ai.SupportsSynchronization,
simultaneous_start=ai.SupportsSimultaneousStart, simultaneous_SH=ai.SupportsSimultaneousSampleHold,
buffering=ai.SupportsBuffering, in_process=ai.SupportsInProcessFlush)
self.in_supports = in_supports
in_info = dict(max_freq=ai.Clock.MaxFrequency, min_freq=ai.Clock.MinFrequency,
max_single_ch=ai.MaxSingleEndedChannels, fifo=ai.FifoSize,
Nchan=ai.NumberOfChannels, NGains=ai.NumberOfSupportedGains,
gains=list(ai.SupportedGains), exc_currents=list(ai.SupportedExcitationCurrentValues),
resolution=ai.Resolution, volt_range=[ai.VoltageRange.Low, ai.VoltageRange.High])
self.in_info = in_info
self.in_trig_info = {'level': ai.Trigger.Level,
'type': self._trig_type[ai.Trigger.TriggerType],
'threshold_ch': ai.Trigger.ThresholdTriggerChannel}
self.in_ref_trig_info = {'level': ai.ReferenceTrigger.Level,
'type': self._trig_type[ai.ReferenceTrigger.TriggerType],
'threshold_ch': ai.ReferenceTrigger.ThresholdTriggerChannel,
'post_count':ai.ReferenceTrigger.PostTriggerScanCount}
all_channels = [ai.SupportedChannels[i] for i in range(ai.SupportedChannels.Count)]
self.all_channels = all_channels
self._update_all_channels_info(init=True)
# Note that DT9837C actually advertizes 2 output system
# The second one might be used internally for trigger threshold
# But changing it directly does not seem to have any effect.
self._num_out = dev.GetNumSubsystemElements(OlBase.SubsystemType.AnalogOutput)
if self._num_out < 1:
raise ValueError, 'No output available for ', name
# We hard code the first element here. TODO check _num_in
ao = dev.AnalogOutputSubsystem(0)
self._analog_out = ao
# TODO: Here I assume a single Channel and make it work in single mode
ao.DataFlow=self._dataflow_type['SingleValue']
ao.Config()
#Make sure ai is in continuous mode
ai.DataFlow=self._dataflow_type['Continuous']
self._inbuffer = None
# See also System.AssemblyLoadEventHandler which instantiates an delegate
ai.SynchronousBufferDone = True
#TODO: figure out why this causes a crash
#ai.BufferDoneEvent += self._delegate_handler
# init the parent class
BaseInstrument.__init__(self)
self._async_mode = 'acq_run'
def idn(self):
return self._idn_string
    def _update_all_channels_info(self, init=False):
        """Refresh the cached per-channel info dicts from the hardware.

        With init=True the all_channels_info list is (re)built from scratch
        (gain defaults to 1.); otherwise only the 'coupling' and
        'current_src' entries are re-read from each channel.
        """
        if CHECKING():
            # Note that all_channels_info coupling and current_src key will not
            # be updated properly. This will affect file headers when under checking
            return
        if init:
            gain = 1.
            self.all_channels_info = [{'coupling':self._coupling_type[c.Coupling],
                                       'name': c.Name, 'gain': gain,
                                       'current_src':self._cursrc_type[c.ExcitationCurrentSource],
                                       'num':c.PhysicalChannelNumber,
                                       'type':self._io_type[c.IOType]} for c in self.all_channels]
        else:
            for info, ch in zip(self.all_channels_info, self.all_channels):
                info['coupling'] = self._coupling_type[ch.Coupling]
                info['current_src'] = self._cursrc_type[ch.ExcitationCurrentSource]
    def __del__(self):
        # Dispose every .NET object explicitly; AttributeError is swallowed
        # because some attributes may not exist if __init__ failed part way.
        print 'Deleting DataTranslation', self
        try:
            self._inbuffer.Dispose()
        except AttributeError:
            pass
        try:
            self._analog_out.Dispose()
        except AttributeError:
            pass
        try:
            self._analog_in.Dispose()
        except AttributeError:
            pass
        try:
            # Dispose the device itself last, after its subsystems.
            self._dev.Dispose()
        except AttributeError:
            pass
def init(self,full = False):
if full:
self.output.set(0.)
    @locked_calling
    def _output_setdev(self, val):
        # Write val (in volts) immediately to analog output channel 0
        # (the subsystem is configured in SingleValue mode in __init__).
        self._analog_out.SetSingleValueAsVolts(0, val)
    @locked_calling
    def write(self, string):
        """Execute *string* as an attribute statement on self.

        SECURITY NOTE: this exec's the given string; only safe for trusted,
        local callers (it is the scpiDevice-style write interface).
        """
        if CHECKING():
            return
        exec('self.'+string)
    @locked_calling
    def ask(self, string, raw=False, chunk_size=None):
        """Evaluate *string* as an attribute expression on self and return it.

        SECURITY NOTE: this eval's the given string; only safe for trusted,
        local callers.
        """
        # raw, chunk_size is not used here but is needed by scpiDevice methods
        if CHECKING():
            return ''
        return eval('self.'+string)
    @locked_calling
    def _async_trig(self):
        # Arm the asynchronous mechanism in the base class, then start the
        # acquisition immediately.
        super(DataTranslation, self)._async_trig()
        self.run()
    def _async_detect(self, max_time=.5): # 0.5 s max by default
        """Poll until the analog input is no longer running, or *max_time* s.

        Returns True when the acquisition is finished (always True when
        under CHECKING).
        """
        if CHECKING():
            return True
        func = lambda: not self._analog_in.IsRunning
        return _retry_wait(func, timeout=max_time, delay=0.05)
        # Also ai.State
        #return instrument.wait_on_event(self._run_finished, check_state=self, max_time=max_time)
@locked_calling
def abort(self):
if CHECKING():
return
self._analog_in.Abort()
def _clean_channel_list(self):
clist = self.channel_list.getcache()
clist = list(set(clist)) # use set to remove duplicates
clist.sort() # order in place
self.channel_list.set(clist)
return clist
    @staticmethod
    def _delegate_handler(source, args):
        # .NET event callback; currently unused because hooking
        # BufferDoneEvent crashes (see the comment in __init__).
        print 'My handler Called!', source, args
    @locked_calling
    def run(self):
        """Program the hardware channel list, (re)allocate the input buffer
        when its required size changed, queue it and start the acquisition."""
        clist = self._clean_channel_list()
        if len(clist) == 0:
            raise ValueError, 'You need to have at least one channel selected (see channel_list)'
        if CHECKING():
            return
        self._analog_in.ChannelList.Clear()
        for i,c in enumerate(clist):
            #self._analog_in.ChannelList.Add(self.all_channels[c].PhysicalChannelNumber)
            self._analog_in.ChannelList.Add(self.all_channels_info[c]['num'])
            self._analog_in.ChannelList[i].Gain = self.all_channels_info[c]['gain']
            #self._analog_in.ChannelList.Add(OlBase.ChannelListEntry(self.all_channels[c])
        self._analog_in.Config()
        # Total buffer size: samples per channel times number of channels.
        wanted_size = int(self.nb_samples.getcache() * len(clist))
        if self._inbuffer is not None:
            if self._inbuffer.BufferSizeInSamples != wanted_size:
                # Size changed: release the old .NET buffer before reallocating.
                self._inbuffer.Dispose()
                self._inbuffer = None
        if self._inbuffer is None:
            self._inbuffer = OlBase.OlBuffer(wanted_size, self._analog_in)
        self._analog_in.BufferQueue.QueueBuffer(self._inbuffer)
        self._analog_in.Start()
    @locked_calling
    def _current_config(self, dev_obj=None, options={}):
        """Return the configuration strings written to data file headers."""
        # Re-read coupling/current source so the header matches the hardware.
        self._update_all_channels_info()
        extra = ['AssemblyVersion=%r'%_assembly_Version, 'boxname=%r'%self._name,
                 'cardinfo=%r'%self.info, 'all_channel_info=%r'%self.all_channels_info]
        base = self._conf_helper('nb_samples', 'in_clock', 'channel_list',
                                 'in_trig_mode', 'in_trig_level', 'in_trig_threshold_ch',
                                 'in_reftrig_mode', 'in_reftrig_level', 'in_reftrig_threshold_ch',
                                 'in_reftrig_count', 'output', options)
        return extra+base
    def _fetch_getformat(self, **kwarg):
        """Build the fetch format: one column per selected channel.

        Single-sample acquisitions graph every channel; longer acquisitions
        use a tuple multi (2D data) and no graph list.
        """
        clist = self._clean_channel_list()
        #unit = kwarg.get('unit', 'default')
        #xaxis = kwarg.get('xaxis', True)
        #ch = kwarg.get('ch', None)
        multi = []
        for c in clist:
            multi.append(self.all_channels_info[c]['name'])
        fmt = self.fetch._format
        if self.nb_samples.getcache() == 1:
            fmt.update(multi=multi, graph=range(len(clist)))
        else:
            fmt.update(multi=tuple(multi), graph=[])
        #fmt.update(multi=multi, graph=[], xaxis=xaxis)
        return BaseDevice.getformat(self.fetch, **kwarg)
    def _fetch_getdev(self):
        """Return the acquired data as a float ndarray.

        Returns None when no buffer has been acquired yet.  For multiple
        channels with more than one sample, the result is reshaped and
        transposed to shape (num_channels, nb_samples).
        """
        clist = self._clean_channel_list()
        if self._inbuffer is None:
            return None
        #This conversion is much faster than doing
        # v=array(list(buf.GetDataAsVolts()))
        buf = self._inbuffer
        fullsize = buf.BufferSizeInSamples
        validsize = buf.ValidSamples
        v=np.ndarray(validsize, dtype=float)
        # Copy the .NET volts array straight into the numpy memory, no python loop.
        Marshal.Copy(buf.GetDataAsVolts(), 0, IntPtr.op_Explicit(Int64(v.ctypes.data)), len(v))
        num_channel = len(clist)
        if num_channel != 1 and self.nb_samples.getcache() != 1:
            v.shape = (-1, num_channel)
            v = v.T
        return v
    def _create_devs(self):
        """Create all the pyHegel devices (clock, triggers, gains, fetch...)."""
        self.nb_samples = MemoryDevice(1024, min=1, max=1024*1024*100)
        self.in_clock = Ol_Device('_analog_in.Clock.Frequency', str_type = float, setget=True,
                                  min=self.in_info['min_freq'], max=self.in_info['max_freq'])
        self.channel_list = MemoryDevice([0])
        self.in_current_ch = MemoryDevice(0,min=0, max=self.in_info['Nchan'])
        def devChOption(*arg, **kwarg):
            # Helper: an Ol_Device bound to the currently selected channel
            # (in_current_ch) through the ch option.
            options = kwarg.pop('options', {}).copy()
            options.update(ch=self.in_current_ch)
            app = kwarg.pop('options_apply', ['ch'])
            kwarg.update(options=options, options_apply=app)
            return Ol_Device(*arg, **kwarg)
        # These are changed when doing ai.Config() or starting an acq with ai.GetOneBuffer(c1, length, timeout_ms)
        # timeout_ms can be -1 to disable it.
        self.coupling = devChOption('all_channels[{ch}].Coupling', choices=self._coupling_type)
        self.exc_current_src = devChOption('all_channels[{ch}].ExcitationCurrentSource', choices=self._cursrc_type)
        # I don't think there is a way to set the Gain when doing GetOneBuffer
        self.gain = devChOption('all_channels_info[{ch}]["gain"]', str_type=float, choices=self.in_info['gains'])
        #Trigger starts the acquisition
        self.in_trig_mode = Ol_Device('_analog_in.Trigger.TriggerType', choices=self._trig_type)
        self.in_trig_level = Ol_Device('_analog_in.Trigger.Level',
                                       str_type=float,
                                       min=self.in_info['volt_range'][0], max=self.in_info['volt_range'][1])
        self.in_trig_threshold_ch = Ol_Device('_analog_in.Trigger.ThresholdTriggerChannel',
                                              str_type=int,
                                              min=0, max=self.in_info['Nchan'])
        # The reference trigger will stop acquisition after ScanCount
        # It does not handle TTL
        self.in_reftrig_mode = Ol_Device('_analog_in.ReferenceTrigger.TriggerType', choices=self._trig_type)
        self.in_reftrig_level = Ol_Device('_analog_in.ReferenceTrigger.Level',
                                          str_type=float,
                                          min=self.in_info['volt_range'][0], max=self.in_info['volt_range'][1])
        self.in_reftrig_threshold_ch = Ol_Device('_analog_in.ReferenceTrigger.ThresholdTriggerChannel',
                                                 str_type=int,
                                                 min=0, max=self.in_info['Nchan'])
        self.in_reftrig_count = Ol_Device('_analog_in.ReferenceTrigger.PostTriggerScanCount',
                                          str_type=int, min=0)
        self._devwrap('output', autoinit=False)
        self._devwrap('fetch', autoinit=False, trig=True)
        self.readval = ReadvalDev(self.fetch)
        # This needs to be last to complete creation
        super(type(self),self)._create_devs()
    def force_get(self):
        # Since the user cannot change values except through this program,
        # the caches are always up to date, so re-reading is not needed.
        pass
    @locked_calling
    def set_simple_acq(self, nb_samples=1024, channel_list=[0], clock=None):
        """
        nb_sample is the number of samples per channel to acquire
        channel_list is either a single channel number (0 based)
                      a list of channel numbers or None which means all
                      available channels
        clock can be 'min', 'max'(default) or any number in between.
              if it is None, it will keep the current clock.
        You can also set the trig variables (trig_level, trig_mode, trig_ref_src)
        """
        # NOTE(review): the [0] default list is shared between calls; it is
        # stored (not mutated) here, and _clean_channel_list later replaces
        # it with a fresh list, so this is currently harmless.
        self.nb_samples.set(nb_samples)
        if clock == 'max':
            clock = self.in_clock.max
            print 'Clock set to', clock, 'Hz'
        elif clock == 'min':
            clock = self.in_clock.min
            print 'Clock set to', clock, 'Hz'
        if clock is not None:
            self.in_clock.set(clock)
        if channel_list is None:
            channel_list = range(self.in_info['Nchan'])
        if type(channel_list) != list:
            channel_list = [channel_list]
        self.channel_list.set(channel_list)
# TODO: Handle x scales
#Events: BufferDoneEvent, DriverRunTimeErrorEvent, QueueDoneEvent, QueueStoppedEvent
|
lupien/pyHegel
|
pyHegel/instruments/data_translation.py
|
Python
|
lgpl-3.0
| 21,757
|
from PyQt5.QtCore import pyqtSignal, QPoint, Qt, QEvent, QObject
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QLabel, QStyleOption, QStyle, QPushButton, QTextEdit, QPlainTextEdit, QMainWindow
from PyQt5.QtGui import QIcon, QPixmap, QPainter, QMouseEvent, QColor, QKeyEvent
import qtawesome as qta # https://github.com/spyder-ide/qtawesome
import os, sys
try:
import utils_ui
except Exception:
from COMTool import utils_ui
class TitleBar(QWidget):
    """Custom title bar for frameless windows.

    Shows an optional icon, the window title, a pin (always-on-top) button
    and the min/max/close buttons, using macOS button placement on darwin.

    NOTE(review): the mutable defaults brothers=[] and widgets=[[], []] are
    shared between calls; they are only iterated here, never mutated.
    """
    def __init__(self, parent, icon=None, title="", height=35,
                 btnIcons = None,
                 brothers=[],
                 widgets=[[], []]
                 ) -> None:
        super().__init__()
        self._height = height
        self.parent = parent
        # Default qtawesome icon names: min, [max, restore], close, [pin, pinned].
        if not btnIcons:
            btnIcons = [
                "mdi.window-minimize",
                ["mdi.window-maximize", "mdi.window-restore"],
                "mdi.window-close",
                ["ph.push-pin-bold", "ph.push-pin-fill"]
            ]
        self.btnIcons = btnIcons
        layout = QHBoxLayout()
        # With brother widgets, stack the bar above them in a vertical layout
        # and grow the fixed height accordingly.
        if brothers:
            rootLayout = QVBoxLayout()
            rootLayout.setContentsMargins(0,0,0,0)
            rootLayout.setSpacing(0)
            widget = QWidget()
            widget.setProperty("class", "TitleBar")
            widget.setLayout(layout)
            widget.setFixedHeight(height)
            rootLayout.addWidget(widget)
            for w in brothers:
                rootLayout.addWidget(w)
                self._height += w.height()
            self.setLayout(rootLayout)
        else:
            self.setLayout(layout)
        self.setFixedHeight(self._height)
        layout.setContentsMargins(0,0,0,0)
        layout.setSpacing(0)
        iconWidget = None
        if icon and os.path.exists(icon):
            iconWidget = QLabel()
            iconWidget.setPixmap(QPixmap(icon).scaled(height, height))
            iconWidget.setProperty("class", "icon")
        self.min = QPushButton("")
        self.max = QPushButton("")
        self.close = QPushButton("")
        self.top = QPushButton("")
        utils_ui.setButtonIcon(self.min, btnIcons[0])
        utils_ui.setButtonIcon(self.max, btnIcons[1][0])
        utils_ui.setButtonIcon(self.close, btnIcons[2])
        utils_ui.setButtonIcon(self.top, btnIcons[3][0])
        self.title = QLabel(title)
        widgets_l, widgets_r = widgets
        # macOS: window buttons on the left, title centered.
        if sys.platform.startswith("darwin"):
            layout.addWidget(self.close)
            layout.addWidget(self.min)
            layout.addWidget(self.max)
            for w in widgets_r:
                layout.addWidget(w)
            layout.addStretch(0)
            if iconWidget:
                layout.addWidget(iconWidget)
            layout.addWidget(self.title)
            layout.addStretch(0)
            layout.addWidget(self.top)
            for w in widgets_l:
                layout.addWidget(w)
        else:
            # Other platforms: title on the left, window buttons on the right.
            if iconWidget:
                layout.addWidget(iconWidget)
            layout.addWidget(self.title)
            layout.addWidget(self.top)
            for w in widgets_l:
                layout.addWidget(w)
            layout.addStretch(0)
            for w in widgets_r:
                layout.addWidget(w)
            layout.addWidget(self.min)
            layout.addWidget(self.max)
            layout.addWidget(self.close)
        self.min.setFixedHeight(height)
        self.max.setFixedHeight(height)
        self.close.setFixedHeight(height)
        self.top.setFixedHeight(height)
        self.min.setMinimumWidth(height)
        self.max.setMinimumWidth(height)
        self.close.setMinimumWidth(height)
        self.top.setMinimumWidth(height)
        self.min.setProperty("class", "min")
        self.max.setProperty("class", "max")
        self.close.setProperty("class", "close")
        self.title.setProperty("class", "title")
        self.top.setProperty("class", "top")
        self.close.clicked.connect(lambda : parent.close())
        self.max.clicked.connect(lambda : self.onSetMaximized(fromMaxBtn=True))
        self.min.clicked.connect(lambda : parent.setWindowState(Qt.WindowNoState) if parent.windowState() == Qt.WindowMinimized else parent.setWindowState(Qt.WindowMinimized))
        self.top.clicked.connect(self.onSetTop)
        self.setProperty("class", "TitleBar")
    def mouseDoubleClickEvent(self, event):
        # Double-click toggles maximize, like a native title bar.
        if event.buttons() == Qt.LeftButton:
            self.onSetMaximized()
    def onSetMaximized(self, isMax = None, fromMaxBtn=False, fullScreen = False):
        """Toggle normal/maximized/fullscreen and sync the max button icon.

        When isMax is given, only the button icon is updated (no state change).
        """
        if not isMax is None:
            if isMax:
                utils_ui.setButtonIcon(self.max, self.btnIcons[1][1])
            else:
                utils_ui.setButtonIcon(self.max, self.btnIcons[1][0])
            return
        status = Qt.WindowNoState
        if fullScreen:
            if self.parent.windowState() != Qt.WindowFullScreen:
                status = Qt.WindowFullScreen
        elif self.parent.windowState() == Qt.WindowNoState:
            if fromMaxBtn and sys.platform.startswith("darwin"): # mac max button to full screen
                status = Qt.WindowFullScreen
            else:
                status = Qt.WindowMaximized
        if status == Qt.WindowNoState:
            utils_ui.setButtonIcon(self.max, self.btnIcons[1][0])
        else:
            utils_ui.setButtonIcon(self.max, self.btnIcons[1][1])
        self.parent.setWindowState(status)
        # Hide the bar entirely while in full screen.
        if status == Qt.WindowFullScreen:
            self.hide()
        else:
            self.show()
    def onSetTop(self):
        """Toggle the always-on-top window flag and restyle the pin button."""
        flags = self.parent.windowFlags()
        # Changing window flags hides the window, so re-show it afterwards.
        needShow = self.parent.isVisible()
        if flags & Qt.WindowStaysOnTopHint:
            flags &= (~Qt.WindowStaysOnTopHint)
            self.parent.setWindowFlags(flags)
            utils_ui.setButtonIcon(self.top, self.btnIcons[3][0])
            self.top.setProperty("class", "top")
        else:
            flags |= Qt.WindowStaysOnTopHint
            self.parent.setWindowFlags(flags)
            utils_ui.setButtonIcon(self.top, self.btnIcons[3][1])
            self.top.setProperty("class", "topActive")
        # Re-polish so the changed class property is picked up by the stylesheet.
        self.style().unpolish(self.top)
        self.style().polish(self.top)
        self.update()
        if needShow:
            self.parent.show()
    def paintEvent(self, event):
        # Required so stylesheet backgrounds are drawn on a plain QWidget subclass.
        opt = QStyleOption()
        opt.initFrom(self)
        p = QPainter(self)
        self.style().drawPrimitive(QStyle.PE_Widget, opt, p, self)
class EventFilter(QObject):
    """Application-wide event filter adding native move/resize behavior to
    the frameless windows registered through listenWindow()."""
    Margins = 5 # edge margin (px): hot zone used to detect resize edges
    # NOTE(review): these are class attributes, so all EventFilter instances
    # share the same window list and drag state -- confirm only one instance
    # is installed per application.
    windows = []
    _readyToMove = False
    _moving = False
    _resizeCursor = False
    def listenWindow(self, window):
        # Register a top-level widget whose window should be movable/resizable.
        self.windows.append(window)
    def _get_edges(self, pos, width, height, offset=0):
        """Return the Qt edge flags under *pos*, or 0 when not near a border."""
        edge = 0
        x, y = pos.x(), pos.y()
        if y <= self.Margins - offset and y >= 0:
            edge |= Qt.TopEdge
        if x <= self.Margins - offset and x >= 0:
            edge |= Qt.LeftEdge
        if x >= width - self.Margins + offset and x < width:
            edge |= Qt.RightEdge
        if y >= height - self.Margins + offset and y < height:
            edge |= Qt.BottomEdge
        return edge
    def _get_cursor(self, edges):
        """Map edge flags to a resize cursor shape.

        Returns Qt.ArrowCursor once when leaving a resize zone, and None when
        the cursor should be left untouched.
        """
        if edges == Qt.LeftEdge | Qt.TopEdge or edges == Qt.RightEdge | Qt.BottomEdge:
            self._resizeCursor = True
            return Qt.SizeFDiagCursor
        elif edges == Qt.RightEdge | Qt.TopEdge or edges == Qt.LeftEdge | Qt.BottomEdge:
            self._resizeCursor = True
            return Qt.SizeBDiagCursor
        elif edges == Qt.LeftEdge or edges == Qt.RightEdge:
            self._resizeCursor = True
            return Qt.SizeHorCursor
        elif edges == Qt.TopEdge or edges == Qt.BottomEdge:
            self._resizeCursor = True
            return Qt.SizeVerCursor
        if self._resizeCursor:
            self._resizeCursor = False
            return Qt.ArrowCursor
        return None
    def moveOrResize(self, window, pos, width, height):
        # Near a border: start a native resize; elsewhere: a native move
        # (never while in full screen).
        edges = self._get_edges(pos, width, height)
        if edges:
            if window.windowState() == Qt.WindowNoState:
                window.startSystemResize(edges)
        else:
            if window.windowState() != Qt.WindowFullScreen:
                window.startSystemMove()
    def eventFilter(self, obj, event):
        # print(obj, event.type(), obj.isWindowType(), QEvent.MouseMove)
        if obj.isWindowType():
            # Top-level window: update the cursor shape near the borders.
            if event.type() == QEvent.MouseMove and obj.windowState() == Qt.WindowNoState:
                cursor = self._get_cursor(self._get_edges(event.pos(), obj.width(), obj.height(), offset=1))
                if not cursor is None:
                    obj.setCursor(cursor)
            if event.type() == QEvent.TouchUpdate and not self._moving:
                self._moving = True
                self.moveOrResize(obj, event.pos(), obj.width(), obj.height())
        elif isinstance(event, QMouseEvent):
            if obj in self.windows:
                if event.button() == Qt.LeftButton :
                    if event.type() == QEvent.MouseButtonPress:
                        self._readyToMove = True
                # elif event.type() == QEvent.MouseButtonDblClick:
                #     print(obj, event.type(), event)
                elif event.type() == QEvent.MouseMove and self._readyToMove and not self._moving:
                    self._moving = True
                    self.moveOrResize(obj.windowHandle(), event.pos(), obj.width(), obj.height())
            if event.type() == QEvent.MouseButtonRelease or event.type() == QEvent.Move:
                self._readyToMove = False
                self._moving = False
        return False
class CustomTitleBarWindowMixin:
    """Mixin giving a QWidget/QMainWindow a custom TitleBar and frameless look.

    Inherit together with QMainWindow (or a QWidget subclass) and call
    ``CustomTitleBarWindowMixin.__init__(self, init=True)`` after the Qt base
    class is initialized.  With init=False (the default) nothing is set up.
    Removed: a large body of commented-out manual drag/resize handlers that
    duplicated what EventFilter already provides.
    """
    def __init__(self, titleBar = None, init = False):
        # init=False lets subclasses delay (or skip) the setup entirely.
        if not init:
            return
        # QMainWindow must host its content in a central widget; plain
        # QWidget subclasses lay out on themselves directly.
        isQMainWindow = False
        for base in self.__class__.__bases__:
            if base == QMainWindow:
                isQMainWindow = True
                break
        if isQMainWindow:
            self.root = QWidget()
            self.setCentralWidget(self.root)
        else:
            self.root = self
        self.root.setProperty("class", "customTilebarWindow")
        self.rootLayout = QVBoxLayout()
        # Title bar: use the provided one, or build a default.
        if titleBar:
            self.titleBar = titleBar
        else:
            self.titleBar = TitleBar(self, icon = "assets/logo.png", title="标题", height=35)
        self.contentWidget = QWidget()
        self.rootLayout.addWidget(self.titleBar)
        self.rootLayout.addWidget(self.contentWidget)
        self.root.setLayout(self.rootLayout)
        self.rootLayout.setContentsMargins(0, 0, 0, 0) # padding
        self.root.setMouseTracking(True)
        self.titleBar.setMouseTracking(True)
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowMinimizeButtonHint)
        self.init_vars()
    def changeEvent(self, event):
        # Keep the max/restore button icon in sync with the real window state.
        self.titleBar.onSetMaximized(isMax = self.isMaximized())
    def keyPressEvent(self, event):
        # F11 toggles full screen through the title bar helper.
        if event.key() == Qt.Key_F11:
            self.titleBar.onSetMaximized(fullScreen=True)
    def keyReleaseEvent(self, event):
        pass
    def init_vars(self):
        """Reset the drag/resize bookkeeping used by move/resize handling."""
        self._move_drag = False
        self._corner_drag = False
        self._bottom_drag = False
        self._right_drag = False
        self._padding = 6
        self.mPos = None
class TextEdit(QTextEdit):
    """QTextEdit that inserts four spaces instead of a tab character."""
    def __init__(self, parent=None):
        # Bug fix: the original passed parent=None to the base class, so the
        # widget was never parented to the *parent* argument it received.
        super(TextEdit, self).__init__(parent=parent)
    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Tab:
            cursor = self.textCursor()
            cursor.insertText("    ")
            return
        return QTextEdit.keyPressEvent(self, event)
class PlainTextEdit(QPlainTextEdit):
    """QPlainTextEdit with soft tabs (four spaces) and a Ctrl+S save hook.

    Assign a zero-argument callable to ``onSave`` (class or instance level)
    to be notified when Ctrl+S is pressed.
    """
    # staticmethod so self.onSave() works without an instance override
    # (the original bare class-level lambda raised TypeError when called
    # as a bound method).
    onSave = staticmethod(lambda: None)
    def __init__(self, parent=None):
        # Bug fix: the original called super(QPlainTextEdit, ...), skipping
        # QPlainTextEdit's own __init__, and always discarded *parent*.
        super(PlainTextEdit, self).__init__(parent=parent)
        self.keyControlPressed = False
    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Control:
            self.keyControlPressed = True
            return
        elif event.key() == Qt.Key_Tab:
            # Insert four spaces instead of a tab character.
            tc = self.textCursor()
            tc.insertText("    ")
            event.accept()
            return
        elif event.key() == Qt.Key_S and self.keyControlPressed:
            # Bug fix: only trigger the save hook while Ctrl is held
            # (previously a plain 's' also called onSave).
            self.onSave()
        return QPlainTextEdit.keyPressEvent(self, event)
    def keyReleaseEvent(self, event):
        # Bug fix: renamed from onKeyReleaseEvent, which Qt never calls,
        # so the Ctrl flag was never cleared on key release.
        if event.key() == Qt.Key_Control:
            self.keyControlPressed = False
    # Backward-compatible alias for any external caller of the old name.
    onKeyReleaseEvent = keyReleaseEvent
if __name__ == "__main__":
    import sys
    from PyQt5.QtWidgets import QApplication, QMainWindow, QVBoxLayout
    # Demo stylesheet for the custom title bar.
    style = '''
    QWidget {
    }
    .customTilebarWindow {
        background-color: white;
    }
    .TitleBar {
        background-color: #0b1722;
        color: white;
    }
    .TitleBar QPushButton {
        border: none;
    }
    .TitleBar .icon{
        margin-left: 5;
    }
    .TitleBar .title{
        margin-left: 5;
        color: white;
    }
    .TitleBar .top{
        margin-left: 5;
        color: white;
        border-radius: 20;
        background-color: #0b1722;
    }
    .TitleBar .top:hover{
        background-color: #273b4e;
    }
    .TitleBar .topActive{
        margin-left: 5;
        color: white;
        border-radius: 20;
        background-color: #273b4e;
    }
    .TitleBar .min{
        background-color: #53c22a;
        color: white;
    }
    .TitleBar .max{
        background-color: #e5bf28;
        color: white;
    }
    .TitleBar .close{
        background-color: #f45952;
        color: white;
    }
    .TitleBar .min:hover {
        background-color: #2ba13e;
    }
    .TitleBar .max:hover {
        background-color: #cf9001;
    }
    .TitleBar .close:hover {
        background-color: #df2f25;
    }
    QLabel {
        background-color: gray;
    }
    '''
    class MainWindow(QMainWindow, CustomTitleBarWindowMixin):
        _padding = 5
        def __init__(self) -> None:
            # Bug fix: the original super(QMainWindow, self).__init__()
            # dispatched to CustomTitleBarWindowMixin.__init__ with the
            # default init=False, so the Qt base was never initialized and
            # self.contentWidget was never created.  Initialize both
            # explicitly instead.
            QMainWindow.__init__(self)
            CustomTitleBarWindowMixin.__init__(self, init=True)
            label = QLabel("hello hhhhhhhhhhh")
            layout = QVBoxLayout()
            layout.addWidget(label)
            self.contentWidget.setLayout(layout)
            self.resize(800, 600)
            self.show()
    app = QApplication(sys.argv)
    app.setStyleSheet(style)
    w = MainWindow()
    # w2 = MainWindow()
    eventFilter = EventFilter()
    eventFilter.listenWindow(w)
    # eventFilter.listenWindow(w2)
    app.installEventFilter(eventFilter)
    app.exec_()
|
Neutree/COMTool
|
COMTool/widgets.py
|
Python
|
lgpl-3.0
| 17,999
|
import unittest
from risky import units
class UnitsTests(unittest.TestCase):
    """Tests for the risky.units time-to-dollars conversion helpers."""
    def setUp(self):
        # Give DOLLARS_PER_HOUR a defined value before each test.
        units.DOLLARS_PER_HOUR = 1.0
    def test_mustDefine_DOLLARS_PER_HOUR(self):
        # NOTE(review): bare reload() is a Python 2 builtin; under Python 3
        # this would need importlib.reload -- confirm the target interpreter.
        reload(units)
        with self.assertRaises(TypeError):
            units.hours(10.0)
    def test_hours_UsesDOLLARS_PER_HOUR(self):
        units.DOLLARS_PER_HOUR = 100.0
        self.assertEqual(100.0, units.hours(1.0))
        units.DOLLARS_PER_HOUR = 0.01
        self.assertEqual(0.01, units.hours(1.0))
    def test_weeks(self):
        # One week = 40 working hours.
        self.assertAlmostEqual(40.0, units.weeks(1))
    def test_months(self):
        # One month = 40 h/week * 52 weeks / 12 months.
        self.assertAlmostEqual(40.0 * 52.0 / 12.0, units.months(1))
    def test_years(self):
        # One year = 40 h/week * 52 weeks.
        self.assertAlmostEqual(40.0 * 52.0, units.years(1))
|
SimplyKnownAsG/risky
|
risky_tests/test_units.py
|
Python
|
lgpl-3.0
| 768
|
# SPDX-License-Identifier: LGPL-3.0-or-later
# dlb - a Pythonic build tool
# Copyright (C) 2020 Daniel Lutz <dlu-ch@users.noreply.github.com>
import testenv # also sets up module search paths
import sys
import dlb.ex
import dlb_contrib.msbatch
import unittest
class PathTest(unittest.TestCase):
    def test_fails_without_bat_suffix(self):
        # Accepted: a case-insensitive '.bat' suffix with a non-empty stem.
        for good in ('a.bat', 'a.bAt'):
            dlb_contrib.msbatch.BatchFilePath(good)
        # Rejected: no suffix, or an empty/degenerate stem.
        for bad in ('a', '.bat', '..bat'):
            with self.assertRaises(ValueError):
                dlb_contrib.msbatch.BatchFilePath(bad)
@unittest.skipIf(sys.platform != 'win32', 'requires MS Windows')
class BatchFileTest(testenv.TemporaryWorkingDirectoryTestCase):
    def test_batchfile_is_found(self):
        # Create batch files with tricky names (spaces, carets, non-ASCII),
        # then run 'a.bat' each time and check the exported environment.
        # NOTE(review): only 'a.bat' is ever executed even though every
        # file_name is created -- confirm this is intentional.
        # NOTE(review): '\n\r' looks like it was meant to be '\r\n' (CRLF) --
        # confirm the batch files still run correctly on Windows.
        for file_name in ['a.bat', 'a b.bat', 'a^b.bat', 'a ^b.bat', '统一码.bat']:
            with open(file_name, 'x', encoding='ascii') as f:
                f.write('cd %1\n\r')
                f.write('echo {"a": "b"} > env.json\n\r')
            with dlb.ex.Context():
                env = dlb_contrib.msbatch.RunEnvBatch(batch_file='a.bat').start().exported_environment
            self.assertEqual({'a': 'b'}, env)
    def test_fails_without_envvar_file(self):
        # A batch file that never writes env.json must produce a helpful error.
        open('a.bat', 'x').close()
        with self.assertRaises(Exception) as cm:
            with dlb.ex.Context():
                dlb_contrib.msbatch.RunEnvBatch(batch_file='a.bat').start()
        msg = (
            "exported environment file not found: 'env.json'\n"
            "  | create it in the batch file with 'python3 -m dlb_contrib.exportenv'"
        )
        self.assertEqual(msg, str(cm.exception))
|
dlu-ch/dlb
|
test/dlb_contrib/0/test_msbatch.py
|
Python
|
lgpl-3.0
| 1,806
|
#-*-coding:utf-8-*-
"""
@package bapp.tests.test_settings
@brief tests for bapp.settings
@author Sebastian Thiel
@copyright [GNU Lesser General Public License](https://www.gnu.org/licenses/lgpl.html)
"""
from __future__ import unicode_literals
from __future__ import division
from butility.future import str
__all__ = []
import bapp
from .base import with_application
from butility.tests import (TestCase,
with_rw_directory)
# try * import
from bapp.settings import *
from butility import OrderedDict
from bkvstore import (KeyValueStoreSchema,
RootKey)
class TestSettingsClient(PersistentApplicationSettingsMixin):
    """A client with a simple schema to test out its settings"""
    # NOTE(review): a plain string in __slots__ declares the single slot
    # '_settings_dir'; a tuple ('_settings_dir',) would be clearer.
    __slots__ = ('_settings_dir')
    # Schema rooted at 'dog' describing the persisted fields.
    _schema = KeyValueStoreSchema('dog', dict(name=str,
                                              meal=str,
                                              friends=list(),
                                              location=dict(x=float, y=float)))
    settings_id = lambda self: 'dog'
    def __init__(self, settings_dir):
        # settings_dir: directory object supporting the '/' path operator.
        self._settings_dir = settings_dir
    def _settings_path(self):
        # Settings file lives directly in the settings directory, named by
        # the serializer's file extension.
        return self._settings_dir / PersistentSettings.StreamSerializerType.file_extension
    def set_values(self):
        """set some values for serialization"""
        dog = self.settings()
        dog.name = 'roofus'
        dog.meal = 'bone'
        dog.friends = ['cat', 'bunny']
        dog.location.x = 1.0
        dog.location.y = 2.0
        self.save_settings()
    def assert_values(self):
        """Check that the values written by set_values() round-tripped."""
        dog = self.settings()
        assert dog.name == 'roofus'
        assert dog.meal == 'bone'
        assert dog.friends == ['cat', 'bunny']
        assert dog.location.x == 1.0
        assert dog.location.y == 2.0
# end class TestSettingsClient
class TestSettings(TestCase):
    """Integration tests for PersistentSettings and its settings client."""
    __slots__ = ()
    @with_rw_directory
    @with_application
    def test_settings(self, rw_dir):
        """Tests for settings and JSon serializer in the same moment"""
        target = rw_dir / 'foo' + PersistentSettings.StreamSerializerType.file_extension
        assert not target.isfile()
        schema_dict = OrderedDict()
        schema = KeyValueStoreSchema(RootKey, {'foo': dict(baz=int,
                                                           foz=float,),
                                               'bar': str,
                                               'ordered': list, })
        data = OrderedDict({'foo': OrderedDict(dict(baz=5, foz=4.0)),
                            'bar': 'hello',
                            'ordered': [1, 2]})
        # Data could come from an actual kvstore if we like
        settings = PersistentSettings(data, target, take_ownership=False)
        data.bar = 5
        assert settings.data().bar != data.bar, 'should have made a copy'
        settings_data = settings.value_by_schema(schema)
        data.bar = 'hello'
        assert settings_data.bar == data.bar
        assert settings_data.ordered == data.ordered
        settings_data.bar = 5
        settings_data.foo.baz = 10 # note the incorrect type ! Its tranformed to a string
        settings_data.ordered = [3, 4, 5]
        settings.set_value_by_schema(schema, settings_data)
        assert settings.changes()
        assert 'foz' not in settings.changes().foo
        assert settings.save_changes(open(target, 'w')) is settings, "It should be easy to save changes"
        assert settings.settings_data(open(target, 'r')), "should have something to work with"
        settings_data = settings.value_by_schema(schema)
        prev_size = target.stat().st_size
        assert target.isfile() and prev_size, "Should have created file by now"
        # changes should still be there, and file should be identical
        assert settings.changes() and settings.save_changes(open(target, 'w'))
        assert target.stat().st_size == prev_size
        # Now we pretend to be a in a new session and want to reload our settings
        new_settings = PersistentSettings(data, target, take_ownership=False)
        new_settings_data = new_settings.value_by_schema(schema)
        # NOTE: order is lost when diffing ! we use sets there ... . For now no problem
        # assert new_settings_data == settings_data
        assert new_settings_data.foo.baz == settings_data.foo.baz
        assert new_settings_data.bar == settings_data.bar
        assert new_settings_data.ordered == settings_data.ordered
        # Changes should be equivalent
        nsc = new_settings.changes()
        osc = settings.changes()
        assert set(nsc.keys()) == set(osc.keys()), "Order seems to change, but not the contents"
        assert nsc.foo == osc.foo
        assert nsc.bar == osc.bar
        assert nsc.ordered == osc.ordered
        # Now, instead of a space save, do a full one, which will cause all data to be written unconditionally.
        # This will forever override every base value
        settings.save_changes(open(target, 'w'), sparse=False)
        assert target.stat().st_size > prev_size
        # When reading it back, it should still be exactly the same, except that we will never see changes coming
        # through from the base
        new_settings = PersistentSettings(data, target, take_ownership=False)
        # we faithfully assume the other fields match as well
        assert new_settings.changes().ordered == settings.changes().ordered
    @with_rw_directory
    @with_application
    def test_client(self, rw_dir):
        """Test settings client"""
        # Round-trip: one client writes, a second client reads and verifies.
        client = TestSettingsClient(rw_dir)
        client.set_values()
        other_client = TestSettingsClient(rw_dir)
        other_client.assert_values()
|
Byron/bcore
|
src/python/bapp/tests/test_settings.py
|
Python
|
lgpl-3.0
| 5,753
|
#Fixtures.py
import pytest
from hydra_client.connection import RemoteJSONConnection
@pytest.fixture()
def client():
    """Pytest fixture: a RemoteJSONConnection authenticated as root."""
    connection = RemoteJSONConnection("http://localhost:8080/json")
    connection.login("root", "")
    return connection
|
hydraplatform/hydra-server
|
tests/fixtures.py
|
Python
|
lgpl-3.0
| 225
|
from tulip import *
from py2neo import *
import configparser
import os
from graphtulip.createPostCommentTagTlp import CreatePostCommentTagTlp
# Module-level configuration: neo4j credentials and exporter paths are read
# once at import time from config.ini (relative to the working directory).
config = configparser.ConfigParser()
config.read("config.ini")
# todo create a unique Createtlp to avoid code duplication
class CreateTagFullTlp(object):
    def __init__(self, value, start, end, force_fresh):
        """Connect to neo4j and prepare an empty tulip graph.

        value: stored as filter_occ (occurrence filter threshold --
        confirm exact semantics against create()).  start/end: date range.
        force_fresh: when truthy, intermediate .tlp files are rebuilt.
        """
        super(CreateTagFullTlp, self).__init__()
        print('Initializing')
        self.neo4j_graph = Graph(host=config['neo4j']['url'], user=config['neo4j']['user'], password=config['neo4j']['password'])
        self.tulip_graph = tlp.newGraph()
        self.tulip_graph.setName('opencare - tagToTag')
        # todo pass in parameters labels and colors
        self.labels = ["label", "label", "label"]
        self.colors = {"user_id": tlp.Color(51,122,183), "post_id": tlp.Color(92,184,92), "comment_id": tlp.Color(240, 173, 78), "tag_id": tlp.Color(200, 10, 10), "edges": tlp.Color(204, 204, 204)}
        self.filter_occ = value
        self.date_start = start
        self.date_end = end
        self.force_fresh = force_fresh
        # for normalisation
        self.nb_step = 100
# -----------------------------------------------------------
# the updateVisualization(centerViews = True) function can be called
# during script execution to update the opened views
# the pauseScript() function can be called to pause the script execution.
# To resume the script execution, you will have to click on the "Run script " button.
# the runGraphScript(scriptFile, graph) function can be called to launch another edited script on a tlp.Graph object.
# The scriptFile parameter defines the script name to call (in the form [a-zA-Z0-9_]+.py)
# the main(graph) function must be defined
# to run the script on the current graph
# -----------------------------------------------------------
# Can be used with nodes or edges
    def managePropertiesEntity(self, entTlp, entN4J, entProperties):
        """Copy the neo4j entity's properties onto the tulip entity.

        Can be used with nodes or edges.  Properties named in self.labels set
        viewLabel (truncated to the first 3 words when longer); properties in
        self.colors set viewColor; every property is also mirrored as a tulip
        string property of the same name.
        """
        for i in entN4J.properties:
            tmpValue = str(entN4J.properties[i])
            if i in self.labels:
                word = tmpValue.split(' ')
                if len(word) > 3:
                    tmpValue = "%s %s %s ..." % (word[0], word[1], word[2])
                entProperties["viewLabel"] = self.tulip_graph.getStringProperty("viewLabel")
                entProperties["viewLabel"][entTlp] = tmpValue
            if i in self.colors.keys():
                entProperties["viewColor"] = self.tulip_graph.getColorProperty("viewColor")
                entProperties["viewColor"][entTlp] = self.colors.get(i)
            if i in entProperties:
                entProperties[i][entTlp] = tmpValue
            else:
                # First time this property is seen: create the tulip property.
                entProperties[i] = self.tulip_graph.getStringProperty(i)
                entProperties[i][entTlp] = tmpValue
def manageLabelsNode(self, labelsNode, nodeTlp, nodeN4J):
    """Record the neo4j property names of a node as its Tulip label vector."""
    labelsNode[nodeTlp] = [name for name in nodeN4J.properties]
# def manageLabelEdge(labelEdge,edgeTlp,edgeN4J):
# labelEdge[edgeTlp] = edgeN4J.type
# def testTransmmission(graph,node):
# testNul = self.tulip_graph.getIntegerProperty("testNul")
# strNul = "testNul"
# exec(strNul)[node] = 1
def create(self, private_gid):
    """Build (or load from cache) the tag-to-tag co-occurrence graph,
    filter it by the minimum occurrence threshold, rescale occurrence
    values into [1, nb_step] and save it as <tlp_path><private_gid>.tlp.
    """
    # Entities properties
    # NOTE(review): nodeProperties is never used below.
    nodeProperties = {}
    edgeProperties = {}
    max_occ = 1
    # Rebuild from the Post/Comment/Tag graph when no cached "TTT" file
    # exists or a fresh build was requested.
    if (not os.path.exists("%s%s.tlp" % (config['exporter']['tlp_path'], "TTT"))) or self.force_fresh == 1:
        creatorPCT = CreatePostCommentTagTlp(self.date_start, self.date_end, self.force_fresh)
        creatorPCT.create()
        self.tulip_graph = tlp.loadGraph("%s%s.tlp" % (config['exporter']['tlp_path'], "PostCommentTag"))
        print("Compute Tag-Tag graph")
        tmpIDNode = self.tulip_graph.getStringProperty("tmpIDNode")
        labelsNodeTlp = self.tulip_graph.getStringVectorProperty("labelsNodeTlp")
        labelEdgeTlp = self.tulip_graph.getStringProperty("labelEdgeTlp")
        entityType = self.tulip_graph.getStringProperty("entityType")
        edgeProperties["occ"] = self.tulip_graph.getIntegerProperty("occ")
        # "TagTagSelection" marks the tag nodes/edges that belong to the
        # tag-to-tag subgraph extracted below.
        edgeProperties["TagTagSelection"] = self.tulip_graph.getBooleanProperty("TagTagSelection")
        edgeProperties["TagTagSelection"].setAllNodeValue(False)
        edgeProperties["TagTagSelection"].setAllEdgeValue(False)
        edgeProperties["viewLabel"] = self.tulip_graph.getStringProperty("viewLabel")
        edgeProperties["type"] = self.tulip_graph.getStringProperty("type")
        edgeProperties["viewColor"] = self.tulip_graph.getColorProperty("viewColor")
        edgeProperties["viewSize"] = self.tulip_graph.getSizeProperty("viewSize")
        edgeProperties['tag_1'] = self.tulip_graph.getStringProperty("tag_1")
        edgeProperties['tag_2'] = self.tulip_graph.getStringProperty("tag_2")
        # Two tags co-occur when both point at the same post or comment.
        # Each unordered pair is visited twice (once from each tag), so
        # "occ" counts twice per co-occurrence; labels divide it by 2.
        for t1 in self.tulip_graph.getNodes():
            if entityType[t1] == "tag":
                edgeProperties["TagTagSelection"][t1] = True
                for p in self.tulip_graph.getOutNodes(t1):
                    if entityType[p] == "post" or entityType[p] == "comment":
                        for t2 in self.tulip_graph.getInNodes(p):
                            if t1 != t2:
                                e=self.tulip_graph.existEdge(t1, t2, False)
                                if e.isValid():
                                    edgeProperties["occ"][e] += 1
                                    # NOTE(review): "/2" is true division on
                                    # Python 3, so odd counts produce labels
                                    # like "occ (1.5)" — confirm whether "//"
                                    # was intended.
                                    edgeProperties["viewLabel"][e] = "occ ("+str(edgeProperties["occ"][e]/2)+")"
                                    labelEdgeTlp[e] = "occ ("+str(edgeProperties["occ"][e]/2)+")"
                                    e_val = edgeProperties['occ'][e]
                                    max_occ = max(max_occ, e_val)
                                    # A tag node's "occ"/viewSize tracks its
                                    # heaviest incident edge.
                                    if e_val > edgeProperties["occ"][t1]:
                                        edgeProperties["occ"][t1] = e_val
                                        edgeProperties["viewSize"][t1] = tlp.Size(e_val, e_val, e_val)
                                    if e_val > edgeProperties["occ"][t2]:
                                        edgeProperties["occ"][t2] = e_val
                                        edgeProperties["viewSize"][t2] = tlp.Size(e_val, e_val, e_val)
                                else:
                                    # First co-occurrence of this pair: create
                                    # the edge and select both endpoints.
                                    e = self.tulip_graph.addEdge(t1, t2)
                                    edgeProperties["occ"][e] = 1
                                    edgeProperties["TagTagSelection"][t2] = True
                                    edgeProperties["TagTagSelection"][e] = True
                                    edgeProperties["viewLabel"][e] = "occ ("+str(edgeProperties["occ"][e]/2)+")"
                                    labelEdgeTlp[e] = "occ ("+str(edgeProperties["occ"][e]/2)+")"
                                    edgeProperties["type"][e] = "curve"
                                    edgeProperties["viewColor"][e] = self.colors['edges']
                                    edgeProperties['tag_1'][e] = tmpIDNode[t1]
                                    edgeProperties['tag_2'][e] = tmpIDNode[t2]
        # Cache the selected subgraph so later runs can skip the build.
        sg = self.tulip_graph.addSubGraph(edgeProperties["TagTagSelection"])
        tlp.saveGraph(sg, "%s%s.tlp" % (config['exporter']['tlp_path'], "TTT"))
    else:
        sg = tlp.loadGraph("%s%s.tlp" % (config['exporter']['tlp_path'], "TTT"))
        edgeProperties["occ"] = sg.getIntegerProperty("occ")
        max_occ = edgeProperties["occ"].getNodeMax()
    print("Filter occ")
    edgeProperties["occ"] = sg.getIntegerProperty("occ")
    # Drop nodes/edges under the occurrence threshold, then rescale the
    # surviving "occ" values into [1, nb_step] for visualisation.
    for t in sg.getNodes():
        if edgeProperties["occ"][t]/2 < self.filter_occ:
            sg.delNode(t)
            continue
        tmp_val = (float(edgeProperties["occ"][t])/max_occ)*(self.nb_step-1)+1
        edgeProperties["occ"][t] = int(tmp_val)
        for e in sg.getOutEdges(t):
            if edgeProperties["occ"][e]/2 < self.filter_occ:
                sg.delEdge(e)
                continue
            tmp_val = (float(edgeProperties["occ"][e])/max_occ)*(self.nb_step-1)+1
            edgeProperties["occ"][e] = int(tmp_val)
    print("Export")
    tlp.saveGraph(sg, "%s%s.tlp" % (config['exporter']['tlp_path'], private_gid))
|
jason-vallet/graph-ryder-api
|
graphtulip/createtagfulltlp.py
|
Python
|
lgpl-3.0
| 8,794
|
#
# Project Kimchi
#
# Copyright IBM, Corp. 2013-2014
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import base64
import cherrypy
import fcntl
import ldap
import multiprocessing
import os
import PAM
import pty
import re
import termios
import time
import urllib2
from kimchi import template
from kimchi.config import config
from kimchi.exception import InvalidOperation, OperationFailed
from kimchi.utils import get_all_tabs, run_command
# Keys of the user-info dict stored in the cherrypy session.
USER_NAME = 'username'
USER_GROUPS = 'groups'
USER_ROLES = 'roles'
# Session key holding the last-refresh timestamp (robot idle timeout).
REFRESH = 'robot-refresh'
# UI tab names; each user carries a per-tab role map built from these.
tabs = get_all_tabs()
def redirect_login():
    """Redirect the client to the login page with an HTTP 303.

    For HTML page requests the originally requested URL is preserved in
    the ``next`` query parameter so the user returns there after login.
    """
    target = "/login.html"
    if cherrypy.request.path_info.endswith(".html"):
        requested = cherrypy.serving.request.request_line.split()[1]
        quoted = urllib2.quote(requested.encode('utf-8'), safe="")
        target = "/login.html?next=%s" % quoted
    raise cherrypy.HTTPRedirect(target, 303)
def debug(msg):
    """No-op logging hook; re-enable the cherrypy call to trace auth flow."""
    # cherrypy.log.error(msg)
    pass
class User(object):
    """Factory for authentication backends.

    Each subclass declares an ``auth_type`` string and a static
    ``authenticate(username, password, ...)`` method; ``get`` dispatches
    on the requested type.
    """

    @classmethod
    def get(cls, auth_args):
        """Authenticate using the backend named by auth_args['auth_type'].

        Returns a backend instance for the username on success, None when
        the password cannot be verified (or no backend matches), and
        re-raises OperationFailed from the backend.
        """
        requested = auth_args.pop('auth_type')
        for backend in cls.__subclasses__():
            if backend.auth_type != requested:
                continue
            try:
                if not backend.authenticate(**auth_args):
                    debug("cannot verify user with the given password")
                    return None
            except OperationFailed:
                raise
            return backend(auth_args['username'])
class PAMUser(User):
    """Authentication backend that validates credentials against PAM."""

    auth_type = "pam"

    def __init__(self, username):
        # user-info dict handed back to the session layer via get_user()
        self.user = {}
        self.user[USER_NAME] = username
        self.user[USER_GROUPS] = None
        # after adding support to change user roles that info should be read
        # from a specific objstore and fallback to default only if any entry is
        # found
        self.user[USER_ROLES] = dict.fromkeys(tabs, 'user')

    def get_groups(self):
        # Resolve the user's POSIX groups with `id -Gn`; left untouched
        # (None) when the command fails.
        out, err, rc = run_command(['id', '-Gn', self.user[USER_NAME]])
        if rc == 0:
            self.user[USER_GROUPS] = out.rstrip().split(" ")
        return self.user[USER_GROUPS]

    def get_roles(self):
        # sudo-capable users are promoted to admin on every tab.
        if self.has_sudo():
            # after adding support to change user roles that info should be
            # read from a specific objstore and fallback to default only if
            # any entry is found
            self.user[USER_ROLES] = dict.fromkeys(tabs, 'admin')
        return self.user[USER_ROLES]

    def has_sudo(self):
        """Return a truthy value when the user may run any command via sudo.

        The check runs in a child process so the controlling-tty setup in
        _has_sudo cannot disturb the server process.
        """
        result = multiprocessing.Value('i', 0, lock=False)
        p = multiprocessing.Process(target=self._has_sudo, args=(result,))
        p.start()
        p.join()
        return result.value

    def _has_sudo(self, result):
        """Child-process body for has_sudo; stores the answer in *result*."""
        result.value = False
        # Acquire a pseudo-tty as controlling terminal in a fresh session;
        # presumably needed for sudo configurations that require a tty —
        # TODO confirm.
        _master, slave = pty.openpty()
        os.setsid()
        fcntl.ioctl(slave, termios.TIOCSCTTY, 0)
        out, err, exit = run_command(['sudo', '-l', '-U', self.user[USER_NAME],
                                      'sudo'])
        if exit == 0:
            debug("User %s is allowed to run sudo" % self.user[USER_NAME])
            # sudo allows a wide range of configurations, such as controlling
            # which binaries the user can execute with sudo.
            # For now, we will just check whether the user is allowed to run
            # any command with sudo.
            out, err, exit = run_command(['sudo', '-l', '-U',
                                          self.user[USER_NAME]])
            for line in out.split('\n'):
                if line and re.search("(ALL)", line):
                    result.value = True
                    # NOTE(review): this message formats result.value, not the
                    # username — looks like a bug in the log text.
                    debug("User %s can run any command with sudo" %
                          result.value)
                    return
            debug("User %s can only run some commands with sudo" %
                  self.user[USER_NAME])
        else:
            debug("User %s is not allowed to run sudo" % self.user[USER_NAME])

    def get_user(self):
        return self.user

    @staticmethod
    def authenticate(username, password, service="passwd"):
        '''Returns True if authenticate is OK via PAM.'''
        def _pam_conv(auth, query_list, userData=None):
            # PAM conversation callback: answer echo-on prompts with the
            # username, echo-off prompts with the password, and log prompt
            # errors; unknown prompt types abort the conversation.
            resp = []
            for i in range(len(query_list)):
                query, qtype = query_list[i]
                if qtype == PAM.PAM_PROMPT_ECHO_ON:
                    resp.append((username, 0))
                elif qtype == PAM.PAM_PROMPT_ECHO_OFF:
                    resp.append((password, 0))
                elif qtype == PAM.PAM_PROMPT_ERROR_MSG:
                    cherrypy.log.error_log.error(
                        "PAM authenticate prompt error: %s" % query)
                    resp.append(('', 0))
                elif qtype == PAM.PAM_PROMPT_TEXT_INFO:
                    resp.append(('', 0))
                else:
                    return None
            return resp

        auth = PAM.pam()
        auth.start(service)
        auth.set_item(PAM.PAM_USER, username)
        auth.set_item(PAM.PAM_CONV, _pam_conv)
        try:
            auth.authenticate()
        except PAM.error, (resp, code):
            # PAM rejected the credentials; surface a generic auth failure.
            msg_args = {'username': username, 'code': code}
            raise OperationFailed("KCHAUTH0001E", msg_args)
        return True
class LDAPUser(User):
    """Authentication backend that validates credentials against LDAP."""

    auth_type = "ldap"

    def __init__(self, username):
        self.user = {}
        self.user[USER_NAME] = username
        self.user[USER_GROUPS] = list()
        # FIXME: user roles will be changed according roles assignment after
        # objstore is integrated
        self.user[USER_ROLES] = dict.fromkeys(tabs, 'user')

    @staticmethod
    def authenticate(username, password):
        """Search the configured LDAP server for the user, then bind as the
        matched DN with the supplied password.

        Raises OperationFailed on invalid credentials (KCHAUTH0002E), a
        wrong search base (KCHAUTH0005E), or any other LDAP failure
        (KCHAUTH0001E).
        """
        ldap_server = config.get("authentication", "ldap_server").strip('"')
        ldap_search_base = config.get(
            "authentication", "ldap_search_base").strip('"')
        ldap_search_filter = config.get(
            "authentication", "ldap_search_filter",
            vars={"username": username.encode("utf-8")}).strip('"')
        connect = ldap.open(ldap_server)
        try:
            result = connect.search_s(
                ldap_search_base, ldap.SCOPE_SUBTREE, ldap_search_filter)
            if len(result) == 0:
                entity = ldap_search_filter % {'username': username}
                raise ldap.LDAPError("Invalid ldap entity:%s" % entity)
            # Bind as the first matching DN; a successful bind proves the
            # password is valid.
            connect.bind_s(result[0][0], password)
            connect.unbind_s()
            return True
        except ldap.INVALID_CREDENTIALS:
            # invalid user password
            raise OperationFailed("KCHAUTH0002E")
        except ldap.NO_SUCH_OBJECT:
            # ldap search base specified wrongly.
            raise OperationFailed("KCHAUTH0005E", {"item": 'ldap_search_base',
                                                   "value": ldap_search_base})
        except ldap.LDAPError, e:
            arg = {"username": username, "code": e.message}
            raise OperationFailed("KCHAUTH0001E", arg)

    def get_groups(self):
        # Group resolution is not implemented for LDAP; always the empty
        # list set in __init__.
        return self.user[USER_GROUPS]

    def get_roles(self):
        # Users listed in ldap_admin_id become admin on every tab.
        admin_ids = config.get(
            "authentication", "ldap_admin_id").strip('"').split(',')
        for admin_id in admin_ids:
            if self.user[USER_NAME] == admin_id.strip():
                self.user[USER_ROLES] = dict.fromkeys(tabs, 'admin')
        return self.user[USER_ROLES]

    def get_user(self):
        return self.user
def from_browser():
    """Return True when the request came from the browser UI.

    jQuery AJAX requests carry an "X-Requested-With: XMLHttpRequest"
    header; REST tools do not, which is how we decide whether HTTP
    Basic Auth may be offered.
    """
    header = cherrypy.request.headers.get("X-Requested-With", None)
    return header == "XMLHttpRequest"
def check_auth_session():
    """Return True if an established, authenticated session exists.

    Robot clients (Kimchi-Robot header) get an idle-timeout check: when
    no refresh happened within the session timeout the session is
    expired with a 401; otherwise their refresh timestamp is bumped.
    """
    cherrypy.session.acquire_lock()
    user = cherrypy.session.get(USER_NAME, None)
    cherrypy.session.release_lock()
    if user is None:
        debug("Session not found")
        return False
    debug("Session authenticated for user %s" % user)
    if cherrypy.request.headers.get('Kimchi-Robot') == "kimchi-robot":
        idle = time.time() - cherrypy.session[REFRESH]
        if idle > cherrypy.session.timeout * 60:
            cherrypy.session[USER_NAME] = None
            cherrypy.lib.sessions.expire()
            raise cherrypy.HTTPError(401, "sessionTimeout")
        cherrypy.session[REFRESH] = time.time()
    return True
def check_auth_httpba():
    """
    REST API users may authenticate with HTTP Basic Auth. This is not allowed
    for the UI because web browsers would cache the credentials and make it
    impossible for the user to log out without closing their browser completely
    """
    if from_browser() or not template.can_accept('application/json'):
        return False
    header = cherrypy.request.headers.get('AUTHORIZATION')
    if not header:
        debug("No authentication headers found")
        return False
    debug("Authheader: %s" % header)
    # TODO: what happens if you get an auth header that doesn't use basic auth?
    encoded = re.sub("Basic ", "", header)
    decoded = base64.b64decode(encoded.encode("ASCII"))
    # TODO: test how this handles ':' characters in username/passphrase.
    user, passwd = decoded.decode().split(":", 1)
    return login(user, passwd)
def login(username, password, **kwargs):
    """Verify the credentials and establish an authenticated session.

    Returns the user-info dict on success or None when the credentials
    cannot be verified. Extra keyword arguments are accepted and ignored.
    """
    method = config.get("authentication", "method")
    user = User.get({'auth_type': method,
                     'username': username,
                     'password': password})
    if not user:
        debug("User cannot be verified with the supplied password")
        return None
    debug("User verified, establishing session")
    cherrypy.session.acquire_lock()
    cherrypy.session.regenerate()
    cherrypy.session[USER_NAME] = username
    cherrypy.session[USER_GROUPS] = user.get_groups()
    cherrypy.session[USER_ROLES] = user.get_roles()
    cherrypy.session[REFRESH] = time.time()
    cherrypy.session.release_lock()
    return user.get_user()
def logout():
    """Drop the authenticated user from the current session and close it."""
    cherrypy.session.acquire_lock()
    cherrypy.session[USER_NAME] = None
    cherrypy.session[REFRESH] = 0
    cherrypy.session.release_lock()
    cherrypy.lib.sessions.close()
def kimchiauth():
    """CherryPy tool hook that enforces authentication on a request.

    Accepts an existing session or HTTP Basic Auth; otherwise redirects
    page requests to the login page and answers API clients with 401.
    """
    debug("Entering kimchiauth...")
    session_missing = cherrypy.session.missing
    if check_auth_session():
        return
    if check_auth_httpba():
        return
    # not a REST full request, redirect login page directly
    headers = cherrypy.request.headers
    if "Accept" in headers and not template.can_accept('application/json'):
        redirect_login()
    # from browser, and it stays on one page.
    if session_missing and cherrypy.request.cookie.get("lastPage") is not None:
        raise cherrypy.HTTPError(401, "sessionTimeout")
    if not from_browser():
        cherrypy.response.headers['WWW-Authenticate'] = 'Basic realm=kimchi'
    error = InvalidOperation('KCHAUTH0002E')
    raise cherrypy.HTTPError(401, error.message.encode('utf-8'))
|
gouzongmei/t1
|
src/kimchi/auth.py
|
Python
|
lgpl-3.0
| 11,868
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import uuid
from .requirements import read_requirements_list
from .xmlutils import *
from .packitem import PackItem
class Spell(PackItem):
    """An L5R spell loaded from the XML data files.

    Equality and hashing are based on the ``id`` attribute only.
    """

    def __init__(self):
        super(Spell, self).__init__()
        # Random default id; overwritten by the XML id when loaded.
        self.id = uuid.uuid1().hex
        self.name = None
        self.area = None
        self.mastery = None
        self.range = None
        self.duration = None
        self.element = None
        self.tags = None
        self.require = None
        self.desc = None
        self.elements = []
        self.raises = []

    @staticmethod
    def build_from_xml(elem):
        """Build and return a Spell from its XML element.

        Reads the scalar attributes, the tag/requirement lists, the
        Description text, and the optional Raises / MultiElement children.
        """
        f = Spell()
        f.id = elem.attrib['id']
        f.name = read_attribute(elem, 'name')
        f.area = read_attribute(elem, 'area')
        f.mastery = read_attribute_int(elem, 'mastery')
        f.range = read_attribute(elem, 'range')
        f.duration = read_attribute(elem, 'duration')
        f.element = read_attribute(elem, 'element')
        f.tags = read_spell_tag_list(elem)
        f.require = read_requirements_list(elem)
        f.desc = read_sub_element_text(elem, 'Description', "").strip()
        f.elements = []
        f.raises = []
        if elem.find('Raises') is not None:
            for se in elem.find('Raises').iter():
                if se.tag == 'Raise':
                    f.raises.append(se.text)
        # support for Multi-Element spells
        if elem.find('MultiElement') is not None:
            for se in elem.find('MultiElement').iter():
                if se.tag == 'Element':
                    f.elements.append(se.text)
        return f

    def write_into(self, elem):
        """Serialization hook; not implemented for spells."""
        pass

    def __str__(self):
        return self.name or self.id

    def __unicode__(self):
        return self.name or self.id

    def __eq__(self, obj):
        # bool(...) so the comparison always yields True/False instead of
        # leaking a falsy operand (e.g. None) out of ``==``.
        return bool(obj) and obj.id == self.id

    def __ne__(self, obj):
        return not self.__eq__(obj)

    def __hash__(self):
        return self.id.__hash__()
|
OpenNingia/l5rcm-data-access
|
l5rdal/spell.py
|
Python
|
lgpl-3.0
| 2,752
|